author    Justin R. Miller <incanus@codesorcery.net>  2014-05-29 14:42:36 -0700
committer Justin R. Miller <incanus@codesorcery.net>  2014-05-29 14:42:36 -0700
commit    d25229ebc8aad0ca432ba27f155cbdebb2edf49d (patch)
tree      5008e84577a389d890ebf57f85a85f7ec406b635
parent    0580874afae80fc6a11249355e8e5855a7264e5f (diff)
parent    a83e749e0977ae057e9ae4e0e82cd2d3cf5b9bba (diff)
Merge branch 'master' of github.com:mapbox/llmr-native
-rw-r--r--  .gitignore | 1
-rw-r--r--  .travis.yml | 20
-rw-r--r--  Makefile | 24
-rw-r--r--  bin/style.js | 439
-rw-r--r--  common.gypi | 14
-rw-r--r--  deps/gyp/PRESUBMIT.py | 2
-rw-r--r--  deps/gyp/codereview.settings | 10
-rw-r--r--  deps/gyp/pylib/gyp/MSVSSettings.py | 73
-rw-r--r--  deps/gyp/pylib/gyp/MSVSUtil.py | 21
-rw-r--r--  deps/gyp/pylib/gyp/MSVSVersion.py | 2
-rw-r--r--  deps/gyp/pylib/gyp/common.py | 67
-rw-r--r--  deps/gyp/pylib/gyp/generator/android.py | 77
-rw-r--r--  deps/gyp/pylib/gyp/generator/eclipse.py | 7
-rw-r--r--  deps/gyp/pylib/gyp/generator/msvs.py | 88
-rw-r--r--  deps/gyp/pylib/gyp/generator/ninja.py | 163
-rw-r--r--  deps/gyp/pylib/gyp/generator/xcode.py | 16
-rw-r--r--  deps/gyp/pylib/gyp/input.py | 403
-rwxr-xr-x  deps/gyp/pylib/gyp/mac_tool.py | 6
-rw-r--r--  deps/gyp/pylib/gyp/msvs_emulation.py | 20
-rw-r--r--  deps/gyp/pylib/gyp/simple_copy.py | 46
-rwxr-xr-x  deps/gyp/pylib/gyp/win_tool.py | 31
-rw-r--r--  deps/gyp/pylib/gyp/xcode_emulation.py | 343
-rw-r--r--  deps/gyp/pylib/gyp/xcode_ninja.py | 257
-rwxr-xr-x  deps/gyp/setup.py | 19
-rw-r--r--  include/llmr/geometry/glyph_atlas.hpp | 3
-rw-r--r--  include/llmr/map/map.hpp | 3
-rw-r--r--  include/llmr/map/tile.hpp | 1
-rw-r--r--  include/llmr/map/tile_parser.hpp | 8
-rw-r--r--  include/llmr/map/vector_tile_data.hpp | 4
-rw-r--r--  include/llmr/platform/gl.hpp | 2
-rw-r--r--  include/llmr/platform/platform.hpp | 4
-rw-r--r--  include/llmr/renderer/bucket.hpp | 1
-rw-r--r--  include/llmr/renderer/prerendered_texture.hpp | 1
-rw-r--r--  include/llmr/renderer/text_bucket.hpp | 3
-rw-r--r--  include/llmr/style/properties.hpp | 1
-rw-r--r--  include/llmr/style/style_parser.hpp | 2
-rw-r--r--  include/llmr/text/collision.hpp | 2
-rw-r--r--  include/llmr/text/glyph.hpp | 5
-rw-r--r--  include/llmr/text/glyph_store.hpp | 80
-rw-r--r--  include/llmr/text/placement.hpp | 4
-rw-r--r--  include/llmr/util/constants.hpp | 5
-rw-r--r--  include/llmr/util/utf.hpp | 45
-rw-r--r--  ios/img/Icon-40.png | bin 0 -> 951 bytes
-rw-r--r--  ios/img/Icon-40@2x.png | bin 0 -> 1528 bytes
-rw-r--r--  ios/img/Icon-60.png | bin 9366 -> 1313 bytes
-rw-r--r--  ios/img/Icon-60@2x.png | bin 28880 -> 2091 bytes
-rw-r--r--  ios/img/Icon-72.png | bin 12517 -> 1418 bytes
-rw-r--r--  ios/img/Icon-72@2x.png | bin 38964 -> 2442 bytes
-rw-r--r--  ios/img/Icon-76.png | bin 13775 -> 1447 bytes
-rw-r--r--  ios/img/Icon-76@2x.png | bin 42213 -> 2520 bytes
-rw-r--r--  ios/img/Icon-Small-50.png | bin 6960 -> 1118 bytes
-rw-r--r--  ios/img/Icon-Small-50@2x.png | bin 21430 -> 1839 bytes
-rw-r--r--  ios/img/Icon-Small.png | bin 3270 -> 684 bytes
-rw-r--r--  ios/img/Icon-Small@2x.png | bin 8688 -> 1231 bytes
-rw-r--r--  ios/img/Icon-Spotlight-40.png | bin 5052 -> 951 bytes
-rw-r--r--  ios/img/Icon-Spotlight-40@2x.png | bin 14870 -> 1528 bytes
-rw-r--r--  ios/img/Icon.png | bin 8532 -> 1171 bytes
-rw-r--r--  ios/img/Icon@2x.png | bin 26725 -> 1931 bytes
-rw-r--r--  ios/img/iTunesArtwork.png | bin 0 -> 5655 bytes
-rw-r--r--  ios/img/iTunesArtwork@2x.png | bin 0 -> 9293 bytes
-rw-r--r--  macosx/Icon.icns | bin 359231 -> 62095 bytes
-rw-r--r--  proto/glyphs.proto | 33
-rwxr-xr-x  setup-libraries.sh | 17
-rwxr-xr-x  src/clipper/clipper.cpp | 4
-rw-r--r--  src/geometry/glyph_atlas.cpp | 2
-rw-r--r--  src/map/map.cpp | 4
-rw-r--r--  src/map/tile_parser.cpp | 107
-rw-r--r--  src/map/transform.cpp | 4
-rw-r--r--  src/map/vector_tile_data.cpp | 2
-rw-r--r--  src/platform/request.cpp | 34
-rw-r--r--  src/renderer/painter.cpp | 16
-rw-r--r--  src/renderer/painter_fill.cpp | 4
-rw-r--r--  src/renderer/painter_prerender.cpp | 4
-rw-r--r--  src/renderer/text_bucket.cpp | 16
-rw-r--r--  src/style/properties.cpp | 6
-rw-r--r--  src/style/style.cpp | 32
-rw-r--r--  src/style/style_parser.cpp | 15
-rw-r--r--  src/text/collision.cpp | 20
-rw-r--r--  src/text/glyph.cpp | 17
-rw-r--r--  src/text/glyph_store.cpp | 185
-rw-r--r--  src/text/placement.cpp | 72
-rw-r--r--  src/util/constants.cpp | 5
-rw-r--r--  src/util/threadpool.cpp | 1
-rw-r--r--  test/headless.cpp | 9
84 files changed, 2150 insertions(+), 782 deletions(-)
diff --git a/.gitignore b/.gitignore
index a85ec00140..b3b9aabb17 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
+.DS_Store
*.o
*.xcodeproj
*.o
diff --git a/.travis.yml b/.travis.yml
index 9dde1b9bb2..99dad26fd3 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -4,6 +4,11 @@ compiler:
- gcc
- clang
+env:
+ matrix:
+ - BUILDTYPE=Release
+ - BUILDTYPE=Debug
+
cache:
- apt
- directories:
@@ -18,14 +23,21 @@ before_install:
install:
- ./setup-libraries.sh
+- rm -rf mapnik-packaging/out/packages
+
before_script:
-- if [ ${CXX} = "g++" ]; then export CXX="g++-4.8" ; fi
+- if [[ ${CXX} == "g++" ]]; then export CXX="g++-4.8" ; fi
+- if [[ ${BUILDTYPE} == "Debug" ]]; then export CXXFLAGS="-fsanitize=address -fsanitize=thread"; export LDFLAGS="-fsanitize=address -fsanitize=thread"; fi
script:
-- make llmr
-- make linux
-- make run-tests
+- make llmr BUILDTYPE=${BUILDTYPE}
+- make clean
+- make linux BUILDTYPE=${BUILDTYPE}
+- make clean
+- make run-tests BUILDTYPE=${BUILDTYPE}
+# TODO - port to linux
+#- make run-headless-test BUILDTYPE=${BUILDTYPE}
notifications:
- hipchat: WB52YkcbCGMbNcStsGeaoRO7cyBSK4wX2ZHo2Y4b@GL
diff --git a/Makefile b/Makefile
index b864ca4d86..b10cdbb7b7 100644
--- a/Makefile
+++ b/Makefile
@@ -9,7 +9,7 @@ all: llmr
# Builds the regular library
llmr: config.gypi llmr.gyp node
deps/run_gyp llmr.gyp --depth=. -Goutput_dir=.. --generator-output=./build/llmr -f make
- make -C build/llmr V=$(V) llmr-x86
+ $(MAKE) -C build/llmr BUILDTYPE=$(BUILDTYPE) V=$(V) llmr-x86
node:
@if [ ! `which node` ]; then echo 'error: depends on node.js. please make sure node is on your PATH'; exit 1; fi;
@@ -20,7 +20,7 @@ build/test/Makefile: src common config.gypi test/test.gyp
deps/run_gyp test/test.gyp --depth=. -Goutput_dir=.. --generator-output=./build/test -f make
test: build/test/Makefile
- make -C build/test BUILDTYPE=$(BUILDTYPE) V=$(V) test
+ $(MAKE) -C build/test BUILDTYPE=$(BUILDTYPE) V=$(V) test
# Runs all test cases
run-tests: test
@@ -29,13 +29,13 @@ run-tests: test
done
test/%:
- make -C build/test BUILDTYPE=$(BUILDTYPE) V=$(V) $*
+ $(MAKE) -C build/test BUILDTYPE=$(BUILDTYPE) V=$(V) $*
build/$(BUILDTYPE)/test_$*
# Only runs headless test case
run-headless-test: build/test/Makefile
- make -C build/test BUILDTYPE=Debug V=$(V) headless
- build/Debug/test_headless
+ $(MAKE) -C build/test BUILDTYPE=$(BUILDTYPE) V=$(V) headless
+ build/$(BUILDTYPE)/test_headless
##### Makefile builds ##########################################################
@@ -44,7 +44,7 @@ run-headless-test: build/test/Makefile
# Builds the linux app with make. This is also used by Travis CI
linux: config.gypi linux/llmr-app.gyp node
deps/run_gyp linux/llmr-app.gyp --depth=. -Goutput_dir=.. --generator-output=./build/linux -f make
- make -C build/linux V=$(V) linuxapp
+ $(MAKE) -C build/linux BUILDTYPE=$(BUILDTYPE) V=$(V) linuxapp
# Executes the Linux binary
run-linux: linux
@@ -55,7 +55,7 @@ run-linux: linux
# Builds the OS X app with make.
osx: config.gypi macosx/llmr-app.gyp node
deps/run_gyp macosx/llmr-app.gyp --depth=. -Goutput_dir=.. --generator-output=./build/macosx -f make
- make -C build/macosx V=$(V) osxapp
+ $(MAKE) -C build/macosx BUILDTYPE=$(BUILDTYPE) V=$(V) osxapp
# Executes the OS X binary
run-osx: osx
@@ -93,11 +93,11 @@ lproj: config.gypi linux/llmr-app.gyp clear_xcode_cache node
##### Maintenance operations ###################################################
clean: clear_xcode_cache
- -rm -rf ./build/Release
- -rm -rf ./build/Debug
- -rm -f include/llmr/shader/shaders.hpp
- -rm -f include/llmr/style/resources.hpp
- -rm -f src/style/resources.cpp
+ -find ./deps/gyp -name "*.pyc" -exec rm {} \;
+ -rm -rf ./build/
+ -rm -f ./include/llmr/shader/shaders.hpp
+ -rm -f ./include/llmr/style/resources.hpp
+ -rm -f ./src/style/resources.cpp
distclean: clean
-rm -rf ./build
diff --git a/bin/style.js b/bin/style.js
index 1dbe2b2cf7..c831fd4144 100644
--- a/bin/style.js
+++ b/bin/style.js
@@ -267,6 +267,7 @@ module.exports = {
"value": 10,
"path": "curve",
"text_field": "ele",
+ "font": "Open Sans Regular, Arial Unicode MS Regular",
"fontSize": 10,
"feature_type": "line",
"type": "text"
@@ -648,6 +649,7 @@ module.exports = {
"layer": "country_label",
"text_field": "name",
"path": "horizontal",
+ "font": "Open Sans Bold, Arial Unicode MS Bold",
"fontSize": 24,
"feature_type": "point",
"type": "text"
@@ -664,6 +666,7 @@ module.exports = {
"type": "text",
"text_field": "name",
"path": "curve",
+ "font": "Open Sans Italic, Arial Unicode MS Regular",
"fontSize": 16
},
"marine_label_point_1": {
@@ -675,6 +678,7 @@ module.exports = {
"path": "horizontal",
"field": "labelrank",
"value": 1,
+ "font": "Open Sans Italic, Arial Unicode MS Regular",
"fontSize": 30
},
"marine_label_point_2": {
@@ -686,6 +690,7 @@ module.exports = {
"path": "horizontal",
"field": "labelrank",
"value": 2,
+ "font": "Open Sans Italic, Arial Unicode MS Regular",
"fontSize": 24
},
"marine_label_point_other": {
@@ -697,6 +702,7 @@ module.exports = {
"path": "horizontal",
"field": "labelrank",
"value": [3,4,5,6],
+ "font": "Open Sans Italic, Arial Unicode MS Regular",
"fontSize": 18
},
"state_label": {
@@ -704,19 +710,12 @@ module.exports = {
"layer": "state_label",
"text_field": "name",
"path": "horizontal",
+ "font": "Open Sans Regular, Arial Unicode MS Regular",
"fontSize": 16,
"feature_type": "point",
"type": "text",
"enabled": 4
},
- "place_label_city_point": {
- "source": "outdoors",
- "layer": "place_label",
- "field": "type",
- "value": "city",
- "type": "point",
- "size": 4
- },
"place_label_city": {
"source": "outdoors",
"layer": "place_label",
@@ -724,6 +723,7 @@ module.exports = {
"value": "city",
"text_field": "name",
"path": "horizontal",
+ "font": "Open Sans Semibold, Arial Unicode MS Bold",
"fontSize": 20,
"feature_type": "point",
"type": "text"
@@ -735,6 +735,7 @@ module.exports = {
"value": "town",
"text_field": "name",
"path": "horizontal",
+ "font": "Open Sans Semibold, Arial Unicode MS Bold",
"fontSize": 24,
"feature_type": "point",
"type": "text"
@@ -746,6 +747,7 @@ module.exports = {
"value": "village",
"text_field": "name",
"path": "horizontal",
+ "font": "Open Sans Semibold, Arial Unicode MS Bold",
"fontSize": 22,
"feature_type": "point",
"type": "text"
@@ -761,17 +763,49 @@ module.exports = {
],
"text_field": "name",
"path": "horizontal",
- "fontSize": 14,
+ "font": "Open Sans Semibold, Arial Unicode MS Bold",
+ "fontSize": 18,
"feature_type": "point",
"type": "text"
},
- "road_label": {
+ "road_label_1": {
"source": "outdoors",
"layer": "road_label",
+ "field": "class",
+ "value": ["motorway","main"],
"text_field": "name",
"path": "curve",
"padding": 2,
- "fontSize": 13,
+ "font": "Open Sans Regular, Arial Unicode MS Regular",
+ "fontSize": 18,
+ "feature_type": "line",
+ "type": "text",
+ "maxAngleDelta": 0.5
+ },
+ "road_label_2": {
+ "source": "outdoors",
+ "layer": "road_label",
+ "field": "class",
+ "value": ["street","street_limited"],
+ "text_field": "name",
+ "path": "curve",
+ "padding": 2,
+ "font": "Open Sans Regular, Arial Unicode MS Regular",
+ "fontSize": 16,
+ "feature_type": "line",
+ "type": "text",
+ "maxAngleDelta": 0.5
+ },
+ "road_label_3": {
+ "source": "outdoors",
+ "layer": "road_label",
+ "field": "class",
+ "value": ["service","driveway","path"],
+ "text_field": "name",
+ "path": "curve",
+ "padding": 2,
+ "font": "Open Sans Regular, Arial Unicode MS Regular",
+ "fontSize": 14,
"feature_type": "line",
"type": "text",
"maxAngleDelta": 0.5
@@ -781,6 +815,7 @@ module.exports = {
"layer": "water_label",
"text_field": "name",
"path": "horizontal",
+ "font": "Open Sans Semibold Italic, Arial Unicode MS Bold",
"fontSize": 12,
"feature_type": "point",
"type": "text"
@@ -790,6 +825,7 @@ module.exports = {
"layer": "waterway_label",
"text_field": "name",
"path": "curve",
+ "font": "Open Sans Semibold Italic, Arial Unicode MS Bold",
"fontSize": 12,
"textMinDistance": 15,
"feature_type": "line",
@@ -800,7 +836,25 @@ module.exports = {
"layer": "poi_label",
"icon": "maki",
"field": "scalerank",
- "value": [1, 2, 3],
+ "value": [1, 2],
+ "size": 12,
+ "type": "point"
+ },
+ "poi_3": {
+ "source": "outdoors",
+ "layer": "poi_label",
+ "icon": "maki",
+ "field": "scalerank",
+ "value": 3,
+ "size": 12,
+ "type": "point"
+ },
+ "poi_4": {
+ "source": "outdoors",
+ "layer": "poi_label",
+ "icon": "maki",
+ "field": "scalerank",
+ "value": 4,
"size": 12,
"type": "point"
},
@@ -813,7 +867,7 @@ module.exports = {
"size": 24,
"type": "point"
},
- "poi_label": {
+ "poi_label_1-2": {
"source": "outdoors",
"layer": "poi_label",
"field": "scalerank",
@@ -824,10 +878,36 @@ module.exports = {
"text_field": "name",
"path": "horizontal",
"padding": 2,
- "fontSize": 12,
+ "font": "Open Sans Semibold, Arial Unicode MS Bold",
+ "fontSize": 11,
"feature_type": "point",
- "type": "text",
- "alwaysVisible": true
+ "type": "text"
+ },
+ "poi_label_3": {
+ "source": "outdoors",
+ "layer": "poi_label",
+ "field": "scalerank",
+ "value": 3,
+ "text_field": "name",
+ "path": "horizontal",
+ "padding": 2,
+ "font": "Open Sans Semibold, Arial Unicode MS Bold",
+ "fontSize": 10,
+ "feature_type": "point",
+ "type": "text"
+ },
+ "poi_label_4": {
+ "source": "outdoors",
+ "layer": "poi_label",
+ "field": "scalerank",
+ "value": 4,
+ "text_field": "name",
+ "path": "horizontal",
+ "padding": 2,
+ "font": "Open Sans Semibold, Arial Unicode MS Bold",
+ "fontSize": 10,
+ "feature_type": "point",
+ "type": "text"
}
},
"structure": [
@@ -924,6 +1004,18 @@ module.exports = {
"bucket": "waterway_stream"
},
{
+ "name": "building_shadow",
+ "bucket": "building"
+ },
+ {
+ "name": "building",
+ "bucket": "building"
+ },
+ {
+ "name": "building_wall",
+ "bucket": "building"
+ },
+ {
"name": "hillshade_full_highlight",
"bucket": "hillshade_full_highlight"
},
@@ -992,18 +1084,6 @@ module.exports = {
"bucket": "aeroway_taxiway"
},
{
- "name": "building_shadow",
- "bucket": "building"
- },
- {
- "name": "building",
- "bucket": "building"
- },
- {
- "name": "building_wall",
- "bucket": "building"
- },
- {
"name": "tunnel_motorway_link_casing",
"bucket": "tunnel_motorway_link"
},
@@ -1280,10 +1360,6 @@ module.exports = {
"bucket": "state_label"
},
{
- "name": "place_label_city_point",
- "bucket": "place_label_city_point"
- },
- {
"name": "place_label_city",
"bucket": "place_label_city"
},
@@ -1300,12 +1376,16 @@ module.exports = {
"bucket": "place_label_other"
},
{
- "name": "poi_label",
- "bucket": "poi_label"
+ "name": "road_label_1",
+ "bucket": "road_label_1"
+ },
+ {
+ "name": "road_label_2",
+ "bucket": "road_label_2"
},
{
- "name": "road_label",
- "bucket": "road_label"
+ "name": "road_label_3",
+ "bucket": "road_label_3"
},
{
"name": "contour_label",
@@ -1324,6 +1404,26 @@ module.exports = {
"bucket": "poi"
},
{
+ "name": "poi_label_1-2",
+ "bucket": "poi_label_1-2"
+ },
+ {
+ "name": "poi_3",
+ "bucket": "poi_3"
+ },
+ {
+ "name": "poi_label_3",
+ "bucket": "poi_label_3"
+ },
+ {
+ "name": "poi_4",
+ "bucket": "poi_4"
+ },
+ {
+ "name": "poi_label_4",
+ "bucket": "poi_label_4"
+ },
+ {
"name": "poi_aerodrome",
"bucket": "poi_aerodrome"
}
@@ -1556,12 +1656,27 @@ module.exports = {
{"z": 8, "val": 1},
{"z": 12, "val": 2}
],
- "road_label_size": [
+ "road_label_1_size": [
"stops",
- {"z": 0, "val": 12},
+ {"z": 13, "val": 11},
{"z": 14, "val": 12},
{"z": 15, "val": 13},
- {"z": 22, "val": 13}
+ {"z": 16, "val": 14},
+ {"z": 17, "val": 16},
+ {"z": 18, "val": 18}
+ ],
+ "road_label_2_size": [
+ "stops",
+ {"z": 13, "val": 11},
+ {"z": 14, "val": 12},
+ {"z": 16, "val": 14},
+ {"z": 18, "val": 16}
+ ],
+ "road_label_3_size": [
+ "stops",
+ {"z": 15, "val": 10},
+ {"z": 16, "val": 12},
+ {"z": 18, "val": 14}
],
"fence_width": [
"stops",
@@ -1584,6 +1699,11 @@ module.exports = {
{"z": 19, "val": 12},
{"z": 20, "val": 24},
{"z": 21, "val": 48}
+ ],
+ "country_label_size": [
+ "stops",
+ {"z": 1, "val": 14},
+ {"z": 12, "val": 24}
]
},
"classes": [
@@ -1738,6 +1858,7 @@ module.exports = {
"color": "#103",
"antialias": false,
"prerender": true,
+ "prerender-size": 1024,
"prerender-blur": 1,
"opacity": [
"stops",
@@ -1750,6 +1871,7 @@ module.exports = {
"color": "#206",
"antialias": false,
"prerender": true,
+ "prerender-size": 1024,
"prerender-blur": 1,
"opacity": [
"stops",
@@ -1762,6 +1884,7 @@ module.exports = {
"color": "#fffff3",
"antialias": false,
"prerender": true,
+ "prerender-size": 1024,
"prerender-blur": 1,
"opacity": [
"stops",
@@ -1774,6 +1897,7 @@ module.exports = {
"color": "#ffd",
"antialias": false,
"prerender": true,
+ "prerender-size": 1024,
"prerender-blur": 1,
"opacity": [
"stops",
@@ -2127,11 +2251,8 @@ module.exports = {
"country_label": {
"color": "country_text",
"stroke": "rgba(255,255,255,0.5)",
- "size": [
- "stops",
- {"z": 6, "val": 14},
- {"z": 12, "val": 24}
- ]
+ "strokeWidth": 0.5,
+ "size": "country_label_size"
},
"country_label_line": {
"color": "country_text",
@@ -2145,7 +2266,6 @@ module.exports = {
"marine_label_point_1": {
"color": "#a9c4c7",
"size": ["stops",
- {"z": 0, "val": 20},
{"z": 3, "val": 20},
{"z": 4, "val": 25},
{"z": 5, "val": 30},
@@ -2156,7 +2276,6 @@ module.exports = {
"marine_label_point_2": {
"color": "#a9c4c7",
"size": ["stops",
- {"z": 0, "val": 13},
{"z": 3, "val": 13},
{"z": 4, "val": 14},
{"z": 5, "val": 20},
@@ -2168,7 +2287,6 @@ module.exports = {
"marine_label_point_other": {
"color": "#a9c4c7",
"size": ["stops",
- {"z": 0, "val": 12},
{"z": 3, "val": 12},
{"z": 4, "val": 13},
{"z": 5, "val": 15},
@@ -2190,9 +2308,6 @@ module.exports = {
{"z": 10, "val": 0}
]
},
- "place_label_city_point": {
- "color": "#4a4032"
- },
"place_label_city": {
"color": "#444",
"strokeWidth": 0.4,
@@ -2204,20 +2319,12 @@ module.exports = {
{"z": 7, "val": 14},
{"z": 14.99, "val": 20},
{"z": 15, "val": 0}
- ],
- "translate": [
- 0,
- [
- "stops",
- {"z": 4, "val": 10},
- {"z": 6, "val": 30},
- {"z": 7, "val": 0}
- ]
]
},
"place_label_town": {
"color": "#716656",
- "strokeWidth": 0.5,
+ "strokeWidth": 0.3,
+ "strokeBlur": 2,
"stroke": "text_stroke",
"size": [
"stops",
@@ -2229,7 +2336,8 @@ module.exports = {
},
"place_label_village": {
"color": "#635644",
- "strokeWidth": 0.5,
+ "strokeWidth": 0.3,
+ "strokeBlur": 2,
"stroke": "text_stroke",
"size": [
"stops",
@@ -2245,18 +2353,32 @@ module.exports = {
"stroke": "text_stroke",
"size": [
"stops",
- {"z": 0, "val": 10},
- {"z": 14, "val": 11},
- {"z": 15, "val": 12},
- {"z": 16, "val": 14}
+ {"z": 13, "val": 11},
+ {"z": 14, "val": 12},
+ {"z": 16, "val": 14},
+ {"z": 18, "val": 18}
]
},
- "road_label": {
+ "road_label_1": {
+ "color": "#585042",
+ "stroke": "land",
+ "strokeWidth": 0.6,
+ "strokeBlur": 2,
+ "size": "road_label_1_size"
+ },
+ "road_label_2": {
+ "color": "#585042",
+ "stroke": "land",
+ "strokeWidth": 0.6,
+ "strokeBlur": 2,
+ "size": "road_label_2_size"
+ },
+ "road_label_3": {
"color": "#585042",
"stroke": "land",
"strokeWidth": 0.6,
"strokeBlur": 2,
- "size": "road_label_size"
+ "size": "road_label_3_size"
},
"water_label": {
"color": "water_dark",
@@ -2269,18 +2391,56 @@ module.exports = {
"stroke": "text_stroke"
},
"poi": {
- //"color": "rgba(50,50,50,0.9)",
"antialias": false
- //"stroke": #000,
- //"strokeWidth": 0.4,
- //"strokeBlur": 2
},
- "poi_label": {
+ "poi_3": {
+ "antialias": false,
+ "opacity": [
+ "stops",
+ {"z": 16.5, "val": 0},
+ {"z": 16.75, "val": 1}
+ ]
+ },
+ "poi_4": {
+ "antialias": false,
+ "opacity": [
+ "stops",
+ {"z": 18.5, "val": 0},
+ {"z": 18.75, "val": 1}
+ ]
+ },
+ "poi_label_1-2": {
+ "color": "#444",
+ "size": 11,
+ "stroke": "land",
+ "strokeWidth": 0.3,
+ "strokeBlur": 1,
+ "translate": [0, 14]
+ },
+ "poi_label_3": {
"color": "#444",
"size": 10,
"stroke": "land",
- "strokeWidth": 0.6,
- "strokeBlur": 2,
+ "strokeWidth": 0.3,
+ "strokeBlur": 1,
+ "translate": [0, 14],
+ "opacity": [
+ "stops",
+ {"z": 16.5, "val": 0},
+ {"z": 16.75, "val": 1}
+ ]
+ },
+ "poi_label_4": {
+ "color": "#444",
+ "size": 10,
+ "opacity": [
+ "stops",
+ {"z": 18.5, "val": 0},
+ {"z": 18.75, "val": 1}
+ ],
+ "stroke": "land",
+ "strokeWidth": 0.3,
+ "strokeBlur": 1,
"translate": [0, 14]
},
"poi_aerodrome": {
@@ -2299,7 +2459,7 @@ module.exports = {
"color": "land_night"
},
"admin_maritime_cover": {
- "color": "#0a1347",
+ "color": "water_night",
"width": 5
},
"admin_maritime": {
@@ -2450,10 +2610,11 @@ module.exports = {
"color": "#103",
"antialias": false,
"prerender": true,
+ "prerender-size": 1024,
"prerender-blur": 1,
"opacity": [
"stops",
- {"z": 0, "val": 0.3},
+ {"z": 16, "val": 0.3},
{"z": 17, "val": 0.2},
{"z": 18, "val": 0.1}
]
@@ -2462,10 +2623,11 @@ module.exports = {
"color": "#206",
"antialias": false,
"prerender": true,
+ "prerender-size": 1024,
"prerender-blur": 1,
"opacity": [
"stops",
- {"z": 0, "val": 0.3},
+ {"z": 16, "val": 0.3},
{"z": 17, "val": 0.2},
{"z": 18, "val": 0.1}
]
@@ -2474,6 +2636,7 @@ module.exports = {
"color": "#fdfdad",
"antialias": false,
"prerender": true,
+ "prerender-size": 1024,
"prerender-blur": 1,
"opacity": [
"stops",
@@ -2487,6 +2650,7 @@ module.exports = {
"color": "#ffe1b7",
"antialias": false,
"prerender": true,
+ "prerender-size": 1024,
"prerender-blur": 1,
"opacity": [
"stops",
@@ -2835,12 +2999,9 @@ module.exports = {
},
"country_label": {
"color": "text_night",
- "stroke": "text_stroke_night",
- "size": [
- "stops",
- {"z": 6, "val": 14},
- {"z": 12, "val": 24}
- ]
+ "stroke": "text2_stroke_night",
+ "strokeWidth": 0.5,
+ "size": "country_label_size"
},
"country_label_line": {
"color": "text_night",
@@ -2854,7 +3015,6 @@ module.exports = {
"marine_label_point_1": {
"color": "water_dark_night",
"size": ["stops",
- {"z": 0, "val": 20},
{"z": 3, "val": 20},
{"z": 4, "val": 25},
{"z": 5, "val": 30},
@@ -2865,7 +3025,6 @@ module.exports = {
"marine_label_point_2": {
"color": "water_dark_night",
"size": ["stops",
- {"z": 0, "val": 13},
{"z": 3, "val": 13},
{"z": 4, "val": 14},
{"z": 5, "val": 20},
@@ -2877,7 +3036,6 @@ module.exports = {
"marine_label_point_other": {
"color": "#a9c4c7",
"size": ["stops",
- {"z": 0, "val": 12},
{"z": 3, "val": 12},
{"z": 4, "val": 13},
{"z": 5, "val": 15},
@@ -2899,13 +3057,10 @@ module.exports = {
{"z": 10, "val": 0}
]
},
- "place_label_city_point": {
- "color": "#ddd"
- },
"place_label_city": {
"color": "#fff",
"strokeWidth": 0.4,
- "stroke": "text_stroke_night",
+ "stroke": "text2_stroke_night",
"size": [
"stops",
{"z": 3.99, "val": 0},
@@ -2913,21 +3068,13 @@ module.exports = {
{"z": 7, "val": 14},
{"z": 14.99, "val": 20},
{"z": 15, "val": 0}
- ],
- "translate": [
- 0,
- [
- "stops",
- {"z": 4, "val": 10},
- {"z": 6, "val": 30},
- {"z": 7, "val": 0}
- ]
]
},
"place_label_town": {
"color": "text_night",
- "strokeWidth": 0.5,
- "stroke": "text_stroke_night",
+ "strokeWidth": 0.3,
+ "strokeBlur": 2,
+ "stroke": "text2_stroke_night",
"size": [
"stops",
{"z": 9, "val": 10},
@@ -2938,8 +3085,9 @@ module.exports = {
},
"place_label_village": {
"color": "text_night",
- "strokeWidth": 0.5,
- "stroke": "text_stroke_night",
+ "strokeWidth": 0.3,
+ "strokeBlur": 2,
+ "stroke": "text2_stroke_night",
"size": [
"stops",
{"z": 9, "val": 8},
@@ -2951,43 +3099,100 @@ module.exports = {
},
"place_label_other": {
"color": "text_night",
- "stroke": "text_stroke_night",
- "strokeWidth": 0.5,
- "strokeBlur": 3,
+ "stroke": "text2_stroke_night",
+ "strokeWidth": 0.3,
+ "strokeBlur": 2,
"size": [
"stops",
- {"z": 0, "val": 10},
- {"z": 14, "val": 11},
- {"z": 15, "val": 12},
- {"z": 16, "val": 14}
+ {"z": 13, "val": 11},
+ {"z": 14, "val": 12},
+ {"z": 16, "val": 14},
+ {"z": 18, "val": 18}
]
},
- "road_label": {
+ "road_label_1": {
+ "color": "text_night",
+ "stroke": "text2_stroke_night",
+ "strokeWidth": 0.5,
+ "strokeBlur": 3,
+ "size": "road_label_1_size"
+ },
+ "road_label_2": {
"color": "text_night",
"stroke": "text2_stroke_night",
"strokeWidth": 0.5,
"strokeBlur": 3,
- "size": "road_label_size"
+ "size": "road_label_2_size"
+ },
+ "road_label_3": {
+ "color": "text_night",
+ "stroke": "text2_stroke_night",
+ "strokeWidth": 0.5,
+ "strokeBlur": 3,
+ "size": "road_label_3_size"
},
"water_label": {
"color": "water_dark_night",
- "stroke": "text_stroke_night"
+ "strokeWidth": 0.8,
+ "stroke": "text2_stroke_night"
},
"waterway_label": {
- "color": "land_night",
+ "color": "water_dark_night",
"strokeWidth": 0.8,
- "stroke": "water_night"
+ "stroke": "text2_stroke_night"
},
"poi": {
"color": "white",
"antialias": false
},
- "poi_label": {
+ "poi_3": {
+ "antialias": false,
+ "opacity": [
+ "stops",
+ {"z": 16.5, "val": 0},
+ {"z": 16.75, "val": 1}
+ ]
+ },
+ "poi_4": {
+ "antialias": false,
+ "opacity": [
+ "stops",
+ {"z": 18.5, "val": 0},
+ {"z": 18.75, "val": 1}
+ ]
+ },
+ "poi_label_1-2": {
+ "color": "#fff",
+ "size": 11,
+ "stroke": "text2_stroke_night",
+ "strokeWidth": 0.3,
+ "strokeBlur": 1,
+ "translate": [0, 14]
+ },
+ "poi_label_3": {
"color": "#fff",
"size": 10,
"stroke": "text2_stroke_night",
"strokeWidth": 0.3,
- "strokeBlur": 2,
+ "strokeBlur": 1,
+ "translate": [0, 14],
+ "opacity": [
+ "stops",
+ {"z": 16.5, "val": 0},
+ {"z": 16.75, "val": 1}
+ ]
+ },
+ "poi_label_4": {
+ "color": "#fff",
+ "size": 10,
+ "opacity": [
+ "stops",
+ {"z": 18.5, "val": 0},
+ {"z": 18.75, "val": 1}
+ ],
+ "stroke": "text2_stroke_night",
+ "strokeWidth": 0.3,
+ "strokeBlur": 1,
"translate": [0, 14]
},
"poi_aerodrome": {
diff --git a/common.gypi b/common.gypi
index b1ad460153..a3e4e5e2fa 100644
--- a/common.gypi
+++ b/common.gypi
@@ -5,6 +5,11 @@
'CLANG_CXX_LIBRARY': 'libc++',
'CLANG_CXX_LANGUAGE_STANDARD':'c++11',
'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+ 'GCC_ENABLE_CPP_EXCEPTIONS': 'YES',
+ 'GCC_ENABLE_CPP_RTTI':'YES',
+ 'OTHER_CPLUSPLUSFLAGS': [ '-Wall', '-Wextra' ],
+ 'GCC_WARN_PEDANTIC': 'YES',
+ 'GCC_WARN_UNINITIALIZED_AUTOS': 'YES_AGGRESSIVE'
},
'cflags_cc': ['-std=c++11'],
'configurations': {
@@ -14,7 +19,11 @@
'xcode_settings': {
'GCC_OPTIMIZATION_LEVEL': '0',
'GCC_GENERATE_DEBUGGING_SYMBOLS': 'YES',
- 'OTHER_CPLUSPLUSFLAGS': [ '-Wall', '-Wextra', '-pedantic' ]
+ 'DEAD_CODE_STRIPPING': 'NO',
+ 'GCC_INLINES_ARE_PRIVATE_EXTERN': 'NO',
+ 'RUN_CLANG_STATIC_ANALYZER':'YES',
+ 'CLANG_ANALYZER_SECURITY_INSECUREAPI_STRCPY': 'YES',
+ 'CLANG_ANALYZER_SECURITY_FLOATLOOPCOUNTER': 'YES'
}
},
'Release': {
@@ -24,8 +33,7 @@
'GCC_OPTIMIZATION_LEVEL': '3',
'GCC_GENERATE_DEBUGGING_SYMBOLS': 'NO',
'DEAD_CODE_STRIPPING': 'YES',
- 'GCC_INLINES_ARE_PRIVATE_EXTERN': 'YES',
- 'OTHER_CPLUSPLUSFLAGS': [ '-Wall', '-Wextra', '-pedantic' ]
+ 'GCC_INLINES_ARE_PRIVATE_EXTERN': 'YES'
}
}
}
diff --git a/deps/gyp/PRESUBMIT.py b/deps/gyp/PRESUBMIT.py
index 9c474eb2b1..b79316a915 100644
--- a/deps/gyp/PRESUBMIT.py
+++ b/deps/gyp/PRESUBMIT.py
@@ -16,8 +16,6 @@ PYLINT_BLACKLIST = [
'test/lib/TestCmd.py',
'test/lib/TestCommon.py',
'test/lib/TestGyp.py',
- # Needs style fix.
- 'pylib/gyp/generator/xcode.py',
]
diff --git a/deps/gyp/codereview.settings b/deps/gyp/codereview.settings
new file mode 100644
index 0000000000..a04a2440df
--- /dev/null
+++ b/deps/gyp/codereview.settings
@@ -0,0 +1,10 @@
+# This file is used by gcl to get repository specific information.
+CODE_REVIEW_SERVER: codereview.chromium.org
+CC_LIST: gyp-developer@googlegroups.com
+VIEW_VC: http://code.google.com/p/gyp/source/detail?r=
+TRY_ON_UPLOAD: True
+TRYSERVER_PROJECT: gyp
+TRYSERVER_PATCHLEVEL: 0
+TRYSERVER_ROOT: trunk
+TRYSERVER_SVN_URL: svn://svn.chromium.org/chrome-try/try-nacl
+
diff --git a/deps/gyp/pylib/gyp/MSVSSettings.py b/deps/gyp/pylib/gyp/MSVSSettings.py
index 773b74e984..205b3b5b9b 100644
--- a/deps/gyp/pylib/gyp/MSVSSettings.py
+++ b/deps/gyp/pylib/gyp/MSVSSettings.py
@@ -367,6 +367,35 @@ fix_vc_macro_slashes_regex = re.compile(
r'(\$\((?:%s)\))(?:[\\/]+)' % "|".join(fix_vc_macro_slashes_regex_list)
)
+# Regular expression to detect keys that were generated by exclusion lists
+_EXCLUDED_SUFFIX_RE = re.compile('^(.*)_excluded$')
+
+
+def _ValidateExclusionSetting(setting, settings, error_msg, stderr=sys.stderr):
+ """Verify that 'setting' is valid if it is generated from an exclusion list.
+
+ If the setting appears to be generated from an exclusion list, the root name
+ is checked.
+
+ Args:
+ setting: A string that is the setting name to validate
+ settings: A dictionary where the keys are valid settings
+ error_msg: The message to emit in the event of error
+ stderr: The stream receiving the error messages.
+ """
+ # This may be unrecognized because it's an exclusion list. If the
+ # setting name has the _excluded suffix, then check the root name.
+ unrecognized = True
+ m = re.match(_EXCLUDED_SUFFIX_RE, setting)
+ if m:
+ root_setting = m.group(1)
+ unrecognized = root_setting not in settings
+
+ if unrecognized:
+ # We don't know this setting. Give a warning.
+ print >> stderr, error_msg
+
+
def FixVCMacroSlashes(s):
"""Replace macros which have excessive following slashes.
@@ -403,9 +432,6 @@ def ConvertVCMacrosToMSBuild(s):
return s
-_EXCLUDED_SUFFIX_RE = re.compile('^(.*)_excluded$')
-
-
def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr):
"""Converts MSVS settings (VS2008 and earlier) to MSBuild settings (VS2010+).
@@ -432,19 +458,12 @@ def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr):
print >> stderr, ('Warning: while converting %s/%s to MSBuild, '
'%s' % (msvs_tool_name, msvs_setting, e))
else:
- # This may be unrecognized because it's an exclusion list. If the
- # setting name has the _excluded suffix, then check the root name.
- unrecognized = True
- m = re.match(_EXCLUDED_SUFFIX_RE, msvs_setting)
- if m:
- root_msvs_setting = m.group(1)
- unrecognized = root_msvs_setting not in msvs_tool
-
- if unrecognized:
- # We don't know this setting. Give a warning.
- print >> stderr, ('Warning: unrecognized setting %s/%s '
- 'while converting to MSBuild.' %
- (msvs_tool_name, msvs_setting))
+ _ValidateExclusionSetting(msvs_setting,
+ msvs_tool,
+ ('Warning: unrecognized setting %s/%s '
+ 'while converting to MSBuild.' %
+ (msvs_tool_name, msvs_setting)),
+ stderr)
else:
print >> stderr, ('Warning: unrecognized tool %s while converting to '
'MSBuild.' % msvs_tool_name)
@@ -495,8 +514,12 @@ def _ValidateSettings(validators, settings, stderr):
print >> stderr, ('Warning: for %s/%s, %s' %
(tool_name, setting, e))
else:
- print >> stderr, ('Warning: unrecognized setting %s/%s' %
- (tool_name, setting))
+ _ValidateExclusionSetting(setting,
+ tool_validators,
+ ('Warning: unrecognized setting %s/%s' %
+ (tool_name, setting)),
+ stderr)
+
else:
print >> stderr, ('Warning: unrecognized tool %s' % tool_name)
@@ -593,7 +616,9 @@ _Same(_compile, 'DebugInformationFormat',
_Same(_compile, 'EnableEnhancedInstructionSet',
_Enumeration(['NotSet',
'StreamingSIMDExtensions', # /arch:SSE
- 'StreamingSIMDExtensions2'])) # /arch:SSE2
+ 'StreamingSIMDExtensions2', # /arch:SSE2
+ 'AdvancedVectorExtensions', # /arch:AVX (vs2012+)
+ 'NoExtensions',])) # /arch:IA32 (vs2012+)
_Same(_compile, 'ErrorReporting',
_Enumeration(['None', # /errorReport:none
'Prompt', # /errorReport:prompt
@@ -848,13 +873,6 @@ _Moved(_link, 'UseLibraryDependencyInputs', 'ProjectReference', _boolean)
# MSVS options not found in MSBuild.
_MSVSOnly(_link, 'OptimizeForWindows98', _newly_boolean)
_MSVSOnly(_link, 'UseUnicodeResponseFiles', _boolean)
-# These settings generate correctly in the MSVS output files when using
-# e.g. DelayLoadDLLs! or AdditionalDependencies! to exclude files from
-# configuration entries, but result in spurious artifacts which can be
-# safely ignored here. See crbug.com/246570
-_MSVSOnly(_link, 'AdditionalLibraryDirectories_excluded', _folder_list)
-_MSVSOnly(_link, 'DelayLoadDLLs_excluded', _file_list)
-_MSVSOnly(_link, 'AdditionalDependencies_excluded', _file_list)
# MSBuild options not found in MSVS.
_MSBuildOnly(_link, 'BuildingInIDE', _boolean)
@@ -1003,9 +1021,6 @@ _Same(_lib, 'TargetMachine', _target_machine_enumeration)
# ProjectReference. We may want to validate that they are consistent.
_Moved(_lib, 'LinkLibraryDependencies', 'ProjectReference', _boolean)
-# TODO(jeanluc) I don't think these are genuine settings but byproducts of Gyp.
-_MSVSOnly(_lib, 'AdditionalLibraryDirectories_excluded', _folder_list)
-
_MSBuildOnly(_lib, 'DisplayLibrary', _string) # /LIST Visible='false'
_MSBuildOnly(_lib, 'ErrorReporting',
_Enumeration([], new=['PromptImmediately', # /ERRORREPORT:PROMPT
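[Editor's aside: the _ValidateExclusionSetting helper factored out above reduces to the following standalone sketch. It is simplified, not verbatim from the patch, and uses stderr.write in place of the Python 2 `print >>` form; 'DelayLoadDLLs_excluded' is one of the generated names this diff deals with.]

    import re
    import sys

    _EXCLUDED_SUFFIX_RE = re.compile('^(.*)_excluded$')

    def validate_exclusion_setting(setting, settings, error_msg, stderr=sys.stderr):
        # A name like 'DelayLoadDLLs_excluded' is generated from an exclusion
        # list; it is valid when its root ('DelayLoadDLLs') is a known setting.
        unrecognized = True
        m = _EXCLUDED_SUFFIX_RE.match(setting)
        if m:
            unrecognized = m.group(1) not in settings
        if unrecognized:
            stderr.write(error_msg + '\n')

    # Recognized root: no warning is emitted.
    validate_exclusion_setting('DelayLoadDLLs_excluded', {'DelayLoadDLLs': None}, 'unused')
    # Unknown root: the warning is written to stderr.
    validate_exclusion_setting('Bogus_excluded', {'DelayLoadDLLs': None},
                               'Warning: unrecognized setting Bogus_excluded')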
diff --git a/deps/gyp/pylib/gyp/MSVSUtil.py b/deps/gyp/pylib/gyp/MSVSUtil.py
index 62e8d260d4..fbf3ed2e3c 100644
--- a/deps/gyp/pylib/gyp/MSVSUtil.py
+++ b/deps/gyp/pylib/gyp/MSVSUtil.py
@@ -109,15 +109,16 @@ def ShardTargets(target_list, target_dicts):
new_target_dicts[t] = target_dicts[t]
# Shard dependencies.
for t in new_target_dicts:
- dependencies = copy.copy(new_target_dicts[t].get('dependencies', []))
- new_dependencies = []
- for d in dependencies:
- if d in targets_to_shard:
- for i in range(targets_to_shard[d]):
- new_dependencies.append(_ShardName(d, i))
- else:
- new_dependencies.append(d)
- new_target_dicts[t]['dependencies'] = new_dependencies
+ for deptype in ('dependencies', 'dependencies_original'):
+ dependencies = copy.copy(new_target_dicts[t].get(deptype, []))
+ new_dependencies = []
+ for d in dependencies:
+ if d in targets_to_shard:
+ for i in range(targets_to_shard[d]):
+ new_dependencies.append(_ShardName(d, i))
+ else:
+ new_dependencies.append(d)
+ new_target_dicts[t][deptype] = new_dependencies
return (new_target_list, new_target_dicts)
@@ -264,4 +265,4 @@ def InsertLargePdbShims(target_list, target_dicts, vars):
# Update the original target to depend on the shim target.
target_dict.setdefault('dependencies', []).append(full_shim_target_name)
- return (target_list, target_dicts)
\ No newline at end of file
+ return (target_list, target_dicts)
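[Editor's aside: the ShardTargets change above applies one expansion to both 'dependencies' and 'dependencies_original'. A minimal sketch of that expansion, assuming for illustration that _ShardName appends a numeric suffix:]

    def _ShardName(name, number):
        # Assumed helper: shards named by appended index.
        return '%s_%d' % (name, number)

    def expand_sharded_deps(dependencies, targets_to_shard):
        new_dependencies = []
        for d in dependencies:
            if d in targets_to_shard:
                # One dependency on a sharded target becomes a dependency
                # on each of its shards.
                for i in range(targets_to_shard[d]):
                    new_dependencies.append(_ShardName(d, i))
            else:
                new_dependencies.append(d)
        return new_dependencies

    # expand_sharded_deps(['base', 'blink'], {'blink': 3})
    #   -> ['base', 'blink_0', 'blink_1', 'blink_2']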
diff --git a/deps/gyp/pylib/gyp/MSVSVersion.py b/deps/gyp/pylib/gyp/MSVSVersion.py
index 03b6d8ad42..bcd6122f2d 100644
--- a/deps/gyp/pylib/gyp/MSVSVersion.py
+++ b/deps/gyp/pylib/gyp/MSVSVersion.py
@@ -379,7 +379,7 @@ def SelectVisualStudioVersion(version='auto'):
if version == 'auto':
version = os.environ.get('GYP_MSVS_VERSION', 'auto')
version_map = {
- 'auto': ('10.0', '12.0', '9.0', '8.0', '11.0'),
+ 'auto': ('12.0', '10.0', '9.0', '8.0', '11.0'),
'2005': ('8.0',),
'2005e': ('8.0',),
'2008': ('9.0',),
diff --git a/deps/gyp/pylib/gyp/common.py b/deps/gyp/pylib/gyp/common.py
index f9c6c6f3a8..df71d973e1 100644
--- a/deps/gyp/pylib/gyp/common.py
+++ b/deps/gyp/pylib/gyp/common.py
@@ -4,6 +4,7 @@
from __future__ import with_statement
+import collections
import errno
import filecmp
import os.path
@@ -472,6 +473,72 @@ def uniquer(seq, idfun=None):
return result
+# Based on http://code.activestate.com/recipes/576694/.
+class OrderedSet(collections.MutableSet):
+ def __init__(self, iterable=None):
+ self.end = end = []
+ end += [None, end, end] # sentinel node for doubly linked list
+ self.map = {} # key --> [key, prev, next]
+ if iterable is not None:
+ self |= iterable
+
+ def __len__(self):
+ return len(self.map)
+
+ def __contains__(self, key):
+ return key in self.map
+
+ def add(self, key):
+ if key not in self.map:
+ end = self.end
+ curr = end[1]
+ curr[2] = end[1] = self.map[key] = [key, curr, end]
+
+ def discard(self, key):
+ if key in self.map:
+ key, prev_item, next_item = self.map.pop(key)
+ prev_item[2] = next_item
+ next_item[1] = prev_item
+
+ def __iter__(self):
+ end = self.end
+ curr = end[2]
+ while curr is not end:
+ yield curr[0]
+ curr = curr[2]
+
+ def __reversed__(self):
+ end = self.end
+ curr = end[1]
+ while curr is not end:
+ yield curr[0]
+ curr = curr[1]
+
+ # The second argument is an addition that causes a pylint warning.
+ def pop(self, last=True): # pylint: disable=W0221
+ if not self:
+ raise KeyError('set is empty')
+ key = self.end[1][0] if last else self.end[2][0]
+ self.discard(key)
+ return key
+
+ def __repr__(self):
+ if not self:
+ return '%s()' % (self.__class__.__name__,)
+ return '%s(%r)' % (self.__class__.__name__, list(self))
+
+ def __eq__(self, other):
+ if isinstance(other, OrderedSet):
+ return len(self) == len(other) and list(self) == list(other)
+ return set(self) == set(other)
+
+ # Extensions to the recipe.
+ def update(self, iterable):
+ for i in iterable:
+ if i not in self:
+ self.add(i)
+
+
class CycleError(Exception):
"""An exception raised when an unexpected cycle is detected."""
def __init__(self, nodes):
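[Editor's aside: a usage sketch against the OrderedSet class added above, which the patch later imports from gyp.common in msvs.py — it deduplicates while preserving first-insertion order, unlike a plain set:]

    from gyp.common import OrderedSet

    s = OrderedSet(['b', 'a', 'b', 'c'])
    s.add('a')             # already present: position unchanged
    s.update(['d', 'a'])   # recipe extension: adds only the missing 'd'
    assert list(s) == ['b', 'a', 'c', 'd']
    assert list(reversed(s)) == ['d', 'c', 'a', 'b']
    s.discard('a')
    assert s.pop() == 'd'  # pops from the end by default
    assert list(s) == ['b', 'c']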
diff --git a/deps/gyp/pylib/gyp/generator/android.py b/deps/gyp/pylib/gyp/generator/android.py
index 41346e2b1c..39884749b1 100644
--- a/deps/gyp/pylib/gyp/generator/android.py
+++ b/deps/gyp/pylib/gyp/generator/android.py
@@ -55,7 +55,7 @@ generator_additional_path_sections = []
generator_extra_sources_for_rules = []
-SHARED_FOOTER = """\
+ALL_MODULES_FOOTER = """\
# "gyp_all_modules" is a concatenation of the "gyp_all_modules" targets from
# all the included sub-makefiles. This is just here to clarify.
gyp_all_modules:
@@ -133,7 +133,7 @@ class AndroidMkWriter(object):
self.android_top_dir = android_top_dir
def Write(self, qualified_target, relative_target, base_path, output_filename,
- spec, configs, part_of_all):
+ spec, configs, part_of_all, write_alias_target):
"""The main entry point: writes a .mk file for a single target.
Arguments:
@@ -144,6 +144,8 @@ class AndroidMkWriter(object):
output_filename: output .mk file name to write
spec, configs: gyp info
part_of_all: flag indicating this target is part of 'all'
+ write_alias_target: flag indicating whether to create short aliases for
+ this target
"""
gyp.common.EnsureDirExists(output_filename)
@@ -186,11 +188,19 @@ class AndroidMkWriter(object):
self.WriteLn('LOCAL_MODULE_TAGS := optional')
if self.toolset == 'host':
self.WriteLn('LOCAL_IS_HOST_MODULE := true')
+ else:
+ self.WriteLn('LOCAL_MODULE_TARGET_ARCH := '
+ '$(TARGET_$(GYP_VAR_PREFIX)ARCH)')
# Grab output directories; needed for Actions and Rules.
- self.WriteLn('gyp_intermediate_dir := $(call local-intermediates-dir)')
+ if self.toolset == 'host':
+ self.WriteLn('gyp_intermediate_dir := '
+ '$(call local-intermediates-dir)')
+ else:
+ self.WriteLn('gyp_intermediate_dir := '
+ '$(call local-intermediates-dir,,$(GYP_VAR_PREFIX))')
self.WriteLn('gyp_shared_intermediate_dir := '
- '$(call intermediates-dir-for,GYP,shared)')
+ '$(call intermediates-dir-for,GYP,shared,,,$(GYP_VAR_PREFIX))')
self.WriteLn()
# List files this target depends on so that actions/rules/copies/sources
@@ -226,7 +236,8 @@ class AndroidMkWriter(object):
if spec.get('sources', []) or extra_sources:
self.WriteSources(spec, configs, extra_sources)
- self.WriteTarget(spec, configs, deps, link_deps, part_of_all)
+ self.WriteTarget(spec, configs, deps, link_deps, part_of_all,
+ write_alias_target)
# Update global list of target outputs, used in dependency tracking.
target_outputs[qualified_target] = ('path', self.output_binary)
@@ -291,6 +302,7 @@ class AndroidMkWriter(object):
# writing duplicate dummy rules for those outputs.
main_output = make.QuoteSpaces(self.LocalPathify(outputs[0]))
self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
+ self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output)
self.WriteLn('%s: gyp_intermediate_dir := '
'$(abspath $(gyp_intermediate_dir))' % main_output)
self.WriteLn('%s: gyp_shared_intermediate_dir := '
@@ -337,13 +349,10 @@ class AndroidMkWriter(object):
"""
if len(rules) == 0:
return
- rule_trigger = '%s_rule_trigger' % self.android_module
- did_write_rule = False
for rule in rules:
if len(rule.get('rule_sources', [])) == 0:
continue
- did_write_rule = True
name = make.StringToMakefileVariable('%s_%s' % (self.relative_target,
rule['rule_name']))
self.WriteLn('\n### Generated for rule "%s":' % name)
@@ -391,6 +400,7 @@ class AndroidMkWriter(object):
outputs = map(self.LocalPathify, outputs)
main_output = outputs[0]
self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
+ self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output)
self.WriteLn('%s: gyp_intermediate_dir := '
'$(abspath $(gyp_intermediate_dir))' % main_output)
self.WriteLn('%s: gyp_shared_intermediate_dir := '
@@ -412,13 +422,9 @@ class AndroidMkWriter(object):
# Make each output depend on the main output, with an empty command
# to force make to notice that the mtime has changed.
self.WriteLn('%s: %s ;' % (output, main_output))
- self.WriteLn('.PHONY: %s' % (rule_trigger))
- self.WriteLn('%s: %s' % (rule_trigger, main_output))
- self.WriteLn('')
- if did_write_rule:
- extra_sources.append(rule_trigger) # Force all rules to run.
- self.WriteLn('### Finished generating for all rules')
- self.WriteLn('')
+ self.WriteLn()
+
+ self.WriteLn()
def WriteCopies(self, copies, extra_outputs):
@@ -501,6 +507,9 @@ class AndroidMkWriter(object):
self.WriteLn('LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) '
'$(LOCAL_C_INCLUDES_$(GYP_CONFIGURATION))')
self.WriteLn('LOCAL_CPPFLAGS := $(LOCAL_CPPFLAGS_$(GYP_CONFIGURATION))')
+ # Android uses separate flags for assembly file invocations, but gyp expects
+ # the same CFLAGS to be applied:
+ self.WriteLn('LOCAL_ASFLAGS := $(LOCAL_CFLAGS)')
def WriteSources(self, spec, configs, extra_sources):
@@ -609,16 +618,16 @@ class AndroidMkWriter(object):
prefix = ''
if spec['toolset'] == 'host':
- suffix = '_host_gyp'
+ suffix = '_$(TARGET_$(GYP_VAR_PREFIX)ARCH)_host_gyp'
else:
suffix = '_gyp'
if self.path:
- name = '%s%s_%s%s' % (prefix, self.path, self.target, suffix)
+ middle = make.StringToMakefileVariable('%s_%s' % (self.path, self.target))
else:
- name = '%s%s%s' % (prefix, self.target, suffix)
+ middle = make.StringToMakefileVariable(self.target)
- return make.StringToMakefileVariable(name)
+ return ''.join([prefix, middle, suffix])
def ComputeOutputParts(self, spec):
@@ -680,15 +689,15 @@ class AndroidMkWriter(object):
if self.toolset == 'host':
path = '$(HOST_OUT_INTERMEDIATE_LIBRARIES)'
else:
- path = '$(TARGET_OUT_INTERMEDIATE_LIBRARIES)'
+ path = '$($(GYP_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)'
else:
# Other targets just get built into their intermediate dir.
if self.toolset == 'host':
path = '$(call intermediates-dir-for,%s,%s,true)' % (self.android_class,
self.android_module)
else:
- path = '$(call intermediates-dir-for,%s,%s)' % (self.android_class,
- self.android_module)
+ path = ('$(call intermediates-dir-for,%s,%s,,,$(GYP_VAR_PREFIX))'
+ % (self.android_class, self.android_module))
assert spec.get('product_dir') is None # TODO: not supported?
return os.path.join(path, self.ComputeOutputBasename(spec))
@@ -816,12 +825,15 @@ class AndroidMkWriter(object):
'LOCAL_SHARED_LIBRARIES')
- def WriteTarget(self, spec, configs, deps, link_deps, part_of_all):
+ def WriteTarget(self, spec, configs, deps, link_deps, part_of_all,
+ write_alias_target):
"""Write Makefile code to produce the final target of the gyp spec.
spec, configs: input from gyp.
deps, link_deps: dependency lists; see ComputeDeps()
part_of_all: flag indicating this target is part of 'all'
+ write_alias_target: flag indicating whether to create short aliases for this
+ target
"""
self.WriteLn('### Rules for final target.')
@@ -832,7 +844,7 @@ class AndroidMkWriter(object):
# name 'gyp_all_modules' as the Android build system doesn't allow the use
# of the Make target 'all' and because 'all_modules' is the equivalent of
# the Make target 'all' on Android.
- if part_of_all:
+ if part_of_all and write_alias_target:
self.WriteLn('# Add target alias to "gyp_all_modules" target.')
self.WriteLn('.PHONY: gyp_all_modules')
self.WriteLn('gyp_all_modules: %s' % self.android_module)
@@ -841,7 +853,7 @@ class AndroidMkWriter(object):
# Add an alias from the gyp target name to the Android module name. This
# simplifies manual builds of the target, and is required by the test
# framework.
- if self.target != self.android_module:
+ if self.target != self.android_module and write_alias_target:
self.WriteLn('# Alias gyp target name.')
self.WriteLn('.PHONY: %s' % self.target)
self.WriteLn('%s: %s' % (self.target, self.android_module))
@@ -870,6 +882,8 @@ class AndroidMkWriter(object):
else:
self.WriteLn('LOCAL_MODULE_PATH := $(PRODUCT_OUT)/gyp_stamp')
self.WriteLn('LOCAL_UNINSTALLABLE_MODULE := true')
+ if self.toolset == 'target':
+ self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_VAR_PREFIX)')
self.WriteLn()
self.WriteLn('include $(BUILD_SYSTEM)/base_rules.mk')
self.WriteLn()
@@ -877,6 +891,9 @@ class AndroidMkWriter(object):
self.WriteLn('\t$(hide) echo "Gyp timestamp: $@"')
self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
self.WriteLn('\t$(hide) touch $@')
+ if self.toolset == 'target':
+ self.WriteLn()
+ self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX :=')
def WriteList(self, value_list, variable=None, prefix='',
@@ -926,7 +943,7 @@ class AndroidMkWriter(object):
'INPUT_ROOT': expansion,
'INPUT_DIRNAME': dirname,
}
- return path
+ return os.path.normpath(path)
def PerformBuild(data, configurations, params):
@@ -946,6 +963,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
generator_flags = params.get('generator_flags', {})
builddir_name = generator_flags.get('output_dir', 'out')
limit_to_target_all = generator_flags.get('limit_to_target_all', False)
+ write_alias_targets = generator_flags.get('write_alias_targets', True)
android_top_dir = os.environ.get('ANDROID_BUILD_TOP')
assert android_top_dir, '$ANDROID_BUILD_TOP not set; you need to run lunch.'
@@ -1041,7 +1059,8 @@ def GenerateOutput(target_list, target_dicts, data, params):
writer = AndroidMkWriter(android_top_dir)
android_module = writer.Write(qualified_target, relative_target, base_path,
output_file, spec, configs,
- part_of_all=part_of_all)
+ part_of_all=part_of_all,
+ write_alias_target=write_alias_targets)
if android_module in android_modules:
print ('ERROR: Android module names must be unique. The following '
'targets both generate Android module name %s.\n %s\n %s' %
@@ -1057,6 +1076,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
include_list.add(mkfile_rel_path)
root_makefile.write('GYP_CONFIGURATION ?= %s\n' % default_configuration)
+ root_makefile.write('GYP_VAR_PREFIX ?=\n')
# Write out the sorted list of includes.
root_makefile.write('\n')
@@ -1064,6 +1084,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
root_makefile.write('include $(LOCAL_PATH)/' + include_file + '\n')
root_makefile.write('\n')
- root_makefile.write(SHARED_FOOTER)
+ if write_alias_targets:
+ root_makefile.write(ALL_MODULES_FOOTER)
root_makefile.close()
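[Editor's aside: a reduced sketch of how the new write_alias_target flag gates the alias rules in WriteTarget above; the flag comes from the 'write_alias_targets' generator flag, and suppressing it keeps repeated per-architecture generation from redefining the same aliases (the intent suggested by the GYP_VAR_PREFIX plumbing; an inference, not stated in the patch):]

    def alias_lines(part_of_all, write_alias_target, target, android_module):
        # Both the 'gyp_all_modules' alias and the short gyp-name alias are
        # written only when write_alias_target is set.
        lines = []
        if part_of_all and write_alias_target:
            lines.append('gyp_all_modules: %s' % android_module)
        if target != android_module and write_alias_target:
            lines.append('%s: %s' % (target, android_module))
        return lines

    assert alias_lines(True, False, 'llmr', 'libllmr_gyp') == []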
diff --git a/deps/gyp/pylib/gyp/generator/eclipse.py b/deps/gyp/pylib/gyp/generator/eclipse.py
index 8d08f57eaa..718eb5d3db 100644
--- a/deps/gyp/pylib/gyp/generator/eclipse.py
+++ b/deps/gyp/pylib/gyp/generator/eclipse.py
@@ -165,7 +165,7 @@ def GetAllIncludeDirectories(target_list, target_dicts,
return all_includes_list
-def GetCompilerPath(target_list, data):
+def GetCompilerPath(target_list, data, options):
"""Determine a command that can be used to invoke the compiler.
Returns:
@@ -173,13 +173,12 @@ def GetCompilerPath(target_list, data):
the compiler from that. Otherwise, see if a compiler was specified via the
CC_target environment variable.
"""
-
# First, see if the compiler is configured in make's settings.
build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
make_global_settings_dict = data[build_file].get('make_global_settings', {})
for key, value in make_global_settings_dict:
if key in ['CC', 'CXX']:
- return value
+ return os.path.join(options.toplevel_dir, value)
# Check to see if the compiler was specified as an environment variable.
for key in ['CC_target', 'CC', 'CXX']:
@@ -304,7 +303,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File',
'GNU C++', 'GNU C', 'Assembly']
- compiler_path = GetCompilerPath(target_list, data)
+ compiler_path = GetCompilerPath(target_list, data, options)
include_dirs = GetAllIncludeDirectories(target_list, target_dicts,
shared_intermediate_dirs, config_name,
params, compiler_path)
diff --git a/deps/gyp/pylib/gyp/generator/msvs.py b/deps/gyp/pylib/gyp/generator/msvs.py
index 9dcdab6cfa..64991d4248 100644
--- a/deps/gyp/pylib/gyp/generator/msvs.py
+++ b/deps/gyp/pylib/gyp/generator/msvs.py
@@ -2,7 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import collections
import copy
import ntpath
import os
@@ -21,6 +20,7 @@ import gyp.MSVSUserFile as MSVSUserFile
import gyp.MSVSUtil as MSVSUtil
import gyp.MSVSVersion as MSVSVersion
from gyp.common import GypError
+from gyp.common import OrderedSet
# TODO: Remove once bots are on 2.7, http://crbug.com/241769
def _import_OrderedDict():
@@ -81,6 +81,7 @@ generator_additional_non_configuration_keys = [
'msvs_external_builder_out_dir',
'msvs_external_builder_build_cmd',
'msvs_external_builder_clean_cmd',
+ 'msvs_external_builder_clcompile_cmd',
]
@@ -97,46 +98,6 @@ cached_username = None
cached_domain = None
-# Based on http://code.activestate.com/recipes/576694/.
-class OrderedSet(collections.MutableSet):
- def __init__(self, iterable=None):
- self.end = end = []
- end += [None, end, end] # sentinel node for doubly linked list
- self.map = {} # key --> [key, prev, next]
- if iterable is not None:
- self |= iterable
-
- def __len__(self):
- return len(self.map)
-
- def discard(self, key):
- if key in self.map:
- key, prev, next = self.map.pop(key)
- prev[2] = next
- next[1] = prev
-
- def __contains__(self, key):
- return key in self.map
-
- def add(self, key):
- if key not in self.map:
- end = self.end
- curr = end[1]
- curr[2] = end[1] = self.map[key] = [key, curr, end]
-
- def update(self, iterable):
- for i in iterable:
- if i not in self:
- self.add(i)
-
- def __iter__(self):
- end = self.end
- curr = end[2]
- while curr is not end:
- yield curr[0]
- curr = curr[2]
-
-
# TODO(gspencer): Switch the os.environ calls to be
# win32api.GetDomainName() and win32api.GetUserName() once the
# python version in depot_tools has been updated to work on Vista
@@ -858,17 +819,21 @@ def _GenerateRulesForMSVS(p, output_dir, options, spec,
def _AdjustSourcesForRules(spec, rules, sources, excluded_sources):
# Add outputs generated by each rule (if applicable).
for rule in rules:
- # Done if not processing outputs as sources.
- if int(rule.get('process_outputs_as_sources', False)):
- # Add in the outputs from this rule.
- trigger_files = _FindRuleTriggerFiles(rule, sources)
- for trigger_file in trigger_files:
+ # Add in the outputs from this rule.
+ trigger_files = _FindRuleTriggerFiles(rule, sources)
+ for trigger_file in trigger_files:
+ # Remove trigger_file from excluded_sources to let the rule be triggered
+ # (e.g. rule trigger ax_enums.idl is added to excluded_sources
+ # because it's also in an action's inputs in the same project)
+ excluded_sources.discard(_FixPath(trigger_file))
+ # Done if not processing outputs as sources.
+ if int(rule.get('process_outputs_as_sources', False)):
inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file)
inputs = OrderedSet(_FixPaths(inputs))
outputs = OrderedSet(_FixPaths(outputs))
inputs.remove(_FixPath(trigger_file))
sources.update(inputs)
- if not spec.get('msvs_external_builder'):
+ if spec['type'] != 'none' and not spec.get('msvs_external_builder'):
excluded_sources.update(inputs)
sources.update(outputs)
@@ -1427,7 +1392,7 @@ def _PrepareListOfSources(spec, generator_flags, gyp_file):
# Add all inputs to sources and excluded sources.
inputs = OrderedSet(inputs)
sources.update(inputs)
- if not spec.get('msvs_external_builder'):
+ if spec['type'] != 'none' and not spec.get('msvs_external_builder'):
excluded_sources.update(inputs)
if int(a.get('process_outputs_as_sources', False)):
_AddNormalizedSources(sources, a.get('outputs', []))
@@ -1479,8 +1444,14 @@ def _AdjustSourcesAndConvertToFilterHierarchy(
# Prune filters with a single child to flatten ugly directory structures
# such as ../../src/modules/module1 etc.
- while len(sources) == 1 and isinstance(sources[0], MSVSProject.Filter):
- sources = sources[0].contents
+ if version.UsesVcxproj():
+ while all([isinstance(s, MSVSProject.Filter) for s in sources]) \
+ and len(set([s.name for s in sources])) == 1:
+ assert all([len(s.contents) == 1 for s in sources])
+ sources = [s.contents[0] for s in sources]
+ else:
+ while len(sources) == 1 and isinstance(sources[0], MSVSProject.Filter):
+ sources = sources[0].contents
return sources, excluded_sources, excluded_idl
@@ -2584,6 +2555,7 @@ def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name):
['ProjectGuid', guid],
['Keyword', 'Win32Proj'],
['RootNamespace', namespace],
+ ['IgnoreWarnCompileDuplicatedFilename', 'true'],
]
]
@@ -3226,7 +3198,9 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
def _GetMSBuildExternalBuilderTargets(spec):
"""Return a list of MSBuild targets for external builders.
- Right now, only "Build" and "Clean" targets are generated.
+ The "Build" and "Clean" targets are always generated. If the spec contains
+ 'msvs_external_builder_clcompile_cmd', then the "ClCompile" target will also
+ be generated, to support building selected C/C++ files.
Arguments:
spec: The gyp target spec.
@@ -3245,7 +3219,17 @@ def _GetMSBuildExternalBuilderTargets(spec):
clean_target = ['Target', {'Name': 'Clean'}]
clean_target.append(['Exec', {'Command': clean_cmd}])
- return [build_target, clean_target]
+ targets = [build_target, clean_target]
+
+ if spec.get('msvs_external_builder_clcompile_cmd'):
+ clcompile_cmd = _BuildCommandLineForRuleRaw(
+ spec, spec['msvs_external_builder_clcompile_cmd'],
+ False, False, False, False)
+ clcompile_target = ['Target', {'Name': 'ClCompile'}]
+ clcompile_target.append(['Exec', {'Command': clcompile_cmd}])
+ targets.append(clcompile_target)
+
+ return targets
def _GetMSBuildExtensions(props_files_of_rules):
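[Editor's aside: the target-list logic of _GetMSBuildExternalBuilderTargets, reduced to a sketch; command construction via _BuildCommandLineForRuleRaw is elided and the command string is a placeholder:]

    def external_builder_target_names(spec):
        # 'Build' and 'Clean' are always generated; 'ClCompile' only when
        # the spec opts in, enabling single-file C/C++ compiles.
        names = ['Build', 'Clean']
        if spec.get('msvs_external_builder_clcompile_cmd'):
            names.append('ClCompile')
        return names

    assert external_builder_target_names({}) == ['Build', 'Clean']
    assert external_builder_target_names(
        {'msvs_external_builder_clcompile_cmd': 'compile.cmd'}
    ) == ['Build', 'Clean', 'ClCompile']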
diff --git a/deps/gyp/pylib/gyp/generator/ninja.py b/deps/gyp/pylib/gyp/generator/ninja.py
index 1ed23f64cc..e3fafb5d43 100644
--- a/deps/gyp/pylib/gyp/generator/ninja.py
+++ b/deps/gyp/pylib/gyp/generator/ninja.py
@@ -2,6 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import collections
import copy
import hashlib
import json
@@ -343,7 +344,7 @@ class NinjaWriter:
return os.path.normpath(os.path.join(obj, self.base_dir, path_dir,
path_basename))
- def WriteCollapsedDependencies(self, name, targets):
+ def WriteCollapsedDependencies(self, name, targets, order_only=None):
"""Given a list of targets, return a path for a single file
representing the result of building all the targets or None.
@@ -351,10 +352,11 @@ class NinjaWriter:
assert targets == filter(None, targets), targets
if len(targets) == 0:
+ assert not order_only
return None
- if len(targets) > 1:
+ if len(targets) > 1 or order_only:
stamp = self.GypPathToUniqueOutput(name + '.stamp')
- targets = self.ninja.build(stamp, 'stamp', targets)
+ targets = self.ninja.build(stamp, 'stamp', targets, order_only=order_only)
self.ninja.newline()
return targets[0]
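[Editor's aside: the order_only parameter threaded through here supports the rule-input collapsing in a later hunk of this file. Writing m rule inputs on each of n build edges costs m * n entries; collapsing them behind one stamp costs m + n. The decision reduces to this sketch:]

    def should_collapse_rule_inputs(inputs, sources, prebuild):
        # Mirrors the check added in WriteRules: collapse only when both the
        # input count (plus an optional prebuild stamp) and the source count
        # are non-trivial, so m + n beats m * n.
        num_inputs = len(inputs) + (1 if prebuild else 0)
        return num_inputs > 2 and len(sources) > 2

    assert not should_collapse_rule_inputs(['a.py'], ['x.idl'], prebuild=None)
    assert should_collapse_rule_inputs(['a.py', 'b.py', 'c.py'],
                                       ['x.idl', 'y.idl', 'z.idl'],
                                       prebuild=['stamp'])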
@@ -472,6 +474,8 @@ class NinjaWriter:
else:
print "Warning: Actions/rules writing object files don't work with " \
"multiarch targets, dropping. (target %s)" % spec['target_name']
+ elif self.flavor == 'mac' and len(self.archs) > 1:
+ link_deps = collections.defaultdict(list)
if self.flavor == 'win' and self.target.type == 'static_library':
@@ -580,10 +584,7 @@ class NinjaWriter:
def WriteActions(self, actions, extra_sources, prebuild,
extra_mac_bundle_resources):
# Actions cd into the base directory.
- env = self.GetSortedXcodeEnv()
- if self.flavor == 'win':
- env = self.msvs_settings.GetVSMacroEnv(
- '$!PRODUCT_DIR', config=self.config_name)
+ env = self.GetToolchainEnv()
all_outputs = []
for action in actions:
# First write out a rule for the action.
@@ -616,15 +617,17 @@ class NinjaWriter:
def WriteRules(self, rules, extra_sources, prebuild,
mac_bundle_resources, extra_mac_bundle_resources):
- env = self.GetSortedXcodeEnv()
+ env = self.GetToolchainEnv()
all_outputs = []
for rule in rules:
- # First write out a rule for the rule action.
- name = '%s_%s' % (rule['rule_name'],
- hashlib.md5(self.qualified_target).hexdigest())
# Skip a rule with no action and no inputs.
if 'action' not in rule and not rule.get('rule_sources', []):
continue
+
+ # First write out a rule for the rule action.
+ name = '%s_%s' % (rule['rule_name'],
+ hashlib.md5(self.qualified_target).hexdigest())
+
args = rule['action']
description = self.GenerateDescription(
'RULE',
@@ -653,8 +656,22 @@ class NinjaWriter:
return path.replace('\\', '/')
return path
+ inputs = [self.GypPathToNinja(i, env) for i in rule.get('inputs', [])]
+
+ # If there are n source files matching the rule, and m additional rule
+ # inputs, then adding 'inputs' to each build edge written below will
+ # write m * n inputs. Collapsing reduces this to m + n.
+ sources = rule.get('rule_sources', [])
+ num_inputs = len(inputs)
+ if prebuild:
+ num_inputs += 1
+ if num_inputs > 2 and len(sources) > 2:
+ inputs = [self.WriteCollapsedDependencies(
+ rule['rule_name'], inputs, order_only=prebuild)]
+ prebuild = []
+
# For each source file, write an edge that generates all the outputs.
- for source in rule.get('rule_sources', []):
+ for source in sources:
source = os.path.normpath(source)
dirname, basename = os.path.split(source)
root, ext = os.path.splitext(basename)
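The comment's m + n claim, spelled out: without collapsing, every one of the n per-source edges repeats the m shared rule inputs, writing m * n entries; with the stamp, the m inputs are written once and each edge adds a single reference. A throwaway check of the arithmetic:

    m, n = 10, 500            # hypothetical shared rule inputs and sources
    uncollapsed = m * n       # inputs written across all edges
    collapsed = m + n         # one stamp edge (m) plus n one-entry references
    assert (uncollapsed, collapsed) == (5000, 510)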
@@ -663,9 +680,6 @@ class NinjaWriter:
outputs = [self.ExpandRuleVariables(o, root, dirname,
source, ext, basename)
for o in rule['outputs']]
- inputs = [self.ExpandRuleVariables(i, root, dirname,
- source, ext, basename)
- for i in rule.get('inputs', [])]
if int(rule.get('process_outputs_as_sources', False)):
extra_sources += outputs
@@ -703,10 +717,11 @@ class NinjaWriter:
else:
assert var == None, repr(var)
- inputs = [self.GypPathToNinja(i, env) for i in inputs]
outputs = [self.GypPathToNinja(o, env) for o in outputs]
- extra_bindings.append(('unique_name',
- hashlib.md5(outputs[0]).hexdigest()))
+ if self.flavor == 'win':
+ # WriteNewNinjaRule uses unique_name for creating an rsp file on win.
+ extra_bindings.append(('unique_name',
+ hashlib.md5(outputs[0]).hexdigest()))
self.ninja.build(outputs, rule_name, self.GypPathToNinja(source),
implicit=inputs,
order_only=prebuild,
@@ -718,7 +733,7 @@ class NinjaWriter:
def WriteCopies(self, copies, prebuild, mac_bundle_depends):
outputs = []
- env = self.GetSortedXcodeEnv()
+ env = self.GetToolchainEnv()
for copy in copies:
for path in copy['files']:
# Normalize the path so trailing slashes don't confuse us.
@@ -810,6 +825,7 @@ class NinjaWriter:
cflags_objcc = ['$cflags_cc'] + \
self.xcode_settings.GetCflagsObjCC(config_name)
elif self.flavor == 'win':
+ asmflags = self.msvs_settings.GetAsmflags(config_name)
cflags = self.msvs_settings.GetCflags(config_name)
cflags_c = self.msvs_settings.GetCflagsC(config_name)
cflags_cc = self.msvs_settings.GetCflagsCC(config_name)
@@ -844,16 +860,17 @@ class NinjaWriter:
self.WriteVariableList(ninja_file, 'defines',
[Define(d, self.flavor) for d in defines])
if self.flavor == 'win':
+ self.WriteVariableList(ninja_file, 'asmflags',
+ map(self.ExpandSpecial, asmflags))
self.WriteVariableList(ninja_file, 'rcflags',
[QuoteShellArgument(self.ExpandSpecial(f), self.flavor)
for f in self.msvs_settings.GetRcflags(config_name,
self.GypPathToNinja)])
include_dirs = config.get('include_dirs', [])
- env = self.GetSortedXcodeEnv()
+
+ env = self.GetToolchainEnv()
if self.flavor == 'win':
- env = self.msvs_settings.GetVSMacroEnv('$!PRODUCT_DIR',
- config=config_name)
include_dirs = self.msvs_settings.AdjustIncludeDirs(include_dirs,
config_name)
self.WriteVariableList(ninja_file, 'includes',
@@ -1095,6 +1112,23 @@ class NinjaWriter:
extra_bindings.append(('soname', os.path.split(output)[1]))
extra_bindings.append(('lib',
gyp.common.EncodePOSIXShellArgument(output)))
+ if self.flavor != 'win':
+ link_file_list = output
+ if self.is_mac_bundle:
+ # 'Dependency Framework.framework/Versions/A/Dependency Framework' ->
+ # 'Dependency Framework.framework.rsp'
+ link_file_list = self.xcode_settings.GetWrapperName()
+ if arch:
+ link_file_list += '.' + arch
+ link_file_list += '.rsp'
+      # If an rspfile contains spaces, ninja surrounds the filename with
+      # quotes and then passes it to open(), creating a file with quotes in
+      # its name (and when the rsp file is later looked up, the name goes
+      # through bash, which strips the quotes) :-/
+ link_file_list = link_file_list.replace(' ', '_')
+ extra_bindings.append(
+ ('link_file_list',
+ gyp.common.EncodePOSIXShellArgument(link_file_list)))
if self.flavor == 'win':
extra_bindings.append(('binary', output))
if '/NOENTRY' not in ldflags:
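A standalone restatement of the rsp-file naming above (hypothetical helper; GetWrapperName() is what supplies the 'Foo.framework' form for bundles):

    def rsp_name(output, wrapper_name=None, arch=None):
        name = wrapper_name if wrapper_name else output
        if arch:
            name += '.' + arch
        name += '.rsp'
        return name.replace(' ', '_')  # see the quoting caveat above

    assert rsp_name('libfoo.so') == 'libfoo.so.rsp'
    bundle = 'Dependency Framework.framework/Versions/A/Dependency Framework'
    assert (rsp_name(bundle, wrapper_name='Dependency Framework.framework',
                     arch='x86_64')
            == 'Dependency_Framework.framework.x86_64.rsp')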
@@ -1196,6 +1230,19 @@ class NinjaWriter:
self.target.bundle = output
return output
+ def GetToolchainEnv(self, additional_settings=None):
+ """Returns the variables toolchain would set for build steps."""
+ env = self.GetSortedXcodeEnv(additional_settings=additional_settings)
+ if self.flavor == 'win':
+ env = self.GetMsvsToolchainEnv(
+ additional_settings=additional_settings)
+ return env
+
+ def GetMsvsToolchainEnv(self, additional_settings=None):
+ """Returns the variables Visual Studio would set for build steps."""
+ return self.msvs_settings.GetVSMacroEnv('$!PRODUCT_DIR',
+ config=self.config_name)
+
def GetSortedXcodeEnv(self, additional_settings=None):
"""Returns the variables Xcode would set for build steps."""
assert self.abs_build_dir
@@ -1559,14 +1606,15 @@ def GetDefaultConcurrentLinks():
hard_cap = max(1, int(os.getenv('GYP_LINK_CONCURRENCY_MAX', 2**32)))
return min(mem_limit, hard_cap)
elif sys.platform.startswith('linux'):
- with open("/proc/meminfo") as meminfo:
- memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
- for line in meminfo:
- match = memtotal_re.match(line)
- if not match:
- continue
- # Allow 8Gb per link on Linux because Gold is quite memory hungry
- return max(1, int(match.group(1)) / (8 * (2 ** 20)))
+ if os.path.exists("/proc/meminfo"):
+ with open("/proc/meminfo") as meminfo:
+ memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
+ for line in meminfo:
+ match = memtotal_re.match(line)
+ if not match:
+ continue
+ # Allow 8Gb per link on Linux because Gold is quite memory hungry
+ return max(1, int(match.group(1)) / (8 * (2 ** 20)))
return 1
elif sys.platform == 'darwin':
try:
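The Linux branch budgets 8 GB per concurrent link; since /proc/meminfo reports MemTotal in kB, the divisor is 8 * 2**20 kB. A quick sanity check of the arithmetic (hypothetical machine size):

    mem_total_kb = 32 * 2**20                    # 32 GB, as /proc/meminfo reports it
    links = max(1, mem_total_kb // (8 * 2**20))  # integer division, as above
    assert links == 4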
@@ -1842,9 +1890,9 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
sys.executable))
master_ninja.rule(
'asm',
- description='ASM $in',
+ description='ASM $out',
command=('%s gyp-win-tool asm-wrapper '
- '$arch $asm $defines $includes /c /Fo $out $in' %
+ '$arch $asm $defines $includes $asmflags /c /Fo $out $in' %
sys.executable))
if flavor != 'mac' and flavor != 'win':
@@ -1863,32 +1911,33 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
# The resulting string leaves an uninterpolated %{suffix} which
# is used in the final substitution below.
mtime_preserving_solink_base = (
- 'if [ ! -e $lib -o ! -e ${lib}.TOC ]; then '
- '%(solink)s && %(extract_toc)s > ${lib}.TOC; else '
- '%(solink)s && %(extract_toc)s > ${lib}.tmp && '
- 'if ! cmp -s ${lib}.tmp ${lib}.TOC; then mv ${lib}.tmp ${lib}.TOC ; '
+ 'if [ ! -e $lib -o ! -e $lib.TOC ]; then '
+ '%(solink)s && %(extract_toc)s > $lib.TOC; else '
+ '%(solink)s && %(extract_toc)s > $lib.tmp && '
+ 'if ! cmp -s $lib.tmp $lib.TOC; then mv $lib.tmp $lib.TOC ; '
'fi; fi'
% { 'solink':
'$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s',
'extract_toc':
- ('{ readelf -d ${lib} | grep SONAME ; '
- 'nm -gD -f p ${lib} | cut -f1-2 -d\' \'; }')})
+ ('{ readelf -d $lib | grep SONAME ; '
+ 'nm -gD -f p $lib | cut -f1-2 -d\' \'; }')})
master_ninja.rule(
'solink',
description='SOLINK $lib',
restat=True,
- command=(mtime_preserving_solink_base % {
- 'suffix': '-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive '
- '$libs'}),
+ command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
+ rspfile='$link_file_list',
+ rspfile_content=
+ '-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive $libs',
pool='link_pool')
master_ninja.rule(
'solink_module',
description='SOLINK(module) $lib',
restat=True,
- command=(mtime_preserving_solink_base % {
- 'suffix': '-Wl,--start-group $in $solibs -Wl,--end-group '
- '$libs'}),
+ command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
+ rspfile='$link_file_list',
+ rspfile_content='-Wl,--start-group $in $solibs -Wl,--end-group $libs',
pool='link_pool')
master_ninja.rule(
'link',
@@ -1938,16 +1987,16 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
# comment in the posix section above for details.
solink_base = '$ld %(type)s $ldflags -o $lib %(suffix)s'
mtime_preserving_solink_base = (
- 'if [ ! -e $lib -o ! -e ${lib}.TOC ] || '
+ 'if [ ! -e $lib -o ! -e $lib.TOC ] || '
# Always force dependent targets to relink if this library
# reexports something. Handling this correctly would require
# recursive TOC dumping but this is rare in practice, so punt.
'otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then '
- '%(solink)s && %(extract_toc)s > ${lib}.TOC; '
+ '%(solink)s && %(extract_toc)s > $lib.TOC; '
'else '
- '%(solink)s && %(extract_toc)s > ${lib}.tmp && '
- 'if ! cmp -s ${lib}.tmp ${lib}.TOC; then '
- 'mv ${lib}.tmp ${lib}.TOC ; '
+ '%(solink)s && %(extract_toc)s > $lib.tmp && '
+ 'if ! cmp -s $lib.tmp $lib.TOC; then '
+ 'mv $lib.tmp $lib.TOC ; '
'fi; '
'fi'
% { 'solink': solink_base,
@@ -1955,34 +2004,42 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
'{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'})
- solink_suffix = '$in $solibs $libs$postbuilds'
+
+ solink_suffix = '@$link_file_list$postbuilds'
master_ninja.rule(
'solink',
description='SOLINK $lib, POSTBUILDS',
restat=True,
command=mtime_preserving_solink_base % {'suffix': solink_suffix,
'type': '-shared'},
+ rspfile='$link_file_list',
+ rspfile_content='$in $solibs $libs',
pool='link_pool')
master_ninja.rule(
'solink_notoc',
description='SOLINK $lib, POSTBUILDS',
restat=True,
command=solink_base % {'suffix':solink_suffix, 'type': '-shared'},
+ rspfile='$link_file_list',
+ rspfile_content='$in $solibs $libs',
pool='link_pool')
- solink_module_suffix = '$in $solibs $libs$postbuilds'
master_ninja.rule(
'solink_module',
description='SOLINK(module) $lib, POSTBUILDS',
restat=True,
- command=mtime_preserving_solink_base % {'suffix': solink_module_suffix,
+ command=mtime_preserving_solink_base % {'suffix': solink_suffix,
'type': '-bundle'},
+ rspfile='$link_file_list',
+ rspfile_content='$in $solibs $libs',
pool='link_pool')
master_ninja.rule(
'solink_module_notoc',
description='SOLINK(module) $lib, POSTBUILDS',
restat=True,
- command=solink_base % {'suffix': solink_module_suffix, 'type': '-bundle'},
+ command=solink_base % {'suffix': solink_suffix, 'type': '-bundle'},
+ rspfile='$link_file_list',
+ rspfile_content='$in $solibs $libs',
pool='link_pool')
master_ninja.rule(
diff --git a/deps/gyp/pylib/gyp/generator/xcode.py b/deps/gyp/pylib/gyp/generator/xcode.py
index 331e78baaa..7972459363 100644
--- a/deps/gyp/pylib/gyp/generator/xcode.py
+++ b/deps/gyp/pylib/gyp/generator/xcode.py
@@ -5,6 +5,7 @@
import filecmp
import gyp.common
import gyp.xcodeproj_file
+import gyp.xcode_ninja
import errno
import os
import sys
@@ -575,6 +576,12 @@ def PerformBuild(data, configurations, params):
def GenerateOutput(target_list, target_dicts, data, params):
+ # Optionally configure each spec to use ninja as the external builder.
+ ninja_wrapper = params.get('flavor') == 'ninja'
+ if ninja_wrapper:
+ (target_list, target_dicts, data) = \
+ gyp.xcode_ninja.CreateWrapper(target_list, target_dicts, data, params)
+
options = params['options']
generator_flags = params.get('generator_flags', {})
parallel_builds = generator_flags.get('xcode_parallel_builds', True)
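How the flavor gets set, roughly (hedged sketch; the generator-name spelling is an assumption based on gyp's xcode-ninja wrapper):

    # Hypothetical invocation selecting the wrapper:
    #   GYP_GENERATORS=xcode-ninja gyp foo.gyp
    # The xcode generator then sees params.get('flavor') == 'ninja' and hands
    # target_list/target_dicts/data to gyp.xcode_ninja.CreateWrapper, which
    # emits thin Xcode projects that shell out to ninja for the real build.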
@@ -703,11 +710,16 @@ def GenerateOutput(target_list, target_dicts, data, params):
# and is made a dependency of this target. This way the work is done
# before the dependency checks for what should be recompiled.
support_xct = None
- if type != 'none' and (spec_actions or spec_rules):
+ # The Xcode "issues" don't affect xcode-ninja builds, since the dependency
+ # logic all happens in ninja. Don't bother creating the extra targets in
+ # that case.
+ if type != 'none' and (spec_actions or spec_rules) and not ninja_wrapper:
support_xccl = CreateXCConfigurationList(configuration_names);
+ support_target_suffix = generator_flags.get(
+ 'support_target_suffix', ' Support')
support_target_properties = {
'buildConfigurationList': support_xccl,
- 'name': target_name + ' Support',
+ 'name': target_name + support_target_suffix,
}
if target_product_name:
support_target_properties['productName'] = \
diff --git a/deps/gyp/pylib/gyp/input.py b/deps/gyp/pylib/gyp/input.py
index 6472912db8..dc143d9dfc 100644
--- a/deps/gyp/pylib/gyp/input.py
+++ b/deps/gyp/pylib/gyp/input.py
@@ -10,8 +10,8 @@ from compiler.ast import Module
from compiler.ast import Node
from compiler.ast import Stmt
import compiler
-import copy
import gyp.common
+import gyp.simple_copy
import multiprocessing
import optparse
import os.path
@@ -24,6 +24,7 @@ import threading
import time
import traceback
from gyp.common import GypError
+from gyp.common import OrderedSet
# A list of types that are treated as linkable.
@@ -45,18 +46,31 @@ base_path_sections = [
'outputs',
'sources',
]
-path_sections = []
-
-is_path_section_charset = set('=+?!')
-is_path_section_match_re = re.compile('_(dir|file|path)s?$')
+path_sections = set()
def IsPathSection(section):
- # If section ends in one of these characters, it's applied to a section
+ # If section ends in one of the '=+?!' characters, it's applied to a section
# without the trailing characters. '/' is notably absent from this list,
# because there's no way for a regular expression to be treated as a path.
- while section[-1:] in is_path_section_charset:
+ while section[-1:] in '=+?!':
section = section[:-1]
- return section in path_sections or is_path_section_match_re.search(section)
+
+ if section in path_sections:
+ return True
+
+  # Sections matching the regexp '_(dir|file|path)s?$' are also
+  # considered PathSections. Manual string matching is used since it
+  # is much faster than the regexp, and this can be called hundreds of
+  # thousands of times, so micro-performance matters.
+ if "_" in section:
+ tail = section[-6:]
+ if tail[-1] == 's':
+ tail = tail[:-1]
+ if tail[-5:] in ('_file', '_path'):
+ return True
+ return tail[-4:] == '_dir'
+
+ return False
# base_non_configuration_keys is a list of key names that belong in the target
# itself and should not be propagated into its configurations. It is merged
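Since the hand-rolled tail matching replaces a regexp, a quick hypothetical cross-check against the old pattern is reassuring:

    import re
    old_re = re.compile('_(dir|file|path)s?$')

    def tail_match(section):           # mirrors the fast path above
        if '_' in section:
            tail = section[-6:]
            if tail[-1] == 's':
                tail = tail[:-1]
            if tail[-5:] in ('_file', '_path'):
                return True
            return tail[-4:] == '_dir'
        return False

    for name in ('include_dirs', 'input_file', 'copy_paths', 'outputs',
                 'destination', 'mac_bundle_resources'):
        assert tail_match(name) == bool(old_re.search(name)), name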
@@ -200,7 +214,7 @@ def CheckNode(node, keypath):
"': " + repr(node)
-def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes,
+def LoadOneBuildFile(build_file_path, data, aux_data, includes,
is_target, check):
if build_file_path in data:
return data[build_file_path]
@@ -224,7 +238,7 @@ def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes,
gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)
raise
- if not isinstance(build_file_data, dict):
+ if type(build_file_data) is not dict:
raise GypError("%s does not evaluate to a dictionary." % build_file_path)
data[build_file_path] = build_file_data
@@ -236,10 +250,10 @@ def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes,
try:
if is_target:
LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
- aux_data, variables, includes, check)
+ aux_data, includes, check)
else:
LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
- aux_data, variables, None, check)
+ aux_data, None, check)
except Exception, e:
gyp.common.ExceptionAppend(e,
'while reading includes of ' + build_file_path)
@@ -249,7 +263,7 @@ def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes,
def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
- variables, includes, check):
+ includes, check):
includes_list = []
if includes != None:
includes_list.extend(includes)
@@ -273,30 +287,27 @@ def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include)
MergeDicts(subdict,
- LoadOneBuildFile(include, data, aux_data, variables, None,
- False, check),
+ LoadOneBuildFile(include, data, aux_data, None, False, check),
subdict_path, include)
# Recurse into subdictionaries.
for k, v in subdict.iteritems():
- if v.__class__ == dict:
- LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, variables,
+ if type(v) is dict:
+ LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data,
None, check)
- elif v.__class__ == list:
- LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data, variables,
+ elif type(v) is list:
+ LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data,
check)
# This recurses into lists so that it can look for dicts.
-def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data,
- variables, check):
+def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, check):
for item in sublist:
- if item.__class__ == dict:
+ if type(item) is dict:
LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data,
- variables, None, check)
- elif item.__class__ == list:
- LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data,
- variables, check)
+ None, check)
+ elif type(item) is list:
+ LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data, check)
# Processes toolsets in all the targets. This recurses into condition entries
# since they can contain toolsets as well.
@@ -320,7 +331,7 @@ def ProcessToolsetsInDict(data):
if len(toolsets) > 0:
# Optimization: only do copies if more than one toolset is specified.
for build in toolsets[1:]:
- new_target = copy.deepcopy(target)
+ new_target = gyp.simple_copy.deepcopy(target)
new_target['toolset'] = build
new_target_list.append(new_target)
target['toolset'] = toolsets[0]
@@ -328,7 +339,7 @@ def ProcessToolsetsInDict(data):
data['targets'] = new_target_list
if 'conditions' in data:
for condition in data['conditions']:
- if isinstance(condition, list):
+ if type(condition) is list:
for condition_dict in condition[1:]:
ProcessToolsetsInDict(condition_dict)
@@ -358,7 +369,7 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
gyp.DebugOutput(gyp.DEBUG_INCLUDES,
"Loading Target Build File '%s'", build_file_path)
- build_file_data = LoadOneBuildFile(build_file_path, data, aux_data, variables,
+ build_file_data = LoadOneBuildFile(build_file_path, data, aux_data,
includes, True, check)
# Store DEPTH for later use in generators.
@@ -408,7 +419,8 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
# copy with the target-specific data merged into it as the replacement
# target dict.
old_target_dict = build_file_data['targets'][index]
- new_target_dict = copy.deepcopy(build_file_data['target_defaults'])
+ new_target_dict = gyp.simple_copy.deepcopy(
+ build_file_data['target_defaults'])
MergeDicts(new_target_dict, old_target_dict,
build_file_path, build_file_path)
build_file_data['targets'][index] = new_target_dict
@@ -636,15 +648,26 @@ def FindEnclosingBracketGroup(input_str):
return (-1, -1)
-canonical_int_re = re.compile('(0|-?[1-9][0-9]*)$')
-
-
def IsStrCanonicalInt(string):
"""Returns True if |string| is in its canonical integer form.
The canonical form is such that str(int(string)) == string.
"""
- return isinstance(string, str) and canonical_int_re.match(string)
+ if type(string) is str:
+    # This function is called a lot, so for maximum performance avoid
+    # regexps, which would otherwise make the code much shorter; a regexp
+    # needs roughly twice the time of this hand-rolled check.
+ if string:
+ if string == "0":
+ return True
+ if string[0] == "-":
+ string = string[1:]
+ if not string:
+ return False
+ if '1' <= string[0] <= '9':
+ return string.isdigit()
+
+ return False
# This matches things like "<(asdf)", "<!(cmd)", "<!@(cmd)", "<|(list)",
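The rewritten check accepts exactly the strings for which str(int(s)) == s. A few illustrative cases (hypothetical harness; assumes gyp's pylib is on sys.path):

    from gyp.input import IsStrCanonicalInt

    for s, expected in [('0', True), ('42', True), ('-7', True),
                        ('007', False), ('-0', False), ('', False),
                        ('1.5', False), ('+1', False)]:
        assert bool(IsStrCanonicalInt(s)) == expected, s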
@@ -677,7 +700,7 @@ cached_command_results = {}
def FixupPlatformCommand(cmd):
if sys.platform == 'win32':
- if type(cmd) == list:
+ if type(cmd) is list:
cmd = [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:]
else:
cmd = re.sub('^cat ', 'type ', cmd)
@@ -767,7 +790,7 @@ def ExpandVariables(input, phase, variables, build_file):
# contexts. However, since filtration has no chance to run on <|(),
# this seems like the only obvious way to give them access to filters.
if file_list:
- processed_variables = copy.deepcopy(variables)
+ processed_variables = gyp.simple_copy.deepcopy(variables)
ProcessListFiltersInDict(contents, processed_variables)
# Recurse to expand variables in the contents
contents = ExpandVariables(contents, phase,
@@ -804,7 +827,7 @@ def ExpandVariables(input, phase, variables, build_file):
# This works around actions/rules which have more inputs than will
# fit on the command line.
if file_list:
- if type(contents) == list:
+ if type(contents) is list:
contents_list = contents
else:
contents_list = contents.split(' ')
@@ -837,17 +860,15 @@ def ExpandVariables(input, phase, variables, build_file):
use_shell = False
# Check for a cached value to avoid executing commands, or generating
- # file lists more than once.
- # TODO(http://code.google.com/p/gyp/issues/detail?id=112): It is
- # possible that the command being invoked depends on the current
- # directory. For that case the syntax needs to be extended so that the
- # directory is also used in cache_key (it becomes a tuple).
+ # file lists more than once. The cache key contains the command to be
+ # run as well as the directory to run it from, to account for commands
+ # that depend on their current directory.
# TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
# someone could author a set of GYP files where each time the command
# is invoked it produces different output by design. When the need
# arises, the syntax should be extended to support no caching off a
# command's output so it is run every time.
- cache_key = str(contents)
+ cache_key = (str(contents), build_file_dir)
cached_value = cached_command_results.get(cache_key, None)
if cached_value is None:
gyp.DebugOutput(gyp.DEBUG_VARIABLES,
@@ -925,10 +946,9 @@ def ExpandVariables(input, phase, variables, build_file):
else:
replacement = variables[contents]
- if isinstance(replacement, list):
+ if type(replacement) is list:
for item in replacement:
- if (not contents[-1] == '/' and
- not isinstance(item, str) and not isinstance(item, int)):
+ if not contents[-1] == '/' and type(item) not in (str, int):
raise GypError('Variable ' + contents +
' must expand to a string or list of strings; ' +
'list contains a ' +
@@ -938,8 +958,7 @@ def ExpandVariables(input, phase, variables, build_file):
# with conditions sections.
ProcessVariablesAndConditionsInList(replacement, phase, variables,
build_file)
- elif not isinstance(replacement, str) and \
- not isinstance(replacement, int):
+ elif type(replacement) not in (str, int):
raise GypError('Variable ' + contents +
' must expand to a string or list of strings; ' +
'found a ' + replacement.__class__.__name__)
@@ -948,7 +967,7 @@ def ExpandVariables(input, phase, variables, build_file):
# Expanding in list context. It's guaranteed that there's only one
# replacement to do in |input_str| and that it's this replacement. See
# above.
- if isinstance(replacement, list):
+ if type(replacement) is list:
# If it's already a list, make a copy.
output = replacement[:]
else:
@@ -957,7 +976,7 @@ def ExpandVariables(input, phase, variables, build_file):
else:
# Expanding in string context.
encoded_replacement = ''
- if isinstance(replacement, list):
+ if type(replacement) is list:
# When expanding a list into string context, turn the list items
# into a string in a way that will work with a subprocess call.
#
@@ -979,8 +998,8 @@ def ExpandVariables(input, phase, variables, build_file):
# expanding local variables (variables defined in the same
# variables block as this one).
gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output)
- if isinstance(output, list):
- if output and isinstance(output[0], list):
+ if type(output) is list:
+ if output and type(output[0]) is list:
# Leave output alone if it's a list of lists.
# We don't want such lists to be stringified.
pass
@@ -994,7 +1013,7 @@ def ExpandVariables(input, phase, variables, build_file):
output = ExpandVariables(output, phase, variables, build_file)
# Convert all strings that are canonically-represented integers into integers.
- if isinstance(output, list):
+ if type(output) is list:
for index in xrange(0, len(output)):
if IsStrCanonicalInt(output[index]):
output[index] = int(output[index])
@@ -1003,6 +1022,57 @@ def ExpandVariables(input, phase, variables, build_file):
return output
+# The same condition is often evaluated over and over again so it
+# makes sense to cache as much as possible between evaluations.
+cached_conditions_asts = {}
+
+def EvalCondition(condition, conditions_key, phase, variables, build_file):
+  """Returns the dict that should be used, or None if nothing should be
+  used."""
+ if type(condition) is not list:
+ raise GypError(conditions_key + ' must be a list')
+ if len(condition) != 2 and len(condition) != 3:
+ # It's possible that condition[0] won't work in which case this
+ # attempt will raise its own IndexError. That's probably fine.
+ raise GypError(conditions_key + ' ' + condition[0] +
+ ' must be length 2 or 3, not ' + str(len(condition)))
+
+ [cond_expr, true_dict] = condition[0:2]
+ false_dict = None
+ if len(condition) == 3:
+ false_dict = condition[2]
+
+  # Do expansions on the condition itself. Since the condition can naturally
+ # contain variable references without needing to resort to GYP expansion
+ # syntax, this is of dubious value for variables, but someone might want to
+ # use a command expansion directly inside a condition.
+ cond_expr_expanded = ExpandVariables(cond_expr, phase, variables,
+ build_file)
+ if type(cond_expr_expanded) not in (str, int):
+ raise ValueError, \
+ 'Variable expansion in this context permits str and int ' + \
+ 'only, found ' + cond_expr_expanded.__class__.__name__
+
+ try:
+ if cond_expr_expanded in cached_conditions_asts:
+ ast_code = cached_conditions_asts[cond_expr_expanded]
+ else:
+ ast_code = compile(cond_expr_expanded, '<string>', 'eval')
+ cached_conditions_asts[cond_expr_expanded] = ast_code
+ if eval(ast_code, {'__builtins__': None}, variables):
+ return true_dict
+ return false_dict
+ except SyntaxError, e:
+ syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
+ 'at character %d.' %
+ (str(e.args[0]), e.text, build_file, e.offset),
+ e.filename, e.lineno, e.offset, e.text)
+ raise syntax_error
+ except NameError, e:
+ gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
+ (cond_expr_expanded, build_file))
+ raise GypError(e)
+
def ProcessConditionsInDict(the_dict, phase, variables, build_file):
# Process a 'conditions' or 'target_conditions' section in the_dict,
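The cache is sound because a given condition string always compiles to the same code object. A minimal standalone sketch of the compile-once pattern (not gyp's exact API; the sample condition is made up):

    _ast_cache = {}

    def eval_condition(expr, variables):
        code = _ast_cache.get(expr)
        if code is None:                  # compile once per distinct string
            code = compile(expr, '<string>', 'eval')
            _ast_cache[expr] = code
        return eval(code, {'__builtins__': None}, variables)

    assert eval_condition('OS == "mac" and use_ninja == 1',
                          {'OS': 'mac', 'use_ninja': 1})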
@@ -1038,48 +1108,8 @@ def ProcessConditionsInDict(the_dict, phase, variables, build_file):
del the_dict[conditions_key]
for condition in conditions_list:
- if not isinstance(condition, list):
- raise GypError(conditions_key + ' must be a list')
- if len(condition) != 2 and len(condition) != 3:
- # It's possible that condition[0] won't work in which case this
- # attempt will raise its own IndexError. That's probably fine.
- raise GypError(conditions_key + ' ' + condition[0] +
- ' must be length 2 or 3, not ' + str(len(condition)))
-
- [cond_expr, true_dict] = condition[0:2]
- false_dict = None
- if len(condition) == 3:
- false_dict = condition[2]
-
- # Do expansions on the condition itself. Since the conditon can naturally
- # contain variable references without needing to resort to GYP expansion
- # syntax, this is of dubious value for variables, but someone might want to
- # use a command expansion directly inside a condition.
- cond_expr_expanded = ExpandVariables(cond_expr, phase, variables,
- build_file)
- if not isinstance(cond_expr_expanded, str) and \
- not isinstance(cond_expr_expanded, int):
- raise ValueError, \
- 'Variable expansion in this context permits str and int ' + \
- 'only, found ' + expanded.__class__.__name__
-
- try:
- ast_code = compile(cond_expr_expanded, '<string>', 'eval')
-
- if eval(ast_code, {'__builtins__': None}, variables):
- merge_dict = true_dict
- else:
- merge_dict = false_dict
- except SyntaxError, e:
- syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
- 'at character %d.' %
- (str(e.args[0]), e.text, build_file, e.offset),
- e.filename, e.lineno, e.offset, e.text)
- raise syntax_error
- except NameError, e:
- gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
- (cond_expr_expanded, build_file))
- raise GypError(e)
+ merge_dict = EvalCondition(condition, conditions_key, phase, variables,
+ build_file)
if merge_dict != None:
       # Expand variables and nested conditionals in the merge_dict before
@@ -1094,8 +1124,7 @@ def LoadAutomaticVariablesFromDict(variables, the_dict):
# Any keys with plain string values in the_dict become automatic variables.
# The variable name is the key name with a "_" character prepended.
for key, value in the_dict.iteritems():
- if isinstance(value, str) or isinstance(value, int) or \
- isinstance(value, list):
+ if type(value) in (str, int, list):
variables['_' + key] = value
@@ -1108,8 +1137,7 @@ def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
# (it could be a list or it could be parentless because it is a root dict),
# the_dict_key will be None.
for key, value in the_dict.get('variables', {}).iteritems():
- if not isinstance(value, str) and not isinstance(value, int) and \
- not isinstance(value, list):
+ if type(value) not in (str, int, list):
continue
if key.endswith('%'):
@@ -1162,9 +1190,9 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
for key, value in the_dict.iteritems():
# Skip "variables", which was already processed if present.
- if key != 'variables' and isinstance(value, str):
+ if key != 'variables' and type(value) is str:
expanded = ExpandVariables(value, phase, variables, build_file)
- if not isinstance(expanded, str) and not isinstance(expanded, int):
+ if type(expanded) not in (str, int):
raise ValueError, \
'Variable expansion in this context permits str and int ' + \
'only, found ' + expanded.__class__.__name__ + ' for ' + key
@@ -1221,21 +1249,21 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
for key, value in the_dict.iteritems():
# Skip "variables" and string values, which were already processed if
# present.
- if key == 'variables' or isinstance(value, str):
+ if key == 'variables' or type(value) is str:
continue
- if isinstance(value, dict):
+ if type(value) is dict:
# Pass a copy of the variables dict so that subdicts can't influence
# parents.
ProcessVariablesAndConditionsInDict(value, phase, variables,
build_file, key)
- elif isinstance(value, list):
+ elif type(value) is list:
# The list itself can't influence the variables dict, and
# ProcessVariablesAndConditionsInList will make copies of the variables
# dict if it needs to pass it to something that can influence it. No
# copy is necessary here.
ProcessVariablesAndConditionsInList(value, phase, variables,
build_file)
- elif not isinstance(value, int):
+ elif type(value) is not int:
raise TypeError, 'Unknown type ' + value.__class__.__name__ + \
' for ' + key
@@ -1246,17 +1274,17 @@ def ProcessVariablesAndConditionsInList(the_list, phase, variables,
index = 0
while index < len(the_list):
item = the_list[index]
- if isinstance(item, dict):
+ if type(item) is dict:
# Make a copy of the variables dict so that it won't influence anything
# outside of its own scope.
ProcessVariablesAndConditionsInDict(item, phase, variables, build_file)
- elif isinstance(item, list):
+ elif type(item) is list:
ProcessVariablesAndConditionsInList(item, phase, variables, build_file)
- elif isinstance(item, str):
+ elif type(item) is str:
expanded = ExpandVariables(item, phase, variables, build_file)
- if isinstance(expanded, str) or isinstance(expanded, int):
+ if type(expanded) in (str, int):
the_list[index] = expanded
- elif isinstance(expanded, list):
+ elif type(expanded) is list:
the_list[index:index+1] = expanded
index += len(expanded)
@@ -1268,7 +1296,7 @@ def ProcessVariablesAndConditionsInList(the_list, phase, variables,
'Variable expansion in this context permits strings and ' + \
'lists only, found ' + expanded.__class__.__name__ + ' at ' + \
index
- elif not isinstance(item, int):
+ elif type(item) is not int:
raise TypeError, 'Unknown type ' + item.__class__.__name__ + \
' at index ' + index
index = index + 1
@@ -1443,6 +1471,20 @@ def RemoveSelfDependencies(targets):
target_dict[dependency_key] = Filter(dependencies, target_name)
+def RemoveLinkDependenciesFromNoneTargets(targets):
+ """Remove dependencies having the 'link_dependency' attribute from the 'none'
+ targets."""
+ for target_name, target_dict in targets.iteritems():
+ for dependency_key in dependency_sections:
+ dependencies = target_dict.get(dependency_key, [])
+ if dependencies:
+ for t in dependencies:
+ if target_dict.get('type', None) == 'none':
+ if targets[t].get('variables', {}).get('link_dependency', 0):
+ target_dict[dependency_key] = \
+ Filter(target_dict[dependency_key], t)
+
+
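A hypothetical .gyp fragment exercising the new filter: the 'none' target's dependency is dropped because the dependee marks itself as a pure link dependency.

    {
      'targets': [
        { 'target_name': 'resources_only',
          'type': 'none',
          'dependencies': ['helper_lib'] },   # filtered out by the pass above
        { 'target_name': 'helper_lib',
          'type': 'static_library',
          'variables': { 'link_dependency': 1 },
          'sources': ['helper.cc'] },
      ],
    }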
class DependencyGraphNode(object):
"""
@@ -1468,7 +1510,7 @@ class DependencyGraphNode(object):
# are the "ref" attributes of DependencyGraphNodes. Every target will
# appear in flat_list after all of its dependencies, and before all of its
# dependents.
- flat_list = []
+ flat_list = OrderedSet()
# in_degree_zeros is the list of DependencyGraphNodes that have no
# dependencies not in flat_list. Initially, it is a copy of the children
@@ -1482,12 +1524,15 @@ class DependencyGraphNode(object):
# as work progresses, so that the next node to process from the list can
# always be accessed at a consistent position.
node = in_degree_zeros.pop()
- flat_list.append(node.ref)
+ flat_list.add(node.ref)
# Look at dependents of the node just added to flat_list. Some of them
# may now belong in in_degree_zeros.
for node_dependent in node.dependents:
is_in_degree_zero = True
+ # TODO: We want to check through the
+ # node_dependent.dependencies list but if it's long and we
+ # always start at the beginning, then we get O(n^2) behaviour.
for node_dependent_dependency in node_dependent.dependencies:
if not node_dependent_dependency.ref in flat_list:
             # The dependent has one or more dependencies not in flat_list. There
@@ -1503,7 +1548,7 @@ class DependencyGraphNode(object):
# iteration of the outer loop.
in_degree_zeros.add(node_dependent)
- return flat_list
+ return list(flat_list)
def FindCycles(self, path=None):
"""
@@ -1589,21 +1634,26 @@ class DependencyGraphNode(object):
return self._AddImportedDependencies(targets, dependencies)
def DeepDependencies(self, dependencies=None):
- """Returns a list of all of a target's dependencies, recursively."""
- if dependencies == None:
- dependencies = []
+ """Returns an OrderedSet of all of a target's dependencies, recursively."""
+ if dependencies is None:
+      # An OrderedSet gives ordered output plus fast "is it already added"
+      # checks.
+ dependencies = OrderedSet()
for dependency in self.dependencies:
# Check for None, corresponding to the root node.
- if dependency.ref != None and dependency.ref not in dependencies:
- dependencies.append(dependency.ref)
+ if dependency.ref is None:
+ continue
+ if dependency.ref not in dependencies:
+ dependencies.add(dependency.ref)
dependency.DeepDependencies(dependencies)
return dependencies
def _LinkDependenciesInternal(self, targets, include_shared_libraries,
dependencies=None, initial=True):
- """Returns a list of dependency targets that are linked into this target.
+ """Returns an OrderedSet of dependency targets that are linked
+ into this target.
This function has a split personality, depending on the setting of
|initial|. Outside callers should always leave |initial| at its default
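OrderedSet (imported from gyp.common above) combines set-fast membership with stable insertion order, which keeps flattened dependency lists deterministic across runs. Illustrative behaviour, assuming the gyp.common implementation:

    from gyp.common import OrderedSet

    deps = OrderedSet()
    for ref in ('base', 'net', 'base'):   # the duplicate add is a no-op
        deps.add(ref)
    assert list(deps) == ['base', 'net']
    assert 'net' in deps                  # O(1) membership, unlike a list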
@@ -1616,11 +1666,13 @@ class DependencyGraphNode(object):
If |include_shared_libraries| is False, the resulting dependencies will not
include shared_library targets that are linked into this target.
"""
- if dependencies == None:
- dependencies = []
+ if dependencies is None:
+      # An OrderedSet gives ordered output plus fast "is it already added"
+      # checks.
+ dependencies = OrderedSet()
# Check for None, corresponding to the root node.
- if self.ref == None:
+ if self.ref is None:
return dependencies
# It's kind of sucky that |targets| has to be passed into this function,
@@ -1648,8 +1700,7 @@ class DependencyGraphNode(object):
# Don't traverse 'none' targets if explicitly excluded.
if (target_type == 'none' and
not targets[self.ref].get('dependencies_traverse', True)):
- if self.ref not in dependencies:
- dependencies.append(self.ref)
+ dependencies.add(self.ref)
return dependencies
# Executables and loadable modules are already fully and finally linked.
@@ -1671,7 +1722,7 @@ class DependencyGraphNode(object):
# The target is linkable, add it to the list of link dependencies.
if self.ref not in dependencies:
- dependencies.append(self.ref)
+ dependencies.add(self.ref)
if initial or not is_linkable:
# If this is a subsequent target and it's linkable, don't look any
# further for linkable dependencies, as they'll already be linked into
@@ -1966,25 +2017,25 @@ def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
hashable_to_set = set(x for x in to if is_hashable(x))
for item in fro:
singleton = False
- if isinstance(item, str) or isinstance(item, int):
+ if type(item) in (str, int):
# The cheap and easy case.
if is_paths:
to_item = MakePathRelative(to_file, fro_file, item)
else:
to_item = item
- if not isinstance(item, str) or not item.startswith('-'):
+ if not (type(item) is str and item.startswith('-')):
# Any string that doesn't begin with a "-" is a singleton - it can
# only appear once in a list, to be enforced by the list merge append
# or prepend.
singleton = True
- elif isinstance(item, dict):
+ elif type(item) is dict:
# Make a copy of the dictionary, continuing to look for paths to fix.
# The other intelligent aspects of merge processing won't apply because
# item is being merged into an empty dict.
to_item = {}
MergeDicts(to_item, item, to_file, fro_file)
- elif isinstance(item, list):
+ elif type(item) is list:
# Recurse, making a copy of the list. If the list contains any
# descendant dicts, path fixing will occur. Note that here, custom
# values for is_paths and append are dropped; those are only to be
@@ -2030,10 +2081,10 @@ def MergeDicts(to, fro, to_file, fro_file):
# modified.
if k in to:
bad_merge = False
- if isinstance(v, str) or isinstance(v, int):
- if not (isinstance(to[k], str) or isinstance(to[k], int)):
+ if type(v) in (str, int):
+ if type(to[k]) not in (str, int):
bad_merge = True
- elif v.__class__ != to[k].__class__:
+ elif type(v) is not type(to[k]):
bad_merge = True
if bad_merge:
@@ -2041,19 +2092,19 @@ def MergeDicts(to, fro, to_file, fro_file):
'Attempt to merge dict value of type ' + v.__class__.__name__ + \
' into incompatible type ' + to[k].__class__.__name__ + \
' for key ' + k
- if isinstance(v, str) or isinstance(v, int):
+ if type(v) in (str, int):
# Overwrite the existing value, if any. Cheap and easy.
is_path = IsPathSection(k)
if is_path:
to[k] = MakePathRelative(to_file, fro_file, v)
else:
to[k] = v
- elif isinstance(v, dict):
+ elif type(v) is dict:
# Recurse, guaranteeing copies will be made of objects that require it.
if not k in to:
to[k] = {}
MergeDicts(to[k], v, to_file, fro_file)
- elif isinstance(v, list):
+ elif type(v) is list:
# Lists in dicts can be merged with different policies, depending on
# how the key in the "from" dict (k, the from-key) is written.
#
@@ -2096,7 +2147,7 @@ def MergeDicts(to, fro, to_file, fro_file):
# If the key ends in "?", the list will only be merged if it doesn't
# already exist.
continue
- if not isinstance(to[list_base], list):
+ elif type(to[list_base]) is not list:
# This may not have been checked above if merging in a list with an
# extension character.
raise TypeError, \
@@ -2157,43 +2208,39 @@ def SetUpConfigurations(target, target_dict):
if not 'configurations' in target_dict:
target_dict['configurations'] = {'Default': {}}
if not 'default_configuration' in target_dict:
- concrete = [i for i in target_dict['configurations'].iterkeys()
- if not target_dict['configurations'][i].get('abstract')]
+ concrete = [i for (i, config) in target_dict['configurations'].iteritems()
+ if not config.get('abstract')]
target_dict['default_configuration'] = sorted(concrete)[0]
- for configuration in target_dict['configurations'].keys():
- old_configuration_dict = target_dict['configurations'][configuration]
+ merged_configurations = {}
+ configs = target_dict['configurations']
+ for (configuration, old_configuration_dict) in configs.iteritems():
# Skip abstract configurations (saves work only).
if old_configuration_dict.get('abstract'):
continue
# Configurations inherit (most) settings from the enclosing target scope.
# Get the inheritance relationship right by making a copy of the target
# dict.
- new_configuration_dict = copy.deepcopy(target_dict)
-
- # Take out the bits that don't belong in a "configurations" section.
- # Since configuration setup is done before conditional, exclude, and rules
- # processing, be careful with handling of the suffix characters used in
- # those phases.
- delete_keys = []
- for key in new_configuration_dict:
+ new_configuration_dict = {}
+ for (key, target_val) in target_dict.iteritems():
key_ext = key[-1:]
if key_ext in key_suffixes:
key_base = key[:-1]
else:
key_base = key
- if key_base in non_configuration_keys:
- delete_keys.append(key)
-
- for key in delete_keys:
- del new_configuration_dict[key]
+ if not key_base in non_configuration_keys:
+ new_configuration_dict[key] = gyp.simple_copy.deepcopy(target_val)
# Merge in configuration (with all its parents first).
MergeConfigWithInheritance(new_configuration_dict, build_file,
target_dict, configuration, [])
- # Put the new result back into the target dict as a configuration.
- target_dict['configurations'][configuration] = new_configuration_dict
+ merged_configurations[configuration] = new_configuration_dict
+
+  # Put the merged configurations back into the target dict.
+ for configuration in merged_configurations.keys():
+ target_dict['configurations'][configuration] = (
+ merged_configurations[configuration])
# Now drop all the abstract ones.
for configuration in target_dict['configurations'].keys():
@@ -2264,7 +2311,7 @@ def ProcessListFiltersInDict(name, the_dict):
if operation != '!' and operation != '/':
continue
- if not isinstance(value, list):
+ if type(value) is not list:
raise ValueError, name + ' key ' + key + ' must be list, not ' + \
value.__class__.__name__
@@ -2276,7 +2323,7 @@ def ProcessListFiltersInDict(name, the_dict):
del_lists.append(key)
continue
- if not isinstance(the_dict[list_key], list):
+ if type(the_dict[list_key]) is not list:
value = the_dict[list_key]
raise ValueError, name + ' key ' + list_key + \
' must be list, not ' + \
@@ -2378,17 +2425,17 @@ def ProcessListFiltersInDict(name, the_dict):
# Now recurse into subdicts and lists that may contain dicts.
for key, value in the_dict.iteritems():
- if isinstance(value, dict):
+ if type(value) is dict:
ProcessListFiltersInDict(key, value)
- elif isinstance(value, list):
+ elif type(value) is list:
ProcessListFiltersInList(key, value)
def ProcessListFiltersInList(name, the_list):
for item in the_list:
- if isinstance(item, dict):
+ if type(item) is dict:
ProcessListFiltersInDict(name, item)
- elif isinstance(item, list):
+ elif type(item) is list:
ProcessListFiltersInList(name, item)
@@ -2506,7 +2553,7 @@ def ValidateRunAsInTarget(target, target_dict, build_file):
run_as = target_dict.get('run_as')
if not run_as:
return
- if not isinstance(run_as, dict):
+ if type(run_as) is not dict:
raise GypError("The 'run_as' in target %s from file %s should be a "
"dictionary." %
(target_name, build_file))
@@ -2515,17 +2562,17 @@ def ValidateRunAsInTarget(target, target_dict, build_file):
raise GypError("The 'run_as' in target %s from file %s must have an "
"'action' section." %
(target_name, build_file))
- if not isinstance(action, list):
+ if type(action) is not list:
raise GypError("The 'action' for 'run_as' in target %s from file %s "
"must be a list." %
(target_name, build_file))
working_directory = run_as.get('working_directory')
- if working_directory and not isinstance(working_directory, str):
+ if working_directory and type(working_directory) is not str:
raise GypError("The 'working_directory' for 'run_as' in target %s "
"in file %s should be a string." %
(target_name, build_file))
environment = run_as.get('environment')
- if environment and not isinstance(environment, dict):
+ if environment and type(environment) is not dict:
raise GypError("The 'environment' for 'run_as' in target %s "
"in file %s should be a dictionary." %
(target_name, build_file))
@@ -2555,17 +2602,17 @@ def TurnIntIntoStrInDict(the_dict):
# Use items instead of iteritems because there's no need to try to look at
# reinserted keys and their associated values.
for k, v in the_dict.items():
- if isinstance(v, int):
+ if type(v) is int:
v = str(v)
the_dict[k] = v
- elif isinstance(v, dict):
+ elif type(v) is dict:
TurnIntIntoStrInDict(v)
- elif isinstance(v, list):
+ elif type(v) is list:
TurnIntIntoStrInList(v)
- if isinstance(k, int):
- the_dict[str(k)] = v
+ if type(k) is int:
del the_dict[k]
+ the_dict[str(k)] = v
def TurnIntIntoStrInList(the_list):
@@ -2573,11 +2620,11 @@ def TurnIntIntoStrInList(the_list):
"""
for index in xrange(0, len(the_list)):
item = the_list[index]
- if isinstance(item, int):
+ if type(item) is int:
the_list[index] = str(item)
- elif isinstance(item, dict):
+ elif type(item) is dict:
TurnIntIntoStrInDict(item)
- elif isinstance(item, list):
+ elif type(item) is list:
TurnIntIntoStrInList(item)
@@ -2647,8 +2694,8 @@ def SetGeneratorGlobals(generator_input_info):
# Set up path_sections and non_configuration_keys with the default data plus
# the generator-specific data.
global path_sections
- path_sections = base_path_sections[:]
- path_sections.extend(generator_input_info['path_sections'])
+ path_sections = set(base_path_sections)
+ path_sections.update(generator_input_info['path_sections'])
global non_configuration_keys
non_configuration_keys = base_non_configuration_keys[:]
@@ -2707,6 +2754,10 @@ def Load(build_files, variables, includes, depth, generator_input_info, check,
# Expand dependencies specified as build_file:*.
ExpandWildcardDependencies(targets, data)
+ # Remove all dependencies marked as 'link_dependency' from the targets of
+ # type 'none'.
+ RemoveLinkDependenciesFromNoneTargets(targets)
+
# Apply exclude (!) and regex (/) list filters only for dependency_sections.
for target_name, target_dict in targets.iteritems():
tmp_dict = {}
diff --git a/deps/gyp/pylib/gyp/mac_tool.py b/deps/gyp/pylib/gyp/mac_tool.py
index c61a3ef60b..821e291e9f 100755
--- a/deps/gyp/pylib/gyp/mac_tool.py
+++ b/deps/gyp/pylib/gyp/mac_tool.py
@@ -219,10 +219,14 @@ class MacTool(object):
"""Calls libtool and filters out '/path/to/libtool: file: foo.o has no
symbols'."""
libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
+ libtool_re5 = re.compile(
+ r'^.*libtool: warning for library: ' +
+ r'.* the table of contents is empty ' +
+ r'\(no object file members in the library define global symbols\)$')
libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE)
_, err = libtoolout.communicate()
for line in err.splitlines():
- if not libtool_re.match(line):
+ if not libtool_re.match(line) and not libtool_re5.match(line):
print >>sys.stderr, line
return libtoolout.returncode
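The new pattern filters warnings like the following (illustrative line; the library name is made up):

    line = ('/usr/bin/libtool: warning for library: libfoo.a '
            'the table of contents is empty '
            '(no object file members in the library define global symbols)')
    assert libtool_re5.match(line)   # dropped, like the 'has no symbols' noise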
diff --git a/deps/gyp/pylib/gyp/msvs_emulation.py b/deps/gyp/pylib/gyp/msvs_emulation.py
index 709ba305d2..63593a424d 100644
--- a/deps/gyp/pylib/gyp/msvs_emulation.py
+++ b/deps/gyp/pylib/gyp/msvs_emulation.py
@@ -345,6 +345,15 @@ class MsvsSettings(object):
else:
return None
+ def GetAsmflags(self, config):
+ """Returns the flags that need to be added to ml invocations."""
+ config = self._TargetConfig(config)
+ asmflags = []
+ safeseh = self._Setting(('MASM', 'UseSafeExceptionHandlers'), config)
+ if safeseh == 'true':
+ asmflags.append('/safeseh')
+ return asmflags
+
def GetCflags(self, config):
"""Returns the flags that need to be added to .c and .cc compilations."""
config = self._TargetConfig(config)
@@ -379,6 +388,8 @@ class MsvsSettings(object):
map={'false': '-', 'true': ''}, prefix='/Zc:wchar_t')
cl('EnablePREfast', map={'true': '/analyze'})
cl('AdditionalOptions', prefix='')
+ cl('EnableEnhancedInstructionSet',
+ map={'1': 'SSE', '2': 'SSE2', '3': 'AVX', '4': 'IA32'}, prefix='/arch:')
cflags.extend(['/FI' + f for f in self._Setting(
('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])])
if self.vs_version.short_name in ('2013', '2013e'):
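A hypothetical msvs_settings fragment exercising both new mappings (the MASM flag feeds the $asmflags variable added in the ninja generator above):

    'msvs_settings': {
      'MASM': { 'UseSafeExceptionHandlers': 'true' },               # -> /safeseh
      'VCCLCompilerTool': { 'EnableEnhancedInstructionSet': '2' },  # -> /arch:SSE2
    },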
@@ -388,12 +399,6 @@ class MsvsSettings(object):
cflags = filter(lambda x: not x.startswith('/MP'), cflags)
return cflags
- def GetPrecompiledHeader(self, config, gyp_to_build_path):
- """Returns an object that handles the generation of precompiled header
- build steps."""
- config = self._TargetConfig(config)
- return _PchHelper(self, config, gyp_to_build_path)
-
def _GetPchFlags(self, config, extension):
"""Get the flags to be added to the cflags for precompiled header support.
"""
@@ -532,6 +537,7 @@ class MsvsSettings(object):
ld('Profile', map={'true': '/PROFILE'})
ld('LargeAddressAware',
map={'1': ':NO', '2': ''}, prefix='/LARGEADDRESSAWARE')
+ ld('ImageHasSafeExceptionHandlers', map={'true': '/SAFESEH'})
# TODO(scottmg): This should sort of be somewhere else (not really a flag).
ld('AdditionalDependencies', prefix='')
@@ -787,7 +793,7 @@ class PrecompiledHeader(object):
def GetObjDependencies(self, sources, objs, arch):
"""Given a list of sources files and the corresponding object files,
returns a list of the pch files that should be depended upon. The
- additional wrapping in the return value is for interface compatability
+ additional wrapping in the return value is for interface compatibility
with make.py on Mac, and xcode_emulation.py."""
assert arch is None
if not self._PchHeader():
diff --git a/deps/gyp/pylib/gyp/simple_copy.py b/deps/gyp/pylib/gyp/simple_copy.py
new file mode 100644
index 0000000000..74c98c5a79
--- /dev/null
+++ b/deps/gyp/pylib/gyp/simple_copy.py
@@ -0,0 +1,46 @@
+# Copyright 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A clone of the default copy.deepcopy that doesn't handle cyclic
+structures or complex types except for dicts and lists. This is
+because gyp copies such large structures that even a small per-copy
+overhead ends up taking seconds in a project the size of Chromium."""
+
+class Error(Exception):
+ pass
+
+__all__ = ["Error", "deepcopy"]
+
+def deepcopy(x):
+ """Deep copy operation on gyp objects such as strings, ints, dicts
+ and lists. More than twice as fast as copy.deepcopy but much less
+ generic."""
+
+ try:
+ return _deepcopy_dispatch[type(x)](x)
+ except KeyError:
+    raise Error('Unsupported type %s for deepcopy. Use copy.deepcopy '
+                'or expand simple_copy support.' % type(x))
+
+_deepcopy_dispatch = d = {}
+
+def _deepcopy_atomic(x):
+ return x
+
+for x in (type(None), int, long, float,
+ bool, str, unicode, type):
+ d[x] = _deepcopy_atomic
+
+def _deepcopy_list(x):
+ return [deepcopy(a) for a in x]
+d[list] = _deepcopy_list
+
+def _deepcopy_dict(x):
+ y = {}
+ for key, value in x.iteritems():
+ y[deepcopy(key)] = deepcopy(value)
+ return y
+d[dict] = _deepcopy_dict
+
+del d
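Usage sketch for the new module (Python 2, matching the long/unicode dispatch above; assumes gyp's pylib is on sys.path):

    from gyp import simple_copy

    original = {'targets': [{'target_name': 'a', 'sources': ['a.cc']}]}
    clone = simple_copy.deepcopy(original)
    clone['targets'][0]['sources'].append('b.cc')
    assert original['targets'][0]['sources'] == ['a.cc']   # deep, not shallow
    # simple_copy.deepcopy(set()) would raise Error: sets are unsupported.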
diff --git a/deps/gyp/pylib/gyp/win_tool.py b/deps/gyp/pylib/gyp/win_tool.py
index 5872f0750c..44e1b0760b 100755
--- a/deps/gyp/pylib/gyp/win_tool.py
+++ b/deps/gyp/pylib/gyp/win_tool.py
@@ -13,6 +13,7 @@ import os
import re
import shutil
import subprocess
+import stat
import string
import sys
@@ -89,9 +90,19 @@ class WinTool(object):
"""Emulation of rm -rf out && cp -af in out."""
if os.path.exists(dest):
if os.path.isdir(dest):
- shutil.rmtree(dest)
+ def _on_error(fn, path, excinfo):
+ # The operation failed, possibly because the file is set to
+ # read-only. If that's why, make it writable and try the op again.
+ if not os.access(path, os.W_OK):
+ os.chmod(path, stat.S_IWRITE)
+ fn(path)
+ shutil.rmtree(dest, onerror=_on_error)
else:
+ if not os.access(dest, os.W_OK):
+ # Attempt to make the file writable before deleting it.
+ os.chmod(dest, stat.S_IWRITE)
os.unlink(dest)
+
if os.path.isdir(source):
shutil.copytree(source, dest)
else:
@@ -237,10 +248,11 @@ class WinTool(object):
# Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
# objidl.idl
lines = out.splitlines()
- prefix = 'Processing '
- processing = set(os.path.basename(x) for x in lines if x.startswith(prefix))
+ prefixes = ('Processing ', '64 bit Processing ')
+ processing = set(os.path.basename(x)
+ for x in lines if x.startswith(prefixes))
for line in lines:
- if not line.startswith(prefix) and line not in processing:
+ if not line.startswith(prefixes) and line not in processing:
print line
return popen.returncode
@@ -288,5 +300,16 @@ class WinTool(object):
dir = dir[0] if dir else None
return subprocess.call(args, shell=True, env=env, cwd=dir)
+ def ExecClCompile(self, project_dir, selected_files):
+ """Executed by msvs-ninja projects when the 'ClCompile' target is used to
+ build selected C/C++ files."""
+ project_dir = os.path.relpath(project_dir, BASE_DIR)
+ selected_files = selected_files.split(';')
+ ninja_targets = [os.path.join(project_dir, filename) + '^^'
+ for filename in selected_files]
+ cmd = ['ninja.exe']
+ cmd.extend(ninja_targets)
+ return subprocess.call(cmd, shell=True, cwd=BASE_DIR)
+
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
diff --git a/deps/gyp/pylib/gyp/xcode_emulation.py b/deps/gyp/pylib/gyp/xcode_emulation.py
index 30f27d5832..859cd5a937 100644
--- a/deps/gyp/pylib/gyp/xcode_emulation.py
+++ b/deps/gyp/pylib/gyp/xcode_emulation.py
@@ -18,6 +18,129 @@ import sys
import tempfile
from gyp.common import GypError
+# Populated lazily by XcodeVersion, for efficiency, and to fix an issue when
+# "xcodebuild" is called too quickly (it has been found to return an
+# incorrect version number).
+XCODE_VERSION_CACHE = None
+
+# Populated lazily by GetXcodeArchsDefault, to an |XcodeArchsDefault| instance
+# corresponding to the installed version of Xcode.
+XCODE_ARCHS_DEFAULT_CACHE = None
+
+
+def XcodeArchsVariableMapping(archs, archs_including_64_bit=None):
+  """Constructs a dictionary with the expansion of the $(ARCHS_STANDARD)
+  variable, and optionally of $(ARCHS_STANDARD_INCLUDING_64_BIT)."""
+ mapping = {'$(ARCHS_STANDARD)': archs}
+ if archs_including_64_bit:
+ mapping['$(ARCHS_STANDARD_INCLUDING_64_BIT)'] = archs_including_64_bit
+ return mapping
+
+class XcodeArchsDefault(object):
+ """A class to resolve ARCHS variable from xcode_settings, resolving Xcode
+ macros and implementing filtering by VALID_ARCHS. The expansion of macros
+ depends on the SDKROOT used ("macosx", "iphoneos", "iphonesimulator") and
+ on the version of Xcode.
+ """
+
+ # Match variable like $(ARCHS_STANDARD).
+ variable_pattern = re.compile(r'\$\([a-zA-Z_][a-zA-Z0-9_]*\)$')
+
+ def __init__(self, default, mac, iphonesimulator, iphoneos):
+ self._default = (default,)
+ self._archs = {'mac': mac, 'ios': iphoneos, 'iossim': iphonesimulator}
+
+ def _VariableMapping(self, sdkroot):
+ """Returns the dictionary of variable mapping depending on the SDKROOT."""
+ sdkroot = sdkroot.lower()
+ if 'iphoneos' in sdkroot:
+ return self._archs['ios']
+ elif 'iphonesimulator' in sdkroot:
+ return self._archs['iossim']
+ else:
+ return self._archs['mac']
+
+ def _ExpandArchs(self, archs, sdkroot):
+    """Expands variable references in ARCHS, and removes duplicates."""
+ variable_mapping = self._VariableMapping(sdkroot)
+ expanded_archs = []
+ for arch in archs:
+ if self.variable_pattern.match(arch):
+ variable = arch
+ try:
+ variable_expansion = variable_mapping[variable]
+ for arch in variable_expansion:
+ if arch not in expanded_archs:
+ expanded_archs.append(arch)
+ except KeyError as e:
+ print 'Warning: Ignoring unsupported variable "%s".' % variable
+ elif arch not in expanded_archs:
+ expanded_archs.append(arch)
+ return expanded_archs
+
+ def ActiveArchs(self, archs, valid_archs, sdkroot):
+    """Expands variable references in ARCHS, and filters by VALID_ARCHS if
+    it is defined (if not set, Xcode accepts any value in ARCHS; otherwise,
+    only values present in VALID_ARCHS are kept)."""
+ expanded_archs = self._ExpandArchs(archs or self._default, sdkroot or '')
+ if valid_archs:
+ filtered_archs = []
+ for arch in expanded_archs:
+ if arch in valid_archs:
+ filtered_archs.append(arch)
+ expanded_archs = filtered_archs
+ return expanded_archs
+
+
+def GetXcodeArchsDefault():
+ """Returns the |XcodeArchsDefault| object to use to expand ARCHS for the
+ installed version of Xcode. The default values used by Xcode for ARCHS
+ and the expansion of the variables depends on the version of Xcode used.
+
+  All versions prior to Xcode 5.0, and Xcode 5.1 and later, use
+  $(ARCHS_STANDARD) if ARCHS is unset, while Xcode 5.0 to 5.0.2 use
+  $(ARCHS_STANDARD_INCLUDING_64_BIT). The latter variable was added in
+  Xcode 5.0 and deprecated with Xcode 5.1.
+
+  For the "macosx" SDKROOT, all versions starting with Xcode 5.0 include
+  the 64-bit architecture as part of $(ARCHS_STANDARD) and default to
+  building only it.
+
+  For the "iphoneos" and "iphonesimulator" SDKROOTs, 64-bit architectures
+  are part of $(ARCHS_STANDARD_INCLUDING_64_BIT) from Xcode 5.0. From
+  Xcode 5.1, they are also part of $(ARCHS_STANDARD).
+
+  All of those rules are encoded in the construction of the
+  |XcodeArchsDefault| object to use depending on the version of Xcode
+  detected. The object is cached for performance reasons."""
+ global XCODE_ARCHS_DEFAULT_CACHE
+ if XCODE_ARCHS_DEFAULT_CACHE:
+ return XCODE_ARCHS_DEFAULT_CACHE
+ xcode_version, _ = XcodeVersion()
+ if xcode_version < '0500':
+ XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
+ '$(ARCHS_STANDARD)',
+ XcodeArchsVariableMapping(['i386']),
+ XcodeArchsVariableMapping(['i386']),
+ XcodeArchsVariableMapping(['armv7']))
+ elif xcode_version < '0510':
+ XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
+ '$(ARCHS_STANDARD_INCLUDING_64_BIT)',
+ XcodeArchsVariableMapping(['x86_64'], ['x86_64']),
+ XcodeArchsVariableMapping(['i386'], ['i386', 'x86_64']),
+ XcodeArchsVariableMapping(
+ ['armv7', 'armv7s'],
+ ['armv7', 'armv7s', 'arm64']))
+ else:
+ XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
+ '$(ARCHS_STANDARD)',
+ XcodeArchsVariableMapping(['x86_64'], ['x86_64']),
+ XcodeArchsVariableMapping(['i386', 'x86_64'], ['i386', 'x86_64']),
+ XcodeArchsVariableMapping(
+ ['armv7', 'armv7s', 'arm64'],
+ ['armv7', 'armv7s', 'arm64']))
+ return XCODE_ARCHS_DEFAULT_CACHE
+
+
class XcodeSettings(object):
"""A class that understands the gyp 'xcode_settings' object."""
@@ -34,10 +157,6 @@ class XcodeSettings(object):
# cached at class-level for efficiency.
_codesigning_key_cache = {}
- # Populated lazily by _XcodeVersion. Shared by all XcodeSettings, so cached
- # at class-level for efficiency.
- _xcode_version_cache = ()
-
def __init__(self, spec):
self.spec = spec
@@ -267,17 +386,12 @@ class XcodeSettings(object):
def GetActiveArchs(self, configname):
"""Returns the architectures this target should be built for."""
- # TODO: Look at VALID_ARCHS, ONLY_ACTIVE_ARCH; possibly set
- # CURRENT_ARCH / NATIVE_ARCH env vars?
- return self.xcode_settings[configname].get('ARCHS', [self._DefaultArch()])
-
- def _GetStdout(self, cmdlist):
- job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
- out = job.communicate()[0]
- if job.returncode != 0:
- sys.stderr.write(out + '\n')
- raise GypError('Error %d running %s' % (job.returncode, cmdlist[0]))
- return out.rstrip('\n')
+ config_settings = self.xcode_settings[configname]
+ xcode_archs_default = GetXcodeArchsDefault()
+ return xcode_archs_default.ActiveArchs(
+ config_settings.get('ARCHS'),
+ config_settings.get('VALID_ARCHS'),
+ config_settings.get('SDKROOT'))
def _GetSdkVersionInfoItem(self, sdk, infoitem):
# xcodebuild requires Xcode and can't run on Command Line Tools-only
@@ -285,7 +399,7 @@ class XcodeSettings(object):
# Since the CLT has no SDK paths anyway, returning None is the
# most sensible route and should still do the right thing.
try:
- return self._GetStdout(['xcodebuild', '-version', '-sdk', sdk, infoitem])
+ return GetStdout(['xcodebuild', '-version', '-sdk', sdk, infoitem])
except:
pass
@@ -396,7 +510,8 @@ class XcodeSettings(object):
if arch is not None:
archs = [arch]
else:
- archs = self._Settings().get('ARCHS', [self._DefaultArch()])
+ assert self.configname
+ archs = self.GetActiveArchs(self.configname)
if len(archs) != 1:
# TODO: Supporting fat binaries will be annoying.
self._WarnUnimplemented('ARCHS')
@@ -653,7 +768,8 @@ class XcodeSettings(object):
if arch is not None:
archs = [arch]
else:
- archs = self._Settings().get('ARCHS', [self._DefaultArch()])
+ assert self.configname
+ archs = self.GetActiveArchs(self.configname)
if len(archs) != 1:
# TODO: Supporting fat binaries will be annoying.
self._WarnUnimplemented('ARCHS')
@@ -678,6 +794,8 @@ class XcodeSettings(object):
for directory in framework_dirs:
ldflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root))
+ self._Appendf(ldflags, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
+
self.configname = None
return ldflags
@@ -874,65 +992,7 @@ class XcodeSettings(object):
return libraries
def _BuildMachineOSBuild(self):
- return self._GetStdout(['sw_vers', '-buildVersion'])
-
- # This method ported from the logic in Homebrew's CLT version check
- def _CLTVersion(self):
- # pkgutil output looks like
- # package-id: com.apple.pkg.CLTools_Executables
- # version: 5.0.1.0.1.1382131676
- # volume: /
- # location: /
- # install-time: 1382544035
- # groups: com.apple.FindSystemFiles.pkg-group com.apple.DevToolsBoth.pkg-group com.apple.DevToolsNonRelocatableShared.pkg-group
- STANDALONE_PKG_ID = "com.apple.pkg.DeveloperToolsCLILeo"
- FROM_XCODE_PKG_ID = "com.apple.pkg.DeveloperToolsCLI"
- MAVERICKS_PKG_ID = "com.apple.pkg.CLTools_Executables"
-
- regex = re.compile('version: (?P<version>.+)')
- for key in [MAVERICKS_PKG_ID, STANDALONE_PKG_ID, FROM_XCODE_PKG_ID]:
- try:
- output = self._GetStdout(['/usr/sbin/pkgutil', '--pkg-info', key])
- return re.search(regex, output).groupdict()['version']
- except:
- continue
-
- def _XcodeVersion(self):
- # `xcodebuild -version` output looks like
- # Xcode 4.6.3
- # Build version 4H1503
- # or like
- # Xcode 3.2.6
- # Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
- # BuildVersion: 10M2518
- # Convert that to '0463', '4H1503'.
- if len(XcodeSettings._xcode_version_cache) == 0:
- try:
- version_list = self._GetStdout(['xcodebuild', '-version']).splitlines()
- # In some circumstances xcodebuild exits 0 but doesn't return
- # the right results; for example, a user on 10.7 or 10.8 with
- # a bogus path set via xcode-select
- # In that case this may be a CLT-only install so fall back to
- # checking that version.
- if len(version_list) < 2:
- raise GypError, "xcodebuild returned unexpected results"
- except:
- version = self._CLTVersion()
- if version:
- version = re.match('(\d\.\d\.?\d*)', version).groups()[0]
- else:
- raise GypError, "No Xcode or CLT version detected!"
- # The CLT has no build information, so we return an empty string.
- version_list = [version, '']
- version = version_list[0]
- build = version_list[-1]
- # Be careful to convert "4.2" to "0420":
- version = version.split()[-1].replace('.', '')
- version = (version + '0' * (3 - len(version))).zfill(4)
- if build:
- build = build.split()[-1]
- XcodeSettings._xcode_version_cache = (version, build)
- return XcodeSettings._xcode_version_cache
+ return GetStdout(['sw_vers', '-buildVersion'])
def _XcodeIOSDeviceFamily(self, configname):
family = self.xcode_settings[configname].get('TARGETED_DEVICE_FAMILY', '1')
@@ -944,7 +1004,7 @@ class XcodeSettings(object):
cache = {}
cache['BuildMachineOSBuild'] = self._BuildMachineOSBuild()
- xcode, xcode_build = self._XcodeVersion()
+ xcode, xcode_build = XcodeVersion()
cache['DTXcode'] = xcode
cache['DTXcodeBuild'] = xcode_build
@@ -982,14 +1042,15 @@ class XcodeSettings(object):
project, then the environment variable was empty. Starting with this
version, Xcode uses the name of the newest SDK installed.
"""
- if self._XcodeVersion() < '0500':
+ xcode_version, xcode_build = XcodeVersion()
+ if xcode_version < '0500':
return ''
default_sdk_path = self._XcodeSdkPath('')
default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path)
if default_sdk_root:
return default_sdk_root
try:
- all_sdks = self._GetStdout(['xcodebuild', '-showsdks'])
+ all_sdks = GetStdout(['xcodebuild', '-showsdks'])
except:
# If xcodebuild fails, there will be no valid SDKs
return ''
@@ -1002,28 +1063,6 @@ class XcodeSettings(object):
return sdk_root
return ''
- def _DefaultArch(self):
- # For Mac projects, Xcode changed the default value used when ARCHS is not
- # set from "i386" to "x86_64".
- #
- # For iOS projects, if ARCHS is unset, it defaults to "armv7 armv7s" when
- # building for a device, and the simulator binaries are always build for
- # "i386".
- #
- # For new projects, ARCHS is set to $(ARCHS_STANDARD_INCLUDING_64_BIT),
- # which correspond to "armv7 armv7s arm64", and when building the simulator
- # the architecture is either "i386" or "x86_64" depending on the simulated
- # device (respectively 32-bit or 64-bit device).
- #
- # Since the value returned by this function is only used when ARCHS is not
- # set, then on iOS we return "i386", as the default xcode project generator
- # does not set ARCHS if it is not set in the .gyp file.
- if self.isIOS:
- return 'i386'
- version, build = self._XcodeVersion()
- if version >= '0500':
- return 'x86_64'
- return 'i386'
class MacPrefixHeader(object):
"""A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature.
@@ -1131,6 +1170,81 @@ class MacPrefixHeader(object):
]
+def XcodeVersion():
+ """Returns a tuple of version and build version of installed Xcode."""
+ # `xcodebuild -version` output looks like
+ # Xcode 4.6.3
+ # Build version 4H1503
+ # or like
+ # Xcode 3.2.6
+ # Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
+ # BuildVersion: 10M2518
+ # Convert that to '0463', '4H1503'.
+ global XCODE_VERSION_CACHE
+ if XCODE_VERSION_CACHE:
+ return XCODE_VERSION_CACHE
+ try:
+ version_list = GetStdout(['xcodebuild', '-version']).splitlines()
+ # In some circumstances xcodebuild exits 0 but doesn't return
+ # the right results; for example, a user on 10.7 or 10.8 with
+ # a bogus path set via xcode-select.
+ # In that case this may be a CLT-only install, so fall back to
+ # checking that version.
+ if len(version_list) < 2:
+ raise GypError, "xcodebuild returned unexpected results"
+ except:
+ version = CLTVersion()
+ if version:
+ version = re.match('(\d\.\d\.?\d*)', version).groups()[0]
+ else:
+ raise GypError, "No Xcode or CLT version detected!"
+ # The CLT has no build information, so we return an empty string.
+ version_list = [version, '']
+ version = version_list[0]
+ build = version_list[-1]
+ # Be careful to convert "4.2" to "0420":
+ version = version.split()[-1].replace('.', '')
+ version = (version + '0' * (3 - len(version))).zfill(4)
+ if build:
+ build = build.split()[-1]
+ XCODE_VERSION_CACHE = (version, build)
+ return XCODE_VERSION_CACHE
+
+
+# This function is ported from the logic in Homebrew's CLT version check.
+def CLTVersion():
+ """Returns the version of the command-line tools from pkgutil."""
+ # pkgutil output looks like
+ # package-id: com.apple.pkg.CLTools_Executables
+ # version: 5.0.1.0.1.1382131676
+ # volume: /
+ # location: /
+ # install-time: 1382544035
+ # groups: com.apple.FindSystemFiles.pkg-group com.apple.DevToolsBoth.pkg-group com.apple.DevToolsNonRelocatableShared.pkg-group
+ STANDALONE_PKG_ID = "com.apple.pkg.DeveloperToolsCLILeo"
+ FROM_XCODE_PKG_ID = "com.apple.pkg.DeveloperToolsCLI"
+ MAVERICKS_PKG_ID = "com.apple.pkg.CLTools_Executables"
+
+ regex = re.compile('version: (?P<version>.+)')
+ for key in [MAVERICKS_PKG_ID, STANDALONE_PKG_ID, FROM_XCODE_PKG_ID]:
+ try:
+ output = GetStdout(['/usr/sbin/pkgutil', '--pkg-info', key])
+ return re.search(regex, output).groupdict()['version']
+ except:
+ continue
+
+
+def GetStdout(cmdlist):
+ """Returns the content of standard output returned by invoking |cmdlist|.
+ Raises |GypError| if the command return with a non-zero return code."""
+ job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
+ out = job.communicate()[0]
+ if job.returncode != 0:
+ sys.stderr.write(out + '\n')
+ raise GypError('Error %d running %s' % (job.returncode, cmdlist[0]))
+ return out.rstrip('\n')
+
+
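
A worked example of the normalization in XcodeVersion, using hypothetical
xcodebuild output; note how "4.2" becomes "0420" and "5.1" becomes "0510":

    version_list = ['Xcode 5.1', 'Build version 5B130a']    # GetStdout().splitlines()
    version = version_list[0].split()[-1].replace('.', '')  # '51'
    version = (version + '0' * (3 - len(version))).zfill(4) # '0510'
    build = version_list[-1].split()[-1]                    # '5B130a'
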
def MergeGlobalXcodeSettingsToSpec(global_dict, spec):
"""Merges the global xcode_settings dictionary into each configuration of the
target represented by spec. For keys that are both in the global and the local
@@ -1310,6 +1424,11 @@ def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
install_name_base = xcode_settings.GetInstallNameBase()
if install_name_base:
env['DYLIB_INSTALL_NAME_BASE'] = install_name_base
+ if XcodeVersion() >= '0500' and not env.get('SDKROOT'):
+ sdk_root = xcode_settings._SdkRoot(configuration)
+ if not sdk_root:
+ sdk_root = xcode_settings._XcodeSdkPath('')
+ env['SDKROOT'] = sdk_root
if not additional_settings:
additional_settings = {}
@@ -1420,16 +1539,16 @@ def _HasIOSTarget(targets):
def _AddIOSDeviceConfigurations(targets):
"""Clone all targets and append -iphoneos to the name. Configure these targets
- to build for iOS devices."""
- for target_dict in targets.values():
- for config_name in target_dict['configurations'].keys():
- config = target_dict['configurations'][config_name]
- new_config_name = config_name + '-iphoneos'
- new_config_dict = copy.deepcopy(config)
- if target_dict['toolset'] == 'target':
- new_config_dict['xcode_settings']['ARCHS'] = ['armv7']
- new_config_dict['xcode_settings']['SDKROOT'] = 'iphoneos'
- target_dict['configurations'][new_config_name] = new_config_dict
+ to build for iOS devices and use correct architectures for those builds."""
+ for target_dict in targets.itervalues():
+ toolset = target_dict['toolset']
+ configs = target_dict['configurations']
+ for config_name, config_dict in dict(configs).iteritems():
+ iphoneos_config_dict = copy.deepcopy(config_dict)
+ configs[config_name + '-iphoneos'] = iphoneos_config_dict
+ configs[config_name + '-iphonesimulator'] = config_dict
+ if toolset == 'target':
+ iphoneos_config_dict['xcode_settings']['SDKROOT'] = 'iphoneos'
return targets
def CloneConfigurationForDeviceAndEmulator(target_dicts):
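
An illustration of what the rewritten helper now produces for a target with a
single, hypothetical 'Debug' configuration:

    # Before: {'Debug': debug_dict}
    # After:  {'Debug': debug_dict,
    #          'Debug-iphonesimulator': debug_dict,   # same dict, new key
    #          'Debug-iphoneos': deep copy of debug_dict with
    #                            xcode_settings['SDKROOT'] = 'iphoneos'}
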
diff --git a/deps/gyp/pylib/gyp/xcode_ninja.py b/deps/gyp/pylib/gyp/xcode_ninja.py
new file mode 100644
index 0000000000..0e5a70c714
--- /dev/null
+++ b/deps/gyp/pylib/gyp/xcode_ninja.py
@@ -0,0 +1,257 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Xcode-ninja wrapper project file generator.
+
+This updates the data structures passed to the Xcode gyp generator to build
+with ninja instead. The Xcode project itself is transformed into a list of
+executable targets, each with a build step to build with ninja, and a target
+with every source and resource file. This appears to sidestep some of the
+major performance headaches experienced using complex projects and large number
+of targets within Xcode.
+"""
+
+import errno
+import gyp.generator.ninja
+import os
+import re
+import xml.sax.saxutils
+
+
+def _WriteWorkspace(main_gyp, sources_gyp):
+ """ Create a workspace to wrap main and sources gyp paths. """
+ (build_file_root, build_file_ext) = os.path.splitext(main_gyp)
+ workspace_path = build_file_root + '.xcworkspace'
+ try:
+ os.makedirs(workspace_path)
+ except OSError, e:
+ if e.errno != errno.EEXIST:
+ raise
+ output_string = '<?xml version="1.0" encoding="UTF-8"?>\n' + \
+ '<Workspace version = "1.0">\n'
+ for gyp_name in [main_gyp, sources_gyp]:
+ name = os.path.splitext(os.path.basename(gyp_name))[0] + '.xcodeproj'
+ name = xml.sax.saxutils.quoteattr("group:" + name)
+ output_string += ' <FileRef location = %s></FileRef>\n' % name
+ output_string += '</Workspace>\n'
+
+ workspace_file = os.path.join(workspace_path, "contents.xcworkspacedata")
+
+ try:
+ with open(workspace_file, 'r') as input_file:
+ input_string = input_file.read()
+ if input_string == output_string:
+ return
+ except IOError:
+ # Ignore errors if the file doesn't exist.
+ pass
+
+ with open(workspace_file, 'w') as output_file:
+ output_file.write(output_string)
+
+def _TargetFromSpec(old_spec, params):
+ """ Create fake target for xcode-ninja wrapper. """
+ # Determine ninja top level build dir (e.g. /path/to/out).
+ ninja_toplevel = None
+ jobs = 0
+ if params:
+ options = params['options']
+ ninja_toplevel = \
+ os.path.join(options.toplevel_dir,
+ gyp.generator.ninja.ComputeOutputDir(params))
+ jobs = params.get('generator_flags', {}).get('xcode_ninja_jobs', 0)
+
+ target_name = old_spec.get('target_name')
+ product_name = old_spec.get('product_name', target_name)
+
+ ninja_target = {}
+ ninja_target['target_name'] = target_name
+ ninja_target['product_name'] = product_name
+ ninja_target['toolset'] = old_spec.get('toolset')
+ ninja_target['default_configuration'] = old_spec.get('default_configuration')
+ ninja_target['configurations'] = {}
+
+ # Tell Xcode to look in |ninja_toplevel| for build products.
+ new_xcode_settings = {}
+ if ninja_toplevel:
+ new_xcode_settings['CONFIGURATION_BUILD_DIR'] = \
+ "%s/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)" % ninja_toplevel
+
+ if 'configurations' in old_spec:
+ for config in old_spec['configurations'].iterkeys():
+ old_xcode_settings = old_spec['configurations'][config]['xcode_settings']
+ if 'IPHONEOS_DEPLOYMENT_TARGET' in old_xcode_settings:
+ new_xcode_settings['CODE_SIGNING_REQUIRED'] = "NO"
+ new_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET'] = \
+ old_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET']
+ ninja_target['configurations'][config] = {}
+ ninja_target['configurations'][config]['xcode_settings'] = \
+ new_xcode_settings
+
+ ninja_target['mac_bundle'] = old_spec.get('mac_bundle', 0)
+ ninja_target['type'] = old_spec['type']
+ if ninja_toplevel:
+ ninja_target['actions'] = [
+ {
+ 'action_name': 'Compile and copy %s via ninja' % target_name,
+ 'inputs': [],
+ 'outputs': [],
+ 'action': [
+ 'env',
+ 'PATH=%s' % os.environ['PATH'],
+ 'ninja',
+ '-C',
+ new_xcode_settings['CONFIGURATION_BUILD_DIR'],
+ target_name,
+ ],
+ 'message': 'Compile and copy %s via ninja' % target_name,
+ },
+ ]
+ if jobs > 0:
+ ninja_target['actions'][0]['action'].extend(('-j', jobs))
+ return ninja_target
+
+def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
+ """Limit targets for Xcode wrapper.
+
+ Xcode sometimes performs poorly with too many targets, so only include
+ proper executable targets, with filters to customize.
+ Arguments:
+ target_extras: Regular expression; any matching target is always included.
+ executable_target_pattern: Regular expression limiting executable targets.
+ spec: Specification for the target.
+ """
+ target_name = spec.get('target_name')
+ # Always include targets matching target_extras.
+ if target_extras is not None and re.search(target_extras, target_name):
+ return True
+
+ # Otherwise just show executable targets.
+ if spec.get('type', '') == 'executable' and \
+ spec.get('product_extension', '') != 'bundle':
+
+ # If there is a filter and the target does not match, exclude the target.
+ if executable_target_pattern is not None:
+ if not re.search(executable_target_pattern, target_name):
+ return False
+ return True
+ return False
+
+def CreateWrapper(target_list, target_dicts, data, params):
+ """Initialize targets for the ninja wrapper.
+
+ This sets up the necessary variables in the targets to generate Xcode projects
+ that use ninja as an external builder.
+ Arguments:
+ target_list: List of target pairs: 'base/base.gyp:base'.
+ target_dicts: Dict of target properties keyed on target pair.
+ data: Dict of flattened build files keyed on gyp path.
+ params: Dict of global options for gyp.
+ """
+ orig_gyp = params['build_files'][0]
+ for gyp_name, gyp_dict in data.iteritems():
+ if gyp_name == orig_gyp:
+ depth = gyp_dict['_DEPTH']
+
+ # Check for a custom main gyp name; otherwise use the first build file
+ # and insert ".ninja" before the .gyp extension.
+ generator_flags = params.get('generator_flags', {})
+ main_gyp = generator_flags.get('xcode_ninja_main_gyp', None)
+ if main_gyp is None:
+ (build_file_root, build_file_ext) = os.path.splitext(orig_gyp)
+ main_gyp = build_file_root + ".ninja" + build_file_ext
+
+ # Create new |target_list|, |target_dicts| and |data| data structures.
+ new_target_list = []
+ new_target_dicts = {}
+ new_data = {}
+
+ # Set base keys needed for |data|.
+ new_data[main_gyp] = {}
+ new_data[main_gyp]['included_files'] = []
+ new_data[main_gyp]['targets'] = []
+ new_data[main_gyp]['xcode_settings'] = \
+ data[orig_gyp].get('xcode_settings', {})
+
+ # Normally the xcode-ninja generator includes only valid executable targets.
+ # If |xcode_ninja_executable_target_pattern| is set, that list is reduced to
+ # executable targets that match the pattern. (Default all)
+ executable_target_pattern = \
+ generator_flags.get('xcode_ninja_executable_target_pattern', None)
+
+ # To include other, non-executable targets, add a pattern matching their
+ # names to the |xcode_ninja_target_pattern| regular expression. (Default none)
+
+ for old_qualified_target in target_list:
+ spec = target_dicts[old_qualified_target]
+ if IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
+ # Add to new_target_list.
+ target_name = spec.get('target_name')
+ new_target_name = '%s:%s#target' % (main_gyp, target_name)
+ new_target_list.append(new_target_name)
+
+ # Add to new_target_dicts.
+ new_target_dicts[new_target_name] = _TargetFromSpec(spec, params)
+
+ # Add to new_data.
+ for old_target in data[old_qualified_target.split(':')[0]]['targets']:
+ if old_target['target_name'] == target_name:
+ new_data_target = {}
+ new_data_target['target_name'] = old_target['target_name']
+ new_data_target['toolset'] = old_target['toolset']
+ new_data[main_gyp]['targets'].append(new_data_target)
+
+ # Create sources target.
+ sources_target_name = 'sources_for_indexing'
+ sources_target = _TargetFromSpec(
+ { 'target_name' : sources_target_name,
+ 'toolset': 'target',
+ 'default_configuration': 'Default',
+ 'mac_bundle': '0',
+ 'type': 'executable'
+ }, None)
+
+ # Tell Xcode to look everywhere for headers.
+ sources_target['configurations'] = {'Default': { 'include_dirs': [ depth ] } }
+
+ sources = []
+ for target, target_dict in target_dicts.iteritems():
+ base = os.path.dirname(target)
+ files = target_dict.get('sources', []) + \
+ target_dict.get('mac_bundle_resources', [])
+ # Remove files starting with $. These are mostly intermediate files for the
+ # build system.
+ files = [ file for file in files if not file.startswith('$')]
+
+ # Make sources relative to root build file.
+ relative_path = os.path.dirname(main_gyp)
+ sources += [ os.path.relpath(os.path.join(base, file), relative_path)
+ for file in files ]
+
+ sources_target['sources'] = sorted(set(sources))
+
+ # Put sources_for_indexing in its own gyp.
+ sources_gyp = \
+ os.path.join(os.path.dirname(main_gyp), sources_target_name + ".gyp")
+ fully_qualified_target_name = \
+ '%s:%s#target' % (sources_gyp, sources_target_name)
+
+ # Add to new_target_list, new_target_dicts and new_data.
+ new_target_list.append(fully_qualified_target_name)
+ new_target_dicts[fully_qualified_target_name] = sources_target
+ new_data_target = {}
+ new_data_target['target_name'] = sources_target['target_name']
+ new_data_target['_DEPTH'] = depth
+ new_data_target['toolset'] = "target"
+ new_data[sources_gyp] = {}
+ new_data[sources_gyp]['targets'] = []
+ new_data[sources_gyp]['included_files'] = []
+ new_data[sources_gyp]['xcode_settings'] = \
+ data[orig_gyp].get('xcode_settings', {})
+ new_data[sources_gyp]['targets'].append(new_data_target)
+
+ # Write workspace to file.
+ _WriteWorkspace(main_gyp, sources_gyp)
+ return (new_target_list, new_target_dicts, new_data)
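
The filtering in IsValidTargetForWrapper reduces to a few cases; a sketch with
made-up target specs:

    app = {'target_name': 'my_app', 'type': 'executable'}
    lib = {'target_name': 'my_lib', 'type': 'static_library'}

    IsValidTargetForWrapper(None, None, app)        # True: plain executable
    IsValidTargetForWrapper(None, '^tests$', app)   # False: filter excludes it
    IsValidTargetForWrapper('^my_', None, lib)      # True: target_extras match
    IsValidTargetForWrapper(None, None, lib)        # False: not an executable

(In Chromium this wrapper is typically selected via the GYP_GENERATORS
environment variable, e.g. GYP_GENERATORS=ninja,xcode-ninja; that wiring lives
outside this file.)
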
diff --git a/deps/gyp/setup.py b/deps/gyp/setup.py
new file mode 100755
index 0000000000..75a42558d8
--- /dev/null
+++ b/deps/gyp/setup.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from setuptools import setup
+
+setup(
+ name='gyp',
+ version='0.1',
+ description='Generate Your Projects',
+ author='Chromium Authors',
+ author_email='chromium-dev@googlegroups.com',
+ url='http://code.google.com/p/gyp',
+ package_dir = {'': 'pylib'},
+ packages=['gyp', 'gyp.generator'],
+ entry_points = {'console_scripts': ['gyp=gyp:script_main'] }
+)
diff --git a/include/llmr/geometry/glyph_atlas.hpp b/include/llmr/geometry/glyph_atlas.hpp
index 312572f5ef..5bcd573f56 100644
--- a/include/llmr/geometry/glyph_atlas.hpp
+++ b/include/llmr/geometry/glyph_atlas.hpp
@@ -2,6 +2,7 @@
#define LLMR_GEOMETRY_GLYPH_ATLAS
#include <llmr/geometry/binpack.hpp>
+#include <llmr/text/glyph_store.hpp>
#include <string>
#include <set>
@@ -30,7 +31,7 @@ public:
Rect<uint16_t> addGlyph(uint64_t tile_id, const std::string& face_name,
- const VectorTileGlyph& glyph);
+ const SDFGlyph& glyph);
void removeGlyphs(uint64_t tile_id);
void bind();
diff --git a/include/llmr/map/map.hpp b/include/llmr/map/map.hpp
index ab1738f961..49050540c9 100644
--- a/include/llmr/map/map.hpp
+++ b/include/llmr/map/map.hpp
@@ -8,6 +8,7 @@
#include <llmr/map/transform.hpp>
#include <llmr/style/style.hpp>
#include <llmr/geometry/glyph_atlas.hpp>
+#include <llmr/text/glyph_store.hpp>
#include <llmr/renderer/painter.hpp>
#include <llmr/util/noncopyable.hpp>
#include <llmr/util/texturepool.hpp>
@@ -98,6 +99,7 @@ public:
inline const TransformState &getState() const { return state; }
inline const Style &getStyle() const { return style; }
inline GlyphAtlas &getGlyphAtlas() { return glyphAtlas; }
+ inline GlyphStore &getGlyphStore() { return glyphStore; }
inline SpriteAtlas &getSpriteAtlas() { return spriteAtlas; }
inline uv_loop_t *getLoop() { return loop; }
inline time getAnimationTime() const { return animationTime; }
@@ -152,6 +154,7 @@ private:
Texturepool texturepool;
Style style;
GlyphAtlas glyphAtlas;
+ GlyphStore glyphStore;
SpriteAtlas spriteAtlas;
Painter painter;
diff --git a/include/llmr/map/tile.hpp b/include/llmr/map/tile.hpp
index 0f43f9df2b..9681c36374 100644
--- a/include/llmr/map/tile.hpp
+++ b/include/llmr/map/tile.hpp
@@ -9,6 +9,7 @@
#include <forward_list>
#include <string>
#include <bitset>
+#include <memory>
namespace llmr {
diff --git a/include/llmr/map/tile_parser.hpp b/include/llmr/map/tile_parser.hpp
index 86545e4f76..5785c29b3a 100644
--- a/include/llmr/map/tile_parser.hpp
+++ b/include/llmr/map/tile_parser.hpp
@@ -4,11 +4,15 @@
#include <llmr/map/vector_tile_data.hpp>
#include <llmr/map/vector_tile.hpp>
#include <llmr/text/placement.hpp>
+#include <llmr/text/glyph_store.hpp>
+#include <llmr/text/glyph.hpp>
+#include <llmr/util/utf.hpp>
namespace llmr {
class Style;
class GlyphAtlas;
+class GlyphStore;
class SpriteAtlas;
class LayerDescription;
@@ -16,12 +20,13 @@ class Bucket;
class TileParser {
public:
- TileParser(const std::string& data, VectorTileData& tile, const Style& style, GlyphAtlas& glyphAtlas, SpriteAtlas &spriteAtlas);
+ TileParser(const std::string& data, VectorTileData& tile, const Style& style, GlyphAtlas& glyphAtlas, GlyphStore &glyphStore, SpriteAtlas &spriteAtlas);
private:
bool obsolete() const;
void parseGlyphs();
void parseStyleLayers(const std::vector<LayerDescription>& layers);
+ void addGlyph(uint64_t tileid, const std::string stackname, const std::u32string &string, const FontStack &fontStack, GlyphAtlas &glyphAtlas, GlyphPositions &face);
std::unique_ptr<Bucket> createBucket(const BucketDescription& bucket_desc);
std::unique_ptr<Bucket> createFillBucket(const VectorTileLayer& layer, const BucketDescription& bucket_desc);
std::unique_ptr<Bucket> createLineBucket(const VectorTileLayer& layer, const BucketDescription& bucket_desc);
@@ -35,6 +40,7 @@ private:
VectorTileData& tile;
const Style& style;
GlyphAtlas& glyphAtlas;
+ GlyphStore &glyphStore;
SpriteAtlas &spriteAtlas;
Faces faces;
Placement placement;
diff --git a/include/llmr/map/vector_tile_data.hpp b/include/llmr/map/vector_tile_data.hpp
index 24d2c7cc2b..f82e2ebff9 100644
--- a/include/llmr/map/vector_tile_data.hpp
+++ b/include/llmr/map/vector_tile_data.hpp
@@ -13,7 +13,7 @@
#include <llmr/geometry/icon_buffer.hpp>
#include <llmr/geometry/text_buffer.hpp>
-#include <map>
+#include <unordered_map>
namespace llmr {
@@ -42,7 +42,7 @@ protected:
// Holds the buckets of this tile.
// They contain the location offsets in the buffers stored above
- std::map<std::string, std::unique_ptr<Bucket>> buckets;
+ std::unordered_map<std::string, std::unique_ptr<Bucket>> buckets;
};
diff --git a/include/llmr/platform/gl.hpp b/include/llmr/platform/gl.hpp
index f0f913a5ea..da30d746e5 100644
--- a/include/llmr/platform/gl.hpp
+++ b/include/llmr/platform/gl.hpp
@@ -39,7 +39,7 @@
namespace llmr {
namespace gl {
// Debug group markers, useful for debugging on iOS
-#if GL_EXT_debug_marker
+#if defined(__APPLE__) && defined(DEBUG) && defined(GL_EXT_debug_marker)
// static int indent = 0;
inline void start_group(const std::string &str) {
glPushGroupMarkerEXT(0, str.c_str());
diff --git a/include/llmr/platform/platform.hpp b/include/llmr/platform/platform.hpp
index 972e7db6ba..d516378c2a 100644
--- a/include/llmr/platform/platform.hpp
+++ b/include/llmr/platform/platform.hpp
@@ -25,9 +25,11 @@ struct Response {
// Makes an HTTP request of a URL, preferably on a background thread, and calls a function with the
// results in the original thread (which runs the libuv loop).
+// If the loop pointer is NULL, the callback function will be called on an arbitrary thread.
// Returns a cancellable request.
std::shared_ptr<Request> request_http(const std::string &url,
- std::function<void(Response *)> callback, uv_loop_t *loop);
+ std::function<void(Response *)> callback,
+ uv_loop_t *loop = nullptr);
// Cancels an HTTP request.
void cancel_request_http(const std::shared_ptr<Request> &req);
diff --git a/include/llmr/renderer/bucket.hpp b/include/llmr/renderer/bucket.hpp
index f8447be89a..43bd96e3f5 100644
--- a/include/llmr/renderer/bucket.hpp
+++ b/include/llmr/renderer/bucket.hpp
@@ -2,6 +2,7 @@
#define LLMR_RENDERER_BUCKET
#include <string>
+#include <memory>
#include <llmr/map/tile.hpp>
#include <llmr/util/noncopyable.hpp>
#include <llmr/renderer/prerendered_texture.hpp>
diff --git a/include/llmr/renderer/prerendered_texture.hpp b/include/llmr/renderer/prerendered_texture.hpp
index e35cc0321e..4b163ac70c 100644
--- a/include/llmr/renderer/prerendered_texture.hpp
+++ b/include/llmr/renderer/prerendered_texture.hpp
@@ -26,7 +26,6 @@ private:
GLint previous_fbo = 0;
GLuint fbo = 0;
GLuint texture = 0;
- bool mipmapped = false;
};
}
diff --git a/include/llmr/renderer/text_bucket.hpp b/include/llmr/renderer/text_bucket.hpp
index 2b73aa74da..ad282bdd0b 100644
--- a/include/llmr/renderer/text_bucket.hpp
+++ b/include/llmr/renderer/text_bucket.hpp
@@ -7,6 +7,7 @@
#include <llmr/geometry/elements_buffer.hpp>
#include <llmr/map/vector_tile.hpp>
#include <llmr/text/types.hpp>
+#include <llmr/text/glyph.hpp>
#include <memory>
#include <map>
#include <vector>
@@ -36,7 +37,7 @@ public:
PlacementRange placementRange, float zoom);
void addFeature(const VectorTileFeature &feature,
- const IndexedFaces &faces,
+ const GlyphPositions &face,
const std::map<Value, Shaping> &shapings);
void drawGlyphs(TextShader &shader);
diff --git a/include/llmr/style/properties.hpp b/include/llmr/style/properties.hpp
index 4dfc7e40b5..b8ffcfad6e 100644
--- a/include/llmr/style/properties.hpp
+++ b/include/llmr/style/properties.hpp
@@ -16,6 +16,7 @@ namespace llmr {
typedef std::array<float, 4> Color;
struct PropertyTransition {
+ inline PropertyTransition(uint16_t duration, uint16_t delay = 0) : duration(duration), delay(delay) {}
uint16_t duration = 0;
uint16_t delay = 0;
};
diff --git a/include/llmr/style/style_parser.hpp b/include/llmr/style/style_parser.hpp
index 109b94f1ff..7c7d0a70ab 100644
--- a/include/llmr/style/style_parser.hpp
+++ b/include/llmr/style/style_parser.hpp
@@ -47,7 +47,7 @@ private:
Value parseValue(JSVal value);
FunctionProperty::fn parseFunctionType(JSVal type);
FunctionProperty parseFunction(JSVal value);
- PropertyTransition parseTransition(JSVal value, std::string property_name);
+ boost::optional<PropertyTransition> parseTransition(JSVal value, std::string property_name);
private:
std::map<std::string, const rapidjson::Value *> constants;
diff --git a/include/llmr/text/collision.hpp b/include/llmr/text/collision.hpp
index ef4899c264..c9dafdd386 100644
--- a/include/llmr/text/collision.hpp
+++ b/include/llmr/text/collision.hpp
@@ -11,7 +11,7 @@ public:
Collision();
~Collision();
- PlacementProperty place(const PlacedGlyphs &boxes,
+ PlacementProperty place(const GlyphBoxes &boxes,
const CollisionAnchor &anchor,
float minPlacementScale, float maxPlacementScale,
float padding, bool horizontal, bool alwaysVisible);
diff --git a/include/llmr/text/glyph.hpp b/include/llmr/text/glyph.hpp
index 91531e24f3..e6138e712e 100644
--- a/include/llmr/text/glyph.hpp
+++ b/include/llmr/text/glyph.hpp
@@ -9,6 +9,11 @@
namespace llmr {
+typedef std::pair<uint16_t, uint16_t> GlyphRange;
+
+// Note: this only works for the BMP
+GlyphRange getGlyphRange(uint32_t glyph);
+
struct GlyphMetrics {
operator bool() const {
return width == 0 && height == 0 && advance == 0;
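
The header only declares getGlyphRange; a plausible sketch of the range math
(in Python, as an assumption about the implementation) is a fixed 256-codepoint
bucket, which is why it only works for the Basic Multilingual Plane:

    def get_glyph_range(codepoint):
        start = (codepoint // 256) * 256
        return (start, start + 255)

    get_glyph_range(ord(u'A'))  # (0, 255)
    get_glyph_range(0x4E2D)     # (19968, 20223), a CJK ideograph block
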
diff --git a/include/llmr/text/glyph_store.hpp b/include/llmr/text/glyph_store.hpp
new file mode 100644
index 0000000000..eb5d6038f1
--- /dev/null
+++ b/include/llmr/text/glyph_store.hpp
@@ -0,0 +1,80 @@
+#ifndef LLMR_TEXT_GLYPH_STORE
+#define LLMR_TEXT_GLYPH_STORE
+
+#include <llmr/text/glyph.hpp>
+#include <llmr/util/pbf.hpp>
+
+#include <cstdint>
+#include <future>
+#include <map>
+#include <memory>
+#include <mutex>
+#include <set>
+#include <string>
+#include <unordered_map>
+#include <vector>
+
+namespace llmr {
+
+
+class SDFGlyph {
+public:
+ uint32_t id = 0;
+
+ // A signed distance field of the glyph with a border of 3 pixels.
+ std::string bitmap;
+
+ // Glyph metrics
+ GlyphMetrics metrics;
+};
+
+class FontStack {
+public:
+ void insert(uint32_t id, const SDFGlyph &glyph);
+ const std::map<uint32_t, GlyphMetrics> &getMetrics() const;
+ const std::map<uint32_t, SDFGlyph> &getSDFs() const;
+ const Shaping getShaping(const std::u32string &string) const;
+
+private:
+ std::map<uint32_t, std::string> bitmaps;
+ std::map<uint32_t, GlyphMetrics> metrics;
+ std::map<uint32_t, SDFGlyph> sdfs;
+ mutable std::mutex mtx;
+};
+
+class GlyphPBF {
+public:
+ GlyphPBF(const std::string &fontStack, GlyphRange glyphRange);
+
+ void parse(FontStack &stack);
+
+ std::shared_future<GlyphPBF &> getFuture();
+
+private:
+ std::string data;
+ std::promise<GlyphPBF &> promise;
+ std::shared_future<GlyphPBF &> future;
+ std::mutex mtx;
+};
+
+// Manages GlyphRange PBF loading.
+class GlyphStore {
+public:
+ // Block until all specified GlyphRanges of the specified font stack are loaded.
+ void waitForGlyphRanges(const std::string &fontStack, const std::set<GlyphRange> &glyphRanges);
+
+ FontStack &getFontStack(const std::string &fontStack);
+
+private:
+ // Loads an individual glyph range from the font stack and adds it to rangeSets
+ std::shared_future<GlyphPBF &> loadGlyphRange(const std::string &fontStack, std::map<GlyphRange, std::unique_ptr<GlyphPBF>> &rangeSets, GlyphRange range);
+
+ FontStack &createFontStack(const std::string &fontStack);
+
+private:
+ std::unordered_map<std::string, std::map<GlyphRange, std::unique_ptr<GlyphPBF>>> ranges;
+ std::unordered_map<std::string, std::unique_ptr<FontStack>> stacks;
+ std::mutex mtx;
+};
+
+
+}
+
+#endif
diff --git a/include/llmr/text/placement.hpp b/include/llmr/text/placement.hpp
index 8ec7df5e4f..87c47345ec 100644
--- a/include/llmr/text/placement.hpp
+++ b/include/llmr/text/placement.hpp
@@ -18,9 +18,9 @@ public:
void addFeature(TextBucket &bucket, const std::vector<Coordinate> &line,
const BucketGeometryDescription &info,
- const IndexedFaces &faces,
+ const GlyphPositions &face,
const Shaping &shaping);
- float measureText(const IndexedFaces &faces,
+ float measureText(const GlyphPositions &face,
const Shaping &shaping);
private:
diff --git a/include/llmr/util/constants.hpp b/include/llmr/util/constants.hpp
index 70befcb379..f315e5171b 100644
--- a/include/llmr/util/constants.hpp
+++ b/include/llmr/util/constants.hpp
@@ -19,6 +19,11 @@ extern const bool styleParseWarnings;
extern const bool spriteWarnings;
extern const bool renderWarnings;
extern const bool renderTree;
+extern const bool labelTextMissingWarning;
+extern const bool missingFontStackWarning;
+extern const bool missingFontFaceWarning;
+extern const bool glyphWarning;
+extern const bool shapingWarning;
}
diff --git a/include/llmr/util/utf.hpp b/include/llmr/util/utf.hpp
new file mode 100644
index 0000000000..92da93f018
--- /dev/null
+++ b/include/llmr/util/utf.hpp
@@ -0,0 +1,45 @@
+#ifndef LLMR_UTIL_UTF
+#define LLMR_UTIL_UTF
+
+#include <memory>
+
+// g++/libstdc++ is missing c++11 codecvt support
+#ifdef __linux__
+#include <boost/locale.hpp>
+#else
+#include <codecvt>
+#include <locale>
+#endif
+
+namespace llmr {
+
+namespace util {
+
+#ifdef __linux__
+
+class utf8_to_utf32 {
+ public:
+ explicit utf8_to_utf32() {}
+ std::u32string convert(std::string const& utf8) {
+ return boost::locale::conv::utf_to_utf<char32_t>(utf8);
+ }
+};
+
+#else
+
+class utf8_to_utf32 {
+ public:
+ explicit utf8_to_utf32()
+ : utf32conv_() {}
+ std::u32string convert(std::string const& utf8) {
+ return utf32conv_.from_bytes(utf8);
+ }
+ private:
+ std::wstring_convert<std::codecvt_utf8<char32_t>, char32_t> utf32conv_;
+};
+
+#endif
+
+}}
+
+#endif
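
What both utf8_to_utf32 variants compute, illustrated in Python: multi-byte
UTF-8 sequences collapse to single code points, so iterating the result visits
one code point per character:

    utf8 = u'h\xe9llo'.encode('utf-8')                   # 6 bytes
    codepoints = [ord(c) for c in utf8.decode('utf-8')]  # [104, 233, 108, 108, 111]
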
diff --git a/ios/img/Icon-40.png b/ios/img/Icon-40.png
new file mode 100644
index 0000000000..eca13393e6
--- /dev/null
+++ b/ios/img/Icon-40.png
Binary files differ
diff --git a/ios/img/Icon-40@2x.png b/ios/img/Icon-40@2x.png
new file mode 100644
index 0000000000..070d037539
--- /dev/null
+++ b/ios/img/Icon-40@2x.png
Binary files differ
diff --git a/ios/img/Icon-60.png b/ios/img/Icon-60.png
index d062c32754..ff4c6ab4b1 100644
--- a/ios/img/Icon-60.png
+++ b/ios/img/Icon-60.png
Binary files differ
diff --git a/ios/img/Icon-60@2x.png b/ios/img/Icon-60@2x.png
index 84550f36e4..b7f25955f5 100644
--- a/ios/img/Icon-60@2x.png
+++ b/ios/img/Icon-60@2x.png
Binary files differ
diff --git a/ios/img/Icon-72.png b/ios/img/Icon-72.png
index 9218aca973..0c876f664d 100644
--- a/ios/img/Icon-72.png
+++ b/ios/img/Icon-72.png
Binary files differ
diff --git a/ios/img/Icon-72@2x.png b/ios/img/Icon-72@2x.png
index b4fb12ba7b..6da408204a 100644
--- a/ios/img/Icon-72@2x.png
+++ b/ios/img/Icon-72@2x.png
Binary files differ
diff --git a/ios/img/Icon-76.png b/ios/img/Icon-76.png
index debb8c453e..895b4a1761 100644
--- a/ios/img/Icon-76.png
+++ b/ios/img/Icon-76.png
Binary files differ
diff --git a/ios/img/Icon-76@2x.png b/ios/img/Icon-76@2x.png
index 1dce0cbd50..7bc5208976 100644
--- a/ios/img/Icon-76@2x.png
+++ b/ios/img/Icon-76@2x.png
Binary files differ
diff --git a/ios/img/Icon-Small-50.png b/ios/img/Icon-Small-50.png
index a72e343a32..6d17da4b00 100644
--- a/ios/img/Icon-Small-50.png
+++ b/ios/img/Icon-Small-50.png
Binary files differ
diff --git a/ios/img/Icon-Small-50@2x.png b/ios/img/Icon-Small-50@2x.png
index 0535d00f0a..ac4ec19282 100644
--- a/ios/img/Icon-Small-50@2x.png
+++ b/ios/img/Icon-Small-50@2x.png
Binary files differ
diff --git a/ios/img/Icon-Small.png b/ios/img/Icon-Small.png
index 46d4a5de64..aecbbc8a1d 100644
--- a/ios/img/Icon-Small.png
+++ b/ios/img/Icon-Small.png
Binary files differ
diff --git a/ios/img/Icon-Small@2x.png b/ios/img/Icon-Small@2x.png
index 99058cebdb..7773852e7a 100644
--- a/ios/img/Icon-Small@2x.png
+++ b/ios/img/Icon-Small@2x.png
Binary files differ
diff --git a/ios/img/Icon-Spotlight-40.png b/ios/img/Icon-Spotlight-40.png
index 14b1ae1125..eca13393e6 100644
--- a/ios/img/Icon-Spotlight-40.png
+++ b/ios/img/Icon-Spotlight-40.png
Binary files differ
diff --git a/ios/img/Icon-Spotlight-40@2x.png b/ios/img/Icon-Spotlight-40@2x.png
index 4a45d8eff9..070d037539 100644
--- a/ios/img/Icon-Spotlight-40@2x.png
+++ b/ios/img/Icon-Spotlight-40@2x.png
Binary files differ
diff --git a/ios/img/Icon.png b/ios/img/Icon.png
index bb426e6681..9ca8194eef 100644
--- a/ios/img/Icon.png
+++ b/ios/img/Icon.png
Binary files differ
diff --git a/ios/img/Icon@2x.png b/ios/img/Icon@2x.png
index 3069b485fa..7c2e8ba037 100644
--- a/ios/img/Icon@2x.png
+++ b/ios/img/Icon@2x.png
Binary files differ
diff --git a/ios/img/iTunesArtwork.png b/ios/img/iTunesArtwork.png
new file mode 100644
index 0000000000..b10824b048
--- /dev/null
+++ b/ios/img/iTunesArtwork.png
Binary files differ
diff --git a/ios/img/iTunesArtwork@2x.png b/ios/img/iTunesArtwork@2x.png
new file mode 100644
index 0000000000..fdee900aa4
--- /dev/null
+++ b/ios/img/iTunesArtwork@2x.png
Binary files differ
diff --git a/macosx/Icon.icns b/macosx/Icon.icns
index ab5ae136ff..c4535a2e51 100644
--- a/macosx/Icon.icns
+++ b/macosx/Icon.icns
Binary files differ
diff --git a/proto/glyphs.proto b/proto/glyphs.proto
new file mode 100644
index 0000000000..606050f7af
--- /dev/null
+++ b/proto/glyphs.proto
@@ -0,0 +1,33 @@
+// Protocol Version 1
+
+package llmr.glyphs;
+
+option optimize_for = LITE_RUNTIME;
+
+// Stores a glyph with metrics and optional SDF bitmap information.
+message glyph {
+ required uint32 id = 1;
+
+ // A signed distance field of the glyph with a border of 3 pixels.
+ optional bytes bitmap = 2;
+
+ // Glyph metrics.
+ required uint32 width = 3;
+ required uint32 height = 4;
+ required sint32 left = 5;
+ required sint32 top = 6;
+ required uint32 advance = 7;
+}
+
+// Stores fontstack information and a list of faces.
+message fontstack {
+ required string name = 1;
+ required string range = 2;
+ repeated glyph glyphs = 3;
+}
+
+message glyphs {
+ repeated fontstack stacks = 1;
+
+ extensions 16 to 8191;
+}
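
A hypothetical decoding sketch, assuming the schema is compiled with
"protoc --python_out=. glyphs.proto" (glyphs_pb2 is protoc's default module
name for this file):

    import glyphs_pb2

    def load_sdfs(pbf_bytes):
        message = glyphs_pb2.glyphs()
        message.ParseFromString(pbf_bytes)
        sdfs = {}
        for stack in message.stacks:           # one fontstack per range, e.g. "0-255"
            for glyph in stack.glyphs:
                sdfs[glyph.id] = glyph.bitmap  # SDF bitmap with a 3px border
        return sdfs
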
diff --git a/setup-libraries.sh b/setup-libraries.sh
index d434611115..b92cd3f05a 100755
--- a/setup-libraries.sh
+++ b/setup-libraries.sh
@@ -16,6 +16,7 @@ ensure_dep cmake
ensure_dep automake
ensure_dep autoconf
ensure_dep pkg-config
+ensure_dep node
if [ ${UNAME} = 'Darwin' ]; then
ensure_dep makedepend
ensure_dep glibtool
@@ -27,10 +28,12 @@ fi
if [[ $MISSING_DEPS != "" ]]; then
if [ ${UNAME} = 'Darwin' ]; then
echo "Missing build deps: ${MISSING_DEPS}"
- echo 'Please run "brew install autoconf automake libtool makedepend cmake pkg-config" and then re-run ./setup-libraries.sh'
+ echo 'Please run "brew install autoconf automake libtool makedepend cmake pkg-config node"'
+ echo 'and then re-run ./setup-libraries.sh'
elif [ ${UNAME} = 'Linux' ]; then
echo "Missing build deps: ${MISSING_DEPS}"
- echo 'Please run "sudo apt-get install automake libtool xutils-dev cmake pkg-config" and then re-run ./setup-libraries.sh'
+ echo 'Please run "sudo apt-get install automake libtool xutils-dev cmake pkg-config libxi-dev libglu1-mesa-dev x11proto-randr-dev x11proto-xext-dev libxrandr-dev x11proto-xf86vidmode-dev libxxf86vm-dev libxcursor-dev nodejs-legacy"'
+ echo 'and then re-run ./setup-libraries.sh'
fi
exit 1
fi
@@ -71,11 +74,9 @@ source MacOSX.sh
if [ ! -f out/build-cpp11-libcpp-x86_64/lib/libuv.a ] ; then ./scripts/build_libuv.sh ; fi
if [ ! -f out/build-cpp11-libcpp-x86_64/lib/libssl.a ] ; then ./scripts/build_openssl.sh ; fi
if [ ! -f out/build-cpp11-libcpp-x86_64/lib/libcurl.a ] ; then ./scripts/build_curl.sh ; fi
- if [ ! -d out/build-cpp11-libcpp-x86_64/include/boost ] ; then ./scripts/build_boost.sh `pwd`/../../src/ `pwd`/../../linux/ `pwd`/../../common/ ; fi
+ if [ ! -d out/build-cpp11-libcpp-x86_64/include/boost ] ; then ./scripts/build_boost.sh `pwd`/../../src/ `pwd`/../../include/ `pwd`/../../linux/ `pwd`/../../common/ ; fi
echo ' ...done'
-rm -rf out/packages
-
./scripts/make_universal.sh
cd ../../
@@ -86,14 +87,12 @@ cd ../../
elif [ ${UNAME} = 'Linux' ]; then
source Linux.sh
- if [ ! -f out/build-cpp11-libstdcpp-gcc-x86_64/lib/libpng.a ] ; then ./scripts/build_png.sh ; fi
if [ ! -f out/build-cpp11-libstdcpp-gcc-x86_64/lib/libglfw3.a ] ; then ./scripts/build_glfw.sh ; fi
+ if [ ! -f out/build-cpp11-libstdcpp-gcc-x86_64/lib/libpng.a ] ; then ./scripts/build_png.sh ; fi
if [ ! -f out/build-cpp11-libstdcpp-gcc-x86_64/lib/libuv.a ] ; then ./scripts/build_libuv.sh ; fi
if [ ! -f out/build-cpp11-libstdcpp-gcc-x86_64/lib/libssl.a ] ; then ./scripts/build_openssl.sh ; fi
if [ ! -f out/build-cpp11-libstdcpp-gcc-x86_64/lib/libcurl.a ] ; then ./scripts/build_curl.sh ; fi
- if [ ! -d out/build-cpp11-libstdcpp-gcc-x86_64/include/boost ] ; then ./scripts/build_boost.sh `pwd`/../../src/ `pwd`/../../linux/ `pwd`/../../common/ ; fi
-
-rm -rf out/packages
+ if [ ! -d out/build-cpp11-libstdcpp-gcc-x86_64/include/boost ] ; then ./scripts/build_boost.sh `pwd`/../../src/ `pwd`/../../include/ `pwd`/../../linux/ `pwd`/../../common/ ; fi
cd ../../
./configure \
diff --git a/src/clipper/clipper.cpp b/src/clipper/clipper.cpp
index 95975450e3..56ad58416d 100755
--- a/src/clipper/clipper.cpp
+++ b/src/clipper/clipper.cpp
@@ -2152,13 +2152,13 @@ void Clipper::IntersectEdges(TEdge *e1, TEdge *e2,
else if (e1->PolyTyp != e2->PolyTyp)
{
//toggle subj open path OutIdx on/off when Abs(clip.WndCnt) == 1 ...
- if ((e1->WindDelta == 0) && abs(e2->WindCnt) == 1 &&
+ if ((e1->WindDelta == 0) && std::abs(e2->WindCnt) == 1 &&
(m_ClipType != ctUnion || e2->WindCnt2 == 0))
{
AddOutPt(e1, Pt);
if (e1Contributing) e1->OutIdx = Unassigned;
}
- else if ((e2->WindDelta == 0) && (abs(e1->WindCnt) == 1) &&
+ else if ((e2->WindDelta == 0) && (std::abs(e1->WindCnt) == 1) &&
(m_ClipType != ctUnion || e1->WindCnt2 == 0))
{
AddOutPt(e2, Pt);
diff --git a/src/geometry/glyph_atlas.cpp b/src/geometry/glyph_atlas.cpp
index 200d281f7c..f4defac692 100644
--- a/src/geometry/glyph_atlas.cpp
+++ b/src/geometry/glyph_atlas.cpp
@@ -22,7 +22,7 @@ GlyphAtlas::~GlyphAtlas() {
}
Rect<uint16_t> GlyphAtlas::addGlyph(uint64_t tile_id, const std::string& face_name,
- const VectorTileGlyph& glyph) {
+ const SDFGlyph& glyph) {
std::lock_guard<std::mutex> lock(mtx);
// Use constant value for now.
diff --git a/src/map/map.cpp b/src/map/map.cpp
index 8291fc2b96..7ac6dbe0e1 100644
--- a/src/map/map.cpp
+++ b/src/map/map.cpp
@@ -163,7 +163,7 @@ void Map::setup() {
sources.emplace("outdoors",
std::unique_ptr<Source>(new Source(*this,
painter,
- "http://api-maps-gl.tilestream.net/v3/mapbox.mapbox-terrain-v1,mapbox.mapbox-streets-v5/%d/%d/%d.gl.pbf",
+ "http://a.tiles.mapbox.com/v3/mapbox.mapbox-terrain-v1,mapbox.mapbox-streets-v5/%d/%d/%d.vector.pbf",
Source::Type::vector,
{{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14 }},
512,
@@ -548,7 +548,6 @@ void Map::renderLayers(const std::vector<LayerDescription>& layers) {
painter.setStrata(i * strata_thickness);
renderLayer(*it, Opaque);
}
- // painter.endPass();
if (debug::renderTree) {
std::cout << std::string(--indent * 4, ' ') << "}" << std::endl;
}
@@ -566,7 +565,6 @@ void Map::renderLayers(const std::vector<LayerDescription>& layers) {
painter.setStrata(i * strata_thickness);
renderLayer(*it, Translucent);
}
- // painter.endPass();
if (debug::renderTree) {
std::cout << std::string(--indent * 4, ' ') << "}" << std::endl;
}
diff --git a/src/map/tile_parser.cpp b/src/map/tile_parser.cpp
index 49444428ac..ff5d61a5e1 100644
--- a/src/map/tile_parser.cpp
+++ b/src/map/tile_parser.cpp
@@ -9,20 +9,22 @@
#include <llmr/util/raster.hpp>
#include <llmr/util/constants.hpp>
#include <llmr/geometry/glyph_atlas.hpp>
+#include <llmr/text/glyph_store.hpp>
+#include <llmr/text/glyph.hpp>
#include <llmr/util/std.hpp>
+#include <llmr/util/utf.hpp>
using namespace llmr;
-
-TileParser::TileParser(const std::string& data, VectorTileData& tile, const Style& style, GlyphAtlas& glyphAtlas, SpriteAtlas &spriteAtlas)
+TileParser::TileParser(const std::string& data, VectorTileData& tile, const Style& style, GlyphAtlas& glyphAtlas, GlyphStore &glyphStore, SpriteAtlas &spriteAtlas)
: vector_data(pbf((const uint8_t *)data.data(), data.size())),
tile(tile),
style(style),
glyphAtlas(glyphAtlas),
+ glyphStore(glyphStore),
spriteAtlas(spriteAtlas),
placement(tile.id.z) {
- parseGlyphs();
parseStyleLayers(style.layers);
}
@@ -30,17 +32,13 @@ bool TileParser::obsolete() const {
return tile.state == TileData::State::obsolete;
}
-void TileParser::parseGlyphs() {
- for (const std::pair<std::string, const VectorTileFace> pair : vector_data.faces) {
- const std::string &name = pair.first;
- const VectorTileFace &face = pair.second;
-
- GlyphPositions &glyphs = faces[name];
- for (const VectorTileGlyph &glyph : face.glyphs) {
- const Rect<uint16_t> rect =
- glyphAtlas.addGlyph(tile.id.to_uint64(), name, glyph);
- glyphs.emplace(glyph.id, Glyph{rect, glyph.metrics});
- }
+void TileParser::addGlyph(uint64_t tileid, const std::string stackname, const std::u32string &string, const FontStack &fontStack, GlyphAtlas &glyphAtlas, GlyphPositions &face) {
+ std::map<uint32_t, SDFGlyph> sdfs = fontStack.getSDFs();
+ // Loop through all characters and add glyph to atlas, positions.
+ for (uint32_t chr : string) {
+ const SDFGlyph sdf = sdfs[chr];
+ const Rect<uint16_t> rect = glyphAtlas.addGlyph(tileid, stackname, sdf);
+ face.emplace(chr, Glyph{rect, sdf.metrics});
}
}
@@ -162,30 +160,79 @@ std::unique_ptr<Bucket> TileParser::createIconBucket(const VectorTileLayer& laye
return obsolete() ? nullptr : std::move(bucket);
}
+typedef std::pair<uint16_t, uint16_t> GlyphRange;
+
std::unique_ptr<Bucket> TileParser::createTextBucket(const VectorTileLayer& layer, const BucketDescription& bucket_desc) {
+ std::unique_ptr<TextBucket> bucket = std::make_unique<TextBucket>(
+ tile.textVertexBuffer, tile.triangleElementsBuffer, bucket_desc, placement);
+
+ util::utf8_to_utf32 ucs4conv;
+
+ // Determine and load glyph ranges
+ {
+ std::set<GlyphRange> ranges;
+
+ FilteredVectorTileLayer filtered_layer(layer, bucket_desc);
+ for (const pbf& feature_pbf : filtered_layer) {
+ if (obsolete()) return nullptr;
+ VectorTileFeature feature { feature_pbf, layer };
+
+ auto it_prop = feature.properties.find(bucket_desc.geometry.field);
+ if (it_prop == feature.properties.end()) {
+ // feature does not have the correct property
+ if (debug::labelTextMissingWarning) {
+ fprintf(stderr, "[WARNING] feature doesn't have property '%s' required for labelling\n", bucket_desc.geometry.field.c_str());
+ }
+ continue;
+ }
- // Determine the correct text stack.
- if (!layer.shaping.size()) {
- return nullptr;
+ const std::u32string string = ucs4conv.convert(toString(it_prop->second));
+
+ // Loop through all characters of this text and collect unique codepoints.
+ for (uint32_t chr : string) {
+ ranges.insert(getGlyphRange(chr));
+ }
+ }
+
+ glyphStore.waitForGlyphRanges(bucket_desc.geometry.font, ranges);
}
- // TODO: currently hardcoded to use the first font stack.
- const std::map<Value, Shaping>& shaping = layer.shaping.begin()->second;
+ // Note: this is a reference into the glyph store, not a copy.
+ const FontStack &fontStack = glyphStore.getFontStack(bucket_desc.geometry.font);
+ std::map<Value, Shaping> shaping;
+ GlyphPositions face;
+
+ // Shape and place all labels.
+ {
+ FilteredVectorTileLayer filtered_layer(layer, bucket_desc);
+ for (const pbf& feature_pbf : filtered_layer) {
+ if (obsolete()) return nullptr;
+ VectorTileFeature feature { feature_pbf, layer };
+
+ auto it_prop = feature.properties.find(bucket_desc.geometry.field);
+ if (it_prop == feature.properties.end()) {
+ // feature does not have the correct property
+ if (debug::labelTextMissingWarning) {
+ fprintf(stderr, "[WARNING] feature doesn't have property '%s' required for labelling\n", bucket_desc.geometry.field.c_str());
+ }
+ continue;
+ }
- const Faces& const_faces = faces;
+ const std::u32string string = ucs4conv.convert(toString(it_prop->second));
- IndexedFaces faces;
- for (const std::string& face : layer.faces) {
- auto it = const_faces.find(face);
- if (it == const_faces.end()) {
- // This layer references an unknown face.
- return nullptr;
+ // Shape labels.
+ const Shaping shaped = fontStack.getShaping(string);
+ shaping.emplace(toString(it_prop->second), shaped);
+
+ // Place labels.
+ addGlyph(tile.id.to_uint64(), bucket_desc.geometry.font, string, fontStack, glyphAtlas, face);
}
- faces.push_back(&it->second);
}
- std::unique_ptr<TextBucket> bucket = std::make_unique<TextBucket>(
- tile.textVertexBuffer, tile.triangleElementsBuffer, bucket_desc, placement);
- addBucketFeatures(bucket, layer, bucket_desc, faces, shaping);
+ // The rest of the pipeline takes nearly the same interface
+ // as before.
+ addBucketFeatures(bucket, layer, bucket_desc, face, shaping);
+
return std::move(bucket);
}
+
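
Stripped of the diff noise, the new createTextBucket flow is two passes over
the layer's features; a compact sketch (illustrative Python, names pythonized):

    ranges = set()
    for label in labels:                     # pass 1: collect codepoint ranges
        for ch in label:
            ranges.add(get_glyph_range(ord(ch)))
    glyph_store.wait_for_glyph_ranges(font, ranges)  # block until PBFs load

    font_stack = glyph_store.get_font_stack(font)
    face, shaping = {}, {}
    for label in labels:                     # pass 2: shape, then place in atlas
        shaping[label] = font_stack.get_shaping(label)
        add_glyph(tile_id, font, label, font_stack, glyph_atlas, face)
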
diff --git a/src/map/transform.cpp b/src/map/transform.cpp
index 996117bfa4..d567a586c9 100644
--- a/src/map/transform.cpp
+++ b/src/map/transform.cpp
@@ -60,8 +60,8 @@ void Transform::_moveBy(const double dx, const double dy, const time duration) {
// Un-rotate when rotated and panning far enough to show off-world in corners.
double w = final.scale * util::tileSize / 2;
- double m = std::sqrt(pow((current.width / 2), 2) + pow((current.height / 2), 2));
- double x = std::abs(sqrt(pow(final.x, 2) + pow(final.y, 2)));
+ double m = std::sqrt(std::pow((current.width / 2), 2) + std::pow((current.height / 2), 2));
+ double x = std::abs(std::sqrt(std::pow(final.x, 2) + std::pow(final.y, 2)));
if (current.angle && w - x < m) _setAngle(0, 500_milliseconds, true);
if (duration == 0) {
diff --git a/src/map/vector_tile_data.cpp b/src/map/vector_tile_data.cpp
index 337e2fcc35..3c972aa319 100644
--- a/src/map/vector_tile_data.cpp
+++ b/src/map/vector_tile_data.cpp
@@ -22,7 +22,7 @@ void VectorTileData::parse() {
// Parsing creates state that is encapsulated in TileParser. While parsing,
// the TileParser object writes results into this objects. All other state
// is going to be discarded afterwards.
- TileParser parser(data, *this, map.getStyle(), map.getGlyphAtlas(), map.getSpriteAtlas());
+ TileParser parser(data, *this, map.getStyle(), map.getGlyphAtlas(), map.getGlyphStore(), map.getSpriteAtlas());
} catch (const std::exception& ex) {
fprintf(stderr, "[%p] exception [%d/%d/%d]... failed: %s\n", this, id.z, id.x, id.y, ex.what());
cancel();
diff --git a/src/platform/request.cpp b/src/platform/request.cpp
index aadf83deab..f689517ab5 100644
--- a/src/platform/request.cpp
+++ b/src/platform/request.cpp
@@ -12,25 +12,33 @@ Request::Request(const std::string &url,
res(std::make_unique<Response>(callback)),
cancelled(false),
loop(loop) {
- // Add a check handle without a callback to keep the default loop running.
- // We don't have a real handler attached to the default loop right from the
- // beginning, because we're using asynchronous messaging to perform the actual
- // request in the request thread. Only after the request is complete, we
- // create an actual work request that is attached to the default loop.
- async = new uv_async_t();
- async->data = new std::unique_ptr<Response>();
- uv_async_init(loop, async, complete);
+ if (loop) {
+ // Add a check handle without a callback to keep the default loop running.
+ // We don't have a real handler attached to the default loop right from the
+ // beginning, because we're using asynchronous messaging to perform the actual
+ // request in the request thread. Only after the request is complete, we
+ // create an actual work request that is attached to the default loop.
+ async = new uv_async_t();
+ async->data = new std::unique_ptr<Response>();
+ uv_async_init(loop, async, complete);
+ }
}
Request::~Request() {
}
void Request::complete() {
- // We're scheduling the response callback to be invoked in the event loop.
- // Since the Request object will be deleted before the callback is invoked,
- // we move over the Response object to be owned by the async handle.
- ((std::unique_ptr<Response> *)async->data)->swap(res);
- uv_async_send(async);
+ if (loop) {
+ // We're scheduling the response callback to be invoked in the event loop.
+ // Since the Request object will be deleted before the callback is invoked,
+ // we move over the Response object to be owned by the async handle.
+ ((std::unique_ptr<Response> *)async->data)->swap(res);
+ uv_async_send(async);
+ } else {
+ // We're calling the response callback immediately. We're currently on an
+ // arbitrary thread, but that's okay.
+ res->callback(res.get());
+ }
}
void Request::complete(uv_async_t *async) {
diff --git a/src/renderer/painter.cpp b/src/renderer/painter.cpp
index 1c568073a4..82808f4d13 100644
--- a/src/renderer/painter.cpp
+++ b/src/renderer/painter.cpp
@@ -144,15 +144,19 @@ void Painter::clear() {
}
void Painter::setOpaque() {
- pass = Opaque;
- glDisable(GL_BLEND);
- depthMask(true);
+ if (pass != Opaque) {
+ pass = Opaque;
+ glDisable(GL_BLEND);
+ depthMask(true);
+ }
}
void Painter::setTranslucent() {
- pass = Translucent;
- glEnable(GL_BLEND);
- depthMask(false);
+ if (pass != Translucent) {
+ pass = Translucent;
+ glEnable(GL_BLEND);
+ depthMask(false);
+ }
}
void Painter::setStrata(float value) {
diff --git a/src/renderer/painter_fill.cpp b/src/renderer/painter_fill.cpp
index 5f014dc014..cd7b0780df 100644
--- a/src/renderer/painter_fill.cpp
+++ b/src/renderer/painter_fill.cpp
@@ -180,10 +180,10 @@ void Painter::renderFill(FillBucket& bucket, const std::string& layer_name, cons
return vtxMatrix;
}();
- pass = Opaque;
+ setOpaque();
renderFill(bucket, modifiedProperties, id, vtxMatrix);
- pass = Translucent;
+ setTranslucent();
renderFill(bucket, modifiedProperties, id, vtxMatrix);
diff --git a/src/renderer/painter_prerender.cpp b/src/renderer/painter_prerender.cpp
index d112f7aadf..7d4f7f9d93 100644
--- a/src/renderer/painter_prerender.cpp
+++ b/src/renderer/painter_prerender.cpp
@@ -27,8 +27,6 @@ void Painter::renderPrerenderedTexture(Bucket &bucket, const GenericProperties &
const int buffer = 4096 * properties.prerenderBuffer;
// draw the texture on a quad
- depthMask(false);
-
useProgram(rasterShader->program);
rasterShader->setMatrix(matrix);
rasterShader->setOpacity(1);
@@ -42,6 +40,4 @@ void Painter::renderPrerenderedTexture(Bucket &bucket, const GenericProperties &
bucket.prerendered->bindTexture();
coveringRasterArray.bind(*rasterShader, tileStencilBuffer, BUFFER_OFFSET(0));
glDrawArrays(GL_TRIANGLES, 0, (GLsizei)tileStencilBuffer.index());
-
- depthMask(true);
}
diff --git a/src/renderer/text_bucket.cpp b/src/renderer/text_bucket.cpp
index 0fd351343c..5823607acf 100644
--- a/src/renderer/text_bucket.cpp
+++ b/src/renderer/text_bucket.cpp
@@ -7,6 +7,8 @@
#include <llmr/style/style.hpp>
#include <llmr/map/vector_tile.hpp>
#include <llmr/text/placement.hpp>
+#include <llmr/text/glyph_store.hpp>
+#include <llmr/util/constants.hpp>
#include <llmr/util/math.hpp>
#include <llmr/platform/gl.hpp>
@@ -98,17 +100,23 @@ void TextBucket::addGlyphs(const PlacedGlyphs &glyphs, float placementZoom,
};
void TextBucket::addFeature(const VectorTileFeature &feature,
- const IndexedFaces &faces,
+ const GlyphPositions &face,
const std::map<Value, Shaping> &shapings) {
auto it_prop = feature.properties.find(geom_desc.field);
if (it_prop == feature.properties.end()) {
// feature does not have the correct property
+ if (debug::labelTextMissingWarning) {
+ fprintf(stderr, "[WARNING] feature doesn't have property '%s' required for labelling\n", geom_desc.field.c_str());
+ }
return;
}
const Value &value = it_prop->second;
- auto it_shaping = shapings.find(value);
+ auto it_shaping = shapings.find(toString(value));
if (it_shaping == shapings.end()) {
+ if (debug::shapingWarning) {
+ fprintf(stderr, "[WARNING] missing shaping for '%s'\n", toString(value).c_str());
+ }
// we lack shaping information for this label
return;
}
@@ -127,14 +135,14 @@ void TextBucket::addFeature(const VectorTileFeature &feature,
while ((cmd = geometry.next(x, y)) != Geometry::end) {
if (cmd == Geometry::move_to) {
if (!line.empty()) {
- placement.addFeature(*this, line, geom_desc, faces, shaping);
+ placement.addFeature(*this, line, geom_desc, face, shaping);
line.clear();
}
}
line.emplace_back(x, y);
}
if (line.size()) {
- placement.addFeature(*this, line, geom_desc, faces, shaping);
+ placement.addFeature(*this, line, geom_desc, face, shaping);
}
}
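The loop above splits a vector-tile geometry into individual lines: each move_to flushes the points accumulated so far to placement, and the trailing run is flushed after the loop. A standalone sketch of that traversal, under simplified hypothetical types:

#include <cstdint>
#include <vector>

struct Point { int32_t x, y; };
enum class Cmd { MoveTo, LineTo, End };

// geometry.next(p) is assumed to behave like the vector-tile cursor above:
// it writes the next vertex into p and returns the current command.
template <typename Geometry, typename Emit>
void forEachLine(Geometry &geometry, Emit emit) {
    std::vector<Point> line;
    Point p{0, 0};
    for (Cmd cmd = geometry.next(p); cmd != Cmd::End; cmd = geometry.next(p)) {
        if (cmd == Cmd::MoveTo && !line.empty()) {
            emit(line); // a move_to starts a new line; place the finished one
            line.clear();
        }
        line.push_back(p);
    }
    if (!line.empty()) {
        emit(line); // don't forget the final line
    }
}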
diff --git a/src/style/properties.cpp b/src/style/properties.cpp
index 0e48418300..b39f20c514 100644
--- a/src/style/properties.cpp
+++ b/src/style/properties.cpp
@@ -29,9 +29,11 @@ float functions::stops(float z, const std::vector<float>& stops) {
if (stops.size() % 2 != 0) return 0;
bool smaller = false;
- float smaller_z, smaller_val;
+ float smaller_z = 0.0;
+ float smaller_val = 0.0;
bool larger = false;
- float larger_z, larger_val;
+ float larger_z = 0.0;
+ float larger_val = 0.0;
for (uint32_t i = 0; i < stops.size(); i += 2) {
float stop_z = stops[i];
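For reference, functions::stops scans flattened (zoom, value) pairs for the nearest stops below and above z; the patch initializes the tracking variables so the no-bracketing-stop paths never read indeterminate values. A self-contained sketch of the evaluation — semantics assumed here: flattened pairs with linear interpolation between the bracketing stops; the real function may interpolate differently:

#include <cstddef>
#include <vector>

float evaluateStops(float z, const std::vector<float> &stops) {
    if (stops.empty() || stops.size() % 2 != 0) return 0;
    bool haveSmaller = false, haveLarger = false;
    float sz = 0, sv = 0, lz = 0, lv = 0; // initialized, as in the fix above
    for (std::size_t i = 0; i < stops.size(); i += 2) {
        const float stopZ = stops[i], stopV = stops[i + 1];
        if (stopZ <= z && (!haveSmaller || stopZ > sz)) {
            haveSmaller = true; sz = stopZ; sv = stopV;
        }
        if (stopZ >= z && (!haveLarger || stopZ < lz)) {
            haveLarger = true; lz = stopZ; lv = stopV;
        }
    }
    if (haveSmaller && haveLarger) {
        return lz == sz ? sv : sv + (lv - sv) * (z - sz) / (lz - sz);
    }
    return haveSmaller ? sv : haveLarger ? lv : 0;
}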
diff --git a/src/style/style.cpp b/src/style/style.cpp
index fced8c2237..f1694694c0 100644
--- a/src/style/style.cpp
+++ b/src/style/style.cpp
@@ -35,7 +35,7 @@ void Style::cascadeProperties(GenericProperties &properties, const GenericClass&
klass.translate.get()[1].evaluate<float>(z) }};
computed.effective_classes[layer_name][TransitionablePropertyKey::Translate] = class_name;
if (klass.translate_transition && klass.translate_transition.get().duration) {
- properties_to_transition[layer_name][TransitionablePropertyKey::Translate] = klass.translate_transition.get();
+ properties_to_transition[layer_name].emplace(TransitionablePropertyKey::Translate, klass.translate_transition.get());
}
}
@@ -47,7 +47,7 @@ void Style::cascadeProperties(GenericProperties &properties, const GenericClass&
properties.opacity = klass.opacity.get().evaluate<float>(z);
computed.effective_classes[layer_name][TransitionablePropertyKey::Opacity] = class_name;
if (klass.opacity_transition && klass.opacity_transition.get().duration) {
- properties_to_transition[layer_name][TransitionablePropertyKey::Opacity] = klass.opacity_transition.get();
+ properties_to_transition[layer_name].emplace(TransitionablePropertyKey::Opacity, klass.opacity_transition.get());
}
}
@@ -120,7 +120,7 @@ void Style::cascade(float z) {
fill.fill_color = layer.fill_color.get();
computed.effective_classes[layer_name][TransitionablePropertyKey::FillColor] = class_name;
if (layer.fill_color_transition && layer.fill_color_transition.get().duration) {
- properties_to_transition[layer_name][TransitionablePropertyKey::FillColor] = layer.fill_color_transition.get();
+ properties_to_transition[layer_name].emplace(TransitionablePropertyKey::FillColor, layer.fill_color_transition.get());
}
}
@@ -128,7 +128,7 @@ void Style::cascade(float z) {
fill.stroke_color = layer.stroke_color.get();
computed.effective_classes[layer_name][TransitionablePropertyKey::StrokeColor] = class_name;
if (layer.stroke_color_transition && layer.stroke_color_transition.get().duration) {
- properties_to_transition[layer_name][TransitionablePropertyKey::StrokeColor] = layer.stroke_color_transition.get();
+ properties_to_transition[layer_name].emplace(TransitionablePropertyKey::StrokeColor, layer.stroke_color_transition.get());
}
}
@@ -150,7 +150,7 @@ void Style::cascade(float z) {
stroke.width = layer.width.get().evaluate<float>(z);
computed.effective_classes[layer_name][TransitionablePropertyKey::Width] = class_name;
if (layer.width_transition && layer.width_transition.get().duration) {
- properties_to_transition[layer_name][TransitionablePropertyKey::Width] = layer.width_transition.get();
+ properties_to_transition[layer_name].emplace(TransitionablePropertyKey::Width, layer.width_transition.get());
}
}
@@ -159,7 +159,7 @@ void Style::cascade(float z) {
stroke.offset = layer.offset.get().evaluate<float>(z);
computed.effective_classes[layer_name][TransitionablePropertyKey::Offset] = class_name;
if (layer.offset_transition && layer.offset_transition.get().duration) {
- properties_to_transition[layer_name][TransitionablePropertyKey::Offset] = layer.offset_transition.get();
+ properties_to_transition[layer_name].emplace(TransitionablePropertyKey::Offset, layer.offset_transition.get());
}
}
@@ -167,7 +167,7 @@ void Style::cascade(float z) {
stroke.color = layer.color.get();
computed.effective_classes[layer_name][TransitionablePropertyKey::Color] = class_name;
if (layer.color_transition && layer.color_transition.get().duration) {
- properties_to_transition[layer_name][TransitionablePropertyKey::Color] = layer.color_transition.get();
+ properties_to_transition[layer_name].emplace(TransitionablePropertyKey::Color, layer.color_transition.get());
}
}
@@ -176,7 +176,7 @@ void Style::cascade(float z) {
layer.dash_array.get()[1].evaluate<float>(z) }};
computed.effective_classes[layer_name][TransitionablePropertyKey::DashArray] = class_name;
if (layer.dash_array_transition && layer.dash_array_transition.get().duration) {
- properties_to_transition[layer_name][TransitionablePropertyKey::DashArray] = layer.dash_array_transition.get();
+ properties_to_transition[layer_name].emplace(TransitionablePropertyKey::DashArray, layer.dash_array_transition.get());
}
}
}
@@ -194,7 +194,7 @@ void Style::cascade(float z) {
icon.color = layer.color.get();
computed.effective_classes[layer_name][TransitionablePropertyKey::Color] = class_name;
if (layer.color_transition && layer.color_transition.get().duration) {
- properties_to_transition[layer_name][TransitionablePropertyKey::Color] = layer.color_transition.get();
+ properties_to_transition[layer_name].emplace(TransitionablePropertyKey::Color, layer.color_transition.get());
}
}
@@ -210,7 +210,7 @@ void Style::cascade(float z) {
icon.radius = layer.radius.get().evaluate<float>(z);
computed.effective_classes[layer_name][TransitionablePropertyKey::Radius] = class_name;
if (layer.radius_transition && layer.radius_transition.get().duration) {
- properties_to_transition[layer_name][TransitionablePropertyKey::Radius] = layer.radius_transition.get();
+ properties_to_transition[layer_name].emplace(TransitionablePropertyKey::Radius, layer.radius_transition.get());
}
}
@@ -218,7 +218,7 @@ void Style::cascade(float z) {
icon.blur = layer.blur.get().evaluate<float>(z);
computed.effective_classes[layer_name][TransitionablePropertyKey::Blur] = class_name;
if (layer.blur_transition && layer.blur_transition.get().duration) {
- properties_to_transition[layer_name][TransitionablePropertyKey::Blur] = layer.blur_transition.get();
+ properties_to_transition[layer_name].emplace(TransitionablePropertyKey::Blur, layer.blur_transition.get());
}
}
}
@@ -236,7 +236,7 @@ void Style::cascade(float z) {
text.color = layer.color.get();
computed.effective_classes[layer_name][TransitionablePropertyKey::Color] = class_name;
if (layer.color_transition && layer.color_transition.get().duration) {
- properties_to_transition[layer_name][TransitionablePropertyKey::Color] = layer.color_transition.get();
+ properties_to_transition[layer_name].emplace(TransitionablePropertyKey::Color, layer.color_transition.get());
}
}
@@ -248,7 +248,7 @@ void Style::cascade(float z) {
text.halo = layer.halo.get();
computed.effective_classes[layer_name][TransitionablePropertyKey::Halo] = class_name;
if (layer.halo_transition && layer.halo_transition.get().duration) {
- properties_to_transition[layer_name][TransitionablePropertyKey::Halo] = layer.halo_transition.get();
+ properties_to_transition[layer_name].emplace(TransitionablePropertyKey::Halo, layer.halo_transition.get());
}
}
@@ -256,7 +256,7 @@ void Style::cascade(float z) {
text.halo_radius = layer.halo_radius.get().evaluate<float>(z);
computed.effective_classes[layer_name][TransitionablePropertyKey::HaloRadius] = class_name;
if (layer.halo_radius_transition && layer.halo_radius_transition.get().duration) {
- properties_to_transition[layer_name][TransitionablePropertyKey::HaloRadius] = layer.halo_radius_transition.get();
+ properties_to_transition[layer_name].emplace(TransitionablePropertyKey::HaloRadius, layer.halo_radius_transition.get());
}
}
@@ -264,7 +264,7 @@ void Style::cascade(float z) {
text.halo_blur = layer.halo_blur.get().evaluate<float>(z);
computed.effective_classes[layer_name][TransitionablePropertyKey::HaloBlur] = class_name;
if (layer.halo_blur_transition && layer.halo_blur_transition.get().duration) {
- properties_to_transition[layer_name][TransitionablePropertyKey::HaloBlur] = layer.halo_blur_transition.get();
+ properties_to_transition[layer_name].emplace(TransitionablePropertyKey::HaloBlur, layer.halo_blur_transition.get());
}
}
@@ -305,7 +305,7 @@ void Style::cascade(float z) {
computed.background.color = sheetClass.background.color.get();
computed.effective_classes["background"][TransitionablePropertyKey::Color] = class_name;
if (sheetClass.background.color_transition && sheetClass.background.color_transition.get().duration) {
- properties_to_transition["background"][TransitionablePropertyKey::Color] = sheetClass.background.color_transition.get();
+ properties_to_transition["background"].emplace(TransitionablePropertyKey::Color, sheetClass.background.color_transition.get());
}
}
}
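The switch from operator[] to emplace throughout this file is not cosmetic: std::map::operator[] must default-construct the mapped value before assigning, which stops compiling once PropertyTransition values come out of a boost::optional and (presumably, per the style_parser.cpp change below) lose their default constructor. emplace also inserts only if the key is absent. A minimal sketch of both differences, with a hypothetical Transition type:

#include <cstdio>
#include <map>

struct Transition {
    int duration;
    explicit Transition(int d) : duration(d) {} // no default constructor
};

int main() {
    std::map<int, Transition> m;
    // m[0] = Transition(300);         // does not compile: operator[] needs to
    //                                 // default-construct the mapped value first
    m.emplace(0, Transition(300));     // constructs in place, inserts if absent
    m.emplace(0, Transition(500));     // no-op: key 0 already present
    std::printf("%d\n", m.at(0).duration); // prints 300
    return 0;
}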
diff --git a/src/style/style_parser.cpp b/src/style/style_parser.cpp
index 56e28350b9..6276d657d2 100644
--- a/src/style/style_parser.cpp
+++ b/src/style/style_parser.cpp
@@ -477,7 +477,7 @@ FunctionProperty StyleParser::parseFunction(JSVal value) {
return property;
}
-PropertyTransition StyleParser::parseTransition(JSVal value, std::string property_name) {
+boost::optional<PropertyTransition> StyleParser::parseTransition(JSVal value, std::string property_name) {
uint16_t duration = 0, delay = 0;
std::string transition_property = std::string("transition-").append(property_name);
if (value.HasMember(transition_property.c_str())) {
@@ -492,12 +492,11 @@ PropertyTransition StyleParser::parseTransition(JSVal value, std::string propert
}
}
- PropertyTransition transition;
-
- transition.duration = duration;
- transition.delay = delay;
-
- return transition;
+ if (duration || delay) {
+ return boost::optional<PropertyTransition>(PropertyTransition { duration, delay });
+ } else {
+ return boost::optional<PropertyTransition>();
+ }
}
void StyleParser::parseGenericClass(GenericClass &klass, JSVal value) {
@@ -529,7 +528,7 @@ void StyleParser::parseGenericClass(GenericClass &klass, JSVal value) {
}
if (value.HasMember("prerender-size")) {
- klass.prerenderBuffer = toNumber<uint64_t>(parseValue(value["prerender-size"]));
+ klass.prerenderSize = toNumber<uint64_t>(parseValue(value["prerender-size"]));
}
if (value.HasMember("prerender-blur")) {
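parseTransition's new return type makes "no transition specified" representable, instead of signalling it with a zero-duration sentinel — which is what lets the cascade code above test `klass.foo_transition && ...duration`. A standalone sketch of the contract, assuming boost::optional as in the patch:

#include <boost/optional.hpp>
#include <cstdint>
#include <cstdio>

struct PropertyTransition { uint16_t duration, delay; };

boost::optional<PropertyTransition> makeTransition(uint16_t duration, uint16_t delay) {
    if (duration || delay) {
        return PropertyTransition{duration, delay}; // a real transition
    }
    return boost::none; // nothing specified in the stylesheet
}

int main() {
    auto t = makeTransition(300, 0);
    if (t && t->duration) {
        std::printf("transition over %dms\n", t->duration);
    }
    return 0;
}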
diff --git a/src/text/collision.cpp b/src/text/collision.cpp
index d1488cb29e..0da50f2809 100644
--- a/src/text/collision.cpp
+++ b/src/text/collision.cpp
@@ -53,7 +53,7 @@ Collision::Collision() : cTree(new Tree()), hTree(new Tree()) {
CollisionAnchor{m, m}, 1, {{M_PI * 2, 0}}, false, 2);
}
-GlyphBox getMergedGlyphs(const PlacedGlyphs &placed_glyphs, bool horizontal,
+GlyphBox getMergedGlyphs(const GlyphBoxes &boxes, bool horizontal,
const CollisionAnchor &anchor) {
GlyphBox mergedGlyphs;
const float inf = std::numeric_limits<float>::infinity();
@@ -62,28 +62,28 @@ GlyphBox getMergedGlyphs(const PlacedGlyphs &placed_glyphs, bool horizontal,
mergedGlyphs.anchor = anchor;
CollisionRect &box = mergedGlyphs.box;
- for (const PlacedGlyph &placed_glyph : placed_glyphs) {
- const CollisionRect &gbox = placed_glyph.glyphBox.box;
+ for (const GlyphBox &glyph : boxes) {
+ const CollisionRect &gbox = glyph.box;
box.tl.x = util::min(box.tl.x, gbox.tl.x);
box.tl.y = util::min(box.tl.y, gbox.tl.y);
box.br.x = util::max(box.br.x, gbox.br.x);
box.br.y = util::max(box.br.y, gbox.br.y);
mergedGlyphs.minScale =
- util::max(mergedGlyphs.minScale, placed_glyph.glyphBox.minScale);
+ util::max(mergedGlyphs.minScale, glyph.minScale);
}
return mergedGlyphs;
}
-PlacementProperty Collision::place(const PlacedGlyphs &placed_glyphs,
+PlacementProperty Collision::place(const GlyphBoxes &boxes,
const CollisionAnchor &anchor,
float minPlacementScale,
float maxPlacementScale, float padding,
bool horizontal, bool alwaysVisible) {
float minScale = std::numeric_limits<float>::infinity();
- for (const PlacedGlyph &placed_glyph : placed_glyphs) {
- minScale = util::min(minScale, placed_glyph.glyphBox.minScale);
+ for (const GlyphBox &glyphBox : boxes) {
+ minScale = util::min(minScale, glyphBox.minScale);
}
minPlacementScale = util::max(minPlacementScale, minScale);
@@ -92,11 +92,9 @@ PlacementProperty Collision::place(const PlacedGlyphs &placed_glyphs,
// for horizontal labels.
GlyphBoxes glyphs;
if (horizontal) {
- glyphs.push_back(getMergedGlyphs(placed_glyphs, horizontal, anchor));
+ glyphs.push_back(getMergedGlyphs(boxes, horizontal, anchor));
} else {
- for (const PlacedGlyph &placed_glyph : placed_glyphs) {
- glyphs.push_back(placed_glyph.glyphBox);
- }
+ glyphs = boxes;
}
// Calculate bboxes for all the glyphs
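getMergedGlyphs now operates directly on collision boxes, and for horizontal labels it is just a bounding-box union over the per-glyph rects. The same computation under simplified, hypothetical types:

#include <algorithm>
#include <limits>
#include <vector>

struct Rect { float tlx, tly, brx, bry; };

Rect mergeBoxes(const std::vector<Rect> &boxes) {
    const float inf = std::numeric_limits<float>::infinity();
    Rect merged{inf, inf, -inf, -inf}; // empty box: min/max fix it up below
    for (const Rect &b : boxes) {
        merged.tlx = std::min(merged.tlx, b.tlx);
        merged.tly = std::min(merged.tly, b.tly);
        merged.brx = std::max(merged.brx, b.brx);
        merged.bry = std::max(merged.bry, b.bry);
    }
    return merged;
}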
diff --git a/src/text/glyph.cpp b/src/text/glyph.cpp
new file mode 100644
index 0000000000..882a8c493c
--- /dev/null
+++ b/src/text/glyph.cpp
@@ -0,0 +1,17 @@
+#include <llmr/text/glyph.hpp>
+
+namespace llmr {
+
+// Note: this only works for the BMP
+// Note: we could use a binary search over a range table for roughly constant-time
+// lookups; however, most of our lookups fall within the first few 256-glyph
+// ranges, so this simple arithmetic is likely faster.
+GlyphRange getGlyphRange(uint32_t glyph) {
+ unsigned start = (glyph/256) * 256;
+ unsigned end = (start + 255);
+ if (start > 65280) start = 65280;
+ if (end > 65533) end = 65533;
+ return { start, end };
+}
+
+}
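To make the bucketing concrete, here is a standalone version with a few worked lookups. GlyphRange is assumed here to be a pair of unsigned bounds; the clamps pin the last bucket to U+FF00–U+FFFD:

#include <cstdint>
#include <cstdio>
#include <utility>

using GlyphRange = std::pair<unsigned, unsigned>;

GlyphRange getGlyphRange(uint32_t glyph) {
    unsigned start = (glyph / 256) * 256;
    unsigned end = start + 255;
    if (start > 65280) start = 65280;
    if (end > 65533) end = 65533;
    return {start, end};
}

int main() {
    GlyphRange a = getGlyphRange(0x41);   // 'A'           -> {0, 255}
    GlyphRange b = getGlyphRange(0x4E2D); // CJK ideograph -> {19968, 20223}
    GlyphRange c = getGlyphRange(0xFF78); // last bucket   -> {65280, 65533}
    std::printf("%u-%u, %u-%u, %u-%u\n",
                a.first, a.second, b.first, b.second, c.first, c.second);
    return 0;
}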
diff --git a/src/text/glyph_store.cpp b/src/text/glyph_store.cpp
new file mode 100644
index 0000000000..3f741236f0
--- /dev/null
+++ b/src/text/glyph_store.cpp
@@ -0,0 +1,185 @@
+#include <llmr/text/glyph_store.hpp>
+
+#include <llmr/util/std.hpp>
+#include <llmr/util/string.hpp>
+#include <llmr/util/utf.hpp>
+#include <llmr/util/pbf.hpp>
+#include <llmr/platform/platform.hpp>
+#include <uv.h>
+#include <algorithm>
+
+namespace llmr {
+
+
+void FontStack::insert(uint32_t id, const SDFGlyph &glyph) {
+ std::lock_guard<std::mutex> lock(mtx);
+ metrics.emplace(id, glyph.metrics);
+ bitmaps.emplace(id, glyph.bitmap);
+ sdfs.emplace(id, glyph);
+}
+
+const std::map<uint32_t, GlyphMetrics> &FontStack::getMetrics() const {
+ std::lock_guard<std::mutex> lock(mtx);
+ return metrics;
+}
+
+const std::map<uint32_t, SDFGlyph> &FontStack::getSDFs() const {
+ std::lock_guard<std::mutex> lock(mtx);
+ return sdfs;
+}
+
+const Shaping FontStack::getShaping(const std::u32string &string) const {
+    std::lock_guard<std::mutex> lock(mtx);
+    uint32_t x = 0;
+    Shaping shaped;
+    // Loop through all characters of this label and shape them, advancing the
+    // pen position by each glyph's advance. Guard the metrics lookup: blindly
+    // dereferencing find() is undefined behavior when a glyph is missing.
+    for (uint32_t chr : string) {
+        shaped.push_back(GlyphPlacement(0, chr, x, 0));
+        auto metric = metrics.find(chr);
+        if (metric != metrics.end()) {
+            x += metric->second.advance;
+        }
+    }
+    return shaped;
+}
+
+GlyphPBF::GlyphPBF(const std::string &fontStack, GlyphRange glyphRange)
+ : future(promise.get_future().share())
+{
+ // Load the glyph set URL
+ std::string url = util::sprintf<255>("http://mapbox.s3.amazonaws.com/gl-glyphs-256/%s/%d-%d.pbf", fontStack.c_str(), glyphRange.first, glyphRange.second);
+
+    // TODO: Find a more reliable URL normalization function
+ std::replace(url.begin(), url.end(), ' ', '+');
+
+ fprintf(stderr, "%s\n", url.c_str());
+
+ platform::request_http(url, [&](platform::Response *res) {
+ if (res->code != 200) {
+ // Something went wrong with loading the glyph pbf. Pass on the error to the future listeners.
+ const std::string msg = util::sprintf<255>("[ERROR] failed to load glyphs (%d): %s\n", res->code, res->error_message.c_str());
+ promise.set_exception(std::make_exception_ptr(std::runtime_error(msg)));
+ } else {
+ // Transfer the data to the GlyphSet and signal its availability.
+ // Once it is available, the caller will need to call parse() to actually
+ // parse the data we received. We are not doing this here since this callback is being
+ // called from another (unknown) thread.
+ data.swap(res->body);
+ promise.set_value(*this);
+ }
+ });
+}
+
+std::shared_future<GlyphPBF &> GlyphPBF::getFuture() {
+ return future;
+}
+
+void GlyphPBF::parse(FontStack &stack) {
+ std::lock_guard<std::mutex> lock(mtx);
+
+ if (!data.size()) {
+ // If there is no data, this means we either haven't received any data, or
+ // we have already parsed the data.
+ return;
+ }
+
+ // Parse the glyph PBF
+ pbf glyphs_pbf(reinterpret_cast<const uint8_t *>(data.data()), data.size());
+
+ while (glyphs_pbf.next()) {
+ if (glyphs_pbf.tag == 1) { // stacks
+ pbf fontstack_pbf = glyphs_pbf.message();
+ while (fontstack_pbf.next()) {
+ if (fontstack_pbf.tag == 3) { // glyphs
+ pbf glyph_pbf = fontstack_pbf.message();
+
+ SDFGlyph glyph;
+
+ while (glyph_pbf.next()) {
+ if (glyph_pbf.tag == 1) { // id
+ glyph.id = glyph_pbf.varint();
+ } else if (glyph_pbf.tag == 2) { // bitmap
+ glyph.bitmap = glyph_pbf.string();
+ } else if (glyph_pbf.tag == 3) { // width
+ glyph.metrics.width = glyph_pbf.varint();
+ } else if (glyph_pbf.tag == 4) { // height
+ glyph.metrics.height = glyph_pbf.varint();
+ } else if (glyph_pbf.tag == 5) { // left
+ glyph.metrics.left = glyph_pbf.svarint();
+ } else if (glyph_pbf.tag == 6) { // top
+ glyph.metrics.top = glyph_pbf.svarint();
+ } else if (glyph_pbf.tag == 7) { // advance
+ glyph.metrics.advance = glyph_pbf.varint();
+ } else {
+ glyph_pbf.skip();
+ }
+ }
+
+ stack.insert(glyph.id, glyph);
+ } else {
+ fontstack_pbf.skip();
+ }
+ }
+ } else {
+ glyphs_pbf.skip();
+ }
+ }
+
+ data.clear();
+}
+
+
+void GlyphStore::waitForGlyphRanges(const std::string &fontStack, const std::set<GlyphRange> &glyphRanges) {
+    // We are implementing a blocking wait with futures: every GlyphPBF has a
+    // future that we wait on until its data has loaded.
+
+ FontStack *stack = nullptr;
+
+ std::vector<std::shared_future<GlyphPBF &>> futures;
+ futures.reserve(glyphRanges.size());
+ {
+ std::lock_guard<std::mutex> lock(mtx);
+ auto &rangeSets = ranges[fontStack];
+
+ stack = &createFontStack(fontStack);
+
+        // Attempt to load each glyph range. If the GlyphPBF already exists,
+        // we get back the same shared_future.
+ for (GlyphRange range : glyphRanges) {
+ futures.emplace_back(loadGlyphRange(fontStack, rangeSets, range));
+ }
+ }
+
+    // Now that we have potentially created all GlyphPBFs, we wait for the
+    // results one by one. When we get a result (or the GlyphPBF is already
+    // loaded), we attempt to parse it.
+ for (std::shared_future<GlyphPBF &> &future : futures) {
+ future.get().parse(*stack);
+ }
+}
+
+std::shared_future<GlyphPBF &> GlyphStore::loadGlyphRange(const std::string &name, std::map<GlyphRange, std::unique_ptr<GlyphPBF>> &rangeSets, const GlyphRange range) {
+ auto range_it = rangeSets.find(range);
+ if (range_it == rangeSets.end()) {
+ // We don't have this glyph set yet for this font stack.
+ range_it = rangeSets.emplace(range, std::make_unique<GlyphPBF>(name, range)).first;
+ }
+
+ return range_it->second->getFuture();
+}
+
+FontStack &GlyphStore::createFontStack(const std::string &fontStack) {
+ auto stack_it = stacks.find(fontStack);
+ if (stack_it == stacks.end()) {
+ stack_it = stacks.emplace(fontStack, std::make_unique<FontStack>()).first;
+ }
+ return *stack_it->second.get();
+}
+
+FontStack &GlyphStore::getFontStack(const std::string &fontStack) {
+ std::lock_guard<std::mutex> lock(mtx);
+ return createFontStack(fontStack);
+}
+
+
+}
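The store above is a load-once cache keyed on promise/shared_future: the first request for a range creates the GlyphPBF and starts the download; every later request shares the same shared_future, and get() blocks until the promise is fulfilled. A minimal sketch of the same pattern — all names hypothetical, with a detached std::thread standing in for platform::request_http:

#include <future>
#include <map>
#include <memory>
#include <mutex>
#include <string>
#include <thread>

struct Resource {
    std::promise<Resource &> promise;
    std::shared_future<Resource &> future{promise.get_future().share()};
    std::string data;

    explicit Resource(std::string url) {
        std::thread([this, url] {       // stands in for the async HTTP request
            data = "payload for " + url;
            promise.set_value(*this);   // wakes every waiter on the future
        }).detach();
    }
};

class Store {
public:
    std::shared_future<Resource &> load(const std::string &url) {
        std::lock_guard<std::mutex> lock(mtx);
        auto it = resources.find(url);
        if (it == resources.end()) {
            // First request: create the resource and start loading it.
            it = resources.emplace(url, std::make_unique<Resource>(url)).first;
        }
        return it->second->future; // later requests share the same future
    }

private:
    std::mutex mtx;
    std::map<std::string, std::unique_ptr<Resource>> resources;
};

int main() {
    Store store;
    auto f = store.load("fonts/0-255.pbf");
    f.get(); // blocks until the worker finishes, as waitForGlyphRanges does
    return 0;
}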
diff --git a/src/text/placement.cpp b/src/text/placement.cpp
index 237fd05d76..f57436bcb4 100644
--- a/src/text/placement.cpp
+++ b/src/text/placement.cpp
@@ -35,21 +35,12 @@ bool byScale(const Anchor &a, const Anchor &b) { return a.scale < b.scale; }
static const Glyph null_glyph;
inline const Glyph &getGlyph(const GlyphPlacement &placed,
- const IndexedFaces &faces) {
- if (placed.face < faces.size()) {
- const GlyphPositions &face = *faces[placed.face];
- if (&face) {
- auto it = face.find(placed.glyph);
- if (it != face.end()) {
- return it->second;
- } else {
- fprintf(stderr, "glyph %d does not exist\n", placed.glyph);
- }
- } else {
- fprintf(stderr, "face pointer is null\n");
- }
+ const GlyphPositions &face) {
+ auto it = face.find(placed.glyph);
+ if (it != face.end()) {
+ return it->second;
} else {
- fprintf(stderr, "face does not exist\n");
+ fprintf(stderr, "glyph %d does not exist\n", placed.glyph);
}
return null_glyph;
@@ -145,8 +136,9 @@ void getSegmentGlyphs(std::back_insert_iterator<GlyphInstances> glyphs,
}
}
-PlacedGlyphs getGlyphs(Anchor &anchor, float advance, const Shaping &shaping,
- const IndexedFaces &faces, float fontScale,
+void getGlyphs(PlacedGlyphs &glyphs, GlyphBoxes &boxes,
+ Anchor &anchor, float advance, const Shaping &shaping,
+ const GlyphPositions &face, float fontScale,
bool horizontal, const std::vector<Coordinate> &line,
float maxAngleDelta, float rotate) {
// The total text advance is the width of this label.
@@ -162,12 +154,10 @@ PlacedGlyphs getGlyphs(Anchor &anchor, float advance, const Shaping &shaping,
// origin.x -= advance;
// }
- PlacedGlyphs glyphs;
-
const uint32_t buffer = 3;
for (const GlyphPlacement &placed : shaping) {
- const Glyph &glyph = getGlyph(placed, faces);
+ const Glyph &glyph = getGlyph(placed, face);
if (!glyph) {
// This glyph is empty and doesn't have any pixels that we'd need to
// show.
@@ -235,29 +225,32 @@ PlacedGlyphs getGlyphs(Anchor &anchor, float advance, const Shaping &shaping,
fontScale * util::max(tl.y, tr.y, bl.y, br.y)};
}
+ GlyphBox glyphBox = GlyphBox{
+ box,
+ // Prevent label from extending past the end of the line
+ util::max(instance.minScale, anchor.scale),
+ instance.maxScale,
+ instance.anchor,
+ horizontal};
+
// Remember the glyph for later insertion.
glyphs.emplace_back(PlacedGlyph{
tl, tr, bl, br, glyph.rect,
static_cast<float>(
- std::fmod((anchor.angle + rotate + instance.offset + 2 * M_PI),
- (2 * M_PI))),
- GlyphBox{box, instance.minScale, instance.maxScale,
- instance.anchor, horizontal}});
- }
- }
+ std::fmod((anchor.angle + rotate + instance.offset + 2 * M_PI), (2 * M_PI))),
+ glyphBox});
- // Prevent label from extending past the end of the line
- for (PlacedGlyph &g : glyphs) {
- g.glyphBox.minScale = util::max(g.glyphBox.minScale, anchor.scale);
+ if (instance.offset == 0.0f) {
+ boxes.emplace_back(glyphBox);
+ }
+ }
}
-
- return glyphs;
}
void Placement::addFeature(TextBucket& bucket,
const std::vector<Coordinate> &line,
const BucketGeometryDescription &info,
- const IndexedFaces &faces,
+ const GlyphPositions &face,
const Shaping &shaping) {
const bool horizontal = info.path == TextPathType::Horizontal;
@@ -268,7 +261,7 @@ void Placement::addFeature(TextBucket& bucket,
const float fontScale =
(tileExtent / util::tileSize) / (glyphSize / info.size);
- const float advance = measureText(faces, shaping);
+ const float advance = measureText(face, shaping);
Anchors anchors;
// fprintf(stderr, "adding feature with advance %f\n", advance);
@@ -289,27 +282,28 @@ void Placement::addFeature(TextBucket& bucket,
}
for (Anchor anchor : anchors) {
- PlacedGlyphs glyphs =
- getGlyphs(anchor, advance, shaping, faces, fontScale, horizontal,
+ PlacedGlyphs glyphs;
+ GlyphBoxes boxes;
+
+ getGlyphs(glyphs, boxes, anchor, advance, shaping, face, fontScale, horizontal,
line, maxAngleDelta, rotate);
PlacementProperty place =
- collision.place(glyphs, anchor, anchor.scale, maxPlacementScale,
+ collision.place(boxes, anchor, anchor.scale, maxPlacementScale,
padding, horizontal, info.alwaysVisible);
if (place) {
- bucket.addGlyphs(glyphs, place.zoom, place.rotationRange,
- zoom - zOffset);
+ bucket.addGlyphs(glyphs, place.zoom, place.rotationRange, zoom - zOffset);
}
}
}
-float Placement::measureText(const IndexedFaces &faces,
+float Placement::measureText(const GlyphPositions &face,
const Shaping &shaping) {
float advance = 0;
// TODO: advance is not calculated correctly. we should instead use the
// bounding box of the glyph placement.
for (const GlyphPlacement &shape : shaping) {
- advance += getGlyph(shape, faces).metrics.advance;
+ advance += getGlyph(shape, face).metrics.advance;
}
return advance;
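getGlyphs now fills two output containers in a single pass — the placed glyphs for rendering and the collision boxes for placement — and clamps each box's minScale at construction time rather than in a follow-up loop over the placed glyphs. A sketch of that shape, under hypothetical simplified types:

#include <algorithm>
#include <vector>

struct Box { float minScale; };
struct Instance { Box box; float offset; };
struct Placed { Box box; float offset; };

void collect(std::vector<Placed> &placed, std::vector<Box> &boxes,
             float anchorScale, const std::vector<Instance> &instances) {
    for (const Instance &ins : instances) {
        Box box = ins.box;
        // Prevent the label from extending past the end of the line: clamp
        // here instead of post-processing every placed glyph afterwards.
        box.minScale = std::max(box.minScale, anchorScale);
        placed.push_back({box, ins.offset});
        if (ins.offset == 0.0f) {
            boxes.push_back(box); // one collision box per zero-offset instance
        }
    }
}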
diff --git a/src/util/constants.cpp b/src/util/constants.cpp
index 2a4ecd3747..f674649266 100644
--- a/src/util/constants.cpp
+++ b/src/util/constants.cpp
@@ -7,3 +7,8 @@ const bool llmr::debug::styleParseWarnings = false;
const bool llmr::debug::spriteWarnings = false;
const bool llmr::debug::renderWarnings = false;
const bool llmr::debug::renderTree = false;
+const bool llmr::debug::labelTextMissingWarning = true;
+const bool llmr::debug::missingFontStackWarning = true;
+const bool llmr::debug::missingFontFaceWarning = true;
+const bool llmr::debug::glyphWarning = true;
+const bool llmr::debug::shapingWarning = true;
diff --git a/src/util/threadpool.cpp b/src/util/threadpool.cpp
index 3b69302b16..29ab11f1c2 100644
--- a/src/util/threadpool.cpp
+++ b/src/util/threadpool.cpp
@@ -1,6 +1,7 @@
#include <llmr/util/threadpool.hpp>
#include <llmr/util/std.hpp>
#include <thread>
+#include <memory>
using namespace llmr::util;
diff --git a/test/headless.cpp b/test/headless.cpp
index c515b36c9a..af34f4e19f 100644
--- a/test/headless.cpp
+++ b/test/headless.cpp
@@ -11,6 +11,15 @@
#include <uv.h>
+namespace llmr {
+namespace platform {
+
+void notify_map_change() {
+ // no-op
+}
+
+}}
+
class View : public llmr::View {
public:
void make_active() {