Diffstat (limited to 'scripts')
-rwxr-xr-x  scripts/clang-tools.sh                  6
-rwxr-xr-x  scripts/code-coverage.sh               17
-rwxr-xr-x  scripts/generate-file-lists.js          1
-rwxr-xr-x  scripts/generate-shaders.js            75
-rwxr-xr-x  scripts/generate-style-code.js          8
-rwxr-xr-x  scripts/publish_core_codecoverage.js  170
-rwxr-xr-x  scripts/publish_github_stats.js       147
-rw-r--r--  scripts/style-spec.js                   7
8 files changed, 384 insertions, 47 deletions
diff --git a/scripts/clang-tools.sh b/scripts/clang-tools.sh
index bdda4544b9..1e73951176 100755
--- a/scripts/clang-tools.sh
+++ b/scripts/clang-tools.sh
@@ -33,7 +33,7 @@ function run_clang_tidy() {
}
function run_clang_tidy_diff() {
- OUTPUT=$(git diff origin/master --src-prefix=${CDUP} --dst-prefix=${CDUP} | \
+ OUTPUT=$(git diff origin/$2 --src-prefix=${CDUP} --dst-prefix=${CDUP} | \
${CLANG_TIDY_PREFIX}/share/clang-tidy-diff.py \
-clang-tidy-binary ${CLANG_TIDY} \
2>/dev/null)
@@ -45,7 +45,7 @@ function run_clang_tidy_diff() {
function run_clang_format() {
echo "Running clang-format on $0..."
- DIFF_FILES=$(git diff origin/master --name-only *cpp)
+ DIFF_FILES=$(git diff origin/$2 --name-only *cpp)
echo "${DIFF_FILES}" | xargs -I{} -P ${JOBS} bash -c 'run_clang_format' {}
${CLANG_FORMAT} -i ${CDUP}/$0 || exit 1
}
@@ -54,7 +54,7 @@ export -f run_clang_tidy run_clang_tidy_diff run_clang_format
echo "Running Clang checks... (this might take a while)"
-if [[ -n $2 ]] && [[ $2 == "--diff" ]]; then
+if [[ -n $3 ]] && [[ $3 == "--diff" ]]; then
run_clang_tidy_diff $@
# XXX disabled until we run clang-format over the entire codebase.
#run_clang_format $@
diff --git a/scripts/code-coverage.sh b/scripts/code-coverage.sh
index d6866f61b8..cb99bd9646 100755
--- a/scripts/code-coverage.sh
+++ b/scripts/code-coverage.sh
@@ -4,7 +4,7 @@ set -e
set -o pipefail
#
-# This script takes three values: $1 should be a decimal value reflecting the
+# This script takes three values: $1 should be a decimal value reflecting the
# percentage of lines covered, with a maximum value of 100.0, $2 is
# the platform the percentage pertains to (iOS or Android), and $3 is the
# test scheme being run (on iOS, this is currently CI).
@@ -17,19 +17,22 @@ if [[ ! $2 = "iOS" && ! $2 = "Android" ]]; then
exit 1
fi
-# Create a formatted JSON file that contains the current coverage.
+circle_sha=""
+if [[ $CIRCLE_SHA1 ]]; then
+ circle_sha="$CIRCLE_SHA1"
+fi
+# Create a formatted JSON file that contains the current coverage.
-current_date=$(TZ=UTC date +"%Y-%m-%d")
+current_date=$(TZ=UTC date +"%FT%T%z")
file_name=$2_coverage.json
cat <<EOF > $file_name
- {"code_coverage":$1,"platform":"$2","sdk":"Maps","scheme":"$3","created_at":"$current_date"}
+{"code_coverage":$1,"platform":"$2","sdk":"Maps","scheme":"$3","created_at":"$current_date","sha":"$circle_sha"}
EOF
gzip -f $file_name
-if [ -z `which aws` ]; then
- brew install awscli
+if [ -z `which aws` ]; then
+ brew install awscli
fi
aws s3 cp $file_name.gz s3://mapbox-loading-dock/raw/mobile.codecoverage/$current_date/
echo $
-
diff --git a/scripts/generate-file-lists.js b/scripts/generate-file-lists.js
index 37e4dbc20b..760a01d87c 100755
--- a/scripts/generate-file-lists.js
+++ b/scripts/generate-file-lists.js
@@ -124,6 +124,7 @@ generateXcodeSourceList('platform/macos/macos.xcodeproj', 'dynamic', 'sdk');
generateXcodeSourceList('platform/ios/ios.xcodeproj', 'dynamic', 'sdk');
const vendorRegex = /^(?:(?:src|include)\/)?(?:(.+)\/)?[^\/]+$/
+generateFileList('vendor/args-files.json', 'vendor/args', vendorRegex, [ "args.hxx" ]);
generateFileList('vendor/boost-files.json', 'vendor/boost', vendorRegex, [ "include/**/*.hpp", "include/**/*.h" ]);
generateFileList('vendor/cheap-ruler-cpp-files.json', 'vendor/cheap-ruler-cpp', vendorRegex, [ "include/**/*.hpp" ]);
generateFileList('vendor/earcut.hpp-files.json', 'vendor/earcut.hpp', vendorRegex, [ "include/**/*.hpp" ]);
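Note: the vendorRegex used by these calls (and by the new vendor/args entry) strips an optional src/ or include/ prefix and captures only the directory portion of each path; the filename itself falls outside the capture group. A quick standalone check of that behaviour, separate from the generator and using made-up sample paths:

```js
// Standalone check of vendorRegex from generate-file-lists.js; sample paths are illustrative only.
const vendorRegex = /^(?:(?:src|include)\/)?(?:(.+)\/)?[^\/]+$/;

console.log('include/boost/algorithm/string.hpp'.match(vendorRegex)[1]);
// -> 'boost/algorithm' (leading include/ stripped, filename dropped)
console.log('args.hxx'.match(vendorRegex)[1]);
// -> undefined (a top-level file such as the new vendor/args header has no directory part)
```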
diff --git a/scripts/generate-shaders.js b/scripts/generate-shaders.js
index 59544d0b34..fc9088fd21 100755
--- a/scripts/generate-shaders.js
+++ b/scripts/generate-shaders.js
@@ -5,6 +5,7 @@ require('flow-remove-types/register');
const path = require('path');
const outputPath = 'src/mbgl/programs/gl';
const zlib = require('zlib');
+const crypto = require('crypto');
var shaders = require('../mapbox-gl-js/src/shaders');
@@ -23,18 +24,28 @@ function basicMinify(src) {
}
for (const key in shaders) {
+ // Rename a_*_t uniforms to u_*_t. This is a workaround until we can use
+ // https://github.com/mapbox/mapbox-gl-js/pull/8055, which is blocked by
+ // https://github.com/mapbox/mapbox-gl-native/issues/13984
+ shaders[key].vertexSource = shaders[key].vertexSource.replace(/\ba_(\w+)_t\b/mg, 'u_$1_t');
+
+ const hash = crypto.createHash('sha1');
+
const vertex = concatenated.length;
- concatenated += basicMinify(shaders[key].vertexSource);
- concatenated += '\n\0';
+ const vertexSource = basicMinify(shaders[key].vertexSource) + '\n\0';
+ hash.update(vertexSource);
+ concatenated += vertexSource;
const fragment = concatenated.length;
- concatenated += basicMinify(shaders[key].fragmentSource);
- concatenated += '\n\0';
+ const fragmentSource = basicMinify(shaders[key].fragmentSource) + '\n\0';
+ hash.update(fragmentSource);
+ concatenated += fragmentSource;
offsets[key] = {
vertex,
fragment,
originalKey: key,
+ hash: hash.digest('hex').substring(0, 16).match(/.{1,2}/g).map(n => `0x${n}`).join(', '),
shaderName: key.replace(/[A-Z]+/g, (match) => `_${match.toLowerCase()}`),
ShaderName: key.replace(/^[a-z]/g, (match) => match.toUpperCase())
};
@@ -44,6 +55,7 @@ for (const key in shaders) {
offsets.symbolSDFIcon = {
vertex: offsets.symbolSDF.vertex,
fragment: offsets.symbolSDF.fragment,
+ hash: offsets.symbolSDF.hash,
originalKey: 'symbolSDF',
shaderName: 'symbol_sdf_icon',
ShaderName: 'SymbolSDFIcon',
@@ -52,6 +64,7 @@ offsets.symbolSDFIcon = {
offsets.symbolSDFText = {
vertex: offsets.symbolSDF.vertex,
fragment: offsets.symbolSDF.fragment,
+ hash: offsets.symbolSDF.hash,
originalKey: 'symbolSDF',
shaderName: 'symbol_sdf_text',
ShaderName: 'SymbolSDFText',
@@ -66,10 +79,6 @@ const compressed = zlib.deflateSync(concatenated, {level: zlib.Z_BEST_COMPRESSIO
.join(',\n ')
.trim();
-function sourceOffset(key, type) {
- return `programs::gl::shaderSource() + ${offsets[key][type]}`
-}
-
writeIfModified(path.join(outputPath, 'shader_source.hpp'), `// NOTE: DO NOT CHANGE THIS FILE. IT IS AUTOMATICALLY GENERATED.
#pragma once
@@ -80,6 +89,9 @@ namespace gl {
const char* shaderSource();
+template <typename>
+struct ShaderSource;
+
} // namespace gl
} // namespace programs
} // namespace mbgl
@@ -114,29 +126,15 @@ writeIfModified(path.join(outputPath, 'preludes.hpp'), `// NOTE: DO NOT CHANGE T
#pragma once
-namespace mbgl {
-namespace programs {
-namespace gl {
-
-extern const char* vertexShaderPrelude;
-extern const char* fragmentShaderPrelude;
-
-} // namespace gl
-} // namespace programs
-} // namespace mbgl
-`);
-
-writeIfModified(path.join(outputPath, 'preludes.cpp'), `// NOTE: DO NOT CHANGE THIS FILE. IT IS AUTOMATICALLY GENERATED.
-
-#include <mbgl/programs/gl/preludes.hpp>
-#include <mbgl/programs/gl/shader_source.hpp>
+#include <cstdint>
namespace mbgl {
namespace programs {
namespace gl {
-const char* vertexShaderPrelude = ${sourceOffset('prelude', 'vertex')};
-const char* fragmentShaderPrelude = ${sourceOffset('prelude', 'fragment')};
+constexpr const uint8_t preludeHash[8] = { ${offsets['prelude'].hash} };
+constexpr const auto vertexPreludeOffset = ${offsets['prelude'].vertex};
+constexpr const auto fragmentPreludeOffset = ${offsets['prelude'].fragment};
} // namespace gl
} // namespace programs
@@ -152,18 +150,37 @@ for (const key in offsets) {
writeIfModified(path.join(outputPath, `${shaderName}.cpp`), `// NOTE: DO NOT CHANGE THIS FILE. IT IS AUTOMATICALLY GENERATED.
#include <mbgl/programs/${shaderName}_program.hpp>
+#include <mbgl/programs/gl/preludes.hpp>
#include <mbgl/programs/gl/shader_source.hpp>
#include <mbgl/gl/program.hpp>
namespace mbgl {
+namespace programs {
+namespace gl {
+
+template <typename>
+struct ShaderSource;
+
+template <>
+struct ShaderSource<${ShaderName}Program> {
+ static constexpr const char* name = "${shaderName}";
+ static constexpr const uint8_t hash[8] = { ${offsets[key].hash} };
+ static constexpr const auto vertexOffset = ${offsets[key].vertex};
+ static constexpr const auto fragmentOffset = ${offsets[key].fragment};
+};
+
+constexpr const char* ShaderSource<${ShaderName}Program>::name;
+constexpr const uint8_t ShaderSource<${ShaderName}Program>::hash[8];
+
+} // namespace gl
+} // namespace programs
+
namespace gfx {
template <>
std::unique_ptr<Program<${ShaderName}Program>>
Context::createProgram<gl::Context>(const ProgramParameters& programParameters) {
- return gl::Program<${ShaderName}Program>::createProgram(
- reinterpret_cast<gl::Context&>(*this), programParameters, "${shaderName}",
- ${sourceOffset(key, 'vertex')}, ${sourceOffset(key, 'fragment')});
+ return std::make_unique<gl::Program<${ShaderName}Program>>(programParameters);
}
} // namespace gfx
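The hash field added to each offsets entry above is the first 8 bytes of a SHA-1 over the minified vertex and fragment sources, rendered as a C array initializer for the generated uint8_t hash[8] members. A minimal standalone sketch of that formatting step (the input string here is a placeholder, not a real shader):

```js
const crypto = require('crypto');

// Same formatting expression as in generate-shaders.js, applied to a placeholder input.
const digest = crypto.createHash('sha1').update('placeholder shader source').digest('hex');
const initializer = digest
  .substring(0, 16)          // keep the first 8 bytes (16 hex characters)
  .match(/.{1,2}/g)          // split into two-character byte pairs
  .map(n => `0x${n}`)
  .join(', ');

console.log(initializer);    // "0x.., 0x.., ..." — the actual bytes depend on the input
```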
diff --git a/scripts/generate-style-code.js b/scripts/generate-style-code.js
index ae25a856a8..5145755cec 100755
--- a/scripts/generate-style-code.js
+++ b/scripts/generate-style-code.js
@@ -72,7 +72,7 @@ global.evaluatedType = function (property) {
if (property.length) {
return `std::array<${evaluatedType({type: property.value})}, ${property.length}>`;
} else {
- return `std::vector<${evaluatedType({type: property.value})}>`;
+ return `std::vector<${evaluatedType({type: property.value, name: property.name})}>`;
}
default: throw new Error(`unknown type for ${property.name}`)
}
@@ -99,7 +99,7 @@ function attributeUniformType(property, type) {
[ property.name.replace(type + '-', '').replace(/-/g, '_') ];
return names.map(name => {
- return `attributes::a_${name}, uniforms::u_${name}`
+ return `attributes::${name}, uniforms::${name}`
}).join(', ');
}
@@ -153,7 +153,11 @@ global.defaultValue = function (property) {
switch (property.type) {
case 'number':
+ if (property.default === undefined) {
+ return 0;
+ } else {
return property.default;
+ }
case 'formatted':
case 'string':
return JSON.stringify(property.default || "");
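A small sketch of the number-default fallback added above; the assumption (inferred, not shown in this diff) is that a spec property with no default would otherwise surface as undefined in the generated code, so 0 is emitted instead:

```js
// Mirrors the new 'number' branch of defaultValue() in generate-style-code.js.
function defaultNumber(property) {
  if (property.default === undefined) {
    return 0;              // spec entry omits a default
  }
  return property.default; // spec entry provides one
}

console.log(defaultNumber({ type: 'number' }));               // 0
console.log(defaultNumber({ type: 'number', default: 1.5 })); // 1.5
```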
diff --git a/scripts/publish_core_codecoverage.js b/scripts/publish_core_codecoverage.js
new file mode 100755
index 0000000000..43de662a13
--- /dev/null
+++ b/scripts/publish_core_codecoverage.js
@@ -0,0 +1,170 @@
+#!/usr/bin/env node
+
+// Script to retrieve total code coverage ratio from codecov.io
+// for a given commit hash, and upload it to the S3 bucket.
+
+const https = require('https');
+const zlib = require('zlib');
+const AWS = require('aws-sdk');
+const {execSync} = require('child_process');
+
+const args = process.argv.slice(2);
+const options = {
+ help: false
+};
+
+const usage = 'usage: publish_code_coverage.js [options]\n' +
+'options: \n' +
+' -h, --help \n' +
+' -p, --platform <Platform Name>\n' +
+' -s, --sdk <Sdk Name> \n' +
+' -c, --commit <Commit Hash> \n';
+
+for (var i = 0; i < args.length; i++) {
+ var arg = args[i];
+
+ switch(arg) {
+ case '-h':
+ case '--help':
+ options.help = true;
+ break;
+ case '-s':
+ case '--sdk':
+ options.sdkName = args[i + 1];
+ break;
+ case '-p':
+ case '--platform':
+ options.platformName = args[i + 1];
+ break;
+ case '-c':
+ case '--commit':
+ options.commitId = args[i + 1];
+ break;
+ }
+}
+
+if (options.help == true) {
+ console.log(usage);
+ process.exit(0);
+}
+
+// Commit hash
+const commitHash = options.commitId ? options.commitId : process.env['CIRCLE_SHA1'];
+if (!options.sdkName || !options.platformName || !commitHash) {
+ console.log(usage);
+ process.exit(0);
+}
+
+// Commit Message
+const commitMessage = execSync(`git show --pretty=format:%s -s ${commitHash}`).toString().trim();
+if (!commitMessage) {
+ throw new Error ('Commit message is missing');
+}
+
+const date = new Date().toISOString().substring(0, 19);
+
+process.on('uncaughtException', (err) => {
+ console.error(err);
+ process.exit(1);
+});
+
+// Parse the response received from codecov.io and build the
+// data point that is going to be uploaded to S3 bucket.
+function parseResponse(data) {
+ if (data && data.commit) {
+ if (!data.commit.totals || !data.commit.totals.c) {
+ return;
+ }
+
+ const source = {
+ code_coverage: Number(data.commit.totals.c),
+ platform: options.platformName,
+ sdk: options.sdkName,
+ commit: commitHash,
+ commit_message: commitMessage,
+ created_at: date
+ };
+
+ return source;
+ }
+}
+
+// Upload to data source used by Mapbox internal metrics dashboards
+function uploadData(data) {
+ return new AWS.S3({region: 'us-east-1'}).putObject({
+ Body: zlib.gzipSync(JSON.stringify(data)),
+ Bucket: 'mapbox-loading-dock',
+ Key: `raw/mobile_staging.codecoverage/${date.substring(0,10)}/${options.sdkName}-coverage-${commitHash}.json.gz`,
+ CacheControl: 'max-age=300',
+ ContentEncoding: 'gzip',
+ ContentType: 'application/json'
+ }).promise();
+}
+
+// Attempt to retrieve code coverage report from codecov.io
+// for a given commit hash.
+function httpRequest() {
+ const options = {
+ hostname: 'codecov.io',
+ port: 443,
+ path: '/api/gh/mapbox/mapbox-gl-native/commit/' + commitHash,
+ method: 'GET'
+ };
+
+ return new Promise((resolve, reject) => {
+ setTimeout(function() {
+ const req = https.request(options, (res) => {
+ var body = [];
+ res.on('data', (chunk) => {
+ body.push(chunk);
+ }).on('error', (error) => {
+ reject(error);
+ }).on('end', () => {
+ if (res.statusCode < 200 || res.statusCode >= 300) {
+ return reject(new Error('Failed to fetch coverage report from codecov.io. StatusCode=' + res.statusCode));
+ }
+
+ try {
+ body = JSON.parse(Buffer.concat(body).toString());
+ resolve(body);
+ } catch(e) {
+ reject(e);
+ }
+ });
+ });
+
+ // Reject on error
+ req.on('error', (err) => {
+ reject(err);
+ });
+
+ req.end();
+ }, 30000);
+ });
+}
+
+var errResponse = false;
+const publishWithRetry = (maxRetries) => {
+ httpRequest().then((body) => {
+ const dataSource = parseResponse(body);
+ if (dataSource) {
+ return uploadData(dataSource);
+ } else {
+ errResponse = true;
+ throw new Error('Failed to parse coverage report received from codecov.io.');
+ }
+ }).then(data => {
+ console.log('Successfully uploaded code coverage metrics to S3');
+ }).catch(err => {
+ if (maxRetries > 1 && errResponse) {
+ console.log('Invalid coverage report received. Trying to retrieve again.');
+ errResponse = false;
+ return publishWithRetry(maxRetries - 1);
+ }
+
+ console.error('Failed to upload code coverage metrics to S3: ' + err.message);
+ });
+};
+
+// Fetch and publish code coverage report
+publishWithRetry(5);
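For context, parseResponse() above reads only a few fields of the codecov.io payload; a reduced sketch of the response shape it expects, with an illustrative coverage value:

```js
// Minimal shape of the codecov.io response consumed by parseResponse().
const sampleResponse = {
  commit: {
    totals: {
      c: "73.5"   // overall coverage ratio; Number() is applied before upload
    }
  }
};

// parseResponse(sampleResponse) then yields the S3 data point:
// { code_coverage: 73.5, platform, sdk, commit, commit_message, created_at }
// A missing commit.totals.c makes publishWithRetry() treat the report as invalid and retry.
```

Per the usage string at the top of the script, an invocation along the lines of publish_core_codecoverage.js --platform <name> --sdk <name> --commit <hash> is expected, with CIRCLE_SHA1 as the fallback commit when --commit is omitted.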
diff --git a/scripts/publish_github_stats.js b/scripts/publish_github_stats.js
new file mode 100755
index 0000000000..f79a5082d0
--- /dev/null
+++ b/scripts/publish_github_stats.js
@@ -0,0 +1,147 @@
+#!/usr/bin/env node
+
+const assert = require('assert');
+const jwt = require('jsonwebtoken');
+const github = require('@octokit/rest')();
+const zlib = require('zlib');
+const AWS = require('aws-sdk');
+
+const SIZE_CHECK_APP_ID = 14028;
+const SIZE_CHECK_APP_INSTALLATION_ID = 229425;
+
+// Error handling
+
+process.on('unhandledRejection', error => {
+ console.log(error);
+ process.exit(1)
+});
+
+// Github authorization
+
+const pk = process.env['SIZE_CHECK_APP_PRIVATE_KEY'];
+if (!pk) {
+ console.log('Fork PR; not publishing size.');
+ process.exit(0);
+}
+
+const key = Buffer.from(pk, 'base64').toString('binary');
+const payload = {
+ exp: Math.floor(Date.now() / 1000) + 60,
+ iat: Math.floor(Date.now() / 1000),
+ iss: SIZE_CHECK_APP_ID
+};
+
+const token = jwt.sign(payload, key, {algorithm: 'RS256'});
+github.authenticate({type: 'app', token});
+
+// Metrics: Github statistics
+let openIssuesTotal = 0;
+let openIssuesTotalFromNonMembers = 0;
+let openIssuesTotalCore = 0;
+let openIssuesTotalAndroid = 0;
+let openIssuesTotalIOS = 0;
+let openIssuesTotalGLJSParity = 0;
+let openPullRequestsTotal = 0;
+let openPullRequestsTotalFromNonMembers = 0;
+let openPullRequestsSinceLastMonth = 0;
+let openPullRequestsSinceLastMonthFromNonMembers = 0;
+
+function collectMetricsFromIssues(issues) {
+ const oneMonthAgo = function() { let date = new Date(); date.setMonth(date.getMonth() - 1); return date; }();
+
+ // Metrics
+ issues.data.forEach(function (issue) {
+ const issueCreatedAt = new Date(issue.created_at);
+ const isMapboxAuthor = issue.author_association === "MEMBER";
+
+ if (issue.pull_request) {
+ openPullRequestsTotal++;
+ if (!isMapboxAuthor) {
+ openPullRequestsTotalFromNonMembers++;
+ }
+ if (issueCreatedAt >= oneMonthAgo) {
+ openPullRequestsSinceLastMonth++;
+ if (!isMapboxAuthor) {
+ openPullRequestsSinceLastMonthFromNonMembers++;
+ }
+ }
+ } else {
+ openIssuesTotal++;
+ if (!isMapboxAuthor) {
+ openIssuesTotalFromNonMembers++;
+ }
+ issue.labels.forEach(function (label) {
+ switch (label.name) {
+ case "Core":
+ openIssuesTotalCore++;
+ break;
+ case "Android":
+ openIssuesTotalAndroid++;
+ break;
+ case "iOS":
+ openIssuesTotalIOS++;
+ break;
+ case "GL JS parity":
+ openIssuesTotalGLJSParity++;
+ break;
+ default:
+ break;
+ }
+ });
+ }
+ });
+}
+
+function publishMetrics() {
+ let metrics = {
+ 'created_at': new Date().toISOString().substring(0, 10),
+ 'open_issues_total': openIssuesTotal,
+ 'open_issues_total_from_non_members': openIssuesTotalFromNonMembers,
+ 'open_issues_total_core': openIssuesTotalCore,
+ 'open_issues_total_android': openIssuesTotalAndroid,
+ 'open_issues_total_ios': openIssuesTotalIOS,
+ 'open_issues_total_gl_js_parity': openIssuesTotalGLJSParity,
+ 'open_pull_requests_total': openPullRequestsTotal,
+ 'open_pull_requests_total_from_non_members': openPullRequestsTotalFromNonMembers,
+ 'open_pull_requests_since_last_month': openPullRequestsSinceLastMonth,
+ 'open_pull_requests_since_last_month_from_non_members': openPullRequestsSinceLastMonthFromNonMembers
+ };
+
+ var promise = new AWS.S3({region: 'us-east-1'}).putObject({
+ Body: zlib.gzipSync(JSON.stringify(metrics)),
+ Bucket: 'mapbox-loading-dock',
+ Key: `raw/mobile_staging.github_stats/${metrics['created_at']}/METRIC.json.gz`,
+ CacheControl: 'max-age=300',
+ ContentEncoding: 'gzip',
+ ContentType: 'application/json'
+ }).promise();
+
+ return Promise.all([promise]).then(data => {
+ return console.log("Successfully uploaded Github Stats metrics to S3");
+ }).catch(err => {
+ console.log("Error uploading Github Stats metrics to S3 " + err.message);
+ return err;
+ });
+}
+
+function recursiveListForRepo(query) {
+ assert(query);
+ query.then(result => {
+ collectMetricsFromIssues(result);
+ if (github.hasNextPage(result)) {
+ recursiveListForRepo(github.getNextPage(result));
+ } else {
+ publishMetrics();
+ }
+ }).catch(error => {
+    console.log("Error fetching the repository issues list: " + error.message);
+ });
+}
+
+github.apps.createInstallationToken({ installation_id: SIZE_CHECK_APP_INSTALLATION_ID })
+ .then(({data}) => {
+ github.authenticate({ type: 'token', token: data.token });
+ })
+ .then(() => {
+ recursiveListForRepo(github.issues.listForRepo({ owner: 'mapbox', repo: 'mapbox-gl-native', state: 'open', per_page: 100 }));
+ });
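The counters in collectMetricsFromIssues() above read only a handful of fields from each item returned by issues.listForRepo; a reduced sketch of that per-issue shape, with illustrative values:

```js
// Fields of a GitHub issue object that the metrics above depend on.
const sampleIssue = {
  created_at: '2019-03-01T12:00:00Z',       // drives the "since last month" PR buckets
  author_association: 'MEMBER',             // anything other than MEMBER counts as a non-member
  pull_request: undefined,                  // present (an object) only when the item is a pull request
  labels: [{ name: 'Core' }, { name: 'GL JS parity' }]  // mapped onto the per-label issue counters
};
```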
diff --git a/scripts/style-spec.js b/scripts/style-spec.js
index 4bbd453a86..b2686a6a77 100644
--- a/scripts/style-spec.js
+++ b/scripts/style-spec.js
@@ -3,9 +3,4 @@ var spec = module.exports = require('../mapbox-gl-js/src/style-spec/reference/v8
// Make temporary modifications here when Native doesn't have all features that JS has.
delete spec.layout_symbol['symbol-sort-key'];
delete spec.layout_symbol['symbol-z-order'].values['auto'];
-spec.layout_symbol['symbol-z-order'].default = 'viewport-y';
-
-delete spec.layout_symbol['text-variable-anchor'];
-delete spec.layout_symbol['text-radial-offset'];
-delete spec.layout_symbol['text-justify'].values['auto'];
-spec.layout_symbol['text-offset'].requires.splice(1, 1); // { "!": "text-radial-offset" }
\ No newline at end of file
+spec.layout_symbol['symbol-z-order'].default = 'viewport-y';
\ No newline at end of file