diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index ea0f69d8607..1a65418b938 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -49,10 +49,10 @@ Okay, so you have decided on the proper branch. Create a feature branch and start hacking: ``` -$ git checkout -b my-feature-branch -t origin/v0.8 +$ git checkout -b my-feature-branch -t origin/v0.10 ``` -(Where v0.8 is the latest stable branch as of this writing.) +(Where v0.10 is the latest stable branch as of this writing.) ### COMMIT @@ -68,14 +68,15 @@ Writing good commit logs is important. A commit log should describe what changed and why. Follow these guidelines when writing one: 1. The first line should be 50 characters or less and contain a short - description of the change. + description of the change prefixed with the name of the changed + subsystem (e.g. "net: add localAddress and localPort to Socket"). 2. Keep the second line blank. 3. Wrap all other lines at 72 columns. A good commit log looks like this: ``` -Header line: explaining the commit in one line +subsystem: explaining the commit in one line Body of commit message is a few lines of text, explaining things in more detail, possibly giving some background about the issue @@ -99,7 +100,7 @@ Use `git rebase` (not `git merge`) to sync your work from time to time. ``` $ git fetch upstream -$ git rebase upstream/v0.8 # or upstream/master +$ git rebase upstream/v0.10 # or upstream/master ``` diff --git a/benchmark/misc/child-process-read.js b/benchmark/misc/child-process-read.js new file mode 100644 index 00000000000..894dd55d301 --- /dev/null +++ b/benchmark/misc/child-process-read.js @@ -0,0 +1,28 @@ +var common = require('../common.js'); +var bench = common.createBenchmark(main, { + len: [64, 256, 1024, 4096, 32768], + dur: [5] +}); + +var spawn = require('child_process').spawn; +function main(conf) { + bench.start(); + + var dur = +conf.dur; + var len = +conf.len; + + var msg = '"' + Array(len).join('.') + '"'; + var options = { 'stdio': ['ignore', 'ipc', 'ignore'] }; + var child = spawn('yes', [msg], options); + + var bytes = 0; + child.on('message', function(msg) { + bytes += msg.length; + }); + + setTimeout(function() { + child.kill(); + var gbits = (bytes * 8) / (1024 * 1024 * 1024); + bench.end(gbits); + }, dur * 1000); +} diff --git a/deps/openssl/openssl.gyp b/deps/openssl/openssl.gyp index 68d89ce9a2d..0b08ecdf5d6 100644 --- a/deps/openssl/openssl.gyp +++ b/deps/openssl/openssl.gyp @@ -882,7 +882,13 @@ 'defines': [ 'MK1MF_BUILD', 'WIN32_LEAN_AND_MEAN' - ] + ], + 'link_settings': { + 'libraries': [ + '-lgdi32.lib', + '-luser32.lib', + ] + } }, { 'defines': [ # ENGINESDIR must be defined if OPENSSLDIR is. diff --git a/doc/api/crypto.markdown b/doc/api/crypto.markdown index 7cc4bdd920e..27b03622e85 100644 --- a/doc/api/crypto.markdown +++ b/doc/api/crypto.markdown @@ -19,7 +19,7 @@ Returns an array with the names of the supported ciphers. Example: var ciphers = crypto.getCiphers(); - console.log(ciphers); // ['AES128-SHA', 'AES256-SHA', ...] + console.log(ciphers); // ['AES-128-CBC', 'AES-128-CBC-HMAC-SHA1', ...] ## crypto.getHashes() diff --git a/doc/api/process.markdown b/doc/api/process.markdown index 639bdd74bd3..c345da76900 100644 --- a/doc/api/process.markdown +++ b/doc/api/process.markdown @@ -93,6 +93,20 @@ that writes to them are usually blocking. They are blocking in the case that they refer to regular files or TTY file descriptors. In the case they refer to pipes, they are non-blocking like other streams. 
+To check if Node is being run in a TTY context, read the `isTTY` property +on `process.stderr`, `process.stdout`, or `process.stdin`: + + $ node -p "Boolean(process.stdin.isTTY)" + true + $ echo "foo" | node -p "Boolean(process.stdin.isTTY)" + false + + $ node -p "Boolean(process.stdout.isTTY)" + true + $ node -p "Boolean(process.stdout.isTTY)" | cat + false + +See [the tty docs](tty.html#tty_tty) for more information. ## process.stderr diff --git a/doc/api/stdio.markdown b/doc/api/stdio.markdown index d7f9b353be0..408f0937490 100644 --- a/doc/api/stdio.markdown +++ b/doc/api/stdio.markdown @@ -9,6 +9,19 @@ For printing to stdout and stderr. Similar to the console object functions provided by most web browsers, here the output is sent to stdout or stderr. +The console functions are synchronous when the destination is a terminal or +a file (to avoid lost messages in case of premature exit) and asynchronous +when it's a pipe (to avoid blocking for long periods of time). + +That is, in the following example, stdout is non-blocking while stderr +is blocking: + + $ node script.js 2> error.log | tee info.log + +In daily use, the blocking/non-blocking dichotomy is not something you +should worry about unless you log huge amounts of data. + + ## console.log([data], [...]) Prints to stdout with newline. This function can take multiple arguments in a diff --git a/doc/api/tls.markdown b/doc/api/tls.markdown index 28bc05630e6..f89de220596 100644 --- a/doc/api/tls.markdown +++ b/doc/api/tls.markdown @@ -76,6 +76,16 @@ handshake extensions allowing you: certificates. +## tls.getCiphers() + +Returns an array with the names of the supported SSL ciphers. + +Example: + + var ciphers = tls.getCiphers(); + console.log(ciphers); // ['AES128-SHA', 'AES256-SHA', ...] + + ## tls.createServer(options, [secureConnectionListener]) Creates a new [tls.Server][]. The `connectionListener` argument is diff --git a/lib/_stream_transform.js b/lib/_stream_transform.js index 1b6bbb00c9d..ec2b46a9534 100644 --- a/lib/_stream_transform.js +++ b/lib/_stream_transform.js @@ -101,6 +101,7 @@ function afterTransform(stream, er, data) { cb(er); var rs = stream._readableState; + rs.reading = false; if (rs.needReadable || rs.length < rs.highWaterMark) { stream._read(rs.highWaterMark); } diff --git a/lib/_stream_writable.js b/lib/_stream_writable.js index ba74f70c2a9..346f2cc96ab 100644 --- a/lib/_stream_writable.js +++ b/lib/_stream_writable.js @@ -327,7 +327,10 @@ Writable.prototype.end = function(chunk, encoding, cb) { }; function finishMaybe(stream, state) { - if (state.ending && state.length === 0 && !state.finished) { + if (state.ending && + state.length === 0 && + !state.finished && + !state.writing) { state.finished = true; stream.emit('finish'); } diff --git a/lib/child_process.js b/lib/child_process.js index 80735d4d0c2..e67615d21b3 100644 --- a/lib/child_process.js +++ b/lib/child_process.js @@ -19,6 +19,7 @@ // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. 
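[Annotation] The child_process hunk below swaps per-chunk `toString()` (previously even forced to ascii) for a `StringDecoder`, so multi-byte UTF-8 characters that straddle a read boundary survive intact. A minimal sketch of the failure mode being fixed (the split buffers are illustrative):

```
var StringDecoder = require('string_decoder').StringDecoder;

// 'ß' is 0xC3 0x9F in UTF-8; pretend the channel delivered it in two reads.
var chunks = [new Buffer([0xc3]), new Buffer([0x9f])];

// Decoding each chunk in isolation mangles the split character into
// replacement characters:
console.log(chunks[0].toString() + chunks[1].toString()); // '\ufffd\ufffd'

// StringDecoder buffers the trailing partial sequence until the next write:
var decoder = new StringDecoder('utf8');
console.log(decoder.write(chunks[0]) + decoder.write(chunks[1])); // 'ß'
```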
+var StringDecoder = require('string_decoder').StringDecoder; var EventEmitter = require('events').EventEmitter; var net = require('net'); var dgram = require('dgram'); @@ -321,11 +322,12 @@ function handleMessage(target, message, handle) { function setupChannel(target, channel) { target._channel = channel; + var decoder = new StringDecoder('utf8'); var jsonBuffer = ''; channel.buffering = false; channel.onread = function(pool, offset, length, recvHandle) { if (pool) { - jsonBuffer += pool.toString('ascii', offset, offset + length); + jsonBuffer += decoder.write(pool.slice(offset, offset + length)); var i, start = 0; @@ -335,6 +337,7 @@ function setupChannel(target, channel) { var message = JSON.parse(json); handleMessage(target, message, recvHandle); + recvHandle = undefined; start = i + 1; } diff --git a/lib/crypto.js b/lib/crypto.js index 12fe68c6da1..1e7ec9655dd 100644 --- a/lib/crypto.js +++ b/lib/crypto.js @@ -576,21 +576,23 @@ exports.prng = pseudoRandomBytes; exports.getCiphers = function() { - return getCiphers.call(null, arguments).sort(); + return filterDuplicates(getCiphers.call(null, arguments)); }; exports.getHashes = function() { - var names = getHashes.call(null, arguments); + return filterDuplicates(getHashes.call(null, arguments)); +}; + + +function filterDuplicates(names) { // Drop all-caps names in favor of their lowercase aliases, // for example, 'sha1' instead of 'SHA1'. var ctx = {}; - names = names.forEach(function(name) { + names.forEach(function(name) { if (/^[0-9A-Z\-]+$/.test(name)) name = name.toLowerCase(); ctx[name] = true; }); - names = Object.getOwnPropertyNames(ctx); - - return names.sort(); -}; + return Object.getOwnPropertyNames(ctx).sort(); +} diff --git a/lib/timers.js b/lib/timers.js index 989050538ee..3900d61c94c 100644 --- a/lib/timers.js +++ b/lib/timers.js @@ -86,7 +86,7 @@ function listOnTimeout() { var first; while (first = L.peek(list)) { var diff = now - first._idleStart; - if (diff + 1 < msecs) { + if (diff < msecs) { list.start(msecs - diff, 0); debug(msecs + ' list wait because diff is ' + diff); return; @@ -150,6 +150,11 @@ exports.enroll = function(item, msecs) { // then we should unenroll it from that if (item._idleNext) unenroll(item); + // Ensure that msecs fits into signed int32 + if (msecs > 0x7fffffff) { + msecs = 0x7fffffff; + } + item._idleTimeout = msecs; L.init(item); }; diff --git a/lib/tls.js b/lib/tls.js index a7908f77fce..df2afec5333 100644 --- a/lib/tls.js +++ b/lib/tls.js @@ -40,6 +40,17 @@ exports.CLIENT_RENEG_WINDOW = 600; exports.SLAB_BUFFER_SIZE = 10 * 1024 * 1024; +exports.getCiphers = function() { + var names = process.binding('crypto').getSSLCiphers(); + // Drop all-caps names in favor of their lowercase aliases, + var ctx = {}; + names.forEach(function(name) { + if (/^[0-9A-Z\-]+$/.test(name)) name = name.toLowerCase(); + ctx[name] = true; + }); + return Object.getOwnPropertyNames(ctx).sort(); +}; + var debug; if (process.env.NODE_DEBUG && /tls/.test(process.env.NODE_DEBUG)) { diff --git a/src/node_crypto.cc b/src/node_crypto.cc index bab3b6971cf..21541f34dee 100644 --- a/src/node_crypto.cc +++ b/src/node_crypto.cc @@ -3466,11 +3466,13 @@ Handle RandomBytes(const Arguments& args) { // maybe allow a buffer to write to? 
cuts down on object creation // when generating random data in a loop if (!args[0]->IsUint32()) { - Local s = String::New("Argument #1 must be number > 0"); - return ThrowException(Exception::TypeError(s)); + return ThrowTypeError("Argument #1 must be number > 0"); } - const size_t size = args[0]->Uint32Value(); + const uint32_t size = args[0]->Uint32Value(); + if (size > Buffer::kMaxLength) { + return ThrowTypeError("size > Buffer::kMaxLength"); + } RandomBytesRequest* req = new RandomBytesRequest(); req->error_ = 0; @@ -3502,7 +3504,7 @@ Handle RandomBytes(const Arguments& args) { } -Handle GetCiphers(const Arguments& args) { +Handle GetSSLCiphers(const Arguments& args) { HandleScope scope(node_isolate); SSL_CTX* ctx = SSL_CTX_new(TLSv1_server_method()); @@ -3531,19 +3533,28 @@ Handle GetCiphers(const Arguments& args) { } -static void add_hash_to_array(const EVP_MD* md, - const char* from, - const char* to, - void* arg) { +template +static void array_push_back(const TypeName* md, + const char* from, + const char* to, + void* arg) { Local& arr = *static_cast*>(arg); arr->Set(arr->Length(), String::New(from)); } +Handle GetCiphers(const Arguments& args) { + HandleScope scope; + Local arr = Array::New(); + EVP_CIPHER_do_all_sorted(array_push_back, &arr); + return scope.Close(arr); +} + + Handle GetHashes(const Arguments& args) { HandleScope scope(node_isolate); Local arr = Array::New(); - EVP_MD_do_all_sorted(add_hash_to_array, &arr); + EVP_MD_do_all_sorted(array_push_back, &arr); return scope.Close(arr); } @@ -3586,6 +3597,7 @@ void InitCrypto(Handle target) { NODE_SET_METHOD(target, "PBKDF2", PBKDF2); NODE_SET_METHOD(target, "randomBytes", RandomBytes); NODE_SET_METHOD(target, "pseudoRandomBytes", RandomBytes); + NODE_SET_METHOD(target, "getSSLCiphers", GetSSLCiphers); NODE_SET_METHOD(target, "getCiphers", GetCiphers); NODE_SET_METHOD(target, "getHashes", GetHashes); diff --git a/test/simple/test-child-process-fork-getconnections.js b/test/simple/test-child-process-fork-getconnections.js index 326c6d9f6cc..ad04dd73362 100644 --- a/test/simple/test-child-process-fork-getconnections.js +++ b/test/simple/test-child-process-fork-getconnections.js @@ -35,6 +35,7 @@ if (process.argv[2] === 'child') { } if (m.cmd === 'close') { + assert.equal(socket, undefined); sockets[m.id].once('close', function() { process.send({ id: m.id, status: 'closed' }); }); diff --git a/test/simple/test-child-process-send-utf8.js b/test/simple/test-child-process-send-utf8.js new file mode 100644 index 00000000000..b3ce24390ed --- /dev/null +++ b/test/simple/test-child-process-send-utf8.js @@ -0,0 +1,34 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var common = require('../common'); +var assert = require('assert'); +var fork = require('child_process').fork; + +var expected = Array(1e5).join('ßßßß'); +if (process.argv[2] === 'child') { + process.send(expected); +} else { + var child = fork(process.argv[1], ['child']); + child.on('message', common.mustCall(function(actual) { + assert.equal(actual, expected); + })); +} diff --git a/test/simple/test-crypto-random.js b/test/simple/test-crypto-random.js index 321c8574cdf..24a76f4f1ac 100644 --- a/test/simple/test-crypto-random.js +++ b/test/simple/test-crypto-random.js @@ -70,3 +70,9 @@ function checkCall(cb, desc) { return called_ = true, cb.apply(cb, Array.prototype.slice.call(arguments)); }; } + +// #5126, "FATAL ERROR: v8::Object::SetIndexedPropertiesToExternalArrayData() +// length exceeds max acceptable value" +assert.throws(function() { + crypto.randomBytes(0x3fffffff + 1); +}, TypeError); diff --git a/test/simple/test-crypto.js b/test/simple/test-crypto.js index 4a7c45681cd..e17f2d5599e 100644 --- a/test/simple/test-crypto.js +++ b/test/simple/test-crypto.js @@ -834,20 +834,24 @@ testPBKDF2('pass\0word', 'sa\0lt', 4096, 16, '\x25\xe0\xc3'); function assertSorted(list) { - for (var i = 0, k = list.length - 1; i < k; ++i) { - var a = list[i + 0]; - var b = list[i + 1]; - assert(a <= b); - } + assert.deepEqual(list, list.sort()); } -// Assume that we have at least AES256-SHA. -assert.notEqual(0, crypto.getCiphers()); -assert.notEqual(-1, crypto.getCiphers().indexOf('AES256-SHA')); +// Assume that we have at least AES-128-CBC. +assert.notEqual(0, crypto.getCiphers().length); +assert.notEqual(-1, crypto.getCiphers().indexOf('aes-128-cbc')); +assert.equal(-1, crypto.getCiphers().indexOf('AES-128-CBC')); assertSorted(crypto.getCiphers()); +// Assume that we have at least AES256-SHA. +var tls = require('tls'); +assert.notEqual(0, tls.getCiphers().length); +assert.notEqual(-1, tls.getCiphers().indexOf('aes256-sha')); +assert.equal(-1, tls.getCiphers().indexOf('AES256-SHA')); +assertSorted(tls.getCiphers()); + // Assert that we have sha and sha1 but not SHA and SHA1. -assert.notEqual(0, crypto.getHashes()); +assert.notEqual(0, crypto.getHashes().length); assert.notEqual(-1, crypto.getHashes().indexOf('sha1')); assert.notEqual(-1, crypto.getHashes().indexOf('sha')); assert.equal(-1, crypto.getHashes().indexOf('SHA1')); diff --git a/test/simple/test-http-timeout-overflow.js b/test/simple/test-http-timeout-overflow.js new file mode 100644 index 00000000000..3e62612b38e --- /dev/null +++ b/test/simple/test-http-timeout-overflow.js @@ -0,0 +1,64 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
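[Annotation] The updated assertions above pin down the new behaviour of the cipher and hash listings: names come back lowercased, de-duplicated, and sorted, and the SSL/TLS suite list moves behind `tls.getCiphers()`. A quick usage sketch of the two APIs as patched:

```
var crypto = require('crypto');
var tls = require('tls');

// OpenSSL cipher names: lowercase aliases only, sorted
console.log(crypto.getCiphers().indexOf('aes-128-cbc') >= 0); // true
console.log(crypto.getCiphers().indexOf('AES-128-CBC'));      // -1

// SSL/TLS cipher suites now live on the tls module
console.log(tls.getCiphers().indexOf('aes256-sha') >= 0);     // true
```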
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + +var common = require('../common'); +var assert = require('assert'); + +var http = require('http'); + +var port = common.PORT; +var serverRequests = 0; +var clientRequests = 0; + +var server = http.createServer(function(req, res) { + serverRequests++; + res.writeHead(200, {'Content-Type': 'text/plain'}); + res.end('OK'); +}); + +server.listen(port, function() { + function callback(){} + + var req = http.request({ + port: port, + path: '/', + agent: false + }, function(res) { + req.clearTimeout(callback); + + res.on('end', function() { + clientRequests++; + server.close(); + }) + + res.resume(); + }); + + // Overflow signed int32 + req.setTimeout(0xffffffff, callback); + req.end(); +}); + +process.once('exit', function() { + assert.equal(clientRequests, 1); + assert.equal(serverRequests, 1); +}); diff --git a/test/simple/test-stream2-transform.js b/test/simple/test-stream2-transform.js index 500c48b8ac0..7a32a3cce8b 100644 --- a/test/simple/test-stream2-transform.js +++ b/test/simple/test-stream2-transform.js @@ -235,6 +235,42 @@ test('assymetric transform (compress)', function(t) { }); }); +// this tests for a stall when data is written to a full stream +// that has empty transforms. +test('complex transform', function(t) { + var count = 0; + var saved = null; + var pt = new Transform({highWaterMark:3}); + pt._transform = function(c, e, cb) { + if (count++ === 1) + saved = c; + else { + if (saved) { + pt.push(saved); + saved = null; + } + pt.push(c); + } + + cb(); + }; + + pt.once('readable', function() { + process.nextTick(function() { + pt.write(new Buffer('d')); + pt.write(new Buffer('ef'), function() { + pt.end(); + t.end(); + }); + t.equal(pt.read().toString(), 'abc'); + t.equal(pt.read().toString(), 'def'); + t.equal(pt.read(), null); + }); + }); + + pt.write(new Buffer('abc')); +}); + test('passthrough event emission', function(t) { var pt = new PassThrough(); diff --git a/test/simple/test-stream2-writable.js b/test/simple/test-stream2-writable.js index 1c1bb97ce87..bdb7df2d04d 100644 --- a/test/simple/test-stream2-writable.js +++ b/test/simple/test-stream2-writable.js @@ -326,3 +326,23 @@ test('end(chunk) two times is an error', function(t) { t.end(); }); }); + +test('dont end while writing', function(t) { + var w = new W(); + var wrote = false; + w._write = function(chunk, e, cb) { + assert(!this.writing); + wrote = true; + this.writing = true; + setTimeout(function() { + this.writing = false; + cb(); + }); + }; + w.on('finish', function() { + assert(wrote); + t.end(); + }); + w.write(Buffer(0)); + w.end(); +}); diff --git a/test/simple/test-timers-ordering.js b/test/simple/test-timers-ordering.js new file mode 100644 index 00000000000..0a32d48d593 --- /dev/null +++ b/test/simple/test-timers-ordering.js @@ -0,0 +1,48 @@ +// Copyright Joyent, Inc. and other Node contributors. 
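[Annotation] The "dont end while writing" test above exercises the extra `!state.writing` check added to `finishMaybe()` in lib/_stream_writable.js: `finish` may no longer fire while a `_write` is still in flight. A condensed sketch of the resulting guarantee:

```
var Writable = require('stream').Writable;

var w = new Writable();
var flushed = false;
w._write = function(chunk, encoding, cb) {
  // complete the write on a later tick; 'finish' must wait for cb()
  setTimeout(function() {
    flushed = true;
    cb();
  }, 10);
};
w.on('finish', function() {
  console.log(flushed); // true, never fires before the write callback
});
w.write('x');
w.end();
```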
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var common = require('../common'); +var assert = require('assert'); +var i; + +var N = 30; + +var last_i = 0; +var last_ts = 0; +var start = Date.now(); + +var f = function(i) { + if (i <= N) { + // check order + assert.equal(i, last_i + 1, 'order is broken: ' + i + ' != ' + last_i + ' + 1'); + last_i = i; + + // check that this iteration is fired at least 1ms later than the previous + var now = Date.now(); + console.log(i, now); + assert(now >= last_ts + 1, 'current ts ' + now + ' < prev ts ' + last_ts + ' + 1'); + last_ts = now; + + // schedule next iteration + setTimeout(f, 1, i + 1); + } +}; +f(1); diff --git a/test/simple/test-zlib-zero-byte.js b/test/simple/test-zlib-zero-byte.js new file mode 100644 index 00000000000..31ac86bd6ab --- /dev/null +++ b/test/simple/test-zlib-zero-byte.js @@ -0,0 +1,48 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
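[Annotation] The ordering test above depends on the corrected comparison in lib/timers.js (`diff < msecs` instead of `diff + 1 < msecs`), which previously let a timer fire up to a millisecond early. The same file also gained a clamp in `enroll()`: timeouts beyond 2^31-1 ms no longer overflow the signed int32 handed to the event loop. A hedged sketch of the clamp via `net.Socket`, which enrolls its timeout through this code path:

```
var net = require('net');

var socket = new net.Socket();
// 0xffffffff ms used to overflow int32 and could fire the timeout
// immediately; enroll() now clamps it to 0x7fffffff (~24.8 days).
socket.setTimeout(0xffffffff, function() {
  console.log('timeout'); // no longer fires right away
});
```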
+ +var common = require('../common'); +var assert = require('assert'); + +var zlib = require('zlib'); +var gz = zlib.Gzip() +var emptyBuffer = new Buffer(0); +var received = 0; +gz.on('data', function(c) { + received += c.length; +}); +var ended = false; +gz.on('end', function() { + ended = true; +}); +var finished = false; +gz.on('finish', function() { + finished = true; +}); +gz.write(emptyBuffer); +gz.end(); + +process.on('exit', function() { + assert.equal(received, 20); + assert(ended); + assert(finished); + console.log('ok'); +}); diff --git a/tools/gyp/AUTHORS b/tools/gyp/AUTHORS index 6db82b9e4bb..89777619605 100644 --- a/tools/gyp/AUTHORS +++ b/tools/gyp/AUTHORS @@ -2,5 +2,7 @@ # Name or Organization Google Inc. +Bloomberg Finance L.P. + Steven Knight Ryan Norton diff --git a/tools/gyp/PRESUBMIT.py b/tools/gyp/PRESUBMIT.py index 0338fb4a966..65235661a4a 100644 --- a/tools/gyp/PRESUBMIT.py +++ b/tools/gyp/PRESUBMIT.py @@ -75,13 +75,20 @@ def CheckChangeOnUpload(input_api, output_api): def CheckChangeOnCommit(input_api, output_api): report = [] + + # Accept any year number from 2009 to the current year. + current_year = int(input_api.time.strftime('%Y')) + allowed_years = (str(s) for s in reversed(xrange(2009, current_year + 1))) + years_re = '(' + '|'.join(allowed_years) + ')' + + # The (c) is deprecated, but tolerate it until it's removed from all files. license = ( - r'.*? Copyright \(c\) %(year)s Google Inc\. All rights reserved\.\n' + r'.*? Copyright (\(c\) )?%(year)s Google Inc\. All rights reserved\.\n' r'.*? Use of this source code is governed by a BSD-style license that ' r'can be\n' r'.*? found in the LICENSE file\.\n' ) % { - 'year': input_api.time.strftime('%Y'), + 'year': years_re, } report.extend(input_api.canned_checks.PanProjectChecks( @@ -106,4 +113,4 @@ def CheckChangeOnCommit(input_api, output_api): def GetPreferredTrySlaves(): - return ['gyp-win32', 'gyp-win64', 'gyp-linux', 'gyp-mac'] + return ['gyp-win32', 'gyp-win64', 'gyp-linux', 'gyp-mac', 'gyp-android'] diff --git a/tools/gyp/buildbot/buildbot_run.py b/tools/gyp/buildbot/buildbot_run.py deleted file mode 100755 index 57fdb655ba0..00000000000 --- a/tools/gyp/buildbot/buildbot_run.py +++ /dev/null @@ -1,98 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - - -"""Argument-less script to select what to run on the buildbots.""" - - -import os -import shutil -import subprocess -import sys - - -if sys.platform in ['win32', 'cygwin']: - EXE_SUFFIX = '.exe' -else: - EXE_SUFFIX = '' - - -BUILDBOT_DIR = os.path.dirname(os.path.abspath(__file__)) -TRUNK_DIR = os.path.dirname(BUILDBOT_DIR) -ROOT_DIR = os.path.dirname(TRUNK_DIR) -OUT_DIR = os.path.join(TRUNK_DIR, 'out') - - -def GypTestFormat(title, format=None, msvs_version=None): - """Run the gyp tests for a given format, emitting annotator tags. - - See annotator docs at: - https://sites.google.com/a/chromium.org/dev/developers/testing/chromium-build-infrastructure/buildbot-annotations - Args: - format: gyp format to test. - Returns: - 0 for sucesss, 1 for failure. 
- """ - if not format: - format = title - - print '@@@BUILD_STEP ' + title + '@@@' - sys.stdout.flush() - env = os.environ.copy() - # TODO(bradnelson): remove this when this issue is resolved: - # http://code.google.com/p/chromium/issues/detail?id=108251 - if format == 'ninja': - env['NOGOLD'] = '1' - if msvs_version: - env['GYP_MSVS_VERSION'] = msvs_version - retcode = subprocess.call(' '.join( - [sys.executable, 'trunk/gyptest.py', - '--all', - '--passed', - '--format', format, - '--chdir', 'trunk', - '--path', '../scons']), - cwd=ROOT_DIR, env=env, shell=True) - if retcode: - # Emit failure tag, and keep going. - print '@@@STEP_FAILURE@@@' - return 1 - return 0 - - -def GypBuild(): - # Dump out/ directory. - print '@@@BUILD_STEP cleanup@@@' - print 'Removing %s...' % OUT_DIR - shutil.rmtree(OUT_DIR, ignore_errors=True) - print 'Done.' - - retcode = 0 - if sys.platform.startswith('linux'): - retcode += GypTestFormat('ninja') - retcode += GypTestFormat('scons') - retcode += GypTestFormat('make') - elif sys.platform == 'darwin': - retcode += GypTestFormat('ninja') - retcode += GypTestFormat('xcode') - retcode += GypTestFormat('make') - elif sys.platform == 'win32': - retcode += GypTestFormat('ninja') - retcode += GypTestFormat('msvs-2008', format='msvs', msvs_version='2008') - if os.environ['BUILDBOT_BUILDERNAME'] == 'gyp-win64': - retcode += GypTestFormat('msvs-2010', format='msvs', msvs_version='2010') - else: - raise Exception('Unknown platform') - if retcode: - # TODO(bradnelson): once the annotator supports a postscript (section for - # after the build proper that could be used for cumulative failures), - # use that instead of this. This isolates the final return value so - # that it isn't misattributed to the last stage. - print '@@@BUILD_STEP failures@@@' - sys.exit(retcode) - - -if __name__ == '__main__': - GypBuild() diff --git a/tools/gyp/data/win/large-pdb-shim.cc b/tools/gyp/data/win/large-pdb-shim.cc new file mode 100644 index 00000000000..8bca510815e --- /dev/null +++ b/tools/gyp/data/win/large-pdb-shim.cc @@ -0,0 +1,12 @@ +// Copyright (c) 2013 Google Inc. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// This file is used to generate an empty .pdb -- with a 4KB pagesize -- that is +// then used during the final link for modules that have large PDBs. Otherwise, +// the linker will generate a pdb with a page size of 1KB, which imposes a limit +// of 1GB on the .pdb. By generating an initial empty .pdb with the compiler +// (rather than the linker), this limit is avoided. With this in place PDBs may +// grow to 2GB. +// +// This file is referenced by the msvs_large_pdb mechanism in MSVSUtil.py. diff --git a/tools/gyp/gyptest.py b/tools/gyp/gyptest.py index 6c6b00944fe..efa75a7aa82 100755 --- a/tools/gyp/gyptest.py +++ b/tools/gyp/gyptest.py @@ -212,6 +212,7 @@ def main(argv=None): format_list = { 'freebsd7': ['make'], 'freebsd8': ['make'], + 'openbsd5': ['make'], 'cygwin': ['msvs'], 'win32': ['msvs', 'ninja'], 'linux2': ['make', 'ninja'], diff --git a/tools/gyp/pylib/gyp/MSVSUtil.py b/tools/gyp/pylib/gyp/MSVSUtil.py new file mode 100644 index 00000000000..5afcd1f2ab6 --- /dev/null +++ b/tools/gyp/pylib/gyp/MSVSUtil.py @@ -0,0 +1,212 @@ +# Copyright (c) 2013 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +"""Utility functions shared amongst the Windows generators.""" + +import copy +import os + + +_TARGET_TYPE_EXT = { + 'executable': '.exe', + 'shared_library': '.dll' +} + + +def _GetLargePdbShimCcPath(): + """Returns the path of the large_pdb_shim.cc file.""" + this_dir = os.path.abspath(os.path.dirname(__file__)) + src_dir = os.path.abspath(os.path.join(this_dir, '..', '..')) + win_data_dir = os.path.join(src_dir, 'data', 'win') + large_pdb_shim_cc = os.path.join(win_data_dir, 'large-pdb-shim.cc') + return large_pdb_shim_cc + + +def _DeepCopySomeKeys(in_dict, keys): + """Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|. + + Arguments: + in_dict: The dictionary to copy. + keys: The keys to be copied. If a key is in this list and doesn't exist in + |in_dict| this is not an error. + Returns: + The partially deep-copied dictionary. + """ + d = {} + for key in keys: + if key not in in_dict: + continue + d[key] = copy.deepcopy(in_dict[key]) + return d + + +def _SuffixName(name, suffix): + """Add a suffix to the end of a target. + + Arguments: + name: name of the target (foo#target) + suffix: the suffix to be added + Returns: + Target name with suffix added (foo_suffix#target) + """ + parts = name.rsplit('#', 1) + parts[0] = '%s_%s' % (parts[0], suffix) + return '#'.join(parts) + + +def _ShardName(name, number): + """Add a shard number to the end of a target. + + Arguments: + name: name of the target (foo#target) + number: shard number + Returns: + Target name with shard added (foo_1#target) + """ + return _SuffixName(name, str(number)) + + +def ShardTargets(target_list, target_dicts): + """Shard some targets apart to work around the linkers limits. + + Arguments: + target_list: List of target pairs: 'base/base.gyp:base'. + target_dicts: Dict of target properties keyed on target pair. + Returns: + Tuple of the new sharded versions of the inputs. + """ + # Gather the targets to shard, and how many pieces. + targets_to_shard = {} + for t in target_dicts: + shards = int(target_dicts[t].get('msvs_shard', 0)) + if shards: + targets_to_shard[t] = shards + # Shard target_list. + new_target_list = [] + for t in target_list: + if t in targets_to_shard: + for i in range(targets_to_shard[t]): + new_target_list.append(_ShardName(t, i)) + else: + new_target_list.append(t) + # Shard target_dict. + new_target_dicts = {} + for t in target_dicts: + if t in targets_to_shard: + for i in range(targets_to_shard[t]): + name = _ShardName(t, i) + new_target_dicts[name] = copy.copy(target_dicts[t]) + new_target_dicts[name]['target_name'] = _ShardName( + new_target_dicts[name]['target_name'], i) + sources = new_target_dicts[name].get('sources', []) + new_sources = [] + for pos in range(i, len(sources), targets_to_shard[t]): + new_sources.append(sources[pos]) + new_target_dicts[name]['sources'] = new_sources + else: + new_target_dicts[t] = target_dicts[t] + # Shard dependencies. + for t in new_target_dicts: + dependencies = copy.copy(new_target_dicts[t].get('dependencies', [])) + new_dependencies = [] + for d in dependencies: + if d in targets_to_shard: + for i in range(targets_to_shard[d]): + new_dependencies.append(_ShardName(d, i)) + else: + new_dependencies.append(d) + new_target_dicts[t]['dependencies'] = new_dependencies + + return (new_target_list, new_target_dicts) + + +def InsertLargePdbShims(target_list, target_dicts, vars): + """Insert a shim target that forces the linker to use 4KB pagesize PDBs. 
+ + This is a workaround for targets with PDBs greater than 1GB in size, the + limit for the 1KB pagesize PDBs created by the linker by default. + + Arguments: + target_list: List of target pairs: 'base/base.gyp:base'. + target_dicts: Dict of target properties keyed on target pair. + vars: A dictionary of common GYP variables with generator-specific values. + Returns: + Tuple of the shimmed version of the inputs. + """ + # Determine which targets need shimming. + targets_to_shim = [] + for t in target_dicts: + target_dict = target_dicts[t] + # We only want to shim targets that have msvs_large_pdb enabled. + if not int(target_dict.get('msvs_large_pdb', 0)): + continue + # This is intended for executable, shared_library and loadable_module + # targets where every configuration is set up to produce a PDB output. + # If any of these conditions is not true then the shim logic will fail + # below. + targets_to_shim.append(t) + + large_pdb_shim_cc = _GetLargePdbShimCcPath() + + for t in targets_to_shim: + target_dict = target_dicts[t] + target_name = target_dict.get('target_name') + + base_dict = _DeepCopySomeKeys(target_dict, + ['configurations', 'default_configuration', 'toolset']) + + # This is the dict for copying the source file (part of the GYP tree) + # to the intermediate directory of the project. This is necessary because + # we can't always build a relative path to the shim source file (on Windows + # GYP and the project may be on different drives), and Ninja hates absolute + # paths (it ends up generating the .obj and .obj.d alongside the source + # file, polluting GYPs tree). + copy_suffix = '_large_pdb_copy' + copy_target_name = target_name + '_' + copy_suffix + full_copy_target_name = _SuffixName(t, copy_suffix) + shim_cc_basename = os.path.basename(large_pdb_shim_cc) + shim_cc_dir = vars['SHARED_INTERMEDIATE_DIR'] + '/' + copy_target_name + shim_cc_path = shim_cc_dir + '/' + shim_cc_basename + copy_dict = copy.deepcopy(base_dict) + copy_dict['target_name'] = copy_target_name + copy_dict['type'] = 'none' + copy_dict['sources'] = [ large_pdb_shim_cc ] + copy_dict['copies'] = [{ + 'destination': shim_cc_dir, + 'files': [ large_pdb_shim_cc ] + }] + + # This is the dict for the PDB generating shim target. It depends on the + # copy target. + shim_suffix = '_large_pdb_shim' + shim_target_name = target_name + '_' + shim_suffix + full_shim_target_name = _SuffixName(t, shim_suffix) + shim_dict = copy.deepcopy(base_dict) + shim_dict['target_name'] = shim_target_name + shim_dict['type'] = 'static_library' + shim_dict['sources'] = [ shim_cc_path ] + shim_dict['dependencies'] = [ full_copy_target_name ] + + # Set up the shim to output its PDB to the same location as the final linker + # target. + for config in shim_dict.get('configurations').itervalues(): + msvs = config.setdefault('msvs_settings') + + linker = msvs.pop('VCLinkerTool') # We want to clear this dict. + pdb_path = linker.get('ProgramDatabaseFile') + + compiler = msvs.setdefault('VCCLCompilerTool', {}) + compiler.setdefault('DebugInformationFormat', '3') + compiler.setdefault('ProgramDataBaseFileName', pdb_path) + + # Add the new targets. + target_list.append(full_copy_target_name) + target_list.append(full_shim_target_name) + target_dicts[full_copy_target_name] = copy_dict + target_dicts[full_shim_target_name] = shim_dict + + # Update the original target to depend on the shim target. 
+ target_dict.setdefault('dependencies', []).append(full_shim_target_name) + + return (target_list, target_dicts) \ No newline at end of file diff --git a/tools/gyp/pylib/gyp/MSVSVersion.py b/tools/gyp/pylib/gyp/MSVSVersion.py index 97caf669801..2d95cd0c9ec 100644 --- a/tools/gyp/pylib/gyp/MSVSVersion.py +++ b/tools/gyp/pylib/gyp/MSVSVersion.py @@ -1,4 +1,4 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. +# Copyright (c) 2013 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -355,6 +355,13 @@ def SelectVisualStudioVersion(version='auto'): '2012': ('11.0',), '2012e': ('11.0',), } + override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH') + if override_path: + msvs_version = os.environ.get('GYP_MSVS_VERSION') + if not msvs_version or 'e' not in msvs_version: + raise ValueError('GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be ' + 'set to an "e" version (e.g. 2010e)') + return _CreateVersion(msvs_version, override_path, sdk_based=True) version = str(version) versions = _DetectVisualStudioVersions(version_map[version], 'e' in version) if not versions: diff --git a/tools/gyp/pylib/gyp/__init__.py b/tools/gyp/pylib/gyp/__init__.py index ac300a903c0..3769c526522 100755 --- a/tools/gyp/pylib/gyp/__init__.py +++ b/tools/gyp/pylib/gyp/__init__.py @@ -23,8 +23,8 @@ DEBUG_VARIABLES = 'variables' DEBUG_INCLUDES = 'includes' -def DebugOutput(mode, message): - if 'all' in gyp.debug.keys() or mode in gyp.debug.keys(): +def DebugOutput(mode, message, *args): + if 'all' in gyp.debug or mode in gyp.debug: ctx = ('unknown', 0, 'unknown') try: f = traceback.extract_stack(limit=2) @@ -32,6 +32,8 @@ def DebugOutput(mode, message): ctx = f[0][:3] except: pass + if args: + message %= args print '%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]), ctx[1], ctx[2], message) @@ -376,21 +378,22 @@ def gyp_main(args): options.generator_output = g_o if not options.parallel and options.use_environment: - options.parallel = bool(os.environ.get('GYP_PARALLEL')) + p = os.environ.get('GYP_PARALLEL') + options.parallel = bool(p and p != '0') for mode in options.debug: gyp.debug[mode] = 1 # Do an extra check to avoid work when we're not debugging. - if DEBUG_GENERAL in gyp.debug.keys(): + if DEBUG_GENERAL in gyp.debug: DebugOutput(DEBUG_GENERAL, 'running with these options:') for option, value in sorted(options.__dict__.items()): if option[0] == '_': continue if isinstance(value, basestring): - DebugOutput(DEBUG_GENERAL, " %s: '%s'" % (option, value)) + DebugOutput(DEBUG_GENERAL, " %s: '%s'", option, value) else: - DebugOutput(DEBUG_GENERAL, " %s: %s" % (option, str(value))) + DebugOutput(DEBUG_GENERAL, " %s: %s", option, value) if not build_files: build_files = FindBuildFiles() @@ -440,9 +443,9 @@ def gyp_main(args): if options.defines: defines += options.defines cmdline_default_variables = NameValueListToDict(defines) - if DEBUG_GENERAL in gyp.debug.keys(): + if DEBUG_GENERAL in gyp.debug: DebugOutput(DEBUG_GENERAL, - "cmdline_default_variables: %s" % cmdline_default_variables) + "cmdline_default_variables: %s", cmdline_default_variables) # Set up includes. 
includes = [] @@ -468,7 +471,7 @@ def gyp_main(args): gen_flags += options.generator_flags generator_flags = NameValueListToDict(gen_flags) if DEBUG_GENERAL in gyp.debug.keys(): - DebugOutput(DEBUG_GENERAL, "generator_flags: %s" % generator_flags) + DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags) # TODO: Remove this and the option after we've gotten folks to move to the # generator flag. diff --git a/tools/gyp/pylib/gyp/common.py b/tools/gyp/pylib/gyp/common.py index e917a59a3c5..e50f51c3079 100644 --- a/tools/gyp/pylib/gyp/common.py +++ b/tools/gyp/pylib/gyp/common.py @@ -127,9 +127,9 @@ def RelativePath(path, relative_to): # directory, returns a relative path that identifies path relative to # relative_to. - # Convert to absolute (and therefore normalized paths). - path = os.path.abspath(path) - relative_to = os.path.abspath(relative_to) + # Convert to normalized (and therefore absolute paths). + path = os.path.realpath(path) + relative_to = os.path.realpath(relative_to) # Split the paths into components. path_split = path.split(os.path.sep) @@ -151,6 +151,20 @@ def RelativePath(path, relative_to): return os.path.join(*relative_split) +@memoize +def InvertRelativePath(path, toplevel_dir=None): + """Given a path like foo/bar that is relative to toplevel_dir, return + the inverse relative path back to the toplevel_dir. + + E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path))) + should always produce the empty string, unless the path contains symlinks. + """ + if not path: + return path + toplevel_dir = '.' if toplevel_dir is None else toplevel_dir + return RelativePath(toplevel_dir, os.path.join(toplevel_dir, path)) + + def FixIfRelativePath(path, relative_to): # Like RelativePath but returns |path| unchanged if it is absolute. 
if os.path.isabs(path): @@ -378,8 +392,10 @@ def GetFlavor(params): return 'solaris' if sys.platform.startswith('freebsd'): return 'freebsd' - if sys.platform.startswith('dragonfly'): - return 'dragonflybsd' + if sys.platform.startswith('openbsd'): + return 'openbsd' + if sys.platform.startswith('aix'): + return 'aix' return 'linux' diff --git a/tools/gyp/pylib/gyp/common_test.py b/tools/gyp/pylib/gyp/common_test.py index 7fbac09d0fe..ad6f9a1438f 100755 --- a/tools/gyp/pylib/gyp/common_test.py +++ b/tools/gyp/pylib/gyp/common_test.py @@ -56,13 +56,13 @@ class TestGetFlavor(unittest.TestCase): self.assertEqual(expected, gyp.common.GetFlavor(param)) def test_platform_default(self): - self.assertFlavor('dragonflybsd', 'dragonfly3', {}) - self.assertFlavor('freebsd' , 'freebsd9' , {}) - self.assertFlavor('freebsd' , 'freebsd10' , {}) - self.assertFlavor('solaris' , 'sunos5' , {}); - self.assertFlavor('solaris' , 'sunos' , {}); - self.assertFlavor('linux' , 'linux2' , {}); - self.assertFlavor('linux' , 'linux3' , {}); + self.assertFlavor('freebsd', 'freebsd9' , {}) + self.assertFlavor('freebsd', 'freebsd10', {}) + self.assertFlavor('openbsd', 'openbsd5' , {}) + self.assertFlavor('solaris', 'sunos5' , {}); + self.assertFlavor('solaris', 'sunos' , {}); + self.assertFlavor('linux' , 'linux2' , {}); + self.assertFlavor('linux' , 'linux3' , {}); def test_param(self): self.assertFlavor('foobar', 'linux2' , {'flavor': 'foobar'}) diff --git a/tools/gyp/pylib/gyp/generator/android.py b/tools/gyp/pylib/gyp/generator/android.py index 872ec844c8c..a01ead020d4 100644 --- a/tools/gyp/pylib/gyp/generator/android.py +++ b/tools/gyp/pylib/gyp/generator/android.py @@ -19,6 +19,7 @@ import gyp.common import gyp.generator.make as make # Reuse global functions from make backend. import os import re +import subprocess generator_default_variables = { 'OS': 'android', @@ -38,7 +39,7 @@ generator_default_variables = { 'RULE_INPUT_PATH': '$(RULE_SOURCES)', 'RULE_INPUT_EXT': '$(suffix $<)', 'RULE_INPUT_NAME': '$(notdir $<)', - 'CONFIGURATION_NAME': 'NOT_USED_ON_ANDROID', + 'CONFIGURATION_NAME': '$(GYP_DEFAULT_CONFIGURATION)', } # Make supports multiple toolsets @@ -131,12 +132,13 @@ class AndroidMkWriter(object): def __init__(self, android_top_dir): self.android_top_dir = android_top_dir - def Write(self, qualified_target, base_path, output_filename, spec, configs, - part_of_all): + def Write(self, qualified_target, relative_target, base_path, output_filename, + spec, configs, part_of_all): """The main entry point: writes a .mk file for a single target. 
Arguments: qualified_target: target we're generating + relative_target: qualified target name relative to the root base_path: path relative to source root we're building in, used to resolve target-relative paths output_filename: output .mk file name to write @@ -150,6 +152,7 @@ class AndroidMkWriter(object): self.fp.write(header) self.qualified_target = qualified_target + self.relative_target = relative_target self.path = base_path self.target = spec['target_name'] self.type = spec['type'] @@ -248,7 +251,7 @@ class AndroidMkWriter(object): actions) """ for action in actions: - name = make.StringToMakefileVariable('%s_%s' % (self.qualified_target, + name = make.StringToMakefileVariable('%s_%s' % (self.relative_target, action['action_name'])) self.WriteLn('### Rules for action "%s":' % action['action_name']) inputs = action['inputs'] @@ -295,6 +298,15 @@ class AndroidMkWriter(object): '$(GYP_ABS_ANDROID_TOP_DIR)/$(gyp_shared_intermediate_dir)' % main_output) + # Android's envsetup.sh adds a number of directories to the path including + # the built host binary directory. This causes actions/rules invoked by + # gyp to sometimes use these instead of system versions, e.g. bison. + # The built host binaries may not be suitable, and can cause errors. + # So, we remove them from the PATH using the ANDROID_BUILD_PATHS variable + # set by envsetup. + self.WriteLn('%s: export PATH := $(subst $(ANDROID_BUILD_PATHS),,$(PATH))' + % main_output) + for input in inputs: assert ' ' not in input, ( "Spaces in action input filenames not supported (%s)" % input) @@ -334,7 +346,7 @@ class AndroidMkWriter(object): if len(rule.get('rule_sources', [])) == 0: continue did_write_rule = True - name = make.StringToMakefileVariable('%s_%s' % (self.qualified_target, + name = make.StringToMakefileVariable('%s_%s' % (self.relative_target, rule['rule_name'])) self.WriteLn('\n### Generated for rule "%s":' % name) self.WriteLn('# "%s":' % rule) @@ -388,6 +400,10 @@ class AndroidMkWriter(object): '$(GYP_ABS_ANDROID_TOP_DIR)/$(gyp_shared_intermediate_dir)' % main_output) + # See explanation in WriteActions. + self.WriteLn('%s: export PATH := ' + '$(subst $(ANDROID_BUILD_PATHS),,$(PATH))' % main_output) + main_output_deps = self.LocalPathify(rule_source) if inputs: main_output_deps += ' ' @@ -415,7 +431,7 @@ class AndroidMkWriter(object): """ self.WriteLn('### Generated for copy rule.') - variable = make.StringToMakefileVariable(self.qualified_target + '_copies') + variable = make.StringToMakefileVariable(self.relative_target + '_copies') outputs = [] for copy in copies: for path in copy['files']: @@ -940,30 +956,16 @@ class AndroidMkWriter(object): return path -def WriteAutoRegenerationRule(params, root_makefile, makefile_name, - build_files): - """Write the target to regenerate the Makefile.""" +def PerformBuild(data, configurations, params): + # The android backend only supports the default configuration. options = params['options'] - # Sort to avoid non-functional changes to makefile. 
- build_files = sorted([os.path.join('$(LOCAL_PATH)', f) for f in build_files]) - build_files_args = [gyp.common.RelativePath(filename, options.toplevel_dir) - for filename in params['build_files_arg']] - build_files_args = [os.path.join('$(PRIVATE_LOCAL_PATH)', f) - for f in build_files_args] - gyp_binary = gyp.common.FixIfRelativePath(params['gyp_binary'], - options.toplevel_dir) - makefile_path = os.path.join('$(LOCAL_PATH)', makefile_name) - if not gyp_binary.startswith(os.sep): - gyp_binary = os.path.join('.', gyp_binary) - root_makefile.write('GYP_FILES := \\\n %s\n\n' % - '\\\n '.join(map(Sourceify, build_files))) - root_makefile.write('%s: PRIVATE_LOCAL_PATH := $(LOCAL_PATH)\n' % - makefile_path) - root_makefile.write('%s: $(GYP_FILES)\n' % makefile_path) - root_makefile.write('\techo ACTION Regenerating $@\n\t%s\n\n' % - gyp.common.EncodePOSIXShellList([gyp_binary, '-fandroid'] + - gyp.RegenerateFlags(options) + - build_files_args)) + makefile = os.path.abspath(os.path.join(options.toplevel_dir, + 'GypAndroid.mk')) + env = dict(os.environ) + env['ONE_SHOT_MAKEFILE'] = makefile + arguments = ['make', '-C', os.environ['ANDROID_BUILD_TOP'], 'gyp_all_modules'] + print 'Building: %s' % arguments + subprocess.check_call(arguments, env=env) def GenerateOutput(target_list, target_dicts, data, params): @@ -1030,7 +1032,9 @@ def GenerateOutput(target_list, target_dicts, data, params): for qualified_target in target_list: build_file, target, toolset = gyp.common.ParseQualifiedTarget( qualified_target) - build_files.add(gyp.common.RelativePath(build_file, options.toplevel_dir)) + relative_build_file = gyp.common.RelativePath(build_file, + options.toplevel_dir) + build_files.add(relative_build_file) included_files = data[build_file]['included_files'] for included_file in included_files: # The included_files entries are relative to the dir of the build file @@ -1058,9 +1062,13 @@ def GenerateOutput(target_list, target_dicts, data, params): not int(spec.get('suppress_wildcard', False))) if limit_to_target_all and not part_of_all: continue + + relative_target = gyp.common.QualifiedTarget(relative_build_file, target, + toolset) writer = AndroidMkWriter(android_top_dir) - android_module = writer.Write(qualified_target, base_path, output_file, - spec, configs, part_of_all=part_of_all) + android_module = writer.Write(qualified_target, relative_target, base_path, + output_file, spec, configs, + part_of_all=part_of_all) if android_module in android_modules: print ('ERROR: Android module names must be unique. The following ' 'targets both generate Android module name %s.\n %s\n %s' % @@ -1077,6 +1085,8 @@ def GenerateOutput(target_list, target_dicts, data, params): # Some tools need to know the absolute path of the top directory. root_makefile.write('GYP_ABS_ANDROID_TOP_DIR := $(shell pwd)\n') + root_makefile.write('GYP_DEFAULT_CONFIGURATION := %s\n' % + default_configuration) # Write out the sorted list of includes. 
root_makefile.write('\n') @@ -1084,9 +1094,6 @@ def GenerateOutput(target_list, target_dicts, data, params): root_makefile.write('include $(LOCAL_PATH)/' + include_file + '\n') root_makefile.write('\n') - if generator_flags.get('auto_regeneration', True): - WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files) - root_makefile.write(SHARED_FOOTER) root_makefile.close() diff --git a/tools/gyp/pylib/gyp/generator/eclipse.py b/tools/gyp/pylib/gyp/generator/eclipse.py index 0f90b5ea608..08425da8e89 100644 --- a/tools/gyp/pylib/gyp/generator/eclipse.py +++ b/tools/gyp/pylib/gyp/generator/eclipse.py @@ -41,11 +41,11 @@ for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME', 'CONFIGURATION_NAME']: generator_default_variables[unused] = '' -# Include dirs will occasionaly use the SHARED_INTERMEDIATE_DIR variable as +# Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as # part of the path when dealing with generated headers. This value will be # replaced dynamically for each configuration. generator_default_variables['SHARED_INTERMEDIATE_DIR'] = \ - '$SHARED_INTERMEDIATES_DIR' + '$SHARED_INTERMEDIATE_DIR' def CalculateVariables(default_variables, params): @@ -65,7 +65,7 @@ def CalculateGeneratorInputInfo(params): def GetAllIncludeDirectories(target_list, target_dicts, - shared_intermediates_dir, config_name): + shared_intermediate_dirs, config_name): """Calculate the set of include directories to be used. Returns: @@ -96,17 +96,18 @@ def GetAllIncludeDirectories(target_list, target_dicts, # Find standard gyp include dirs. if config.has_key('include_dirs'): include_dirs = config['include_dirs'] - for include_dir in include_dirs: - include_dir = include_dir.replace('$SHARED_INTERMEDIATES_DIR', - shared_intermediates_dir) - if not os.path.isabs(include_dir): - base_dir = os.path.dirname(target_name) + for shared_intermediate_dir in shared_intermediate_dirs: + for include_dir in include_dirs: + include_dir = include_dir.replace('$SHARED_INTERMEDIATE_DIR', + shared_intermediate_dir) + if not os.path.isabs(include_dir): + base_dir = os.path.dirname(target_name) - include_dir = base_dir + '/' + include_dir - include_dir = os.path.abspath(include_dir) + include_dir = base_dir + '/' + include_dir + include_dir = os.path.abspath(include_dir) - if not include_dir in gyp_includes_set: - gyp_includes_set.add(include_dir) + if not include_dir in gyp_includes_set: + gyp_includes_set.add(include_dir) # Generate a list that has all the include dirs. @@ -234,7 +235,10 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name) toplevel_build = os.path.join(options.toplevel_dir, build_dir) - shared_intermediate_dir = os.path.join(toplevel_build, 'obj', 'gen') + # Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the + # SHARED_INTERMEDIATE_DIR. Include both possible locations. 
+ shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'), + os.path.join(toplevel_build, 'gen')] if not os.path.exists(toplevel_build): os.makedirs(toplevel_build) @@ -246,7 +250,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File', 'GNU C++', 'GNU C', 'Assembly'] include_dirs = GetAllIncludeDirectories(target_list, target_dicts, - shared_intermediate_dir, config_name) + shared_intermediate_dirs, config_name) WriteIncludePaths(out, eclipse_langs, include_dirs) defines = GetAllDefines(target_list, target_dicts, data, config_name) WriteMacros(out, eclipse_langs, defines) diff --git a/tools/gyp/pylib/gyp/generator/make.py b/tools/gyp/pylib/gyp/generator/make.py index bcc2cc619de..9806c64a8f9 100644 --- a/tools/gyp/pylib/gyp/generator/make.py +++ b/tools/gyp/pylib/gyp/generator/make.py @@ -259,7 +259,7 @@ all_deps := # export LINK=g++ # # This will allow make to invoke N linker processes as specified in -jN. -LINK ?= %(flock)s $(builddir)/linker.lock $(CXX) +LINK ?= %(flock)s $(builddir)/linker.lock $(CXX.target) CC.target ?= %(CC.target)s CFLAGS.target ?= $(CFLAGS) @@ -395,15 +395,14 @@ command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\\ # $| -- order-only dependencies prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) -# Helper that executes all postbuilds, and deletes the output file when done -# if any of the postbuilds failed. +# Helper that executes all postbuilds until one fails. define do_postbuilds @E=0;\\ for p in $(POSTBUILDS); do\\ eval $$p;\\ - F=$$?;\\ - if [ $$F -ne 0 ]; then\\ - E=$$F;\\ + E=$$?;\\ + if [ $$E -ne 0 ]; then\\ + break;\\ fi;\\ done;\\ if [ $$E -ne 0 ]; then\\ @@ -619,21 +618,6 @@ def QuoteSpaces(s, quote=r'\ '): return s.replace(' ', quote) -def InvertRelativePath(path): - """Given a relative path like foo/bar, return the inverse relative path: - the path from the relative path back to the origin dir. - - E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path))) - should always produce the empty string.""" - - if not path: - return path - # Only need to handle relative paths into subdirectories for now. - assert '..' not in path, path - depth = len(path.split(os.path.sep)) - return os.path.sep.join(['..'] * depth) - - # Map from qualified target to path to output. target_outputs = {} # Map from qualified target to any linkable output. A subset @@ -1417,7 +1401,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD lambda p: Sourceify(self.Absolutify(p))) # TARGET_POSTBUILDS_$(BUILDTYPE) is added to postbuilds later on. 
- gyp_to_build = InvertRelativePath(self.path) + gyp_to_build = gyp.common.InvertRelativePath(self.path) target_postbuild = self.xcode_settings.GetTargetPostbuilds( configname, QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build, @@ -1541,7 +1525,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD for link_dep in link_deps: assert ' ' not in link_dep, ( "Spaces in alink input filenames not supported (%s)" % link_dep) - if (self.flavor not in ('mac', 'win') and not + if (self.flavor not in ('mac', 'openbsd', 'win') and not self.is_standalone_static_library): self.WriteDoCmd([self.output_binary], link_deps, 'alink_thin', part_of_all, postbuilds=postbuilds) @@ -2000,7 +1984,8 @@ def GenerateOutput(target_list, target_dicts, data, params): 'flock_index': 2, 'extra_commands': SHARED_HEADER_SUN_COMMANDS, }) - elif flavor == 'freebsd' or flavor == 'dragonflybsd': + elif flavor == 'freebsd': + # Note: OpenBSD has sysutils/flock. lockf seems to be FreeBSD specific. header_params.update({ 'flock': 'lockf', }) @@ -2018,14 +2003,22 @@ def GenerateOutput(target_list, target_dicts, data, params): build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0]) make_global_settings_array = data[build_file].get('make_global_settings', []) + wrappers = {} + wrappers['LINK'] = '%s $(builddir)/linker.lock' % flock_command + for key, value in make_global_settings_array: + if key.endswith('_wrapper'): + wrappers[key[:-len('_wrapper')]] = '$(abspath %s)' % value make_global_settings = '' for key, value in make_global_settings_array: + if re.match('.*_wrapper', key): + continue if value[0] != '$': value = '$(abspath %s)' % value - if key == 'LINK': - make_global_settings += ('%s ?= %s $(builddir)/linker.lock %s\n' % - (key, flock_command, value)) - elif key in ('CC', 'CC.host', 'CXX', 'CXX.host'): + wrapper = wrappers.get(key) + if wrapper: + value = '%s %s' % (wrapper, value) + del wrappers[key] + if key in ('CC', 'CC.host', 'CXX', 'CXX.host'): make_global_settings += ( 'ifneq (,$(filter $(origin %s), undefined default))\n' % key) # Let gyp-time envvars win over global settings. @@ -2035,6 +2028,9 @@ def GenerateOutput(target_list, target_dicts, data, params): make_global_settings += 'endif\n' else: make_global_settings += '%s ?= %s\n' % (key, value) + # TODO(ukai): define cmd when only wrapper is specified in + # make_global_settings. + header_params['make_global_settings'] = make_global_settings ensure_directory_exists(makefile_path) diff --git a/tools/gyp/pylib/gyp/generator/msvs.py b/tools/gyp/pylib/gyp/generator/msvs.py index 47cbd36ec69..51acf2eb3e8 100644 --- a/tools/gyp/pylib/gyp/generator/msvs.py +++ b/tools/gyp/pylib/gyp/generator/msvs.py @@ -17,6 +17,7 @@ import gyp.MSVSProject as MSVSProject import gyp.MSVSSettings as MSVSSettings import gyp.MSVSToolFile as MSVSToolFile import gyp.MSVSUserFile as MSVSUserFile +import gyp.MSVSUtil as MSVSUtil import gyp.MSVSVersion as MSVSVersion from gyp.common import GypError @@ -63,6 +64,7 @@ generator_additional_path_sections = [ generator_additional_non_configuration_keys = [ 'msvs_cygwin_dirs', 'msvs_cygwin_shell', + 'msvs_large_pdb', 'msvs_shard', ] @@ -204,6 +206,10 @@ def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None, def _ToolAppend(tools, tool_name, setting, value, only_if_unset=False): if not value: return + _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset) + + +def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False): # TODO(bradnelson): ugly hack, fix this more generally!!! 
if 'Directories' in setting or 'Dependencies' in setting: if type(value) == str: @@ -232,7 +238,7 @@ def _ConfigPlatform(config_data): def _ConfigBaseName(config_name, platform_name): if config_name.endswith('_' + platform_name): - return config_name[0:-len(platform_name)-1] + return config_name[0:-len(platform_name) - 1] else: return config_name @@ -270,7 +276,7 @@ def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path, '`cygpath -m "${INPUTPATH}"`') for i in direct_cmd] direct_cmd = ['\\"%s\\"' % i.replace('"', '\\\\\\"') for i in direct_cmd] - #direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd) + # direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd) direct_cmd = ' '.join(direct_cmd) # TODO(quote): regularize quoting path names throughout the module cmd = '' @@ -306,7 +312,7 @@ def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path, # If the argument starts with a slash or dash, it's probably a command line # switch arguments = [i if (i[:1] in "/-") else _FixPath(i) for i in cmd[1:]] - arguments = [i.replace('$(InputDir)','%INPUTDIR%') for i in arguments] + arguments = [i.replace('$(InputDir)', '%INPUTDIR%') for i in arguments] arguments = [MSVSSettings.FixVCMacroSlashes(i) for i in arguments] if quote_cmd: # Support a mode for using cmd directly. @@ -720,7 +726,7 @@ def _EscapeCommandLineArgumentForMSBuild(s): """Escapes a Windows command-line argument for use by MSBuild.""" def _Replace(match): - return (len(match.group(1))/2*4)*'\\' + '\\"' + return (len(match.group(1)) / 2 * 4) * '\\' + '\\"' # Escape all quotes so that they are interpreted literally. s = quote_replacer_regex2.sub(_Replace, s) @@ -1001,12 +1007,12 @@ def _GetMSVSConfigurationType(spec, build_file): }[spec['type']] except KeyError: if spec.get('type'): - raise Exception('Target type %s is not a valid target type for ' - 'target %s in %s.' % - (spec['type'], spec['target_name'], build_file)) + raise GypError('Target type %s is not a valid target type for ' + 'target %s in %s.' % + (spec['type'], spec['target_name'], build_file)) else: - raise Exception('Missing type field for target %s in %s.' % - (spec['target_name'], build_file)) + raise GypError('Missing type field for target %s in %s.' % + (spec['target_name'], build_file)) return config_type @@ -1041,6 +1047,10 @@ def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config): # Add in user specified msvs_settings. msvs_settings = config.get('msvs_settings', {}) MSVSSettings.ValidateMSVSSettings(msvs_settings) + + # Prevent default library inheritance from the environment. + _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', ['$(NOINHERIT)']) + for tool in msvs_settings: settings = config['msvs_settings'][tool] for setting in settings: @@ -1663,7 +1673,7 @@ def _CreateProjectObjects(target_list, target_dicts, options, msvs_version): for qualified_target in target_list: spec = target_dicts[qualified_target] if spec['toolset'] != 'target': - raise Exception( + raise GypError( 'Multiple toolsets not supported in msvs build (target %s)' % qualified_target) proj_path, fixpath_prefix = _GetPathOfProject(qualified_target, spec, @@ -1718,74 +1728,6 @@ def CalculateVariables(default_variables, params): default_variables['MSVS_OS_BITS'] = 32 -def _ShardName(name, number): - """Add a shard number to the end of a target. 
- - Arguments: - name: name of the target (foo#target) - number: shard number - Returns: - Target name with shard added (foo_1#target) - """ - parts = name.rsplit('#', 1) - parts[0] = '%s_%d' % (parts[0], number) - return '#'.join(parts) - - -def _ShardTargets(target_list, target_dicts): - """Shard some targets apart to work around the linkers limits. - - Arguments: - target_list: List of target pairs: 'base/base.gyp:base'. - target_dicts: Dict of target properties keyed on target pair. - Returns: - Tuple of the new sharded versions of the inputs. - """ - # Gather the targets to shard, and how many pieces. - targets_to_shard = {} - for t in target_dicts: - shards = int(target_dicts[t].get('msvs_shard', 0)) - if shards: - targets_to_shard[t] = shards - # Shard target_list. - new_target_list = [] - for t in target_list: - if t in targets_to_shard: - for i in range(targets_to_shard[t]): - new_target_list.append(_ShardName(t, i)) - else: - new_target_list.append(t) - # Shard target_dict. - new_target_dicts = {} - for t in target_dicts: - if t in targets_to_shard: - for i in range(targets_to_shard[t]): - name = _ShardName(t, i) - new_target_dicts[name] = copy.copy(target_dicts[t]) - new_target_dicts[name]['target_name'] = _ShardName( - new_target_dicts[name]['target_name'], i) - sources = new_target_dicts[name].get('sources', []) - new_sources = [] - for pos in range(i, len(sources), targets_to_shard[t]): - new_sources.append(sources[pos]) - new_target_dicts[name]['sources'] = new_sources - else: - new_target_dicts[t] = target_dicts[t] - # Shard dependencies. - for t in new_target_dicts: - dependencies = copy.copy(new_target_dicts[t].get('dependencies', [])) - new_dependencies = [] - for d in dependencies: - if d in targets_to_shard: - for i in range(targets_to_shard[d]): - new_dependencies.append(_ShardName(d, i)) - else: - new_dependencies.append(d) - new_target_dicts[t]['dependencies'] = new_dependencies - - return (new_target_list, new_target_dicts) - - def PerformBuild(data, configurations, params): options = params['options'] msvs_version = params['msvs_version'] @@ -1825,7 +1767,12 @@ def GenerateOutput(target_list, target_dicts, data, params): generator_flags = params.get('generator_flags', {}) # Optionally shard targets marked with 'msvs_shard': SHARD_COUNT. - (target_list, target_dicts) = _ShardTargets(target_list, target_dicts) + (target_list, target_dicts) = MSVSUtil.ShardTargets(target_list, target_dicts) + + # Optionally use the large PDB workaround for targets marked with + # 'msvs_large_pdb': 1. + (target_list, target_dicts) = MSVSUtil.InsertLargePdbShims( + target_list, target_dicts, generator_default_variables) # Prepare the set of configurations. configs = set() @@ -1872,9 +1819,9 @@ def GenerateOutput(target_list, target_dicts, data, params): error_message = "Missing input files:\n" + \ '\n'.join(set(missing_sources)) if generator_flags.get('msvs_error_on_missing_sources', False): - raise Exception(error_message) + raise GypError(error_message) else: - print >>sys.stdout, "Warning: " + error_message + print >> sys.stdout, "Warning: " + error_message def _GenerateMSBuildFiltersFile(filters_path, source_files, @@ -2809,8 +2756,10 @@ def _FinalizeMSBuildSettings(spec, configuration): 'AdditionalIncludeDirectories', include_dirs) _ToolAppend(msbuild_settings, 'ResourceCompile', 'AdditionalIncludeDirectories', resource_include_dirs) - # Add in libraries. 
-  _ToolAppend(msbuild_settings, 'Link', 'AdditionalDependencies', libraries)
+  # Add in libraries; note that even for empty libraries, we want this
+  # set, to prevent inheriting default libraries from the environment.
+  _ToolSetOrAppend(msbuild_settings, 'Link', 'AdditionalDependencies',
+                   libraries)
   if out_file:
     _ToolAppend(msbuild_settings, msbuild_tool, 'OutputFile', out_file,
                 only_if_unset=True)
@@ -2844,8 +2793,7 @@ def _GetValueFormattedForMSBuild(tool_name, name, value):
   if type(value) == list:
     # For some settings, VS2010 does not automatically extend the settings
     # TODO(jeanluc) Is this what we want?
-    if name in ['AdditionalDependencies',
-                'AdditionalIncludeDirectories',
+    if name in ['AdditionalIncludeDirectories',
                 'AdditionalLibraryDirectories',
                 'AdditionalOptions',
                 'DelayLoadDLLs',
diff --git a/tools/gyp/pylib/gyp/generator/ninja.py b/tools/gyp/pylib/gyp/generator/ninja.py
index fa6bd86ac3f..c6bceaf382a 100644
--- a/tools/gyp/pylib/gyp/generator/ninja.py
+++ b/tools/gyp/pylib/gyp/generator/ninja.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
+# Copyright (c) 2013 Google Inc. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -13,7 +13,7 @@ import sys
 import gyp
 import gyp.common
 import gyp.msvs_emulation
-import gyp.MSVSVersion
+import gyp.MSVSUtil as MSVSUtil
 import gyp.xcode_emulation
 from gyp.common import GetEnvironFallback
 
@@ -97,21 +97,6 @@ def Define(d, flavor):
   return QuoteShellArgument(ninja_syntax.escape('-D' + d), flavor)
 
 
-def InvertRelativePath(path):
-  """Given a relative path like foo/bar, return the inverse relative path:
-  the path from the relative path back to the origin dir.
-
-  E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path)))
-  should always produce the empty string."""
-
-  if not path:
-    return path
-  # Only need to handle relative paths into subdirectories for now.
-  assert '..' not in path, path
-  depth = len(path.split(os.path.sep))
-  return os.path.sep.join(['..'] * depth)
-
-
 class Target:
   """Target represents the paths used within a single gyp target.
 
@@ -218,12 +203,12 @@ class Target:
 
 class NinjaWriter:
   def __init__(self, qualified_target, target_outputs, base_dir, build_dir,
-               output_file, flavor, abs_build_dir=None):
+               output_file, flavor, toplevel_dir=None):
     """
     base_dir: path from source root to directory containing this gyp file,
               by gyp semantics, all input paths are relative to this
     build_dir: path from source root to build output
-    abs_build_dir: absolute path to the build directory
+    toplevel_dir: path to the toplevel directory
     """
 
     self.qualified_target = qualified_target
@@ -232,7 +217,10 @@ class NinjaWriter:
     self.build_dir = build_dir
     self.ninja = ninja_syntax.Writer(output_file)
     self.flavor = flavor
-    self.abs_build_dir = abs_build_dir
+    self.abs_build_dir = None
+    if toplevel_dir is not None:
+      self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir,
+                                                        build_dir))
     self.obj_ext = '.obj' if flavor == 'win' else '.o'
     if flavor == 'win':
       # See docstring of msvs_emulation.GenerateEnvironmentFiles().
@@ -241,9 +229,11 @@ class NinjaWriter:
         self.win_env[arch] = 'environment.' + arch
 
     # Relative path from build output dir to base dir.
-    self.build_to_base = os.path.join(InvertRelativePath(build_dir), base_dir)
+    build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir)
+    self.build_to_base = os.path.join(build_to_top, base_dir)
     # Relative path from base dir to build dir.
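Both make.py and ninja.py now delegate to gyp.common.InvertRelativePath, which additionally takes the toplevel directory. The invariant is the one the removed docstring stated; a runnable sketch of the single-argument form (strictly speaking, os.path.normpath yields '.', not the empty string):

    import os

    def invert_relative_path(path):
        # Single-argument form as removed above; '..' components unsupported.
        if not path:
            return path
        assert '..' not in path, path
        depth = len(path.split(os.path.sep))
        return os.path.sep.join(['..'] * depth)

    p = os.path.join('foo', 'bar')
    assert invert_relative_path(p) == os.path.join('..', '..')
    assert os.path.normpath(os.path.join(p, invert_relative_path(p))) == '.'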
- self.base_to_build = os.path.join(InvertRelativePath(base_dir), build_dir) + base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir) + self.base_to_build = os.path.join(base_to_top, build_dir) def ExpandSpecial(self, path, product_dir=None): """Expand specials like $!PRODUCT_DIR in |path|. @@ -428,7 +418,8 @@ class NinjaWriter: gyp.msvs_emulation.VerifyMissingSources( sources, self.abs_build_dir, generator_flags, self.GypPathToNinja) pch = gyp.msvs_emulation.PrecompiledHeader( - self.msvs_settings, config_name, self.GypPathToNinja) + self.msvs_settings, config_name, self.GypPathToNinja, + self.GypPathToUniqueOutput, self.obj_ext) else: pch = gyp.xcode_emulation.MacPrefixHeader( self.xcode_settings, self.GypPathToNinja, @@ -743,7 +734,15 @@ class NinjaWriter: cflags_c = self.msvs_settings.GetCflagsC(config_name) cflags_cc = self.msvs_settings.GetCflagsCC(config_name) extra_defines = self.msvs_settings.GetComputedDefines(config_name) - self.WriteVariableList('pdbname', [self.name + '.pdb']) + pdbpath = self.msvs_settings.GetCompilerPdbName( + config_name, self.ExpandSpecial) + if not pdbpath: + obj = 'obj' + if self.toolset != 'target': + obj += '.' + self.toolset + pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, + self.name + '.pdb')) + self.WriteVariableList('pdbname', [pdbpath]) self.WriteVariableList('pchprefix', [self.name]) else: cflags = config.get('cflags', []) @@ -824,9 +823,14 @@ class NinjaWriter: if not case_sensitive_filesystem: output = output.lower() implicit = precompiled_header.GetObjDependencies([input], [output]) + variables = [] + if self.flavor == 'win': + variables, output, implicit = precompiled_header.GetFlagsModifications( + input, output, implicit, command, cflags_c, cflags_cc, + self.ExpandSpecial) self.ninja.build(output, command, input, implicit=[gch for _, _, gch in implicit], - order_only=predepends) + order_only=predepends, variables=variables) outputs.append(output) self.WritePchTargets(pch_commands) @@ -848,8 +852,6 @@ class NinjaWriter: }[lang] map = { 'c': 'cc', 'cc': 'cxx', 'm': 'objc', 'mm': 'objcxx', } - if self.flavor == 'win': - map.update({'c': 'cc_pch', 'cc': 'cxx_pch'}) cmd = map.get(lang) self.ninja.build(gch, cmd, input, variables=[(var_name, lang_flag)]) @@ -903,16 +905,12 @@ class NinjaWriter: extra_bindings.append(('postbuilds', self.GetPostbuildCommand(spec, output, output))) + is_executable = spec['type'] == 'executable' if self.flavor == 'mac': ldflags = self.xcode_settings.GetLdflags(config_name, self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']), self.GypPathToNinja) elif self.flavor == 'win': - libflags = self.msvs_settings.GetLibFlags(config_name, - self.GypPathToNinja) - self.WriteVariableList( - 'libflags', gyp.common.uniquer(map(self.ExpandSpecial, libflags))) - is_executable = spec['type'] == 'executable' manifest_name = self.GypPathToUniqueOutput( self.ComputeOutputFileName(spec)) ldflags, manifest_files = self.msvs_settings.GetLdflags(config_name, @@ -920,6 +918,9 @@ class NinjaWriter: self.WriteVariableList('manifests', manifest_files) else: ldflags = config.get('ldflags', []) + if is_executable and len(solibs): + ldflags.append('-Wl,-rpath=\$$ORIGIN/lib/') + ldflags.append('-Wl,-rpath-link=lib/') self.WriteVariableList('ldflags', gyp.common.uniquer(map(self.ExpandSpecial, ldflags))) @@ -975,6 +976,10 @@ class NinjaWriter: self.ninja.build(self.target.binary, 'alink_thin', link_deps, order_only=compile_deps, variables=variables) else: + if self.msvs_settings: + libflags = 
self.msvs_settings.GetLibFlags(config_name, + self.GypPathToNinja) + variables.append(('libflags', libflags)) self.ninja.build(self.target.binary, 'alink', link_deps, order_only=compile_deps, variables=variables) else: @@ -1046,10 +1051,9 @@ class NinjaWriter: env = self.ComputeExportEnvString(self.GetSortedXcodePostbuildEnv()) # G will be non-null if any postbuild fails. Run all postbuilds in a # subshell. - commands = env + ' (F=0; ' + \ - ' '.join([ninja_syntax.escape(command) + ' || F=$$?;' - for command in postbuilds]) - command_string = (commands + ' exit $$F); G=$$?; ' + commands = env + ' (' + \ + ' && '.join([ninja_syntax.escape(command) for command in postbuilds]) + command_string = (commands + '); G=$$?; ' # Remove the final output if any postbuild failed. '((exit $$G) || rm -rf %s) ' % output + '&& exit $$G)') if is_command_start: @@ -1315,6 +1319,13 @@ def OpenOutput(path, mode='w'): return open(path, mode) +def CommandWithWrapper(cmd, wrappers, prog): + wrapper = wrappers.get(cmd, '') + if wrapper: + return wrapper + ' ' + prog + return prog + + def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name): options = params['options'] @@ -1372,7 +1383,14 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0]) make_global_settings = data[build_file].get('make_global_settings', []) - build_to_root = InvertRelativePath(build_dir) + build_to_root = gyp.common.InvertRelativePath(build_dir, + options.toplevel_dir) + flock = 'flock' + if flavor == 'mac': + flock = './gyp-mac-tool flock' + wrappers = {} + if flavor != 'win': + wrappers['LINK'] = flock + ' linker.lock' for key, value in make_global_settings: if key == 'CC': cc = os.path.join(build_to_root, value) @@ -1388,14 +1406,13 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, cxx_host_global_setting = value if key == 'LD.host': ld_host = os.path.join(build_to_root, value) + if key.endswith('_wrapper'): + wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value) - flock = 'flock' - if flavor == 'mac': - flock = './gyp-mac-tool flock' cc = GetEnvironFallback(['CC_target', 'CC'], cc) - master_ninja.variable('cc', cc) + master_ninja.variable('cc', CommandWithWrapper('CC', wrappers, cc)) cxx = GetEnvironFallback(['CXX_target', 'CXX'], cxx) - master_ninja.variable('cxx', cxx) + master_ninja.variable('cxx', CommandWithWrapper('CXX', wrappers, cxx)) ld = GetEnvironFallback(['LD_target', 'LD'], ld) if not cc_host: @@ -1412,7 +1429,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, master_ninja.variable('mt', 'mt.exe') master_ninja.variable('use_dep_database', '1') else: - master_ninja.variable('ld', flock + ' linker.lock ' + ld) + master_ninja.variable('ld', CommandWithWrapper('LINK', wrappers, ld)) master_ninja.variable('ar', GetEnvironFallback(['AR_target', 'AR'], 'ar')) master_ninja.variable('ar_host', GetEnvironFallback(['AR_host'], 'ar')) @@ -1426,12 +1443,15 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, cc_host = cc_host_global_setting.replace('$(CC)', cc) if '$(CXX)' in cxx_host and cxx_host_global_setting: cxx_host = cxx_host_global_setting.replace('$(CXX)', cxx) - master_ninja.variable('cc_host', cc_host) - master_ninja.variable('cxx_host', cxx_host) + master_ninja.variable('cc_host', + CommandWithWrapper('CC.host', wrappers, cc_host)) + master_ninja.variable('cxx_host', + CommandWithWrapper('CXX.host', wrappers, cxx_host)) if flavor == 'win': 
master_ninja.variable('ld_host', ld_host) else: - master_ninja.variable('ld_host', flock + ' linker.lock ' + ld_host) + master_ninja.variable('ld_host', CommandWithWrapper( + 'LINK', wrappers, ld_host)) master_ninja.newline() @@ -1454,45 +1474,25 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, '$cflags_pch_cc -c $in -o $out'), depfile='$out.d') else: - # Template for compile commands mostly shared between compiling files - # and generating PCH. In the case of PCH, the "output" is specified by /Fp - # rather than /Fo (for object files), but we still need to specify an /Fo - # when compiling PCH. - cc_template = ('ninja -t msvc -r . -o $out -e $arch ' + cc_command = ('ninja -t msvc -o $out -e $arch ' + '-- ' + '$cc /nologo /showIncludes /FC ' + '@$out.rsp /c $in /Fo$out /Fd$pdbname ') + cxx_command = ('ninja -t msvc -o $out -e $arch ' '-- ' - '$cc /nologo /showIncludes /FC ' - '@$out.rsp ' - '$cflags_pch_c /c $in %(outspec)s /Fd$pdbname ') - cxx_template = ('ninja -t msvc -r . -o $out -e $arch ' - '-- ' - '$cxx /nologo /showIncludes /FC ' - '@$out.rsp ' - '$cflags_pch_cc /c $in %(outspec)s $pchobj /Fd$pdbname ') + '$cxx /nologo /showIncludes /FC ' + '@$out.rsp /c $in /Fo$out /Fd$pdbname ') master_ninja.rule( 'cc', description='CC $out', - command=cc_template % {'outspec': '/Fo$out'}, - depfile='$out.d', - rspfile='$out.rsp', - rspfile_content='$defines $includes $cflags $cflags_c') - master_ninja.rule( - 'cc_pch', - description='CC PCH $out', - command=cc_template % {'outspec': '/Fp$out /Fo$out.obj'}, + command=cc_command, depfile='$out.d', rspfile='$out.rsp', rspfile_content='$defines $includes $cflags $cflags_c') master_ninja.rule( 'cxx', description='CXX $out', - command=cxx_template % {'outspec': '/Fo$out'}, - depfile='$out.d', - rspfile='$out.rsp', - rspfile_content='$defines $includes $cflags $cflags_cc') - master_ninja.rule( - 'cxx_pch', - description='CXX PCH $out', - command=cxx_template % {'outspec': '/Fp$out /Fo$out.obj'}, + command=cxx_command, depfile='$out.d', rspfile='$out.rsp', rspfile_content='$defines $includes $cflags $cflags_cc') @@ -1559,7 +1559,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, master_ninja.rule( 'link', description='LINK $out', - command=('$ld $ldflags -o $out -Wl,-rpath=\$$ORIGIN/lib ' + command=('$ld $ldflags -o $out ' '-Wl,--start-group $in $solibs -Wl,--end-group $libs')) elif flavor == 'win': master_ninja.rule( @@ -1574,6 +1574,9 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, dllcmd = ('%s gyp-win-tool link-wrapper $arch ' '$ld /nologo $implibflag /DLL /OUT:$dll ' '/PDB:$dll.pdb @$dll.rsp' % sys.executable) + dllcmd += (' && %s gyp-win-tool manifest-wrapper $arch ' + 'cmd /c if exist $dll.manifest del $dll.manifest' % + sys.executable) dllcmd += (' && %s gyp-win-tool manifest-wrapper $arch ' '$mt -nologo -manifest $manifests -out:$dll.manifest' % sys.executable) @@ -1593,8 +1596,10 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, command=('%s gyp-win-tool link-wrapper $arch ' '$ld /nologo /OUT:$out /PDB:$out.pdb @$out.rsp && ' '%s gyp-win-tool manifest-wrapper $arch ' + 'cmd /c if exist $out.manifest del $out.manifest && ' + '%s gyp-win-tool manifest-wrapper $arch ' '$mt -nologo -manifest $manifests -out:$out.manifest' % - (sys.executable, sys.executable)), + (sys.executable, sys.executable, sys.executable)), rspfile='$out.rsp', rspfile_content='$in_newline $libs $ldflags') else: @@ -1729,7 +1734,7 @@ def GenerateOutputForConfig(target_list, 
target_dicts, data, params, abs_build_dir = os.path.abspath(toplevel_build) writer = NinjaWriter(qualified_target, target_outputs, base_path, build_dir, OpenOutput(os.path.join(toplevel_build, output_file)), - flavor, abs_build_dir=abs_build_dir) + flavor, toplevel_dir=options.toplevel_dir) master_ninja.subninja(output_file) target = writer.WriteSpec( @@ -1777,6 +1782,11 @@ def CallGenerateOutputForConfig(arglist): def GenerateOutput(target_list, target_dicts, data, params): user_config = params.get('generator_flags', {}).get('config', None) + if gyp.common.GetFlavor(params) == 'win': + target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts) + target_list, target_dicts = MSVSUtil.InsertLargePdbShims( + target_list, target_dicts, generator_default_variables) + if user_config: GenerateOutputForConfig(target_list, target_dicts, data, params, user_config) diff --git a/tools/gyp/pylib/gyp/generator/xcode.py b/tools/gyp/pylib/gyp/generator/xcode.py index 7b21bae8a98..ca3b01eea0c 100644 --- a/tools/gyp/pylib/gyp/generator/xcode.py +++ b/tools/gyp/pylib/gyp/generator/xcode.py @@ -1110,20 +1110,29 @@ exit 1 AddHeaderToTarget(header, pbxp, xct, True) # Add "copies". + pbxcp_dict = {} for copy_group in spec.get('copies', []): - pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({ - 'name': 'Copy to ' + copy_group['destination'] - }, - parent=xct) dest = copy_group['destination'] if dest[0] not in ('/', '$'): # Relative paths are relative to $(SRCROOT). dest = '$(SRCROOT)/' + dest - pbxcp.SetDestination(dest) - # TODO(mark): The usual comment about this knowing too much about - # gyp.xcodeproj_file internals applies. - xct._properties['buildPhases'].insert(prebuild_index, pbxcp) + # Coalesce multiple "copies" sections in the same target with the same + # "destination" property into the same PBXCopyFilesBuildPhase, otherwise + # they'll wind up with ID collisions. + pbxcp = pbxcp_dict.get(dest, None) + if pbxcp is None: + pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({ + 'name': 'Copy to ' + copy_group['destination'] + }, + parent=xct) + pbxcp.SetDestination(dest) + + # TODO(mark): The usual comment about this knowing too much about + # gyp.xcodeproj_file internals applies. + xct._properties['buildPhases'].insert(prebuild_index, pbxcp) + + pbxcp_dict[dest] = pbxcp for file in copy_group['files']: pbxcp.AddFile(file) diff --git a/tools/gyp/pylib/gyp/input.py b/tools/gyp/pylib/gyp/input.py index 65236671f97..eca0eb93aaf 100644 --- a/tools/gyp/pylib/gyp/input.py +++ b/tools/gyp/pylib/gyp/input.py @@ -46,21 +46,16 @@ base_path_sections = [ ] path_sections = [] +is_path_section_charset = set('=+?!') +is_path_section_match_re = re.compile('_(dir|file|path)s?$') def IsPathSection(section): # If section ends in one of these characters, it's applied to a section # without the trailing characters. '/' is notably absent from this list, # because there's no way for a regular expression to be treated as a path. 
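The regex rewrite of IsPathSection in the next hunk preserves the old suffix matching; a quick sketch, in the codebase's Python 2, of what it accepts:

    import re

    is_path_section_match_re = re.compile('_(dir|file|path)s?$')

    for name in ('include_dirs', 'library_path', 'outputs', 'sources!'):
        # Trailing merge operators (=, +, ?, !) are stripped before matching.
        stripped = name.rstrip('=+?!')
        print name, bool(is_path_section_match_re.search(stripped))
    # -> include_dirs True, library_path True, outputs False, sources False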
- while section[-1:] in ('=', '+', '?', '!'): - section = section[0:-1] - - if section in path_sections or \ - section.endswith('_dir') or section.endswith('_dirs') or \ - section.endswith('_file') or section.endswith('_files') or \ - section.endswith('_path') or section.endswith('_paths'): - return True - return False - + while section[-1:] in is_path_section_charset: + section = section[:-1] + return section in path_sections or is_path_section_match_re.search(section) # base_non_configuraiton_keys is a list of key names that belong in the target # itself and should not be propagated into its configurations. It is merged @@ -269,7 +264,7 @@ def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data, aux_data[subdict_path]['included'] = [] aux_data[subdict_path]['included'].append(include) - gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'" % include) + gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include) MergeDicts(subdict, LoadOneBuildFile(include, data, aux_data, variables, None, @@ -359,7 +354,7 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, data['target_build_files'].add(build_file_path) gyp.DebugOutput(gyp.DEBUG_INCLUDES, - "Loading Target Build File '%s'" % build_file_path) + "Loading Target Build File '%s'", build_file_path) build_file_data = LoadOneBuildFile(build_file_path, data, aux_data, variables, includes, True, check) @@ -494,7 +489,7 @@ def CallLoadTargetBuildFile(global_flags, aux_data_out, dependencies) except Exception, e: - print "Exception: ", e + print >>sys.stderr, 'Exception: ', e return None @@ -569,6 +564,12 @@ def LoadTargetBuildFileParallel(build_file_path, data, aux_data, parallel_state.condition.acquire() while parallel_state.dependencies or parallel_state.pending: if parallel_state.error: + print >>sys.stderr, ( + '\n' + 'Note: an error occurred while running gyp using multiprocessing.\n' + 'For more verbose output, set GYP_PARALLEL=0 in your environment.\n' + 'If the error only occurs when GYP_PARALLEL=1, ' + 'please report a bug!') break if not parallel_state.dependencies: parallel_state.condition.wait() @@ -608,32 +609,27 @@ def LoadTargetBuildFileParallel(build_file_path, data, aux_data, # the input is something like "<(foo <(bar)) blah", then it would # return (1, 13), indicating the entire string except for the leading # "<" and trailing " blah". -def FindEnclosingBracketGroup(input): - brackets = { '}': '{', - ']': '[', - ')': '(', } +LBRACKETS= set('{[(') +BRACKETS = {'}': '{', ']': '[', ')': '('} +def FindEnclosingBracketGroup(input_str): stack = [] - count = 0 start = -1 - for char in input: - if char in brackets.values(): + for index, char in enumerate(input_str): + if char in LBRACKETS: stack.append(char) if start == -1: - start = count - if char in brackets.keys(): - try: - last_bracket = stack.pop() - except IndexError: + start = index + elif char in BRACKETS: + if not stack: return (-1, -1) - if last_bracket != brackets[char]: + if stack.pop() != BRACKETS[char]: return (-1, -1) - if len(stack) == 0: - return (start, count + 1) - count = count + 1 + if not stack: + return (start, index + 1) return (-1, -1) -canonical_int_re = re.compile('^(0|-?[1-9][0-9]*)$') +canonical_int_re = re.compile('(0|-?[1-9][0-9]*)$') def IsStrCanonicalInt(string): @@ -641,10 +637,7 @@ def IsStrCanonicalInt(string): The canonical form is such that str(int(string)) == string. 
""" - if not isinstance(string, str) or not canonical_int_re.match(string): - return False - - return True + return isinstance(string, str) and canonical_int_re.match(string) # This matches things like "<(asdf)", " ! <| >| <@ # >@ !@), match['is_array'] contains a '[' for command @@ -839,8 +831,8 @@ def ExpandVariables(input, phase, variables, build_file): cached_value = cached_command_results.get(cache_key, None) if cached_value is None: gyp.DebugOutput(gyp.DEBUG_VARIABLES, - "Executing command '%s' in directory '%s'" % - (contents,build_file_dir)) + "Executing command '%s' in directory '%s'", + contents, build_file_dir) replacement = '' @@ -852,12 +844,17 @@ def ExpandVariables(input, phase, variables, build_file): # >sys.stderr, 'Using parallel processing (experimental).' + print >>sys.stderr, 'Using parallel processing.' LoadTargetBuildFileParallel(build_file, data, aux_data, variables, includes, depth, check) else: @@ -2564,6 +2576,10 @@ def Load(build_files, variables, includes, depth, generator_input_info, check, # Fully qualify all dependency links. QualifyDependencies(targets) + # Remove self-dependencies from targets that have 'prune_self_dependencies' + # set to 1. + RemoveSelfDependencies(targets) + # Expand dependencies specified as build_file:*. ExpandWildcardDependencies(targets, data) diff --git a/tools/gyp/pylib/gyp/mac_tool.py b/tools/gyp/pylib/gyp/mac_tool.py index 69267694dc7..c06e3bebbf0 100755 --- a/tools/gyp/pylib/gyp/mac_tool.py +++ b/tools/gyp/pylib/gyp/mac_tool.py @@ -80,6 +80,19 @@ class MacTool(object): def _CopyStringsFile(self, source, dest): """Copies a .strings file using iconv to reconvert the input into UTF-16.""" input_code = self._DetectInputEncoding(source) or "UTF-8" + + # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call + # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints + # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing + # semicolon in dictionary. + # on invalid files. Do the same kind of validation. + import CoreFoundation + s = open(source).read() + d = CoreFoundation.CFDataCreate(None, s, len(s)) + _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) + if error: + return + fp = open(dest, 'w') args = ['/usr/bin/iconv', '--from-code', input_code, '--to-code', 'UTF-16', source] diff --git a/tools/gyp/pylib/gyp/msvs_emulation.py b/tools/gyp/pylib/gyp/msvs_emulation.py index 840a79b6736..bc2afca3e0b 100644 --- a/tools/gyp/pylib/gyp/msvs_emulation.py +++ b/tools/gyp/pylib/gyp/msvs_emulation.py @@ -168,8 +168,6 @@ class MsvsSettings(object): equivalents.""" target_platform = 'Win32' if self.GetArch(config) == 'x86' else 'x64' replacements = { - '$(VSInstallDir)': self.vs_version.Path(), - '$(VCInstallDir)': os.path.join(self.vs_version.Path(), 'VC') + '\\', '$(OutDir)\\': base_to_build + '\\' if base_to_build else '', '$(IntDir)': '$!INTERMEDIATE_DIR', '$(InputPath)': '${source}', @@ -178,6 +176,12 @@ class MsvsSettings(object): '$(PlatformName)': target_platform, '$(ProjectDir)\\': '', } + # '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when + # Visual Studio is actually installed. + if self.vs_version.Path(): + replacements['$(VSInstallDir)'] = self.vs_version.Path() + replacements['$(VCInstallDir)'] = os.path.join(self.vs_version.Path(), + 'VC') + '\\' # Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be # set. This happens when the SDK is sync'd via src-internal, rather than # by typical end-user installation of the SDK. 
If it's not set, we don't @@ -275,6 +279,16 @@ class MsvsSettings(object): ('VCCLCompilerTool', 'PreprocessorDefinitions'), config, default=[])) return defines + def GetCompilerPdbName(self, config, expand_special): + """Get the pdb file name that should be used for compiler invocations, or + None if there's no explicit name specified.""" + config = self._TargetConfig(config) + pdbname = self._Setting( + ('VCCLCompilerTool', 'ProgramDataBaseFileName'), config) + if pdbname: + pdbname = expand_special(self.ConvertVSMacros(pdbname)) + return pdbname + def GetOutputName(self, config, expand_special): """Gets the explicitly overridden output name for a target or returns None if it's not overridden.""" @@ -309,6 +323,7 @@ class MsvsSettings(object): map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O') cl('InlineFunctionExpansion', prefix='/Ob') cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy') + cl('EnableIntrinsicFunctions', map={'false': '-', 'true': ''}, prefix='/Oi') cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O') cl('WholeProgramOptimization', map={'true': '/GL'}) cl('WarningLevel', prefix='/W') @@ -323,8 +338,13 @@ class MsvsSettings(object): cl('RuntimeLibrary', map={'0': 'T', '1': 'Td', '2': 'D', '3': 'Dd'}, prefix='/M') cl('ExceptionHandling', map={'1': 'sc','2': 'a'}, prefix='/EH') + cl('DefaultCharIsUnsigned', map={'true': '/J'}) + cl('TreatWChar_tAsBuiltInType', + map={'false': '-', 'true': ''}, prefix='/Zc:wchar_t') cl('EnablePREfast', map={'true': '/analyze'}) cl('AdditionalOptions', prefix='') + cflags.extend(['/FI' + f for f in self._Setting( + ('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])]) # ninja handles parallelism by itself, don't have the compiler do it too. cflags = filter(lambda x: not x.startswith('/MP'), cflags) return cflags @@ -378,6 +398,7 @@ class MsvsSettings(object): 'VCLibrarianTool', append=libflags) libflags.extend(self._GetAdditionalLibraryDirectories( 'VCLibrarianTool', config, gyp_to_build_path)) + lib('LinkTimeCodeGeneration', map={'true': '/LTCG'}) lib('AdditionalOptions') return libflags @@ -414,6 +435,7 @@ class MsvsSettings(object): ldflags.append('/PDB:' + pdb) ld('AdditionalOptions', prefix='') ld('SubSystem', map={'1': 'CONSOLE', '2': 'WINDOWS'}, prefix='/SUBSYSTEM:') + ld('TerminalServerAware', map={'1': ':NO', '2': ''}, prefix='/TSAWARE') ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL') ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED') ld('RandomizedBaseAddress', @@ -426,13 +448,11 @@ class MsvsSettings(object): ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:') ld('ResourceOnlyDLL', map={'true': '/NOENTRY'}) ld('EntryPointSymbol', prefix='/ENTRY:') - ld('Profile', map={ 'true': '/PROFILE'}) + ld('Profile', map={'true': '/PROFILE'}) + ld('LargeAddressAware', + map={'1': ':NO', '2': ''}, prefix='/LARGEADDRESSAWARE') # TODO(scottmg): This should sort of be somewhere else (not really a flag). ld('AdditionalDependencies', prefix='') - # TODO(scottmg): These too. - ldflags.extend(('kernel32.lib', 'user32.lib', 'gdi32.lib', 'winspool.lib', - 'comdlg32.lib', 'advapi32.lib', 'shell32.lib', 'ole32.lib', - 'oleaut32.lib', 'uuid.lib', 'odbc32.lib', 'DelayImp.lib')) # If the base address is not specifically controlled, DYNAMICBASE should # be on by default. @@ -576,7 +596,8 @@ class MsvsSettings(object): ('iid', iid), ('proxy', proxy)] # TODO(scottmg): Are there configuration settings to set these flags? 
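The new flag mappings above (/Oi, /LTCG, /TSAWARE, /LARGEADDRESSAWARE, and so on) all follow the same helper convention: the MSVS setting's value is translated through the map and appended to the prefix. A standalone sketch (the helper name is hypothetical):

    def flag_for(value, value_map, prefix):
        # '' keeps the bare switch, '-' or ':NO' negates it, mirroring the
        # cl()/ld() helpers in MsvsSettings.
        return prefix + value_map[value]

    assert flag_for('true', {'false': '-', 'true': ''}, '/Oi') == '/Oi'
    assert flag_for('false', {'false': '-', 'true': ''}, '/Oi') == '/Oi-'
    assert flag_for('1', {'1': ':NO', '2': ''}, '/TSAWARE') == '/TSAWARE:NO'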
- flags = ['/char', 'signed', '/env', 'win32', '/Oicf'] + target_platform = 'win32' if self.GetArch(config) == 'x86' else 'x64' + flags = ['/char', 'signed', '/env', target_platform, '/Oicf'] return outdir, output, variables, flags @@ -586,29 +607,25 @@ def _LanguageMatchesForPch(source_ext, pch_source_ext): return ((source_ext in c_exts and pch_source_ext in c_exts) or (source_ext in cc_exts and pch_source_ext in cc_exts)) + class PrecompiledHeader(object): """Helper to generate dependencies and build rules to handle generation of precompiled headers. Interface matches the GCH handler in xcode_emulation.py. """ - def __init__(self, settings, config, gyp_to_build_path): + def __init__( + self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext): self.settings = settings self.config = config - self.gyp_to_build_path = gyp_to_build_path + pch_source = self.settings.msvs_precompiled_source[self.config] + self.pch_source = gyp_to_build_path(pch_source) + filename, _ = os.path.splitext(pch_source) + self.output_obj = gyp_to_unique_output(filename + obj_ext).lower() def _PchHeader(self): """Get the header that will appear in an #include line for all source files.""" return os.path.split(self.settings.msvs_precompiled_header[self.config])[1] - def _PchSource(self): - """Get the source file that is built once to compile the pch data.""" - return self.gyp_to_build_path( - self.settings.msvs_precompiled_source[self.config]) - - def _PchOutput(self): - """Get the name of the output of the compiled pch data.""" - return '${pchprefix}.' + self._PchHeader() + '.pch' - def GetObjDependencies(self, sources, objs): """Given a list of sources files and the corresponding object files, returns a list of the pch files that should be depended upon. The @@ -616,24 +633,30 @@ class PrecompiledHeader(object): with make.py on Mac, and xcode_emulation.py.""" if not self._PchHeader(): return [] - source = self._PchSource() - assert source - pch_ext = os.path.splitext(self._PchSource())[1] + pch_ext = os.path.splitext(self.pch_source)[1] for source in sources: if _LanguageMatchesForPch(os.path.splitext(source)[1], pch_ext): - return [(None, None, self._PchOutput())] + return [(None, None, self.output_obj)] return [] def GetPchBuildCommands(self): - """Returns [(path_to_pch, language_flag, language, header)]. - |path_to_gch| and |header| are relative to the build directory.""" - header = self._PchHeader() - source = self._PchSource() - if not source or not header: - return [] - ext = os.path.splitext(source)[1] - lang = 'c' if ext == '.c' else 'cc' - return [(self._PchOutput(), '/Yc' + header, lang, source)] + """Not used on Windows as there are no additional build steps required + (instead, existing steps are modified in GetFlagsModifications below).""" + return [] + + def GetFlagsModifications(self, input, output, implicit, command, + cflags_c, cflags_cc, expand_special): + """Get the modified cflags and implicit dependencies that should be used + for the pch compilation step.""" + if input == self.pch_source: + pch_output = ['/Yc' + self._PchHeader()] + if command == 'cxx': + return ([('cflags_cc', map(expand_special, cflags_cc + pch_output))], + self.output_obj, []) + elif command == 'cc': + return ([('cflags_c', map(expand_special, cflags_c + pch_output))], + self.output_obj, []) + return [], output, implicit vs_version = None @@ -711,7 +734,13 @@ def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, open_out): of compiler tools (cl, link, lib, rc, midl, etc.) 
via win_tool.py which sets up the environment, and then we do not prefix the compiler with an absolute path, instead preferring something like "cl.exe" in the rule - which will then run whichever the environment setup has put in the path.""" + which will then run whichever the environment setup has put in the path. + When the following procedure to generate environment files does not + meet your requirement (e.g. for custom toolchains), you can pass + "-G ninja_use_custom_environment_files" to the gyp to suppress file + generation and use custom environment files prepared by yourself.""" + if generator_flags.get('ninja_use_custom_environment_files', 0): + return vs = GetVSVersion(generator_flags) for arch in ('x86', 'x64'): args = vs.SetupScript(arch) diff --git a/tools/gyp/pylib/gyp/xcode_emulation.py b/tools/gyp/pylib/gyp/xcode_emulation.py index ef5b46046ed..806f92b57af 100644 --- a/tools/gyp/pylib/gyp/xcode_emulation.py +++ b/tools/gyp/pylib/gyp/xcode_emulation.py @@ -11,13 +11,16 @@ import gyp.common import os.path import re import shlex +import subprocess +import sys +from gyp.common import GypError class XcodeSettings(object): """A class that understands the gyp 'xcode_settings' object.""" - # Computed lazily by _GetSdkBaseDir(). Shared by all XcodeSettings, so cached + # Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached # at class-level for efficiency. - _sdk_base_dir = None + _sdk_path_cache = {} def __init__(self, spec): self.spec = spec @@ -219,34 +222,34 @@ class XcodeSettings(object): else: return self._GetStandaloneBinaryPath() - def _GetSdkBaseDir(self): - """Returns the root of the 'Developer' directory. On Xcode 4.2 and prior, - this is usually just /Developer. Xcode 4.3 moved that folder into the Xcode - bundle.""" - if not XcodeSettings._sdk_base_dir: - import subprocess - job = subprocess.Popen(['xcode-select', '-print-path'], - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT) - out, err = job.communicate() - if job.returncode != 0: - print out - raise Exception('Error %d running xcode-select' % job.returncode) - # The Developer folder moved in Xcode 4.3. - xcode43_sdk_path = os.path.join( - out.rstrip(), 'Platforms/MacOSX.platform/Developer/SDKs') - if os.path.isdir(xcode43_sdk_path): - XcodeSettings._sdk_base_dir = xcode43_sdk_path - else: - XcodeSettings._sdk_base_dir = os.path.join(out.rstrip(), 'SDKs') - return XcodeSettings._sdk_base_dir + def _GetSdkVersionInfoItem(self, sdk, infoitem): + job = subprocess.Popen(['xcodebuild', '-version', '-sdk', sdk, infoitem], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + out = job.communicate()[0] + if job.returncode != 0: + sys.stderr.write(out + '\n') + raise GypError('Error %d running xcodebuild' % job.returncode) + return out.rstrip('\n') def _SdkPath(self): - sdk_root = self.GetPerTargetSetting('SDKROOT', default='macosx10.5') - if sdk_root.startswith('macosx'): - return os.path.join(self._GetSdkBaseDir(), - 'MacOSX' + sdk_root[len('macosx'):] + '.sdk') - return sdk_root + sdk_root = self.GetPerTargetSetting('SDKROOT', default='macosx') + if sdk_root not in XcodeSettings._sdk_path_cache: + XcodeSettings._sdk_path_cache[sdk_root] = self._GetSdkVersionInfoItem( + sdk_root, 'Path') + return XcodeSettings._sdk_path_cache[sdk_root] + + def _AppendPlatformVersionMinFlags(self, lst): + self._Appendf(lst, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s') + if 'IPHONEOS_DEPLOYMENT_TARGET' in self._Settings(): + # TODO: Implement this better? 
+      sdk_path_basename = os.path.basename(self._SdkPath())
+      if sdk_path_basename.lower().startswith('iphonesimulator'):
+        self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
+                      '-mios-simulator-version-min=%s')
+      else:
+        self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
+                      '-miphoneos-version-min=%s')
 
   def GetCflags(self, configname):
     """Returns flags that need to be added to .c, .cc, .m, and .mm
@@ -261,6 +264,9 @@ class XcodeSettings(object):
     if 'SDKROOT' in self._Settings():
       cflags.append('-isysroot %s' % sdk_root)
 
+    if self._Test('CLANG_WARN_CONSTANT_CONVERSION', 'YES', default='NO'):
+      cflags.append('-Wconstant-conversion')
+
     if self._Test('GCC_CHAR_IS_UNSIGNED_CHAR', 'YES', default='NO'):
       cflags.append('-funsigned-char')
 
@@ -301,7 +307,7 @@ class XcodeSettings(object):
     if self._Test('GCC_WARN_ABOUT_MISSING_NEWLINE', 'YES', default='NO'):
       cflags.append('-Wnewline-eof')
 
-    self._Appendf(cflags, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s')
+    self._AppendPlatformVersionMinFlags(cflags)
 
     # TODO:
     if self._Test('COPY_PHASE_STRIP', 'YES', default='NO'):
@@ -354,6 +360,16 @@ class XcodeSettings(object):
     """Returns flags that need to be added to .cc, and .mm compilations."""
     self.configname = configname
     cflags_cc = []
+
+    clang_cxx_language_standard = self._Settings().get(
+        'CLANG_CXX_LANGUAGE_STANDARD')
+    # Note: Don't map c++0x to c++11, so that c++0x can be used with older
+    # clangs that don't understand c++11 yet (like Xcode 4.2's).
+    if clang_cxx_language_standard:
+      cflags_cc.append('-std=%s' % clang_cxx_language_standard)
+
+    self._Appendf(cflags_cc, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
+
     if self._Test('GCC_ENABLE_CPP_RTTI', 'NO', default='YES'):
       cflags_cc.append('-fno-rtti')
     if self._Test('GCC_ENABLE_CPP_EXCEPTIONS', 'NO', default='YES'):
@@ -362,6 +378,7 @@ class XcodeSettings(object):
       cflags_cc.append('-fvisibility-inlines-hidden')
     if self._Test('GCC_THREADSAFE_STATICS', 'NO', default='YES'):
       cflags_cc.append('-fno-threadsafe-statics')
+    # Note: This flag is a no-op for clang; it only has an effect for gcc.
if self._Test('GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO', 'NO', default='YES'): cflags_cc.append('-Wno-invalid-offsetof') @@ -524,8 +541,9 @@ class XcodeSettings(object): ldflags, 'DYLIB_COMPATIBILITY_VERSION', '-compatibility_version %s') self._Appendf( ldflags, 'DYLIB_CURRENT_VERSION', '-current_version %s') - self._Appendf( - ldflags, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s') + + self._AppendPlatformVersionMinFlags(ldflags) + if 'SDKROOT' in self._Settings(): ldflags.append('-isysroot ' + self._SdkPath()) @@ -1042,7 +1060,7 @@ def _TopologicallySortedEnvVarKeys(env): order.reverse() return order except gyp.common.CycleError, e: - raise Exception( + raise GypError( 'Xcode environment variables are cyclically dependent: ' + str(e.nodes)) diff --git a/tools/gyp/pylib/gyp/xcodeproj_file.py b/tools/gyp/pylib/gyp/xcodeproj_file.py index ec4cb96bc20..47712a7f6e0 100644 --- a/tools/gyp/pylib/gyp/xcodeproj_file.py +++ b/tools/gyp/pylib/gyp/xcodeproj_file.py @@ -1503,6 +1503,7 @@ class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject): 'r': 'sourcecode.rez', 'rez': 'sourcecode.rez', 's': 'sourcecode.asm', + 'storyboard': 'file.storyboard', 'strings': 'text.plist.strings', 'ttf': 'file', 'xcconfig': 'text.xcconfig', diff --git a/tools/gyp/tools/emacs/gyp-tests.el b/tools/gyp/tools/emacs/gyp-tests.el index e988a350edf..11b84978860 100644 --- a/tools/gyp/tools/emacs/gyp-tests.el +++ b/tools/gyp/tools/emacs/gyp-tests.el @@ -26,11 +26,20 @@ (insert-file-contents-literally (concat filename ".fontified")) (read (current-buffer)))) +(defun equivalent-face (face) + "For the purposes of face comparison, we're not interested in the + differences between certain faces. For example, the difference between + font-lock-comment-delimiter and font-lock-comment-face." + (case face + ((font-lock-comment-delimiter-face) font-lock-comment-face) + (t face))) + (defun text-face-properties (s) "Extract the text properties from s" (let ((result (list t))) (dotimes (i (length s)) - (setq result (cons (get-text-property i 'face s) result))) + (setq result (cons (equivalent-face (get-text-property i 'face s)) + result))) (nreverse result))) (ert-deftest test-golden-samples () diff --git a/tools/gyp/tools/emacs/gyp.el b/tools/gyp/tools/emacs/gyp.el index c20fc8de976..f558b53135f 100644 --- a/tools/gyp/tools/emacs/gyp.el +++ b/tools/gyp/tools/emacs/gyp.el @@ -135,7 +135,7 @@ (setq sections (cdr sections)) ; pop out a level (cond ((looking-at-p "['\"]") ; a string (setq string-start (point)) - (forward-sexp 1) + (goto-char (scan-sexps (point) 1)) (if (gyp-inside-dictionary-p) ;; Look for sections inside a dictionary (let ((section (gyp-section-name diff --git a/tools/test.py b/tools/test.py index 140358e4cfa..90c79bcb35e 100755 --- a/tools/test.py +++ b/tools/test.py @@ -232,7 +232,7 @@ class TapProgressIndicator(SimpleProgressIndicator): def HasRun(self, output): self._done += 1 - command = basename(output.command[1]) + command = basename(output.command[-1]) if output.UnexpectedOutput(): print 'not ok %i - %s' % (self._done, command) for l in output.output.stderr.splitlines():
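The SDK lookup introduced in xcode_emulation.py boils down to one memoized `xcodebuild -version -sdk <sdk> Path` call per SDK name, shared by all targets. A minimal sketch (function name hypothetical; Python 2.7 for subprocess.check_output):

    import subprocess

    _sdk_path_cache = {}

    def sdk_path(sdk_root='macosx'):
        # Resolve SDKROOT (default 'macosx') through xcodebuild once,
        # then reuse the cached path for every subsequent target.
        if sdk_root not in _sdk_path_cache:
            out = subprocess.check_output(
                ['xcodebuild', '-version', '-sdk', sdk_root, 'Path'])
            _sdk_path_cache[sdk_root] = out.rstrip('\n')
        return _sdk_path_cache[sdk_root]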