Diffstat (limited to 'deps/npm/lib')
-rw-r--r-- | deps/npm/lib/cache.js | 114
-rw-r--r-- | deps/npm/lib/install.js | 83
-rw-r--r-- | deps/npm/lib/ls.js | 194
-rw-r--r-- | deps/npm/lib/npm.js | 32
-rw-r--r-- | deps/npm/lib/uninstall.js | 7
-rw-r--r-- | deps/npm/lib/utils/cmd-shim.js | 2
-rw-r--r-- | deps/npm/lib/utils/completion/file-completion.js | 2
-rw-r--r-- | deps/npm/lib/utils/excludes.js | 159
-rw-r--r-- | deps/npm/lib/utils/fetch.js | 5
-rw-r--r-- | deps/npm/lib/utils/link.js | 2
-rw-r--r-- | deps/npm/lib/utils/mkdir-p.js | 191
-rw-r--r-- | deps/npm/lib/utils/npm-registry-client/get.js | 7
-rw-r--r-- | deps/npm/lib/utils/read-json.js | 25
-rw-r--r-- | deps/npm/lib/utils/tar.js | 496
-rw-r--r-- | deps/npm/lib/utils/uid-number.js | 55
15 files changed, 295 insertions, 1079 deletions
diff --git a/deps/npm/lib/cache.js b/deps/npm/lib/cache.js index e8aac5d2f..b62e82dd1 100644 --- a/deps/npm/lib/cache.js +++ b/deps/npm/lib/cache.js @@ -3,11 +3,10 @@ /* adding a folder: 1. tar into tmp/random/package.tgz -2. untar into tmp/random/contents/{blah} -3. rename {blah} to "package" -4. tar tmp/random/contents/package to cache/n/v/package.tgz -5. untar cache/n/v/package.tgz into cache/n/v/package -6. rm tmp/random +2. untar into tmp/random/contents/package, stripping one dir piece +3. tar tmp/random/contents/package to cache/n/v/package.tgz +4. untar cache/n/v/package.tgz into cache/n/v/package +5. rm tmp/random Adding a url: 1. fetch to tmp/random/package.tgz @@ -32,7 +31,7 @@ exports.read = read exports.clean = clean exports.unpack = unpack -var mkdir = require("./utils/mkdir-p.js") +var mkdir = require("mkdirp") , exec = require("./utils/exec.js") , fetch = require("./utils/fetch.js") , npm = require("./npm.js") @@ -50,6 +49,7 @@ var mkdir = require("./utils/mkdir-p.js") , tar = require("./utils/tar.js") , fileCompletion = require("./utils/completion/file-completion.js") , url = require("url") + , chownr = require("chownr") cache.usage = "npm cache add <tarball file>" + "\nnpm cache add <folder>" @@ -95,12 +95,10 @@ function cache (args, cb) { // if the pkg and ver are in the cache, then // just do a readJson and return. // if they're not, then fetch them from the registry. -var cacheSeen = {} function read (name, ver, forceBypass, cb) { if (typeof cb !== "function") cb = forceBypass, forceBypass = true var jsonFile = path.join(npm.cache, name, ver, "package", "package.json") function c (er, data) { - if (!er) cacheSeen[data._id] = data if (data) deprCheck(data) return cb(er, data) } @@ -110,10 +108,6 @@ function read (name, ver, forceBypass, cb) { return addNamed(name, ver, c) } - if (name+"@"+ver in cacheSeen) { - return cb(null, cacheSeen[name+"@"+ver]) - } - readJson(jsonFile, function (er, data) { if (er) return addNamed(name, ver, c) deprCheck(data) @@ -126,9 +120,13 @@ function ls (args, cb) { output = output || require("./utils/output.js") args = args.join("/").split("@").join("/") if (args.substr(-1) === "/") args = args.substr(0, args.length - 1) + var prefix = npm.config.get("cache") + if (0 === prefix.indexOf(process.env.HOME)) { + prefix = "~" + prefix.substr(process.env.HOME.length) + } ls_(args, npm.config.get("depth"), function(er, files) { output.write(files.map(function (f) { - return path.join("~/.npm", f) + return path.join(prefix, f) }).join("\n").trim(), function (er) { return cb(er, files) }) @@ -212,7 +210,7 @@ function add (args, cb) { // see if the spec is a url // otherwise, treat as name@version - var p = url.parse(spec.replace(/^git\+/, "git")) || {} + var p = url.parse(spec) || {} log.verbose(p, "parsed url") // it could be that we got name@http://blah @@ -230,11 +228,11 @@ function add (args, cb) { case "https:": return addRemoteTarball(spec, null, name, cb) case "git:": - case "githttp:": - case "githttps:": - case "gitrsync:": - case "gitftp:": - case "gitssh:": + case "git+http:": + case "git+https:": + case "git+rsync:": + case "git+ftp:": + case "git+ssh:": //p.protocol = p.protocol.replace(/^git([^:])/, "$1") return addRemoteGit(spec, p, name, cb) default: @@ -636,7 +634,7 @@ function getCacheStat (cb) { } function makeCacheDir (cb) { - if (!process.getuid) return mkdir(npm.cache, npm.modes.exec, cb) + if (!process.getuid) return mkdir(npm.cache, cb) var uid = +process.getuid() , gid = +process.getgid() @@ -647,18 +645,28 @@ function 
makeCacheDir (cb) { } if (uid !== 0 || !process.env.HOME) { cacheStat = {uid: uid, gid: gid} - return mkdir(npm.cache, npm.modes.exec, uid, gid, function (er) { - return cb(er, cacheStat) - }) + return mkdir(npm.cache, afterMkdir) } + fs.stat(process.env.HOME, function (er, st) { if (er) return log.er(cb, "homeless?")(er) cacheStat = st log.silly([st.uid, st.gid], "uid, gid for cache dir") - return mkdir(npm.cache, npm.modes.exec, st.uid, st.gid, function (er) { + return mkdir(npm.cache, afterMkdir) + }) + + function afterMkdir (er, made) { + if (er || !cacheStat || isNaN(cacheStat.uid) || isNaN(cacheStat.gid)) { + return cb(er, cacheStat) + } + + if (!made) return cb(er, cacheStat) + + // ensure that the ownership is correct. + chownr(made, cacheStat.uid, cacheStat.gid, function (er) { return cb(er, cacheStat) }) - }) + } } @@ -736,9 +744,20 @@ function addLocalDirectory (p, name, cb) { , tgz = placeDirect ? placed : tmptgz , doFancyCrap = p.indexOf(npm.tmp) !== 0 && p.indexOf(npm.cache) !== 0 - tar.pack(tgz, p, data, doFancyCrap, function (er) { - if (er) return log.er(cb,"couldn't pack "+p+ " to "+tgz)(er) - addLocalTarball(tgz, name, cb) + getCacheStat(function (er, cs) { + mkdir(path.dirname(tgz), function (er, made) { + if (er) return cb(er) + tar.pack(tgz, p, data, doFancyCrap, function (er) { + if (er) return log.er(cb,"couldn't pack "+p+ " to "+tgz)(er) + + if (er || !cs || isNaN(cs.uid) || isNaN(cs.gid)) return cb() + + chownr(made || tgz, cs.uid, cs.gid, function (er) { + if (er) return cb(er) + addLocalTarball(tgz, name, cb) + }) + }) + }) }) }) } @@ -747,38 +766,15 @@ function addTmpTarball (tgz, name, cb) { if (!cb) cb = name, name = "" getCacheStat(function (er, cs) { if (er) return cb(er) - return addTmpTarball_(tgz, name, cs.uid, cs.gid, cb) - }) -} - -function addTmpTarball_ (tgz, name, uid, gid, cb) { - var contents = path.dirname(tgz) - tar.unpack( tgz, path.resolve(contents, "package") - , null, null - , uid, gid - , function (er) { - if (er) { - return cb(er) - } - fs.readdir(contents, function (er, folder) { - if (er) return log.er(cb, "couldn't readdir "+contents)(er) - log.verbose(folder, "tarball contents") - if (folder.length > 1) { - folder = folder.filter(function (f) { - return !f.match(/^\.|^tmp\.tgz$/) - }) - } - if (folder.length > 1) { - log.warn(folder.slice(1).join("\n") - ,"extra junk in folder, ignoring") + var contents = path.dirname(tgz) + tar.unpack( tgz, path.resolve(contents, "package") + , null, null + , cs.uid, cs.gid + , function (er) { + if (er) { + return cb(er) } - if (!folder.length) return cb(new Error("Empty package tarball")) - folder = path.join(contents, folder[0]) - var newName = path.join(contents, "package") - fs.rename(folder, newName, function (er) { - if (er) return log.er(cb, "couldn't rename "+folder+" to package")(er) - addLocalDirectory(newName, name, cb) - }) + addLocalDirectory(path.resolve(contents, "package"), name, cb) }) }) } diff --git a/deps/npm/lib/install.js b/deps/npm/lib/install.js index ae10965cb..4da66d338 100644 --- a/deps/npm/lib/install.js +++ b/deps/npm/lib/install.js @@ -1,4 +1,3 @@ - // npm install <pkg> <pkg> <pkg> // // See doc/install.md for more description @@ -68,8 +67,9 @@ var npm = require("./npm.js") , relativize = require("./utils/relativize.js") , output , url = require("url") - , mkdir = require("./utils/mkdir-p.js") + , mkdir = require("mkdirp") , lifecycle = require("./utils/lifecycle.js") + , archy = require("archy") function install (args, cb_) { @@ -107,7 +107,7 @@ function install (args, 
cb_) { }) } - mkdir(where, function (er) { + mkdir(where, function (er, made) { if (er) return cb(er) // install dependencies locally by default, // or install current folder globally @@ -278,26 +278,46 @@ function save (where, installed, tree, pretty, cb) { // that the submodules are not immediately require()able. // TODO: Show the complete tree, ls-style, but only if --long is provided function prettify (tree, installed) { - // XXX This should match the data structure provided by npm ls --json - if (npm.config.get("json")) return JSON.stringify(tree, null, 2) - if (npm.config.get("parseable")) return parseable(installed) - return Object.keys(tree).map(function (p) { - p = tree[p] - var c = "" - if (p.children && p.children.length) { - pref = "\n" - var l = p.children.pop() - c = p.children.map(function (c) { - var gc = c.children && c.children.length - ? " (" + c.children.map(function (gc) { - return gc.what - }).join(" ") + ")" - : "" - return "\n├── " + c.what + gc - }).join("") + "\n└── " + l.what + if (npm.config.get("json")) { + function red (set, kv) { + set[kv[0]] = kv[1] + return set } - return [p.what, p.where, c].join(" ") + tree = Object.keys(tree).map(function (p) { + if (!tree[p]) return null + var what = tree[p].what.split("@") + , name = what.shift() + , version = what.join("@") + , o = { name: name, version: version, from: tree[p].from } + o.dependencies = tree[p].children.map(function P (dep) { + var what = dep.what.split("@") + , name = what.shift() + , version = what.join("@") + , o = { version: version, from: dep.from } + o.dependencies = dep.children.map(P).reduce(red, {}) + return [name, o] + }).reduce(red, {}) + return o + }) + + return JSON.stringify(tree, null, 2) + } + if (npm.config.get("parseable")) return parseable(installed) + + return Object.keys(tree).map(function (p) { + return archy({ label: tree[p].what + " " + p + , nodes: (tree[p].children || []).map(function P (c) { + if (npm.config.get("long")) { + return { label: c.what, nodes: c.children.map(P) } + } + var g = c.children.map(function (g) { + return g.what + }).join(", ") + if (g) g = " (" + g + ")" + return c.what + g + }) + }) }).join("\n") } @@ -506,6 +526,15 @@ function targetResolver (where, context, deps) { } } + // if it's identical to its parent, then it's probably someone + // doing `npm install foo` inside of the foo project. Print + // a warning, and skip it. + if (parent && parent.name === what && !npm.config.get("force")) { + log.warn("Refusing to install "+what+" as a dependency of itself" + ,"install") + return cb(null, []) + } + if (wrap) { name = what.split(/@/).shift() if (wrap[name]) { @@ -723,8 +752,16 @@ function checkCycle (target, ancestors, cb) { // A more correct, but more complex, solution would be to symlink // the deeper thing into the new location. // Will do that if anyone whines about this irl. - - var p = Object.getPrototypeOf(ancestors) + // + // Note: `npm install foo` inside of the `foo` package will abort + // earlier if `--force` is not set. However, if it IS set, then + // we need to still fail here, but just skip the first level. Of + // course, it'll still fail eventually if it's a true cycle, and + // leave things in an undefined state, but that's what is to be + // expected when `--force` is used. That is why getPrototypeOf + // is used *twice* here: to skip the first level of repetition. 
+ + var p = Object.getPrototypeOf(Object.getPrototypeOf(ancestors)) , name = target.name , version = target.version while (p && p !== Object.prototype && p[name] !== version) { diff --git a/deps/npm/lib/ls.js b/deps/npm/lib/ls.js index 274d0f22b..99376db11 100644 --- a/deps/npm/lib/ls.js +++ b/deps/npm/lib/ls.js @@ -13,6 +13,7 @@ var npm = require("./npm.js") , log = require("./utils/log.js") , relativize = require("./utils/relativize.js") , path = require("path") + , archy = require("archy") ls.usage = "npm ls" @@ -43,13 +44,22 @@ function ls (args, silent, cb) { } return o }, 2) - } else { - out = makePretty(bfsify(data), long, dir).join("\n") + } else if (npm.config.get("parseable")) { + out = makeParseable(bfsify(data), long, dir) + } else if (data) { + out = makeArchy(bfsify(data), long, dir) } output.write(out, function (er) { cb(er, data, lite) }) }) } +function alphasort (a, b) { + a = a.toLowerCase() + b = b.toLowerCase() + return a > b ? 1 + : a < b ? -1 : 0 +} + function getLite (data, noname) { var lite = {} , maxDepth = npm.config.get("depth") @@ -147,103 +157,115 @@ function bfsify (root, current, queue, seen) { } -function makePretty (data, long, dir, prefix, list) { - var top = !list - list = list || [] - prefix = prefix || "" - list.push(format(data, long, prefix, dir)) - var deps = data.dependencies || {} - , childPref = prefix.split("├─").join("│ ") - .split("└─").join(" ") - , depList = Object.keys(deps) - , depLast = depList.length - 1 - , maxDepth = npm.config.get("depth") - Object.keys(deps).sort(function (a, b) { - return a > b ? 1 : -1 - }).forEach(function (d, i) { - var depData = deps[d] - if (typeof depData === "string") { - if (data.depth < maxDepth) { - var p = data.link || data.path - log.warn("Unmet dependency in "+p, d+" "+deps[d]) - depData = npm.config.get("parseable") - ? ( npm.config.get("long") - ? path.resolve(data.path, "node_modules", d) - + ":"+d+"@"+JSON.stringify(depData)+":INVALID:MISSING" - : "" ) - : "─ \033[31;40mUNMET DEPENDENCY\033[0m "+d+" "+depData - } else { - if (npm.config.get("parseable")) { - depData = path.resolve(data.path, "node_modules", d) - + (npm.config.get("long") - ? ":" + d + "@" + JSON.stringify(depData) - + ":" // no realpath resolved - + ":MAXDEPTH" - : "") - } else { - depData = "─ "+d+"@'"+depData +"' (max depth reached)" - } - } - } - var c = i === depLast ? "└─" : "├─" - makePretty(depData, long, dir, childPref + c, list) - }) - if (top && list.length === 1 && !data._id) { - if (!npm.config.get("parseable")) { - list.push("(empty)") - } else if (npm.config.get("long")) list[0] += ":EMPTY" - } - return list.filter(function (l) { return l && l.trim() }) +function makeArchy (data, long, dir) { + var out = makeArchy_(data, long, dir, 0) + return archy(out, "", { unicode: npm.config.get("unicode") }) } -function ugly (data) { +function makeArchy_ (data, long, dir, depth, parent, d) { if (typeof data === "string") { + if (depth < npm.config.get("depth")) { + // just missing + var p = parent.link || parent.path + log.warn("Unmet dependency in "+p, d+" "+data) + data = "\033[31;40mUNMET DEPENDENCY\033[0m " + d + " " + data + } else { + data = d+"@'"+ data +"' (max depth reached)" + } return data } - if (!npm.config.get("long")) return data.path - return data.path - + ":" + (data._id || "") - + ":" + (data.realPath !== data.path ? data.realPath : "") - + (data.extraneous ? ":EXTRANEOUS" : "") - + (data.invalid ? ":INVALID" : "") -} + var out = {} + // the top level is a bit special. + out.label = data._id ? 
data._id + " " : "" + if (data.link) out.label += "-> " + data.link -function format (data, long, prefix, dir) { - if (npm.config.get("parseable")) return ugly(data) - if (typeof data === "string") { - return prefix + data - } -// console.log([data.path, dir], "relativize") - var depLen = Object.keys(data.dependencies).length - , space = prefix.split("├─").join("│ ") - .split("└─").join(" ") - + (depLen ? "" : " ") - , rel = relativize(data.path || "", dir) - , l = prefix - + (rel === "." ? "" : depLen ? "┬ " : "─ ") - + (data._id ? data._id + " " : "") - + (data.link ? "-> " + data.link : "") + "" - + (rel === "." && !(long && data._id) ? dir : "") if (data.invalid) { if (data.realName !== data.name) l += " ("+data.realName+")" - l += " \033[31;40minvalid\033[0m" + out.label += " \033[31;40minvalid\033[0m" } - if (data.extraneous && rel !== ".") { - l += " \033[32;40mextraneous\033[0m" + + if (data.extraneous && data.path !== dir) { + out.label += " \033[32;40mextraneous\033[0m" } - if (!long || !data._id) return l + + if (long) { + if (dir === data.path) out.label += "\n" + dir + out.label += "\n" + getExtras(data, dir) + } else if (dir === data.path) { + out.label += dir + } + + // now all the children. + out.nodes = Object.keys(data.dependencies || {}) + .sort(alphasort).map(function (d) { + return makeArchy_(data.dependencies[d], long, dir, depth + 1, data, d) + }) + + if (out.nodes.length === 0 && data.path === dir) { + out.nodes = ["(empty)"] + } + + return out +} + +function getExtras (data, dir) { var extras = [] - if (rel !== ".") extras.push(rel) - else extras.push(dir) + , rel = relativize(data.path || "", dir) + , url = require("url") + if (data.description) extras.push(data.description) if (data.repository) extras.push(data.repository.url) if (data.homepage) extras.push(data.homepage) - extras = extras.filter(function (e) { return e }) - var lastExtra = !depLen && extras.length - 1 - l += extras.map(function (e, i) { - var indent = !depLen ? " " : "│ " - return "\n" + space + indent + e - }).join("") - return l + if (data._from) { + var from = data._from + if (from.indexOf(data.name + "@") === 0) { + from = from.substr(data.name.length + 1) + } + var u = url.parse(from) + if (u.protocol) extras.push(from) + } + return extras.join("\n") +} + + +function makeParseable (data, long, dir, depth, parent, d) { + depth = depth || 0 + + return [ makeParseable_(data, long, dir, depth, parent, d) ] + .concat(Object.keys(data.dependencies || {}) + .sort(alphasort).map(function (d) { + return makeParseable(data.dependencies[d], long, dir, depth + 1, data, d) + })) + .join("\n") +} + +function makeParseable_ (data, long, dir, depth, parent, d) { + if (typeof data === "string") { + if (data.depth < npm.config.get("depth")) { + var p = parent.link || parent.path + log.warn("Unmet dependency in "+p, d+" "+data) + data = npm.config.get("long") + ? path.resolve(parent.path, "node_modules", d) + + ":"+d+"@"+JSON.stringify(data)+":INVALID:MISSING" + : "" + } else { + data = path.resolve(data.path, "node_modules", d) + + (npm.config.get("long") + ? ":" + d + "@" + JSON.stringify(data) + + ":" // no realpath resolved + + ":MAXDEPTH" + : "") + } + + return data + } + + if (!npm.config.get("long")) return data.path + + return data.path + + ":" + (data._id || "") + + ":" + (data.realPath !== data.path ? data.realPath : "") + + (data.extraneous ? ":EXTRANEOUS" : "") + + (data.invalid ? 
":INVALID" : "") } diff --git a/deps/npm/lib/npm.js b/deps/npm/lib/npm.js index afce0f9f2..456948a29 100644 --- a/deps/npm/lib/npm.js +++ b/deps/npm/lib/npm.js @@ -25,8 +25,8 @@ var EventEmitter = require("events").EventEmitter , which = require("which") , semver = require("semver") , findPrefix = require("./utils/find-prefix.js") - , getUid = require("./utils/uid-number.js") - , mkdir = require("./utils/mkdir-p.js") + , getUid = require("uid-number") + , mkdir = require("mkdirp") , slide = require("slide") , chain = slide.chain @@ -40,30 +40,6 @@ npm.ECYCLE = {} npm.ENOTSUP = {} npm.EBADPLATFORM = {} -// HACK for windows -if (process.platform === "win32") { - // stub in unavailable methods from process and fs binding - if (!process.getuid) process.getuid = function() {} - if (!process.getgid) process.getgid = function() {} - var fsBinding = process.binding("fs") - if (!fsBinding.chown) fsBinding.chown = function() { - var cb = arguments[arguments.length - 1] - if (typeof cb == "function") cb() - } - - // patch rename/renameSync, but this should really be fixed in node - var _fsRename = fs.rename - , _fsPathPatch - _fsPathPatch = function(p) { - return p && p.replace(/\\/g, "/") || p; - } - fs.rename = function(p1, p2) { - arguments[0] = _fsPathPatch(p1) - arguments[1] = _fsPathPatch(p2) - return _fsRename.apply(fs, arguments); - } -} - try { // startup, ok to do this synchronously var j = JSON.parse(fs.readFileSync( @@ -310,7 +286,7 @@ function loadPrefix (npm, conf, cb) { }) // the prefix MUST exist, or else nothing works. if (!npm.config.get("global")) { - mkdir(p, npm.modes.exec, null, null, true, next) + mkdir(p, next) } else { next(er) } @@ -323,7 +299,7 @@ function loadPrefix (npm, conf, cb) { , enumerable : true }) // the prefix MUST exist, or else nothing works. - mkdir(gp, npm.modes.exec, null, null, true, next) + mkdir(gp, next) }) var i = 2 diff --git a/deps/npm/lib/uninstall.js b/deps/npm/lib/uninstall.js index 1b43607d6..655e5eb96 100644 --- a/deps/npm/lib/uninstall.js +++ b/deps/npm/lib/uninstall.js @@ -40,7 +40,12 @@ function uninstall (args, cb) { function uninstall_ (args, nm, cb) { asyncMap(args, function (arg, cb) { - var p = path.resolve(nm, arg) + // uninstall .. should not delete /usr/local/lib/node_modules/.. 
+ var p = path.join(path.resolve(nm), path.join("/", arg)) + if (path.resolve(p) === nm) { + log.warn(arg, "uninstall: invalid argument") + return cb(null, []) + } fs.lstat(p, function (er) { if (er) { log.warn(arg, "Not installed in "+nm) diff --git a/deps/npm/lib/utils/cmd-shim.js b/deps/npm/lib/utils/cmd-shim.js index f53ab3cf8..e24da36f6 100644 --- a/deps/npm/lib/utils/cmd-shim.js +++ b/deps/npm/lib/utils/cmd-shim.js @@ -14,7 +14,7 @@ cmdShim.ifExists = cmdShimIfExists var fs = require("graceful-fs") , chain = require("slide").chain - , mkdir = require("./mkdir-p.js") + , mkdir = require("mkdirp") , rm = require("rimraf") , log = require("./log.js") , path = require("path") diff --git a/deps/npm/lib/utils/completion/file-completion.js b/deps/npm/lib/utils/completion/file-completion.js index 427efefb4..c1c241d68 100644 --- a/deps/npm/lib/utils/completion/file-completion.js +++ b/deps/npm/lib/utils/completion/file-completion.js @@ -1,7 +1,7 @@ module.exports = fileCompletion var find = require("../find.js") - , mkdir = require("../mkdir-p.js") + , mkdir = require("mkdirp") , path = require("path") function fileCompletion (root, req, depth, cb) { diff --git a/deps/npm/lib/utils/excludes.js b/deps/npm/lib/utils/excludes.js deleted file mode 100644 index 83935ee35..000000000 --- a/deps/npm/lib/utils/excludes.js +++ /dev/null @@ -1,159 +0,0 @@ -// build up a set of exclude lists in order of precedence: -// [ ["!foo", "bar"] -// , ["foo", "!bar"] ] -// being *included* will override a previous exclusion, -// and being excluded will override a previous inclusion. -// -// Each time the tar file-list generator thingie enters a new directory, -// it calls "addIgnoreFile(dir, list, cb)". If an ignore file is found, -// then it is added to the list and the cb() is called with an -// child of the original list, so that we don't have -// to worry about popping it off at the right time, since other -// directories will continue to use the original parent list. -// -// If no ignore file is found, then the original list is returned. -// -// To start off with, ~/.{npm,git}ignore is added, as is -// prefix/{npm,git}ignore, effectively treated as if they were in the -// base package directory. - -exports.addIgnoreFile = addIgnoreFile -exports.readIgnoreFile = readIgnoreFile -exports.parseIgnoreFile = parseIgnoreFile -exports.test = test -exports.filter = filter - -var path = require("path") - , fs = require("graceful-fs") - , minimatch = require("minimatch") - , relativize = require("./relativize.js") - , log = require("./log.js") - -// todo: memoize - -// read an ignore file, or fall back to the -// "gitBase" file in the same directory. -function readIgnoreFile (file, gitBase, cb) { - //log.warn(file, "ignoreFile") - if (!file) return cb(null, "") - fs.readFile(file, function (er, data) { - if (!er || !gitBase) return cb(null, data || "") - var gitFile = path.resolve(path.dirname(file), gitBase) - fs.readFile(gitFile, function (er, data) { - return cb(null, data || "") - }) - }) -} - -// read a file, and then return the list of patterns -function parseIgnoreFile (file, gitBase, dir, cb) { - readIgnoreFile(file, gitBase, function (er, data) { - data = data ? data.toString("utf8") : "" - - data = data.split(/[\r\n]+/).map(function (p) { - return p.trim() - }).filter(function (p) { - return p.length && p.charAt(0) !== "#" - }) - data.dir = dir - return cb(er, data) - }) -} - -// add an ignore file to an existing list which can -// then be passed to the test() function. 
If the ignore -// file doesn't exist, then the list is unmodified. If -// it is, then a concat-child of the original is returned, -// so that this is suitable for walking a directory tree. -function addIgnoreFile (file, gitBase, list, dir, cb) { - if (typeof cb !== "function") cb = dir, dir = path.dirname(file) - if (typeof cb !== "function") cb = list, list = [] - parseIgnoreFile(file, gitBase, dir, function (er, data) { - if (!er && data) { - // package.json "files" array trumps everything - // Make sure it's always last. - if (list.length && list[list.length-1].packageFiles) { - list = list.concat([data, list.pop()]) - } else { - list = list.concat([data]) - } - } - cb(er, list) - }) -} - - -// no IO -// loop through the lists created in the functions above, and test to -// see if a file should be included or not, given those exclude lists. -function test (file, excludeList) { - if (path.basename(file) === "package.json") return true - // log.warn(file, "test file") - // log.warn(excludeList, "test list") - var incRe = /^\!(\!\!)*/ - , excluded = false - , mmconf = { matchBase: true, dot: true } - for (var i = 0, l = excludeList.length; i < l; i ++) { - var excludes = excludeList[i] - , dir = excludes.dir - - // chop the filename down to be relative to excludeDir - var rf = relativize(file, dir, true) - rf = rf.replace(/^\.?\//, "") - if (file.slice(-1) === "/") rf += "/" - - // log.warn([file, rf], "rf") - - for (var ii = 0, ll = excludes.length; ii < ll; ii ++) { - var ex = excludes[ii].replace(/^(!*)\//, "$1") - , inc = !!ex.match(incRe) - - // log.warn([ex, rf], "ex, rf") - // excluding/including a dir excludes/includes all the files in it. - if (ex.slice(-1) === "/") ex += "**" - - // if this is not an inclusion attempt, and someone else - // excluded it, then just continue, because there's nothing - // that can be done here to change the exclusion. - if (!inc && excluded) continue - - // if it's an inclusion attempt, and the file has not been - // excluded, then skip it, because there's no need to try again. - if (inc && !excluded) continue - - // if it matches the pattern, then it should be excluded. 
- excluded = !!minimatch(rf, ex, mmconf) - // log.error([rf, ex, excluded], "rf, ex, excluded") - - // if you include foo, then it also includes foo/bar.js - if (inc && excluded && ex.slice(-3) !== "/**") { - excluded = minimatch(rf, ex + "/**", mmconf) - // log.warn([rf, ex + "/**", inc, excluded], "dir without /") - } - - // if you exclude foo, then it also excludes foo/bar.js - if (!inc - && excluded - && ex.slice(-3) !== "/**" - && rf.slice(-1) === "/" - && excludes.indexOf(ex + "/**") === -1) { - // log.warn(ex + "/**", "adding dir-matching exclude pattern") - excludes.splice(ii, 1, ex, ex + "/**") - ll ++ - } - } - - // log.warn([rf, excluded, excludes], "rf, excluded, excludes") - } - // true if it *should* be included - // log.warn([file, excludeList, excluded], "file, excluded") - return !excluded -} - -// returns a function suitable for Array#filter -function filter (dir, list) { return function (file) { - file = file.trim() - var testFile = path.resolve(dir, file) - if (file.slice(-1) === "/") testFile += "/" - return file && test(testFile, list) -}} diff --git a/deps/npm/lib/utils/fetch.js b/deps/npm/lib/utils/fetch.js index 935e82039..0ece53cab 100644 --- a/deps/npm/lib/utils/fetch.js +++ b/deps/npm/lib/utils/fetch.js @@ -8,7 +8,8 @@ var request = require("request") , url = require("url") , log = require("./log.js") , path = require("path") - , mkdir = require("./mkdir-p.js") + , mkdir = require("mkdirp") + , chownr = require("chownr") , regHost module.exports = fetch @@ -16,7 +17,7 @@ module.exports = fetch function fetch (remote, local, headers, cb) { if (typeof cb !== "function") cb = headers, headers = {} log.verbose(local, "fetch to") - mkdir(path.dirname(local), function (er) { + mkdir(path.dirname(local), function (er, made) { if (er) return cb(er) fetch_(remote, local, headers, cb) }) diff --git a/deps/npm/lib/utils/link.js b/deps/npm/lib/utils/link.js index 918481068..7fa80d5e1 100644 --- a/deps/npm/lib/utils/link.js +++ b/deps/npm/lib/utils/link.js @@ -4,7 +4,7 @@ link.ifExists = linkIfExists var fs = require("graceful-fs") , chain = require("slide").chain - , mkdir = require("./mkdir-p.js") + , mkdir = require("mkdirp") , rm = require("./gently-rm.js") , log = require("./log.js") , path = require("path") diff --git a/deps/npm/lib/utils/mkdir-p.js b/deps/npm/lib/utils/mkdir-p.js deleted file mode 100644 index cc2b465fb..000000000 --- a/deps/npm/lib/utils/mkdir-p.js +++ /dev/null @@ -1,191 +0,0 @@ - -var log = require("./log.js") - , fs = require("graceful-fs") - , path = require("path") - , npm = require("../npm.js") - , exec = require("./exec.js") - , uidNumber = require("./uid-number.js") - , umask = process.umask() - , umaskOrig = umask - , addedUmaskExit = false - , mkdirCache = {} - -module.exports = mkdir -function mkdir (ensure, mode, uid, gid, noChmod, cb_) { - if (typeof cb_ !== "function") cb_ = noChmod, noChmod = null - if (typeof cb_ !== "function") cb_ = gid, gid = null - if (typeof cb_ !== "function") cb_ = uid, uid = null - if (typeof cb_ !== "function") cb_ = mode, mode = npm.modes.exec - - if (mode & umask) { - log.verbose(mode.toString(8), "umasking from "+umask.toString(8)) - process.umask(umask = 0) - if (!addedUmaskExit) { - addedUmaskExit = true - process.on("exit", function () { process.umask(umask = umaskOrig) }) - } - } - - ensure = path.resolve(ensure).replace(/\/+$/, '') - - // mkdir("/") should not do anything, since that always exists. 
- if (!ensure - || ( process.platform === "win32" - && ensure.match(/^[a-zA-Z]:(\\|\/)?$/))) { - return cb_() - } - - if (mkdirCache.hasOwnProperty(ensure)) { - return mkdirCache[ensure].push(cb_) - } - mkdirCache[ensure] = [cb_] - - function cb (er) { - var cbs = mkdirCache[ensure] - delete mkdirCache[ensure] - cbs.forEach(function (c) { c(er) }) - } - - if (uid === null && gid === null) { - return mkdir_(ensure, mode, uid, gid, noChmod, cb) - } - - uidNumber(uid, gid, function (er, uid, gid) { - if (er) return cb(er) - mkdir_(ensure, mode, uid, gid, noChmod, cb) - }) -} - -function mkdir_ (ensure, mode, uid, gid, noChmod, cb) { - // if it's already a dir, then just check the bits and owner. - fs.stat(ensure, function (er, s) { - if (s && s.isDirectory()) { - // check mode, uid, and gid. - if ((noChmod || (s.mode & mode) === mode) - && (typeof uid !== "number" || s.uid === uid) - && (typeof gid !== "number" || s.gid === gid)) return cb() - return done(ensure, mode, uid, gid, noChmod, cb) - } - return walkDirs(ensure, mode, uid, gid, noChmod, cb) - }) -} - -function done (ensure, mode, uid, gid, noChmod, cb) { - // now the directory has been created. - // chown it to the desired uid/gid - // Don't chown the npm.root dir, though, in case we're - // in unsafe-perm mode. - log.verbose("done: "+ensure+" "+mode.toString(8), "mkdir") - - // only chmod if noChmod isn't set. - var d = done_(ensure, mode, uid, gid, cb) - if (noChmod) return d() - fs.chmod(ensure, mode, d) -} - -function done_ (ensure, mode, uid, gid, cb) { - return function (er) { - if (er - || ensure === npm.dir - || typeof uid !== "number" - || typeof gid !== "number" - || npm.config.get("unsafe-perm")) return cb(er) - uid = Math.floor(uid) - gid = Math.floor(gid) - fs.chown(ensure, uid, gid, cb) - } -} - -var pathSplit = process.platform === "win32" ? /\/|\\/ : "/" -function walkDirs (ensure, mode, uid, gid, noChmod, cb) { - var dirs = ensure.split(pathSplit) - , walker = [] - , foundUID = null - , foundGID = null - - // gobble the "/" or C: first - walker.push(dirs.shift()) - - // The loop that goes through and stats each dir. - ;(function S (d) { - // no more directory steps left. - if (d === undefined) { - // do the chown stuff - return done(ensure, mode, uid, gid, noChmod, cb) - } - - // get the absolute dir for the next piece being stat'd - walker.push(d) - var dir = walker.join(path.SPLIT_CHAR) - - // stat callback lambda - fs.stat(dir, function STATCB (er, s) { - if (er) { - // the stat failed - directory does not exist. - - log.verbose(er.message, "mkdir (expected) error") - - // use the same uid/gid as the nearest parent, if not set. - if (foundUID !== null) uid = foundUID - if (foundGID !== null) gid = foundGID - - // make the directory - fs.mkdir(dir, mode, function MKDIRCB (er) { - // since stat and mkdir are done as two separate syscalls, - // operating on a path rather than a file descriptor, it's - // possible that the directory didn't exist when we did - // the stat, but then *did* exist when we go to to the mkdir. - // If we didn't care about uid/gid, we could just mkdir - // repeatedly, failing on any error other than "EEXIST". - if (er && er.message.indexOf("EEXIST") === 0) { - return fs.stat(dir, STATCB) - } - - // any other kind of error is not saveable. - if (er) return cb(er) - - // at this point, we've just created a new directory successfully. 
- - // if we care about permissions - if (!npm.config.get("unsafe-perm") // care about permissions - // specified a uid and gid - && uid !== null - && gid !== null ) { - // set the proper ownership - return fs.chown(dir, uid, gid, function (er) { - if (er) return cb(er) - // attack the next portion of the path. - S(dirs.shift()) - }) - } else { - // either we don't care about ownership, or it's already right. - S(dirs.shift()) - } - }) // mkdir - - } else { - // the stat succeeded. - if (s.isDirectory()) { - // if it's a directory, that's good. - // if the uid and gid aren't already set, then try to preserve - // the ownership on up the tree. Things in ~ remain owned by - // the user, things in / remain owned by root, etc. - if (uid === null && typeof s.uid === "number") foundUID = s.uid - if (gid === null && typeof s.gid === "number") foundGID = s.gid - - // move onto next portion of path - S(dirs.shift()) - - } else { - // the stat succeeded, but it's not a directory - log.verbose(dir, "mkdir exists") - log.silly(s, "stat("+dir+")") - log.verbose(s.isDirectory(), "isDirectory()") - cb(new Error("Failed to mkdir "+dir+": File exists")) - }// if (isDirectory) else - } // if (stat failed) else - }) // stat - - // start the S function with the first item in the list of directories. - })(dirs.shift()) -} diff --git a/deps/npm/lib/utils/npm-registry-client/get.js b/deps/npm/lib/utils/npm-registry-client/get.js index 49a8b4cc0..e0902f027 100644 --- a/deps/npm/lib/utils/npm-registry-client/get.js +++ b/deps/npm/lib/utils/npm-registry-client/get.js @@ -6,8 +6,9 @@ var GET = require("./request.js").GET , npm = require("../../npm.js") , path = require("path") , log = require("../log.js") - , mkdir = require("../mkdir-p.js") + , mkdir = require("mkdirp") , cacheStat = null + , chownr = require("chownr") function get (project, version, timeout, nofollow, staleOk, cb) { if (typeof cb !== "function") cb = staleOk, staleOk = false @@ -173,13 +174,13 @@ function saveToCache (cache, data, saved) { } function saveToCache_ (cache, data, uid, gid, saved) { - mkdir(path.dirname(cache), npm.modes.exec, uid, gid, function (er) { + mkdir(path.dirname(cache), function (er, made) { if (er) return saved() fs.writeFile(cache, JSON.stringify(data), function (er) { if (er || uid === null || gid === null) { return saved() } - fs.chown(cache, uid, gid, saved) + chownr(made || cache, uid, gid, saved) }) }) } diff --git a/deps/npm/lib/utils/read-json.js b/deps/npm/lib/utils/read-json.js index 20461e4cf..d1bba10f5 100644 --- a/deps/npm/lib/utils/read-json.js +++ b/deps/npm/lib/utils/read-json.js @@ -52,27 +52,11 @@ function readJson (jsonFile, opts, cb) { if (er) return hasGyp(false) // see if there are any *.gyp files in there. - // If there are, then copy them to binding.gyp - // if there are not, then just proceed without gf = gf.filter(function (f) { return f.match(/\.gyp$/) }) gf = gf[0] - if (!gf) return hasGyp(false) - if (gf === "binding.gyp") return hasGyp(true) - - // need to rename. windows is annoying. - // why not fs.rename? because we just saw the file, so it'll - // be cached for potentially several seconds on a network share. 
- return fs.readFile(path.resolve(pkgdir, gf), function (er, d) { - if (er) return hasGyp(false) - fs.writeFile(path.resolve(pkgdir, "binding.gyp"), d, function (er) { - if (er) return hasGyp(false) - fs.unlink(path.resolve(pkgdir, gf), function (er) { - return hasGyp(!er) - }) - }) - }) + return hasGyp(!!gf) }) } @@ -262,11 +246,12 @@ function typoWarn (json) { } if (typeof json.bugs === "object") { + // just go ahead and correct these. Object.keys(bugsTypos).forEach(function (d) { if (json.bugs.hasOwnProperty(d)) { - log.warn( "package.json: bugs['" + d + "'] should probably be " - + "bugs['" + bugsTypos[d] + "']", json._id) - } + json.bugs[ bugsTypos[d] ] = json.bugs[d] + delete json.bugs[d] + } }) } diff --git a/deps/npm/lib/utils/tar.js b/deps/npm/lib/utils/tar.js index 9a97ee280..415eb7f9e 100644 --- a/deps/npm/lib/utils/tar.js +++ b/deps/npm/lib/utils/tar.js @@ -1,27 +1,21 @@ -// XXX lib/cache.js and this file need to be rewritten. - // commands for packing and unpacking tarballs // this file is used by lib/cache.js var npm = require("../npm.js") , fs = require("graceful-fs") - , exec = require("./exec.js") - , find = require("./find.js") - , mkdir = require("./mkdir-p.js") - , asyncMap = require("slide").asyncMap , path = require("path") , log = require("./log.js") - , uidNumber = require("./uid-number.js") + , uidNumber = require("uid-number") , rm = require("rimraf") , readJson = require("./read-json.js") , relativize = require("./relativize.js") , cache = require("../cache.js") - , excludes = require("./excludes.js") , myUid = process.getuid && process.getuid() , myGid = process.getgid && process.getgid() , tar = require("tar") , zlib = require("zlib") , fstream = require("fstream") + , Packer = require("fstream-npm") if (process.env.SUDO_UID && myUid === 0) { if (!isNaN(process.env.SUDO_UID)) myUid = +process.env.SUDO_UID @@ -30,91 +24,16 @@ if (process.env.SUDO_UID && myUid === 0) { exports.pack = pack exports.unpack = unpack -exports.makeList = makeList function pack (targetTarball, folder, pkg, dfc, cb) { + log.verbose([targetTarball, folder], "tar.pack") if (typeof cb !== "function") cb = dfc, dfc = true - folder = path.resolve(folder) - - log.verbose(folder, "pack") - - if (typeof pkg === "function") { - cb = pkg, pkg = null - return readJson(path.resolve(folder, "package.json"), function (er, pkg) { - if (er) return log.er(cb, "Couldn't find package.json in "+folder)(er) - pack(targetTarball, folder, pkg, dfc, cb) - }) - } - log.verbose(folder+" "+targetTarball, "pack") - var parent = path.dirname(folder) - , addFolder = path.basename(folder) - - var confEx = npm.config.get("ignore") - log.silly(folder, "makeList") - makeList(folder, pkg, dfc, function (er, files, cleanup) { - if (er) return cb(er) - // log.silly(files, "files") - return packFiles(targetTarball, parent, files, pkg, function (er) { - if (!cleanup || !cleanup.length) return cb(er) - // try to be a good citizen, even/especially in the event of failure. 
- cleanupResolveLinkDep(cleanup, function (er2) { - if (er || er2) { - if (er) log(er, "packing tarball") - if (er2) log(er2, "while cleaning up resolved deps") - } - return cb(er || er2) - }) - }) - }) -} - -function packFiles (targetTarball, parent, files, pkg, cb_) { - - var p - - files = files.map(function (f) { - p = f.split(/\/|\\/)[0] - return path.resolve(parent, f) - }) - - parent = path.resolve(parent, p) - - var called = false - function cb (er) { - if (called) return - called = true - cb_(er) - } log.verbose(targetTarball, "tarball") - log.verbose(parent, "parent") - fstream.Reader({ type: "Directory" - , path: parent - , filter: function () { - // files should *always* get into tarballs - // in a user-writable state, even if they're - // being installed from some wackey vm-mounted - // read-only filesystem. - this.props.mode = this.props.mode | 0200 - var inc = -1 !== files.indexOf(this.path) + log.verbose(folder, "folder") + new Packer({ path: folder, type: "Directory", isDirectory: true }) + .on("error", log.er(cb, "error reading "+folder)) - // WARNING! Hackety hack! - // XXX Fix this in a better way. - // Rename .gitignore to .npmignore if there is not a - // .npmignore file there already, the better to lock - // down installed packages with git for deployment. - if (this.basename === ".gitignore") { - if (this.parent._entries.indexOf(".npmignore") !== -1) { - return false - } - var d = path.dirname(this.path) - this.basename = ".npmignore" - this.path = path.join(d, ".npmignore") - } - return inc - } - }) - .on("error", log.er(cb, "error reading "+parent)) // By default, npm includes some proprietary attributes in the // package tarball. This is sane, and allowed by the spec. // However, npm *itself* excludes these from its own package, @@ -126,11 +45,14 @@ function packFiles (targetTarball, parent, files, pkg, cb_) { .on("error", log.er(cb, "gzip error "+targetTarball)) .pipe(fstream.Writer({ type: "File", path: targetTarball })) .on("error", log.er(cb, "Could not write "+targetTarball)) - .on("close", cb) + .on("close", function () { + cb() + }) } function unpack (tarball, unpackTarget, dMode, fMode, uid, gid, cb) { + log.verbose(tarball, "unpack") if (typeof cb !== "function") cb = gid, gid = null if (typeof cb !== "function") cb = uid, uid = null if (typeof cb !== "function") cb = fMode, fMode = npm.modes.file @@ -143,52 +65,24 @@ function unpack (tarball, unpackTarget, dMode, fMode, uid, gid, cb) { } function unpack_ ( tarball, unpackTarget, dMode, fMode, uid, gid, cb ) { - // If the desired target is /path/to/foo, - // then unpack into /path/to/.foo.npm/{something} - // rename that to /path/to/foo, and delete /path/to/.foo.npm var parent = path.dirname(unpackTarget) , base = path.basename(unpackTarget) rm(unpackTarget, function (er) { if (er) return cb(er) - mkdir(unpackTarget, dMode || npm.modes.exec, uid, gid, function (er) { - log.verbose([uid, gid], "unpack_ uid, gid") - log.verbose(unpackTarget, "unpackTarget") + // gzip {tarball} --decompress --stdout \ + // | tar -mvxpf - --strip-components=1 -C {unpackTarget} + gunzTarPerm( tarball, unpackTarget + , dMode, fMode + , uid, gid + , function (er, folder) { if (er) return cb(er) - - // cp the gzip of the tarball, pipe the stdout into tar's stdin - // gzip {tarball} --decompress --stdout \ - // | tar -mvxpf - --strip-components=1 -C {unpackTarget} - gunzTarPerm( tarball, unpackTarget - , dMode, fMode - , uid, gid - , function (er, folder) { - if (er) return cb(er) - log.verbose(folder, "gunzed") - 
readJson(path.resolve(folder, "package.json"), cb) - }) + readJson(path.resolve(folder, "package.json"), cb) }) }) } -// on Windows, A/V software can lock the directory, causing this -// to fail with an EACCES. Try again on failure, for up to 1 second. -// XXX Fix this by not unpacking into a temp directory, instead just -// renaming things on the way out of the tarball. -function moveIntoPlace (folder, unpackTarget, cb) { - var start = Date.now() - fs.rename(folder, unpackTarget, function CB (er) { - if (er - && process.platform === "win32" - && er.code === "EACCES" - && Date.now() - start < 1000) { - return fs.rename(folder, unpackTarget, CB) - } - cb(er) - }) -} - function gunzTarPerm (tarball, target, dMode, fMode, uid, gid, cb_) { if (!dMode) dMode = npm.modes.exec @@ -212,6 +106,7 @@ function gunzTarPerm (tarball, target, dMode, fMode, uid, gid, cb_) { } function extractEntry (entry) { + log.silly(entry.path, "extracting entry") // never create things that are user-unreadable, // or dirs that are user-un-listable. Only leads to headaches. var originalMode = entry.mode = entry.mode || entry.props.mode @@ -233,6 +128,25 @@ function gunzTarPerm (tarball, target, dMode, fMode, uid, gid, cb_) { var extractOpts = { type: "Directory", path: target, strip: 1 } + if (process.platform !== "win32" && + typeof uid === "number" && + typeof gid === "number") { + extractOpts.uid = uid + extractOpts.gid = gid + } + + extractOpts.filter = function () { + // symbolic links are not allowed in packages. + if (this.type.match(/^.*Link$/)) { + log.warn( this.path.substr(target.length + 1) + + ' -> ' + this.linkpath + , "excluding symbolic link") + return false + } + return true + } + + fst.on("error", log.er(cb, "error reading "+tarball)) fst.on("data", function OD (c) { // detect what it is. @@ -242,7 +156,6 @@ function gunzTarPerm (tarball, target, dMode, fMode, uid, gid, cb_) { if (c[0] === 0x1F && c[1] === 0x8B && c[2] === 0x08) { - var extracter = tar.Extract(extractOpts) fst .pipe(zlib.Unzip()) .on("error", log.er(cb, "unzip error "+tarball)) @@ -259,8 +172,17 @@ function gunzTarPerm (tarball, target, dMode, fMode, uid, gid, cb_) { .on("close", cb) } else { // naked js file + var jsOpts = { path: path.resolve(target, "index.js") } + + if (process.platform !== "win32" && + typeof uid === "number" && + typeof gid === "number") { + jsOpts.uid = uid + jsOpts.gid = gid + } + fst - .pipe(fstream.Writer({ path: path.resolve(target, "index.js") })) + .pipe(fstream.Writer(jsOpts)) .on("error", log.er(cb, "copy error "+tarball)) .on("close", function () { var j = path.resolve(target, "package.json") @@ -279,327 +201,3 @@ function gunzTarPerm (tarball, target, dMode, fMode, uid, gid, cb_) { fst.emit("data", c) }) } - -function makeList (dir, pkg, dfc, cb) { - if (typeof cb !== "function") cb = dfc, dfc = true - if (typeof cb !== "function") cb = pkg, pkg = null - dir = path.resolve(dir) - - if (!pkg.path) pkg.path = dir - - var name = path.basename(dir) - - // since this is a top-level traversal, get the user and global - // exclude files, as well as the "ignore" config setting. 
- var confIgnore = npm.config.get("ignore").trim() - .split(/[\n\r\s\t]+/) - .filter(function (i) { return i.trim() }) - , userIgnore = npm.config.get("userignorefile") - , globalIgnore = npm.config.get("globalignorefile") - , userExclude - , globalExclude - - confIgnore.dir = dir - confIgnore.name = "confIgnore" - - var defIgnore = ["build/"] - defIgnore.dir = dir - - // TODO: only look these up once, and cache outside this function - excludes.parseIgnoreFile( userIgnore, null, dir - , function (er, uex) { - if (er) return cb(er) - userExclude = uex - next() - }) - - excludes.parseIgnoreFile( globalIgnore, null, dir - , function (er, gex) { - if (er) return cb(er) - globalExclude = gex - next() - }) - - function next () { - if (!globalExclude || !userExclude) return - var exList = [ defIgnore, confIgnore, globalExclude, userExclude ] - - makeList_(dir, pkg, exList, dfc, function (er, files, cleanup) { - if (er) return cb(er) - var dirLen = dir.replace(/(\/|\\)$/, "").length + 1 - log.silly([dir, dirLen], "dir, dirLen") - files = files.map(function (file) { - return path.join(name, file.substr(dirLen)) - }) - return cb(null, files, cleanup) - }) - } -} - -// Patterns ending in slashes will only match targets -// ending in slashes. To implement this, add a / to -// the filename iff it lstats isDirectory() -function readDir (dir, pkg, dfc, cb) { - fs.readdir(dir, function (er, files) { - if (er) return cb(er) - files = files.filter(function (f) { - return f && f.charAt(0) !== "/" && f.indexOf("\0") === -1 - }) - asyncMap(files, function (file, cb) { - fs.lstat(path.resolve(dir, file), function (er, st) { - if (er) return cb(null, []) - // if it's a directory, then tack "/" onto the name - // so that it can match dir-only patterns in the - // include/exclude logic later. - if (st.isDirectory()) return cb(null, file + "/") - - // if it's a symlink, then we need to do some more - // complex stuff for GH-691 - if (st.isSymbolicLink()) return readSymlink(dir, file, pkg, dfc, cb) - - // otherwise, just let it on through. - return cb(null, file) - }) - }, cb) - }) -} - -// just see where this link is pointing, and resolve relative paths. -function shallowReal (link, cb) { - link = path.resolve(link) - fs.readlink(link, function (er, t) { - if (er) return cb(er) - return cb(null, path.resolve(path.dirname(link), t), t) - }) -} - -function readSymlink (dir, file, pkg, dfc, cb) { - var isNM = dfc - && path.basename(dir) === "node_modules" - && path.dirname(dir) === pkg.path - // see if this thing is pointing outside of the package. - // external symlinks are resolved for deps, ignored for other things. - // internal symlinks are allowed through. - var df = path.resolve(dir, file) - shallowReal(df, function (er, r, target) { - if (er) return cb(null, []) // wtf? exclude file. - if (r.indexOf(dir) === 0) return cb(null, file) // internal - if (!isNM) return cb(null, []) // external non-dep - // now the fun stuff! - fs.realpath(df, function (er, resolved) { - if (er) return cb(null, []) // can't add it. - readJson(path.resolve(resolved, "package.json"), function (er) { - if (er) return cb(null, []) // not a package - resolveLinkDep(dir, file, resolved, target, pkg, function (er, f, c) { - cb(er, f, c) - }) - }) - }) - }) -} - -// put the link back the way it was. -function cleanupResolveLinkDep (cleanup, cb) { - // cut it out of the list, so that cycles will be broken. 
- if (!cleanup) return cb() - - asyncMap(cleanup, function (d, cb) { - rm(d[1], function (er) { - if (er) return cb(er) - fs.symlink(d[0], d[1], cb) - }) - }, cb) -} - -function resolveLinkDep (dir, file, resolved, target, pkg, cb) { - // we've already decided that this is a dep that will be bundled. - // make sure the data reflects this. - var bd = pkg.bundleDependencies || pkg.bundledDependencies || [] - delete pkg.bundledDependencies - pkg.bundleDependencies = bd - var f = path.resolve(dir, file) - , cleanup = [[target, f, resolved]] - - if (bd.indexOf(file) === -1) { - // then we don't do this one. - // just move the symlink out of the way. - return rm(f, function (er) { - cb(er, file, cleanup) - }) - } - - rm(f, function (er) { - if (er) return cb(er) - cache.add(resolved, function (er, data) { - if (er) return cb(er) - cache.unpack(data.name, data.version, f, function (er, data) { - if (er) return cb(er) - // now clear out the cache entry, since it's weird, probably. - // pass the cleanup object along so that the thing getting the - // list of files knows what to clean up afterwards. - cache.clean([data._id], function (er) { cb(er, file, cleanup) }) - }) - }) - }) -} - -// exList is a list of ignore lists. -// Each exList item is an array of patterns of files to ignore -// -function makeList_ (dir, pkg, exList, dfc, cb) { - var files = null - , cleanup = null - - readDir(dir, pkg, dfc, function (er, f, c) { - if (er) return cb(er) - cleanup = c - files = f.map(function (f) { - // no nulls in paths! - return f.split(/\0/)[0] - }).filter(function (f) { - // always remove all source control folders and - // waf/vim/OSX garbage. this is a firm requirement. - return !( f === ".git/" - || f === ".lock-wscript" - || f.match(/^\.wafpickle-[0-9]+$/) - || f === "CVS/" - || f === ".svn/" - || f === ".hg/" - || f.match(/^\..*\.swp/) - || f === ".DS_Store" - || f.match(/^\._/) - || f === "npm-debug.log" - || f === "" - || f.charAt(0) === "/" - ) - }) - - // if (files.length > 0) files.push(".") - - if (files.indexOf("package.json") !== -1 && dir !== pkg.path) { - // a package.json file starts the whole exclude/include - // logic all over. Otherwise, a parent could break its - // deps with its files list or .npmignore file. - readJson(path.resolve(dir, "package.json"), function (er, data) { - if (!er && typeof data === "object") { - data.path = dir - return makeList(dir, data, dfc, function (er, files) { - // these need to be mounted onto the directory now. - cb(er, files && files.map(function (f) { - return path.resolve(path.dirname(dir), f) - })) - }) - } - next() - }) - //next() - } else next() - - // add a local ignore file, if found. - if (files.indexOf(".npmignore") === -1 - && files.indexOf(".gitignore") === -1) next() - else { - excludes.addIgnoreFile( path.resolve(dir, ".npmignore") - , ".gitignore" - , exList - , dir - , function (er, list) { - if (!er) exList = list - next(er) - }) - } - }) - - var n = 2 - , errState = null - function next (er) { - if (errState) return - if (er) return cb(errState = er, [], cleanup) - if (-- n > 0) return - - if (!pkg) return cb(new Error("No package.json file in "+dir)) - if (pkg.path === dir && pkg.files) { - pkg.files = pkg.files.filter(function (f) { - f = f.trim() - return f && f.charAt(0) !== "#" - }) - if (!pkg.files.length) pkg.files = null - } - if (pkg.path === dir && pkg.files) { - // stuff on the files list MUST be there. - // ignore everything, then include the stuff on the files list. 
- var pkgFiles = ["*"].concat(pkg.files.map(function (f) { - return "!" + f - })) - pkgFiles.dir = dir - pkgFiles.packageFiles = true - exList.push(pkgFiles) - } - - if (path.basename(dir) === "node_modules" - && pkg.path === path.dirname(dir) - // do fancy crap - && dfc - // not already part of a bundled dependency - && (path.basename(path.dirname(pkg.path)) !== "node_modules" - // unless it's the root - || pkg.path === npm.prefix)) { - log.verbose(dir, "doing fancy crap") - files = filterNodeModules(files, pkg) - } else { - // If a directory is excluded, we still need to be - // able to *include* a file within it, and have that override - // the prior exclusion. - // - // This whole makeList thing probably needs to be rewritten - files = files.filter(function (f) { - return excludes.filter(dir, exList)(f) || f.slice(-1) === "/" - }) - } - - - asyncMap(files, function (file, cb) { - // if this is a dir, then dive into it. - // otherwise, don't. - file = path.resolve(dir, file) - - // in 0.6.0, fs.readdir can produce some really odd results. - // XXX: remove this and make the engines hash exclude 0.6.0 - if (file.indexOf(dir) !== 0) { - return cb(null, []) - } - - fs.lstat(file, function (er, st) { - if (er) return cb(er) - if (st.isDirectory()) { - return makeList_(file, pkg, exList, dfc, cb) - } - return cb(null, file) - }) - }, function (er, files, c) { - if (c) cleanup = (cleanup || []).concat(c) - if (files.length > 0) files.push(dir) - return cb(er, files, cleanup) - }) - } -} - -// only include node_modules folder that are: -// 1. not on the dependencies list or -// 2. on the "bundleDependencies" list. -function filterNodeModules (files, pkg) { - var bd = pkg.bundleDependencies || pkg.bundledDependencies || [] - , deps = Object.keys(pkg.dependencies || {}) - .filter(function (key) { return !pkg.dependencies[key].extraneous }) - .concat(Object.keys(pkg.devDependencies || {})) - - delete pkg.bundledDependencies - pkg.bundleDependencies = bd - - return files.filter(function (f) { - f = f.replace(/\/$/, "") - return f.charAt(0) !== "." - && f.charAt(0) !== "_" - && bd.indexOf(f) !== -1 - }) -} diff --git a/deps/npm/lib/utils/uid-number.js b/deps/npm/lib/utils/uid-number.js deleted file mode 100644 index 375627553..000000000 --- a/deps/npm/lib/utils/uid-number.js +++ /dev/null @@ -1,55 +0,0 @@ -module.exports = uidNumber - -// This module calls into bin/npm-get-uid-gid.js, which sets the -// uid and gid to the supplied argument, in order to find out their -// numeric value. This can't be done in the main node process, -// because otherwise npm would be running as that user. 
- -var exec = require("./exec.js") - , path = require("path") - , log = require("./log.js") - , constants = require("constants") - , npm = require("../npm.js") - , uidSupport = process.getuid && process.setuid - , uidCache = {} - , gidCache = {} - -function uidNumber (uid, gid, cb) { - if (!uidSupport || npm.config.get("unsafe-perm")) return cb() - if (typeof cb !== "function") cb = gid, gid = null - if (typeof cb !== "function") cb = uid, uid = null - if (gid == null) gid = process.getgid() - if (uid == null) uid = process.getuid() - if (!isNaN(gid)) gid = +gid - if (!isNaN(uid)) uid = +uid - - if (uidCache[uid]) uid = uidCache[uid] - if (gidCache[gid]) gid = gidCache[gid] - - if (typeof gid === "number" && typeof uid === "number") { - return cb(null, uid, gid) - } - - var getter = path.join(__dirname, "..", "..", "bin", "npm-get-uid-gid.js") - return exec( process.execPath, [getter, uid, gid], process.env, false - , null, process.getuid(), process.getgid() - , function (er, code, out, err) { - if (er) return log.er(cb, "Could not get uid/gid "+err)(er) - log.silly(out, "output from getuid/gid") - out = JSON.parse(out+"") - if (out.error) { - if (!npm.config.get("unsafe-perm")) { - var er = new Error(out.error) - er.errno = out.errno - return cb(er) - } else { - return cb(null, +process.getuid(), +process.getgid()) - } - } - if (isNaN(out.uid) || isNaN(out.gid)) return cb(new Error( - "Could not get uid/gid: "+JSON.stringify(out))) - uidCache[uid] = out.uid - uidCache[gid] = out.gid - cb(null, out.uid, out.gid) - }) -} |
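
The cache.js and npm-registry-client/get.js hunks above replace npm's bespoke mkdir-p/uid-number machinery with the mkdirp and chownr modules: mkdirp reports the first directory it actually created as the second callback argument ("made"), and chownr then fixes ownership recursively from that point down, leaving pre-existing parents alone. A minimal sketch of that pattern, assuming the callback-style mkdirp used here; the path and uid/gid in the usage comment are placeholders:

    var mkdirp = require("mkdirp")
      , chownr = require("chownr")

    function makeOwnedDir (dir, uid, gid, cb) {
      mkdirp(dir, function (er, made) {
        if (er) return cb(er)
        // nothing was created, so there is nothing to re-own
        if (!made) return cb(null, dir)
        // "made" is the shallowest directory mkdirp created; chown it and
        // everything underneath so the whole new subtree belongs to the
        // requested user
        chownr(made, uid, gid, function (er) {
          cb(er, dir)
        })
      })
    }

    // usage (hypothetical path and ids):
    // makeOwnedDir("/tmp/npm-cache/a/b/c", 1000, 1000, function (er) { ... })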
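
install.js and ls.js now hand their tree output to the archy module instead of hand-rolled box-drawing code: each package becomes a { label, nodes } object, where nodes holds child objects (or plain strings for leaves), and archy renders the whole structure as the familiar tree. A small standalone illustration, with made-up package names and versions:

    var archy = require("archy")

    var tree =
      { label: "example@1.0.0 /tmp/example"
      , nodes:
        [ { label: "abbrev@1.0.3", nodes: [] }
        , { label: "semver@1.0.13"
          , nodes: [ "which@1.0.5" ]   // leaves can be plain strings
          }
        ]
      }

    // second arg is the line prefix, third toggles unicode branch characters
    console.log(archy(tree, "", { unicode: true }))
    // example@1.0.0 /tmp/example
    // ├── abbrev@1.0.3
    // └─┬ semver@1.0.13
    //   └── which@1.0.5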
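
The uninstall.js change guards against arguments like ".." deleting the node_modules directory itself, or anything above it: joining the argument onto "/" first collapses any leading ".." segments, so the final path can never escape node_modules, and an argument that resolves to node_modules itself is rejected. A sketch of the same check in isolation; the paths in the comments are only illustrative:

    var path = require("path")

    function resolveUninstallTarget (nm, arg) {
      // path.join("/", "..") is "/", so ".." can no longer climb out of nm
      var p = path.join(path.resolve(nm), path.join("/", arg))
      // "", ".", ".." and friends all collapse back to nm itself: reject them
      if (path.resolve(p) === path.resolve(nm)) return null
      return p
    }

    // resolveUninstallTarget("/usr/local/lib/node_modules", "..")  -> null
    // resolveUninstallTarget("/usr/local/lib/node_modules", "foo") -> "/usr/local/lib/node_modules/foo"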
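
Finally, tar.js now unpacks straight into the target directory with the tarball's single top-level folder stripped, instead of untarring into a temp directory and renaming; ownership is only applied on Unix, and symbolic links are filtered out of packages. A sketch of that pipeline, assuming the legacy node-tar/fstream API (tar.Extract with strip and an entry filter) that this code targets; the tarball and target paths are hypothetical:

    var tar = require("tar")
      , zlib = require("zlib")
      , fs = require("fs")

    function unpackStripped (tarball, target, uid, gid, cb) {
      var opts = { type: "Directory", path: target, strip: 1 }

      if (process.platform !== "win32"
          && typeof uid === "number"
          && typeof gid === "number") {
        opts.uid = uid
        opts.gid = gid
      }

      // symbolic links are not allowed in packages
      opts.filter = function () {
        return !this.type.match(/^.*Link$/)
      }

      fs.createReadStream(tarball)
        .pipe(zlib.createUnzip())
        .pipe(tar.Extract(opts))
        .on("error", cb)
        .on("close", function () { cb(null, target) })
    }

    // unpackStripped("/tmp/package.tgz", "/tmp/package", null, null, function (er) { ... })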