mirror of
https://github.com/biobulkbende/biobulkbende.org.git
synced 2025-10-12 07:34:58 +00:00
structure, layout and automation
This commit is contained in:
155
node_modules/node-sass/scripts/build.js
generated
vendored
Normal file
155
node_modules/node-sass/scripts/build.js
generated
vendored
Normal file
@ -0,0 +1,155 @@
|
||||
/*!
|
||||
* node-sass: scripts/build.js
|
||||
*/
|
||||
|
||||
var fs = require('fs'),
|
||||
mkdir = require('mkdirp'),
|
||||
path = require('path'),
|
||||
spawn = require('cross-spawn'),
|
||||
sass = require('../lib/extensions');
|
||||
|
||||
/**
 * After build
 *
 * Moves the freshly built `binding.node` out of node-gyp's build
 * directory (Debug/Release or the node configuration default) into the
 * install location reported by the extensions helper.
 *
 * @param {Object} options
 * @api private
 */

function afterBuild(options) {
  var installPath = sass.getBinaryPath();

  var configName;
  if (options.debug) {
    configName = 'Debug';
  } else if (process.config.target_defaults) {
    configName = process.config.target_defaults.default_configuration;
  } else {
    configName = 'Release';
  }

  var builtBinary = path.join(__dirname, '..', 'build', configName, 'binding.node');

  mkdir(path.dirname(installPath), function(err) {
    // An already-existing directory is fine; anything else aborts.
    if (err && err.code !== 'EEXIST') {
      console.error(err.message);
      return;
    }

    fs.stat(builtBinary, function(err) {
      if (err) {
        console.error('Build succeeded but target not found');
        return;
      }

      fs.rename(builtBinary, installPath, function(err) {
        if (err) {
          console.error(err.message);
          return;
        }

        console.log('Installed to', installPath);
      });
    });
  });
}
|
||||
|
||||
/**
 * Build
 *
 * Runs `node-gyp rebuild --verbose` in a child process, forwarding the
 * LIBSASS_* environment variables as gyp defines plus any extra CLI
 * args. On success hands off to afterBuild(); on failure exits with
 * code 1.
 *
 * @param {Object} options
 * @api private
 */

function build(options) {
  var gypScript = require.resolve(path.join('node-gyp', 'bin', 'node-gyp.js'));

  // Forward libsass-related env vars (empty string when unset).
  var defineArgs = ['libsass_ext', 'libsass_cflags', 'libsass_ldflags', 'libsass_library']
    .map(function(name) {
      return '--' + name + '=' + (process.env[name.toUpperCase()] || '');
    });

  var args = [gypScript, 'rebuild', '--verbose']
    .concat(defineArgs)
    .concat(options.args);

  console.log('Building:', [process.execPath].concat(args).join(' '));

  var proc = spawn(process.execPath, args, {
    stdio: [0, 1, 2]
  });

  proc.on('exit', function(errorCode) {
    if (!errorCode) {
      afterBuild(options);
      return;
    }

    // 127 is the shell's "command not found" exit status.
    if (errorCode === 127) {
      console.error('node-gyp not found!');
    } else {
      console.error('Build failed with error code:', errorCode);
    }

    process.exit(1);
  });
}
|
||||
|
||||
/**
 * Parse arguments
 *
 * Extracts build-related flags from the CLI arguments. `-f`/`--force`
 * is consumed (removed from the pass-through list); all other args,
 * recognised or not, are forwarded to node-gyp unchanged.
 *
 * @param {Array} args
 * @returns {Object} { arch, platform, force, [debug], [libsassExt], args }
 * @api private
 */

function parseArgs(args) {
  var parsed = {
    arch: process.arch,
    platform: process.platform,
    force: process.env.npm_config_force === 'true',
  };

  parsed.args = args.filter(function(arg) {
    if (arg === '-f' || arg === '--force') {
      parsed.force = true;
      return false; // consumed; do not forward to node-gyp
    }

    if (arg.slice(0, 13) === '--target_arch') {
      parsed.arch = arg.slice(14); // skip the '=' at index 13
    } else if (arg === '-d' || arg === '--debug') {
      parsed.debug = true;
    } else if (arg.slice(0, 13) === '--libsass_ext' && arg.slice(14) !== 'no') {
      parsed.libsassExt = true;
    }

    return true;
  });

  return parsed;
}
|
||||
|
||||
/**
 * Test for pre-built library
 *
 * Builds from source when forced or when no binary is present;
 * otherwise smoke-tests the existing binary with a trivial render and
 * rebuilds locally if that throws.
 *
 * @param {Object} options
 * @api private
 */

function testBinary(options) {
  var forced = options.force || process.env.SASS_FORCE_BUILD;

  // Short-circuit: hasBinary() is only consulted when not forced,
  // exactly as two separate guards would.
  if (forced || !sass.hasBinary(sass.getBinaryPath())) {
    return build(options);
  }

  console.log('Binary found at', sass.getBinaryPath());
  console.log('Testing binary');

  try {
    require('../').renderSync({
      data: 's { a: ss }'
    });

    console.log('Binary is fine');
  } catch (err) {
    console.log('Binary has a problem:', err);
    console.log('Building the binary locally');

    return build(options);
  }
}
|
||||
|
||||
/**
 * Apply arguments and run
 *
 * Script entry point: parse argv[2..] and either reuse a working
 * prebuilt binary or rebuild from source.
 */

testBinary(parseArgs(process.argv.slice(2)));
|
85
node_modules/node-sass/scripts/coverage.js
generated
vendored
Normal file
85
node_modules/node-sass/scripts/coverage.js
generated
vendored
Normal file
@ -0,0 +1,85 @@
|
||||
/*!
|
||||
* node-sass: scripts/coverage.js
|
||||
*/
|
||||
|
||||
var Mocha = require('mocha'),
|
||||
fs = require('fs'),
|
||||
path = require('path'),
|
||||
mkdirp = require('mkdirp'),
|
||||
coveralls = require('coveralls'),
|
||||
istanbul = require('istanbul'),
|
||||
sourcefiles = ['index.js', 'binding.js', 'extensions.js', 'render.js', 'errors.js'],
|
||||
summary= istanbul.Report.create('text-summary'),
|
||||
lcov = istanbul.Report.create('lcovonly', { dir: path.join('coverage') }),
|
||||
html = istanbul.Report.create('html', { dir: path.join('coverage', 'html') });
|
||||
|
||||
/**
 * Instrument the node-sass lib sources with istanbul, run the mocha
 * test suites against the instrumented copies, and write text-summary,
 * HTML and lcov coverage reports; the lcov output is also fed to
 * coveralls.
 *
 * @api private
 */
function coverage() {
  var mocha = new Mocha();
  // Custom mocha reporter: once the run ends, collect the global
  // istanbul coverage object and write all three reports.
  var rep = function(runner) {
    runner.on('end', function(){
      var cov = global.__coverage__,
          collector = new istanbul.Collector();
      if (cov) {
        mkdirp(path.join('coverage', 'html'), function(err) {
          if (err) { throw err; }
          collector.add(cov);
          summary.writeReport(collector, true);
          html.writeReport(collector, true);
          // When the lcov file has been written, upload it to coveralls.
          lcov.on('done', function() {
            fs.readFile(path.join('coverage', 'lcov.info'), function(err, data) {
              if (err) { console.error(err); }
              coveralls.handleInput(data.toString(),
                function (err) { if (err) { console.error(err); } });
            });
          });
          lcov.writeReport(collector, true);
        });
      } else {
        console.warn('No coverage');
      }
    });
  };
  var instrumenter = new istanbul.Instrumenter();
  var instrumentedfiles = [];
  // Read one lib source, write an instrumented copy to lib-cov/, and —
  // once every source has been processed — queue the matching test files
  // and run mocha with the reporter above.
  var processfile = function(source) {
    fs.readFile(path.join('lib', source), function(err, data) {
      if (err) { throw err; }
      mkdirp('lib-cov', function(err) {
        if (err) { throw err; }
        fs.writeFile(path.join('lib-cov', source),
          instrumenter.instrumentSync(data.toString(),
            path.join('lib', source)),
          function(err) {
            if (err) { throw err; }
            instrumentedfiles.push(source);
            if (instrumentedfiles.length === sourcefiles.length) {
              // NOTE(review): substr(-11) yields up to 11 chars while
              // 'runtime.js' is only 10, so that comparison matches only
              // a file named exactly 'runtime.js' — verify this is the
              // intended filter.
              fs.readdirSync('test').filter(function(file){
                return file.substr(-6) === 'api.js' ||
                  file.substr(-11) === 'runtime.js' ||
                  file.substr(-7) === 'spec.js';
              }).forEach(function(file){
                mocha.addFile(
                  path.join('test', file)
                );
              });
              // Signal the lib loader to require the instrumented copies.
              process.env.NODESASS_COV = 1;
              mocha.reporter(rep).run(function(failures) {
                // Propagate the mocha failure count as the exit status.
                process.on('exit', function () {
                  process.exit(failures);
                });
              });
            }
          });
      });
    });
  };
  for (var i in sourcefiles) {
    processfile(sourcefiles[i]);
  }
}

/**
 * Run
 */

coverage();
|
157
node_modules/node-sass/scripts/install.js
generated
vendored
Normal file
157
node_modules/node-sass/scripts/install.js
generated
vendored
Normal file
@ -0,0 +1,157 @@
|
||||
/*!
|
||||
* node-sass: scripts/install.js
|
||||
*/
|
||||
|
||||
var fs = require('fs'),
|
||||
eol = require('os').EOL,
|
||||
mkdir = require('mkdirp'),
|
||||
path = require('path'),
|
||||
sass = require('../lib/extensions'),
|
||||
request = require('request'),
|
||||
log = require('npmlog'),
|
||||
downloadOptions = require('./util/downloadoptions');
|
||||
|
||||
/**
 * Download file, if succeeds save, if not delete
 *
 * @param {String} url
 * @param {String} dest - path the downloaded binary is written to
 * @param {Function} cb - called with an error message on failure, or
 *   with no argument once the file has been written
 * @api private
 */

function download(url, dest, cb) {
  var reportError = function(err) {
    var timeoutMessage;

    if (err.code === 'ETIMEDOUT') {
      if (err.connect === true) {
        // Timeout hit while the client was attempting to establish a
        // connection to the remote machine.
        timeoutMessage = 'Timed out attempting to establish a remote connection';
      } else {
        // Occurs any time the server is too slow to send back a part
        // of the response.
        timeoutMessage = 'Timed out whilst downloading the prebuilt binary';
      }
    }

    // Array.prototype.join renders an undefined timeoutMessage as ''.
    cb(['Cannot download "', url, '": ', eol, eol,
      typeof err.message === 'string' ? err.message : err, eol, eol,
      timeoutMessage ? timeoutMessage + eol + eol : timeoutMessage,
      'Hint: If github.com is not accessible in your location', eol,
      '      try setting a proxy via HTTP_PROXY, e.g. ', eol, eol,
      '      export HTTP_PROXY=http://example.com:1234', eol, eol,
      'or configure npm proxy via', eol, eol,
      '      npm config set proxy http://example.com:8080'].join(''));
  };

  // Any 2xx status counts as a successful download.
  var successful = function(response) {
    return response.statusCode >= 200 && response.statusCode < 300;
  };

  console.log('Downloading binary from', url);

  try {
    request(url, downloadOptions(), function(err, response, buffer) {
      if (err) {
        reportError(err);
      } else if (!successful(response)) {
        reportError(['HTTP error', response.statusCode, response.statusMessage].join(' '));
      } else {
        console.log('Download complete');

        fs.createWriteStream(dest)
          .on('error', cb)
          .end(buffer, cb);
      }
    })
    .on('response', function(response) {
      var length = parseInt(response.headers['content-length'], 10);
      var progress = log.newItem('', length);

      // npm's `progress` config is true by default, but when it has not
      // been explicitly set it arrives here as `undefined`; only an
      // explicit 'true' enables the progress bar.
      if (process.env.npm_config_progress === 'true') {
        log.enableProgress();

        response.on('data', function(chunk) {
          progress.completeWork(chunk.length);
        })
        .on('end', progress.finish);
      }
    });
  } catch (err) {
    cb(err);
  }
}
|
||||
|
||||
/**
 * Check and download binary
 *
 * Ensures a usable prebuilt binary exists locally: skips entirely on
 * CI when requested, reuses an already-installed or cached binary, and
 * otherwise downloads it — caching the fresh copy when a cache path is
 * configured.
 *
 * @api private
 */

function checkAndDownloadBinary() {
  if (process.env.SKIP_SASS_BINARY_DOWNLOAD_FOR_CI) {
    console.log('Skipping downloading binaries on CI builds');
    return;
  }

  var cachedBinary = sass.getCachedBinary();
  var cachePath = sass.getBinaryCachePath();
  var binaryPath = sass.getBinaryPath();

  // Nothing to do when the binary is already installed.
  if (sass.hasBinary(binaryPath)) {
    console.log('node-sass build', 'Binary found at', binaryPath);
    return;
  }

  try {
    mkdir.sync(path.dirname(binaryPath));
  } catch (err) {
    console.error('Unable to save binary', path.dirname(binaryPath), ':', err);
    return;
  }

  // A previously cached copy avoids touching the network at all.
  if (cachedBinary) {
    console.log('Cached binary found at', cachedBinary);
    fs.createReadStream(cachedBinary).pipe(fs.createWriteStream(binaryPath));
    return;
  }

  download(sass.getBinaryUrl(), binaryPath, function(err) {
    if (err) {
      console.error(err);
      return;
    }

    console.log('Binary saved to', binaryPath);

    cachedBinary = path.join(cachePath, sass.getBinaryName());

    if (!cachePath) {
      return;
    }

    console.log('Caching binary to', cachedBinary);

    try {
      mkdir.sync(path.dirname(cachedBinary));
      fs.createReadStream(binaryPath)
        .pipe(fs.createWriteStream(cachedBinary))
        .on('error', function (streamErr) {
          console.log('Failed to cache binary:', streamErr);
        });
    } catch (cacheErr) {
      console.log('Failed to cache binary:', cacheErr);
    }
  });
}
|
||||
|
||||
/**
 * If binary does not exist, download it
 *
 * Script entry point for the npm install step.
 */

checkAndDownloadBinary();
|
17
node_modules/node-sass/scripts/prepublish.js
generated
vendored
Normal file
17
node_modules/node-sass/scripts/prepublish.js
generated
vendored
Normal file
@ -0,0 +1,17 @@
|
||||
/*!
|
||||
 * node-sass: scripts/prepublish.js
|
||||
*/
|
||||
|
||||
var path = require('path'),
|
||||
rimraf = require('rimraf');
|
||||
|
||||
/**
 * Delete the bundled `vendor` directory before publishing so stale
 * prebuilt binaries are never shipped with the package.
 *
 * @api private
 */
function prepublish() {
  var vendorDir = path.resolve(__dirname, '..', 'vendor');
  rimraf.sync(vendorDir);
}

/**
 * Run
 */

prepublish();
|
31
node_modules/node-sass/scripts/util/downloadoptions.js
generated
vendored
Normal file
31
node_modules/node-sass/scripts/util/downloadoptions.js
generated
vendored
Normal file
@ -0,0 +1,31 @@
|
||||
var proxy = require('./proxy'),
|
||||
userAgent = require('./useragent');
|
||||
|
||||
/**
|
||||
* The options passed to request when downloading the bibary
|
||||
*
|
||||
* There some nuance to how request handles options. Specifically
|
||||
* we've been caught by their usage of `hasOwnProperty` rather than
|
||||
* falsey checks. By moving the options generation into a util helper
|
||||
* we can test for regressions.
|
||||
*
|
||||
* @return {Object} an options object for request
|
||||
* @api private
|
||||
*/
|
||||
module.exports = function() {
|
||||
var options = {
|
||||
rejectUnauthorized: false,
|
||||
timeout: 60000,
|
||||
headers: {
|
||||
'User-Agent': userAgent(),
|
||||
},
|
||||
encoding: null,
|
||||
};
|
||||
|
||||
var proxyConfig = proxy();
|
||||
if (proxyConfig) {
|
||||
options.proxy = proxyConfig;
|
||||
}
|
||||
|
||||
return options;
|
||||
};
|
22
node_modules/node-sass/scripts/util/proxy.js
generated
vendored
Normal file
22
node_modules/node-sass/scripts/util/proxy.js
generated
vendored
Normal file
@ -0,0 +1,22 @@
|
||||
|
||||
/**
|
||||
* Determine the proxy settings configured by npm
|
||||
*
|
||||
* It's possible to configure npm to use a proxy different
|
||||
* from the system defined proxy. This can be done via the
|
||||
* `npm config` CLI or the `.npmrc` config file.
|
||||
*
|
||||
* If a proxy has been configured in this way we must
|
||||
* tell request explicitly to use it.
|
||||
*
|
||||
* Otherwise we can trust request to the right thing.
|
||||
*
|
||||
* @return {String} the proxy configured by npm or an empty string
|
||||
* @api private
|
||||
*/
|
||||
module.exports = function() {
|
||||
return process.env.npm_config_https_proxy ||
|
||||
process.env.npm_config_proxy ||
|
||||
process.env.npm_config_http_proxy ||
|
||||
'';
|
||||
};
|
13
node_modules/node-sass/scripts/util/useragent.js
generated
vendored
Normal file
13
node_modules/node-sass/scripts/util/useragent.js
generated
vendored
Normal file
@ -0,0 +1,13 @@
|
||||
var pkg = require('../../package.json');
|
||||
|
||||
/**
|
||||
* A custom user agent use for binary downloads.
|
||||
*
|
||||
* @api private
|
||||
*/
|
||||
module.exports = function() {
|
||||
return [
|
||||
'node/', process.version, ' ',
|
||||
'node-sass-installer/', pkg.version
|
||||
].join('');
|
||||
};
|
Reference in New Issue
Block a user