1 change: 1 addition & 0 deletions .gitignore
@@ -1,2 +1,3 @@
node_modules
test/fixtures/*.actual.css
test/cache
7 changes: 7 additions & 0 deletions README.md
@@ -93,6 +93,13 @@ Default: `process.cwd()` or _dirname of [the postcss `from`](https://github.com/
A string or an array of paths in which to look for files.
_Note: nested `@import` will additionally benefit from the relative dirname of imported files._

#### `cacheDir`

Type: `String`
Default: `null`

The location of the cache. When set, the compiled output will be stored in this directory. The cache will automatically be invalidated when a file in the dependency graph is modified.
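
A minimal usage sketch with caching enabled (the file paths here are illustrative; the entry file is only cached when `from` is known, so pass it to `process`):

```js
var fs = require("fs")
var postcss = require("postcss")
var atImport = require("postcss-import")

var from = "css/index.css"              // entry file (illustrative path)
var css = fs.readFileSync(from, "utf8")

var output = postcss()
  .use(atImport({cacheDir: "cache"}))   // imports.json and the cached CSS land here
  .process(css, {from: from})           // `from` identifies the entry file in the cache
  .css
```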

#### `transform`

Type: `Function`
120 changes: 106 additions & 14 deletions index.js
@@ -68,9 +68,25 @@ function AtImport(options) {

var hashFiles = {}

parseStyles(styles, options, insertRules, importedFiles, ignoredAtRules, null, hashFiles)
var cache;
if (options.cacheDir) {
if (!fs.existsSync(options.cacheDir)) {
fs.mkdirSync(options.cacheDir);
}
try {
cache = require(path.resolve(options.cacheDir, "imports.json"))
} catch (err) {
cache = {}
}
}

parseStyles(styles, options, insertRules, importedFiles, ignoredAtRules, null, hashFiles, cache)
addIgnoredAtRulesOnTop(styles, ignoredAtRules)

if (cache) {
fs.writeFileSync(path.resolve(options.cacheDir, "imports.json"), JSON.stringify(cache))
}

if (typeof options.onImport === "function") {
options.onImport(Object.keys(importedFiles))
}
@@ -83,7 +99,16 @@ function AtImport(options) {
* @param {Object} styles
* @param {Object} options
*/
function parseStyles(styles, options, cb, importedFiles, ignoredAtRules, media, hashFiles) {
function parseStyles(styles, options, cb, globallyImportedFiles, ignoredAtRules, media, hashFiles, cache) {
if (options.from && !media && cache && IsCached(options.from, options, cache)) {
Contributor: So you are not caching imports that use a media query? Why?

Author: Media-queried imports will be cached by the requesting file. A file shouldn't be cached when it is requested with a media query, because that would alter the output from how it looks when requested normally.

Contributor: Hum... ok :)
var newStyles = readFromCache(options, options.from, cache)
var newNodes = newStyles.nodes
newNodes.forEach(function(node) {node.parent = styles})
styles.source = newStyles.source
styles.nodes = newStyles.nodes
styles.semicolon = newStyles.semicolon
return
}
var imports = []
styles.eachAtRule("import", function checkAtRule(atRule) {
if (options.glob && glob.hasMagic(atRule.params)) {
@@ -93,11 +118,72 @@ function parseStyles(styles, options, cb, importedFiles, ignoredAtRules, media,
imports.push(atRule)
}
})

var locallyImportedFiles = []
imports.forEach(function(atRule) {
helpers.try(function transformAtImport() {
readAtImport(atRule, options, cb, importedFiles, ignoredAtRules, media, hashFiles)
readAtImport(atRule, options, cb, globallyImportedFiles, locallyImportedFiles, ignoredAtRules, media, hashFiles, cache)
}, atRule.source)
})
if (options.from && cache && !media) {
var cacheFilename;
if (locallyImportedFiles.length > 0) {
var css = styles.toResult().css
cacheFilename = path.resolve(options.cacheDir, hash(css) + ".css")
fs.writeFileSync(cacheFilename, css)
}
cache[options.from] = cache[options.from] || {};

cache[options.from].mtime = fs.statSync(options.from).mtime.getTime();
cache[options.from].dependencies = locallyImportedFiles;
cache[options.from].cache = cacheFilename
}
}

function IsCached(filename, options, cache) {
var importCache = cache[filename]

if (!IsDependencyCached(filename, options, cache)) {
if (importCache && importCache.cache) {
fs.unlink(importCache.cache, function(err) {
if (err) {
throw err
}
})
}
return false
}

if (!importCache.cache) {
return false
}

return true
}

function readFromCache(options, filename, cache) {
var fileContent = fs.readFileSync(cache[filename].cache)
return postcss.parse(fileContent, options)
}

function IsDependencyCached(filename, options, cache) {
if (!filename) {
return false
}
var importCache = cache[filename]
if (!importCache) {
return false
}
var mtime = fs.statSync(filename).mtime.getTime();
if (mtime !== importCache.mtime) {
return false
}
for (var i = 0; i < importCache.dependencies.length; i++) {
if (!IsDependencyCached(importCache.dependencies[i], options, cache)) {
return false
}
}
return true
}

/**
@@ -172,7 +258,7 @@ function addIgnoredAtRulesOnTop(styles, ignoredAtRules) {
* @param {Object} atRule postcss atRule
* @param {Object} options
*/
function readAtImport(atRule, options, cb, importedFiles, ignoredAtRules, media, hashFiles) {
function readAtImport(atRule, options, cb, globallyImportedFiles, locallyImportedFiles, ignoredAtRules, media, hashFiles, cache) {
// parse-import module parse entire line
// @todo extract what can be interesting from this one
var parsedAtImport = parseImport(atRule.params, atRule.source)
@@ -197,19 +283,20 @@ function readAtImport(atRule, options, cb, importedFiles, ignoredAtRules, media,
var resolvedFilename = resolveFilename(parsedAtImport.uri, options.root, options.path, atRule.source)

// skip files already imported at the same scope
if (importedFiles[resolvedFilename] && importedFiles[resolvedFilename][media]) {
if (globallyImportedFiles[resolvedFilename] && globallyImportedFiles[resolvedFilename][media]) {
detach(atRule)
return
}

// save imported files to skip them next time
if (!importedFiles[resolvedFilename]) {
importedFiles[resolvedFilename] = {}
if (!globallyImportedFiles[resolvedFilename]) {
globallyImportedFiles[resolvedFilename] = {}
}
importedFiles[resolvedFilename][media] = true


readImportedContent(atRule, parsedAtImport, clone(options), resolvedFilename, cb, importedFiles, ignoredAtRules, media, hashFiles)
globallyImportedFiles[resolvedFilename][media] = true
if (locallyImportedFiles.indexOf(resolvedFilename) === -1) {
locallyImportedFiles.push(resolvedFilename)
}
readImportedContent(atRule, parsedAtImport, clone(options), resolvedFilename, cb, globallyImportedFiles, ignoredAtRules, media, hashFiles, cache)
}

/**
@@ -221,20 +308,25 @@ function readAtImport(atRule, options, cb, importedFiles, ignoredAtRules, media,
* @param {String} resolvedFilename
* @param {Function} cb
*/
function readImportedContent(atRule, parsedAtImport, options, resolvedFilename, cb, importedFiles, ignoredAtRules, media, hashFiles) {
function readImportedContent(atRule, parsedAtImport, options, resolvedFilename, cb, globallyImportedFiles, ignoredAtRules, media, hashFiles, cache) {
// add directory containing the @imported file in the paths
// to allow local import from this file
var dirname = path.dirname(resolvedFilename)
if (options.path.indexOf(dirname) === -1) {
options.path = options.path.slice()
options.path.unshift(dirname)
}

options.from = resolvedFilename
var fileContent = readFile(resolvedFilename, options.encoding, options.transform || function(value) { return value })

if (fileContent.trim() === "") {
console.log(helpers.message(resolvedFilename + " is empty", atRule.source))
if (cache) {
cache[resolvedFilename] = {
mtime: fs.statSync(resolvedFilename).mtime.getTime(),
dependencies: []
}
}
detach(atRule)
return
}
Expand All @@ -260,7 +352,7 @@ function readImportedContent(atRule, parsedAtImport, options, resolvedFilename,
var newStyles = postcss.parse(fileContent, options)

// recursion: import @import from imported file
parseStyles(newStyles, options, cb, importedFiles, ignoredAtRules, parsedAtImport.media, hashFiles)
parseStyles(newStyles, options, cb, globallyImportedFiles, ignoredAtRules, parsedAtImport.media, hashFiles, cache)

cb(atRule, parsedAtImport, newStyles, resolvedFilename)
}
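For reference, the index written to `cacheDir/imports.json` by the code above maps every processed file to its last-modified time, the files it imports directly, and, for files that import something, the flattened CSS written to the cache. Roughly (shown as a JS literal; the paths, hash and timestamps are illustrative):

```js
// Approximate shape of cacheDir/imports.json -- all values are illustrative.
var cacheIndex = {
  "/project/css/index.css": {
    mtime: 1433246000000,                           // fs.statSync(file).mtime.getTime()
    dependencies: ["/project/css/imports/foo.css"], // files this one @imports directly
    cache: "/project/cache/a1b2c3.css"              // flattened output for this entry
  },
  "/project/css/imports/foo.css": {
    mtime: 1433245000000,
    dependencies: []                                // files that import nothing carry no "cache" entry
  }
}
```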
26 changes: 26 additions & 0 deletions test/index.js
@@ -10,13 +10,15 @@ var postcss = require("postcss")

var fixturesDir = path.join(__dirname, "fixtures")
var importsDir = path.join(fixturesDir, "imports")
var cacheDir = path.join(__dirname, "cache")

function read(name) {
return fs.readFileSync("test/" + name + ".css", "utf8").trim()
}

function compareFixtures(t, name, msg, opts, postcssOpts) {
opts = opts || {path: importsDir}
postcssOpts = postcssOpts || {}
var actual = postcss()
.use(atImport(opts))
.process(read("fixtures/" + name), postcssOpts)
@@ -190,3 +192,27 @@ test("works with no styles at all", function(t) {

t.end()
})

test("works with caching", function(t) {
var opts = {path: importsDir, cacheDir: cacheDir}
var cssFileName = path.resolve("test/fixtures/relative-to-source.css")
var postcssOpts = {from: cssFileName}
var css = fs.readFileSync(cssFileName)
var output = postcss()
.use(atImport(opts))
.process(css, postcssOpts)
.css.trim()

var imports = JSON.parse(fs.readFileSync(cacheDir + "/imports.json"))
var cacheFileName = imports[cssFileName].cache;
var cache = fs.readFileSync(cacheFileName, "utf8").trim()

t.equal(output, cache, "should put output in cache")
Contributor: We know here that the cache is written, but how do we know the cache is being read after that?

Author: I'm sorry @MoOx, there's no way of knowing. It's a transparent cache. Unless you're suggesting that we should add a bunch of debug code to poll the internal state of the engine, which I would strongly advise against, we might as well close the PR, because there's no way for me to satisfy this need. I know that the cache is hit by using breakpoints, I know that it returns a correct and consistent result, and I know that I get a 100ms performance improvement when using it with a local project of mine, but there's no way for me to put that into a unit test.

Contributor: There is always a way. What about adding a simple map object of {file-requested: bool flag} entries to know whether a requested file has been used from the cache? It doesn't seem hard to do, and it won't affect performance. If you don't know how to do it, I can probably handle it.

Author: I don't quite understand what you mean, but you're welcome to take a stab at it.


output = postcss()
.use(atImport(opts))
.process(css, postcssOpts)
.css.trim()
t.equal(output, cache, "should return identical result on subsequent calls")
t.end()
})
Contributor: How is this testing that the cache has been used?!

Author: The first process() call puts the compiled output into the cache, since cacheDir has been specified. The second process() call, rather than compiling the CSS again, reads it from the cache.

Contributor: The test does not prove anything right now :/. We should probably add something to track the cache usage.
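
A rough sketch of the tracking the reviewer suggests might look like the test below. Everything specific to it is hypothetical and not part of this PR: the `onCacheHit` option, the `cacheHits` map, and the `require("tape")` / `require("..")` lines that mirror what the existing test file presumably does.

```js
// Hypothetical sketch only: assumes a new `onCacheHit` option (modelled on the
// existing `onImport` callback) that the plugin would call with each filename
// it serves from cacheDir instead of re-parsing it.
var fs = require("fs")
var path = require("path")
var test = require("tape")
var postcss = require("postcss")
var atImport = require("..")

test("tracks cache hits", function(t) {
  var cssFileName = path.resolve("test/fixtures/relative-to-source.css")
  var css = fs.readFileSync(cssFileName)
  var cacheHits = {}
  var opts = {
    path: path.join(__dirname, "fixtures", "imports"),
    cacheDir: path.join(__dirname, "cache"),
    onCacheHit: function(filename) { cacheHits[filename] = true }  // hypothetical option
  }

  // First run fills the cache; the second run should be answered from it.
  postcss().use(atImport(opts)).process(css, {from: cssFileName}).css
  postcss().use(atImport(opts)).process(css, {from: cssFileName}).css

  t.ok(cacheHits[cssFileName], "entry file should be served from the cache on the second run")
  t.end()
})
```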