diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6030c14f36d3..917fa166b4fd 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -9,6 +9,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Added
+- Support specifying the base path for automatic source detection using a `source(…)` function on `@tailwind utilities` or `@import "tailwindcss"` ([#14820](https://github.com/tailwindlabs/tailwindcss/pull/14820))
+- Support disabling automatic source detection with `source(none)` ([#14820](https://github.com/tailwindlabs/tailwindcss/pull/14820))
+- Support passing directories to `@source` without needing to pass a complete glob ([#14820](https://github.com/tailwindlabs/tailwindcss/pull/14820))
- _Upgrade (experimental)_: Bump `prettier-plugin-tailwindcss` to latest version during upgrade ([#14808](https://github.com/tailwindlabs/tailwindcss/pull/14808))
### Fixed
@@ -18,6 +21,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Changed
- Use logical `*-inline` and `*-block` properties for all x/y utilities like `px-*`, `my-*`, `scroll-px-*`, and `inset-y-*` ([#14805](https://github.com/tailwindlabs/tailwindcss/pull/14805))
+- Respect automatic source detection heuristics in sources registered with `@source` ([#14820](https://github.com/tailwindlabs/tailwindcss/pull/14820))
## [4.0.0-alpha.30] - 2024-10-24
diff --git a/Cargo.lock b/Cargo.lock
index f3ecb7a58e2e..eb9602407af0 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -11,6 +11,16 @@ dependencies = [
"memchr",
]
+[[package]]
+name = "bexpand"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "045d7d9db8390cf2c59f39f3bd138f1962ef616b096d1b9f5651c7acba19e5a7"
+dependencies = [
+ "itertools",
+ "nom",
+]
+
[[package]]
name = "bitflags"
version = "2.6.0"
@@ -183,6 +193,15 @@ dependencies = [
"winapi-util",
]
+[[package]]
+name = "itertools"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57"
+dependencies = [
+ "either",
+]
+
[[package]]
name = "lazy_static"
version = "1.4.0"
@@ -202,7 +221,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4"
dependencies = [
"cfg-if",
- "windows-targets 0.48.0",
+ "windows-targets",
]
[[package]]
@@ -232,6 +251,12 @@ version = "2.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
+[[package]]
+name = "minimal-lexical"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
+
[[package]]
name = "napi"
version = "2.16.11"
@@ -289,6 +314,16 @@ dependencies = [
"libloading",
]
+[[package]]
+name = "nom"
+version = "7.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
+dependencies = [
+ "memchr",
+ "minimal-lexical",
+]
+
[[package]]
name = "nu-ansi-term"
version = "0.46.0"
@@ -485,6 +520,7 @@ dependencies = [
name = "tailwindcss-oxide"
version = "0.1.0"
dependencies = [
+ "bexpand",
"bstr",
"crossbeam",
"dunce",
@@ -649,7 +685,7 @@ version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
dependencies = [
- "windows-targets 0.52.6",
+ "windows-targets",
]
[[package]]
@@ -658,22 +694,7 @@ version = "0.59.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
dependencies = [
- "windows-targets 0.52.6",
-]
-
-[[package]]
-name = "windows-targets"
-version = "0.48.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5"
-dependencies = [
- "windows_aarch64_gnullvm 0.48.0",
- "windows_aarch64_msvc 0.48.0",
- "windows_i686_gnu 0.48.0",
- "windows_i686_msvc 0.48.0",
- "windows_x86_64_gnu 0.48.0",
- "windows_x86_64_gnullvm 0.48.0",
- "windows_x86_64_msvc 0.48.0",
+ "windows-targets",
]
[[package]]
@@ -682,46 +703,28 @@ version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
dependencies = [
- "windows_aarch64_gnullvm 0.52.6",
- "windows_aarch64_msvc 0.52.6",
- "windows_i686_gnu 0.52.6",
+ "windows_aarch64_gnullvm",
+ "windows_aarch64_msvc",
+ "windows_i686_gnu",
"windows_i686_gnullvm",
- "windows_i686_msvc 0.52.6",
- "windows_x86_64_gnu 0.52.6",
- "windows_x86_64_gnullvm 0.52.6",
- "windows_x86_64_msvc 0.52.6",
+ "windows_i686_msvc",
+ "windows_x86_64_gnu",
+ "windows_x86_64_gnullvm",
+ "windows_x86_64_msvc",
]
-[[package]]
-name = "windows_aarch64_gnullvm"
-version = "0.48.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc"
-
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
-[[package]]
-name = "windows_aarch64_msvc"
-version = "0.48.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3"
-
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
-[[package]]
-name = "windows_i686_gnu"
-version = "0.48.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241"
-
[[package]]
name = "windows_i686_gnu"
version = "0.52.6"
@@ -734,48 +737,24 @@ version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
-[[package]]
-name = "windows_i686_msvc"
-version = "0.48.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00"
-
[[package]]
name = "windows_i686_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
-[[package]]
-name = "windows_x86_64_gnu"
-version = "0.48.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1"
-
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
-[[package]]
-name = "windows_x86_64_gnullvm"
-version = "0.48.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953"
-
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
-[[package]]
-name = "windows_x86_64_msvc"
-version = "0.48.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"
-
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.6"
diff --git a/crates/node/src/lib.rs b/crates/node/src/lib.rs
index 10d8f99d7008..2eff57be308d 100644
--- a/crates/node/src/lib.rs
+++ b/crates/node/src/lib.rs
@@ -18,13 +18,6 @@ pub struct ChangedContent {
pub extension: String,
}
-#[derive(Debug, Clone)]
-#[napi(object)]
-pub struct DetectSources {
- /// Base path to start scanning from
- pub base: String,
-}
-
#[derive(Debug, Clone)]
#[napi(object)]
pub struct GlobEntry {
@@ -62,20 +55,11 @@ impl From for GlobEntry {
}
}
-impl From<DetectSources> for tailwindcss_oxide::scanner::detect_sources::DetectSources {
- fn from(detect_sources: DetectSources) -> Self {
- Self::new(detect_sources.base.into())
- }
-}
-
// ---
#[derive(Debug, Clone)]
#[napi(object)]
pub struct ScannerOptions {
- /// Automatically detect sources in the base path
-  pub detect_sources: Option<DetectSources>,
-
/// Glob sources
  pub sources: Option<Vec<GlobEntry>>,
}
@@ -102,7 +86,6 @@ impl Scanner {
pub fn new(opts: ScannerOptions) -> Self {
Self {
scanner: tailwindcss_oxide::Scanner::new(
- opts.detect_sources.map(Into::into),
opts
.sources
.map(|x| x.into_iter().map(Into::into).collect()),
@@ -128,7 +111,7 @@ impl Scanner {
input: ChangedContent,
) -> Vec {
let content = input.content.unwrap_or_else(|| {
- std::fs::read_to_string(&input.file.unwrap()).expect("Failed to read file")
+ std::fs::read_to_string(input.file.unwrap()).expect("Failed to read file")
});
let input = ChangedContent {
diff --git a/crates/node/src/utf16.rs b/crates/node/src/utf16.rs
index 7cdebd9fd1b2..a7e7018cb28f 100644
--- a/crates/node/src/utf16.rs
+++ b/crates/node/src/utf16.rs
@@ -31,20 +31,18 @@ impl<'a> IndexConverter<'a> {
// will only ever be incremented up to the length of the input string.
//
// This eliminates a "potential" panic that cannot actually happen
- let slice = unsafe {
- self.input.get_unchecked(self.curr_utf8..)
- };
+ let slice = unsafe { self.input.get_unchecked(self.curr_utf8..) };
for c in slice.chars() {
if self.curr_utf8 >= pos {
- break
+ break;
}
self.curr_utf8 += c.len_utf8();
self.curr_utf16 += c.len_utf16();
}
- return self.curr_utf16 as i64;
+ self.curr_utf16 as i64
}
}
@@ -66,19 +64,16 @@ mod test {
(4, 4),
(5, 5),
(6, 6),
-
// inside the 🔥
(7, 8),
(8, 8),
(9, 8),
(10, 8),
-
// inside the 🥳
(11, 10),
(12, 10),
(13, 10),
(14, 10),
-
// world!
(15, 11),
(16, 12),
@@ -87,7 +82,6 @@ mod test {
(19, 15),
(20, 16),
(21, 17),
-
// Past the end should return the last utf-16 character index
(22, 17),
(100, 17),
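
For context on the offset table above: the UTF-8 and UTF-16 positions diverge inside the emoji because each emoji takes four UTF-8 bytes but only two UTF-16 code units. A minimal standalone check (illustrative, not part of the patch) of the widths the IndexConverter accumulates:

fn main() {
    // '🔥' and '🥳' each take four bytes in UTF-8 but only two code units in
    // UTF-16, which is why the table above maps UTF-8 offsets 7..=10 to the
    // single UTF-16 offset 8.
    for c in ['🔥', '🥳'] {
        assert_eq!(c.len_utf8(), 4);
        assert_eq!(c.len_utf16(), 2);
    }

    // ASCII stays 1:1 in both encodings, so later offsets line up again,
    // shifted only by the accumulated difference.
    assert_eq!('w'.len_utf8(), 1);
    assert_eq!('w'.len_utf16(), 1);
}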
diff --git a/crates/oxide/Cargo.toml b/crates/oxide/Cargo.toml
index 315fcf4f5375..287e0cd61092 100644
--- a/crates/oxide/Cargo.toml
+++ b/crates/oxide/Cargo.toml
@@ -14,8 +14,9 @@ tracing = { version = "0.1.40", features = [] }
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
walkdir = "2.5.0"
ignore = "0.4.23"
-glob-match = "0.2.1"
dunce = "1.0.5"
+bexpand = "1.2.0"
+glob-match = "0.2.1"
[dev-dependencies]
tempfile = "3.13.0"
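
The new `bexpand` dependency supplies the brace expansion used by `glob.rs` and `lib.rs` further down. A minimal sketch of that usage, based only on the `Expression` API exercised in this patch (a fallible conversion from `&str`, then an iterator of `Result` items that convert into `String`); the pattern literal is just an example:

use bexpand::Expression;

fn main() {
    // Expand a brace pattern into concrete glob patterns, mirroring how the
    // scanner expands each `@source` entry before optimizing it.
    let expression: Result<Expression, _> = "src/**/*.{html,js}".try_into();
    let Ok(expression) = expression else {
        return;
    };

    for pattern in expression.into_iter().filter_map(Result::ok) {
        // Each item converts into a plain `String` glob pattern:
        // "src/**/*.html" and "src/**/*.js".
        let pattern: String = pattern.into();
        println!("{pattern}");
    }
}

Each expanded pattern becomes its own `GlobEntry`, which is what lets the later optimization steps assume a glob contains no `{…}` branches.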
diff --git a/crates/oxide/src/glob.rs b/crates/oxide/src/glob.rs
index d74b60b4d824..285c72d0f063 100644
--- a/crates/oxide/src/glob.rs
+++ b/crates/oxide/src/glob.rs
@@ -1,22 +1,59 @@
+use fxhash::{FxHashMap, FxHashSet};
use glob_match::glob_match;
-use std::iter;
use std::path::{Path, PathBuf};
+use tracing::event;
use crate::GlobEntry;
-pub fn fast_glob(
-    patterns: &Vec<GlobEntry>,
-) -> Result<impl iter::Iterator<Item = PathBuf>, std::io::Error> {
- Ok(get_fast_patterns(patterns)
- .into_iter()
- .flat_map(|(base_path, patterns)| {
- globwalk::GlobWalkerBuilder::from_patterns(base_path, &patterns)
- .follow_links(true)
- .build()
- .unwrap()
- .filter_map(Result::ok)
- .map(|file| file.path().to_path_buf())
- }))
+pub fn hoist_static_glob_parts(entries: &Vec<GlobEntry>) -> Vec<GlobEntry> {
+ let mut result = vec![];
+
+ for entry in entries {
+ let (static_part, dynamic_part) = split_pattern(&entry.pattern);
+
+ let base: PathBuf = entry.base.clone().into();
+ let base = match static_part {
+ Some(static_part) => base.join(static_part),
+ None => base,
+ };
+
+ let base = match dunce::canonicalize(&base) {
+ Ok(base) => base,
+ Err(err) => {
+ event!(tracing::Level::ERROR, "Failed to resolve glob: {:?}", err);
+ // If we can't resolve the new base on disk, let's just skip this entry.
+ continue;
+ }
+ };
+
+ let pattern = match dynamic_part {
+ Some(dynamic_part) => dynamic_part,
+ None => {
+ if base.is_dir() {
+ "**/*".to_owned()
+ } else {
+ "".to_owned()
+ }
+ }
+ };
+
+    // If the base path is a file, then we want to move the file name into the pattern and
+    // point the base at its parent directory. This is necessary for file watchers that can
+    // only listen to folders.
+ if pattern.is_empty() && base.is_file() {
+ result.push(GlobEntry {
+ base: base.parent().unwrap().to_string_lossy().to_string(),
+ pattern: base.file_name().unwrap().to_string_lossy().to_string(),
+ });
+ }
+
+ result.push(GlobEntry {
+ base: base.to_string_lossy().to_string(),
+ pattern,
+ });
+ }
+
+ result
}
/// This function attempts to optimize the glob patterns to improve performance. The problem is
@@ -42,98 +79,89 @@ pub fn fast_glob(
/// tailwind --pwd ./project/pages --content "**/*.js"
/// tailwind --pwd ./project/components --content "**/*.js"
/// ```
-pub fn get_fast_patterns(patterns: &Vec<GlobEntry>) -> Vec<(PathBuf, Vec<String>)> {
-    let mut optimized_patterns: Vec<(PathBuf, Vec<String>)> = vec![];
+pub fn optimize_patterns(entries: &Vec<GlobEntry>) -> Vec<GlobEntry> {
+ let entries = hoist_static_glob_parts(entries);
- for pattern in patterns {
- let base_path = PathBuf::from(&pattern.base);
- let pattern = &pattern.pattern;
+    // Track all base paths and their patterns. Later we will turn them back into `GlobEntry`s.
+    let mut pattern_map: FxHashMap<String, FxHashSet<String>> = FxHashMap::default();
- let is_negated = pattern.starts_with('!');
- let mut pattern = pattern.clone();
- if is_negated {
- pattern.remove(0);
- }
+ for glob_entry in entries {
+ let entry = pattern_map.entry(glob_entry.base).or_default();
+ entry.insert(glob_entry.pattern.clone());
+ }
-        let mut folders = pattern.split('/').collect::<Vec<_>>();
-
- if folders.len() <= 1 {
- // No paths we can simplify, so let's use it as-is.
- optimized_patterns.push((base_path, vec![pattern]));
- } else {
- // We do have folders because `/` exists. Let's try to simplify the globs!
- // Safety: We know that the length is greater than 1, so we can safely unwrap.
- let file_pattern = folders.pop().unwrap();
- let all_folders = folders.clone();
- let mut temp_paths = vec![base_path];
-
- let mut bail = false;
-
- for (i, folder) in folders.into_iter().enumerate() {
- // There is a wildcard in the folder, so we have to bail now... 😢 But this also
- // means that we can skip looking at the rest of the folders, so there is at least
- // this small optimization we can apply!
- if folder.contains('*') {
- // Get all the remaining folders, attach the existing file_pattern so that this
- // can now be the final pattern we use.
- let mut remaining_folders = all_folders[i..].to_vec();
- remaining_folders.push(file_pattern);
-
- let pattern = remaining_folders.join("/");
- for path in &temp_paths {
- optimized_patterns.push((path.to_path_buf(), vec![pattern.to_string()]));
- }
+ let mut glob_entries = pattern_map
+ .into_iter()
+ .map(|(base, patterns)| {
+ let size = patterns.len();
- bail = true;
- break;
- }
+ let mut patterns = patterns.into_iter();
- // The folder is very likely using an expandable pattern which we can expand!
- if folder.contains('{') && folder.contains('}') {
- let branches = expand_braces(folder);
-
- let existing_paths = temp_paths;
- temp_paths = branches
- .iter()
- .flat_map(|branch| {
- existing_paths
- .clone()
- .into_iter()
- .map(|path| path.join(branch))
-                            .collect::<Vec<_>>()
- })
-                    .collect::<Vec<_>>();
- }
- // The folder should just be a simple folder name without any glob magic. We should
- // be able to safely add it to the existing paths.
- else {
- temp_paths = temp_paths
- .into_iter()
- .map(|path| path.join(folder))
- .collect();
- }
- }
+ GlobEntry {
+ base,
+ pattern: match size {
+ 1 => patterns.next().unwrap(),
+ _ => {
+                        let mut patterns = patterns.collect::<Vec<_>>();
- // As long as we didn't bail, we can now add the current expanded patterns to the
- // optimized patterns.
- if !bail {
- for path in &temp_paths {
- optimized_patterns.push((path.to_path_buf(), vec![file_pattern.to_string()]));
- }
+ // Sort the patterns to ensure stable results.
+ patterns.sort();
+
+ // TODO: Right now this will generate something like `{**/*.html,**/*.js}`,
+ // but maybe we want to generate this instead:`**/*.{html,js}`.
+ format!("{{{}}}", patterns.join(","))
+ }
+ },
}
+ })
+        .collect::<Vec<GlobEntry>>();
+
+ // Sort the entries by base path to ensure we have stable results.
+ glob_entries.sort_by(|a, z| a.base.cmp(&z.base));
+
+ glob_entries
+}
+
+// Split a glob pattern into a `static` and `dynamic` part.
+//
+// Assumption: we assume that all globs are expanded, which means that the only dynamic parts are
+// using `*`.
+//
+// E.g.:
+// Original input: `../project-b/**/*.{html,js}`
+// Expanded input: `../project-b/**/*.html` & `../project-b/**/*.js`
+// Split on first input: ("../project-b", "**/*.html")
+// Split on second input: ("../project-b", "**/*.js")
+fn split_pattern(pattern: &str) -> (Option<String>, Option<String>) {
+ // No dynamic parts, so we can just return the input as-is.
+ if !pattern.contains('*') {
+ return (Some(pattern.to_owned()), None);
+ }
+
+ let mut last_slash_position = None;
+
+ for (i, c) in pattern.char_indices() {
+ if c == '/' {
+ last_slash_position = Some(i);
}
- // Ensure that we re-add all the `!` signs to the patterns.
- if is_negated {
- for (_, patterns) in &mut optimized_patterns {
- for pattern in patterns {
- pattern.insert(0, '!');
- }
- }
+ if c == '*' || c == '!' {
+ break;
}
}
- optimized_patterns
+ // Very first character is a `*`, therefore there is no static part, only a dynamic part.
+ let Some(last_slash_position) = last_slash_position else {
+ return (None, Some(pattern.to_owned()));
+ };
+
+ let static_part = pattern[..last_slash_position].to_owned();
+ let dynamic_part = pattern[last_slash_position + 1..].to_owned();
+
+ let static_part = (!static_part.is_empty()).then_some(static_part);
+ let dynamic_part = (!dynamic_part.is_empty()).then_some(dynamic_part);
+
+ (static_part, dynamic_part)
}
pub fn path_matches_globs(path: &Path, globs: &[GlobEntry]) -> bool {
@@ -144,167 +172,185 @@ pub fn path_matches_globs(path: &Path, globs: &[GlobEntry]) -> bool {
.any(|g| glob_match(&format!("{}/{}", g.base, g.pattern), &path))
}
-/// Given this input: a-{b,c}-d-{e,f}
-/// We will get:
-/// [
-/// a-b-d-e
-/// a-b-d-f
-/// a-c-d-e
-/// a-c-d-f
-/// ]
-/// TODO: There is probably a way nicer way of doing this, but this works for now.
-fn expand_braces(input: &str) -> Vec<String> {
-    let mut result: Vec<String> = vec![];
-
- let mut in_braces = false;
- let mut last_char: char = '\0';
-
- let mut current = String::new();
-
- // Given the input: a-{b,c}-d-{e,f}-g
- // The template will look like this: ["a-", "-d-", "g"].
-    let mut template: Vec<String> = vec![];
-
- // The branches will look like this: [["b", "c"], ["e", "f"]].
-    let mut branches: Vec<Vec<String>> = vec![];
-
- for (i, c) in input.char_indices() {
- let is_escaped = i > 0 && last_char == '\\';
- last_char = c;
-
- match c {
- '{' if !is_escaped => {
- // Ensure that when a new set of braces is opened, that we at least have 1
- // template.
- if template.is_empty() {
- template.push(String::new());
- }
-
- in_braces = true;
- branches.push(vec![]);
- template.push(String::new());
- }
- '}' if !is_escaped => {
- in_braces = false;
- if let Some(last) = branches.last_mut() {
- last.push(current.clone());
- }
- current.clear();
- }
- ',' if !is_escaped && in_braces => {
- if let Some(last) = branches.last_mut() {
- last.push(current.clone());
- }
- current.clear();
+#[cfg(test)]
+mod tests {
+ use super::optimize_patterns;
+ use crate::GlobEntry;
+ use bexpand::Expression;
+ use std::process::Command;
+ use std::{fs, path};
+ use tempfile::tempdir;
+
+ fn create_folders(folders: &[&str]) -> String {
+ // Create a temporary working directory
+ let dir = tempdir().unwrap().into_path();
+
+ // Initialize this directory as a git repository
+ let _ = Command::new("git").arg("init").current_dir(&dir).output();
+
+ // Create the necessary files
+ for path in folders {
+ // Ensure we use the right path separator for the current platform
+ let path = dir.join(path.replace('/', path::MAIN_SEPARATOR.to_string().as_str()));
+ let parent = path.parent().unwrap();
+ if !parent.exists() {
+ fs::create_dir_all(parent).unwrap();
}
- _ if in_braces => current.push(c),
- _ => {
- if template.is_empty() {
- template.push(String::new());
- }
- if let Some(last) = template.last_mut() {
- last.push(c);
- }
- }
- };
- }
+ fs::write(path, "").unwrap();
+ }
- // Ensure we have a string that we can start adding information too.
- if !template.is_empty() && !branches.is_empty() {
- result.push("".to_string());
- }
+ let base = format!("{}", dir.display());
- // Let's try to generate everything!
- for (i, template) in template.into_iter().enumerate() {
- // Append current template string to all existing results.
- result = result.into_iter().map(|x| x + &template).collect();
-
- // Get the results, and copy it for every single branch.
- if let Some(branches) = branches.get(i) {
- result = branches
- .iter()
- .flat_map(|branch| {
- result
- .clone()
- .into_iter()
- .map(|x| x + branch)
-                        .collect::<Vec<_>>()
- })
-                .collect::<Vec<_>>();
- }
+ base
}
- result
-}
-
-#[cfg(test)]
-mod tests {
- use super::get_fast_patterns;
- use crate::GlobEntry;
- use std::path::PathBuf;
+    fn test(base: &str, sources: &[GlobEntry]) -> Vec<GlobEntry> {
+ // Resolve all content paths for the (temporary) current working directory
+        let sources: Vec<GlobEntry> = sources
+ .iter()
+ .map(|x| GlobEntry {
+ base: format!("{}{}", base, x.base),
+ pattern: x.pattern.clone(),
+ })
+ .collect();
+
+ // Expand glob patterns into multiple `GlobEntry`s.
+ let sources = sources
+ .iter()
+ .flat_map(|source| {
+                let expression: Result<Expression, _> = source.pattern[..].try_into();
+ let Ok(expression) = expression else {
+ return vec![source.clone()];
+ };
+
+ expression
+ .into_iter()
+ .filter_map(Result::ok)
+ .map(move |pattern| GlobEntry {
+ base: source.base.clone(),
+ pattern: pattern.into(),
+ })
+                    .collect::<Vec<_>>()
+ })
+            .collect::<Vec<_>>();
+
+ let optimized_sources = optimize_patterns(&sources);
+
+ let parent_dir = format!("{}", fs::canonicalize(base).unwrap().display());
+
+ // Remove the temporary directory from the base
+ optimized_sources
+ .into_iter()
+ .map(|source| GlobEntry {
+ // Normalize paths to use unix style separators
+ base: source.base.replace(&parent_dir, "").replace('\\', "/"),
+ pattern: source.pattern,
+ })
+ .collect()
+ }
#[test]
fn it_should_keep_globs_that_start_with_file_wildcards_as_is() {
- let actual = get_fast_patterns(&vec![GlobEntry {
+ let base = create_folders(&["projects"]);
+
+ let actual = test(
+ &base,
+ &[GlobEntry {
+ base: "/projects".to_string(),
+ pattern: "*.html".to_string(),
+ }],
+ );
+
+ let expected = vec![GlobEntry {
base: "/projects".to_string(),
pattern: "*.html".to_string(),
- }]);
- let expected = vec![(PathBuf::from("/projects"), vec!["*.html".to_string()])];
+ }];
- assert_eq!(actual, expected,);
+ assert_eq!(actual, expected);
}
#[test]
fn it_should_keep_globs_that_start_with_folder_wildcards_as_is() {
- let actual = get_fast_patterns(&vec![GlobEntry {
+ let base = create_folders(&["projects"]);
+
+ let actual = test(
+ &base,
+ &[GlobEntry {
+ base: "/projects".to_string(),
+ pattern: "**/*.html".to_string(),
+ }],
+ );
+
+ let expected = vec![GlobEntry {
base: "/projects".to_string(),
pattern: "**/*.html".to_string(),
- }]);
-
- let expected = vec![(PathBuf::from("/projects"), vec!["**/*.html".to_string()])];
+ }];
assert_eq!(actual, expected,);
}
#[test]
fn it_should_move_the_starting_folder_to_the_path() {
- let actual = get_fast_patterns(&vec![GlobEntry {
- base: "/projects".to_string(),
- pattern: "example/*.html".to_string(),
- }]);
- let expected = vec![(
- PathBuf::from("/projects/example"),
- vec!["*.html".to_string()],
- )];
+ let base = create_folders(&["projects/example"]);
+
+ let actual = test(
+ &base,
+ &[GlobEntry {
+ base: "/projects".to_string(),
+ pattern: "example/*.html".to_string(),
+ }],
+ );
+
+ let expected = vec![GlobEntry {
+ base: "/projects/example".to_string(),
+ pattern: "*.html".to_string(),
+ }];
assert_eq!(actual, expected,);
}
#[test]
fn it_should_move_the_starting_folders_to_the_path() {
- let actual = get_fast_patterns(&vec![GlobEntry {
- base: "/projects".to_string(),
- pattern: "example/other/*.html".to_string(),
- }]);
- let expected = vec![(
- PathBuf::from("/projects/example/other"),
- vec!["*.html".to_string()],
- )];
+ let base = create_folders(&["projects/example/other"]);
+
+ let actual = test(
+ &base,
+ &[GlobEntry {
+ base: "/projects".to_string(),
+ pattern: "example/other/*.html".to_string(),
+ }],
+ );
+
+ let expected = vec![GlobEntry {
+ base: "/projects/example/other".to_string(),
+ pattern: "*.html".to_string(),
+ }];
assert_eq!(actual, expected,);
}
#[test]
fn it_should_branch_expandable_folders() {
- let actual = get_fast_patterns(&vec![GlobEntry {
- base: "/projects".to_string(),
- pattern: "{foo,bar}/*.html".to_string(),
- }]);
+ let base = create_folders(&["projects/foo", "projects/bar"]);
+
+ let actual = test(
+ &base,
+ &[GlobEntry {
+ base: "/projects".to_string(),
+ pattern: "{foo,bar}/*.html".to_string(),
+ }],
+ );
let expected = vec![
- (PathBuf::from("/projects/foo"), vec!["*.html".to_string()]),
- (PathBuf::from("/projects/bar"), vec!["*.html".to_string()]),
+ GlobEntry {
+ base: "/projects/bar".to_string(),
+ pattern: "*.html".to_string(),
+ },
+ GlobEntry {
+ base: "/projects/foo".to_string(),
+ pattern: "*.html".to_string(),
+ },
];
assert_eq!(actual, expected,);
@@ -312,27 +358,38 @@ mod tests {
#[test]
fn it_should_expand_multiple_expansions_in_the_same_folder() {
- let actual = get_fast_patterns(&vec![GlobEntry {
- base: "/projects".to_string(),
- pattern: "a-{b,c}-d-{e,f}-g/*.html".to_string(),
- }]);
+ let base = create_folders(&[
+ "projects/a-b-d-e-g",
+ "projects/a-b-d-f-g",
+ "projects/a-c-d-e-g",
+ "projects/a-c-d-f-g",
+ ]);
+
+ let actual = test(
+ &base,
+ &[GlobEntry {
+ base: "/projects".to_string(),
+ pattern: "a-{b,c}-d-{e,f}-g/*.html".to_string(),
+ }],
+ );
+
let expected = vec![
- (
- PathBuf::from("/projects/a-b-d-e-g"),
- vec!["*.html".to_string()],
- ),
- (
- PathBuf::from("/projects/a-c-d-e-g"),
- vec!["*.html".to_string()],
- ),
- (
- PathBuf::from("/projects/a-b-d-f-g"),
- vec!["*.html".to_string()],
- ),
- (
- PathBuf::from("/projects/a-c-d-f-g"),
- vec!["*.html".to_string()],
- ),
+ GlobEntry {
+ base: "/projects/a-b-d-e-g".to_string(),
+ pattern: "*.html".to_string(),
+ },
+ GlobEntry {
+ base: "/projects/a-b-d-f-g".to_string(),
+ pattern: "*.html".to_string(),
+ },
+ GlobEntry {
+ base: "/projects/a-c-d-e-g".to_string(),
+ pattern: "*.html".to_string(),
+ },
+ GlobEntry {
+ base: "/projects/a-c-d-f-g".to_string(),
+ pattern: "*.html".to_string(),
+ },
];
assert_eq!(actual, expected,);
@@ -340,75 +397,98 @@ mod tests {
#[test]
fn multiple_expansions_per_folder_starting_at_the_root() {
- let actual = get_fast_patterns(&vec![GlobEntry {
- base: "/projects".to_string(),
- pattern: "{a,b}-c-{d,e}-f/{b,c}-d-{e,f}-g/*.html".to_string(),
- }]);
+ let base = create_folders(&[
+ "projects/a-c-d-f/b-d-e-g",
+ "projects/a-c-d-f/b-d-f-g",
+ "projects/a-c-d-f/c-d-e-g",
+ "projects/a-c-d-f/c-d-f-g",
+ "projects/a-c-e-f/b-d-e-g",
+ "projects/a-c-e-f/b-d-f-g",
+ "projects/a-c-e-f/c-d-e-g",
+ "projects/a-c-e-f/c-d-f-g",
+ "projects/b-c-d-f/b-d-e-g",
+ "projects/b-c-d-f/b-d-f-g",
+ "projects/b-c-d-f/c-d-e-g",
+ "projects/b-c-d-f/c-d-f-g",
+ "projects/b-c-e-f/b-d-e-g",
+ "projects/b-c-e-f/b-d-f-g",
+ "projects/b-c-e-f/c-d-e-g",
+ "projects/b-c-e-f/c-d-f-g",
+ ]);
+
+ let actual = test(
+ &base,
+ &[GlobEntry {
+ base: "/projects".to_string(),
+ pattern: "{a,b}-c-{d,e}-f/{b,c}-d-{e,f}-g/*.html".to_string(),
+ }],
+ );
+
let expected = vec![
- (
- PathBuf::from("/projects/a-c-d-f/b-d-e-g"),
- vec!["*.html".to_string()],
- ),
- (
- PathBuf::from("/projects/b-c-d-f/b-d-e-g"),
- vec!["*.html".to_string()],
- ),
- (
- PathBuf::from("/projects/a-c-e-f/b-d-e-g"),
- vec!["*.html".to_string()],
- ),
- (
- PathBuf::from("/projects/b-c-e-f/b-d-e-g"),
- vec!["*.html".to_string()],
- ),
- (
- PathBuf::from("/projects/a-c-d-f/c-d-e-g"),
- vec!["*.html".to_string()],
- ),
- (
- PathBuf::from("/projects/b-c-d-f/c-d-e-g"),
- vec!["*.html".to_string()],
- ),
- (
- PathBuf::from("/projects/a-c-e-f/c-d-e-g"),
- vec!["*.html".to_string()],
- ),
- (
- PathBuf::from("/projects/b-c-e-f/c-d-e-g"),
- vec!["*.html".to_string()],
- ),
- (
- PathBuf::from("/projects/a-c-d-f/b-d-f-g"),
- vec!["*.html".to_string()],
- ),
- (
- PathBuf::from("/projects/b-c-d-f/b-d-f-g"),
- vec!["*.html".to_string()],
- ),
- (
- PathBuf::from("/projects/a-c-e-f/b-d-f-g"),
- vec!["*.html".to_string()],
- ),
- (
- PathBuf::from("/projects/b-c-e-f/b-d-f-g"),
- vec!["*.html".to_string()],
- ),
- (
- PathBuf::from("/projects/a-c-d-f/c-d-f-g"),
- vec!["*.html".to_string()],
- ),
- (
- PathBuf::from("/projects/b-c-d-f/c-d-f-g"),
- vec!["*.html".to_string()],
- ),
- (
- PathBuf::from("/projects/a-c-e-f/c-d-f-g"),
- vec!["*.html".to_string()],
- ),
- (
- PathBuf::from("/projects/b-c-e-f/c-d-f-g"),
- vec!["*.html".to_string()],
- ),
+ GlobEntry {
+ base: "/projects/a-c-d-f/b-d-e-g".into(),
+ pattern: "*.html".to_string(),
+ },
+ GlobEntry {
+ base: "/projects/a-c-d-f/b-d-f-g".into(),
+ pattern: "*.html".to_string(),
+ },
+ GlobEntry {
+ base: "/projects/a-c-d-f/c-d-e-g".into(),
+ pattern: "*.html".to_string(),
+ },
+ GlobEntry {
+ base: "/projects/a-c-d-f/c-d-f-g".into(),
+ pattern: "*.html".to_string(),
+ },
+ GlobEntry {
+ base: "/projects/a-c-e-f/b-d-e-g".into(),
+ pattern: "*.html".to_string(),
+ },
+ GlobEntry {
+ base: "/projects/a-c-e-f/b-d-f-g".into(),
+ pattern: "*.html".to_string(),
+ },
+ GlobEntry {
+ base: "/projects/a-c-e-f/c-d-e-g".into(),
+ pattern: "*.html".to_string(),
+ },
+ GlobEntry {
+ base: "/projects/a-c-e-f/c-d-f-g".into(),
+ pattern: "*.html".to_string(),
+ },
+ GlobEntry {
+ base: "/projects/b-c-d-f/b-d-e-g".into(),
+ pattern: "*.html".to_string(),
+ },
+ GlobEntry {
+ base: "/projects/b-c-d-f/b-d-f-g".into(),
+ pattern: "*.html".to_string(),
+ },
+ GlobEntry {
+ base: "/projects/b-c-d-f/c-d-e-g".into(),
+ pattern: "*.html".to_string(),
+ },
+ GlobEntry {
+ base: "/projects/b-c-d-f/c-d-f-g".into(),
+ pattern: "*.html".to_string(),
+ },
+ GlobEntry {
+ base: "/projects/b-c-e-f/b-d-e-g".into(),
+ pattern: "*.html".to_string(),
+ },
+ GlobEntry {
+ base: "/projects/b-c-e-f/b-d-f-g".into(),
+ pattern: "*.html".to_string(),
+ },
+ GlobEntry {
+ base: "/projects/b-c-e-f/c-d-e-g".into(),
+ pattern: "*.html".to_string(),
+ },
+ GlobEntry {
+ base: "/projects/b-c-e-f/c-d-f-g".into(),
+ pattern: "*.html".to_string(),
+ },
];
assert_eq!(actual, expected,);
@@ -416,20 +496,25 @@ mod tests {
#[test]
fn it_should_stop_expanding_once_we_hit_a_wildcard() {
- let actual = get_fast_patterns(&vec![GlobEntry {
- base: "/projects".to_string(),
- pattern: "{foo,bar}/example/**/{baz,qux}/*.html".to_string(),
- }]);
+ let base = create_folders(&["projects/bar/example", "projects/foo/example"]);
+
+ let actual = test(
+ &base,
+ &[GlobEntry {
+ base: "/projects".to_string(),
+ pattern: "{foo,bar}/example/**/{baz,qux}/*.html".to_string(),
+ }],
+ );
let expected = vec![
- (
- PathBuf::from("/projects/foo/example"),
- vec!["**/{baz,qux}/*.html".to_string()],
- ),
- (
- PathBuf::from("/projects/bar/example"),
- vec!["**/{baz,qux}/*.html".to_string()],
- ),
+ GlobEntry {
+ base: "/projects/bar/example".to_string(),
+ pattern: "{**/baz/*.html,**/qux/*.html}".to_string(),
+ },
+ GlobEntry {
+ base: "/projects/foo/example".to_string(),
+ pattern: "{**/baz/*.html,**/qux/*.html}".to_string(),
+ },
];
assert_eq!(actual, expected,);
@@ -437,41 +522,60 @@ mod tests {
#[test]
fn it_should_keep_the_negation_symbol_for_all_new_patterns() {
- let actual = get_fast_patterns(&vec![GlobEntry {
+ let base = create_folders(&["projects"]);
+
+ let actual = test(
+ &base,
+ &[GlobEntry {
+ base: "/projects".to_string(),
+ pattern: "!{foo,bar}/*.html".to_string(),
+ }],
+ );
+
+ let expected = vec![GlobEntry {
base: "/projects".to_string(),
- pattern: "!{foo,bar}/*.html".to_string(),
- }]);
- let expected = vec![
- (PathBuf::from("/projects/foo"), vec!["!*.html".to_string()]),
- (PathBuf::from("/projects/bar"), vec!["!*.html".to_string()]),
- ];
+ // TODO: This is wrong, because `!` should be in front. But right now we don't support
+ // `@source "!../foo/bar";` anyway.
+ pattern: "{!bar/*.html,!foo/*.html}".to_string(),
+ }];
assert_eq!(actual, expected,);
}
#[test]
fn it_should_expand_a_complex_example() {
- let actual = get_fast_patterns(&vec![GlobEntry {
- base: "/projects".to_string(),
- pattern: "a/{b,c}/d/{e,f}/g/*.html".to_string(),
- }]);
+ let base = create_folders(&[
+ "projects/a/b/d/e/g",
+ "projects/a/b/d/f/g",
+ "projects/a/c/d/e/g",
+ "projects/a/c/d/f/g",
+ ]);
+
+ let actual = test(
+ &base,
+ &[GlobEntry {
+ base: "/projects".to_string(),
+ pattern: "a/{b,c}/d/{e,f}/g/*.html".to_string(),
+ }],
+ );
+
let expected = vec![
- (
- PathBuf::from("/projects/a/b/d/e/g"),
- vec!["*.html".to_string()],
- ),
- (
- PathBuf::from("/projects/a/c/d/e/g"),
- vec!["*.html".to_string()],
- ),
- (
- PathBuf::from("/projects/a/b/d/f/g"),
- vec!["*.html".to_string()],
- ),
- (
- PathBuf::from("/projects/a/c/d/f/g"),
- vec!["*.html".to_string()],
- ),
+ GlobEntry {
+ base: "/projects/a/b/d/e/g".to_string(),
+ pattern: "*.html".to_string(),
+ },
+ GlobEntry {
+ base: "/projects/a/b/d/f/g".to_string(),
+ pattern: "*.html".to_string(),
+ },
+ GlobEntry {
+ base: "/projects/a/c/d/e/g".to_string(),
+ pattern: "*.html".to_string(),
+ },
+ GlobEntry {
+ base: "/projects/a/c/d/f/g".to_string(),
+ pattern: "*.html".to_string(),
+ },
];
assert_eq!(actual, expected,);
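
The reason `path_matches_globs` above, and the new `scan_sources` in `lib.rs` below, match files against the base and pattern joined into a single glob is spelled out in the comment inside `scan_sources`. A small self-contained check of that behavior using the `glob-match` crate this code already depends on (the paths are made up for illustration):

use glob_match::glob_match;

fn main() {
    let base = "/my-project/project-a";
    let pattern = "*.html";
    let full_pattern = format!("{base}/{pattern}");

    // A bare `*.html` never matches an absolute path, because `*` does not
    // cross `/` boundaries.
    assert!(!glob_match(pattern, "/my-project/project-a/index.html"));

    // Joining base and pattern makes direct children match...
    assert!(glob_match(&full_pattern, "/my-project/project-a/index.html"));

    // ...without also matching nested files, which prepending `**/` to the
    // pattern would have done.
    assert!(!glob_match(
        &full_pattern,
        "/my-project/project-a/nested/index.html"
    ));
}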
diff --git a/crates/oxide/src/lib.rs b/crates/oxide/src/lib.rs
index 79c2bcb9e59a..0294d9c82dbf 100644
--- a/crates/oxide/src/lib.rs
+++ b/crates/oxide/src/lib.rs
@@ -1,9 +1,12 @@
+use crate::glob::hoist_static_glob_parts;
use crate::parser::Extractor;
+use crate::scanner::allowed_paths::resolve_paths;
use crate::scanner::detect_sources::DetectSources;
+use bexpand::Expression;
use bstr::ByteSlice;
use fxhash::{FxHashMap, FxHashSet};
-use glob::fast_glob;
-use glob::get_fast_patterns;
+use glob::optimize_patterns;
+use glob_match::glob_match;
use rayon::prelude::*;
use std::fs;
use std::path::PathBuf;
@@ -54,7 +57,7 @@ pub struct ScanResult {
    pub globs: Vec<GlobEntry>,
}
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, PartialEq)]
pub struct GlobEntry {
pub base: String,
pub pattern: String,
@@ -62,9 +65,6 @@ pub struct GlobEntry {
#[derive(Debug, Clone, Default)]
pub struct Scanner {
- /// Auto content configuration
-    detect_sources: Option<DetectSources>,
-
/// Glob sources
    sources: Option<Vec<GlobEntry>>,
@@ -86,9 +86,8 @@ pub struct Scanner {
}
impl Scanner {
-    pub fn new(detect_sources: Option<DetectSources>, sources: Option<Vec<GlobEntry>>) -> Self {
+    pub fn new(sources: Option<Vec<GlobEntry>>) -> Self {
Self {
- detect_sources,
sources,
..Default::default()
}
@@ -206,21 +205,11 @@ impl Scanner {
return;
}
- self.detect_sources();
self.scan_sources();
self.ready = true;
}
- #[tracing::instrument(skip_all)]
- fn detect_sources(&mut self) {
- if let Some(detect_sources) = &self.detect_sources {
- let (files, globs) = detect_sources.detect();
- self.files.extend(files);
- self.globs.extend(globs);
- }
- }
-
#[tracing::instrument(skip_all)]
fn scan_sources(&mut self) {
let Some(sources) = &self.sources else {
@@ -231,46 +220,110 @@ impl Scanner {
return;
}
- let resolved_files: Vec<_> = match fast_glob(sources) {
- Ok(matches) => matches
- .filter_map(|x| dunce::canonicalize(&x).ok())
- .collect(),
- Err(err) => {
- event!(tracing::Level::ERROR, "Failed to resolve glob: {:?}", err);
- vec![]
+ // Expand glob patterns and create new `GlobEntry` instances for each expanded pattern.
+ let sources = sources
+ .iter()
+ .flat_map(|source| {
+                let expression: Result<Expression, _> = source.pattern[..].try_into();
+ let Ok(expression) = expression else {
+ return vec![source.clone()];
+ };
+
+ expression
+ .into_iter()
+ .filter_map(Result::ok)
+ .map(move |pattern| GlobEntry {
+ base: source.base.clone(),
+ pattern: pattern.into(),
+ })
+                    .collect::<Vec<_>>()
+ })
+            .collect::<Vec<_>>();
+
+ // Partition sources into sources that should be promoted to auto source detection and
+ // sources that should be resolved as globs.
+ let (auto_sources, glob_sources): (Vec<_>, Vec<_>) = sources.iter().partition(|source| {
+ // If a glob ends with `/**/*`, then we just want to register the base path as a new
+ // base. Essentially converting it to use auto source detection.
+ if source.pattern.ends_with("**/*") {
+ return true;
}
- };
- self.files.extend(resolved_files);
- self.globs.extend(sources.clone());
+ // Directories should be promoted to auto source detection
+ if PathBuf::from(&source.base).join(&source.pattern).is_dir() {
+ return true;
+ }
- // Re-optimize the globs to reduce the number of patterns we have to scan.
- self.globs = get_fast_patterns(&self.globs)
- .into_iter()
- .filter_map(|(root, globs)| {
- let root = match dunce::canonicalize(root) {
- Ok(root) => root,
- Err(error) => {
- event!(
- tracing::Level::ERROR,
- "Failed to canonicalize base path {:?}",
- error
- );
- return None;
- }
+ false
+ });
+
+ for path in auto_sources
+ .iter()
+ .map(|source| PathBuf::from(&source.base).join(source.pattern.trim_end_matches("**/*")))
+ {
+ // Insert a glob for the base path, so we can see new files/folders in the directory itself.
+ self.globs.push(GlobEntry {
+ base: path.to_string_lossy().into(),
+ pattern: "*".into(),
+ });
+
+ // Detect all files/folders in the directory
+ let detect_sources = DetectSources::new(path);
+
+ let (files, globs) = detect_sources.detect();
+ self.files.extend(files);
+ self.globs.extend(globs);
+ }
+
+    // Turn `Vec<&GlobEntry>` into `Vec<GlobEntry>`
+ let glob_sources: Vec<_> = glob_sources.into_iter().cloned().collect();
+ let hoisted = hoist_static_glob_parts(&glob_sources);
+
+ for source in &hoisted {
+ // If the pattern is empty, then the base points to a specific file or folder already
+ // if it doesn't contain any dynamic parts. In that case we can use the base as the
+ // pattern.
+ //
+        // Otherwise we need to combine the base and the pattern, because a pattern
+        // like `*.html` on its own will never match a path like
+        // `/my-project/project-a/index.html`, since the path contains `/`.
+ //
+ // We can't prepend `**/`, because then `/my-project/project-a/nested/index.html` would
+ // match as well.
+ //
+ // Instead we combine the base and the pattern as a single glob pattern.
+ let mut full_pattern = source.base.clone();
+ if !source.pattern.is_empty() {
+ full_pattern.push('/');
+ full_pattern.push_str(&source.pattern);
+ }
+
+ let base = PathBuf::from(&source.base);
+ for entry in resolve_paths(&base) {
+ let Some(file_type) = entry.file_type() else {
+ continue;
};
- Some((root, globs))
- })
- .flat_map(|(root, globs)| {
- let base = root.display().to_string();
+ if !file_type.is_file() {
+ continue;
+ }
- globs.into_iter().map(move |glob| GlobEntry {
- base: base.clone(),
- pattern: glob,
- })
- })
-            .collect::<Vec<GlobEntry>>();
+ let file_path = entry.into_path();
+
+ let Some(file_path_str) = file_path.to_str() else {
+ continue;
+ };
+
+ if glob_match(&full_pattern, file_path_str) {
+ self.files.push(file_path);
+ }
+ }
+ }
+
+ self.globs.extend(hoisted);
+
+ // Re-optimize the globs to reduce the number of patterns we have to scan.
+ self.globs = optimize_patterns(&self.globs);
}
}
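
A hedged usage sketch of the scanner after this change, assuming `Scanner` and `GlobEntry` are exported from the crate root as they are used in the tests later in this patch; the paths are placeholders. A glob ending in `**/*` (or a plain directory) is promoted to automatic source detection, while any other pattern is hoisted and resolved by walking the filesystem:

use tailwindcss_oxide::{GlobEntry, Scanner};

fn main() {
    // The constructor now takes only the list of sources; the separate
    // `DetectSources` option is gone.
    let mut scanner = Scanner::new(Some(vec![
        // Ends in `**/*`, so `scan_sources` promotes it to automatic source
        // detection rooted at `/my-project/src`.
        GlobEntry {
            base: "/my-project".into(),
            pattern: "src/**/*".into(),
        },
        // A concrete glob: its static prefix is hoisted into the base and each
        // walked file is matched against the combined `base + "/" + pattern`.
        GlobEntry {
            base: "/my-project".into(),
            pattern: "pages/**/*.html".into(),
        },
    ]));

    let candidates = scanner.scan();
    println!("{} candidates", candidates.len());
}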
diff --git a/crates/oxide/src/scanner/allowed_paths.rs b/crates/oxide/src/scanner/allowed_paths.rs
index 3015e9dd0465..a761cd34b03c 100644
--- a/crates/oxide/src/scanner/allowed_paths.rs
+++ b/crates/oxide/src/scanner/allowed_paths.rs
@@ -30,7 +30,7 @@ pub fn resolve_allowed_paths(root: &Path) -> impl Iterator<Item = DirEntry> {
WalkBuilder::new(root)
.hidden(false)
.require_git(false)
- .filter_entry(|entry| match entry.file_type() {
+ .filter_entry(move |entry| match entry.file_type() {
Some(file_type) if file_type.is_dir() => match entry.file_name().to_str() {
Some(dir) => !IGNORED_CONTENT_DIRS.contains(&dir),
None => false,
@@ -44,6 +44,15 @@ pub fn resolve_allowed_paths(root: &Path) -> impl Iterator<Item = DirEntry> {
.filter_map(Result::ok)
}
+#[tracing::instrument(skip(root))]
+pub fn resolve_paths(root: &Path) -> impl Iterator<Item = DirEntry> {
+ WalkBuilder::new(root)
+ .hidden(false)
+ .require_git(false)
+ .build()
+ .filter_map(Result::ok)
+}
+
pub fn is_allowed_content_path(path: &Path) -> bool {
// Skip known ignored files
if path
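
The new `resolve_paths` helper is a thin wrapper over the `ignore` crate's `WalkBuilder`, configured like `resolve_allowed_paths` but without the directory filtering. A self-contained sketch of consuming the same walk the way `scan_sources` consumes it in `lib.rs` (the root path is an example):

use ignore::WalkBuilder;
use std::path::Path;

fn main() {
    // The same walk that `resolve_paths` builds: include hidden files, don't
    // require a git repository, but still honor ignore rules along the way.
    for entry in WalkBuilder::new(Path::new("."))
        .hidden(false)
        .require_git(false)
        .build()
        .filter_map(Result::ok)
    {
        // `scan_sources` keeps only plain files and matches each one against
        // the combined base + pattern glob.
        let Some(file_type) = entry.file_type() else {
            continue;
        };
        if file_type.is_file() {
            println!("{}", entry.path().display());
        }
    }
}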
diff --git a/crates/oxide/tests/scanner.rs b/crates/oxide/tests/scanner.rs
index 2eb128168fff..70c66149d1b4 100644
--- a/crates/oxide/tests/scanner.rs
+++ b/crates/oxide/tests/scanner.rs
@@ -1,6 +1,5 @@
#[cfg(test)]
mod scanner {
- use scanner::detect_sources::DetectSources;
use std::process::Command;
use std::{fs, path};
@@ -35,18 +34,20 @@ mod scanner {
let base = format!("{}", dir.display());
// Resolve all content paths for the (temporary) current working directory
- let mut scanner = Scanner::new(
- Some(DetectSources::new(base.clone().into())),
- Some(
- globs
- .iter()
- .map(|x| GlobEntry {
- base: base.clone(),
- pattern: x.to_string(),
- })
- .collect(),
- ),
- );
+    let mut sources: Vec<GlobEntry> = globs
+ .iter()
+ .map(|x| GlobEntry {
+ base: base.clone(),
+ pattern: x.to_string(),
+ })
+ .collect();
+
+ sources.push(GlobEntry {
+ base: base.clone(),
+ pattern: "**/*".to_string(),
+ });
+
+ let mut scanner = Scanner::new(Some(sources));
let candidates = scanner.scan();
@@ -65,17 +66,20 @@ mod scanner {
));
}
+ let parent_dir = format!(
+ "{}{}",
+ fs::canonicalize(&base).unwrap().display(),
+ path::MAIN_SEPARATOR
+ );
+
paths = paths
.into_iter()
.map(|x| {
- let parent_dir = format!("{}{}", &base.to_string(), path::MAIN_SEPARATOR);
- x.replace(&parent_dir, "")
- // Normalize paths to use unix style separators
- .replace('\\', "/")
+ x.replace(&parent_dir, "").replace('\\', "/") // Normalize paths to use unix style separators
})
.collect();
- // Sort the output for easier comparison (depending on internal datastructure the order
+ // Sort the output for easier comparison (depending on internal data structure the order
// _could_ be random)
paths.sort();
@@ -98,7 +102,7 @@ mod scanner {
("b.html", None),
("c.html", None),
]);
- assert_eq!(globs, vec!["a.html", "b.html", "c.html", "index.html"]);
+ assert_eq!(globs, vec!["*", "a.html", "b.html", "c.html", "index.html"]);
}
#[test]
@@ -110,7 +114,7 @@ mod scanner {
("b.html", None),
("c.html", None),
]);
- assert_eq!(globs, vec!["a.html", "c.html", "index.html"]);
+ assert_eq!(globs, vec!["*", "a.html", "c.html", "index.html"]);
}
#[test]
@@ -124,6 +128,7 @@ mod scanner {
assert_eq!(
globs,
vec![
+ "*",
"index.html",
"public/a.html",
"public/b.html",
@@ -148,6 +153,7 @@ mod scanner {
assert_eq!(
globs,
vec![
+ "*",
"index.html",
"public/a.html",
"public/b.html",
@@ -170,7 +176,7 @@ mod scanner {
("public/b.html", None),
("public/c.html", None),
]);
- assert_eq!(globs, vec!["index.html", "public/c.html",]);
+ assert_eq!(globs, vec!["*", "index.html", "public/c.html",]);
}
#[test]
@@ -181,7 +187,7 @@ mod scanner {
("src/b.html", None),
("src/c.html", None),
]);
- assert_eq!(globs, vec![
+ assert_eq!(globs, vec!["*",
"index.html",
"src/**/*.{aspx,astro,cjs,cts,eex,erb,gjs,gts,haml,handlebars,hbs,heex,html,jade,js,jsx,liquid,md,mdx,mjs,mts,mustache,njk,nunjucks,php,pug,py,razor,rb,rhtml,rs,slim,svelte,tpl,ts,tsx,twig,vue}",
"src/a.html",
@@ -198,7 +204,7 @@ mod scanner {
("b.png", None),
("c.lock", None),
]);
- assert_eq!(globs, vec!["index.html"]);
+ assert_eq!(globs, vec!["*", "index.html"]);
}
#[test]
@@ -209,7 +215,7 @@ mod scanner {
("b.sass", None),
("c.less", None),
]);
- assert_eq!(globs, vec!["index.html"]);
+ assert_eq!(globs, vec!["*", "index.html"]);
}
#[test]
@@ -219,7 +225,7 @@ mod scanner {
("package-lock.json", None),
("yarn.lock", None),
]);
- assert_eq!(globs, vec!["index.html"]);
+ assert_eq!(globs, vec!["*", "index.html"]);
}
#[test]
@@ -270,6 +276,7 @@ mod scanner {
assert_eq!(
globs,
vec![
+ "*",
"bar.html",
"baz.html",
"foo.html",
@@ -358,7 +365,7 @@ mod scanner {
// detection.
("foo.styl", Some("content-['foo.styl']")),
],
- vec!["*.styl"],
+ vec!["foo.styl"],
)
.1;
diff --git a/integrations/cli/index.test.ts b/integrations/cli/index.test.ts
index f392f754486a..46a49b2700e9 100644
--- a/integrations/cli/index.test.ts
+++ b/integrations/cli/index.test.ts
@@ -1,6 +1,7 @@
+import dedent from 'dedent'
import os from 'node:os'
import path from 'node:path'
-import { describe } from 'vitest'
+import { describe, expect } from 'vitest'
import { candidate, css, html, js, json, test, yaml } from '../utils'
const STANDALONE_BINARY = (() => {
@@ -255,3 +256,564 @@ describe.each([
},
)
})
+
+test(
+ 'auto source detection kitchen sink',
+ {
+ fs: {
+ 'package.json': json`
+ {
+ "dependencies": {
+ "tailwindcss": "workspace:^",
+ "@tailwindcss/cli": "workspace:^"
+ }
+ }
+ `,
+ 'index.css': css`
+ @import 'tailwindcss/theme' theme(reference);
+
+ /* (1) */
+ /* - Only './src' should be auto-scanned, not the current working directory */
+ /* - .gitignore'd paths should be ignored (node_modules) */
+ /* - Binary extensions should be ignored (jpg, zip) */
+ @import 'tailwindcss/utilities' source('./src');
+
+ /* (2) */
+ /* - All HTML and JSX files in 'ignored/components' should be scanned */
+ /* - All other extensions should be ignored */
+ @source "./ignored/components/*.{html,jsx}";
+
+ /* (3) */
+ /* - './components' should be auto-scanned in addition to './src' */
+ /* - './components/ignored.html' should still be ignored */
+ /* - Binary extensions in './components' should be ignored */
+ @source "./components";
+
+ /* (4) */
+ /* - './pages' should be auto-scanned */
+ /* - Only '.html' files should be included */
+      /* - './pages/ignored.html' should be ignored */
+ @source "./pages/**/*.html";
+ `,
+
+ '.gitignore': dedent`
+ /src/ignored
+ /ignored
+ /components/ignored.html
+ /pages/ignored.html
+ `,
+
+ // (1)
+ 'index.html': 'content-["index.html"] content-["BAD"]', // "Root" source is in `./src`
+ 'src/index.html': 'content-["src/index.html"]',
+ 'src/nested/index.html': 'content-["src/nested/index.html"]',
+ 'src/index.jpg': 'content-["src/index.jpg"] content-["BAD"]',
+ 'src/nested/index.tar': 'content-["src/nested/index.tar"] content-["BAD"]',
+ 'src/ignored/index.html': 'content-["src/ignored/index.html"] content-["BAD"]',
+
+ // (2)
+ 'ignored/components/my-component.html': 'content-["ignored/components/my-component.html"]',
+ 'ignored/components/my-component.jsx': 'content-["ignored/components/my-component.jsx"]',
+
+ // Ignored and not explicitly listed by (2)
+ 'ignored/components/my-component.tsx':
+ 'content-["ignored/components/my-component.tsx"] content-["BAD"]',
+ 'ignored/components/nested/my-component.html':
+ 'content-["ignored/components/nested/my-component.html"] content-["BAD"]',
+
+ // (3)
+ 'components/my-component.tsx': 'content-["components/my-component.tsx"]',
+ 'components/nested/my-component.tsx': 'content-["components/nested/my-component.tsx"]',
+ 'components/ignored.html': 'content-["components/ignored.html"] content-["BAD"]',
+
+ // (4)
+ 'pages/foo.html': 'content-["pages/foo.html"]',
+ 'pages/nested/foo.html': 'content-["pages/nested/foo.html"]',
+ 'pages/ignored.html': 'content-["pages/ignored.html"] content-["BAD"]',
+ 'pages/foo.jsx': 'content-["pages/foo.jsx"] content-["BAD"]',
+ 'pages/nested/foo.jsx': 'content-["pages/nested/foo.jsx"] content-["BAD"]',
+ },
+ },
+ async ({ fs, exec }) => {
+ await exec('pnpm tailwindcss --input index.css --output dist/out.css')
+
+ expect(await fs.dumpFiles('./dist/*.css')).toMatchInlineSnapshot(`
+ "
+ --- ./dist/out.css ---
+ .content-\\[\\"components\\/my-component\\.tsx\\"\\] {
+ --tw-content: "components/my-component.tsx";
+ content: var(--tw-content);
+ }
+ .content-\\[\\"components\\/nested\\/my-component\\.tsx\\"\\] {
+ --tw-content: "components/nested/my-component.tsx";
+ content: var(--tw-content);
+ }
+ .content-\\[\\"ignored\\/components\\/my-component\\.html\\"\\] {
+ --tw-content: "ignored/components/my-component.html";
+ content: var(--tw-content);
+ }
+ .content-\\[\\"ignored\\/components\\/my-component\\.jsx\\"\\] {
+ --tw-content: "ignored/components/my-component.jsx";
+ content: var(--tw-content);
+ }
+ .content-\\[\\"pages\\/foo\\.html\\"\\] {
+ --tw-content: "pages/foo.html";
+ content: var(--tw-content);
+ }
+ .content-\\[\\"pages\\/nested\\/foo\\.html\\"\\] {
+ --tw-content: "pages/nested/foo.html";
+ content: var(--tw-content);
+ }
+ .content-\\[\\"src\\/index\\.html\\"\\] {
+ --tw-content: "src/index.html";
+ content: var(--tw-content);
+ }
+ .content-\\[\\"src\\/nested\\/index\\.html\\"\\] {
+ --tw-content: "src/nested/index.html";
+ content: var(--tw-content);
+ }
+ @supports (-moz-orient: inline) {
+ @layer base {
+ *, ::before, ::after, ::backdrop {
+ --tw-content: "";
+ }
+ }
+ }
+ @property --tw-content {
+ syntax: "*";
+ inherits: false;
+ initial-value: "";
+ }
+ "
+ `)
+ },
+)
+
+test(
+ 'auto source detection in depth, source(…) and `@source` can be configured to use auto source detection (build + watch mode)',
+ {
+ fs: {
+ 'package.json': json`{}`,
+ 'pnpm-workspace.yaml': yaml`
+ #
+ packages:
+ - project-a
+ `,
+ 'project-a/package.json': json`
+ {
+ "dependencies": {
+ "tailwindcss": "workspace:^",
+ "@tailwindcss/cli": "workspace:^"
+ }
+ }
+ `,
+ 'project-a/src/index.css': css`
+ @import 'tailwindcss/theme' theme(reference);
+
+ /* Run auto-content detection in ../../project-b */
+ @import 'tailwindcss/utilities' source('../../project-b');
+
+ /* Explicitly using node_modules in the @source allows git ignored folders */
+ @source '../node_modules/{my-lib-1,my-lib-2}/src/**/*.html';
+
+ /* We typically ignore these extensions, but now include them explicitly */
+ @source './logo.{jpg,png}';
+
+ /* Project C should apply auto source detection */
+ @source '../../project-c';
+
+ /* Project D should apply auto source detection rules, such as ignoring node_modules */
+ @source '../../project-d/**/*.{html,js}';
+ @source '../../project-d/**/*.bin';
+
+        /* Same as above, but my-lib-2 _should_ be included */
+ @source '../../project-d/node_modules/my-lib-2/src/*.{html,js}';
+
+ /* bar.html is git ignored, but explicitly listed here to scan */
+ @source '../../project-d/src/bar.html';
+ `,
+
+ // Project A is the current folder, but we explicitly configured
+ // `source(project-b)`, therefore project-a should not be included in
+ // the output.
+ 'project-a/src/index.html': html`
+        <div class="content-['project-a/src/index.html']"></div>
+ `,
+
+ // Project A explicitly includes an extension we usually ignore,
+ // therefore it should be included in the output.
+ 'project-a/src/logo.jpg': html`
+        <div class="content-['project-a/src/logo.jpg']"></div>
+ `,
+
+ // Project A explicitly includes node_modules/{my-lib-1,my-lib-2},
+ // therefore these files should be included in the output.
+ 'project-a/node_modules/my-lib-1/src/index.html': html`
+        <div class="content-['project-a/node_modules/my-lib-1/src/index.html']"></div>
+ `,
+ 'project-a/node_modules/my-lib-2/src/index.html': html`
+        <div class="content-['project-a/node_modules/my-lib-2/src/index.html']"></div>
+ `,
+
+ // Project B is the configured `source(…)`, therefore auto source
+ // detection should include known extensions and folders in the output.
+ 'project-b/src/index.html': html`
+        <div class="content-['project-b/src/index.html']"></div>
+ `,
+
+ // Project B is the configured `source(…)`, therefore auto source
+ // detection should apply and node_modules should not be included in the
+ // output.
+ 'project-b/node_modules/my-lib-3/src/index.html': html`
+        <div class="content-['project-b/node_modules/my-lib-3/src/index.html']"></div>
+ `,
+
+ // Project C should apply auto source detection, therefore known
+ // extensions and folders should be included in the output.
+ 'project-c/src/index.html': html`
+        <div class="content-['project-c/src/index.html']"></div>
+ `,
+
+ // Project C should apply auto source detection, therefore known ignored
+ // extensions should not be included in the output.
+ 'project-c/src/logo.jpg': html`
+        <div class="content-['project-c/src/logo.jpg']"></div>
+ `,
+
+ // Project C should apply auto source detection, therefore node_modules
+ // should not be included in the output.
+ 'project-c/node_modules/my-lib-1/src/index.html': html`
+        <div class="content-['project-c/node_modules/my-lib-1/src/index.html']"></div>
+ `,
+
+ // Project D should apply auto source detection rules, such as ignoring
+ // node_modules.
+ 'project-d/node_modules/my-lib-1/src/index.html': html`
+        <div class="content-['project-d/node_modules/my-lib-1/src/index.html']"></div>
+ `,
+
+ // Project D has an explicit glob containing node_modules, thus should include the html file
+ 'project-d/node_modules/my-lib-2/src/index.html': html`
+        <div class="content-['project-d/node_modules/my-lib-2/src/index.html']"></div>
+ `,
+
+ 'project-d/src/.gitignore': dedent`
+ foo.html
+ bar.html
+ `,
+
+ // Project D, foo.html is ignored by the gitignore file.
+ 'project-d/src/foo.html': html`
+        <div class="content-['project-d/src/foo.html']"></div>
+ `,
+
+ // Project D, bar.html is ignored by the gitignore file. But explicitly
+ // listed as a `@source` glob.
+ 'project-d/src/bar.html': html`
+        <div class="content-['project-d/src/bar.html']"></div>
+ `,
+
+ // Project D should look for files with the extensions html and js.
+ 'project-d/src/index.html': html`
+        <div class="content-['project-d/src/index.html']"></div>
+ `,
+
+ // Project D should have a binary file even though we ignore binary files
+ // by default, but it's explicitly listed.
+ 'project-d/my-binary-file.bin': html`
+        <div class="content-['project-d/my-binary-file.bin']"></div>
+ `,
+ },
+ },
+ async ({ fs, exec, spawn, root }) => {
+ await exec('pnpm tailwindcss --input src/index.css --output dist/out.css', {
+ cwd: path.join(root, 'project-a'),
+ })
+
+ expect(await fs.dumpFiles('./project-a/dist/*.css')).toMatchInlineSnapshot(`
+ "
+ --- ./project-a/dist/out.css ---
+ .content-\\[\\'project-a\\/node_modules\\/my-lib-1\\/src\\/index\\.html\\'\\] {
+ --tw-content: 'project-a/node modules/my-lib-1/src/index.html';
+ content: var(--tw-content);
+ }
+ .content-\\[\\'project-a\\/node_modules\\/my-lib-2\\/src\\/index\\.html\\'\\] {
+ --tw-content: 'project-a/node modules/my-lib-2/src/index.html';
+ content: var(--tw-content);
+ }
+ .content-\\[\\'project-a\\/src\\/logo\\.jpg\\'\\] {
+ --tw-content: 'project-a/src/logo.jpg';
+ content: var(--tw-content);
+ }
+ .content-\\[\\'project-b\\/src\\/index\\.html\\'\\] {
+ --tw-content: 'project-b/src/index.html';
+ content: var(--tw-content);
+ }
+ .content-\\[\\'project-c\\/src\\/index\\.html\\'\\] {
+ --tw-content: 'project-c/src/index.html';
+ content: var(--tw-content);
+ }
+ .content-\\[\\'project-d\\/my-binary-file\\.bin\\'\\] {
+ --tw-content: 'project-d/my-binary-file.bin';
+ content: var(--tw-content);
+ }
+ .content-\\[\\'project-d\\/node_modules\\/my-lib-2\\/src\\/index\\.html\\'\\] {
+ --tw-content: 'project-d/node modules/my-lib-2/src/index.html';
+ content: var(--tw-content);
+ }
+ .content-\\[\\'project-d\\/src\\/bar\\.html\\'\\] {
+ --tw-content: 'project-d/src/bar.html';
+ content: var(--tw-content);
+ }
+ .content-\\[\\'project-d\\/src\\/index\\.html\\'\\] {
+ --tw-content: 'project-d/src/index.html';
+ content: var(--tw-content);
+ }
+ @supports (-moz-orient: inline) {
+ @layer base {
+ *, ::before, ::after, ::backdrop {
+ --tw-content: "";
+ }
+ }
+ }
+ @property --tw-content {
+ syntax: "*";
+ inherits: false;
+ initial-value: "";
+ }
+ "
+ `)
+
+ // Watch mode tests
+ await spawn('pnpm tailwindcss --input src/index.css --output dist/out.css --watch', {
+ cwd: path.join(root, 'project-a'),
+ })
+
+ // Changes to project-a should not be included in the output, we changed the
+ // base folder to project-b.
+ await fs.write(
+ 'project-a/src/index.html',
+      html`<div class="[.changed_&]:content-['project-a/src/index.html']"></div>`,
+ )
+ await fs.expectFileNotToContain('./project-a/dist/out.css', [
+ candidate`[.changed_&]:content-['project-a/src/index.html']`,
+ ])
+
+ // Changes to this file should be included, because we explicitly listed
+ // them using `@source`.
+ await fs.write(
+ 'project-a/src/logo.jpg',
+      html`<div class="[.changed_&]:content-['project-a/src/logo.jpg']"></div>`,
+ )
+ await fs.expectFileToContain('./project-a/dist/out.css', [
+ candidate`[.changed_&]:content-['project-a/src/logo.jpg']`,
+ ])
+
+ // Changes to these files should be included, because we explicitly listed
+ // them using `@source`.
+ await fs.write(
+ 'project-a/node_modules/my-lib-1/src/index.html',
+      html`<div class="[.changed_&]:content-['project-a/node_modules/my-lib-1/src/index.html']"></div>`,
+ )
+ await fs.expectFileToContain('./project-a/dist/out.css', [
+ candidate`[.changed_&]:content-['project-a/node_modules/my-lib-1/src/index.html']`,
+ ])
+ await fs.write(
+ 'project-a/node_modules/my-lib-2/src/index.html',
+      html`<div class="[.changed_&]:content-['project-a/node_modules/my-lib-2/src/index.html']"></div>`,
+ )
+ await fs.expectFileToContain('./project-a/dist/out.css', [
+ candidate`[.changed_&]:content-['project-a/node_modules/my-lib-2/src/index.html']`,
+ ])
+
+ // Changes to this file should be included, because we changed the base to
+ // `project-b`.
+ await fs.write(
+ 'project-b/src/index.html',
+      html`<div class="[.changed_&]:content-['project-b/src/index.html']"></div>`,
+ )
+ await fs.expectFileToContain('./project-a/dist/out.css', [
+ candidate`[.changed_&]:content-['project-b/src/index.html']`,
+ ])
+
+ // Changes to this file should not be included. We did change the base to
+ // `project-b`, but we still apply the auto source detection rules which
+ // ignore `node_modules`.
+ await fs.write(
+ 'project-b/node_modules/my-lib-3/src/index.html',
+      html`<div class="[.changed_&]:content-['project-b/node_modules/my-lib-3/src/index.html']"></div>`,
+ )
+ await fs.expectFileNotToContain('./project-a/dist/out.css', [
+ candidate`[.changed_&]:content-['project-b/node_modules/my-lib-3/src/index.html']`,
+ ])
+
+ // Project C was added explicitly via `@source`, therefore changes to these
+ // files should be included.
+ await fs.write(
+ 'project-c/src/index.html',
+      html`<div class="[.changed_&]:content-['project-c/src/index.html']"></div>`,
+ )
+ await fs.expectFileToContain('./project-a/dist/out.css', [
+ candidate`[.changed_&]:content-['project-c/src/index.html']`,
+ ])
+
+ // Except for these files, since they are ignored by the default auto source
+ // detection rules.
+ await fs.write(
+ 'project-c/src/logo.jpg',
+ html``,
+ )
+ await fs.expectFileNotToContain('./project-a/dist/out.css', [
+ candidate`[.changed_&]:content-['project-c/src/logo.jpg']`,
+ ])
+ await fs.write(
+ 'project-c/node_modules/my-lib-1/src/index.html',
+ html``,
+ )
+ await fs.expectFileNotToContain('./project-a/dist/out.css', [
+ candidate`[.changed_&]:content-['project-c/node_modules/my-lib-1/src/index.html']`,
+ ])
+
+ // Creating new files in the "root" of auto source detected folders
+ await fs.write(
+ 'project-b/new-file.html',
+ html``,
+ )
+ await fs.write(
+ 'project-b/new-folder/new-file.html',
+ html``,
+ )
+ await fs.write(
+ 'project-c/new-file.html',
+ html``,
+ )
+ await fs.write(
+ 'project-c/new-folder/new-file.html',
+ html``,
+ )
+ await fs.expectFileToContain('./project-a/dist/out.css', [
+ candidate`[.created_&]:content-['project-b/new-file.html']`,
+ candidate`[.created_&]:content-['project-b/new-folder/new-file.html']`,
+ candidate`[.created_&]:content-['project-c/new-file.html']`,
+ candidate`[.created_&]:content-['project-c/new-folder/new-file.html']`,
+ ])
+ },
+)
+
+test(
+ 'auto source detection disabled',
+ {
+ fs: {
+ 'package.json': json`
+ {
+ "dependencies": {
+ "tailwindcss": "workspace:^",
+ "@tailwindcss/cli": "workspace:^"
+ }
+ }
+ `,
+ 'index.css': css`
+ @import 'tailwindcss/theme' theme(reference);
+
+        /* (1) */
+        /* - Auto source detection is disabled via source(none) */
+        /* - Nothing should be scanned automatically, not even './src' or the */
+        /*   current working directory */
+        @import 'tailwindcss/utilities' source(none);
+
+        /* (2) */
+        /* - Only the explicit `@source` glob below should be scanned */
+        /* - Only '.html' files should be included */
+        /* - './pages/ignored.html' should be ignored (.gitignore'd) */
+ @source "./pages/**/*.html";
+ `,
+
+ '.gitignore': dedent`
+ /src/ignored
+ /pages/ignored.html
+ `,
+
+ // (1)
+      'index.html': 'content-["index.html"] content-["BAD"]', // Not scanned: auto source detection is disabled
+ 'src/index.html': 'content-["src/index.html"] content-["BAD"]',
+ 'src/nested/index.html': 'content-["src/nested/index.html"] content-["BAD"]',
+ 'src/index.jpg': 'content-["src/index.jpg"] content-["BAD"]',
+ 'src/nested/index.tar': 'content-["src/nested/index.tar"] content-["BAD"]',
+ 'src/ignored/index.html': 'content-["src/ignored/index.html"] content-["BAD"]',
+
+      // (2)
+ 'pages/foo.html': 'content-["pages/foo.html"]',
+ 'pages/nested/foo.html': 'content-["pages/nested/foo.html"]',
+ 'pages/ignored.html': 'content-["pages/ignored.html"] content-["BAD"]',
+ 'pages/foo.jsx': 'content-["pages/foo.jsx"] content-["BAD"]',
+ 'pages/nested/foo.jsx': 'content-["pages/nested/foo.jsx"] content-["BAD"]',
+ },
+ },
+ async ({ fs, exec }) => {
+ await exec('pnpm tailwindcss --input index.css --output dist/out.css')
+
+ expect(await fs.dumpFiles('./dist/*.css')).toMatchInlineSnapshot(`
+ "
+ --- ./dist/out.css ---
+ .content-\\[\\"pages\\/foo\\.html\\"\\] {
+ --tw-content: "pages/foo.html";
+ content: var(--tw-content);
+ }
+ .content-\\[\\"pages\\/nested\\/foo\\.html\\"\\] {
+ --tw-content: "pages/nested/foo.html";
+ content: var(--tw-content);
+ }
+ @supports (-moz-orient: inline) {
+ @layer base {
+ *, ::before, ::after, ::backdrop {
+ --tw-content: "";
+ }
+ }
+ }
+ @property --tw-content {
+ syntax: "*";
+ inherits: false;
+ initial-value: "";
+ }
+ "
+ `)
+ },
+)
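
The CLI tests above all reduce to the same question: which sources does the scanner receive? A minimal sketch of that mapping, mirroring the plugin changes further down in this diff (the `createScanner` helper and its signature are made up for illustration; `Scanner` is the real export used throughout):

```ts
import { Scanner } from '@tailwindcss/oxide'

// `root` is what the compiler reports for `source(…)`; `globs` collects the
// `@source` entries; `base` is the directory of the input CSS file.
type Root = null | 'none' | { base: string; pattern: string }

function createScanner(root: Root, globs: { base: string; pattern: string }[], base: string) {
  let sources: { base: string; pattern: string }[] =
    root === 'none'
      ? [] // `source(none)`: automatic source detection is disabled entirely
      : root === null
        ? [{ base, pattern: '**/*' }] // no `source(…)`: auto-detect from the base directory
        : [root] // `source('…')`: auto-detect from the given directory instead

  return new Scanner({ sources: sources.concat(globs) })
}
```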
diff --git a/integrations/postcss/index.test.ts b/integrations/postcss/index.test.ts
index 8b236a2ef92c..de444d61dbe8 100644
--- a/integrations/postcss/index.test.ts
+++ b/integrations/postcss/index.test.ts
@@ -1,4 +1,6 @@
+import dedent from 'dedent'
import path from 'node:path'
+import { expect } from 'vitest'
import { candidate, css, html, js, json, test, yaml } from '../utils'
test(
@@ -464,3 +466,601 @@ test(
await fs.expectFileToContain('project-a/dist/out.css', [candidate`content-['c/src/index.js']`])
},
)
+
+test(
+ 'auto source detection kitchen sink',
+ {
+ fs: {
+ 'package.json': json`
+ {
+ "dependencies": {
+ "postcss": "^8",
+ "postcss-cli": "^10",
+ "tailwindcss": "workspace:^",
+ "@tailwindcss/postcss": "workspace:^"
+ }
+ }
+ `,
+ 'postcss.config.js': js`
+ module.exports = {
+ plugins: {
+ '@tailwindcss/postcss': {},
+ },
+ }
+ `,
+ 'index.css': css`
+ @import 'tailwindcss/theme' theme(reference);
+
+ /* (1) */
+ /* - Only './src' should be auto-scanned, not the current working directory */
+ /* - .gitignore'd paths should be ignored (node_modules) */
+ /* - Binary extensions should be ignored (jpg, zip) */
+ @import 'tailwindcss/utilities' source('./src');
+
+ /* (2) */
+ /* - All HTML and JSX files in 'ignored/components' should be scanned */
+ /* - All other extensions should be ignored */
+ @source "./ignored/components/*.{html,jsx}";
+
+ /* (3) */
+ /* - './components' should be auto-scanned in addition to './src' */
+ /* - './components/ignored.html' should still be ignored */
+ /* - Binary extensions in './components' should be ignored */
+ @source "./components";
+
+ /* (4) */
+ /* - './pages' should be auto-scanned */
+ /* - Only '.html' files should be included */
+        /* - './pages/ignored.html' should be ignored */
+ @source "./pages/**/*.html";
+ `,
+
+ '.gitignore': dedent`
+ /src/ignored
+ /ignored
+ /components/ignored.html
+ /pages/ignored.html
+ `,
+
+ // (1)
+ 'index.html': 'content-["index.html"] content-["BAD"]', // "Root" source is in `./src`
+ 'src/index.html': 'content-["src/index.html"]',
+ 'src/nested/index.html': 'content-["src/nested/index.html"]',
+ 'src/index.jpg': 'content-["src/index.jpg"] content-["BAD"]',
+ 'src/nested/index.tar': 'content-["src/nested/index.tar"] content-["BAD"]',
+ 'src/ignored/index.html': 'content-["src/ignored/index.html"] content-["BAD"]',
+
+ // (2)
+ 'ignored/components/my-component.html': 'content-["ignored/components/my-component.html"]',
+ 'ignored/components/my-component.jsx': 'content-["ignored/components/my-component.jsx"]',
+
+ // Ignored and not explicitly listed by (2)
+ 'ignored/components/my-component.tsx':
+ 'content-["ignored/components/my-component.tsx"] content-["BAD"]',
+ 'ignored/components/nested/my-component.html':
+ 'content-["ignored/components/nested/my-component.html"] content-["BAD"]',
+
+ // (3)
+ 'components/my-component.tsx': 'content-["components/my-component.tsx"]',
+ 'components/nested/my-component.tsx': 'content-["components/nested/my-component.tsx"]',
+ 'components/ignored.html': 'content-["components/ignored.html"] content-["BAD"]',
+
+ // (4)
+ 'pages/foo.html': 'content-["pages/foo.html"]',
+ 'pages/nested/foo.html': 'content-["pages/nested/foo.html"]',
+ 'pages/ignored.html': 'content-["pages/ignored.html"] content-["BAD"]',
+ 'pages/foo.jsx': 'content-["pages/foo.jsx"] content-["BAD"]',
+ 'pages/nested/foo.jsx': 'content-["pages/nested/foo.jsx"] content-["BAD"]',
+ },
+ },
+ async ({ fs, exec }) => {
+ await exec('pnpm postcss index.css --output dist/out.css')
+
+ expect(await fs.dumpFiles('./dist/*.css')).toMatchInlineSnapshot(`
+ "
+ --- ./dist/out.css ---
+ .content-\\[\\"components\\/my-component\\.tsx\\"\\] {
+ --tw-content: "components/my-component.tsx";
+ content: var(--tw-content);
+ }
+ .content-\\[\\"components\\/nested\\/my-component\\.tsx\\"\\] {
+ --tw-content: "components/nested/my-component.tsx";
+ content: var(--tw-content);
+ }
+ .content-\\[\\"ignored\\/components\\/my-component\\.html\\"\\] {
+ --tw-content: "ignored/components/my-component.html";
+ content: var(--tw-content);
+ }
+ .content-\\[\\"ignored\\/components\\/my-component\\.jsx\\"\\] {
+ --tw-content: "ignored/components/my-component.jsx";
+ content: var(--tw-content);
+ }
+ .content-\\[\\"pages\\/foo\\.html\\"\\] {
+ --tw-content: "pages/foo.html";
+ content: var(--tw-content);
+ }
+ .content-\\[\\"pages\\/nested\\/foo\\.html\\"\\] {
+ --tw-content: "pages/nested/foo.html";
+ content: var(--tw-content);
+ }
+ .content-\\[\\"src\\/index\\.html\\"\\] {
+ --tw-content: "src/index.html";
+ content: var(--tw-content);
+ }
+ .content-\\[\\"src\\/nested\\/index\\.html\\"\\] {
+ --tw-content: "src/nested/index.html";
+ content: var(--tw-content);
+ }
+ @supports (-moz-orient: inline) {
+ @layer base {
+ *, ::before, ::after, ::backdrop {
+ --tw-content: "";
+ }
+ }
+ }
+ @property --tw-content {
+ syntax: "*";
+ inherits: false;
+ initial-value: "";
+ }
+ "
+ `)
+ },
+)
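
Two of the `@source` forms above are worth calling out: a bare directory (`@source "./components"`) and a classic glob (`@source "./pages/**/*.html"`). A hypothetical helper to show how they differ; the real normalization lives in the Rust scanner, so treat this as an illustration only:

```ts
// Illustration only: a directory behaves like auto source detection rooted at
// that directory (heuristics such as .gitignore still apply), which is the
// same as scanning '<dir>/**/*'; a full glob is used as-is.
function toSourceEntry(base: string, value: string): { base: string; pattern: string } {
  let isGlob = /[*?{[]/.test(value)
  return isGlob ? { base, pattern: value } : { base, pattern: `${value}/**/*` }
}

// toSourceEntry('/app', './components')      → { base: '/app', pattern: './components/**/*' }
// toSourceEntry('/app', './pages/**/*.html') → { base: '/app', pattern: './pages/**/*.html' }
```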
+
+test(
+  'auto source detection in depth: source(…) and `@source` can be configured to use auto source detection (build + watch mode)',
+ {
+ fs: {
+ 'package.json': json`{}`,
+ 'pnpm-workspace.yaml': yaml`
+ #
+ packages:
+ - project-a
+ `,
+ 'project-a/package.json': json`
+ {
+ "dependencies": {
+ "postcss": "^8",
+ "postcss-cli": "^10",
+ "tailwindcss": "workspace:^",
+ "@tailwindcss/postcss": "workspace:^"
+ }
+ }
+ `,
+ 'project-a/postcss.config.js': js`
+ module.exports = {
+ plugins: {
+ '@tailwindcss/postcss': {},
+ },
+ }
+ `,
+ 'project-a/src/index.css': css`
+ @import 'tailwindcss/theme' theme(reference);
+
+        /* Run auto source detection in ../../project-b */
+ @import 'tailwindcss/utilities' source('../../project-b');
+
+        /* Explicitly listing node_modules in the @source glob allows scanning git-ignored folders */
+ @source '../node_modules/{my-lib-1,my-lib-2}/src/**/*.html';
+
+ /* We typically ignore these extensions, but now include them explicitly */
+ @source './logo.{jpg,png}';
+
+ /* Project C should apply auto source detection */
+ @source '../../project-c';
+
+ /* Project D should apply auto source detection rules, such as ignoring node_modules */
+ @source '../../project-d/**/*.{html,js}';
+ @source '../../project-d/**/*.bin';
+
+        /* Same as above, but my-lib-2 _should_ be included */
+ @source '../../project-d/node_modules/my-lib-2/src/*.{html,js}';
+
+ /* bar.html is git ignored, but explicitly listed here to scan */
+ @source '../../project-d/src/bar.html';
+ `,
+
+ // Project A is the current folder, but we explicitly configured
+ // `source(project-b)`, therefore project-a should not be included in
+ // the output.
+ 'project-a/src/index.html': html`
+
+ `,
+
+ // Project A explicitly includes an extension we usually ignore,
+ // therefore it should be included in the output.
+ 'project-a/src/logo.jpg': html`
+
+ `,
+
+ // Project A explicitly includes node_modules/{my-lib-1,my-lib-2},
+ // therefore these files should be included in the output.
+ 'project-a/node_modules/my-lib-1/src/index.html': html`
+
+ `,
+ 'project-a/node_modules/my-lib-2/src/index.html': html`
+
+ `,
+
+ // Project B is the configured `source(…)`, therefore auto source
+ // detection should include known extensions and folders in the output.
+ 'project-b/src/index.html': html`
+
+ `,
+
+ // Project B is the configured `source(…)`, therefore auto source
+ // detection should apply and node_modules should not be included in the
+ // output.
+ 'project-b/node_modules/my-lib-3/src/index.html': html`
+
+ `,
+
+ // Project C should apply auto source detection, therefore known
+ // extensions and folders should be included in the output.
+ 'project-c/src/index.html': html`
+
+ `,
+
+ // Project C should apply auto source detection, therefore known ignored
+ // extensions should not be included in the output.
+ 'project-c/src/logo.jpg': html`
+
+ `,
+
+ // Project C should apply auto source detection, therefore node_modules
+ // should not be included in the output.
+ 'project-c/node_modules/my-lib-1/src/index.html': html`
+
+ `,
+
+ // Project D should apply auto source detection rules, such as ignoring
+ // node_modules.
+ 'project-d/node_modules/my-lib-1/src/index.html': html`
+
+ `,
+
+      // Project D has an explicit glob containing node_modules, so this HTML file should be included
+ 'project-d/node_modules/my-lib-2/src/index.html': html`
+
+ `,
+
+ 'project-d/src/.gitignore': dedent`
+ foo.html
+ bar.html
+ `,
+
+ // Project D, foo.html is ignored by the gitignore file.
+ 'project-d/src/foo.html': html`
+
+ `,
+
+ // Project D, bar.html is ignored by the gitignore file. But explicitly
+ // listed as a `@source` glob.
+ 'project-d/src/bar.html': html`
+
+ `,
+
+ // Project D should look for files with the extensions html and js.
+ 'project-d/src/index.html': html`
+
+ `,
+
+      // Project D contains a binary file. Binary files are ignored by
+      // default, but this one is explicitly listed via `@source`, so it
+      // should be included.
+ 'project-d/my-binary-file.bin': html`
+
+ `,
+ },
+ },
+ async ({ fs, exec, spawn, root }) => {
+ await exec('pnpm postcss src/index.css --output dist/out.css --verbose', {
+ cwd: path.join(root, 'project-a'),
+ })
+
+ expect(await fs.dumpFiles('./project-a/dist/*.css')).toMatchInlineSnapshot(`
+ "
+ --- ./project-a/dist/out.css ---
+ .content-\\[\\'project-a\\/node_modules\\/my-lib-1\\/src\\/index\\.html\\'\\] {
+ --tw-content: 'project-a/node modules/my-lib-1/src/index.html';
+ content: var(--tw-content);
+ }
+ .content-\\[\\'project-a\\/node_modules\\/my-lib-2\\/src\\/index\\.html\\'\\] {
+ --tw-content: 'project-a/node modules/my-lib-2/src/index.html';
+ content: var(--tw-content);
+ }
+ .content-\\[\\'project-a\\/src\\/logo\\.jpg\\'\\] {
+ --tw-content: 'project-a/src/logo.jpg';
+ content: var(--tw-content);
+ }
+ .content-\\[\\'project-b\\/src\\/index\\.html\\'\\] {
+ --tw-content: 'project-b/src/index.html';
+ content: var(--tw-content);
+ }
+ .content-\\[\\'project-c\\/src\\/index\\.html\\'\\] {
+ --tw-content: 'project-c/src/index.html';
+ content: var(--tw-content);
+ }
+ .content-\\[\\'project-d\\/my-binary-file\\.bin\\'\\] {
+ --tw-content: 'project-d/my-binary-file.bin';
+ content: var(--tw-content);
+ }
+ .content-\\[\\'project-d\\/node_modules\\/my-lib-2\\/src\\/index\\.html\\'\\] {
+ --tw-content: 'project-d/node modules/my-lib-2/src/index.html';
+ content: var(--tw-content);
+ }
+ .content-\\[\\'project-d\\/src\\/bar\\.html\\'\\] {
+ --tw-content: 'project-d/src/bar.html';
+ content: var(--tw-content);
+ }
+ .content-\\[\\'project-d\\/src\\/index\\.html\\'\\] {
+ --tw-content: 'project-d/src/index.html';
+ content: var(--tw-content);
+ }
+ @supports (-moz-orient: inline) {
+ @layer base {
+ *, ::before, ::after, ::backdrop {
+ --tw-content: "";
+ }
+ }
+ }
+ @property --tw-content {
+ syntax: "*";
+ inherits: false;
+ initial-value: "";
+ }
+ "
+ `)
+
+ // Watch mode tests
+ let process = await spawn(
+ 'pnpm postcss src/index.css --output dist/out.css --watch --verbose',
+ {
+ cwd: path.join(root, 'project-a'),
+ },
+ )
+
+ await process.onStderr((message) => message.includes('Waiting for file changes...'))
+
+    // Changes to project-a should not be included in the output because we
+    // changed the base folder to project-b.
+ await fs.write(
+ 'project-a/src/index.html',
+ html``,
+ )
+ await fs.expectFileNotToContain('./project-a/dist/out.css', [
+ candidate`[.changed_&]:content-['project-a/src/index.html']`,
+ ])
+
+ // Changes to this file should be included, because we explicitly listed
+ // them using `@source`.
+ await fs.write(
+ 'project-a/src/logo.jpg',
+ html``,
+ )
+ await fs.expectFileToContain('./project-a/dist/out.css', [
+ candidate`[.changed_&]:content-['project-a/src/logo.jpg']`,
+ ])
+
+ // Changes to these files should be included, because we explicitly listed
+ // them using `@source`.
+ await fs.write(
+ 'project-a/node_modules/my-lib-1/src/index.html',
+ html``,
+ )
+ await fs.expectFileToContain('./project-a/dist/out.css', [
+ candidate`[.changed_&]:content-['project-a/node_modules/my-lib-1/src/index.html']`,
+ ])
+
+ await fs.write(
+ 'project-a/node_modules/my-lib-2/src/index.html',
+ html``,
+ )
+ await fs.expectFileToContain('./project-a/dist/out.css', [
+ candidate`[.changed_&]:content-['project-a/node_modules/my-lib-2/src/index.html']`,
+ ])
+
+ // Changes to this file should be included, because we changed the base to
+ // `project-b`.
+ await fs.write(
+ 'project-b/src/index.html',
+ html``,
+ )
+ await fs.expectFileToContain('./project-a/dist/out.css', [
+ candidate`[.changed_&]:content-['project-b/src/index.html']`,
+ ])
+
+ // Changes to this file should not be included. We did change the base to
+ // `project-b`, but we still apply the auto source detection rules which
+ // ignore `node_modules`.
+ await fs.write(
+ 'project-b/node_modules/my-lib-3/src/index.html',
+ html``,
+ )
+ await fs.expectFileNotToContain('./project-a/dist/out.css', [
+ candidate`[.changed_&]:content-['project-b/node_modules/my-lib-3/src/index.html']`,
+ ])
+
+ // Project C was added explicitly via `@source`, therefore changes to these
+ // files should be included.
+ await fs.write(
+ 'project-c/src/index.html',
+ html``,
+ )
+ await fs.expectFileToContain('./project-a/dist/out.css', [
+ candidate`[.changed_&]:content-['project-c/src/index.html']`,
+ ])
+
+ // Except for these files, since they are ignored by the default auto source
+ // detection rules.
+ await fs.write(
+ 'project-c/src/logo.jpg',
+ html``,
+ )
+ await fs.expectFileNotToContain('./project-a/dist/out.css', [
+ candidate`[.changed_&]:content-['project-c/src/logo.jpg']`,
+ ])
+ await fs.write(
+ 'project-c/node_modules/my-lib-1/src/index.html',
+ html``,
+ )
+ await fs.expectFileNotToContain('./project-a/dist/out.css', [
+ candidate`[.changed_&]:content-['project-c/node_modules/my-lib-1/src/index.html']`,
+ ])
+
+ // Creating new files in the "root" of auto source detected folders
+ // await fs.write(
+ // 'project-b/new-file.html',
+ // html``,
+ // )
+ // await fs.write(
+ // 'project-b/new-folder/new-file.html',
+ // html``,
+ // )
+ // await fs.write(
+ // 'project-c/new-file.html',
+ // html``,
+ // )
+ // await fs.write(
+ // 'project-c/new-folder/new-file.html',
+ // html``,
+ // )
+
+ // await fs.write('project-a/src/index.css', await fs.read('project-a/src/index.css'))
+ // await new Promise((resolve) => setTimeout(resolve, 1000))
+
+ // await fs.expectFileToContain('./project-a/dist/out.css', [
+ // candidate`[.created_&]:content-['project-b/new-file.html']`,
+ // candidate`[.created_&]:content-['project-b/new-folder/new-file.html']`,
+ // candidate`[.created_&]:content-['project-c/new-file.html']`,
+ // candidate`[.created_&]:content-['project-c/new-folder/new-file.html']`,
+ // ])
+ },
+)
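
The expectations in this watch test follow one rule of thumb: the default ignore rules (.gitignore entries, `node_modules`, binary extensions) are only bypassed for the part of a `@source` entry that is spelled out literally; anything reached purely through a glob keeps the heuristics. A sketch of that literal-prefix idea (`literalPrefix` is a hypothetical helper, not scanner code):

```ts
// Everything before the first glob segment is the literal prefix; the ignore
// rules are bypassed only for paths covered by that prefix.
function literalPrefix(pattern: string): string {
  let segments: string[] = []
  for (let segment of pattern.split('/')) {
    if (/[*?{[]/.test(segment)) break
    segments.push(segment)
  }
  return segments.join('/')
}

// literalPrefix('../node_modules/{my-lib-1,my-lib-2}/src/**/*.html') → '../node_modules'
// literalPrefix('../../project-d/**/*.{html,js}')                    → '../../project-d'
// literalPrefix('../../project-d/src/bar.html')                      → '../../project-d/src/bar.html'
```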
+
+test(
+ 'auto source detection disabled',
+ {
+ fs: {
+ 'package.json': json`
+ {
+ "dependencies": {
+ "postcss": "^8",
+ "postcss-cli": "^10",
+ "tailwindcss": "workspace:^",
+ "@tailwindcss/postcss": "workspace:^"
+ }
+ }
+ `,
+ 'postcss.config.js': js`
+ module.exports = {
+ plugins: {
+ '@tailwindcss/postcss': {},
+ },
+ }
+ `,
+ 'index.css': css`
+ @import 'tailwindcss/theme' theme(reference);
+
+        /* (1) */
+        /* - Auto source detection is disabled via source(none) */
+        /* - Nothing should be scanned automatically, not even './src' or the */
+        /*   current working directory */
+        @import 'tailwindcss/utilities' source(none);
+
+        /* (2) */
+        /* - Only the explicit `@source` glob below should be scanned */
+        /* - Only '.html' files should be included */
+        /* - './pages/ignored.html' should be ignored (.gitignore'd) */
+ @source "./pages/**/*.html";
+ `,
+
+ '.gitignore': dedent`
+ /src/ignored
+ /pages/ignored.html
+ `,
+
+ // (1)
+      'index.html': 'content-["index.html"] content-["BAD"]', // Not scanned: auto source detection is disabled
+ 'src/index.html': 'content-["src/index.html"] content-["BAD"]',
+ 'src/nested/index.html': 'content-["src/nested/index.html"] content-["BAD"]',
+ 'src/index.jpg': 'content-["src/index.jpg"] content-["BAD"]',
+ 'src/nested/index.tar': 'content-["src/nested/index.tar"] content-["BAD"]',
+ 'src/ignored/index.html': 'content-["src/ignored/index.html"] content-["BAD"]',
+
+      // (2)
+ 'pages/foo.html': 'content-["pages/foo.html"]',
+ 'pages/nested/foo.html': 'content-["pages/nested/foo.html"]',
+ 'pages/ignored.html': 'content-["pages/ignored.html"] content-["BAD"]',
+ 'pages/foo.jsx': 'content-["pages/foo.jsx"] content-["BAD"]',
+ 'pages/nested/foo.jsx': 'content-["pages/nested/foo.jsx"] content-["BAD"]',
+ },
+ },
+ async ({ fs, exec }) => {
+ await exec('pnpm postcss index.css --output dist/out.css')
+
+ expect(await fs.dumpFiles('./dist/*.css')).toMatchInlineSnapshot(`
+ "
+ --- ./dist/out.css ---
+ .content-\\[\\"pages\\/foo\\.html\\"\\] {
+ --tw-content: "pages/foo.html";
+ content: var(--tw-content);
+ }
+ .content-\\[\\"pages\\/nested\\/foo\\.html\\"\\] {
+ --tw-content: "pages/nested/foo.html";
+ content: var(--tw-content);
+ }
+ @supports (-moz-orient: inline) {
+ @layer base {
+ *, ::before, ::after, ::backdrop {
+ --tw-content: "";
+ }
+ }
+ }
+ @property --tw-content {
+ syntax: "*";
+ inherits: false;
+ initial-value: "";
+ }
+ "
+ `)
+ },
+)
diff --git a/integrations/utils.ts b/integrations/utils.ts
index 5eed468a077e..9389d638b381 100644
--- a/integrations/utils.ts
+++ b/integrations/utils.ts
@@ -112,8 +112,14 @@ export function test(
(error, stdout, stderr) => {
if (error) {
if (execOptions.ignoreStdErr !== true) console.error(stderr)
+ if (only || debug) {
+ console.error(stdout)
+ }
reject(error)
} else {
+ if (only || debug) {
+ console.log(stdout.toString() + '\n\n' + stderr.toString())
+ }
resolve(stdout.toString() + '\n\n' + stderr.toString())
}
},
@@ -187,14 +193,14 @@ export function test(
child.stdout.on('data', (result) => {
let content = result.toString()
- if (debug) console.log(content)
+ if (debug || only) console.log(content)
combined.push(['stdout', content])
stdoutMessages.push(content)
notifyNext(stdoutActors, stdoutMessages)
})
child.stderr.on('data', (result) => {
let content = result.toString()
- if (debug) console.error(content)
+ if (debug || only) console.error(content)
combined.push(['stderr', content])
stderrMessages.push(content)
notifyNext(stderrActors, stderrMessages)
@@ -303,7 +309,11 @@ export function test(
return Promise.all(
files.map(async (file) => {
let content = await fs.readFile(path.join(root, file), 'utf8')
- return [file, content]
+ return [
+ file,
+ // Drop license comment
+ content.replace(/[\s\n]*\/\*! tailwindcss .*? \*\/[\s\n]*/g, ''),
+ ]
}),
)
},
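
A quick illustration of the `dumpFiles` change above: the license banner is stripped so inline snapshots do not churn on every version bump (the version below is a placeholder):

```ts
let css =
  '/*! tailwindcss v0.0.0 | MIT License | https://tailwindcss.com */\n.flex {\n  display: flex;\n}\n'
let cleaned = css.replace(/[\s\n]*\/\*! tailwindcss .*? \*\/[\s\n]*/g, '')
// cleaned === '.flex {\n  display: flex;\n}\n'
```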
diff --git a/integrations/vite/index.test.ts b/integrations/vite/index.test.ts
index 89814ccc6168..bb16cd5ddf62 100644
--- a/integrations/vite/index.test.ts
+++ b/integrations/vite/index.test.ts
@@ -427,6 +427,265 @@ for (let transformer of ['postcss', 'lightningcss']) {
})
},
)
+
+ test(
+ `source(none) disables looking at the module graph`,
+ {
+ fs: {
+ 'package.json': json`{}`,
+ 'pnpm-workspace.yaml': yaml`
+ #
+ packages:
+ - project-a
+ `,
+ 'project-a/package.json': txt`
+ {
+ "type": "module",
+ "dependencies": {
+ "@tailwindcss/vite": "workspace:^",
+ "tailwindcss": "workspace:^"
+ },
+ "devDependencies": {
+ ${transformer === 'lightningcss' ? `"lightningcss": "^1.26.0",` : ''}
+ "vite": "^5.3.5"
+ }
+ }
+ `,
+ 'project-a/vite.config.ts': ts`
+ import tailwindcss from '@tailwindcss/vite'
+ import { defineConfig } from 'vite'
+
+ export default defineConfig({
+ css: ${transformer === 'postcss' ? '{}' : "{ transformer: 'lightningcss' }"},
+ build: { cssMinify: false },
+ plugins: [tailwindcss()],
+ })
+ `,
+ 'project-a/index.html': html`
+
+
+
+
+
+            Hello, world!
+
+ `,
+ 'project-a/src/index.css': css`
+ @import 'tailwindcss' source(none);
+ @source '../../project-b/src/**/*.html';
+ `,
+ 'project-b/src/index.html': html`
+
+ `,
+ 'project-b/src/index.js': js`
+ const className = "content-['project-b/src/index.js']"
+ module.exports = { className }
+ `,
+ },
+ },
+ async ({ root, fs, exec }) => {
+ await exec('pnpm vite build', { cwd: path.join(root, 'project-a') })
+
+ let files = await fs.glob('project-a/dist/**/*.css')
+ expect(files).toHaveLength(1)
+ let [filename] = files[0]
+
+ // `underline` and `m-2` are only present from files in the module graph
+ // which we've explicitly disabled with source(none) so they should not
+ // be present
+ await fs.expectFileNotToContain(filename, [
+ //
+ candidate`underline`,
+ candidate`m-2`,
+ ])
+
+ // The files from `project-b` should be included because there is an
+ // explicit `@source` directive for it
+ await fs.expectFileToContain(filename, [
+ //
+ candidate`flex`,
+ ])
+
+ // The explicit source directive only covers HTML files, so the JS file
+ // should not be included
+ await fs.expectFileNotToContain(filename, [
+ //
+ candidate`content-['project-b/src/index.js']`,
+ ])
+ },
+ )
+
+ test(
+ `source("…") filters the module graph`,
+ {
+ fs: {
+ 'package.json': json`{}`,
+ 'pnpm-workspace.yaml': yaml`
+ #
+ packages:
+ - project-a
+ `,
+ 'project-a/package.json': txt`
+ {
+ "type": "module",
+ "dependencies": {
+ "@tailwindcss/vite": "workspace:^",
+ "tailwindcss": "workspace:^"
+ },
+ "devDependencies": {
+ ${transformer === 'lightningcss' ? `"lightningcss": "^1.26.0",` : ''}
+ "vite": "^5.3.5"
+ }
+ }
+ `,
+ 'project-a/vite.config.ts': ts`
+ import tailwindcss from '@tailwindcss/vite'
+ import { defineConfig } from 'vite'
+
+ export default defineConfig({
+ css: ${transformer === 'postcss' ? '{}' : "{ transformer: 'lightningcss' }"},
+ build: { cssMinify: false },
+ plugins: [tailwindcss()],
+ })
+ `,
+ 'project-a/index.html': html`
+
+
+
+
+            Hello, world!
+
+
+ `,
+ 'project-a/app/index.js': js`
+ const className = "content-['project-a/app/index.js']"
+ export default { className }
+ `,
+ 'project-a/src/index.css': css`
+ @import 'tailwindcss' source('../app');
+ @source '../../project-b/src/**/*.html';
+ `,
+ 'project-b/src/index.html': html`
+
+ `,
+ 'project-b/src/index.js': js`
+ const className = "content-['project-b/src/index.js']"
+ module.exports = { className }
+ `,
+ },
+ },
+ async ({ root, fs, exec }) => {
+ await exec('pnpm vite build', { cwd: path.join(root, 'project-a') })
+
+ let files = await fs.glob('project-a/dist/**/*.css')
+ expect(files).toHaveLength(1)
+ let [filename] = files[0]
+
+ // `underline` and `m-2` are present in files in the module graph but
+ // we've filtered the module graph such that we only look in
+ // `./app/**/*` so they should not be present
+ await fs.expectFileNotToContain(filename, [
+ //
+ candidate`underline`,
+ candidate`m-2`,
+ candidate`content-['project-a/index.html']`,
+ ])
+
+ // We've filtered the module graph to only look in ./app/**/* so the
+ // candidates from that project should be present
+ await fs.expectFileToContain(filename, [
+ //
+ candidate`content-['project-a/app/index.js']`,
+ ])
+
+        // Even though we're filtering the module graph, explicit sources are
+        // additive, so files from `project-b` should be included because
+        // there is an explicit `@source` directive for it
+ await fs.expectFileToContain(filename, [
+ //
+ candidate`content-['project-b/src/index.html']`,
+ ])
+
+ // The explicit source directive only covers HTML files, so the JS file
+ // should not be included
+ await fs.expectFileNotToContain(filename, [
+ //
+ candidate`content-['project-b/src/index.js']`,
+ ])
+ },
+ )
+
+ test(
+ `source("…") must be a directory`,
+ {
+ fs: {
+ 'package.json': json`{}`,
+ 'pnpm-workspace.yaml': yaml`
+ #
+ packages:
+ - project-a
+ `,
+ 'project-a/package.json': txt`
+ {
+ "type": "module",
+ "dependencies": {
+ "@tailwindcss/vite": "workspace:^",
+ "tailwindcss": "workspace:^"
+ },
+ "devDependencies": {
+ ${transformer === 'lightningcss' ? `"lightningcss": "^1.26.0",` : ''}
+ "vite": "^5.3.5"
+ }
+ }
+ `,
+ 'project-a/vite.config.ts': ts`
+ import tailwindcss from '@tailwindcss/vite'
+ import { defineConfig } from 'vite'
+
+ export default defineConfig({
+ css: ${transformer === 'postcss' ? '{}' : "{ transformer: 'lightningcss' }"},
+ build: { cssMinify: false },
+ plugins: [tailwindcss()],
+ })
+ `,
+ 'project-a/index.html': html`
+
+
+
+
+            Hello, world!
+
+
+ `,
+ 'project-a/app/index.js': js`
+ const className = "content-['project-a/app/index.js']"
+ export default { className }
+ `,
+ 'project-a/src/index.css': css`
+ @import 'tailwindcss' source('../i-do-not-exist');
+ @source '../../project-b/src/**/*.html';
+ `,
+ 'project-b/src/index.html': html`
+
+ `,
+ 'project-b/src/index.js': js`
+ const className = "content-['project-b/src/index.js']"
+ module.exports = { className }
+ `,
+ },
+ },
+ async ({ root, fs, exec }) => {
+ await expect(() =>
+ exec('pnpm vite build', { cwd: path.join(root, 'project-a') }),
+ ).rejects.toThrowError('The `source(../i-do-not-exist)` does not exist')
+
+ let files = await fs.glob('project-a/dist/**/*.css')
+ expect(files).toHaveLength(0)
+ },
+ )
})
}
diff --git a/packages/@tailwindcss-cli/src/commands/build/index.ts b/packages/@tailwindcss-cli/src/commands/build/index.ts
index dc8ba2a90178..b61f3dd49b86 100644
--- a/packages/@tailwindcss-cli/src/commands/build/index.ts
+++ b/packages/@tailwindcss-cli/src/commands/build/index.ts
@@ -137,21 +137,34 @@ export async function handle(args: Result>) {
fullRebuildPaths.push(path)
},
})
+
+ let sources = (() => {
+ // Disable auto source detection
+ if (compiler.root === 'none') {
+ return []
+ }
+
+ // No root specified, use the base directory
+ if (compiler.root === null) {
+ return [{ base, pattern: '**/*' }]
+ }
+
+ // Use the specified root
+ return [compiler.root]
+ })().concat(compiler.globs)
+
+ let scanner = new Scanner({ sources })
env.DEBUG && console.timeEnd('[@tailwindcss/cli] Setup compiler')
- return compiler
+
+ return [compiler, scanner] as const
}
- // Compile the input
- let compiler = await createCompiler(input)
- let scanner = new Scanner({
- detectSources: { base },
- sources: compiler.globs,
- })
+ let [compiler, scanner] = await createCompiler(input)
// Watch for changes
if (args['--watch']) {
let cleanupWatchers = await createWatchers(
- watchDirectories(base, scanner),
+ watchDirectories(scanner),
async function handle(files) {
try {
// If the only change happened to the output file, then we don't want to
@@ -205,13 +218,7 @@ export async function handle(args: Result>) {
fullRebuildPaths = inputFilePath ? [inputFilePath] : []
// Create a new compiler, given the new `input`
- compiler = await createCompiler(input)
-
- // Re-scan the directory to get the new `candidates`
- scanner = new Scanner({
- detectSources: { base },
- sources: compiler.globs,
- })
+ ;[compiler, scanner] = await createCompiler(input)
// Scan the directory for candidates
env.DEBUG && console.time('[@tailwindcss/cli] Scan for candidates')
@@ -219,7 +226,7 @@ export async function handle(args: Result>) {
env.DEBUG && console.timeEnd('[@tailwindcss/cli] Scan for candidates')
// Setup new watchers
- cleanupWatchers = await createWatchers(watchDirectories(base, scanner), handle)
+ cleanupWatchers = await createWatchers(watchDirectories(scanner), handle)
// Re-compile the CSS
env.DEBUG && console.time('[@tailwindcss/cli] Build CSS')
@@ -287,19 +294,16 @@ export async function handle(args: Result>) {
eprintln(`Done in ${formatDuration(end - start)}`)
}
-function watchDirectories(base: string, scanner: Scanner) {
- return [base].concat(
- scanner.globs.flatMap((globEntry) => {
- // We don't want a watcher for negated globs.
- if (globEntry.pattern[0] === '!') return []
+function watchDirectories(scanner: Scanner) {
+ return scanner.globs.flatMap((globEntry) => {
+ // We don't want a watcher for negated globs.
+ if (globEntry.pattern[0] === '!') return []
- // We don't want a watcher for nested directories, these will be covered
- // by the `base` directory already.
- if (globEntry.base.startsWith(base)) return []
+ // We don't want a watcher for files, only directories.
+ if (globEntry.pattern === '') return []
- return globEntry.base
- }),
- )
+ return globEntry.base
+ })
}
async function createWatchers(dirs: string[], cb: (files: string[]) => void) {
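
To make the new watcher setup concrete, here is what `watchDirectories(scanner)` would return for a few kinds of scanner globs. The values are illustrative; a `pattern` of `''` is how a single-file source is reported (the PostCSS changes below use the same convention):

```ts
let globs = [
  { base: '/repo/project-b', pattern: '**/*' }, // directory source → watched
  { base: '/repo/project-a/src/logo.jpg', pattern: '' }, // single file → skipped
  { base: '/repo/project-a', pattern: '!**/dist/**' }, // negated glob → skipped
]

let watched = globs.flatMap((globEntry) => {
  if (globEntry.pattern[0] === '!') return []
  if (globEntry.pattern === '') return []
  return globEntry.base
})
// → ['/repo/project-b']
```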
diff --git a/packages/@tailwindcss-node/src/compile.ts b/packages/@tailwindcss-node/src/compile.ts
index 9f3f58fddae4..40bb9ef1fea4 100644
--- a/packages/@tailwindcss-node/src/compile.ts
+++ b/packages/@tailwindcss-node/src/compile.ts
@@ -10,11 +10,11 @@ import {
} from 'tailwindcss'
import { getModuleDependencies } from './get-module-dependencies'
-export function compile(
+export async function compile(
css: string,
{ base, onDependency }: { base: string; onDependency: (path: string) => void },
) {
- return _compile(css, {
+ let compiler = await _compile(css, {
base,
async loadModule(id, base) {
return loadModule(id, base, onDependency)
@@ -23,6 +23,30 @@ export function compile(
return loadStylesheet(id, base, onDependency)
},
})
+
+  // Verify that the `source(…)` path exists (checked up to where the glob pattern starts)
+ if (compiler.root && compiler.root !== 'none') {
+ let globSymbols = /[*{]/
+ let basePath = []
+ for (let segment of compiler.root.pattern.split('/')) {
+ if (globSymbols.test(segment)) {
+ break
+ }
+
+ basePath.push(segment)
+ }
+
+ let exists = await fsPromises
+ .stat(path.resolve(base, basePath.join('/')))
+ .then((stat) => stat.isDirectory())
+ .catch(() => false)
+
+ if (!exists) {
+ throw new Error(`The \`source(${compiler.root.pattern})\` does not exist`)
+ }
+ }
+
+ return compiler
}
export async function __unstable__loadDesignSystem(css: string, { base }: { base: string }) {
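
Pulled out of the compiler for clarity, the existence check above only requires the non-glob prefix of the `source(…)` pattern to exist as a directory. A standalone sketch with the same logic (paths in the comments are examples):

```ts
import fsPromises from 'node:fs/promises'
import path from 'node:path'

async function sourceRootExists(base: string, pattern: string): Promise<boolean> {
  let globSymbols = /[*{]/
  let basePath: string[] = []

  // Keep only the segments before the first glob segment
  for (let segment of pattern.split('/')) {
    if (globSymbols.test(segment)) break
    basePath.push(segment)
  }

  return fsPromises
    .stat(path.resolve(base, basePath.join('/')))
    .then((stat) => stat.isDirectory())
    .catch(() => false)
}

// sourceRootExists('/repo/project-a/src', '../../project-b')   → true when that directory exists
// sourceRootExists('/repo/project-a/src', '../i-do-not-exist') → false (the Vite test above relies on this)
```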
diff --git a/packages/@tailwindcss-postcss/src/index.ts b/packages/@tailwindcss-postcss/src/index.ts
index 69a693971607..f040b611baaf 100644
--- a/packages/@tailwindcss-postcss/src/index.ts
+++ b/packages/@tailwindcss-postcss/src/index.ts
@@ -134,11 +134,23 @@ function tailwindcss(opts: PluginOptions = {}): AcceptedPlugin {
}
if (context.scanner === null || rebuildStrategy === 'full') {
+ let sources = (() => {
+ // Disable auto source detection
+ if (context.compiler.root === 'none') {
+ return []
+ }
+
+ // No root specified, use the base directory
+ if (context.compiler.root === null) {
+ return [{ base, pattern: '**/*' }]
+ }
+
+ // Use the specified root
+ return [context.compiler.root]
+ })().concat(context.compiler.globs)
+
// Look for candidates used to generate the CSS
- context.scanner = new Scanner({
- detectSources: { base },
- sources: context.compiler.globs,
- })
+ context.scanner = new Scanner({ sources })
}
env.DEBUG && console.time('[@tailwindcss/postcss] Scan for candidates')
@@ -159,13 +171,22 @@ function tailwindcss(opts: PluginOptions = {}): AcceptedPlugin {
// giving tools like Vite or Parcel a glob that can be used to limit
// the files that cause a rebuild to only those that match it.
for (let { base, pattern } of context.scanner.globs) {
- result.messages.push({
- type: 'dir-dependency',
- plugin: '@tailwindcss/postcss',
- dir: base,
- glob: pattern,
- parent: result.opts.from,
- })
+ if (pattern === '') {
+ result.messages.push({
+ type: 'dependency',
+ plugin: '@tailwindcss/postcss',
+ file: base,
+ parent: result.opts.from,
+ })
+ } else {
+ result.messages.push({
+ type: 'dir-dependency',
+ plugin: '@tailwindcss/postcss',
+ dir: base,
+ glob: pattern,
+ parent: result.opts.from,
+ })
+ }
}
env.DEBUG && console.time('[@tailwindcss/postcss] Build CSS')
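
The file-vs-directory distinction above maps onto PostCSS's two dependency message types. A sketch of the shape of the emitted messages (paths are illustrative and `toPostcssMessage` is a made-up helper):

```ts
type ScannerGlob = { base: string; pattern: string }

function toPostcssMessage(glob: ScannerGlob, from: string | undefined) {
  return glob.pattern === ''
    ? // A single file registered via `@source` → plain file dependency
      { type: 'dependency', plugin: '@tailwindcss/postcss', file: glob.base, parent: from }
    : // A directory or glob → dir-dependency so the bundler can watch the folder
      { type: 'dir-dependency', plugin: '@tailwindcss/postcss', dir: glob.base, glob: glob.pattern, parent: from }
}

// toPostcssMessage({ base: '/app/src/logo.jpg', pattern: '' }, 'index.css')
// toPostcssMessage({ base: '/app/pages', pattern: '**/*.html' }, 'index.css')
```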
diff --git a/packages/@tailwindcss-upgrade/src/migrate-js-config.ts b/packages/@tailwindcss-upgrade/src/migrate-js-config.ts
index f8a6ad6f8dc2..60bcbcefd7cb 100644
--- a/packages/@tailwindcss-upgrade/src/migrate-js-config.ts
+++ b/packages/@tailwindcss-upgrade/src/migrate-js-config.ts
@@ -285,7 +285,7 @@ function keyframesToCss(keyframes: Record): string {
}
function autodetectedSourceFiles(base: string) {
- let scanner = new Scanner({ detectSources: { base } })
+ let scanner = new Scanner({ sources: [{ base, pattern: '**/*' }] })
scanner.scan()
return scanner.files
}
diff --git a/packages/@tailwindcss-vite/src/index.ts b/packages/@tailwindcss-vite/src/index.ts
index 9571342cdbda..a79ede7a2433 100644
--- a/packages/@tailwindcss-vite/src/index.ts
+++ b/packages/@tailwindcss-vite/src/index.ts
@@ -2,6 +2,7 @@ import { compile, env, normalizePath } from '@tailwindcss/node'
import { clearRequireCache } from '@tailwindcss/node/require-cache'
import { Scanner } from '@tailwindcss/oxide'
import { Features, transform } from 'lightningcss'
+import fs from 'node:fs/promises'
import path from 'path'
import type { Plugin, ResolvedConfig, Rollup, Update, ViteDevServer } from 'vite'
@@ -35,7 +36,7 @@ export default function tailwindcss(): Plugin[] {
// Note: To improve performance, we do not remove candidates from this set.
// This means a longer-ongoing dev mode session might contain candidates that
// are no longer referenced in code.
- let moduleGraphCandidates = new Set()
+ let moduleGraphCandidates = new DefaultMap<string, Set<string>>(() => new Set())
let moduleGraphScanner = new Scanner({})
let roots: DefaultMap<string, Root> = new DefaultMap(
@@ -46,7 +47,7 @@ export default function tailwindcss(): Plugin[] {
let updated = false
for (let candidate of moduleGraphScanner.scanFiles([{ content, extension }])) {
updated = true
- moduleGraphCandidates.add(candidate)
+ moduleGraphCandidates.get(id).add(candidate)
}
if (updated) {
@@ -343,14 +344,16 @@ class Root {
// the lifetime of the root.
private candidates: Set<string> = new Set<string>()
- // List of all file dependencies that were captured while generating the root.
- // These are retained so we can clear the require cache when we rebuild the
- // root.
+ // List of all dependencies captured while generating the root. These are
+ // retained so we can clear the require cache when we rebuild the root.
private dependencies = new Set<string>()
+ // The resolved path given to `source(…)`. When not given this is `null`.
+ private basePath: string | null = null
+
constructor(
private id: string,
- private getSharedCandidates: () => Set<string>,
+ private getSharedCandidates: () => Map<string, Set<string>>,
private base: string,
) {}
@@ -379,9 +382,22 @@ class Root {
})
env.DEBUG && console.timeEnd('[@tailwindcss/vite] Setup compiler')
- this.scanner = new Scanner({
- sources: this.compiler.globs,
- })
+ let sources = (() => {
+ // Disable auto source detection
+ if (this.compiler.root === 'none') {
+ return []
+ }
+
+ // No root specified, use the module graph
+ if (this.compiler.root === null) {
+ return []
+ }
+
+ // Use the specified root
+ return [this.compiler.root]
+ })().concat(this.compiler.globs)
+
+ this.scanner = new Scanner({ sources })
}
// This should not be here, but right now the Vite plugin is setup where we
@@ -411,14 +427,62 @@ class Root {
relative = normalizePath(relative)
addWatchFile(path.posix.join(relative, glob.pattern))
+
+ let root = this.compiler.root
+
+ if (root !== 'none' && root !== null) {
+ let basePath = path.posix.resolve(root.base, root.pattern)
+
+ let isDir = await fs.stat(basePath).then(
+ (stats) => stats.isDirectory(),
+ () => false,
+ )
+
+ if (!isDir) {
+ throw new Error(
+ `The path given to \`source(…)\` must be a directory but got \`source(${basePath})\` instead.`,
+ )
+ }
+
+ this.basePath = basePath
+ } else if (root === null) {
+ this.basePath = null
+ }
}
this.requiresRebuild = true
env.DEBUG && console.time('[@tailwindcss/vite] Build CSS')
- let result = this.compiler.build([...this.getSharedCandidates(), ...this.candidates])
+ let result = this.compiler.build([...this.sharedCandidates(), ...this.candidates])
env.DEBUG && console.timeEnd('[@tailwindcss/vite] Build CSS')
return result
}
+
+  private sharedCandidates(): Set<string> {
+ if (!this.compiler) return new Set()
+ if (this.compiler.root === 'none') return new Set()
+
+ let shouldIncludeCandidatesFrom = (id: string) => {
+ if (this.basePath === null) return true
+
+      // This is a virtual module that's not on the file system
+ // TODO: What should we do here?
+ if (!id.startsWith('/')) return true
+
+ return id.startsWith(this.basePath)
+ }
+
+    let shared = new Set<string>()
+
+ for (let [id, candidates] of this.getSharedCandidates()) {
+ if (!shouldIncludeCandidatesFrom(id)) continue
+
+ for (let candidate of candidates) {
+ shared.add(candidate)
+ }
+ }
+
+ return shared
+ }
}
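
Candidates are now tracked per module id, which is what makes the `source('…')` filtering in the Vite tests possible: only modules that live under the resolved `basePath` contribute their candidates. A sketch with made-up data (the names mirror the plugin, but the values do not come from a real run):

```ts
let basePath: string | null = '/repo/project-a/app' // resolved from source('../app')
let moduleGraphCandidates = new Map<string, Set<string>>([
  ['/repo/project-a/index.html', new Set(['underline', 'm-2'])],
  ['/repo/project-a/app/index.js', new Set(["content-['project-a/app/index.js']"])],
])

let shared = new Set<string>()
for (let [id, candidates] of moduleGraphCandidates) {
  // Virtual modules (ids that are not absolute paths) are kept for now.
  if (id.startsWith('/') && basePath !== null && !id.startsWith(basePath)) continue
  for (let candidate of candidates) shared.add(candidate)
}
// shared → Set { "content-['project-a/app/index.js']" }
```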
diff --git a/packages/tailwindcss/src/ast.ts b/packages/tailwindcss/src/ast.ts
index ee79365b92ff..9f31105891f3 100644
--- a/packages/tailwindcss/src/ast.ts
+++ b/packages/tailwindcss/src/ast.ts
@@ -223,7 +223,10 @@ export function toCss(ast: AstNode[]) {
// AtRule
else if (node.kind === 'at-rule') {
- if (node.name === '@tailwind' && node.params === 'utilities') {
+ if (
+ node.name === '@tailwind' &&
+ (node.params === 'utilities' || node.params.startsWith('utilities'))
+ ) {
for (let child of node.nodes) {
css += stringify(child, depth)
}
diff --git a/packages/tailwindcss/src/at-import.ts b/packages/tailwindcss/src/at-import.ts
index 7054419122ea..e5b937c71c3b 100644
--- a/packages/tailwindcss/src/at-import.ts
+++ b/packages/tailwindcss/src/at-import.ts
@@ -38,8 +38,12 @@ export async function substituteAtImports(
let ast = CSS.parse(loaded.content)
await substituteAtImports(ast, loaded.base, loadStylesheet, recurseCount + 1)
- contextNode.nodes = buildImportNodes(ast, layer, media, supports)
- contextNode.context.base = loaded.base
+ contextNode.nodes = buildImportNodes(
+ [context({ base: loaded.base }, ast)],
+ layer,
+ media,
+ supports,
+ )
})(),
)
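
Wrapping the imported AST in a `context(…)` node matters because context values merge down the tree: nodes that came from an imported stylesheet see that stylesheet's `base`, while the `sourceBase` wrapper added for `@media source(…)` records where the `source(…)` was written. A simplified walker to show the effect; the node shapes and paths here are illustrative, not the real AST types:

```ts
type Ctx = Record<string, string>
type AstNode =
  | { kind: 'context'; context: Ctx; nodes: AstNode[] }
  | { kind: 'at-rule'; name: string; params: string }

function walk(nodes: AstNode[], visit: (node: AstNode, ctx: Ctx) => void, ctx: Ctx = {}) {
  for (let node of nodes) {
    if (node.kind === 'context') walk(node.nodes, visit, { ...ctx, ...node.context })
    else visit(node, ctx)
  }
}

// Entry CSS lives in /repo/project-a/src, the utilities layer is imported from
// node_modules, and the user wrote source('../../project-b') on the @import.
let ast: AstNode[] = [
  { kind: 'context', context: { base: '/repo/project-a/src' }, nodes: [
    { kind: 'context', context: { base: '/repo/node_modules/tailwindcss' }, nodes: [
      { kind: 'context', context: { sourceBase: '/repo/project-a/src' }, nodes: [
        { kind: 'at-rule', name: '@tailwind', params: "utilities source('../../project-b')" },
      ]},
    ]},
  ]},
]

walk(ast, (node, ctx) => {
  if (node.kind !== 'at-rule') return
  // Resolve source(…) against the file that wrote it, not the imported sheet:
  console.log(ctx.sourceBase ?? ctx.base) // → '/repo/project-a/src'
})
```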
diff --git a/packages/tailwindcss/src/candidate.bench.ts b/packages/tailwindcss/src/candidate.bench.ts
index ef7c97114dfd..5b7171432e17 100644
--- a/packages/tailwindcss/src/candidate.bench.ts
+++ b/packages/tailwindcss/src/candidate.bench.ts
@@ -8,7 +8,7 @@ import { Theme } from './theme'
const root = process.env.FOLDER || process.cwd()
// Auto content detection
-const scanner = new Scanner({ detectSources: { base: root } })
+const scanner = new Scanner({ sources: [{ base: root, pattern: '**/*' }] })
const candidates = scanner.scan()
const designSystem = buildDesignSystem(new Theme())
diff --git a/packages/tailwindcss/src/index.bench.ts b/packages/tailwindcss/src/index.bench.ts
index dcffb522e3f2..fb9cd0e71084 100644
--- a/packages/tailwindcss/src/index.bench.ts
+++ b/packages/tailwindcss/src/index.bench.ts
@@ -7,7 +7,7 @@ const root = process.env.FOLDER || process.cwd()
const css = String.raw
bench('compile', async () => {
- let scanner = new Scanner({ detectSources: { base: root } })
+ let scanner = new Scanner({ sources: [{ base: root, pattern: '**/*' }] })
let candidates = scanner.scan()
let { build } = await compile(css`
diff --git a/packages/tailwindcss/src/index.ts b/packages/tailwindcss/src/index.ts
index 31ec72c6c535..402411861e48 100644
--- a/packages/tailwindcss/src/index.ts
+++ b/packages/tailwindcss/src/index.ts
@@ -4,7 +4,7 @@ import {
atRoot,
atRule,
comment,
- context,
+ context as contextNode,
decl,
rule,
styleRule,
@@ -76,21 +76,64 @@ async function parseCss(
loadStylesheet = throwOnLoadStylesheet,
}: CompileOptions = {},
) {
- let ast = [context({ base }, CSS.parse(css))] as AstNode[]
+ let ast = [contextNode({ base }, CSS.parse(css))] as AstNode[]
await substituteAtImports(ast, base, loadStylesheet)
- let important: boolean | null = null
+ let important = null as boolean | null
let theme = new Theme()
let customVariants: ((designSystem: DesignSystem) => void)[] = []
let customUtilities: ((designSystem: DesignSystem) => void)[] = []
let firstThemeRule = null as StyleRule | null
+ let utilitiesNode = null as AtRule | null
let globs: { base: string; pattern: string }[] = []
+ let root:
+ | null // Unknown root
+ | 'none' // Explicitly no root specified via `source(none)`
+ // Specified via `source(…)`, relative to the `base`
+ | { base: string; pattern: string } = null
// Handle at-rules
walk(ast, (node, { parent, replaceWith, context }) => {
if (node.kind !== 'at-rule') return
+ // Find `@tailwind utilities` so that we can later replace it with the
+ // actual generated utility class CSS.
+ if (
+ utilitiesNode === null &&
+ node.name === '@tailwind' &&
+ (node.params === 'utilities' || node.params.startsWith('utilities'))
+ ) {
+ let params = segment(node.params, ' ')
+ for (let param of params) {
+ if (param.startsWith('source(')) {
+ let path = param.slice(7, -1)
+
+ // Keyword: `source(none)`
+ if (path === 'none') {
+ root = path
+ continue
+ }
+
+ // Explicit path: `source('…')`
+ if (
+ (path[0] === '"' && path[path.length - 1] !== '"') ||
+ (path[0] === "'" && path[path.length - 1] !== "'") ||
+ (path[0] !== "'" && path[0] !== '"')
+ ) {
+ throw new Error('`source(…)` paths must be quoted.')
+ }
+
+ root = {
+ base: context.sourceBase ?? context.base,
+ pattern: path.slice(1, -1),
+ }
+ }
+ }
+
+ utilitiesNode = node
+ }
+
// Collect custom `@utility` at-rules
if (node.name === '@utility') {
if (parent !== null) {
@@ -234,12 +277,27 @@ async function parseCss(
let unknownParams: string[] = []
for (let param of params) {
+ // Handle `@media source(…)`
+ if (param.startsWith('source(')) {
+ let path = param.slice(7, -1)
+
+ walk(node.nodes, (child, { replaceWith }) => {
+ if (child.kind !== 'at-rule') return
+
+ if (child.name === '@tailwind' && child.params === 'utilities') {
+ child.params += ` source(${path})`
+ replaceWith([contextNode({ sourceBase: context.base }, [child])])
+ return WalkAction.Stop
+ }
+ })
+ }
+
// Handle `@media theme(…)`
//
// We support `@import "tailwindcss/theme" theme(reference)` as a way to
// import an external theme file as a reference, which becomes `@media
// theme(reference) { … }` when the `@import` is processed.
- if (param.startsWith('theme(')) {
+ else if (param.startsWith('theme(')) {
let themeParams = param.slice(6, -1)
walk(node.nodes, (child) => {
@@ -417,6 +475,8 @@ async function parseCss(
designSystem,
ast,
globs,
+ root,
+ utilitiesNode,
}
}
@@ -425,24 +485,13 @@ export async function compile(
opts: CompileOptions = {},
): Promise<{
globs: { base: string; pattern: string }[]
+ root:
+ | null // Unknown root
+ | 'none' // Explicitly no root specified via `source(none)`
+ | { base: string; pattern: string } // Specified via `source(…)`, relative to the `base`
build(candidates: string[]): string
}> {
- let { designSystem, ast, globs } = await parseCss(css, opts)
-
- let tailwindUtilitiesNode: AtRule | null = null
-
- // Find `@tailwind utilities` so that we can later replace it with the actual
- // generated utility class CSS.
- walk(ast, (node) => {
- if (node.kind === 'at-rule' && node.name === '@tailwind' && node.params === 'utilities') {
- tailwindUtilitiesNode = node
-
- // Stop walking after finding `@tailwind utilities` to avoid walking all
- // of the generated CSS. This means `@tailwind utilities` can only appear
- // once per file but that's the intended usage at this point in time.
- return WalkAction.Stop
- }
- })
+ let { designSystem, ast, globs, root, utilitiesNode } = await parseCss(css, opts)
if (process.env.NODE_ENV !== 'test') {
ast.unshift(comment(`! tailwindcss v${version} | MIT License | https://tailwindcss.com `))
@@ -462,6 +511,7 @@ export async function compile(
return {
globs,
+ root,
build(newRawCandidates: string[]) {
let didChange = false
@@ -480,7 +530,7 @@ export async function compile(
return compiledCss
}
- if (tailwindUtilitiesNode) {
+ if (utilitiesNode) {
let newNodes = compileCandidates(allValidCandidates, designSystem, {
onInvalidCandidate,
}).astNodes
@@ -494,7 +544,7 @@ export async function compile(
previousAstNodeCount = newNodes.length
- tailwindUtilitiesNode.nodes = newNodes
+ utilitiesNode.nodes = newNodes
compiledCss = toCss(ast)
}
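
Isolated from the walk above, the `source(…)` argument parsing boils down to: `none` is a keyword, everything else must be a quoted path that is then resolved against `sourceBase ?? base`. A standalone sketch (`parseSourceParam` is a made-up name):

```ts
type Root = null | 'none' | { base: string; pattern: string }

function parseSourceParam(param: string, base: string): Root {
  if (!param.startsWith('source(')) return null
  let path = param.slice(7, -1) // strip `source(` and `)`

  // Keyword: `source(none)`
  if (path === 'none') return 'none'

  // Anything else must be wrapped in matching quotes
  let quoted =
    (path[0] === '"' && path[path.length - 1] === '"') ||
    (path[0] === "'" && path[path.length - 1] === "'")
  if (!quoted) throw new Error('`source(…)` paths must be quoted.')

  return { base, pattern: path.slice(1, -1) }
}

// parseSourceParam('source(none)', '/app')    → 'none'
// parseSourceParam("source('./src')", '/app') → { base: '/app', pattern: './src' }
// parseSourceParam('source(./src)', '/app')   → throws: paths must be quoted
```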
diff --git a/turbo.json b/turbo.json
index 9dbd528909cb..35a3e63a7ce7 100644
--- a/turbo.json
+++ b/turbo.json
@@ -10,8 +10,8 @@
"./build.rs",
"./package.json",
"./Cargo.toml",
- "../core/src/**/*",
- "../core/Cargo.toml",
+ "../oxide/src/**/*",
+ "../oxide/Cargo.toml",
"../Cargo.toml",
"../package.json"
]
@@ -24,8 +24,8 @@
"./build.rs",
"./package.json",
"./Cargo.toml",
- "../core/src/**/*",
- "../core/Cargo.toml",
+ "../oxide/src/**/*",
+ "../oxide/Cargo.toml",
"../Cargo.toml",
"../package.json"
],