diff --git a/cli/compilers/ts.rs b/cli/compilers/ts.rs
index 0261b983e74637463e63814f07786088cc94e815..73f5a11fbb89788e484b1cbfa4f4d8671555fcf9 100644
--- a/cli/compilers/ts.rs
+++ b/cli/compilers/ts.rs
@@ -451,7 +451,7 @@ impl TsCompiler {
   ///
   /// Along compiled file a special metadata file is saved as well containing
   /// hash that can be validated to avoid unnecessary recompilation.
-  fn cache_compiled_file(
+  async fn cache_compiled_file(
     &self,
     module_specifier: &ModuleSpecifier,
     contents: &str,
@@ -459,35 +459,31 @@ impl TsCompiler {
     let js_key = self
       .disk_cache
       .get_cache_filename_with_extension(module_specifier.as_url(), "js");
-    self
-      .disk_cache
-      .set(&js_key, contents.as_bytes())
-      .and_then(|_| {
-        self.mark_compiled(module_specifier.as_url());
-
-        let source_file = self
-          .file_fetcher
-          .fetch_cached_source_file(&module_specifier)
-          .expect("Source file not found");
-
-        let version_hash = source_code_version_hash(
-          &source_file.source_code,
-          version::DENO,
-          &self.config.hash,
-        );
+    self.disk_cache.set(&js_key, contents.as_bytes())?;
+    self.mark_compiled(module_specifier.as_url());
+    let source_file = self
+      .file_fetcher
+      .fetch_cached_source_file(&module_specifier)
+      .await
+      .expect("Source file not found");
+
+    let version_hash = source_code_version_hash(
+      &source_file.source_code,
+      version::DENO,
+      &self.config.hash,
+    );
 
-        let compiled_file_metadata = CompiledFileMetadata {
-          source_path: source_file.filename,
-          version_hash,
-        };
-        let meta_key = self
-          .disk_cache
-          .get_cache_filename_with_extension(module_specifier.as_url(), "meta");
-        self.disk_cache.set(
-          &meta_key,
-          compiled_file_metadata.to_json_string()?.as_bytes(),
-        )
-      })
+    let compiled_file_metadata = CompiledFileMetadata {
+      source_path: source_file.filename,
+      version_hash,
+    };
+    let meta_key = self
+      .disk_cache
+      .get_cache_filename_with_extension(module_specifier.as_url(), "meta");
+    self.disk_cache.set(
+      &meta_key,
+      compiled_file_metadata.to_json_string()?.as_bytes(),
+    )
   }
 
   /// Return associated source map file for given TS module.
@@ -528,7 +524,7 @@ impl TsCompiler {
   }
 
   /// This method is called by TS compiler via an "op".
-  pub fn cache_compiler_output(
+  pub async fn cache_compiler_output(
     &self,
     module_specifier: &ModuleSpecifier,
     extension: &str,
@@ -536,7 +532,7 @@ impl TsCompiler {
   ) -> std::io::Result<()> {
     match extension {
       ".map" => self.cache_source_map(module_specifier, contents),
-      ".js" => self.cache_compiled_file(module_specifier, contents),
+      ".js" => self.cache_compiled_file(module_specifier, contents).await,
       _ => unreachable!(),
     }
   }
@@ -576,9 +572,10 @@ impl TsCompiler {
     script_name: &str,
   ) -> Option<SourceFile> {
     if let Some(module_specifier) = self.try_to_resolve(script_name) {
-      return self
+      let fut = self
         .file_fetcher
         .fetch_cached_source_file(&module_specifier);
+      return futures::executor::block_on(fut);
     }
 
     None
diff --git a/cli/file_fetcher.rs b/cli/file_fetcher.rs
index de3a76eec310d66c757339df21165d1edb354bb6..ea82f151edcd00843be8047e4606075344e1ee77 100644
--- a/cli/file_fetcher.rs
+++ b/cli/file_fetcher.rs
@@ -10,7 +10,6 @@ use crate::http_util::FetchOnceResult;
 use crate::msg;
 use deno_core::ErrBox;
 use deno_core::ModuleSpecifier;
-use futures::future::Either;
 use futures::future::FutureExt;
 use regex::Regex;
 use reqwest;
@@ -42,9 +41,6 @@ pub struct SourceFile {
   pub source_code: Vec<u8>,
 }
 
-pub type SourceFileFuture =
-  dyn Future<Output = Result<SourceFile, ErrBox>> + Send;
-
 /// Simple struct implementing in-process caching to prevent multiple
 /// fs reads/net fetches for same file.
 #[derive(Clone, Default)]
@@ -114,8 +110,10 @@ impl SourceFileFetcher {
     Ok(())
   }
 
+  // TODO(bartlomieju): fetching cached resources should be done
+  // using blocking fs syscalls
   /// Required for TS compiler and source maps.
-  pub fn fetch_cached_source_file(
+  pub async fn fetch_cached_source_file(
     &self,
     specifier: &ModuleSpecifier,
   ) -> Option<SourceFile> {
@@ -127,79 +125,81 @@ impl SourceFileFetcher {
 
     // If file is not in memory cache check if it can be found
     // in local cache - which effectively means trying to fetch
-    // using "--cached-only" flag. We can safely block on this
-    // future, because it doesn't do any asynchronous action
-    // it that path.
-    let fut = self.get_source_file_async(specifier.as_url(), true, false, true);
-
-    futures::executor::block_on(fut).ok()
+    // using "--cached-only" flag.
+    // It should be safe for the caller to block on this
+    // future, because it doesn't actually do any asynchronous
+    // action in that path.
+    self
+      .get_source_file_async(specifier.as_url(), true, false, true)
+      .await
+      .ok()
   }
 
-  pub fn fetch_source_file_async(
+  pub async fn fetch_source_file_async(
     &self,
     specifier: &ModuleSpecifier,
     maybe_referrer: Option<ModuleSpecifier>,
-  ) -> Pin<Box<SourceFileFuture>> {
+  ) -> Result<SourceFile, ErrBox> {
     let module_url = specifier.as_url().to_owned();
     debug!("fetch_source_file_async specifier: {} ", &module_url);
 
     // Check if this file was already fetched and can be retrieved from
     // in-process cache.
-    if let Some(source_file) = self.source_file_cache.get(specifier.to_string())
-    {
-      return Box::pin(async { Ok(source_file) });
+    let maybe_cached_file = self.source_file_cache.get(specifier.to_string());
+    if let Some(source_file) = maybe_cached_file {
+      return Ok(source_file);
     }
 
     let source_file_cache = self.source_file_cache.clone();
     let specifier_ = specifier.clone();
 
-    let source_file = self.get_source_file_async(
-      &module_url,
-      self.use_disk_cache,
-      self.no_remote,
-      self.cached_only,
-    );
+    let result = self
+      .get_source_file_async(
+        &module_url,
+        self.use_disk_cache,
+        self.no_remote,
+        self.cached_only,
+      )
+      .await;
 
-    Box::pin(async move {
-      match source_file.await {
-        Ok(mut file) => {
-          // TODO: move somewhere?
-          if file.source_code.starts_with(b"#!") {
-            file.source_code = filter_shebang(file.source_code);
-          }
+    match result {
+      Ok(mut file) => {
+        // TODO: move somewhere?
+        if file.source_code.starts_with(b"#!") {
+          file.source_code = filter_shebang(file.source_code);
+        }
 
-          // Cache in-process for subsequent access.
-          source_file_cache.set(specifier_.to_string(), file.clone());
+        // Cache in-process for subsequent access.
+        source_file_cache.set(specifier_.to_string(), file.clone());
 
-          Ok(file)
-        }
-        Err(err) => {
-          let err_kind = err.kind();
-          let referrer_suffix = if let Some(referrer) = maybe_referrer {
-            format!(r#" from "{}""#, referrer)
-          } else {
-            "".to_owned()
-          };
-          // Hack: Check error message for "--cached-only" because the kind
-          // conflicts with other errors.
-          let err = if err.to_string().contains("--cached-only") {
-            let msg = format!(
-              r#"Cannot find module "{}"{} in cache, --cached-only is specified"#,
-              module_url, referrer_suffix
-            );
-            DenoError::new(ErrorKind::NotFound, msg).into()
-          } else if err_kind == ErrorKind::NotFound {
-            let msg = format!(
-              r#"Cannot resolve module "{}"{}"#,
-              module_url, referrer_suffix
-            );
-            DenoError::new(ErrorKind::NotFound, msg).into()
-          } else {
-            err
-          };
-          Err(err)
-        }
+        Ok(file)
       }
-    })
+      Err(err) => {
+        let err_kind = err.kind();
+        let referrer_suffix = if let Some(referrer) = maybe_referrer {
+          format!(r#" from "{}""#, referrer)
+        } else {
+          "".to_owned()
+        };
+        // Hack: Check error message for "--cached-only" because the kind
+        // conflicts with other errors.
+        let err = if err.to_string().contains("--cached-only") {
+          let msg = format!(
+            r#"Cannot find module "{}"{} in cache, --cached-only is specified"#,
+            module_url, referrer_suffix
+          );
+          DenoError::new(ErrorKind::NotFound, msg).into()
+        } else if err_kind == ErrorKind::NotFound {
+          let msg = format!(
+            r#"Cannot resolve module "{}"{}"#,
+            module_url, referrer_suffix
+          );
+          DenoError::new(ErrorKind::NotFound, msg).into()
+        } else {
+          err
+        };
+        Err(err)
+      }
+    }
   }
 
   /// This is main method that is responsible for fetching local or remote files.
@@ -213,54 +213,38 @@
   ///
   /// If `cached_only` is true then this method will fail for remote files
   /// not already cached.
-  fn get_source_file_async(
+  async fn get_source_file_async(
     &self,
     module_url: &Url,
     use_disk_cache: bool,
     no_remote: bool,
     cached_only: bool,
-  ) -> impl Future<Output = Result<SourceFile, ErrBox>> {
+  ) -> Result<SourceFile, ErrBox> {
     let url_scheme = module_url.scheme();
     let is_local_file = url_scheme == "file";
-
-    if let Err(err) = SourceFileFetcher::check_if_supported_scheme(&module_url)
-    {
-      return Either::Left(futures::future::err(err));
-    }
+    SourceFileFetcher::check_if_supported_scheme(&module_url)?;
 
     // Local files are always fetched from disk bypassing cache entirely.
     if is_local_file {
-      match self.fetch_local_file(&module_url) {
-        Ok(source_file) => {
-          return Either::Left(futures::future::ok(source_file));
-        }
-        Err(err) => {
-          return Either::Left(futures::future::err(err));
-        }
-      }
+      return self.fetch_local_file(&module_url);
     }
 
     // The file is remote, fail if `no_remote` is true.
     if no_remote {
-      return Either::Left(futures::future::err(
-        std::io::Error::new(
-          std::io::ErrorKind::NotFound,
-          format!(
-            "Not allowed to get remote file '{}'",
-            module_url.to_string()
-          ),
-        )
-        .into(),
-      ));
+      let e = std::io::Error::new(
+        std::io::ErrorKind::NotFound,
+        format!(
+          "Not allowed to get remote file '{}'",
+          module_url.to_string()
+        ),
+      );
+      return Err(e.into());
     }
 
     // Fetch remote file and cache on-disk for subsequent access
-    Either::Right(self.fetch_remote_source_async(
-      &module_url,
-      use_disk_cache,
-      cached_only,
-      10,
-    ))
+    self
+      .fetch_remote_source_async(&module_url, use_disk_cache, cached_only, 10)
+      .await
   }
 
   /// Fetch local source file.
@@ -355,16 +339,19 @@ impl SourceFileFetcher {
   }
 
   /// Asynchronously fetch remote source file specified by the URL following redirects.
+  ///
+  /// Note that this is a recursive method so it can't be "async", but rather
+  /// returns Pin<Box<..>>.
   fn fetch_remote_source_async(
     &self,
     module_url: &Url,
     use_disk_cache: bool,
     cached_only: bool,
     redirect_limit: i64,
-  ) -> Pin<Box<SourceFileFuture>> {
+  ) -> Pin<Box<dyn Future<Output = Result<SourceFile, ErrBox>>>> {
     if redirect_limit < 0 {
       let e = DenoError::new(ErrorKind::Http, "too many redirects".to_string());
-      return futures::future::err(e.into()).boxed();
+      return futures::future::err(e.into()).boxed_local();
     }
 
     let is_blacklisted =
@@ -373,13 +360,13 @@ impl SourceFileFetcher {
     if use_disk_cache && !is_blacklisted {
       match self.fetch_cached_remote_source(&module_url) {
         Ok(Some(source_file)) => {
-          return futures::future::ok(source_file).boxed();
+          return futures::future::ok(source_file).boxed_local();
         }
         Ok(None) => {
           // there's no cached version
         }
         Err(err) => {
-          return futures::future::err(err).boxed();
+          return futures::future::err(err).boxed_local();
         }
       }
     }
@@ -397,7 +384,7 @@ impl SourceFileFetcher {
         )
         .into(),
       )
-      .boxed();
+      .boxed_local();
     }
 
     eprintln!(
@@ -470,7 +457,7 @@ impl SourceFileFetcher {
       }
     };
 
-    f.boxed()
+    f.boxed_local()
   }
 }
diff --git a/cli/ops/compiler.rs b/cli/ops/compiler.rs
index f61bf68206fce1369210092abcc58b3cc441c02f..4d67a80095a2bf35caa172b3d3bbf188f41a9272 100644
--- a/cli/ops/compiler.rs
+++ b/cli/ops/compiler.rs
@@ -8,6 +8,7 @@ use crate::ops::json_op;
 use crate::state::State;
 use deno_core::Loader;
 use deno_core::*;
+use futures::future::FutureExt;
 
 pub fn init(i: &mut Isolate, s: &State) {
   i.register_op("cache", s.core_op(json_op(s.stateful_op(op_cache))));
@@ -43,16 +44,15 @@ fn op_cache(
 
   let module_specifier = ModuleSpecifier::resolve_url(&args.module_id)
     .expect("Should be valid module specifier");
 
-  state
-    .borrow()
-    .global_state
-    .ts_compiler
-    .cache_compiler_output(
-      &module_specifier,
-      &args.extension,
-      &args.contents,
-    )?;
+  let state_ = &state.borrow().global_state;
+  let ts_compiler = state_.ts_compiler.clone();
+  let fut = ts_compiler.cache_compiler_output(
+    &module_specifier,
+    &args.extension,
+    &args.contents,
+  );
+  futures::executor::block_on(fut)?;
 
   Ok(JsonOp::Sync(json!({})))
 }
@@ -102,21 +102,27 @@ fn op_fetch_source_files(
     None
   };
 
-  let mut futures = vec![];
   let global_state = state.borrow().global_state.clone();
-
-  for specifier in &args.specifiers {
-    let resolved_specifier =
-      ModuleSpecifier::resolve_url(&specifier).expect("Invalid specifier");
-    let fut = global_state
-      .file_fetcher
-      .fetch_source_file_async(&resolved_specifier, ref_specifier.clone());
-    futures.push(fut);
-  }
-
-  let future = Box::pin(async move {
-    let files = try_join_all(futures).await?;
-
+  let file_fetcher = global_state.file_fetcher.clone();
+  let specifiers = args.specifiers.clone();
+  let future = async move {
+    let file_futures: Vec<_> = specifiers
+      .into_iter()
+      .map(|specifier| {
+        let file_fetcher_ = file_fetcher.clone();
+        let ref_specifier_ = ref_specifier.clone();
+        async move {
+          let resolved_specifier = ModuleSpecifier::resolve_url(&specifier)
+            .expect("Invalid specifier");
+          file_fetcher_
+            .fetch_source_file_async(&resolved_specifier, ref_specifier_)
+            .await
+        }
+        .boxed_local()
+      })
+      .collect();
+
+    let files = try_join_all(file_futures).await?;
     // We want to get an array of futures that resolves to
     let v = files.into_iter().map(|f| {
      async {
@@ -156,7 +162,8 @@ fn op_fetch_source_files(
     let v = try_join_all(v).await?;
     Ok(v.into())
-  });
+  }
+  .boxed_local();
 
   Ok(JsonOp::Async(future))
 }
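
Note (not part of the patch): the diff leans on two patterns — keeping the recursive fetch_remote_source_async as a plain fn returning a boxed local (non-Send) future instead of an `async fn`, and bridging the now-async methods back into synchronous call sites with futures::executor::block_on (see op_cache and the ts.rs hunk at -576). A minimal self-contained sketch of both, assuming only the futures 0.3 crate; the names fetch_with_redirects and the example URL are hypothetical and not from the Deno codebase:

// Sketch only; hypothetical names, `futures = "0.3"` assumed in Cargo.toml.
use futures::future::FutureExt; // provides .boxed_local()
use futures::future::LocalBoxFuture;

// A recursive operation can't be a plain `async fn` (the returned future type
// would be infinitely sized), so it returns a boxed local future instead,
// mirroring the shape of fetch_remote_source_async in the patch.
fn fetch_with_redirects(
  url: String,
  redirect_limit: i64,
) -> LocalBoxFuture<'static, Result<String, String>> {
  if redirect_limit < 0 {
    // Early error path: wrap a ready future, just like the patch does.
    return futures::future::err("too many redirects".to_string()).boxed_local();
  }
  async move {
    if let Some(target) = url.strip_suffix("/redirect") {
      let target = target.to_string();
      // Recursive call goes through the boxed future, then is awaited.
      return fetch_with_redirects(target, redirect_limit - 1).await;
    }
    Ok(format!("contents of {}", url))
  }
  .boxed_local()
}

fn main() {
  // Synchronous call sites (as op_cache does in the patch) can drive the
  // async operation to completion with futures::executor::block_on.
  let fut =
    fetch_with_redirects("https://example.com/mod.ts/redirect".to_string(), 10);
  let contents = futures::executor::block_on(fut).unwrap();
  println!("{}", contents);
}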