Unverified commit 742a16b5, authored by Bartek Iwańczuk, committed by GitHub

refactor: rewrite file_fetcher to use async fns, lift blocking call (#4037)

Parent 90125566
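The diff below repeats one pattern: functions that hand-assembled futures with combinators (`Either`, `future::ok`, `future::err`) and returned `Pin<Box<dyn Future>>` become plain `async fn`s, while call sites that must stay synchronous now block explicitly. A minimal sketch of that before/after shape, with hypothetical names rather than the actual Deno signatures:

```rust
use futures::future::{self, Either};
use std::future::Future;

// Before: the future is assembled from combinators so that both branches
// share one concrete return type.
fn get_old(allowed: bool) -> impl Future<Output = Result<u32, String>> {
  if !allowed {
    return Either::Left(future::err::<u32, String>("not allowed".to_string()));
  }
  Either::Right(future::ok::<u32, String>(42))
}

// After: the same logic as an `async fn`; an early `return Err(..)` replaces
// `Either`/`future::err`, and callers simply `.await` the call.
async fn get_new(allowed: bool) -> Result<u32, String> {
  if !allowed {
    return Err("not allowed".to_string());
  }
  Ok(42)
}

fn main() {
  let old = futures::executor::block_on(get_old(true));
  let new = futures::executor::block_on(get_new(true));
  assert_eq!(old, new);
}
```

The `Either` wrapping existed only to give both branches a single concrete type; once the function is `async`, ordinary control flow and `?` do that job.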
@@ -451,7 +451,7 @@ impl TsCompiler {
///
/// Alongside the compiled file, a special metadata file is saved as well,
/// containing a hash that can be validated to avoid unnecessary recompilation.
fn cache_compiled_file(
async fn cache_compiled_file(
&self,
module_specifier: &ModuleSpecifier,
contents: &str,
@@ -459,15 +459,12 @@ impl TsCompiler {
let js_key = self
.disk_cache
.get_cache_filename_with_extension(module_specifier.as_url(), "js");
self
.disk_cache
.set(&js_key, contents.as_bytes())
.and_then(|_| {
self.disk_cache.set(&js_key, contents.as_bytes())?;
self.mark_compiled(module_specifier.as_url());
let source_file = self
.file_fetcher
.fetch_cached_source_file(&module_specifier)
.await
.expect("Source file not found");
let version_hash = source_code_version_hash(
@@ -487,7 +484,6 @@ impl TsCompiler {
&meta_key,
compiled_file_metadata.to_json_string()?.as_bytes(),
)
})
}
/// Return associated source map file for given TS module.
@@ -528,7 +524,7 @@ impl TsCompiler {
}
/// This method is called by TS compiler via an "op".
pub fn cache_compiler_output(
pub async fn cache_compiler_output(
&self,
module_specifier: &ModuleSpecifier,
extension: &str,
@@ -536,7 +532,7 @@ impl TsCompiler {
) -> std::io::Result<()> {
match extension {
".map" => self.cache_source_map(module_specifier, contents),
".js" => self.cache_compiled_file(module_specifier, contents),
".js" => self.cache_compiled_file(module_specifier, contents).await,
_ => unreachable!(),
}
}
@@ -576,9 +572,10 @@ impl TsCompiler {
script_name: &str,
) -> Option<SourceFile> {
if let Some(module_specifier) = self.try_to_resolve(script_name) {
return self
let fut = self
.file_fetcher
.fetch_cached_source_file(&module_specifier);
return futures::executor::block_on(fut);
}
None
......
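In `try_to_resolve_and_get_source_file` above (and in `op_cache` further down), a caller that must stay synchronous now drives the async `fetch_cached_source_file` with `futures::executor::block_on`, which is acceptable only because that code path never actually suspends. A minimal sketch of this bridge, using hypothetical names:

```rust
// Hypothetical stand-in for the async cache lookup: it only touches
// in-memory state and never awaits anything, so blocking on it is cheap.
async fn fetch_cached(key: &str) -> Option<String> {
  if key == "hit" {
    Some("cached contents".to_string())
  } else {
    None
  }
}

// A call site that must stay synchronous (like the source-map lookup)
// bridges into the async fn by blocking on the returned future.
fn lookup_sync(key: &str) -> Option<String> {
  futures::executor::block_on(fetch_cached(key))
}

fn main() {
  assert_eq!(lookup_sync("hit").as_deref(), Some("cached contents"));
  assert!(lookup_sync("miss").is_none());
}
```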
@@ -10,7 +10,6 @@ use crate::http_util::FetchOnceResult;
use crate::msg;
use deno_core::ErrBox;
use deno_core::ModuleSpecifier;
use futures::future::Either;
use futures::future::FutureExt;
use regex::Regex;
use reqwest;
@@ -42,9 +41,6 @@ pub struct SourceFile {
pub source_code: Vec<u8>,
}
pub type SourceFileFuture =
dyn Future<Output = Result<SourceFile, ErrBox>> + Send;
/// Simple struct implementing in-process caching to prevent multiple
/// fs reads/net fetches for same file.
#[derive(Clone, Default)]
@@ -114,8 +110,10 @@ impl SourceFileFetcher {
Ok(())
}
// TODO(bartlomieju): fetching cached resources should be done
// using blocking fs syscalls
/// Required for TS compiler and source maps.
pub fn fetch_cached_source_file(
pub async fn fetch_cached_source_file(
&self,
specifier: &ModuleSpecifier,
) -> Option<SourceFile> {
@@ -127,40 +125,43 @@ impl SourceFileFetcher {
// If file is not in memory cache check if it can be found
// in local cache - which effectively means trying to fetch
// using "--cached-only" flag. We can safely block on this
// future, because it doesn't do any asynchronous action
// in that path.
let fut = self.get_source_file_async(specifier.as_url(), true, false, true);
futures::executor::block_on(fut).ok()
// using "--cached-only" flag.
// It should be safe for the caller to block on this
// future, because it doesn't actually do any asynchronous
// action in that path.
self
.get_source_file_async(specifier.as_url(), true, false, true)
.await
.ok()
}
pub fn fetch_source_file_async(
pub async fn fetch_source_file_async(
&self,
specifier: &ModuleSpecifier,
maybe_referrer: Option<ModuleSpecifier>,
) -> Pin<Box<SourceFileFuture>> {
) -> Result<SourceFile, ErrBox> {
let module_url = specifier.as_url().to_owned();
debug!("fetch_source_file_async specifier: {} ", &module_url);
// Check if this file was already fetched and can be retrieved from in-process cache.
if let Some(source_file) = self.source_file_cache.get(specifier.to_string())
{
return Box::pin(async { Ok(source_file) });
let maybe_cached_file = self.source_file_cache.get(specifier.to_string());
if let Some(source_file) = maybe_cached_file {
return Ok(source_file);
}
let source_file_cache = self.source_file_cache.clone();
let specifier_ = specifier.clone();
let source_file = self.get_source_file_async(
let result = self
.get_source_file_async(
&module_url,
self.use_disk_cache,
self.no_remote,
self.cached_only,
);
)
.await;
Box::pin(async move {
match source_file.await {
match result {
Ok(mut file) => {
// TODO: move somewhere?
if file.source_code.starts_with(b"#!") {
@@ -199,7 +200,6 @@ impl SourceFileFetcher {
Err(err)
}
}
})
}
/// This is the main method responsible for fetching local or remote files.
@@ -213,54 +213,38 @@ impl SourceFileFetcher {
///
/// If `cached_only` is true then this method will fail for remote files
/// not already cached.
fn get_source_file_async(
async fn get_source_file_async(
&self,
module_url: &Url,
use_disk_cache: bool,
no_remote: bool,
cached_only: bool,
) -> impl Future<Output = Result<SourceFile, ErrBox>> {
) -> Result<SourceFile, ErrBox> {
let url_scheme = module_url.scheme();
let is_local_file = url_scheme == "file";
if let Err(err) = SourceFileFetcher::check_if_supported_scheme(&module_url)
{
return Either::Left(futures::future::err(err));
}
SourceFileFetcher::check_if_supported_scheme(&module_url)?;
// Local files are always fetched from disk bypassing cache entirely.
if is_local_file {
match self.fetch_local_file(&module_url) {
Ok(source_file) => {
return Either::Left(futures::future::ok(source_file));
}
Err(err) => {
return Either::Left(futures::future::err(err));
}
}
return self.fetch_local_file(&module_url);
}
// The file is remote, fail if `no_remote` is true.
if no_remote {
return Either::Left(futures::future::err(
std::io::Error::new(
let e = std::io::Error::new(
std::io::ErrorKind::NotFound,
format!(
"Not allowed to get remote file '{}'",
module_url.to_string()
),
)
.into(),
));
);
return Err(e.into());
}
// Fetch remote file and cache on-disk for subsequent access
Either::Right(self.fetch_remote_source_async(
&module_url,
use_disk_cache,
cached_only,
10,
))
self
.fetch_remote_source_async(&module_url, use_disk_cache, cached_only, 10)
.await
}
/// Fetch local source file.
@@ -355,16 +339,19 @@ impl SourceFileFetcher {
}
/// Asynchronously fetch a remote source file specified by the URL, following redirects.
///
/// Note that this is a recursive method, so it can't be "async"; it has to
/// return Pin<Box<..>> instead.
fn fetch_remote_source_async(
&self,
module_url: &Url,
use_disk_cache: bool,
cached_only: bool,
redirect_limit: i64,
) -> Pin<Box<SourceFileFuture>> {
) -> Pin<Box<dyn Future<Output = Result<SourceFile, ErrBox>>>> {
if redirect_limit < 0 {
let e = DenoError::new(ErrorKind::Http, "too many redirects".to_string());
return futures::future::err(e.into()).boxed();
return futures::future::err(e.into()).boxed_local();
}
let is_blacklisted =
@@ -373,13 +360,13 @@ impl SourceFileFetcher {
if use_disk_cache && !is_blacklisted {
match self.fetch_cached_remote_source(&module_url) {
Ok(Some(source_file)) => {
return futures::future::ok(source_file).boxed();
return futures::future::ok(source_file).boxed_local();
}
Ok(None) => {
// there's no cached version
}
Err(err) => {
return futures::future::err(err).boxed();
return futures::future::err(err).boxed_local();
}
}
}
@@ -397,7 +384,7 @@ impl SourceFileFetcher {
)
.into(),
)
.boxed();
.boxed_local();
}
eprintln!(
@@ -470,7 +457,7 @@ impl SourceFileFetcher {
}
};
f.boxed()
f.boxed_local()
}
}
......
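The doc comment on `fetch_remote_source_async` above explains why that one function still returns a boxed future: it recurses while following redirects, and a recursive `async fn` would need an infinitely sized state machine. A minimal sketch of the `boxed_local()` pattern, with hypothetical names and toy redirect logic:

```rust
use futures::future::FutureExt;
use std::future::Future;
use std::pin::Pin;

// A recursive fetch can't be an `async fn`: the recursive call would make the
// compiler-generated future infinitely sized. Returning a boxed future breaks
// the cycle; `boxed_local()` suffices when the future need not be `Send`.
fn fetch_following_redirects(
  url: String,
  redirect_limit: i64,
) -> Pin<Box<dyn Future<Output = Result<String, String>>>> {
  if redirect_limit < 0 {
    return futures::future::err::<String, String>("too many redirects".to_string())
      .boxed_local();
  }
  async move {
    // Toy redirect rule: any URL ending in "/redirect" points one hop deeper.
    if let Some(next) = url.strip_suffix("/redirect") {
      fetch_following_redirects(next.to_string(), redirect_limit - 1).await
    } else {
      Ok(format!("contents of {}", url))
    }
  }
  .boxed_local()
}

fn main() {
  let fut = fetch_following_redirects("https://example.com/redirect".to_string(), 10);
  assert_eq!(
    futures::executor::block_on(fut).unwrap(),
    "contents of https://example.com"
  );
}
```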
@@ -8,6 +8,7 @@ use crate::ops::json_op;
use crate::state::State;
use deno_core::Loader;
use deno_core::*;
use futures::future::FutureExt;
pub fn init(i: &mut Isolate, s: &State) {
i.register_op("cache", s.core_op(json_op(s.stateful_op(op_cache))));
@@ -43,16 +44,15 @@ fn op_cache(
let module_specifier = ModuleSpecifier::resolve_url(&args.module_id)
.expect("Should be valid module specifier");
state
.borrow()
.global_state
.ts_compiler
.cache_compiler_output(
let state_ = &state.borrow().global_state;
let ts_compiler = state_.ts_compiler.clone();
let fut = ts_compiler.cache_compiler_output(
&module_specifier,
&args.extension,
&args.contents,
)?;
);
futures::executor::block_on(fut)?;
Ok(JsonOp::Sync(json!({})))
}
@@ -102,21 +102,27 @@ fn op_fetch_source_files(
None
};
let mut futures = vec![];
let global_state = state.borrow().global_state.clone();
for specifier in &args.specifiers {
let resolved_specifier =
ModuleSpecifier::resolve_url(&specifier).expect("Invalid specifier");
let fut = global_state
.file_fetcher
.fetch_source_file_async(&resolved_specifier, ref_specifier.clone());
futures.push(fut);
let file_fetcher = global_state.file_fetcher.clone();
let specifiers = args.specifiers.clone();
let future = async move {
let file_futures: Vec<_> = specifiers
.into_iter()
.map(|specifier| {
let file_fetcher_ = file_fetcher.clone();
let ref_specifier_ = ref_specifier.clone();
async move {
let resolved_specifier = ModuleSpecifier::resolve_url(&specifier)
.expect("Invalid specifier");
file_fetcher_
.fetch_source_file_async(&resolved_specifier, ref_specifier_)
.await
}
.boxed_local()
})
.collect();
let future = Box::pin(async move {
let files = try_join_all(futures).await?;
let files = try_join_all(file_futures).await?;
// We want to get an array of futures that resolves to
let v = files.into_iter().map(|f| {
async {
@@ -156,7 +162,8 @@ fn op_fetch_source_files(
let v = try_join_all(v).await?;
Ok(v.into())
});
}
.boxed_local();
Ok(JsonOp::Async(future))
}
......
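`op_fetch_source_files` now collects one boxed future per requested specifier and awaits them together with `try_join_all` inside the single future returned as `JsonOp::Async`. A minimal sketch of that fan-out and join, with hypothetical names and no real fetching:

```rust
use futures::future::{try_join_all, FutureExt};

// Hypothetical stand-in for fetch_source_file_async.
async fn fetch_source(specifier: String) -> Result<String, String> {
  Ok(format!("// source of {}", specifier))
}

fn main() {
  let specifiers = vec!["a.ts".to_string(), "b.ts".to_string()];

  // One boxed future per specifier; boxing gives them a single type so they
  // can be collected into a Vec and joined.
  let file_futures: Vec<_> = specifiers
    .into_iter()
    .map(|specifier| fetch_source(specifier).boxed_local())
    .collect();

  // The single future the op hands back; it fails as soon as any fetch fails.
  let joined = async move {
    let files = try_join_all(file_futures).await?;
    Ok::<Vec<String>, String>(files)
  };

  assert_eq!(futures::executor::block_on(joined).unwrap().len(), 2);
}
```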