src/docbuilder/mod.rs (3 additions & 1 deletion)
@@ -8,4 +8,6 @@ pub use self::rustwide_builder::{
 };

 #[cfg(test)]
-pub use self::rustwide_builder::RUSTDOC_JSON_COMPRESSION_ALGORITHMS;
+pub use self::rustwide_builder::{
+    RUSTDOC_JSON_COMPRESSION_ALGORITHMS, read_format_version_from_rustdoc_json,
+};
src/docbuilder/rustwide_builder.rs (1 addition & 1 deletion)
@@ -54,7 +54,7 @@ pub const RUSTDOC_JSON_COMPRESSION_ALGORITHMS: &[CompressionAlgorithm] =
     &[CompressionAlgorithm::Zstd, CompressionAlgorithm::Gzip];

 /// read the format version from a rustdoc JSON file.
-fn read_format_version_from_rustdoc_json(
+pub fn read_format_version_from_rustdoc_json(
     reader: impl std::io::Read,
 ) -> Result<RustdocJsonFormatVersion> {
     let reader = BufReader::new(reader);
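For context, rustdoc's JSON output carries a top-level `format_version` field, which is what this helper reads. A minimal sketch of such a reader, assuming a serde-based setup and a hypothetical stand-in for the crate's `RustdocJsonFormatVersion` type (the real implementation lives further down in rustwide_builder.rs), could look like:

    use std::io::BufReader;

    use anyhow::Result;
    use serde::Deserialize;

    /// hypothetical stand-in for the crate's format-version type
    #[derive(Debug, Deserialize, PartialEq, Eq)]
    struct RustdocJsonFormatVersion(u32);

    /// read only the top-level `format_version` field from a rustdoc JSON document
    fn read_format_version_from_rustdoc_json(
        reader: impl std::io::Read,
    ) -> Result<RustdocJsonFormatVersion> {
        let reader = BufReader::new(reader);

        // deserialize just the field we care about and ignore the rest
        #[derive(Deserialize)]
        struct Probe {
            format_version: u32,
        }

        let probe: Probe = serde_json::from_reader(reader)?;
        Ok(RustdocJsonFormatVersion(probe.format_version))
    }

A test could then feed it an in-memory document, e.g. `read_format_version_from_rustdoc_json(r#"{"format_version": 37}"#.as_bytes())`, which is why the re-export above is gated behind `#[cfg(test)]`.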
src/storage/mod.rs (13 additions & 5 deletions)
@@ -419,14 +419,22 @@ impl AsyncStorage {
         self.get_stream(path).await?.materialize(max_size).await
     }

-    /// get a decompressing stream to an object in storage
+    /// get a raw stream to an object in storage
+    ///
+    /// We don't decompress ourselves, S3 only decompresses with a correct
+    /// `Content-Encoding` header set, which we don't.
     #[instrument]
-    pub(crate) async fn get_stream(&self, path: &str) -> Result<StreamingBlob> {
-        let blob = match &self.backend {
+    pub(crate) async fn get_raw_stream(&self, path: &str) -> Result<StreamingBlob> {
+        match &self.backend {
             StorageBackend::Database(db) => db.get_stream(path, None).await,
             StorageBackend::S3(s3) => s3.get_stream(path, None).await,
-        }?;
-        Ok(blob.decompress().await?)
+        }
     }
+
+    /// get a decompressing stream to an object in storage.
+    #[instrument]
+    pub(crate) async fn get_stream(&self, path: &str) -> Result<StreamingBlob> {
+        Ok(self.get_raw_stream(path).await?.decompress().await?)
+    }

     /// get, decompress and materialize part of an object from store
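The practical effect of the split, as a hypothetical usage sketch (the caller below is illustrative, not code from this PR): a handler that wants to forward the stored, still-compressed bytes uses `get_raw_stream` and sets `Content-Encoding` itself, while existing callers keep the transparent decompression of `get_stream`.

    // Hypothetical caller inside docs.rs; only the two methods shown in the
    // diff above are assumed to exist.
    async fn example(storage: &AsyncStorage, path: &str) -> anyhow::Result<()> {
        // Raw bytes exactly as stored (possibly zstd- or gzip-compressed).
        // A download handler could stream these bytes unchanged and emit a
        // matching `Content-Encoding` header based on the stored algorithm.
        let _raw = storage.get_raw_stream(path).await?;

        // The pre-existing behaviour: fetch and transparently decompress.
        let _decompressed = storage.get_stream(path).await?;

        Ok(())
    }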
src/test/mod.rs (3 additions & 3 deletions)
@@ -154,7 +154,7 @@ pub(crate) trait AxumRouterTestExt {
         path: &str,
         cache_policy: cache::CachePolicy,
         config: &Config,
-    ) -> Result<()>;
+    ) -> Result<AxumResponse>;
     async fn assert_success(&self, path: &str) -> Result<AxumResponse>;
     async fn get(&self, path: &str) -> Result<AxumResponse>;
     async fn post(&self, path: &str) -> Result<AxumResponse>;
@@ -269,7 +269,7 @@ impl AxumRouterTestExt for axum::Router {
         path: &str,
         cache_policy: cache::CachePolicy,
         config: &Config,
-    ) -> Result<()> {
+    ) -> Result<AxumResponse> {
        let response = self.get(path).await?;
        let status = response.status();
        assert!(
@@ -278,7 +278,7 @@ impl AxumRouterTestExt for axum::Router {
            response.redirect_target().unwrap_or_default()
        );
        response.assert_cache_control(cache_policy, config);
-        Ok(())
+        Ok(response)
    }

    async fn get(&self, path: &str) -> Result<AxumResponse> {
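Returning the response lets a test chain further assertions after the cached-success check. A hypothetical test body (the helper name, path, cache policy and header value are assumptions for illustration; the method name sits outside the visible hunks) might read:

    // Hypothetical test snippet illustrating the new return value.
    let response = router
        .assert_success_cached("/crate/foo/1.0.0/json", cache::CachePolicy::ForeverInCdn, &config)
        .await?;
    assert_eq!(
        response.headers().get("content-type").unwrap(),
        "application/json"
    );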
src/web/extractors/mod.rs (1 addition & 1 deletion)
@@ -3,4 +3,4 @@ mod path;
 pub(crate) mod rustdoc;

 pub(crate) use context::DbConnection;
-pub(crate) use path::{Path, PathFileExtension};
+pub(crate) use path::{Path, WantedCompression};
src/web/extractors/path.rs (57 additions & 1 deletion)
@@ -1,11 +1,15 @@
 //! custom axum extractors for path parameters
-use crate::web::error::AxumNope;
+use crate::{
+    storage::{CompressionAlgorithm, compression::compression_from_file_extension},
+    web::error::AxumNope,
+};
+use anyhow::anyhow;
 use axum::{
     RequestPartsExt,
     extract::{FromRequestParts, OptionalFromRequestParts},
     http::request::Parts,
 };
 use derive_more::Deref;

 /// custom axum `Path` extractor that uses our own AxumNope::BadRequest
 /// as error response instead of a plain text "bad request"
@@ -97,6 +101,58 @@ where
     }
 }

+/// get wanted compression from file extension in path.
+///
+/// TODO: we could also additionally read the accept-encoding header here. But especially
+/// in combination with priorities it's complex to parse correctly. So for now only
+/// file extensions in the URL.
+/// When using Accept-Encoding, we also have to return "Vary: Accept-Encoding" to ensure
+/// the cache behaves correctly.
+#[derive(Debug, Clone, Deref, Default, PartialEq)]
+pub(crate) struct WantedCompression(pub(crate) CompressionAlgorithm);
+
+impl<S> FromRequestParts<S> for WantedCompression
+where
+    S: Send + Sync,
+{
+    type Rejection = AxumNope;
+
+    async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
+        parts
+            .extract::<Option<WantedCompression>>()
+            .await
+            .expect("can never fail")
+            .ok_or_else(|| AxumNope::BadRequest(anyhow!("compression extension not found in path")))
+    }
+}
+
+impl<S> OptionalFromRequestParts<S> for WantedCompression
+where
+    S: Send + Sync,
+{
+    type Rejection = AxumNope;
+
+    async fn from_request_parts(
+        parts: &mut Parts,
+        _state: &S,
+    ) -> Result<Option<Self>, Self::Rejection> {
+        if let Some(ext) = parts
+            .extract::<Option<PathFileExtension>>()
+            .await
+            .expect("can't fail")
+            .map(|ext| ext.0)
+        {
+            Ok(Some(WantedCompression(
+                compression_from_file_extension(&ext).ok_or_else(|| {
+                    AxumNope::BadRequest(anyhow!("unknown compression file extension: {}", ext))
+                })?,
+            )))
+        } else {
+            Ok(None)
+        }
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
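As a usage sketch (hypothetical handler, not part of this diff): an axum handler can take `Option<WantedCompression>` as an argument and receive `None` when the URL carries no known compression extension, or take `WantedCompression` directly when the extension is mandatory.

    // Hypothetical handler inside docs.rs's web module; the handler name and
    // response body are illustrative only.
    async fn download_rustdoc_json(
        // `None` when the request path has no (known) compression extension.
        wanted: Option<WantedCompression>,
    ) -> Result<String, AxumNope> {
        // Fall back to the Default algorithm when no extension was given.
        let algorithm: CompressionAlgorithm = wanted.map(|w| w.0).unwrap_or_default();
        Ok(format!(
            "would serve the blob with extension .{}",
            algorithm.file_extension()
        ))
    }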
src/web/extractors/rustdoc.rs (88 additions & 1 deletion)
@@ -2,6 +2,7 @@

 use crate::{
     db::BuildId,
+    storage::CompressionAlgorithm,
     web::{
         MatchedRelease, MetaData, ReqVersion, error::AxumNope, escaped_uri::EscapedURI,
         extractors::Path, url_decode,
@@ -573,6 +574,30 @@ impl RustdocParams {
         EscapedURI::from_path(path)
     }

+    pub(crate) fn json_download_url(
+        &self,
+        wanted_compression: Option<CompressionAlgorithm>,
+        format_version: Option<&str>,
+    ) -> EscapedURI {
+        let mut path = format!("/crate/{}/{}", self.name, self.req_version);
+
+        if let Some(doc_target) = self.doc_target() {
+            path.push_str(&format!("/{doc_target}"));
+        }
+
+        if let Some(format_version) = format_version {
+            path.push_str(&format!("/json/{format_version}"));
+        } else {
+            path.push_str("/json");
+        }
+
+        if let Some(wanted_compression) = wanted_compression {
+            path.push_str(&format!(".{}", wanted_compression.file_extension()));
+        }
+
+        EscapedURI::from_path(path)
+    }
+
     pub(crate) fn features_url(&self) -> EscapedURI {
         EscapedURI::from_path(format!(
             "/crate/{}/{}/features",
@@ -863,7 +888,7 @@ mod tests {
     use super::*;
     use crate::{
         db::types::version::Version,
-        test::{AxumResponseTestExt, AxumRouterTestExt},
+        test::{AxumResponseTestExt, AxumRouterTestExt, V1},
     };
     use axum::{Router, routing::get};
     use test_case::test_case;
@@ -1719,4 +1744,66 @@ mod tests {
             format!("/{KRATE}/0.14.0/{KRATE}/trait.Itertools.html")
         )
     }
+
+    #[test_case(None)]
+    #[test_case(Some(CompressionAlgorithm::Gzip))]
+    #[test_case(Some(CompressionAlgorithm::Zstd))]
+    fn test_plain_json_url(wanted_compression: Option<CompressionAlgorithm>) {
+        let mut params = RustdocParams::new(KRATE)
+            .with_page_kind(PageKind::Rustdoc)
+            .with_req_version(ReqVersion::Exact(V1));
+
+        assert_eq!(
+            params.json_download_url(wanted_compression, None),
+            format!(
+                "/crate/{KRATE}/{V1}/json{}",
+                wanted_compression
+                    .map(|c| format!(".{}", c.file_extension()))
+                    .unwrap_or_default()
+            )
+        );
+
+        params = params.with_doc_target("some-target");
+
+        assert_eq!(
+            params.json_download_url(wanted_compression, None),
+            format!(
+                "/crate/{KRATE}/{V1}/some-target/json{}",
+                wanted_compression
+                    .map(|c| format!(".{}", c.file_extension()))
+                    .unwrap_or_default()
+            )
+        );
+    }
+
+    #[test_case(None)]
+    #[test_case(Some(CompressionAlgorithm::Gzip))]
+    #[test_case(Some(CompressionAlgorithm::Zstd))]
+    fn test_plain_json_url_with_format(wanted_compression: Option<CompressionAlgorithm>) {
+        let mut params = RustdocParams::new(KRATE)
+            .with_page_kind(PageKind::Rustdoc)
+            .with_req_version(ReqVersion::Exact(V1));
+
+        assert_eq!(
+            params.json_download_url(wanted_compression, Some("42")),
+            format!(
+                "/crate/{KRATE}/{V1}/json/42{}",
+                wanted_compression
+                    .map(|c| format!(".{}", c.file_extension()))
+                    .unwrap_or_default()
+            )
+        );
+
+        params = params.with_doc_target("some-target");
+
+        assert_eq!(
+            params.json_download_url(wanted_compression, Some("42")),
+            format!(
+                "/crate/{KRATE}/{V1}/some-target/json/42{}",
+                wanted_compression
+                    .map(|c| format!(".{}", c.file_extension()))
+                    .unwrap_or_default()
+            )
+        );
+    }
 }
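Read together with `json_download_url` above, the generated download paths follow a simple pattern: crate name and version, an optional doc target, `/json`, an optional format version, and an optional compression extension. For illustration (crate name, version, target and extensions are example values, assuming the usual `.zst`/`.gz` file extensions):

    /crate/serde/1.0.0/json
    /crate/serde/1.0.0/json.zst
    /crate/serde/1.0.0/x86_64-unknown-linux-gnu/json/42.gz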