Merged
@@ -41,6 +41,7 @@
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.Optional;

@@ -87,11 +88,39 @@ public Uni<Response> get( @Parameter( in = PATH, required = true ) @PathParam( "
Optional<File> download = archiveService.getLocally( path );
if ( download.isPresent() && download.get().isFile() )
{
InputStream inputStream = FileUtils.openInputStream( download.get() );
final Response.ResponseBuilder builder = Response.ok( new TransferStreamingOutput( inputStream ) );
logger.debug( "Download path: {} from historical archive.", path );
publishTrackingEvent( path, id );
return Uni.createFrom().item( builder.build() );
Uni<Boolean> checksumValidation =
proxyService.validateChecksum( id, packageType, type, name, path, request );
sswguo (Member) commented:
Does this mean there will be an individual request for each checksum? Could we put the folo records into the archive and verify the checksums against those, to avoid the requests to Indy? Just an idea, not sure if that is possible.

yma955 (Member, Author) replied on Jan 8, 2025:
Hi @sswguo,
Local checksums are read directly from the historical unpack payload, so the cost is small. For remote checksums, we only download the one(s) that the local side also owns, and once validation is done no other checksums are downloaded.
As for the folo record you suggest: I'm not sure it can be retrieved before the build comes to an end, since this validation happens while a build is still in progress and its report is still being sealed. The folo requests here have to be direct content requests with the corresponding content path; I'm afraid there is no better way than this, because we need to know the new latest version of the content. Correct me if I'm wrong and there is a way to get the whole report for a build that is still in progress.

yma955 (Member, Author) added on Jan 8, 2025:
@sswguo This is a way to align versions between the sidecar (local downloads) and Indy content retrieval. Aligning the archive contents with the folo record of the finished build is already handled to some extent by the Archive service. From the https://issues.redhat.com/browse/MMENG-4251 description we know the contents are not aligned, but it is not clear which service the misalignment is on, so it is safer to fix both.

sswguo (Member) replied:
@yma96 thanks for the explanation, looks good to me. ;-)
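
For illustration, a minimal, self-contained sketch of the comparison strategy described in this thread: the local checksum comes from the historical build record (no network cost), and a small remote checksum file such as "<path>.sha1" is fetched only when a comparison is actually needed. The class, method, and interface names below are hypothetical, not part of the PR; the real change wires this through ReportService and ProxyService, as the diff that follows shows.

    import java.util.Map;

    // Illustrative sketch only -- ChecksumAlignmentSketch, RemoteFetcher and
    // localCopyIsCurrent are made-up names, not code from this PR.
    class ChecksumAlignmentSketch
    {
        // Checksums recorded for the previous build, keyed by content path
        // (in the PR these come from the historical archive record).
        private final Map<String, String> localSha1ByPath;

        ChecksumAlignmentSketch( Map<String, String> localSha1ByPath )
        {
            this.localSha1ByPath = localSha1ByPath;
        }

        // Returns true when the locally archived copy still matches the remote content.
        boolean localCopyIsCurrent( String path, RemoteFetcher fetcher ) throws Exception
        {
            String local = localSha1ByPath.get( path );
            if ( local == null )
            {
                return false; // nothing to compare against, fall back to the proxy download
            }
            // Only the small checksum file is downloaded, never the artifact itself.
            String remote = fetcher.fetchText( path + ".sha1" ).trim();
            return local.equals( remote );
        }

        interface RemoteFetcher
        {
            String fetchText( String checksumPath ) throws Exception;
        }
    }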

return checksumValidation.onItem().transform( result -> {
if ( result != null && result )
{
try
{
InputStream inputStream = FileUtils.openInputStream( download.get() );
final Response.ResponseBuilder builder =
Response.ok( new TransferStreamingOutput( inputStream ) );
logger.debug( "Download path: {} from historical archive.", path );
publishTrackingEvent( path, id );
return Uni.createFrom().item( builder.build() );
}
catch ( IOException e )
{
logger.error( "IO error for local file, path {}.", path, e );
}
}
else
{
try
{
logger.debug( "Checksum validation failed, download from proxy: {}.", path );
return proxyService.doGet( id, packageType, type, name, path, request );
}
catch ( Exception e )
{
logger.error( "Error for proxy download, path {}.", path, e );
}
}
return null;
} ).flatMap( response -> response );
}
else
{
140 changes: 132 additions & 8 deletions src/main/java/org/commonjava/util/sidecar/services/ProxyService.java
@@ -21,6 +21,7 @@
import kotlin.Pair;
import org.commonjava.util.sidecar.config.ProxyConfiguration;
import org.commonjava.util.sidecar.interceptor.ExceptionHandler;
import org.commonjava.util.sidecar.model.dto.HistoricalEntryDTO;
import org.commonjava.util.sidecar.util.OtelAdapter;
import org.commonjava.util.sidecar.util.ProxyStreamingOutput;
import org.commonjava.util.sidecar.util.UrlUtils;
@@ -31,7 +32,11 @@
import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;
import javax.ws.rs.core.Response;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.util.Map;

import static io.vertx.core.http.HttpMethod.HEAD;
import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR;
@@ -54,28 +59,34 @@ public class ProxyService
@Inject
OtelAdapter otel;

@Inject
ReportService reportService;

public Uni<Response> doHead( String trackingId, String packageType, String type, String name, String path,
HttpServerRequest request ) throws Exception
HttpServerRequest request )
throws Exception
{
String contentPath = UrlUtils.buildUrl( FOLO_TRACK_REST_BASE_PATH, trackingId, packageType, type, name, path );
return doHead( contentPath, request );
}

public Uni<Response> doHead( String path, HttpServerRequest request ) throws Exception
public Uni<Response> doHead( String path, HttpServerRequest request )
throws Exception
{
return normalizePathAnd( path, p -> classifier.classifyAnd( p, request, ( client, service ) -> wrapAsyncCall(
client.head( p, request ).call(), request.method() ) ) );
client.head( p, request ).call(), request.method() ) ) );
}

public Uni<Response> doGet( String trackingId, String packageType, String type, String name, String path,
HttpServerRequest request ) throws Exception
HttpServerRequest request )
throws Exception
{
String contentPath = UrlUtils.buildUrl( FOLO_TRACK_REST_BASE_PATH, trackingId, packageType, type, name, path );
return doGet( contentPath, request );
}

public Uni<Response> doGet( String path, HttpServerRequest request )
throws Exception
throws Exception
{
return normalizePathAnd( path, p -> classifier.classifyAnd( p, request, ( client, service ) -> wrapAsyncCall(
client.get( p, request ).call(), request.method() ) ) );
@@ -113,16 +124,87 @@ public Uni<Response> doPut( String path, InputStream is, HttpServerRequest reque
public Uni<Response> doDelete( String path, HttpServerRequest request ) throws Exception
{
return normalizePathAnd( path, p -> classifier.classifyAnd( p, request, ( client, service ) -> wrapAsyncCall(
client.delete( p ).headersFrom( request ).call(), request.method() ) ) );
client.delete( p ).headersFrom( request ).call(), request.method() ) ) );
}

public Uni<Response> wrapAsyncCall( WebClientAdapter.CallAdapter asyncCall, HttpMethod method )
{
Uni<Response> ret =
asyncCall.enqueue().onItem().transform( ( resp ) -> convertProxyResp( resp, method ) );
Uni<Response> ret = asyncCall.enqueue().onItem().transform( ( resp ) -> convertProxyResp( resp, method ) );
return ret.onFailure().recoverWithItem( this::handleProxyException );
}

public Uni<Boolean> validateChecksum( String trackingId, String packageType, String type, String name, String path,
HttpServerRequest request )
{
Map<String, String> localChecksums = getChecksums( path );
Uni<Boolean> resultUni = Uni.createFrom().item( false );

for ( String checksumType : localChecksums.keySet() )
{
String localChecksum = localChecksums.get( checksumType );
if ( localChecksum == null )
{
continue;
}
String checksumUrl = path + "." + checksumType;
resultUni = resultUni.onItem().call( () -> {
try
{
return downloadAndCompareChecksum( trackingId, packageType, type, name, checksumUrl, localChecksum,
request ).onItem().invoke( result -> {
if ( result != null && result )
{
// This is only used to break out of the loop and avoid unnecessary checksum downloads
logger.debug(
"Found the valid checksum compare result, stopping further checks, remote path {}",
checksumUrl );
throw new FoundValidChecksumException();
}
} );
}
catch ( Exception e )
{
logger.error( "Checksum download compare error for path: {}", checksumUrl, e );
}
return null;
} );
}
return resultUni.onFailure().recoverWithItem( false ).onItem().transform( result -> {
// If FoundValidChecksumException was caught, return true
return true;
} ); // If no valid checksum compare result found, return false
}

private Uni<Boolean> downloadAndCompareChecksum( String trackingId, String packageType, String type, String name,
String checksumUrl, String localChecksum,
HttpServerRequest request )
throws Exception
{
return doGet( trackingId, packageType, type, name, checksumUrl, request ).onItem().transform( response -> {
if ( response.getStatus() == Response.Status.OK.getStatusCode() )
{
ProxyStreamingOutput streamingOutput = (ProxyStreamingOutput) response.getEntity();
try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream())
{
streamingOutput.write( outputStream );
String remoteChecksum = outputStream.toString();
return localChecksum.equals( remoteChecksum );
}
catch ( IOException e )
{
logger.error( "Error to read remote checksum, path:{}.", checksumUrl, e );
return null;
}
}
else
{
logger.error( "Failed to download remote checksum for {}: HTTP {}.", checksumUrl,
response.getStatus() );
return null;
}
} );
}

/**
* Send status 500 with error message body.
* @param t error
@@ -162,4 +244,46 @@ private boolean isHeaderAllowed( Pair<? extends String, ? extends String> header
String key = header.getFirst();
return !FORBIDDEN_HEADERS.contains( key.toLowerCase() );
}

private Map<String, String> getChecksums( String path )
{
Map<String, String> result = new LinkedHashMap<>();
HistoricalEntryDTO entryDTO = reportService.getHistoricalContentMap().get( path );
if ( entryDTO != null )
{
result.put( ChecksumType.SHA1.getValue(), entryDTO.getSha1() );
result.put( ChecksumType.SHA256.getValue(), entryDTO.getSha256() );
result.put( ChecksumType.MD5.getValue(), entryDTO.getMd5() );
}

return result;
}

enum ChecksumType
{
SHA1( "sha1" ),
SHA256( "sha256" ),
MD5( "md5" );

private final String value;

ChecksumType( String value )
{
this.value = value;
}

public String getValue()
{
return value;
}
}

class FoundValidChecksumException
extends RuntimeException
{
public FoundValidChecksumException()
{
super( "Found a valid checksum, stopping further checks." );
}
}
}
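
FoundValidChecksumException above is not an error: it is a control-flow signal that stops the reactive chain as soon as one checksum matches, so the remaining checksum types are never downloaded. Below is a minimal, self-contained Mutiny sketch of that short-circuit pattern; the names and the recovery order here are illustrative, not copied from the PR.

    import io.smallrye.mutiny.Uni;
    import java.util.List;

    public class ShortCircuitDemo
    {
        // Hypothetical marker exception, playing the same role as FoundValidChecksumException.
        static class Found extends RuntimeException
        {
        }

        static Uni<Boolean> check( String candidate )
        {
            System.out.println( "checking " + candidate );
            return Uni.createFrom().item( "hit".equals( candidate ) );
        }

        public static void main( String[] args )
        {
            List<String> candidates = List.of( "miss-1", "hit", "miss-2" );

            Uni<Boolean> chain = Uni.createFrom().item( false );
            for ( String candidate : candidates )
            {
                chain = chain.onItem().call( () -> check( candidate ).onItem().invoke( ok -> {
                    if ( Boolean.TRUE.equals( ok ) )
                    {
                        // Turning the match into a failure skips every remaining onItem stage,
                        // so "miss-2" is never checked.
                        throw new Found();
                    }
                } ) );
            }

            Boolean result = chain.onFailure( Found.class ).recoverWithItem( true )
                                  .onFailure().recoverWithItem( false )
                                  .await().indefinitely();

            System.out.println( "match found: " + result ); // match found: true
        }
    }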
@@ -145,7 +145,10 @@ public void storeTrackedDownload( JsonObject message )
Quarkus.asyncExit();
}
}

}

public HashMap<String, HistoricalEntryDTO> getHistoricalContentMap()
{
return historicalContentMap;
}
}