Merge pull request #305 from matter-labs/small-quality

refactor: many small code quality improvements
Lucille Blumire, 2025-04-17 17:43:22 +01:00 (committed by GitHub)
commit 9bd0e9c36e, GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)
30 changed files with 99 additions and 124 deletions

View file

@@ -65,6 +65,8 @@ impl Display for Rtmr {
     }
 }
 
+const CHUNK_SIZE: u64 = 1024 * 128;
+
 fn main() -> Result<()> {
     let args = Arguments::parse();
     tracing::subscriber::set_global_default(setup_logging(
@@ -127,7 +129,7 @@ fn main() -> Result<()> {
     let _ = device.seek(SeekFrom::Start(pstart))?;
 
     assert_eq!(header.part_size, 128);
-    assert!(header.num_parts < u8::MAX as _);
+    assert!(header.num_parts < u32::from(u8::MAX));
 
     let empty_bytes = [0u8; 128];
@@ -158,7 +160,7 @@ fn main() -> Result<()> {
     let section_table = pe.get_section_table()?;
 
-    for section in section_table.iter() {
+    for section in &section_table {
         debug!(section_name = ?section.name()?);
     }
@@ -175,14 +177,13 @@ fn main() -> Result<()> {
         .find(|s| s.name().unwrap().eq(sect))
         .ok_or(anyhow!("Failed to find section `{sect}`"))?;
-    let mut start = s.pointer_to_raw_data as u64;
-    let end = start + s.virtual_size as u64;
+    let mut start = u64::from(s.pointer_to_raw_data);
+    let end = start + u64::from(s.virtual_size);
     debug!(sect, start, end, len = (s.virtual_size));
 
     let mut hasher = Sha384::new();
-    const CHUNK_SIZE: u64 = 1024 * 128;
     loop {
         if start >= end {
             break;
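The cast changes in this file swap `as` for `From` conversions. `u64::from` and `u32::from` compile only when the widening is lossless, so a later type change cannot introduce silent truncation the way `as` can. A minimal standalone sketch of the pattern (the field names are illustrative stand-ins, not the real PE section-table API):

```rust
// Illustrative only: `pointer_to_raw_data` and `virtual_size` stand in for
// the PE section fields touched in the diff above.
fn offsets(pointer_to_raw_data: u32, virtual_size: u32) -> (u64, u64) {
    // `u64::from` is an infallible widening; it fails to compile if the
    // target type ever narrows, unlike `as`, which would truncate silently.
    let start = u64::from(pointer_to_raw_data);
    let end = start + u64::from(virtual_size);
    (start, end)
}

fn main() {
    let num_parts: u32 = 7;
    // Same idea as the assert in the hunk: widen the constant explicitly.
    assert!(num_parts < u32::from(u8::MAX));
    println!("{:?}", offsets(0x400, 0x1000));
}
```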

View file

@@ -61,13 +61,11 @@ pub fn extend_sha384(base: &str, extend: &str) -> Result<String> {
     let mut hasher = sha2::Sha384::new();
     hasher.update(pad::<48>(&hex::decode(base).context(format!(
-        "Failed to decode base digest '{}' - expected hex string",
-        base
+        "Failed to decode base digest '{base}' - expected hex string",
     ))?)?);
     hasher.update(pad::<48>(&hex::decode(extend).context(format!(
-        "Failed to decode extend digest '{}' - expected hex string",
-        extend
+        "Failed to decode extend digest '{extend}' - expected hex string",
     ))?)?);
     Ok(hex::encode(hasher.finalize()))
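The dominant change across this commit is inlining format arguments, which clippy flags as `uninlined_format_args`: since Rust 1.58, `format!`-style macros can capture in-scope variables by name. A short sketch showing that both spellings produce identical output:

```rust
fn main() {
    let base = "00aa";
    // Before: positional `{}` with the argument passed separately.
    let old = format!("Failed to decode base digest '{}' - expected hex string", base);
    // After: the variable is captured directly inside the braces.
    let new = format!("Failed to decode base digest '{base}' - expected hex string");
    assert_eq!(old, new);
}
```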

View file

@@ -26,7 +26,7 @@ async fn main() -> Result<()> {
         .context("failed to get quote and collateral")?;
     let base64_string = general_purpose::STANDARD.encode(report.quote.as_ref());
-    print!("{}", base64_string);
+    print!("{base64_string}");
     Ok(())
 }

View file

@@ -81,7 +81,7 @@ fn print_quote_verification_summary(quote_verification_result: &QuoteVerificationResult
     for advisory in advisories {
         println!("\tInfo: Advisory ID: {advisory}");
     }
-    println!("Quote verification result: {}", tcblevel);
+    println!("Quote verification result: {tcblevel}");
     println!("{:#}", &quote.report);
 }

View file

@@ -26,12 +26,10 @@ impl MainNodeClient {
     /// Create a new client for the main node
     pub fn new(rpc_url: Url, chain_id: u64) -> error::Result<Self> {
         let chain_id = L2ChainId::try_from(chain_id)
-            .map_err(|e| error::Error::Internal(format!("Invalid chain ID: {}", e)))?;
+            .map_err(|e| error::Error::Internal(format!("Invalid chain ID: {e}")))?;
         let node_client = NodeClient::http(rpc_url.into())
-            .map_err(|e| {
-                error::Error::Internal(format!("Failed to create JSON-RPC client: {}", e))
-            })?
+            .map_err(|e| error::Error::Internal(format!("Failed to create JSON-RPC client: {e}")))?
             .for_network(chain_id.into())
             .build();
@@ -46,13 +44,13 @@ impl JsonRpcClient for MainNodeClient {
             .get_l1_batch_details(batch_number)
             .rpc_context("get_l1_batch_details")
             .await
-            .map_err(|e| error::Error::JsonRpc(format!("Failed to get batch details: {}", e)))?
+            .map_err(|e| error::Error::JsonRpc(format!("Failed to get batch details: {e}")))?
             .ok_or_else(|| {
-                error::Error::JsonRpc(format!("No details found for batch #{}", batch_number))
+                error::Error::JsonRpc(format!("No details found for batch #{batch_number}"))
             })?;
         batch_details.base.root_hash.ok_or_else(|| {
-            error::Error::JsonRpc(format!("No root hash found for batch #{}", batch_number))
+            error::Error::JsonRpc(format!("No root hash found for batch #{batch_number}"))
         })
     }
 }

View file

@@ -190,15 +190,12 @@ impl VerifierConfig {
     pub fn new(args: VerifierConfigArgs) -> error::Result<Self> {
         let policy = if let Some(path) = &args.attestation_policy_file {
             let policy_content = fs::read_to_string(path).map_err(|e| {
-                error::Error::internal(format!("Failed to read attestation policy file: {}", e))
+                error::Error::internal(format!("Failed to read attestation policy file: {e}"))
             })?;
             let policy_config: AttestationPolicyConfig = serde_yaml::from_str(&policy_content)
                 .map_err(|e| {
-                    error::Error::internal(format!(
-                        "Failed to parse attestation policy file: {}",
-                        e
-                    ))
+                    error::Error::internal(format!("Failed to parse attestation policy file: {e}"))
                 })?;
             tracing::info!("Loaded attestation policy from file: {:?}", path);
@@ -263,7 +260,7 @@ fn decode_tdx_mrs(
         Some(mrs_array) => {
             let result = mrs_array
                 .into_iter()
-                .map(|strings| decode_and_combine_mrs(strings, bytes_length))
+                .map(|strings| decode_and_combine_mrs(&strings, bytes_length))
                 .collect::<Result<Vec<_>, _>>()?;
             Ok(Some(result))
         }
@@ -272,12 +269,12 @@ fn decode_tdx_mrs(
 // Helper function to decode and combine MRs
 fn decode_and_combine_mrs(
-    strings: [String; 5],
+    strings: &[String; 5],
     bytes_length: usize,
 ) -> Result<Bytes, hex::FromHexError> {
     let mut buffer = BytesMut::with_capacity(bytes_length * 5);
-    for s in &strings {
+    for s in strings {
         if s.len() > (bytes_length * 2) {
             return Err(hex::FromHexError::InvalidStringLength);
         }
@@ -295,19 +292,16 @@ fn parse_batch_range(s: &str) -> error::Result<(L1BatchNumber, L1BatchNumber)> {
             .map(L1BatchNumber::from)
             .map_err(|e| error::Error::internal(format!("Can't convert batch {s} to number: {e}")))
     };
-    match s.split_once('-') {
-        Some((start, end)) => {
-            let (start, end) = (parse(start)?, parse(end)?);
-            if start > end {
-                Err(error::Error::InvalidBatchRange(s.into()))
-            } else {
-                Ok((start, end))
-            }
-        }
-        None => {
-            let batch_number = parse(s)?;
-            Ok((batch_number, batch_number))
-        }
+    if let Some((start, end)) = s.split_once('-') {
+        let (start, end) = (parse(start)?, parse(end)?);
+        if start > end {
+            Err(error::Error::InvalidBatchRange(s.into()))
+        } else {
+            Ok((start, end))
+        }
+    } else {
+        let batch_number = parse(s)?;
+        Ok((batch_number, batch_number))
     }
 }
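The `parse_batch_range` rewrite replaces a two-arm `match` on `split_once` with `if let ... else`, removing one indentation level without changing behavior. A self-contained sketch of the same control flow, with the error type simplified to `String` (the real code uses the crate's `error::Error`):

```rust
fn parse_batch_range(s: &str) -> Result<(u32, u32), String> {
    let parse = |s: &str| {
        s.parse::<u32>()
            .map_err(|e| format!("Can't convert batch {s} to number: {e}"))
    };
    // `split_once` returns Some only when the separator is present.
    if let Some((start, end)) = s.split_once('-') {
        let (start, end) = (parse(start)?, parse(end)?);
        if start > end {
            Err(format!("invalid batch range: {s}"))
        } else {
            Ok((start, end))
        }
    } else {
        let batch_number = parse(s)?;
        Ok((batch_number, batch_number))
    }
}

fn main() {
    assert_eq!(parse_batch_range("42"), Ok((42, 42)));
    assert_eq!(parse_batch_range("7-9"), Ok((7, 9)));
    assert!(parse_batch_range("9-7").is_err());
}
```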

View file

@@ -31,13 +31,13 @@ impl fmt::Display for VerifierMode {
                 end_batch,
             } => {
                 if start_batch == end_batch {
-                    write!(f, "one-shot mode (batch {})", start_batch)
+                    write!(f, "one-shot mode (batch {start_batch})")
                 } else {
-                    write!(f, "one-shot mode (batches {}-{})", start_batch, end_batch)
+                    write!(f, "one-shot mode (batches {start_batch}-{end_batch})")
                 }
             }
             VerifierMode::Continuous { start_batch } => {
-                write!(f, "continuous mode (starting from batch {})", start_batch)
+                write!(f, "continuous mode (starting from batch {start_batch})")
             }
         }
     }
@@ -72,9 +72,9 @@ impl VerificationResult {
                 verified_count,
                 unverified_count,
             } => verified_count > unverified_count,
-            VerificationResult::Failure => false,
-            VerificationResult::Interrupted => false,
-            VerificationResult::NoProofsFound => false,
+            VerificationResult::Failure
+            | VerificationResult::Interrupted
+            | VerificationResult::NoProofsFound => false,
         }
     }
 }
@@ -89,8 +89,7 @@ impl fmt::Display for VerificationResult {
             } => {
                 write!(
                     f,
-                    "Partial Success ({} verified, {} failed)",
-                    verified_count, unverified_count
+                    "Partial Success ({verified_count} verified, {unverified_count} failed)"
                 )
             }
             VerificationResult::Failure => write!(f, "Failure"),
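`is_successful` now folds the three failure-like variants into a single or-pattern arm. A runnable sketch reconstructed from the variants visible in this diff (the count field types are assumed to be `u32`):

```rust
enum VerificationResult {
    Success,
    PartialSuccess { verified_count: u32, unverified_count: u32 },
    Failure,
    Interrupted,
    NoProofsFound,
}

impl VerificationResult {
    fn is_successful(&self) -> bool {
        match self {
            VerificationResult::Success => true,
            VerificationResult::PartialSuccess { verified_count, unverified_count } => {
                verified_count > unverified_count
            }
            // One or-pattern arm covers every failure-like variant.
            VerificationResult::Failure
            | VerificationResult::Interrupted
            | VerificationResult::NoProofsFound => false,
        }
    }
}

fn main() {
    assert!(VerificationResult::Success.is_successful());
    assert!(VerificationResult::PartialSuccess { verified_count: 3, unverified_count: 1 }.is_successful());
    assert!(!VerificationResult::Failure.is_successful());
    assert!(!VerificationResult::Interrupted.is_successful());
    assert!(!VerificationResult::NoProofsFound.is_successful());
}
```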

View file

@@ -96,7 +96,7 @@ impl Error {
 impl From<reqwest::Error> for Error {
     fn from(value: reqwest::Error) -> Self {
         Self::Http {
-            status_code: value.status().map(|v| v.as_u16()).unwrap_or(0),
+            status_code: value.status().map_or(0, |v| v.as_u16()),
             message: value.to_string(),
         }
     }
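`map_or(default, f)` collapses the `map(f).unwrap_or(default)` chain into one adaptor (clippy: `map_unwrap_or`). In isolation:

```rust
fn main() {
    let status: Option<u16> = None;
    // Before: two combinators.
    let a = status.map(|v| v + 1).unwrap_or(0);
    // After: one combinator; note the default comes first.
    let b = status.map_or(0, |v| v + 1);
    assert_eq!(a, b);
}
```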

View file

@@ -74,7 +74,7 @@ async fn main() -> Result<()> {
             },
             Err(e) => {
                 tracing::error!("Task panicked: {}", e);
-                Err(Error::internal(format!("Task panicked: {}", e)))
+                Err(Error::internal(format!("Task panicked: {e}")))
             }
         }
     },

View file

@@ -53,7 +53,7 @@ impl BatchProcessor {
         // Fetch proofs for the current batch across different TEE types
         let mut proofs = Vec::new();
-        for tee_type in self.config.args.tee_types.iter() {
+        for tee_type in self.config.args.tee_types.iter().copied() {
             match self
                 .proof_fetcher
                 .get_proofs(token, batch_number, tee_type)
@@ -68,7 +68,6 @@ impl BatchProcessor {
                         batch_number.0,
                         e
                     );
-                    continue;
                 }
             }
         }
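Two related cleanups here: `.iter().copied()` makes the loop yield `TeeType` values rather than `&TeeType` references (matching `get_proofs` taking the type by value in a later hunk), and the trailing `continue` goes away because it was already the last statement of the loop body. A sketch with a stand-in `Copy` enum:

```rust
#[derive(Clone, Copy, Debug)]
enum TeeType {
    Sgx,
    Tdx,
}

// Small Copy types are cheapest passed by value; `&TeeType` would add a
// pointer indirection for no benefit.
fn get_proofs(tee_type: TeeType) -> Vec<String> {
    vec![format!("{tee_type:?}-proof")]
}

fn main() {
    let tee_types = [TeeType::Sgx, TeeType::Tdx];
    // `.iter().copied()` turns the `&TeeType` items into owned `TeeType`s.
    for tee_type in tee_types.iter().copied() {
        println!("{:?}", get_proofs(tee_type));
    }
}
```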

View file

@@ -50,8 +50,9 @@ impl ContinuousProcessor {
                 match self.batch_processor.process_batch(token, batch).await {
                     Ok(result) => {
                         match result {
-                            VerificationResult::Success => success_count += 1,
-                            VerificationResult::PartialSuccess { .. } => success_count += 1,
+                            VerificationResult::Success | VerificationResult::PartialSuccess { .. } => {
+                                success_count += 1;
+                            }
                             VerificationResult::Failure => failure_count += 1,
                             VerificationResult::Interrupted => {
                                 results.push((current_batch, result));

View file

@@ -43,14 +43,14 @@ impl ProcessorFactory {
     /// Create a new processor based on the provided configuration
     pub fn create(config: VerifierConfig) -> Result<(ProcessorType, VerifierMode)> {
         let mode = if let Some((start, end)) = config.args.batch_range {
-            let processor = OneShotProcessor::new(config.clone(), start, end)?;
+            let processor = OneShotProcessor::new(config, start, end)?;
             let mode = VerifierMode::OneShot {
                 start_batch: start,
                 end_batch: end,
             };
             (ProcessorType::OneShot(processor), mode)
         } else if let Some(start) = config.args.continuous {
-            let processor = ContinuousProcessor::new(config.clone(), start)?;
+            let processor = ContinuousProcessor::new(config, start)?;
             let mode = VerifierMode::Continuous { start_batch: start };
             (ProcessorType::Continuous(processor), mode)
         } else {

View file

@@ -55,8 +55,9 @@ impl OneShotProcessor {
             let result = self.batch_processor.process_batch(token, batch).await?;
 
             match result {
-                VerificationResult::Success => success_count += 1,
-                VerificationResult::PartialSuccess { .. } => success_count += 1,
+                VerificationResult::Success | VerificationResult::PartialSuccess { .. } => {
+                    success_count += 1;
+                }
                 VerificationResult::Failure => failure_count += 1,
                 VerificationResult::Interrupted => {
                     results.push((batch_number, result));

View file

@@ -36,7 +36,7 @@ impl ProofFetcher {
         &self,
         token: &CancellationToken,
         batch_number: L1BatchNumber,
-        tee_type: &TeeType,
+        tee_type: TeeType,
     ) -> Result<Vec<Proof>> {
         let mut proofs_request = GetProofsRequest::new(batch_number, tee_type);
         let mut backoff = Duration::from_secs(1);

View file

@@ -21,7 +21,7 @@ impl ProofResponseParser {
             }
         }
-        return Err(error::Error::JsonRpc(format!("JSONRPC error: {:?}", error)));
+        return Err(error::Error::JsonRpc(format!("JSONRPC error: {error:?}")));
     }
 
     // Extract proofs from the result

View file

@@ -17,7 +17,7 @@ pub struct GetProofsRequest {
 impl GetProofsRequest {
     /// Create a new request for the given batch number
-    pub fn new(batch_number: L1BatchNumber, tee_type: &TeeType) -> Self {
+    pub fn new(batch_number: L1BatchNumber, tee_type: TeeType) -> Self {
         GetProofsRequest {
             jsonrpc: "2.0".to_string(),
             id: 1,

View file

@@ -17,7 +17,7 @@ impl AttestationVerifier {
         // Get current time for verification
         let unix_time: i64 = std::time::SystemTime::now()
             .duration_since(std::time::UNIX_EPOCH)
-            .map_err(|e| error::Error::internal(format!("Failed to get system time: {}", e)))?
+            .map_err(|e| error::Error::internal(format!("Failed to get system time: {e}")))?
             .as_secs() as _;
 
         // Verify the quote with the collateral

View file

@@ -48,7 +48,7 @@ impl<C: JsonRpcClient> BatchVerifier<C> {
         let mut total_proofs_count: u32 = 0;
         let mut verified_proofs_count: u32 = 0;
-        for proof in proofs.into_iter() {
+        for proof in proofs {
             if token.is_cancelled() {
                 tracing::warn!("Stop signal received during batch verification");
                 return Ok(BatchVerificationResult {
@@ -119,7 +119,7 @@ impl<C: JsonRpcClient> BatchVerifier<C> {
                 );
                 verified_proofs_count += 1;
             } else {
-                tracing::warn!(batch_no, proof.proved_at, tee_type, "Verification failed!",);
+                tracing::warn!(batch_no, proof.proved_at, tee_type, "Verification failed!");
             }
         }

View file

@@ -24,7 +24,7 @@ impl PolicyEnforcer {
         match &quote.report {
             Report::SgxEnclave(report_body) => {
                 // Validate TCB level
-                Self::validate_tcb_level(&attestation_policy.sgx_allowed_tcb_levels, tcblevel)?;
+                Self::validate_tcb_level(attestation_policy.sgx_allowed_tcb_levels, tcblevel)?;
 
                 // Validate SGX Advisories
                 for advisory in &quote_verification_result.advisories {
@@ -50,7 +50,7 @@ impl PolicyEnforcer {
             }
             Report::TD10(report_body) => {
                 // Validate TCB level
-                Self::validate_tcb_level(&attestation_policy.tdx_allowed_tcb_levels, tcblevel)?;
+                Self::validate_tcb_level(attestation_policy.tdx_allowed_tcb_levels, tcblevel)?;
 
                 // Validate TDX Advisories
                 for advisory in &quote_verification_result.advisories {
@@ -74,7 +74,7 @@ impl PolicyEnforcer {
             }
             Report::TD15(report_body) => {
                 // Validate TCB level
-                Self::validate_tcb_level(&attestation_policy.tdx_allowed_tcb_levels, tcblevel)?;
+                Self::validate_tcb_level(attestation_policy.tdx_allowed_tcb_levels, tcblevel)?;
 
                 // Validate TDX Advisories
                 for advisory in &quote_verification_result.advisories {
@@ -101,14 +101,10 @@ impl PolicyEnforcer {
     }
 
     /// Helper method to validate TCB levels
-    fn validate_tcb_level(
-        allowed_levels: &EnumSet<TcbLevel>,
-        actual_level: TcbLevel,
-    ) -> Result<()> {
+    fn validate_tcb_level(allowed_levels: EnumSet<TcbLevel>, actual_level: TcbLevel) -> Result<()> {
         if !allowed_levels.contains(actual_level) {
             let error_msg = format!(
-                "Quote verification failed: TCB level mismatch (expected one of: {:?}, actual: {})",
-                allowed_levels, actual_level
+                "Quote verification failed: TCB level mismatch (expected one of: {allowed_levels:?}, actual: {actual_level})",
             );
             return Err(Error::policy_violation(error_msg));
         }
@@ -117,7 +113,7 @@ impl PolicyEnforcer {
     /// Helper method to build combined TDX measurement register
     fn build_tdx_mr<const N: usize>(parts: [&[u8]; N]) -> Vec<u8> {
-        parts.into_iter().flatten().cloned().collect()
+        parts.into_iter().flatten().copied().collect()
     }
 
     /// Check if a policy value matches the actual value
@@ -152,8 +148,7 @@ impl PolicyEnforcer {
                 .collect::<Vec<_>>()
                 .join(", ");
             let error_msg = format!(
-                "Quote verification failed: {} mismatch (expected one of: [ {} ], actual: {:x})",
-                field_name, valid_values, actual_value
+                "Quote verification failed: {field_name} mismatch (expected one of: [ {valid_values} ], actual: {actual_value:x})"
             );
             return Err(Error::policy_violation(error_msg));
         }
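`EnumSet<TcbLevel>` is `Copy`, so `validate_tcb_level` can take it by value instead of by reference; likewise `build_tdx_mr` switches `.cloned()` to `.copied()`, which is identical for `u8` but states the `Copy` bound explicitly. A dependency-free sketch of the `build_tdx_mr` shape:

```rust
// Concatenates N byte slices into one Vec. `.copied()` on an iterator of
// `&u8` behaves like `.cloned()` but only compiles for Copy items.
fn build_tdx_mr<const N: usize>(parts: [&[u8]; N]) -> Vec<u8> {
    parts.into_iter().flatten().copied().collect()
}

fn main() {
    let combined = build_tdx_mr([&[0xaa, 0xbb][..], &[0xcc][..]]);
    assert_eq!(combined, vec![0xaa, 0xbb, 0xcc]);
}
```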

View file

@@ -30,9 +30,8 @@ impl SignatureVerifier {
         let report_data_bytes = quote_verification_result.quote.get_report_data();
         tracing::trace!(?report_data_bytes);
-        let report_data = ReportData::try_from(report_data_bytes).map_err(|e| {
-            error::Error::internal(format!("Could not convert to ReportData: {}", e))
-        })?;
+        let report_data = ReportData::try_from(report_data_bytes)
+            .map_err(|e| error::Error::internal(format!("Could not convert to ReportData: {e}")))?;
         Self::verify(&report_data, &root_hash, signature)
     }
@@ -100,7 +99,7 @@ impl SignatureVerifier {
                 })?;
                 recover_signer(&signature_bytes, &root_hash_msg).map_err(|e| {
-                    error::Error::signature_verification(format!("Failed to recover signer: {}", e))
+                    error::Error::signature_verification(format!("Failed to recover signer: {e}"))
                 })?
             }
             // Any other length is invalid

View file

@@ -25,20 +25,18 @@ impl ApiClient {
         if technology == "tdx" && self.api_version == ApiVersion::V3 {
             return Err(IntelApiError::UnsupportedApiVersion(format!(
-                "TDX endpoint /{}/{}/{} requires API v4",
-                service, endpoint, technology
+                "TDX endpoint /{service}/{endpoint}/{technology} requires API v4",
             )));
         }
         if technology == "sgx" && service == "registration" {
             // Registration paths are fixed at v1 regardless of client's api_version
-            return Ok(format!("/sgx/registration/v1/{}", endpoint).replace("//", "/"));
+            return Ok(format!("/sgx/registration/v1/{endpoint}").replace("//", "/"));
         }
 
-        Ok(format!(
-            "/{}/certification/{}/{}/{}",
-            technology, api_segment, service, endpoint
-        )
-        .replace("//", "/"))
+        Ok(
+            format!("/{technology}/certification/{api_segment}/{service}/{endpoint}")
+                .replace("//", "/"),
+        )
     }
 
     /// Helper to add an optional header if the string is non-empty.
@@ -187,25 +185,22 @@ impl ApiClient {
     /// Ensures the client is configured for API v4, otherwise returns an error.
     pub(super) fn ensure_v4_api(&self, function_name: &str) -> Result<(), IntelApiError> {
         if self.api_version != ApiVersion::V4 {
-            Err(IntelApiError::UnsupportedApiVersion(format!(
-                "{} requires API v4",
-                function_name
-            )))
-        } else {
-            Ok(())
+            return Err(IntelApiError::UnsupportedApiVersion(format!(
+                "{function_name} requires API v4",
+            )));
         }
+        Ok(())
     }
 
     /// Checks if a V4-only parameter is provided with a V3 API version.
-    pub(super) fn check_v4_only_param<T>(
+    pub(super) fn check_v4_only_param<T: Copy>(
         &self,
         param_value: Option<T>,
         param_name: &str,
     ) -> Result<(), IntelApiError> {
         if self.api_version == ApiVersion::V3 && param_value.is_some() {
             Err(IntelApiError::UnsupportedApiVersion(format!(
-                "'{}' parameter requires API v4",
-                param_name
+                "'{param_name}' parameter requires API v4",
             )))
         } else {
             Ok(())
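`ensure_v4_api` trades an `if/else` where both arms return for an early-return guard, keeping the success path flush left. The same shape with stand-in types (`IntelApiError` replaced by `String`):

```rust
#[derive(PartialEq)]
enum ApiVersion {
    V3,
    V4,
}

struct ApiClient {
    api_version: ApiVersion,
}

impl ApiClient {
    fn ensure_v4_api(&self, function_name: &str) -> Result<(), String> {
        // Guard clause: bail out early, then fall through to the success case.
        if self.api_version != ApiVersion::V4 {
            return Err(format!("{function_name} requires API v4"));
        }
        Ok(())
    }
}

fn main() {
    let v3 = ApiClient { api_version: ApiVersion::V3 };
    assert!(v3.ensure_v4_api("get_quote").is_err());
    let v4 = ApiClient { api_version: ApiVersion::V4 };
    assert!(v4.ensure_v4_api("get_quote").is_ok());
}
```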

View file

@@ -312,13 +312,13 @@ fn init_telemetry(
     if config.logging.console {
         // Optionally configure JSON logging
         if config.logging.json {
-            subscriber.with(fmt_layer.json()).init()
+            subscriber.with(fmt_layer.json()).init();
         } else {
-            subscriber.with(fmt_layer.pretty()).init()
+            subscriber.with(fmt_layer.pretty()).init();
         }
     } else {
-        subscriber.init()
-    };
+        subscriber.init();
+    }
     Ok(())
 }

View file

@@ -15,7 +15,7 @@ use sha3::{Digest, Keccak256};
 pub fn recover_signer(sig: &[u8; 65], root_hash: &Message) -> Result<[u8; 20]> {
     let sig = RecoverableSignature::from_compact(
         &sig[0..64],
-        RecoveryId::try_from(sig[64] as i32 - 27)?,
+        RecoveryId::try_from(i32::from(sig[64]) - 27)?,
     )?;
     let public = SECP256K1.recover_ecdsa(root_hash, &sig)?;
     Ok(public_key_to_ethereum_address(&public))
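Context for the `- 27` above: in the classic Ethereum signature encoding, byte 65 is `v`, customarily 27 or 28, and the secp256k1 recovery id is `v - 27`. A hypothetical, dependency-free helper showing just that arithmetic (the real code feeds the result straight into `RecoveryId::try_from`):

```rust
// Hypothetical helper for illustration; not part of the diff above.
fn recovery_id_from_v(v: u8) -> Result<i32, String> {
    // `i32::from(u8)` is a lossless widening; clippy prefers it over `as`,
    // which can silently truncate in other type combinations.
    let id = i32::from(v) - 27;
    if (0..=1).contains(&id) {
        Ok(id)
    } else {
        Err(format!("unexpected recovery byte: {v}"))
    }
}

fn main() {
    assert_eq!(recovery_id_from_v(27), Ok(0));
    assert_eq!(recovery_id_from_v(28), Ok(1));
    assert!(recovery_id_from_v(2).is_err());
}
```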

View file

@@ -53,10 +53,7 @@ pub fn setup_logging(
         .try_from_env()
         .unwrap_or(match *log_level {
             LevelFilter::OFF => EnvFilter::new("off"),
-            _ => EnvFilter::new(format!(
-                "warn,{crate_name}={log_level},teepot={log_level}",
-                log_level = log_level
-            )),
+            _ => EnvFilter::new(format!("warn,{crate_name}={log_level},teepot={log_level}")),
         });
 
     let fmt_layer = tracing_subscriber::fmt::layer()

View file

@@ -37,8 +37,7 @@ pub enum ReportData {
 fn report_data_to_bytes(data: &[u8], version: u8) -> [u8; REPORT_DATA_LENGTH] {
     debug_assert!(
         data.len() < REPORT_DATA_LENGTH, // Ensure there is space for the version byte
-        "Data length exceeds maximum of {} bytes",
-        REPORT_DATA_LENGTH
+        "Data length exceeds maximum of {REPORT_DATA_LENGTH} bytes",
     );
     let mut bytes = [0u8; REPORT_DATA_LENGTH];
     bytes[..data.len()].copy_from_slice(data);

View file

@@ -104,7 +104,7 @@ pub trait QuoteContextErr {
 impl<T, E: std::fmt::Display> QuoteContextErr for Result<T, E> {
     type Ok = T;
     fn str_context<I: std::fmt::Display>(self, msg: I) -> Result<T, QuoteError> {
-        self.map_err(|e| QuoteError::Unexpected(format!("{}: {}", msg, e)))
+        self.map_err(|e| QuoteError::Unexpected(format!("{msg}: {e}")))
     }
 }

View file

@@ -31,9 +31,9 @@ use std::{
 use tracing::trace;
 
 #[allow(missing_docs)]
-pub const TEE_TYPE_SGX: u32 = 0x00000000;
+pub const TEE_TYPE_SGX: u32 = 0x0000_0000;
 #[allow(missing_docs)]
-pub const TEE_TYPE_TDX: u32 = 0x00000081;
+pub const TEE_TYPE_TDX: u32 = 0x0000_0081;
 #[allow(missing_docs)]
 pub const BODY_SGX_ENCLAVE_REPORT_TYPE: u16 = 1;
@@ -583,7 +583,7 @@ impl Display for TEEType {
             TEEType::TDX => "tdx",
             TEEType::SNP => "snp",
         };
-        write!(f, "{}", str)
+        write!(f, "{str}")
     }
 }

View file

@@ -24,7 +24,7 @@ fn extract_header_value(
         .ok_or_else(|| QuoteError::Unexpected(format!("Missing required header: {header_name}")))?
         .to_str()
         .map_err(|e| QuoteError::Unexpected(format!("Invalid header value: {e}")))
-        .map(|val| val.to_string())
+        .map(str::to_string)
 }
 
 /// Fetch collateral data from Intel's Provisioning Certification Service
@@ -74,14 +74,14 @@ pub(crate) fn get_collateral(quote: &[u8]) -> Result<Collateral, QuoteError> {
     let (collateral, pck_crl, pck_issuer_chain) = result;
 
     // Convert QuoteCollateralV3 to Collateral
-    convert_to_collateral(collateral, pck_crl, pck_issuer_chain)
+    convert_to_collateral(collateral, &pck_crl, &pck_issuer_chain)
 }
 
 // Helper function to convert QuoteCollateralV3 to Collateral
 fn convert_to_collateral(
     collateral: QuoteCollateralV3,
-    pck_crl: String,
-    pck_issuer_chain: Bytes,
+    pck_crl: &str,
+    pck_issuer_chain: &[u8],
 ) -> Result<Collateral, QuoteError> {
     let QuoteCollateralV3 {
         tcb_info_issuer_chain,
@@ -119,7 +119,7 @@ fn convert_to_collateral(
         root_ca_crl: Box::new([]),
 
         // Converted values
-        pck_crl_issuer_chain: pck_issuer_chain.as_ref().into(),
+        pck_crl_issuer_chain: pck_issuer_chain.into(),
         pck_crl: pck_crl.as_bytes().into(),
         tcb_info_issuer_chain: to_bytes_with_nul(tcb_info_issuer_chain, "tcb_info_issuer_chain")?,
         tcb_info: to_bytes_with_nul(tcb_info_json, "tcb_info")?,
@@ -134,14 +134,14 @@ fn convert_to_collateral(
 /// Split the last zero byte
 fn get_str_from_bytes(bytes: &[u8], context: &str) -> Result<String, QuoteError> {
     let c_str = CStr::from_bytes_until_nul(bytes)
-        .str_context(format!("Failed to extract CString: {}", context))?;
+        .str_context(format!("Failed to extract CString: {context}"))?;
     Ok(c_str.to_string_lossy().into_owned())
 }
 
 /// Parse JSON field from collateral data
 fn parse_json_field(data: &[u8], context: &str) -> Result<serde_json::Value, QuoteError> {
     serde_json::from_str(&get_str_from_bytes(data, context)?)
-        .str_context(format!("Failed to parse JSON: {}", context))
+        .str_context(format!("Failed to parse JSON: {context}"))
 }
 
 /// Convert Collateral to QuoteCollateralV3
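`convert_to_collateral` now borrows `&str` and `&[u8]` instead of consuming `String` and `Bytes`, so callers keep ownership and the signature accepts more input types through deref coercion. A minimal sketch of the signature change (types and return value are stand-ins):

```rust
// Taking `&str` / `&[u8]` is the more general signature: it accepts
// `String`, `&str`, `Vec<u8>`, `Bytes`, etc. without forcing the caller
// to give up ownership.
fn convert(pck_crl: &str, pck_issuer_chain: &[u8]) -> (Vec<u8>, Vec<u8>) {
    (pck_crl.as_bytes().to_vec(), pck_issuer_chain.to_vec())
}

fn main() {
    let crl = String::from("-----BEGIN X509 CRL-----");
    let chain = vec![0x30, 0x82];
    // The caller still owns `crl` and `chain` after this call.
    let (crl_bytes, chain_bytes) = convert(&crl, &chain);
    assert_eq!(crl_bytes.len(), crl.len());
    assert_eq!(chain_bytes, chain);
}
```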

View file

@@ -38,15 +38,14 @@ impl FromStr for TcbLevel {
     fn from_str(s: &str) -> Result<Self, Self::Err> {
         match s.to_ascii_lowercase().as_str() {
-            "ok" => Ok(TcbLevel::Ok),
-            "uptodate" => Ok(TcbLevel::Ok),
+            "ok" | "uptodate" => Ok(TcbLevel::Ok),
             "configneeded" => Ok(TcbLevel::ConfigNeeded),
             "configandswhardeningneeded" => Ok(TcbLevel::ConfigAndSwHardeningNeeded),
             "swhardeningneeded" => Ok(TcbLevel::SwHardeningNeeded),
             "outofdate" => Ok(TcbLevel::OutOfDate),
             "outofdateconfigneeded" => Ok(TcbLevel::OutOfDateConfigNeeded),
             "invalid" => Ok(TcbLevel::Invalid),
-            _ => Err(format!("Invalid TCB level: {}", s)),
+            _ => Err(format!("Invalid TCB level: {s}")),
         }
     }
 }
@@ -72,8 +71,8 @@ pub fn parse_tcb_levels(
     let mut set = EnumSet::new();
     for level_str in s.split(',') {
         let level_str = level_str.trim();
-        let level = TcbLevel::from_str(level_str)
-            .map_err(|_| format!("Invalid TCB level: {}", level_str))?;
+        let level =
+            TcbLevel::from_str(level_str).map_err(|_| format!("Invalid TCB level: {level_str}"))?;
         set.insert(level);
     }
     Ok(set)
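Merging `"ok"` and `"uptodate"` into one arm is behavior-preserving; both already produced `TcbLevel::Ok`. A trimmed, runnable sketch of the alias-tolerant parser, substituting `HashSet` for the crate's `EnumSet` to stay dependency-free:

```rust
use std::{collections::HashSet, str::FromStr};

#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]
enum TcbLevel {
    Ok,
    OutOfDate,
    Invalid,
}

impl FromStr for TcbLevel {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s.to_ascii_lowercase().as_str() {
            // Two accepted spellings, one arm.
            "ok" | "uptodate" => Ok(TcbLevel::Ok),
            "outofdate" => Ok(TcbLevel::OutOfDate),
            "invalid" => Ok(TcbLevel::Invalid),
            _ => Err(format!("Invalid TCB level: {s}")),
        }
    }
}

fn parse_tcb_levels(s: &str) -> Result<HashSet<TcbLevel>, String> {
    s.split(',').map(|l| l.trim().parse()).collect()
}

fn main() {
    let set = parse_tcb_levels("Ok, UpToDate, OutOfDate").unwrap();
    assert_eq!(set.len(), 2); // "Ok" and "UpToDate" collapse to one entry
}
```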

View file

@@ -50,8 +50,8 @@ pub struct Author {
 unsafe impl Zeroable for Author {}
 
 impl Author {
-    const HEADER1: [u8; 16] = 0x06000000E10000000000010000000000u128.to_be_bytes();
-    const HEADER2: [u8; 16] = 0x01010000600000006000000001000000u128.to_be_bytes();
+    const HEADER1: [u8; 16] = 0x0600_0000_E100_0000_0000_0100_0000_0000u128.to_be_bytes();
+    const HEADER2: [u8; 16] = 0x0101_0000_6000_0000_6000_0000_0100_0000u128.to_be_bytes();
 
     #[allow(clippy::unreadable_literal)]
     /// Creates a new Author from a date and software defined value.
@@ -245,7 +245,7 @@ impl Digest for S256Digest {
     #[inline]
     fn update(&mut self, bytes: &[u8]) {
-        self.0.update(bytes)
+        self.0.update(bytes);
     }
 
     #[inline]