Commit fdcd528

fmt

marirs committed Feb 17, 2024
1 parent 777289d
Showing 5 changed files with 79 additions and 43 deletions.
21 changes: 14 additions & 7 deletions examples/capa_cli.rs
@@ -1,16 +1,16 @@
-use std::fs;
 use capa::FileCapabilities;
 use clap::Parser;
 use prettytable::{color, format::Alignment, Attr, Cell, Row, Table};
 use serde_json::{to_value, Map, Value};
+use std::fs;
 use std::time::Instant;
 
 #[derive(Parser)]
 #[clap(
-author,
-version,
-about,
-long_about = "Find Capabilities of a given file!"
+    author,
+    version,
+    about,
+    long_about = "Find Capabilities of a given file!"
 )]
 struct CliOpts {
     /// File to analyse
@@ -26,7 +26,12 @@ struct CliOpts {
     #[clap(short = 'o', long, value_name = "JSON_PATH")]
     output: Option<String>,
     /// map_features
-    #[clap(short = 'm', long, value_name = "MAP_FEATURES", default_value = "false")]
+    #[clap(
+        short = 'm',
+        long,
+        value_name = "MAP_FEATURES",
+        default_value = "false"
+    )]
     map_features: bool,
     /// filter map_features
     #[clap(short = 'f', long, value_name = "FILTER_MAP_FEATURES")]
@@ -118,7 +123,9 @@ fn main() {
         }
     }
     if let Some(json_path) = json_path {
-        let json = s.serialize_file_capabilities(cli.filter_map_features).unwrap();
+        let json = s
+            .serialize_file_capabilities(cli.filter_map_features)
+            .unwrap();
         fs::write(json_path.clone(), json).expect("Unable to write file");
         println!("Analysis result saved in JSON format at: {}", json_path);
     }
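The options reformatted above keep their behaviour; for context, a minimal sketch of the same clap derive pattern. Field names here are illustrative, and the `default_value = "false"` on a bool flag simply mirrors what the diff shows, assuming the same clap version the crate uses:

    use clap::Parser;

    /// Find Capabilities of a given file!
    #[derive(Parser)]
    #[clap(author, version, about)]
    struct Opts {
        /// File to analyse
        file: String,
        /// map_features
        #[clap(short = 'm', long, default_value = "false")]
        map_features: bool,
    }

    fn main() {
        let opts = Opts::parse();
        println!("analysing {} (map_features: {})", opts.file, opts.map_features);
    }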
17 changes: 10 additions & 7 deletions src/extractor/dnfile.rs
@@ -175,7 +175,7 @@ impl super::Extractor for Extractor {
                 OpCodeValue::Jmp,
                 OpCodeValue::Newobj,
             ]
-                .contains(&insn.opcode.value)
+            .contains(&insn.opcode.value)
             {
                 continue;
             }
@@ -214,11 +214,11 @@ impl super::Extractor for Extractor {
             self.extract_function_call_from_features(&f)?,
             self.extract_recurcive_call_features(&f)?,
         ]
-            .into_iter()
-            .fold(Vec::new(), |mut acc, f| {
-                acc.extend(f);
-                acc
-            }))
+        .into_iter()
+        .fold(Vec::new(), |mut acc, f| {
+            acc.extend(f);
+            acc
+        }))
     }
 
     fn get_basic_blocks(
@@ -1011,7 +1011,10 @@ impl Extractor {
             if !field.namespace.is_empty() {
                 res.push((
                     crate::rules::features::Feature::Namespace(
-                        crate::rules::features::NamespaceFeature::new(&field.namespace, "")?,
+                        crate::rules::features::NamespaceFeature::new(
+                            &field.namespace,
+                            "",
+                        )?,
                     ),
                     insn.offset as u64,
                 ));
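The fold in the second hunk above concatenates the per-extractor feature lists into one vector; a standalone sketch of the same idiom, with illustrative element types in place of the crate's feature types:

    fn main() {
        // Each inner Vec stands in for one extract_* call's features.
        let groups: Vec<Vec<(&str, u64)>> = vec![
            vec![("api", 0x10)],
            vec![("namespace", 0x20), ("string", 0x30)],
        ];
        // fold flattens the groups into a single Vec, as the diff does.
        let flat = groups.into_iter().fold(Vec::new(), |mut acc, f| {
            acc.extend(f);
            acc
        });
        assert_eq!(flat.len(), 3);
    }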
24 changes: 14 additions & 10 deletions src/extractor/smda.rs
@@ -337,7 +337,7 @@ impl Extractor {
     fn extract_file_embedded_pe(&self) -> Result<Vec<(crate::rules::features::Feature, u64)>> {
         let mut res = vec![];
         for (mz_offset, _pe_offset, _key) in
-        Extractor::find_embedded_pe_headers(&self.report.buffer)
+            Extractor::find_embedded_pe_headers(&self.report.buffer)
         {
             res.push((
                 crate::rules::features::Feature::Characteristic(
@@ -1103,7 +1103,8 @@ pub fn read_string(report: &DisassemblyReport, offset: &u64) -> Result<String> {
     let ulen = detect_unicode_len(report, offset)?;
     if ulen > 2 {
         let bytes = read_bytes(report, offset, ulen)?;
-        let utf16_units: Vec<u16> = bytes.chunks_exact(2)
+        let utf16_units: Vec<u16> = bytes
+            .chunks_exact(2)
             .map(|arr| u16::from_le_bytes([arr[0], arr[1]]))
             .collect();
         return Ok(std::string::String::from_utf16(&utf16_units)?);
@@ -1113,16 +1114,17 @@ pub fn read_string(report: &DisassemblyReport, offset: &u64) -> Result<String> {
 
 pub fn detect_ascii_len(report: &DisassemblyReport, offset: &u64) -> Result<usize> {
     let buffer_len = report.buffer.len() as u64;
-    let rva = offset.checked_sub(report.base_addr)
-        .ok_or_else(|| std::io::Error::new(
-            std::io::ErrorKind::Other,
-            "Offset is out of bounds relative to the base address"
-        ))?;
+    let rva = offset.checked_sub(report.base_addr).ok_or_else(|| {
+        std::io::Error::new(
+            std::io::ErrorKind::Other,
+            "Offset is out of bounds relative to the base address",
+        )
+    })?;
 
     if rva as usize >= report.buffer.len() {
         return Err(std::io::Error::new(
             std::io::ErrorKind::Other,
-            "RVA is beyond buffer length"
+            "RVA is beyond buffer length",
         ))?;
     }
 
@@ -1135,7 +1137,7 @@ pub fn detect_ascii_len(report: &DisassemblyReport, offset: &u64) -> Result<usiz
     if rva + ascii_len as u64 >= buffer_len {
         return Err(std::io::Error::new(
             std::io::ErrorKind::Other,
-            "Buffer overflow detected while detecting ASCII length"
+            "Buffer overflow detected while detecting ASCII length",
         ))?;
     }

@@ -1261,7 +1263,8 @@ pub fn extract_unicode_strings(data: &[u8], min_length: usize) -> Result<Vec<(St
     // UTF-16LE
     for mat in re_le.find_iter(data) {
         let matched_bytes = mat.as_bytes();
-        let utf16_units = matched_bytes.chunks(2)
+        let utf16_units = matched_bytes
+            .chunks(2)
             .map(|chunk| u16::from_le_bytes([chunk[0], chunk[1]]))
             .collect::<Vec<u16>>();
         if let Ok(decoded_string) = String::from_utf16(&utf16_units) {
@@ -1272,7 +1275,8 @@ pub fn extract_unicode_strings(data: &[u8], min_length: usize) -> Result<Vec<(St
     // UTF-16BE
     for mat in re_be.find_iter(data) {
         let matched_bytes = mat.as_bytes();
-        let utf16_units = matched_bytes.chunks(2)
+        let utf16_units = matched_bytes
+            .chunks(2)
             .map(|chunk| u16::from_be_bytes([chunk[1], chunk[0]]))
             .collect::<Vec<u16>>();
         if let Ok(decoded_string) = String::from_utf16(&utf16_units) {
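The chunking pattern that rustfmt re-wrapped in read_string and extract_unicode_strings is the usual way to decode raw UTF-16LE byte pairs into a Rust String; a self-contained sketch with illustrative input bytes:

    fn main() {
        // "Hi!" encoded as UTF-16LE: low byte first in each pair.
        let bytes: &[u8] = &[0x48, 0x00, 0x69, 0x00, 0x21, 0x00];
        // chunks_exact(2) yields only complete pairs; a trailing odd byte is dropped.
        let utf16_units: Vec<u16> = bytes
            .chunks_exact(2)
            .map(|arr| u16::from_le_bytes([arr[0], arr[1]]))
            .collect();
        assert_eq!(String::from_utf16(&utf16_units).unwrap(), "Hi!");
    }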
52 changes: 37 additions & 15 deletions src/lib.rs
@@ -7,11 +7,11 @@ use consts::{FileFormat, Os};
 use sede::{from_hex, to_hex};
 use serde::{Deserialize, Serialize};
 use smda::FileArchitecture;
+use std::collections::HashSet;
 use std::{
     collections::{BTreeMap, BTreeSet, HashMap},
     thread::spawn,
 };
-use std::collections::HashSet;
 
 mod error;
 pub use crate::error::Error;
@@ -127,7 +127,7 @@ impl FileCapabilities {
         }
 
         self.attacks
-                .entry(parts[0].to_string())
+            .entry(parts[0].to_string())
             .or_insert_with(BTreeSet::new)
             .insert(detail);
     }
@@ -150,7 +150,7 @@
         }
 
         self.mbc
-                .entry(parts[0].to_string())
+            .entry(parts[0].to_string())
             .or_insert_with(BTreeSet::new)
             .insert(detail);
     }
@@ -159,8 +159,10 @@
 
         if let Some(namespace) = rule.meta.get(&Yaml::String("namespace".to_string())) {
             if let Yaml::String(s) = namespace {
-                self.capability_namespaces.insert(rule.name.clone(), s.clone());
-                let first_non_zero_address = caps.iter()
+                self.capability_namespaces
+                    .insert(rule.name.clone(), s.clone());
+                let first_non_zero_address = caps
+                    .iter()
                     .find(|&&(addr, _)| addr != 0)
                     .map(|&(addr, _)| addr)
                     .unwrap_or(0);
Ok(())
}

pub fn construct_json_for_capabilities_associations(&mut self, filter: Option<String>) -> Value {
pub fn construct_json_for_capabilities_associations(
&mut self,
filter: Option<String>,
) -> Value {
if let Some(f) = filter {
let filters: Vec<&str> = f.split('|').collect();
self.map_features.retain(|k, _v| filters.iter().any(|filter| k.contains(filter)));
self.map_features
.retain(|k, _v| filters.iter().any(|filter| k.contains(filter)));
}

let mut rules = serde_json::Map::new();
for (name, association) in &self.capabilities_associations {
let attacks_json = association
.attack
.iter()
.map(|a|
.map(|a| {
json!({
"id": a.id,
"subtechnique": a.subtechnique,
"tactic": a.tactic,
"technique": a.technique,
})).collect::<Vec<_>>();
})
})
.collect::<Vec<_>>();

let mbc_json = association
.mbc
.iter()
.map(|m|
.map(|m| {
json!({
"objective": m.objective,
"behavior": m.behavior,
"method": m.method,
"id": m.id,
})).collect::<Vec<_>>();
})
})
.collect::<Vec<_>>();

let association_json = json!({
"attacks": attacks_json,
@@ -242,7 +252,10 @@ impl FileCapabilities {
         }
         Value::Object(rules)
     }
-    pub fn serialize_file_capabilities(&mut self, filter: Option<String>) -> serde_json::Result<String> {
+    pub fn serialize_file_capabilities(
+        &mut self,
+        filter: Option<String>,
+    ) -> serde_json::Result<String> {
         let associations_json = self.construct_json_for_capabilities_associations(filter);
         let mut fc_json = serde_json::to_value(self.clone())?;
         fc_json
@@ -484,7 +497,10 @@ fn find_file_capabilities<'a>(
     let mut matches: HashMap<&crate::rules::Rule, Vec<(u64, (bool, Vec<u64>))>> = HashMap::new();
     for rule_set in [&ruleset.file_rules, &ruleset.function_rules].iter() {
         for (rule, matched) in match_fn(rule_set, &file_features, &0, logger)?.1 {
-            matches.entry(rule).or_default().extend(matched.iter().cloned());
+            matches
+                .entry(rule)
+                .or_default()
+                .extend(matched.iter().cloned());
         }
     }
 
@@ -662,7 +678,10 @@ fn index_rule_matches(
         crate::rules::features::MatchedRuleFeature::new(&rule.name, "")?,
     );
 
-    features.entry(matched_rule_feature.clone()).or_default().extend(locations.iter().cloned());
+    features
+        .entry(matched_rule_feature.clone())
+        .or_default()
+        .extend(locations.iter().cloned());
 
     if let Some(Yaml::String(namespace)) = rule.meta.get(&Yaml::String("namespace".to_string())) {
         let parts: Vec<&str> = namespace.split('/').collect();
@@ -671,7 +690,10 @@
             let ns_feature = crate::rules::features::Feature::MatchedRule(
                 crate::rules::features::MatchedRuleFeature::new(&sub_namespace, "")?,
            );
-            features.entry(ns_feature).or_default().extend(locations.iter().cloned());
+            features
+                .entry(ns_feature)
+                .or_default()
+                .extend(locations.iter().cloned());
         }
     }
     Ok(())
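Several hunks in this file re-wrap the same map-accumulation idiom; a minimal standalone version, with illustrative key and location types standing in for the crate's rule types:

    use std::collections::HashMap;

    fn main() {
        let mut matches: HashMap<&str, Vec<u64>> = HashMap::new();
        let found = [("create process", 0x401000u64), ("create process", 0x402000)];
        for (rule, addr) in found {
            // entry().or_default() inserts an empty Vec on the first hit for a key,
            // then extend() appends the new locations.
            matches.entry(rule).or_default().extend([addr]);
        }
        assert_eq!(matches["create process"], vec![0x401000, 0x402000]);
    }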
8 changes: 4 additions & 4 deletions src/rules/features.rs
@@ -338,7 +338,6 @@ impl Feature {
         }
         .to_string()
     }
-
 }
 
 #[derive(Debug, Clone, Eq)]
@@ -1323,9 +1322,10 @@ impl BytesFeature {
     ) -> Result<(bool, Vec<u64>)> {
         for (feature, locations) in features {
             if let Feature::Bytes(s) = feature {
-                if s.value.windows(
-                    self.value.len())
-                    .any(|window| window == self.value) {
+                if s.value
+                    .windows(self.value.len())
+                    .any(|window| window == self.value)
+                {
                     return Ok((true, locations.clone()));
                 }
             } else {
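The windows/any chain reformatted above is a linear byte-pattern search over a slice; a standalone sketch with illustrative bytes:

    fn main() {
        let haystack: &[u8] = &[0xde, 0xad, 0xbe, 0xef, 0x90];
        let needle: &[u8] = &[0xbe, 0xef];
        // windows(n) yields every contiguous n-byte view; any() stops at the first match.
        let found = haystack.windows(needle.len()).any(|window| window == needle);
        assert!(found);
    }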
