Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG-Japanese.md
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
**改善:**

- 異なるログソースの取り扱いを容易にするため、コードをリファクタリングした。 (@fukusuket)
- Microsoft Graph API JSON形式のAzureログに対応した。 (#113) (@fukusuket)

**バグ修正:**

Expand Down
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
**Enhancements:**

- Code refactored for easier handling of different log sources. (@fukusuket)
- Added support for Microsoft Graph API JSON format for Azure logs. (#113) (@fukusuket)

**Bug Fixes:**

Expand Down
63 changes: 0 additions & 63 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 0 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@ git2="*"
hashbrown="*"
indicatif = "*"
itertools = "*"
krapslog = "0.6"
libmimalloc-sys = { version = "*", features = ["extended"] }
maxminddb = "*"
mimalloc = { version = "*", default-features = false }
Expand Down
10 changes: 6 additions & 4 deletions config/azure_profile.yaml
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
Timestamp: '.time'
Timestamp: '.time|.eventTimestamp'
RuleTitle: 'sigma.title'
RuleAuthor: 'sigma.author'
Level: 'sigma.level'
Category: '.category'
OperationName: '.operationName'
SrcIP: '.callerIpAddress'
Category: '.category.value|.category'
OperationName: '.operationName.value|.operationName'
Entity: '.properties.entity'
Caller: '.caller'
SrcIP: '.claims.ipaddr|.callerIpAddress'
ResourceId: '.resourceId'
CorrelationId: '.correlationId'
RuleID: 'sigma.id'
35 changes: 24 additions & 11 deletions src/core/log_source.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,6 @@ impl LogSource {
match self {
LogSource::Aws => "config/aws_profile.yaml",
LogSource::Azure => "config/azure_profile.yaml",
// The 'All' variant does not have a specific profile path.
// Return an empty string to indicate no profile is available.
LogSource::All => "",
}
}
Expand All @@ -30,15 +28,30 @@ pub fn is_match_service(service: &Option<String>, event: &Event) -> bool {
if let Some(s) = service {
match s.as_str() {
"cloudtrail" => true,
"activitylogs" => event
.get("category")
.is_some_and(|v| v.value_to_string() == "Administrative"),
"auditlogs" => event
.get("category")
.is_some_and(|v| v.value_to_string() == "AuditLogs"),
"signinlogs" => event
.get("category")
.is_some_and(|v| v.value_to_string() == "SignInLogs"),
"activitylogs" => {
event
.get("category")
.is_some_and(|v| v.value_to_string() == "Administrative")
|| event
.get("category.value")
.is_some_and(|v| v.value_to_string() == "Administrative")
}
"auditlogs" => {
event
.get("category")
.is_some_and(|v| v.value_to_string() == "AuditLogs")
|| event
.get("category.value")
.is_some_and(|v| v.value_to_string() == "AuditLogs")
}
"signinlogs" => {
event
.get("category")
.is_some_and(|v| v.value_to_string() == "SignInLogs")
|| event
.get("category.value")
.is_some_and(|v| v.value_to_string() == "SignInLogs")
}
_ => false,
}
} else {
Expand Down
92 changes: 83 additions & 9 deletions src/core/scan.rs
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,6 @@ pub fn scan_file<'a>(
Ok(value) => value,
Err(_e) => return,
};

detect_events(
&events,
context,
Expand Down Expand Up @@ -185,10 +184,24 @@ fn log_contents_to_events(log_contents: &str, log: &LogSource) -> Vec<Value> {
}
}
}
LogSource::Azure => log_contents
.lines()
.filter_map(|line| serde_json::from_str::<Value>(line).ok())
.collect(),
LogSource::Azure => {
// Try parsing as JSON array first
if let Ok(Value::Array(json_array)) = serde_json::from_str::<Value>(log_contents) {
return json_array;
}
            // Next, try parsing as a JSON object that wraps events in a "value" array
if let Ok(json_value) = serde_json::from_str::<Value>(log_contents)
&& let Value::Object(ref json_map) = json_value
&& let Some(Value::Array(json_array)) = json_map.get("value")
{
return json_array.clone();
}
// Fall back to JSONL format
log_contents
.lines()
.filter_map(|line| serde_json::from_str::<Value>(line).ok())
.collect()
}
_ => vec![],
}
}
Expand Down Expand Up @@ -418,12 +431,35 @@ pub fn load_json_from_file(
}
}
LogSource::Azure => {
log_contents.lines().for_each(|line| {
if let Ok(json_value) = serde_json::from_str::<Value>(line) {
events.push(json_value);
let log_contents_trimmed = log_contents
.strip_prefix('\u{FEFF}')
.unwrap_or(log_contents);
let json_value: Result<Value, _> = serde_json::from_str(log_contents_trimmed);
match json_value {
Ok(json_value) => match json_value {
// JSON array format
Value::Array(json_array) => {
events.extend(json_array);
}
// JSON object with "value" key
Value::Object(json_map) => {
if let Some(Value::Array(json_array)) = json_map.get("value") {
events.extend(json_array.clone());
}
}
_ => {}
},
Err(_) => {
// Fall back to JSONL format
log_contents.lines().for_each(|line| {
if let Ok(json_value) = serde_json::from_str::<Value>(line) {
events.push(json_value);
}
});
}
});
}
}

_ => {}
}
Ok(events)
Expand Down Expand Up @@ -463,4 +499,42 @@ mod tests {
let event = result.unwrap();
assert_eq!(event.len(), 29);
}

#[test]
fn test_load_azure_value_format() {
    // Azure export wrapped in a top-level "value" array: the loader should
    // unwrap it into individual event objects.
    let contents = fs::read_to_string("test_files/json/azure_value_format.json").unwrap();
    let result = load_json_from_file(&contents, &LogSource::Azure);
    assert!(result.is_ok());
    let events = result.unwrap();
    // Exactly one event is expected in this fixture.
    assert_eq!(events.len(), 1);
    // The event must expose the caller field with the fixture's value.
    let caller = events[0].get("caller");
    assert!(caller.is_some());
    assert_eq!(caller.unwrap().as_str().unwrap(), "[email protected]");
}

#[test]
fn test_load_azure_graph_api_format() {
    // Microsoft Graph API style JSON: the loader should yield all events
    // from the fixture file.
    let contents = fs::read_to_string("test_files/json/azure_graph_api_format.json").unwrap();
    let result = load_json_from_file(&contents, &LogSource::Azure);
    assert!(result.is_ok());
    let events = result.unwrap();
    // The fixture contains three events.
    assert_eq!(events.len(), 3);

    // The first event must carry the Graph API timestamp field...
    let ts = events[0].get("eventTimestamp");
    assert!(ts.is_some());
    assert_eq!(ts.unwrap().as_str().unwrap(), "2025-11-30T01:45:06.4650448Z");
    // ...as well as the caller identity.
    let caller = events[0].get("caller");
    assert!(caller.is_some());
    assert_eq!(caller.unwrap().as_str().unwrap(), "[email protected]");
}
}
25 changes: 15 additions & 10 deletions src/core/timeline_writer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -298,19 +298,24 @@ fn get_value_from_event_common(
return ip;
}
}

// イベントフィールド処理(共通)
if key.starts_with(".") {
let key = key.strip_prefix(".").unwrap();
if let Some(value) = event.get(key) {
if key == "eventTime" {
value.value_to_string().replace("T", " ").replace("Z", "")
} else {
value.value_to_string()
let key_without_prefix = key.trim_start_matches('.').trim();
let keys: Vec<&str> = key_without_prefix.split('|').collect();
for k in keys {
let k_trimmed = k.trim_matches('.').trim();
if let Some(value) = event.get(k_trimmed) {
return if k_trimmed.contains("eventTime")
|| k_trimmed.contains("time")
|| k_trimmed.contains("eventTimestamp")
{
value.value_to_string().replace("T", " ").replace("Z", "")
} else {
value.value_to_string()
};
}
} else {
"-".to_string()
}
"-".to_string()
} else if key.starts_with("sigma.") {
let key = key.replace("sigma.", "");
match key.as_str() {
Expand Down Expand Up @@ -497,7 +502,7 @@ impl<'a> OutputContext<'a> {
.iter()
.find(|(k, _)| k == "Timestamp")
.map(|(_k, v)| v.as_str())
.unwrap_or("eventTime");
.unwrap_or(".eventTime|.time|.eventTimestamp");
Self {
profile,
prof_ts_key,
Expand Down
Loading
Loading