Achiefs / fim / build 14275166216

04 Apr 2025 10:13PM UTC coverage: 84.711% (-0.4%) from 85.125%
Triggered by a push (github, web-flow):
Merge pull request #194 from Achiefs/177-db-hash
Added Hash diff scanner

353 of 433 new or added lines in 12 files covered (81.52%).
4 existing lines in 1 file now uncovered.
1435 of 1694 relevant lines covered (84.71%).
1.52 hits per line.
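For reference, the percentages are simply these counts as ratios: 353 / 433 ≈ 81.52% for the new or changed lines, and 1435 / 1694 ≈ 84.71% for the covered file set as a whole.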

Source File: /src/hashevent.rs (88.89% covered)
// Copyright (C) 2024, Achiefs.

// Global constants definitions
pub const REMOVE: &str = "REMOVE";
pub const CREATE: &str = "CREATE";
pub const WRITE: &str = "WRITE";

use crate::appconfig;
use crate::appconfig::*;
use crate::dbfile::*;

use log::*;
use std::fs::OpenOptions;
use serde_json::{json, to_string};
use std::io::Write;
use reqwest::Client;
use std::time::Duration;

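// Describes a hash change detected on a monitored file: the current DBFile entry,
// the previously stored entry when one exists, and the operation involved
// (CREATE, WRITE or REMOVE, per the constants above).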
pub struct HashEvent {
    previous_dbfile: Option<DBFile>,
    dbfile: DBFile,
    operation: String,
}

#[cfg(test)]
mod test;

impl HashEvent {
    pub fn new(previous_dbfile: Option<DBFile>, dbfile: DBFile, operation: String) -> Self {
        HashEvent {
            previous_dbfile,
            dbfile,
            operation,
        }
    }

    // ------------------------------------------------------------------------

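    // Appends the event as a single JSON line to the given events log file.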
    fn log(&self, file: String) {
        let mut events_file = OpenOptions::new()
            .create(true)
            .append(true)
            .open(file)
            .expect("(hashevent::log) Unable to open events log file.");

        match writeln!(events_file, "{}", self.format_json()) {
            Ok(_d) => debug!("Hash event log written"),
            Err(e) => error!("Hash event could not be written, Err: [{}]", e)
        };
    }

    // ------------------------------------------------------------------------

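    // Sends the event to the configured network endpoint: either a Splunk HTTP Event
    // Collector or an Elasticsearch-style index named fim-<year>-<month>-<day>
    // (for example fim-2025-4-4, since month and day are not zero-padded).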
    async fn send(&self, cfg: AppConfig) {
        use time::OffsetDateTime;

        let event = self.get_json();
        let current_date = OffsetDateTime::now_utc();
        let index = format!("fim-{}-{}-{}", current_date.year(), current_date.month() as u8, current_date.day() );

        // Splunk endpoint integration
        if cfg.endpoint_type == "Splunk" {
            let data = json!({
                "source": cfg.node,
                "sourcetype": "_json",
                "event": event,
                "index": "fim_events"
            });
            debug!("Sending received event to Splunk integration, event: {}", data);
            let request_url = format!("{}/services/collector/event", cfg.endpoint_address);
            let client = Client::builder()
                .danger_accept_invalid_certs(cfg.insecure)
                .timeout(Duration::from_secs(30))
                .build().unwrap();
            match client
                .post(request_url)
                .header("Authorization", format!("Splunk {}", cfg.endpoint_token))
                .json(&data)
                .send()
                .await {
                    Ok(response) => debug!("Response received: {:?}",
                        response.text().await.unwrap()),
                    Err(e) => debug!("Error on request: {:?}", e)
            }
        // Elastic endpoint integration
        } else {
            let request_url = format!("{}/{}/_doc/{}", cfg.endpoint_address, index, self.dbfile.id);
            let client = Client::builder()
                .danger_accept_invalid_certs(cfg.insecure)
                .timeout(Duration::from_secs(30))
                .build().unwrap();
            match client
                .post(request_url)
                .basic_auth(cfg.endpoint_user, Some(cfg.endpoint_pass))
                .json(&event)
                .send()
                .await {
                    Ok(response) => debug!("Response received: {:?}",
                        response.text().await.unwrap()),
                    Err(e) => debug!("Error on request: {:?}", e)
            }
        }
    }

    // ------------------------------------------------------------------------

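    // Routes the event according to cfg.get_events_destination(): write it to the
    // local events file, send it to the network endpoint, or both.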
    pub async fn process(&self, cfg: AppConfig) {
        match cfg.get_events_destination().as_str() {
            appconfig::BOTH_MODE => {
                self.log(cfg.get_events_file());
                self.send(cfg).await;
            },
            appconfig::NETWORK_MODE => {
                self.send(cfg).await;
            },
            _ => self.log(cfg.get_events_file())
        }
    }

    // ------------------------------------------------------------------------

    fn format_json(&self) -> String { to_string(&self.get_json()).unwrap() }

    // ------------------------------------------------------------------------

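    // Builds the flat JSON representation of the event; the previous_dbfile.* keys
    // are included only when a previous database entry exists.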
    fn get_json(&self) -> serde_json::Value {
        match &self.previous_dbfile {
            Some(data) => {
                json!({
                    "previous_dbfile.id": data.id.clone(),
                    "previous_dbfile.timestamp": data.timestamp.clone(),
                    "previous_dbfile.hash": data.hash.clone(),
                    "previous_dbfile.path": data.path.clone(),
                    "previous_dbfile.size": data.size.clone(),
                    "previous_dbfile.permissions": data.permissions.clone(),
                    "dbfile.id": self.dbfile.id.clone(),
                    "dbfile.timestamp": self.dbfile.timestamp.clone(),
                    "dbfile.hash": self.dbfile.hash.clone(),
                    "dbfile.path": self.dbfile.path.clone(),
                    "dbfile.size": self.dbfile.size.clone(),
                    "dbfile.permissions": self.dbfile.permissions.clone(),
                    "operation": self.operation.clone(),
                })
            },
            None => json!({
                "dbfile.id": self.dbfile.id.clone(),
                "dbfile.timestamp": self.dbfile.timestamp.clone(),
                "dbfile.hash": self.dbfile.hash.clone(),
                "dbfile.path": self.dbfile.path.clone(),
                "dbfile.size": self.dbfile.size.clone(),
                "dbfile.permissions": self.dbfile.permissions.clone(),
                "operation": self.operation.clone(),
            })
        }

    }
}
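
For orientation, here is a minimal sketch of how a caller might drive this API. The function name and the way the DBFile values are obtained are illustrative assumptions rather than code from the repository; only HashEvent::new, process, the WRITE constant, and the AppConfig/DBFile types come from the listing above.

// Hypothetical call site (not from the repository). It assumes the scanner has
// already produced the current DBFile entry and, when available, the previously
// stored one.
async fn report_hash_change(previous: Option<DBFile>, current: DBFile, cfg: AppConfig) {
    // WRITE is one of the operation constants defined at the top of hashevent.rs.
    let event = HashEvent::new(previous, current, String::from(WRITE));
    // Depending on cfg.get_events_destination(), this writes the event to the
    // events file, sends it to the Splunk/Elastic endpoint, or does both.
    event.process(cfg).await;
}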