Skip to content
Snippets Groups Projects
Commit 1e714e73 authored by Zitnik, Anze's avatar Zitnik, Anze
Browse files

Use logging.

Replaced all print() calls with logger.info/debug. Printing evidence objects. Logging to /var/log/evidence_collector.log and setting `tail -f` to this file in the docker entrypoint.
Version 0.0.4.

Squashed commit of the following:

commit 825d1f95a141f8e11703c27889e53a6e16c3cd66
Author: Anže Žitnik <anze.zitnik@xlab.si>
Date:   Wed Dec 1 12:21:26 2021 +0100

    Output logs to /var/log... and tailf in Dockerfile

commit db9b34317d19e42316fc0c5f0a8f60b03b2e4dbc
Author: Anže Žitnik <anze.zitnik@xlab.si>
Date:   Wed Dec 1 12:19:54 2021 +0100

    Introduce logger. Replace all prints.
parent e6220b09
No related branches found
No related tags found
No related merge requests found
# Resolved diff residue: the commit bumps the service version to 0.0.4.
VERSION=v0.0.4
SERVICE=evidence-collector
python3 -m scheduler.scheduler

# Keep the container's PID 1 alive and stream the collector's log file to
# stdout (replaces the old `tail -f /dev/null` idle loop so `docker logs`
# shows application output).
tail -f /var/log/evidence_collector.log
......@@ -29,6 +29,6 @@ def create_evidence(id, service_id, tool_id, raw, resource):
return evidence
def print_evidence(logger, evidence):
    """Log a truncated representation of *evidence* at DEBUG level.

    NOTE(review): mutates the caller's object — ``evidence.raw`` is
    replaced in place by its first 50 characters plus an ellipsis so the
    (potentially large) raw payload does not flood the log. The ellipsis
    is appended even when the payload is already short.

    :param logger: a ``logging.Logger``-like object with a ``debug`` method
    :param evidence: evidence message with a string ``raw`` attribute
    """
    evidence.raw = evidence.raw[:50] + "..."
    logger.debug(evidence)
from evidence.evidence_store_pb2_grpc import EvidenceStoreStub
from evidence.evidence_pb2 import Evidence
import grpc
import json
class ForwardEvidence(object):
    """Forward evidence messages to a Clouditor evidence store over gRPC.

    Configuration and logger are injected by the caller (the collector
    module) instead of re-reading ``constants.json`` here.
    """

    def __init__(self, constants, logger):
        """
        :param constants: parsed constants.json dict; only
            ``constants['clouditor']['host']`` and ``['port']`` are read.
        :param logger: logger used for forwarding results and errors.
        """
        # NOTE(review): insecure (plaintext) channel — acceptable only on a
        # trusted network; confirm before exposing externally.
        self.channel = grpc.insecure_channel('{}:{}'.format(constants['clouditor']['host'], constants['clouditor']['port']))
        self.stub = EvidenceStoreStub(self.channel)
        self.logger = logger

    def send_evidence(self, evidence):
        """Send one evidence message; log the response, or log gRPC errors.

        Errors are logged and swallowed (best-effort delivery), matching the
        previous ``print``-based behavior.
        """
        try:
            response = self.stub.StoreEvidence(evidence)
            self.logger.info('gRPC evidence forwarded: ' + str(response))
        except grpc.RpcError as err:
            self.logger.error(err)
            self.logger.error(err.details())
            self.logger.error('{}, {}'.format(err.code().name, err.code().value))
# Python logging configuration, consumed via logging.config.fileConfig()
# in wazuh_evidence_collector (loaded as 'logging.conf').

[loggers]
keys=root

[handlers]
# NOTE(review): consoleHandler is declared below but the root logger only
# attaches fileHandler — console output goes through the docker entrypoint's
# `tail -f` on the log file instead.
keys=consoleHandler,fileHandler

[formatters]
keys=simpleFormatter

[logger_root]
# DEBUG so print_evidence()'s logger.debug() output is captured.
level=DEBUG
handlers=fileHandler

[handler_consoleHandler]
class=StreamHandler
level=DEBUG
formatter=simpleFormatter
args=(sys.stdout,)

[handler_fileHandler]
class=FileHandler
level=DEBUG
formatter=simpleFormatter
# Same path the docker entrypoint tails to keep the container foregrounded.
args=('/var/log/evidence_collector.log',)

[formatter_simpleFormatter]
format=%(asctime)s - %(name)s - %(levelname)s - %(message)s
# Empty datefmt -> logging's default ISO-like timestamp format.
datefmt=
import json
from redis import Redis
from rq import Queue
from rq_scheduler import Scheduler

from wazuh_evidence_collector import wazuh_evidence_collector
# Shared configuration and logger now live in the collector module, which
# parses constants.json exactly once at import time; the local
# `f = open('constants.json')` loading was removed by this commit.
from wazuh_evidence_collector.wazuh_evidence_collector import CONSTANTS, LOGGER
def remove_jobs(scheduler):
jobs = scheduler.get_jobs()
......@@ -16,10 +12,10 @@ def remove_jobs(scheduler):
def print_jobs(scheduler):
    """Log every job currently registered with *scheduler* at INFO level.

    :param scheduler: an rq-scheduler ``Scheduler`` (anything exposing
        ``get_jobs()``).
    """
    for job in scheduler.get_jobs():
        LOGGER.info(job)
# Redis connection and work queue are configured from the shared CONSTANTS
# dict (imported from the collector module).
redis = Redis(CONSTANTS['redis']['host'], CONSTANTS['redis']['port'])
q = Queue(CONSTANTS['redis']['queue'], connection=redis)
scheduler = Scheduler(connection=redis)

# TODO: Remove if needed
......@@ -32,7 +28,7 @@ scheduler.cron(
func=wazuh_evidence_collector.run_collector,
args=[],
repeat=None,
queue_name=constants['redis']['queue'],
queue_name=CONSTANTS['redis']['queue'],
use_local_timezone=False
)
......
......@@ -6,23 +6,31 @@ from forward_evidence.forward_evidence import ForwardEvidence
from evidence.generate_evidence import create_resource, create_evidence, print_evidence
import uuid
import configparser
import logging
import logging.config
# Parse shared configuration once at import time; scheduler.scheduler
# imports CONSTANTS and LOGGER from this module. Using a context manager
# guarantees the file handle is closed even if json.load() raises.
with open('constants.json') as f:
    CONSTANTS = json.load(f)

# Logger configuration (handlers, file path, format) lives in logging.conf.
logging.config.fileConfig('logging.conf')
LOGGER = logging.getLogger('root')

wc = WazuhClient(CONSTANTS['wazuh']['host'], CONSTANTS['wazuh']['port'], CONSTANTS['wazuh']['username'], CONSTANTS['wazuh']['password'])
# Elasticsearch client for the Wazuh indexer. NOTE(review): scheme is
# 'https' but use_ssl/verify_certs are disabled — certificate validation
# is off; confirm this is intentional for the target deployment.
es = Elasticsearch(
    CONSTANTS['elastic']['host'],
    http_auth=(CONSTANTS['elastic']['username'], CONSTANTS['elastic']['password']),
    scheme='https',
    port=CONSTANTS['elastic']['port'],
    use_ssl=False,
    verify_certs=False,
    ssl_show_warn=False,
)

# Single shared forwarder (config and logger injected), created at import
# time instead of per run_collector() call.
forwarder = ForwardEvidence(CONSTANTS, LOGGER)
# Get ID (UUID)
def get_id():
id = uuid.uuid1()
......@@ -62,10 +70,9 @@ def run_collector():
evidence_list.append(generate_evidence(wc, es, agent))
# TODO:
forwarder = ForwardEvidence()
for evidence in evidence_list:
forwarder.send_evidence(evidence)
print_evidence(evidence)
print_evidence(LOGGER, evidence)
return evidence_list
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment