Commit 4484a05f authored by Zitnik, Anze

Merge branch 'develop' into 'master'

gRPC implementation

See merge request medina/evidence-collector!3
parents 9221f03b d4329db8
VERSION=v0.0.1
VERSION=v0.0.2
SERVICE=evidence-collector
\ No newline at end of file
{
"wazuh": {
"ip": "192.168.33.10",
"host": "192.168.33.10",
"port": 55000,
"username": "wazuh-wui",
"password": "wazuh-wui"
},
"elastic": {
"ip": "192.168.33.10",
"host": "192.168.33.10",
"port": 9200,
"username": "admin",
"password": "changeme"
},
"redis": {
"ip": "localhost",
"host": "localhost",
"port": 6379,
"queue": "low"
},
"clouditor": {
"host":"192.168.33.14",
"port": 9090
}
}
\ No newline at end of file
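For orientation, a minimal sketch of how the new clouditor block in constants.json is meant to be consumed (it mirrors the loading pattern used by the collector further down in this merge request; the variable name is illustrative):

import json

# Load the connection settings; the 'clouditor' block is introduced in this merge request.
with open('constants.json') as f:
    constants = json.load(f)

# Build the gRPC target string, e.g. '192.168.33.14:9090'.
clouditor_target = '{}:{}'.format(constants['clouditor']['host'],
                                  constants['clouditor']['port'])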
#!/bin/bash
redis_ip=$(cat constants.json | jq -r '.redis.ip')
redis_host=$(cat constants.json | jq -r '.redis.host')
redis_port=$(cat constants.json | jq -r '.redis.port')
redis_queue=$(cat constants.json | jq -r '.redis.queue')
@@ -8,7 +8,7 @@ redis-server --port $redis_port &
rqworker $redis_queue &
rqscheduler --host $redis_ip --port $redis_port &
rqscheduler --host $redis_host --port $redis_port &
python3 -m scheduler.scheduler
@@ -16,6 +16,6 @@ class Evidence:
def toJson(self):
return json.dumps(self.__dict__)
def simple_evidence(evidence_id, timestamp, resource_id, measurement_result, raw):
return Evidence(evidence_id, timestamp, resource_id, None, None, None, None, measurement_result, raw)
def simple_evidence(evidence_id, timestamp, resource_id, feature_property, measurement_result, raw):
return Evidence(evidence_id, timestamp, resource_id, None, None, None, feature_property, measurement_result, raw)
\ No newline at end of file
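The helper now takes an additional feature_property argument in front of the measurement result. A minimal usage sketch of the new signature (the identifier and values below are illustrative, not taken from the repository):

from evidence.evidence import simple_evidence

# Hypothetical example values; in the collector these come from the Wazuh checks.
ev = simple_evidence('05.3-42', '2021-11-16T10:00:00Z', 'agent-001',
                     'wazuh_monitoring_enabled', 'true', ['raw check output'])
print(ev.toJson())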
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: proto/evidence.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
# source: evidence.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
@@ -17,11 +16,12 @@ from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__
DESCRIPTOR = _descriptor.FileDescriptor(
name='proto/evidence.proto',
name='evidence.proto',
package='',
syntax='proto3',
serialized_options=_b('Z\010evidence'),
serialized_pb=_b('\n\x14proto/evidence.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xc1\x01\n\x08\x45vidence\x12\n\n\x02id\x18\x01 \x01(\t\x12\x12\n\nservice_id\x18\x02 \x01(\t\x12\x13\n\x0bresource_id\x18\x03 \x01(\t\x12-\n\ttimestamp\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1a\n\x12\x61pplicable_metrics\x18\x05 \x03(\x05\x12\x0b\n\x03raw\x18\x06 \x01(\t\x12(\n\x08resource\x18\x07 \x01(\x0b\x32\x16.google.protobuf.ValueB\nZ\x08\x65videnceb\x06proto3')
serialized_options=b'Z\010evidence',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x0e\x65vidence.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xc1\x01\n\x08\x45vidence\x12\n\n\x02id\x18\x01 \x01(\t\x12\x12\n\nservice_id\x18\x02 \x01(\t\x12\x13\n\x0bresource_id\x18\x03 \x01(\t\x12-\n\ttimestamp\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1a\n\x12\x61pplicable_metrics\x18\x05 \x03(\x05\x12\x0b\n\x03raw\x18\x06 \x01(\t\x12(\n\x08resource\x18\x07 \x01(\x0b\x32\x16.google.protobuf.ValueB\nZ\x08\x65videnceb\x06proto3'
,
dependencies=[google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,])
@@ -34,56 +34,57 @@ _EVIDENCE = _descriptor.Descriptor(
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='Evidence.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='service_id', full_name='Evidence.service_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='resource_id', full_name='Evidence.resource_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='timestamp', full_name='Evidence.timestamp', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='applicable_metrics', full_name='Evidence.applicable_metrics', index=4,
number=5, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='raw', full_name='Evidence.raw', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='resource', full_name='Evidence.resource', index=6,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -96,8 +97,8 @@ _EVIDENCE = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
serialized_start=88,
serialized_end=281,
serialized_start=82,
serialized_end=275,
)
_EVIDENCE.fields_by_name['timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
@@ -105,11 +106,11 @@ _EVIDENCE.fields_by_name['resource'].message_type = google_dot_protobuf_dot_stru
DESCRIPTOR.message_types_by_name['Evidence'] = _EVIDENCE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Evidence = _reflection.GeneratedProtocolMessageType('Evidence', (_message.Message,), dict(
DESCRIPTOR = _EVIDENCE,
__module__ = 'proto.evidence_pb2'
Evidence = _reflection.GeneratedProtocolMessageType('Evidence', (_message.Message,), {
'DESCRIPTOR' : _EVIDENCE,
'__module__' : 'evidence_pb2'
# @@protoc_insertion_point(class_scope:Evidence)
))
})
_sym_db.RegisterMessage(Evidence)
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: evidence_store.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
import evidence.evidence_pb2 as evidence__pb2
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='evidence_store.proto',
package='clouditor',
syntax='proto3',
serialized_options=b'Z\010evidence',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x14\x65vidence_store.proto\x12\tclouditor\x1a\x0e\x65vidence.proto\x1a\x1bgoogle/protobuf/empty.proto\"\'\n\x15StoreEvidenceResponse\x12\x0e\n\x06status\x18\x01 \x01(\x08\"\x16\n\x14ListEvidencesRequest\"5\n\x15ListEvidencesResponse\x12\x1c\n\tevidences\x18\x01 \x03(\x0b\x32\t.Evidence2\xd8\x01\n\rEvidenceStore\x12<\n\rStoreEvidence\x12\t.Evidence\x1a .clouditor.StoreEvidenceResponse\x12\x35\n\x0eStoreEvidences\x12\t.Evidence\x1a\x16.google.protobuf.Empty(\x01\x12R\n\rListEvidences\x12\x1f.clouditor.ListEvidencesRequest\x1a .clouditor.ListEvidencesResponseB\nZ\x08\x65videnceb\x06proto3'
,
dependencies=[evidence__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,])
_STOREEVIDENCERESPONSE = _descriptor.Descriptor(
name='StoreEvidenceResponse',
full_name='clouditor.StoreEvidenceResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clouditor.StoreEvidenceResponse.status', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=80,
serialized_end=119,
)
_LISTEVIDENCESREQUEST = _descriptor.Descriptor(
name='ListEvidencesRequest',
full_name='clouditor.ListEvidencesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=121,
serialized_end=143,
)
_LISTEVIDENCESRESPONSE = _descriptor.Descriptor(
name='ListEvidencesResponse',
full_name='clouditor.ListEvidencesResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='evidences', full_name='clouditor.ListEvidencesResponse.evidences', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=145,
serialized_end=198,
)
_LISTEVIDENCESRESPONSE.fields_by_name['evidences'].message_type = evidence__pb2._EVIDENCE
DESCRIPTOR.message_types_by_name['StoreEvidenceResponse'] = _STOREEVIDENCERESPONSE
DESCRIPTOR.message_types_by_name['ListEvidencesRequest'] = _LISTEVIDENCESREQUEST
DESCRIPTOR.message_types_by_name['ListEvidencesResponse'] = _LISTEVIDENCESRESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
StoreEvidenceResponse = _reflection.GeneratedProtocolMessageType('StoreEvidenceResponse', (_message.Message,), {
'DESCRIPTOR' : _STOREEVIDENCERESPONSE,
'__module__' : 'evidence_store_pb2'
# @@protoc_insertion_point(class_scope:clouditor.StoreEvidenceResponse)
})
_sym_db.RegisterMessage(StoreEvidenceResponse)
ListEvidencesRequest = _reflection.GeneratedProtocolMessageType('ListEvidencesRequest', (_message.Message,), {
'DESCRIPTOR' : _LISTEVIDENCESREQUEST,
'__module__' : 'evidence_store_pb2'
# @@protoc_insertion_point(class_scope:clouditor.ListEvidencesRequest)
})
_sym_db.RegisterMessage(ListEvidencesRequest)
ListEvidencesResponse = _reflection.GeneratedProtocolMessageType('ListEvidencesResponse', (_message.Message,), {
'DESCRIPTOR' : _LISTEVIDENCESRESPONSE,
'__module__' : 'evidence_store_pb2'
# @@protoc_insertion_point(class_scope:clouditor.ListEvidencesResponse)
})
_sym_db.RegisterMessage(ListEvidencesResponse)
DESCRIPTOR._options = None
_EVIDENCESTORE = _descriptor.ServiceDescriptor(
name='EvidenceStore',
full_name='clouditor.EvidenceStore',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=201,
serialized_end=417,
methods=[
_descriptor.MethodDescriptor(
name='StoreEvidence',
full_name='clouditor.EvidenceStore.StoreEvidence',
index=0,
containing_service=None,
input_type=evidence__pb2._EVIDENCE,
output_type=_STOREEVIDENCERESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='StoreEvidences',
full_name='clouditor.EvidenceStore.StoreEvidences',
index=1,
containing_service=None,
input_type=evidence__pb2._EVIDENCE,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='ListEvidences',
full_name='clouditor.EvidenceStore.ListEvidences',
index=2,
containing_service=None,
input_type=_LISTEVIDENCESREQUEST,
output_type=_LISTEVIDENCESRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_EVIDENCESTORE)
DESCRIPTOR.services_by_name['EvidenceStore'] = _EVIDENCESTORE
# @@protoc_insertion_point(module_scope)
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import evidence.evidence_pb2 as evidence__pb2
import evidence.evidence_store_pb2 as evidence__store__pb2
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
class EvidenceStoreStub(object):
"""Manages the storage of evidences
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.StoreEvidence = channel.unary_unary(
'/clouditor.EvidenceStore/StoreEvidence',
request_serializer=evidence__pb2.Evidence.SerializeToString,
response_deserializer=evidence__store__pb2.StoreEvidenceResponse.FromString,
)
self.StoreEvidences = channel.stream_unary(
'/clouditor.EvidenceStore/StoreEvidences',
request_serializer=evidence__pb2.Evidence.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.ListEvidences = channel.unary_unary(
'/clouditor.EvidenceStore/ListEvidences',
request_serializer=evidence__store__pb2.ListEvidencesRequest.SerializeToString,
response_deserializer=evidence__store__pb2.ListEvidencesResponse.FromString,
)
class EvidenceStoreServicer(object):
"""Manages the storage of evidences
"""
def StoreEvidence(self, request, context):
"""Stores an evidence to the evidence storage
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def StoreEvidences(self, request_iterator, context):
"""Stores a stream of evidences to the evidence storage
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListEvidences(self, request, context):
"""Returns the evidences lying in the evidence storage
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_EvidenceStoreServicer_to_server(servicer, server):
rpc_method_handlers = {
'StoreEvidence': grpc.unary_unary_rpc_method_handler(
servicer.StoreEvidence,
request_deserializer=evidence__pb2.Evidence.FromString,
response_serializer=evidence__store__pb2.StoreEvidenceResponse.SerializeToString,
),
'StoreEvidences': grpc.stream_unary_rpc_method_handler(
servicer.StoreEvidences,
request_deserializer=evidence__pb2.Evidence.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
'ListEvidences': grpc.unary_unary_rpc_method_handler(
servicer.ListEvidences,
request_deserializer=evidence__store__pb2.ListEvidencesRequest.FromString,
response_serializer=evidence__store__pb2.ListEvidencesResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'clouditor.EvidenceStore', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class EvidenceStore(object):
"""Manages the storage of evidences
"""
@staticmethod
def StoreEvidence(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/clouditor.EvidenceStore/StoreEvidence',
evidence__pb2.Evidence.SerializeToString,
evidence__store__pb2.StoreEvidenceResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def StoreEvidences(request_iterator,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.stream_unary(request_iterator, target, '/clouditor.EvidenceStore/StoreEvidences',
evidence__pb2.Evidence.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def ListEvidences(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/clouditor.EvidenceStore/ListEvidences',
evidence__store__pb2.ListEvidencesRequest.SerializeToString,
evidence__store__pb2.ListEvidencesResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
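The generated add_EvidenceStoreServicer_to_server helper is only needed on the server side; in this merge request the collector acts purely as a client and Clouditor provides the real EvidenceStore server. For completeness, a minimal, hypothetical wiring sketch for a local test server, assuming a servicer subclass that actually implements the three RPCs:

from concurrent import futures
import grpc
from evidence.evidence_store_pb2_grpc import (EvidenceStoreServicer,
                                              add_EvidenceStoreServicer_to_server)

# Hypothetical servicer; a real implementation would override StoreEvidence,
# StoreEvidences and ListEvidences instead of inheriting the UNIMPLEMENTED stubs.
class DummyEvidenceStoreServicer(EvidenceStoreServicer):
    pass

server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
add_EvidenceStoreServicer_to_server(DummyEvidenceStoreServicer(), server)
server.add_insecure_port('[::]:9090')  # port chosen to match the clouditor entry in constants.json
server.start()
server.wait_for_termination()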
from evidence import evidence_pb2, evidence
from evidence.evidence_store_pb2_grpc import EvidenceStoreStub
from evidence.evidence_pb2 import Evidence
from google.protobuf.struct_pb2 import Value
import grpc
import json
f = open('constants.json',)
constants = json.load(f)
f.close()
def create_grpc_message(ev):
ev_grpc = evidence_pb2.Evidence()
ev_grpc = Evidence()
ev_grpc.id = ev.id
ev_grpc.timestamp = ev.timestamp
ev_grpc.id = ev.evidence_id
ev_grpc.timestamp.GetCurrentTime()
ev_grpc.resource_id = ev.resource_id
ev_grpc.service_id = ev.tool
ev_grpc.resource = ev.resource_type
ev_grpc.applicable_metrics = ev.measurement_result
ev_grpc.raw = ev.raw
ev_grpc.service_id = str(ev.tool)
ev_grpc.resource.string_value = str(ev.resource_type)
ev_grpc.applicable_metrics.extend([1] if ev.measurement_result else [0])
ev_grpc.raw = ''.join(map(str, ev.raw))
return ev_grpc
class ForwardEvidence(object):
def __init__(self):
self.channel = grpc.insecure_channel('{}:{}'.format(constants['clouditor']['host'], constants['clouditor']['port']))
self.stub = EvidenceStoreStub(self.channel)
def send_evidence(self, evidence):
grpc_evidence = create_grpc_message(evidence)
try:
response = self.stub.StoreEvidence(grpc_evidence)
print('gRPC evidence forwarded: ' + str(response))
except grpc.RpcError as err:
print(err)
print(err.details())
print('{}, {}'.format(err.code().name, err.code().value))
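Usage sketch for the new forwarder (illustrative only; in this merge request run_full_check() builds the evidences and calls send_evidence in a loop):

from evidence.evidence import simple_evidence

# Hypothetical standalone usage; requires a reachable Clouditor gRPC endpoint
# as configured in the clouditor section of constants.json.
forwarder = ForwardEvidence()
ev = simple_evidence('05.3-42', '2021-11-16T10:00:00Z', 'agent-001',
                     'wazuh_monitoring_enabled', 'true', ['raw check output'])
forwarder.send_evidence(ev)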
syntax = "proto3";
package clouditor;
import "evidence.proto";
import "google/protobuf/empty.proto";
option go_package = "evidence";
// Manages the storage of evidences
service EvidenceStore {
// Stores an evidence to the evidence storage
rpc StoreEvidence(Evidence) returns (StoreEvidenceResponse);
// Stores a stream of evidences to the evidence storage
rpc StoreEvidences(stream Evidence) returns (google.protobuf.Empty);
// Returns the evidences lying in the evidence storage
rpc ListEvidences(ListEvidencesRequest) returns (ListEvidencesResponse);
}
message StoreEvidenceResponse { bool status = 1; }
message ListEvidencesRequest {}
message ListEvidencesResponse { repeated Evidence evidences = 1; }
\ No newline at end of file
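The *_pb2.py and *_pb2_grpc.py modules above are generated from these .proto files and must not be edited by hand. A hypothetical regeneration sketch, assuming the grpcio-tools package is installed (include and output paths are illustrative):

import pkg_resources
from grpc_tools import protoc

# Include path for the bundled well-known types (google/protobuf/*.proto).
proto_include = pkg_resources.resource_filename('grpc_tools', '_proto')

protoc.main([
    'grpc_tools.protoc',
    '-I.',
    '-I{}'.format(proto_include),
    '--python_out=.',
    '--grpc_python_out=.',
    'evidence.proto',
    'evidence_store.proto',
])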
elasticsearch==7.13.4
redis==3.3.11
rq_scheduler==0.11.0
urllib3==1.25.8
redis==3.5.3
rq==1.2.2
elasticsearch_dsl==7.4.0
rq==1.10.0
rq_scheduler==0.11.0
grpcio==1.41.1
elasticsearch==7.13.4
protobuf==3.19.1
click==7.1.2
\ No newline at end of file
@@ -18,7 +18,7 @@ def print_jobs(scheduler):
for job in jobs:
print(job)
redis = Redis(constants['redis']['ip'], constants['redis']['port'])
redis = Redis(constants['redis']['host'], constants['redis']['port'])
q = Queue(constants['redis']['queue'], connection=redis)
scheduler = Scheduler(connection=redis)
@@ -5,8 +5,7 @@ redis1="# oO0OoO0OoO0Oo Redis is starting oO0OoO0OoO0Oo"
redis2="Ready to accept connections"
scheduler="Registering birth"
worker1="Worker rq:worker:"
worker2="Subscribing to channel rq:pubsub:"
worker3="Listening on "
worker2="Listening on "
if ! [[ $logs =~ $redis1 ]]
then
@@ -37,9 +36,3 @@ if ! [[ $logs =~ $worker2 ]]
echo "Redis worker not started" 1>&2
exit 1
fi
if ! [[ $logs =~ $worker3 ]]
then
echo "Redis worker not started" 1>&2
exit 1
fi
@@ -2,6 +2,7 @@ import json
from wazuh_evidence_collector.wazuh_client import WazuhClient
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Search
from forward_evidence.forward_evidence import ForwardEvidence
from evidence.evidence import Evidence, simple_evidence
from random import randint
from sys import maxsize
@@ -12,10 +13,10 @@ f = open('constants.json',)
constants = json.load(f)
f.close()
wc = WazuhClient(constants['wazuh']['ip'], constants['wazuh']['port'], constants['wazuh']['username'], constants['wazuh']['password'])
wc = WazuhClient(constants['wazuh']['host'], constants['wazuh']['port'], constants['wazuh']['username'], constants['wazuh']['password'])
es = Elasticsearch(
constants['elastic']['ip'],
constants['elastic']['host'],
http_auth=(constants['elastic']['username'], constants['elastic']['password']),
scheme='https',
port=constants['elastic']['port'],
@@ -56,10 +57,12 @@ def run_full_check():
agent_evidences.append(wazuh_monitoring_enabled(wc, agent))
agent_evidences.append(malvare_protection_enabled(wc, es, agent))
# TODO: : Remove for production. This is only output for easier local testing.
# TODO:
forwarder = ForwardEvidence()
for evidence in agent_evidences:
forwarder.send_evidence(evidence)
pprint.pprint(evidence.__dict__)
return agent_evidences
# Check Wazuh's configuration
@@ -122,9 +125,9 @@ def wazuh_monitoring_enabled(wc, agent_id):
raw_evidence.append(evidence)
if result_syscheck and result_rootcheck and result_aler_integration:
return simple_evidence(get_id('05.3'), get_timestamp(), agent_id, "true", raw_evidence)
return simple_evidence(get_id('05.3'), get_timestamp(), agent_id, "wazuh_monitoring_enabled", "true", raw_evidence)
else:
return simple_evidence(get_id('05.3'), get_timestamp(), agent_id, "false", raw_evidence)
return simple_evidence(get_id('05.3'), get_timestamp(), agent_id, "wazuh_monitoring_enabled", "false", raw_evidence)
# Check if agent uses ClamAV or VirusTotal
def malvare_protection_enabled(wc, es, agent_id):
@@ -165,7 +168,7 @@ def malvare_protection_enabled(wc, es, agent_id):
def check_clamd_logs_elastic(es, agent_id):
s = Search(using=es, index="wazuh-alerts-*") \
.query("match", predecoder__program_name="clamd") \
.query("match", rule__description="Clamd restarted") \
.query("match", rule__descrhosttion="Clamd restarted") \
.query("match", agent__id=agent_id)
body = s.execute().to_dict()
@@ -186,9 +189,9 @@ def malvare_protection_enabled(wc, es, agent_id):
raw_evidence.append(evidence)
if result_virus_total or (result_lamd_process and result_clamd_logs):
return simple_evidence(get_id('05.4'), get_timestamp(), agent_id, "true", raw_evidence)
return simple_evidence(get_id('05.4'), get_timestamp(), agent_id, "malvare_protection_enabled", "true", raw_evidence)
else:
return simple_evidence(get_id('05.4'), get_timestamp(), agent_id, "false", raw_evidence)
return simple_evidence(get_id('05.4'), get_timestamp(), agent_id, "malvare_protection_enabled", "false", raw_evidence)
# Check last Syscheck & Rootcheck scan times
# TODO: When producing 'real' evidence, make sure to provide differentiation between Syscheck and Rootcheck outputs.
@@ -197,12 +200,15 @@ def check_last_scan_time(wc, agent_id):
measurement_result = body['data']['affected_items'][0]['end']
evidence1 = simple_evidence(get_id('05.4'), get_timestamp(), measurement_result, body)
evidence1 = simple_evidence(get_id('05.4'), get_timestamp(), "last_scan", measurement_result, body)
body = wc.req('GET', 'rootcheck/' + agent_id + '/last_scan')
measurement_result = body['data']['affected_items'][0]['end']
evidence2 = simple_evidence(get_id('05.4'), get_timestamp(), measurement_result, body)
evidence2 = simple_evidence(get_id('05.4'), get_timestamp(), "last_scan", measurement_result, body)
return evidence1, evidence2
if __name__ == "__main__":
run_full_check()
\ No newline at end of file