diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000000000000000000000000000000000000..3d204c27096f0b58439ec81b9fb751d167720044
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,5 @@
+__pycache__/
+*.pyc
+*$py.class
+.idea/
+dump.rdb
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
index 682c0c44dc668bc8ce21dd167aedd78deb028336..c541bbcc8f416c4d65cc8b337b715038127dba78 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -9,6 +9,6 @@ RUN pip3 install -r requirements.txt
 
 COPY . .
 
-RUN apt-get update && apt-get install -y redis-server
+RUN apt-get update && apt-get install -y redis-server jq
 
-ENTRYPOINT ["entrypoint.sh"]
\ No newline at end of file
+ENTRYPOINT ["./entrypoint.sh"]
\ No newline at end of file
diff --git a/README.md b/README.md
index 5afabb4828862e4b86b1629f3ad0d5ab52831c63..43e990d36bda74db328b561af8f329e0a9d93aa4 100644
--- a/README.md
+++ b/README.md
@@ -40,9 +40,11 @@ docker run evidence-collector
 
 ```
 pip install -r requirements.txt
+
+sudo apt-get install jq
 ```
 
-4. Install Redis server (or run it in a separate Docker container - in this case remove server start command from `entrypoint.sh`):
+4. a) Install Redis server locally:
 
 ```
 sudo apt-get install redis-server
@@ -50,7 +52,19 @@ sudo apt-get install redis-server
 
 > Note: To stop Redis server use `/etc/init.d/redis-server stop`.
 
-5. Run `entrypoint.sh`"
+4. b) Run Redis server in Docker container:
+
+```
+docker run --name my-redis-server -p 6379:6379 -d redis
+```
+
+In this case also comment-out server start command in `entrypoint.sh`:
+
+```
+#redis-server &
+```
+
+5. Run `entrypoint.sh`:
 
 ```
 ./entrypoint.sh
@@ -58,6 +72,8 @@ sudo apt-get install redis-server
 
 > Note: This repository consists of multiple Python modules. When running Python code manually, use of `-m` flag might be necessary.
 
+## Component configuration
+
 ### API User authentication
 
 Current implementation has disabled SSL certificate verification & uses simple username/password verification (defined inside `/constants/constants.py`). Production version should change this with cert verification.
diff --git a/constants.json b/constants.json
new file mode 100644
index 0000000000000000000000000000000000000000..4eae1cfe315ab7cd52ac265e06a63f3875d5f947
--- /dev/null
+++ b/constants.json
@@ -0,0 +1,19 @@
+{
+    "wazuh": {
+        "ip": "192.168.33.10",
+        "port": 55000,
+        "username": "wazuh-wui",
+        "password": "wazuh-wui"
+    },
+    "elastic": {
+        "ip": "192.168.33.10",
+        "port": 9200,
+        "username": "admin",
+        "password": "changeme"
+    },
+    "redis": {
+        "ip": "localhost",
+        "port": 6379,
+        "queue": "low"
+    }
+}
\ No newline at end of file
diff --git a/constants/constants.py b/constants/constants.py
deleted file mode 100644
index 247023ecaaea783e62a19d4b270ecb0931129451..0000000000000000000000000000000000000000
--- a/constants/constants.py
+++ /dev/null
@@ -1,13 +0,0 @@
-WAZUH_IP = '192.168.33.10'
-WAZUH_API_PORT = 55000
-WAZUH_USERNAME = 'wazuh-wui'
-WAZUH_PASSWORD = 'wazuh-wui'
-
-ELASTIC_IP = '192.168.33.10'
-ELASTIC_API_PORT = 9200
-ELASTIC_USERNAME = 'admin'
-ELASTIC_PASSWORD = 'changeme'
-
-REDIS_IP = 'localhost'
-REDIS_PORT = '6379'
-REDIS_QUEUE_NAME = 'low'
\ No newline at end of file
diff --git a/entrypoint.sh b/entrypoint.sh
index 32f3c3e67567bd11050ae5ee75be730a6b845d01..a1f23dece14c49b7ab8db8c27e277ee54dfaa8f6 100755
--- a/entrypoint.sh
+++ b/entrypoint.sh
@@ -1,9 +1,15 @@
 #!/bin/bash
 
-redis-server &
+redis_ip=$(cat constants.json | jq -r '.redis.ip')
+redis_port=$(cat constants.json | jq -r '.redis.port')
+redis_queue=$(cat constants.json | jq -r '.redis.queue')
 
-rqworker low &
+redis-server --port $redis_port &
 
-rqscheduler &
+rqworker $redis_queue &
 
-python3 -m scheduler.scheduler &
\ No newline at end of file
+rqscheduler --host $redis_ip --port $redis_port &
+
+python3 -m scheduler.scheduler
+
+tail -f /dev/null
\ No newline at end of file
diff --git a/forward_evidence/forward_evidence.py b/forward_evidence/forward_evidence.py
new file mode 100644
index 0000000000000000000000000000000000000000..ca4995b6fd802b81dc44ef4c7d89ee381b312e46
--- /dev/null
+++ b/forward_evidence/forward_evidence.py
@@ -0,0 +1,13 @@
+from evidence import evidence_pb2, evidence
+
+def create_grpc_message(ev):
+    ev_grpc = evidence_pb2.Evidence()
+
+    ev_grpc.id = ev.id
+    ev_grpc.timestamp = ev.timestamp
+    ev_grpc.resource_id = ev.resource_id
+    ev_grpc.service_id = ev.tool
+    ev_grpc.resource = ev.resource_type
+    ev_grpc.applicable_metrics = ev.measurement_result
+    ev_grpc.raw = ev.raw
+
diff --git a/proto/evidence.proto b/proto/evidence.proto
index ddd6159713ec3e0ca6c07ea1aa5d7d32028b3648..e9ed94fb901861f8c493df7efa32a2bbec66e2a6 100644
--- a/proto/evidence.proto
+++ b/proto/evidence.proto
@@ -5,26 +5,26 @@ import "google/protobuf/timestamp.proto";
 
 option go_package = "evidence";
 
-// TODO
-// Coppied from https://github.com/clouditor/clouditor/blob/main/proto/evidence.proto
+// TODO: Adapt to the final Evidence structure.
+// Copied from https://github.com/clouditor/clouditor/blob/main/proto/evidence.proto
 message Evidence {
-    string id = 1;
-  
-    string service_id = 2;
-  
-    string resource_id = 3;
-  
-    // TODO: replace with google/type/date.proto timestamp.proto or date.proto?
-    google.protobuf.Timestamp timestamp = 4;
-  
-    repeated int32 applicable_metrics = 5;
-  
-    // "raw" evidence (for the auditor), for example the raw JSON response from
-    // the API. This does not follow a defined schema
-    string raw = 6;
-  
-    // optional; a semantic representation of the Cloud resource according to our
-    // defined ontology. a JSON serialized node of our semantic graph. This may be
-    // Clouditor-specific.
-    google.protobuf.Value resource = 7;
-  }
\ No newline at end of file
+  string id = 1;
+
+  string service_id = 2;
+
+  string resource_id = 3;
+
+  // TODO: replace with google/type/date.proto timestamp.proto or date.proto?
+  google.protobuf.Timestamp timestamp = 4;
+
+  repeated int32 applicable_metrics = 5;
+
+  // "raw" evidence (for the auditor), for example the raw JSON response from
+  // the API. This does not follow a defined schema
+  string raw = 6;
+
+  // optional; a semantic representation of the Cloud resource according to our
+  // defined ontology. a JSON serialized node of our semantic graph. This may be
+  // Clouditor-specific.
+  google.protobuf.Value resource = 7;
+}
\ No newline at end of file
diff --git a/scheduler/scheduler.py b/scheduler/scheduler.py
index 5f5c0ee03b5eb83fd9f0a01ee77e0217c6c745bb..8662516e3092195e297423924e40d7af967e0c5b 100644
--- a/scheduler/scheduler.py
+++ b/scheduler/scheduler.py
@@ -1,9 +1,13 @@
+import json
 from redis import Redis
 from rq import Queue
 from rq_scheduler import Scheduler
-from constants import constants
 from wazuh_evidence_collector import wazuh_evidence_collector
 
+f = open('constants.json',)
+constants = json.load(f)
+f.close()
+
 def remove_jobs(scheduler):
     jobs = scheduler.get_jobs()
     for job in jobs:
@@ -14,20 +18,21 @@ def print_jobs(scheduler):
     for job in jobs:
         print(job)
 
-redis = Redis(constants.REDIS_IP, constants.REDIS_PORT)
-q = Queue(constants.REDIS_QUEUE_NAME, connection=redis)
+redis = Redis(constants['redis']['ip'], constants['redis']['port'])
+q = Queue(constants['redis']['queue'], connection=redis)
 scheduler = Scheduler(connection=redis)
 
 # TODO: Remove if needed
 remove_jobs(scheduler)
 
 # TODO: Change cron expression and repeat value for production verion.
+# Should probably be "0 0 * * *".
 scheduler.cron(
     '* * * * * ',               
     func=wazuh_evidence_collector.run_full_check,                 
     args=[],
     repeat=10,
-    queue_name='low',
+    queue_name=constants['redis']['queue'],
     use_local_timezone=False
 )
 
diff --git a/wazuh_evidence_collector/wazuh_evidence_collector.py b/wazuh_evidence_collector/wazuh_evidence_collector.py
index 9372fd0039fd6fec5fc1589f8d0b2e8105a762b6..dd4a859b28c2002923d6bddf4fa62dc7542f1b4a 100644
--- a/wazuh_evidence_collector/wazuh_evidence_collector.py
+++ b/wazuh_evidence_collector/wazuh_evidence_collector.py
@@ -1,3 +1,4 @@
+import json
 from wazuh_evidence_collector.wazuh_client import WazuhClient
 from elasticsearch import Elasticsearch
 from elasticsearch_dsl import Search
@@ -5,15 +6,19 @@ from evidence.evidence import Evidence, simple_evidence
 from random import randint
 from sys import maxsize
 from datetime import datetime
-from constants.constants import *
 import pprint
 
-wc = WazuhClient(WAZUH_IP, WAZUH_API_PORT, WAZUH_USERNAME, WAZUH_PASSWORD)
+f = open('constants.json',)
+constants = json.load(f)
+f.close()
+
+wc = WazuhClient(constants['wazuh']['ip'], constants['wazuh']['port'], constants['wazuh']['username'], constants['wazuh']['password'])
+
 es = Elasticsearch(
-        ELASTIC_IP,
-        http_auth=(ELASTIC_USERNAME, ELASTIC_PASSWORD),
+        constants['elastic']['ip'],
+        http_auth=(constants['elastic']['username'], constants['elastic']['password']),
         scheme='https',
-        port=ELASTIC_API_PORT,
+        port=constants['elastic']['port'],
         use_ssl=False,
         verify_certs=False,
         ssl_show_warn=False,
@@ -53,8 +58,7 @@ def run_full_check():
 
     # TODO: : Remove for production. This is only output for easier local testing.
     for evidence in agent_evidences:
-        pprint.pprint(evidence.toJson())
-
+        pprint.pprint(evidence.__dict__)
     
     return agent_evidences