Skip to content
Snippets Groups Projects
Commit 272e93e3 authored by Etxaniz Errazkin, Iñaki's avatar Etxaniz Errazkin, Iñaki
Browse files

push to public

parents
Branches
No related tags found
No related merge requests found
CHANGELOG.md merge=union
# IntelliJ module files
*.iml
# Vert.x cache directory
.vertx/
# Log output and Maven build output
logs/
target/
# IntelliJ project directory
.idea/
# NetBeans configuration files
nbactions.xml
nb-configuration.xml
# Windows batch scripts
*.bat
\ No newline at end of file
# --- Build stage: compile the fat jar with Maven ---
FROM maven:3.6.0-jdk-11-slim AS builder
WORKDIR /home/app
# Copy the POM alone first so the dependency download layer is cached
# independently of source changes.
COPY pom.xml .
RUN mvn dependency:go-offline
COPY src src/
RUN mvn clean package

# --- Runtime stage ---
# NOTE(review): runtime JDK (14) is newer than the build JDK (11) — confirm
# this mismatch is intended; aligning both avoids subtle runtime surprises.
FROM openjdk:14-jdk
ENV VERTICLE_FILE=transforming-csv.jar
# Location of the verticles
ENV VERTICLE_HOME=/usr/verticles
EXPOSE 8080
# COPY creates $VERTICLE_HOME implicitly; the original's extra mkdirs and the
# leading "/" before $VERTICLE_HOME (which produced "//usr/verticles") are gone.
COPY --from=builder /home/app/target/$VERTICLE_FILE $VERTICLE_HOME/$VERTICLE_FILE
# Run as an unprivileged user; group-writable so the vertx group can manage files.
RUN groupadd vertx && useradd -g vertx vertx \
 && chown -R vertx $VERTICLE_HOME && chmod -R g+w $VERTICLE_HOME
USER vertx
# Launch the verticle; "sh -c" lets $JAVA_OPTS and $VERTICLE_FILE expand at
# runtime, and "exec" makes java PID 1 so it receives container signals.
WORKDIR $VERTICLE_HOME
ENTRYPOINT ["sh", "-c"]
CMD ["exec java $JAVA_OPTS -jar $VERTICLE_FILE"]
Copyright 2019 Fraunhofer FOKUS
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
# Transforming CSV
Microservice for transforming CSV files to JSON.
Example of a result:
```json
[
["ID", "Name", "Age"],
["1", "Jane", "25"],
["2", "John", "26"],
...
]
```
## Table of Contents
1. [Build](#build)
1. [Run](#run)
1. [License](#license)
## Build
Requirements:
* Git
* Docker
```bash
$ git clone <repo-uri>
$ cd <repo>
$ docker build -t piveau/<service-name> .
```
## Run
```bash
$ docker run -it -p 8080:8080 piveau/<service-name>
```
## License
[Apache License, Version 2.0](LICENSE.md)
---
openapi: 3.0.0
info:
  # Substituted by Maven resource filtering at build time; quoted so the
  # rendered value can never be re-typed by the YAML parser.
  version: "${project.version}"
  title: Pipe endpoint
  x-logo:
    url: "images/logo"
paths:
  /pipe:
    post:
      description: Sink for pipe object
      summary: receiving a pipe object
      operationId: incomingPipe
      requestBody:
        description: The pipe object
        required: true
        content:
          application/json:
            schema:
              $ref: '#/components/schemas/Pipe'
      responses:
        '202':
          description: Order accepted. Returns the runId. If not already set, one is generated
          content:
            text/plain:
              schema:
                type: string
  /config/schema:
    get:
      description: Returns the json schema for the segment configuration object
      summary: get the config schema
      operationId: configSchema
      responses:
        '200':
          description: JSON schema for the segment configuration object
          content:
            application/schema+json:
              schema:
                type: object
components:
  schemas:
    Pipe:
      description: The piveau pipe object
      type: object
      required:
        - header
        - body
      properties:
        header:
          $ref: '#/components/schemas/PipeHeader'
        body:
          $ref: '#/components/schemas/PipeBody'
    PipeHeader:
      description: All headers are set by the pipe orchestrator and shall not be altered by services
      type: object
      required:
        - id
        - name
        - transport
        - version
      properties:
        id:
          type: string
          format: uuid
        name:
          type: string
        runId:
          type: string
        startTime:
          type: string
          format: date-time
        lastRun:
          type: string
          format: date-time
        title:
          type: string
        context:
          type: string
        transport:
          description: "'payload': data transportation by payload in pipe; 'xxx': data transportation externally"
          # explicit type so validators treat the enum values as strings
          type: string
          enum:
            - payload
        version:
          description: Version of service_pipe_definition
          type: string
    PipeBody:
      type: object
      required:
        - segments
      properties:
        segments:
          type: array
          minItems: 1
          items:
            $ref: '#/components/schemas/Segment'
        payloads:
          type: array
          items:
            $ref: '#/components/schemas/Payload'
    Segment:
      type: object
      required:
        - header
        - body
      properties:
        header:
          $ref: '#/components/schemas/SegmentHeader'
        body:
          $ref: '#/components/schemas/SegmentBody'
    SegmentHeader:
      type: object
      required:
        - name
        - segmentNumber
        - processed
      properties:
        id:
          type: string
          format: uuid
        name:
          type: string
        segmentNumber:
          type: integer
          minimum: 0
        processed:
          type: boolean
        title:
          type: string
        errorHandlerId:
          type: string
          format: uuid
    SegmentBody:
      type: object
      properties:
        endpoint:
          $ref: '#/components/schemas/Endpoint'
        config:
          type: object
        payload:
          $ref: '#/components/schemas/Payload'
    Payload:
      type: object
      required:
        - header
        - body
      properties:
        header:
          $ref: '#/components/schemas/PayloadHeader'
        body:
          $ref: '#/components/schemas/PayloadBody'
    PayloadHeader:
      type: object
      required:
        - seqNumber
        - dataType
      properties:
        seqNumber:
          type: integer
          minimum: 0
        dataType:
          # explicit type so validators treat the enum values as strings
          type: string
          enum:
            - text
            - base64
        total:
          type: integer
          minimum: 0
        signed:
          type: boolean
          default: false
        signature:
          type: string
        signee:
          type: string
        encrypted:
          type: boolean
          default: false
    PayloadBody:
      type: object
      required:
        - data
      properties:
        key:
          type: string
        cipher:
          type: string
        dataMimeType:
          type: string
        dataInfo:
          type: object
        data:
          # plain text or base64-encoded binary (see PayloadHeader.dataType)
          anyOf:
            - type: string
            - type: string
              format: byte
    Endpoint:
      type: object
      properties:
        protocol:
          type: string
        address:
          type: string
        method:
          type: string
        authentication:
          type: object
          properties:
            authMethod:
              type: string
            credentials:
              type: object
              properties:
                username:
                  type: string
                password:
                  type: string
                  format: password
pom.xml 0 → 100644
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>io.piveau.consus</groupId>
    <artifactId>transforming-csv</artifactId>
    <version>1.0.0</version>
    <name>piveau-consus-transforming-csv</name>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <maven.build.timestamp.format>yyyy-MM-dd HH:mm</maven.build.timestamp.format>
        <maven.compiler.release>11</maven.compiler.release>
        <buildTimestamp>${maven.build.timestamp}</buildTimestamp>
        <vertx.version>4.0.3</vertx.version>
        <vertx.verticle>io.piveau.transforming.MainVerticle</vertx.verticle>
        <!-- Single property so both Jackson artifacts stay on the same release -->
        <jackson.version>2.12.1</jackson.version>
    </properties>

    <!-- Import the Vert.x BOM so vertx-* dependencies need no explicit version -->
    <dependencyManagement>
        <dependencies>
            <dependency>
                <groupId>io.vertx</groupId>
                <artifactId>vertx-stack-depchain</artifactId>
                <version>${vertx.version}</version>
                <type>pom</type>
                <scope>import</scope>
            </dependency>
        </dependencies>
    </dependencyManagement>

    <repositories>
        <repository>
            <id>paca</id>
            <name>paca</name>
            <url>https://paca.fokus.fraunhofer.de/repository/maven-public/</url>
        </repository>
    </repositories>

    <dependencies>
        <dependency>
            <groupId>io.vertx</groupId>
            <artifactId>vertx-core</artifactId>
        </dependency>
        <dependency>
            <groupId>io.vertx</groupId>
            <artifactId>vertx-config</artifactId>
        </dependency>
        <!-- NOTE(review): SNAPSHOT dependency in a release build - pin before tagging -->
        <dependency>
            <groupId>io.piveau.utils</groupId>
            <artifactId>piveau-utils</artifactId>
            <version>7.0.0-SNAPSHOT</version>
        </dependency>
        <dependency>
            <groupId>io.piveau.pipe</groupId>
            <artifactId>pipe-connector</artifactId>
            <version>5.2.1</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-core</artifactId>
            <version>${jackson.version}</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.dataformat</groupId>
            <artifactId>jackson-dataformat-csv</artifactId>
            <version>${jackson.version}</version>
        </dependency>
        <!-- Janino enables conditional processing in logback.xml (<if condition=...>) -->
        <dependency>
            <groupId>org.codehaus.janino</groupId>
            <artifactId>janino</artifactId>
            <version>3.1.3</version>
        </dependency>
        <dependency>
            <groupId>net.logstash.logback</groupId>
            <artifactId>logstash-logback-encoder</artifactId>
            <version>6.6</version>
        </dependency>
        <dependency>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-classic</artifactId>
            <version>1.2.3</version>
        </dependency>
    </dependencies>

    <build>
        <finalName>${project.artifactId}</finalName>
        <resources>
            <!-- Filter resources so ${project.version}/${buildTimestamp} placeholders are substituted -->
            <resource>
                <directory>src/main/resources</directory>
                <filtering>true</filtering>
            </resource>
        </resources>
        <plugins>
            <plugin>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.8.1</version>
            </plugin>
            <!-- Builds the runnable fat jar with ${vertx.verticle} as main verticle -->
            <plugin>
                <groupId>io.reactiverse</groupId>
                <artifactId>vertx-maven-plugin</artifactId>
                <version>1.0.23</version>
                <executions>
                    <execution>
                        <id>vmp</id>
                        <goals>
                            <goal>initialize</goal>
                            <goal>package</goal>
                        </goals>
                    </execution>
                </executions>
                <configuration>
                    <redeploy>true</redeploy>
                </configuration>
            </plugin>
            <plugin>
                <artifactId>maven-surefire-plugin</artifactId>
                <version>3.0.0-M5</version>
            </plugin>
            <!-- Test coverage reporting -->
            <plugin>
                <groupId>org.jacoco</groupId>
                <artifactId>jacoco-maven-plugin</artifactId>
                <version>0.8.6</version>
                <executions>
                    <execution>
                        <id>prepare-agent</id>
                        <goals>
                            <goal>prepare-agent</goal>
                        </goals>
                    </execution>
                    <execution>
                        <id>report</id>
                        <phase>test</phase>
                        <goals>
                            <goal>report</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>
\ No newline at end of file
# Exclude everything from the build context except the fat jar
# (the Dockerfile's builder stage compiles from source; only the jar is copied)
*
!target/transforming-csv.jar
package io.piveau.transforming;
/**
 * Names of environment variables for configuring Git access.
 * <p>
 * NOTE(review): none of these constants are referenced by the other classes
 * visible in this commit; they may be leftovers from a sibling transformer
 * that loads transformation scripts from Git — confirm before removing.
 */
public class ApplicationConfig {
    // URI of the Git repository to read from.
    static final String ENV_GIT_URI = "GIT_URI";
    // Credentials for the Git repository.
    static final String ENV_GIT_USER_NAME = "GIT_USER_NAME";
    static final String ENV_GIT_TOKEN = "GIT_TOKEN";
    // Branch to check out; falls back to DEFAULT_GIT_BRANCH when unset.
    static final String ENV_GIT_BRANCH = "GIT_BRANCH";
    static final String DEFAULT_GIT_BRANCH = "master";
}
package io.piveau.transforming;
import com.fasterxml.jackson.databind.MappingIterator;
import com.fasterxml.jackson.dataformat.csv.CsvMapper;
import com.fasterxml.jackson.dataformat.csv.CsvParser;
import io.piveau.pipe.PipeContext;
import io.vertx.core.AbstractVerticle;
import io.vertx.core.Promise;
import io.vertx.core.eventbus.Message;
import io.vertx.core.json.JsonArray;
import io.vertx.core.json.JsonObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
/**
 * Worker verticle that converts incoming CSV payloads to a JSON array of
 * rows (each row itself a JSON array of string cells) and forwards the
 * result along the pipe.
 */
public class CSVTransformingVerticle extends AbstractVerticle {

    /**
     * Event-bus address this verticle consumes pipe contexts on.
     * NOTE(review): the ".js." segment looks copied from a JavaScript
     * transformer; renaming would change the runtime bus address, so it is
     * deliberately left as-is.
     */
    public static final String ADDRESS = "io.piveau.pipe.transformation.js.queue";

    private final Logger log = LoggerFactory.getLogger(getClass());

    // CSV reader configured so each parsed row surfaces as one String[] value.
    CsvMapper csvMapper = new CsvMapper()
            .enable(CsvParser.Feature.WRAP_AS_ARRAY);

    /** Registers the event-bus consumer and completes startup immediately. */
    @Override
    public void start(Promise<Void> startPromise) {
        vertx.eventBus().consumer(ADDRESS, this::handlePipe);
        startPromise.complete();
    }

    /**
     * Parses the pipe's string payload as CSV and forwards it re-encoded as
     * pretty-printed JSON with MIME type application/json.
     */
    private void handlePipe(Message<PipeContext> message) {
        PipeContext context = message.body();
        context.log().trace("Incoming pipe");
        JsonArray rows = new JsonArray();
        try (MappingIterator<String[]> rowIterator = csvMapper
                .readerFor(String[].class)
                .readValues(context.getStringData())) {
            rowIterator.forEachRemaining(rows::add);
            context.setResult(rows.encodePrettily(), "application/json", context.getDataInfo()).forward();
        } catch (IOException e) {
            // NOTE(review): on parse failure the pipe is only logged, never
            // forwarded or marked failed — confirm the orchestrator copes
            // with runs that silently stop here.
            context.log().error("Failed to read CSV", e);
        }
    }
}
package io.piveau.transforming;
import io.piveau.pipe.connector.PipeConnector;
import io.vertx.core.*;
import java.util.Arrays;
/**
 * Entry verticle: deploys the CSV transforming worker and connects it to
 * the piveau pipe infrastructure.
 */
public class MainVerticle extends AbstractVerticle {

    /**
     * Deploys {@link CSVTransformingVerticle} on a worker pool, then creates
     * a pipe connector publishing to its event-bus address. Startup fails if
     * either step fails.
     */
    @Override
    public void start(Promise<Void> startPromise) {
        DeploymentOptions workerOptions = new DeploymentOptions()
                .setWorker(true)
                .setWorkerPoolSize(100);
        vertx.deployVerticle(CSVTransformingVerticle.class, workerOptions)
                .compose(deploymentId -> PipeConnector.create(vertx))
                .onSuccess(connector -> {
                    connector.publishTo(CSVTransformingVerticle.ADDRESS);
                    startPromise.complete();
                })
                .onFailure(startPromise::fail);
    }

    /**
     * CLI entry point: appends this class's name to the arguments and hands
     * off to the Vert.x launcher's "run" command.
     */
    public static void main(String[] args) {
        String[] runArgs = Arrays.copyOf(args, args.length + 1);
        runArgs[runArgs.length - 1] = MainVerticle.class.getName();
        Launcher.executeCommand("run", runArgs);
    }
}
{
"timestamp": "${buildTimestamp}",
"version": "${project.version}"
}
<configuration>
    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>%d [%thread] %-5level %logger{36} - %msg%n</pattern>
        </encoder>
    </appender>

    <!-- Appender for the "pipe" logger, chosen via environment variable
         (STDOUT, LOGSTASH or PIPEFILE); conditionals require Janino. -->
    <property name="pipeAppender" value="${PIVEAU_PIPE_LOG_APPENDER:-STDOUT}" />

    <if condition='property("pipeAppender").equals("LOGSTASH")'>
        <then>
            <appender name="LOGSTASH" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
                <destination>${PIVEAU_LOGSTASH_HOST:-logstash}:${PIVEAU_LOGSTASH_PORT:-5044}</destination>
                <encoder class="net.logstash.logback.encoder.LogstashEncoder" />
            </appender>
        </then>
    </if>

    <if condition='property("pipeAppender").equals("PIPEFILE")'>
        <then>
            <appender name="PIPEFILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
                <prudent>true</prudent>
                <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
                    <!-- daily rollover -->
                    <fileNamePattern>${PIVEAU_PIPE_LOG_PATH:-logs/piveau-pipe.%d{yyyy-MM-dd}.log}</fileNamePattern>
                    <!-- keep 10 days' worth of history, capped at 1GB total size
                         (original comment claimed 30 days / 3GB, contradicting the values below) -->
                    <maxHistory>10</maxHistory>
                    <totalSizeCap>1GB</totalSizeCap>
                </rollingPolicy>
                <encoder>
                    <pattern>[%d{yyyy-MM-dd HH:mm:ss.SSS}] %-5level %logger %msg%n</pattern>
                </encoder>
            </appender>
        </then>
    </if>

    <logger name="com.mchange.v2" level="warn"/>
    <logger name="io.netty" level="warn"/>
    <logger name="io.vertx" level="info"/>
    <!-- NOTE(review): debug is unusually verbose for Hazelcast in production — confirm intended -->
    <logger name="com.hazelcast" level="debug"/>
    <logger name="io.piveau" level="${PIVEAU_LOG_LEVEL:-INFO}"/>

    <!-- Dedicated pipe-run logger; additivity off so entries go only to the selected appender -->
    <logger name="pipe" level="${PIVEAU_PIPE_LOG_LEVEL:-INFO}" additivity="false">
        <appender-ref ref="${pipeAppender}" />
    </logger>

    <root level="info">
        <appender-ref ref="STDOUT" />
    </root>
</configuration>
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment