Strongly typed Datastore configuration - Fixes #2088
- Updates from review
- Update datastore config checks to use isBlank()
- Update docs
johnaohara committed Jan 7, 2025
1 parent 9038d5c commit c34af61
Showing 24 changed files with 909 additions and 509 deletions.
5 changes: 3 additions & 2 deletions docs/site/content/en/docs/Integrations/elasticsearch/index.md
@@ -26,8 +26,9 @@ New Datastore
1. Select `Elasticsearch` from the `Datastore Type` dropdown
2. Provide a `Name` for the Datastore
3. Enter the `URL` for the Elasticsearch instance
4. Enter the `API Key` for the Elasticsearch instance, generated in step 1
5. Click `Save`
4. Select `api-key` from the `Authentication Type` dropdown
5. Enter the `API Key` for the Elasticsearch instance, generated in step 1
6. Click `Save`

## Test Configuration

Binary file modified docs/site/content/en/docs/Integrations/elasticsearch/modal.png
124 changes: 102 additions & 22 deletions docs/site/content/en/openapi/openapi.yaml
@@ -66,6 +66,21 @@ paths:
schema:
format: int32
type: integer
/api/config/datastore/types:
get:
tags:
- Config
description: Obtain list of available datastore types
operationId: datastoreTypes
responses:
"200":
description: OK
content:
application/json:
schema:
type: array
items:
$ref: "#/components/schemas/TypeConfig"
/api/config/datastore/{id}:
delete:
tags:
@@ -77,7 +92,8 @@ paths:
in: path
required: true
schema:
type: string
format: int32
type: integer
responses:
"204":
description: No Content
@@ -92,7 +108,8 @@ paths:
in: path
required: true
schema:
type: string
format: int32
type: integer
responses:
"200":
description: OK
@@ -2569,6 +2586,15 @@ paths:
description: No Content
components:
schemas:
APIKeyAuth:
type: object
properties:
type:
description: type
type: string
apiKey:
description: Api key
type: string
Access:
description: "Resources have different visibility within the UI. 'PUBLIC', 'PROTECTED'\
\ and 'PRIVATE'. Restricted resources are not visible to users who do not\
@@ -2710,17 +2736,26 @@ components:
CollectorApiDatastoreConfig:
description: Type of backend datastore
required:
- authentication
- builtIn
- apiKey
- url
type: object
properties:
authentication:
type: object
oneOf:
- $ref: "#/components/schemas/NoAuth"
- $ref: "#/components/schemas/APIKeyAuth"
- $ref: "#/components/schemas/UsernamePassAuth"
discriminator:
propertyName: type
mapping:
none: "#/components/schemas/NoAuth"
api-key: "#/components/schemas/APIKeyAuth"
username: "#/components/schemas/UsernamePassAuth"
builtIn:
description: Built In
type: boolean
apiKey:
description: Collector API KEY
type: string
url:
description: "Collector url, e.g. https://collector.foci.life/api/v1/image-stats"
type: string
@@ -2992,13 +3027,12 @@ components:
items:
$ref: "#/components/schemas/ValidationError"
Datastore:
description: Type of backend datastore
description: Instance of backend datastore
required:
- access
- owner
- id
- name
- builtIn
- config
- type
type: object
@@ -3029,14 +3063,10 @@ components:
\ Test definition"
type: string
example: Perf Elasticsearch
builtIn:
description: Is this a built-in datastore? Built-in datastores cannot be
deleted or modified
type: boolean
example: false
config:
type: object
oneOf:
- $ref: "#/components/schemas/CollectorApiDatastoreConfig"
- $ref: "#/components/schemas/ElasticsearchDatastoreConfig"
- $ref: "#/components/schemas/PostgresDatastoreConfig"
type:
@@ -3078,25 +3108,29 @@ components:
ElasticsearchDatastoreConfig:
description: Type of backend datastore
required:
- authentication
- builtIn
- url
type: object
properties:
authentication:
type: object
oneOf:
- $ref: "#/components/schemas/NoAuth"
- $ref: "#/components/schemas/APIKeyAuth"
- $ref: "#/components/schemas/UsernamePassAuth"
discriminator:
propertyName: type
mapping:
none: "#/components/schemas/NoAuth"
api-key: "#/components/schemas/APIKeyAuth"
username: "#/components/schemas/UsernamePassAuth"
builtIn:
description: Built In
type: boolean
apiKey:
description: Elasticsearch API KEY
type: string
url:
description: Elasticsearch url
type: string
username:
description: Elasticsearch username
type: string
password:
description: Elasticsearch password
type: string
ErrorDetails:
required:
- type
@@ -3675,6 +3709,12 @@ components:
testId:
format: int32
type: integer
NoAuth:
type: object
properties:
type:
description: type
type: string
PersistentLog:
description: Persistent Log
required:
@@ -3699,9 +3739,22 @@ components:
PostgresDatastoreConfig:
description: Built in backend datastore
required:
- authentication
- builtIn
type: object
properties:
authentication:
type: object
oneOf:
- $ref: "#/components/schemas/NoAuth"
- $ref: "#/components/schemas/APIKeyAuth"
- $ref: "#/components/schemas/UsernamePassAuth"
discriminator:
propertyName: type
mapping:
none: "#/components/schemas/NoAuth"
api-key: "#/components/schemas/APIKeyAuth"
username: "#/components/schemas/UsernamePassAuth"
builtIn:
description: Built In
type: boolean
@@ -4883,6 +4936,21 @@ components:
description: Transformer name
type: string
example: my-dataset-transformer
TypeConfig:
type: object
properties:
enumName:
type: string
name:
type: string
label:
type: string
supportedAuths:
type: array
items:
type: string
builtIn:
type: boolean
UserData:
required:
- id
@@ -4899,6 +4967,18 @@ components:
type: string
email:
type: string
UsernamePassAuth:
type: object
properties:
type:
description: type
type: string
username:
description: Username
type: string
password:
description: Password
type: string
ValidationError:
required:
- schemaId
horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/BaseDatastoreConfig.java
@@ -1,19 +1,51 @@
package io.hyperfoil.tools.horreum.api.data.datastore;

import jakarta.validation.constraints.NotNull;

import org.eclipse.microprofile.openapi.annotations.enums.SchemaType;
import org.eclipse.microprofile.openapi.annotations.media.DiscriminatorMapping;
import org.eclipse.microprofile.openapi.annotations.media.Schema;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;

import io.hyperfoil.tools.horreum.api.data.datastore.auth.APIKeyAuth;
import io.hyperfoil.tools.horreum.api.data.datastore.auth.NoAuth;
import io.hyperfoil.tools.horreum.api.data.datastore.auth.UsernamePassAuth;

public abstract class BaseDatastoreConfig {

@Schema(type = SchemaType.BOOLEAN, required = true, description = "Built In")
public Boolean builtIn = true;
@NotNull
@JsonProperty(required = true)
@Schema(type = SchemaType.OBJECT, discriminatorProperty = "type", discriminatorMapping = {
@DiscriminatorMapping(schema = NoAuth.class, value = NoAuth._TYPE),
@DiscriminatorMapping(schema = APIKeyAuth.class, value = APIKeyAuth._TYPE),
@DiscriminatorMapping(schema = UsernamePassAuth.class, value = UsernamePassAuth._TYPE)
}, oneOf = { //subtype mapping for openapi
NoAuth.class,
APIKeyAuth.class,
UsernamePassAuth.class
})
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
@JsonSubTypes({ //subtype mapping for jackson
@JsonSubTypes.Type(value = NoAuth.class, name = NoAuth._TYPE),
@JsonSubTypes.Type(value = APIKeyAuth.class, name = APIKeyAuth._TYPE),
@JsonSubTypes.Type(value = UsernamePassAuth.class, name = UsernamePassAuth._TYPE)
})
public Object authentication; //the python generator is failing if this is a concrete type

public BaseDatastoreConfig() {
}
@Schema(type = SchemaType.BOOLEAN, required = true, description = "Built In")
public Boolean builtIn;

public BaseDatastoreConfig(Boolean builtIn) {
this.builtIn = builtIn;
}

public BaseDatastoreConfig() {
this.builtIn = false;
}

public abstract String validateConfig();

}
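
A minimal sketch of how the discriminator wiring above behaves with Jackson. It assumes the `horreum-api` classes from this commit are on the classpath; the URL and key values are illustrative:

```java
import com.fasterxml.jackson.databind.ObjectMapper;

import io.hyperfoil.tools.horreum.api.data.datastore.ElasticsearchDatastoreConfig;
import io.hyperfoil.tools.horreum.api.data.datastore.auth.APIKeyAuth;

public class AuthDiscriminatorSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // The "type" property drives the @JsonTypeInfo/@JsonSubTypes lookup,
        // so "api-key" binds the authentication object to APIKeyAuth.
        String json = """
                {
                  "url": "http://localhost:9200",
                  "builtIn": false,
                  "authentication": { "type": "api-key", "apiKey": "my-key" }
                }
                """;
        ElasticsearchDatastoreConfig cfg = mapper.readValue(json, ElasticsearchDatastoreConfig.class);
        System.out.println(cfg.authentication instanceof APIKeyAuth); // true
    }
}
```
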
horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/CollectorApiDatastoreConfig.java
@@ -3,26 +3,30 @@
import org.eclipse.microprofile.openapi.annotations.enums.SchemaType;
import org.eclipse.microprofile.openapi.annotations.media.Schema;

import io.hyperfoil.tools.horreum.api.data.datastore.auth.APIKeyAuth;
import io.hyperfoil.tools.horreum.api.data.datastore.auth.NoAuth;

@Schema(type = SchemaType.OBJECT, required = true, description = "Type of backend datastore")
public class CollectorApiDatastoreConfig extends BaseDatastoreConfig {

public static final String[] auths = { NoAuth._TYPE, APIKeyAuth._TYPE };
public static final String name = "Collectorapi";
public static final String label = "Collector API";
public static final Boolean builtIn = false;

public CollectorApiDatastoreConfig() {
super(false);
}

@Schema(type = SchemaType.STRING, required = true, description = "Collector API KEY")
public String apiKey;

@Schema(type = SchemaType.STRING, required = true, description = "Collector url, e.g. https://collector.foci.life/api/v1/image-stats")
public String url;

@Override
public String validateConfig() {
if ("".equals(apiKey)) {
return "apiKey must be set";
}
if ("".equals(url)) {
return "url must be set";
if (authentication instanceof APIKeyAuth) {
APIKeyAuth apiKeyAuth = (APIKeyAuth) authentication;
if (apiKeyAuth.apiKey == null || apiKeyAuth.apiKey.isBlank()) {
return "apiKey must be set";
}
}

return null;
horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/Datastore.java
@@ -10,7 +10,7 @@

import io.hyperfoil.tools.horreum.api.data.ProtectedType;

@Schema(type = SchemaType.OBJECT, required = true, description = "Type of backend datastore")
@Schema(type = SchemaType.OBJECT, required = true, description = "Instance of backend datastore")
public class Datastore extends ProtectedType {
@JsonProperty(required = true)
@Schema(description = "Unique Datastore id", example = "101")
@@ -21,14 +21,10 @@ public class Datastore extends ProtectedType {
@Schema(description = "Name of the datastore, used to identify the datastore in the Test definition", example = "Perf Elasticsearch")
public String name;

@NotNull
@JsonProperty(required = true)
@Schema(description = "Is this a built-in datastore? Built-in datastores cannot be deleted or modified", example = "false")
public Boolean builtIn;

@NotNull
@JsonProperty(required = true)
@Schema(type = SchemaType.OBJECT, oneOf = {
CollectorApiDatastoreConfig.class,
ElasticsearchDatastoreConfig.class,
PostgresDatastoreConfig.class
})
horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/DatastoreType.java
@@ -1,32 +1,50 @@
package io.hyperfoil.tools.horreum.api.data.datastore;

import java.lang.reflect.Field;

import org.eclipse.microprofile.openapi.annotations.enums.SchemaType;
import org.eclipse.microprofile.openapi.annotations.media.Schema;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.core.type.TypeReference;

import io.quarkus.logging.Log;

@Schema(type = SchemaType.STRING, required = true, description = "Type of backend datastore")
public enum DatastoreType {
POSTGRES("POSTGRES", new TypeReference<PostgresDatastoreConfig>() {
}),
ELASTICSEARCH("ELASTICSEARCH", new TypeReference<ElasticsearchDatastoreConfig>() {
}),
COLLECTORAPI("COLLECTORAPI", new TypeReference<CollectorApiDatastoreConfig>() {
});
POSTGRES(PostgresDatastoreConfig.class),
ELASTICSEARCH(ElasticsearchDatastoreConfig.class),
COLLECTORAPI(CollectorApiDatastoreConfig.class);

private static final DatastoreType[] VALUES = values();

private final String label;
private final String name;
private final TypeReference<? extends BaseDatastoreConfig> typeReference;
private final String[] supportedAuths;
private final Boolean buildIn;
private final Class<? extends BaseDatastoreConfig> klass;

private <T extends BaseDatastoreConfig> DatastoreType(String name, TypeReference<T> typeReference) {
this.typeReference = typeReference;
this.name = name;
<T extends BaseDatastoreConfig> DatastoreType(Class<T> klass) {
this.klass = klass;
this.label = extractField(klass, "label");
this.name = extractField(klass, "name");
this.supportedAuths = extractField(klass, "auths");
this.buildIn = extractField(klass, "builtIn");
}

private static <T, K> T extractField(Class<K> klass, String name) {
try {
Field supportedAuthField = klass.getField(name);
return (T) supportedAuthField.get(null);
} catch (NoSuchFieldException | IllegalAccessException e) {
Log.errorf(e, "Could not extract field %s from class %s", name, klass.getName());
return null;
} catch (NullPointerException e) {
Log.errorf(e, "Could not extract field %s from class %s", name, klass.getName());
return null;
}
}

public <T extends BaseDatastoreConfig> TypeReference<T> getTypeReference() {
return (TypeReference<T>) typeReference;
public <T extends BaseDatastoreConfig> Class<T> getTypeReference() {
return (Class<T>) klass;
}

@JsonCreator
@@ -37,4 +55,25 @@ public static DatastoreType fromString(String str) {
return DatastoreType.valueOf(str);
}
}

public TypeConfig getConfig() {
return new TypeConfig(this, name, label, buildIn, supportedAuths);
}

public static class TypeConfig {
public String enumName;
public String name;
public String label;

public String[] supportedAuths;
public Boolean builtIn;

public TypeConfig(DatastoreType type, String name, String label, Boolean builtIn, String[] supportedAuths) {
this.enumName = type.name();
this.name = name;
this.label = label;
this.builtIn = builtIn;
this.supportedAuths = supportedAuths;
}
}
}
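
Because `extractField` reads the static `name`, `label`, `auths`, and `builtIn` constants reflectively from each config class, every enum constant can describe itself. A sketch of what `getConfig()` returns, with values taken from the constants declared in this commit:

```java
import io.hyperfoil.tools.horreum.api.data.datastore.DatastoreType;

public class TypeConfigSketch {
    public static void main(String[] args) {
        DatastoreType.TypeConfig cfg = DatastoreType.ELASTICSEARCH.getConfig();
        System.out.println(cfg.enumName); // ELASTICSEARCH
        System.out.println(cfg.name);     // Elasticsearch
        System.out.println(cfg.label);    // Elasticsearch
        System.out.println(String.join(",", cfg.supportedAuths)); // none,api-key,username
        System.out.println(cfg.builtIn);  // false
    }
}
```
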
horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/ElasticsearchDatastoreConfig.java
@@ -1,62 +1,44 @@
package io.hyperfoil.tools.horreum.api.data.datastore;

import static java.util.Objects.requireNonNullElse;

import org.eclipse.microprofile.openapi.annotations.enums.SchemaType;
import org.eclipse.microprofile.openapi.annotations.media.Schema;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.hyperfoil.tools.horreum.api.data.datastore.auth.APIKeyAuth;
import io.hyperfoil.tools.horreum.api.data.datastore.auth.NoAuth;
import io.hyperfoil.tools.horreum.api.data.datastore.auth.UsernamePassAuth;

@Schema(type = SchemaType.OBJECT, required = true, description = "Type of backend datastore")
public class ElasticsearchDatastoreConfig extends BaseDatastoreConfig {

public static final String[] auths = { NoAuth._TYPE, APIKeyAuth._TYPE, UsernamePassAuth._TYPE };
public static final String name = "Elasticsearch";
public static final String label = "Elasticsearch";
public static final Boolean builtIn = false;

public ElasticsearchDatastoreConfig() {
super(false);
}

@Schema(type = SchemaType.STRING, description = "Elasticsearch API KEY")
public String apiKey;
}

@Schema(type = SchemaType.STRING, required = true, description = "Elasticsearch url")
public String url;

@Schema(type = SchemaType.STRING, description = "Elasticsearch username")
public String username;

@Schema(type = SchemaType.STRING, description = "Elasticsearch password")
@JsonIgnore
public String password;

@JsonProperty("password")
public void setSecrets(String password) {
this.password = password;
}

@JsonProperty("password")
public String getMaskedSecrets() {
if (this.password != null) {
return "********";
} else {
return null;
}
}

@Override
public String validateConfig() {

String _apiKey = requireNonNullElse(apiKey, "");
String _username = requireNonNullElse(username, "");
String _password = requireNonNullElse(password, "");

if ("".equals(_apiKey) && ("".equals(_username) || "".equals(_password))) {
return "Either apiKey or username and password must be set";
//TODO: replace with pattern matching after upgrading to java 17
if (authentication instanceof APIKeyAuth) {
APIKeyAuth apiKeyAuth = (APIKeyAuth) authentication;
if (apiKeyAuth.apiKey == null || apiKeyAuth.apiKey.isBlank()) {
return "apiKey must be set";
}
} else if (authentication instanceof UsernamePassAuth) {
UsernamePassAuth usernamePassAuth = (UsernamePassAuth) authentication;

if (usernamePassAuth.username == null || usernamePassAuth.username.isBlank()
|| usernamePassAuth.password == null || usernamePassAuth.password.isBlank()) {
return "username and password must be set";
}
}

if (!"".equals(_apiKey) && !("".equals(_username) || "".equals(_password))) {
return "Only apiKey or username and password can be set";
}

return null;
}
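
The branch-per-auth-type validation above gives behavior along these lines (a sketch; note that a `NoAuth` instance falls through both `instanceof` checks and validates cleanly):

```java
import io.hyperfoil.tools.horreum.api.data.datastore.ElasticsearchDatastoreConfig;
import io.hyperfoil.tools.horreum.api.data.datastore.auth.APIKeyAuth;
import io.hyperfoil.tools.horreum.api.data.datastore.auth.NoAuth;

public class ValidateConfigSketch {
    public static void main(String[] args) {
        ElasticsearchDatastoreConfig cfg = new ElasticsearchDatastoreConfig();
        cfg.url = "http://localhost:9200";

        cfg.authentication = new NoAuth();
        System.out.println(cfg.validateConfig()); // null, i.e. valid

        cfg.authentication = new APIKeyAuth();    // apiKey left unset
        System.out.println(cfg.validateConfig()); // "apiKey must be set"
    }
}
```
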

horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/PostgresDatastoreConfig.java
@@ -3,9 +3,20 @@
import org.eclipse.microprofile.openapi.annotations.enums.SchemaType;
import org.eclipse.microprofile.openapi.annotations.media.Schema;

import io.hyperfoil.tools.horreum.api.data.datastore.auth.NoAuth;

@Schema(type = SchemaType.OBJECT, required = true, description = "Built in backend datastore")
public class PostgresDatastoreConfig extends BaseDatastoreConfig {

public static final String[] auths = { NoAuth._TYPE };
public static final String name = "Postgres";
public static final String label = "Postgres";
public static final Boolean builtIn = true;

public PostgresDatastoreConfig() {
super(true);
}

@Override
public String validateConfig() {
return null;
horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/auth/APIKeyAuth.java
@@ -0,0 +1,18 @@
package io.hyperfoil.tools.horreum.api.data.datastore.auth;

import org.eclipse.microprofile.openapi.annotations.enums.SchemaType;
import org.eclipse.microprofile.openapi.annotations.media.Schema;

public class APIKeyAuth {
public static final String _TYPE = "api-key";

@Schema(type = SchemaType.STRING, description = "type")
public String type;

@Schema(type = SchemaType.STRING, description = "Api key")
public String apiKey;

public APIKeyAuth() {
this.type = _TYPE;
}
}
horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/auth/NoAuth.java
@@ -0,0 +1,15 @@
package io.hyperfoil.tools.horreum.api.data.datastore.auth;

import org.eclipse.microprofile.openapi.annotations.enums.SchemaType;
import org.eclipse.microprofile.openapi.annotations.media.Schema;

public class NoAuth {
public static final String _TYPE = "none";

@Schema(type = SchemaType.STRING, description = "type")
public String type;

public NoAuth() {
this.type = _TYPE;
}
}
horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/data/datastore/auth/UsernamePassAuth.java
@@ -0,0 +1,21 @@
package io.hyperfoil.tools.horreum.api.data.datastore.auth;

import org.eclipse.microprofile.openapi.annotations.enums.SchemaType;
import org.eclipse.microprofile.openapi.annotations.media.Schema;

public class UsernamePassAuth {
public static final String _TYPE = "username";

@Schema(type = SchemaType.STRING, description = "type")
public String type;

@Schema(type = SchemaType.STRING, description = "Username")
public String username;

@Schema(type = SchemaType.STRING, description = "Password")
public String password;

public UsernamePassAuth() {
this.type = _TYPE;
}
}
horreum-api/src/main/java/io/hyperfoil/tools/horreum/api/services/ConfigService.java
@@ -19,6 +19,7 @@
import com.fasterxml.jackson.annotation.JsonProperty;

import io.hyperfoil.tools.horreum.api.data.datastore.Datastore;
import io.hyperfoil.tools.horreum.api.data.datastore.DatastoreType;
import io.quarkus.runtime.Startup;

@Startup
@@ -49,6 +50,11 @@ public interface ConfigService {
})
List<Datastore> datastores(@PathParam("team") String team);

@GET
@Path("datastore/types")
@Operation(description = "Obtain list of available datastore types")
List<DatastoreType.TypeConfig> datastoreTypes();

@POST
@Path("datastore")
@Operation(description = "Create a new Datastore")
@@ -66,12 +72,12 @@ public interface ConfigService {
@GET
@Path("datastore/{id}/test")
@Operation(description = "Test a Datastore connection")
DatastoreTestResponse testDatastore(@PathParam("id") String datastoreId);
DatastoreTestResponse testDatastore(@PathParam("id") Integer datastoreId);

@DELETE
@Path("datastore/{id}")
@Operation(description = "Test a Datastore")
void deleteDatastore(@PathParam("id") String datastoreId);
void deleteDatastore(@PathParam("id") Integer datastoreId);

class VersionInfo {
@Schema(description = "Version of Horreum", example = "0.9.4")
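
A hedged sketch of calling the new `datastore/types` endpoint declared above; the base URL is an assumption, and a real deployment would also need the usual authentication headers:

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class DatastoreTypesCall {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/api/config/datastore/types"))
                .header("Accept", "application/json")
                .GET()
                .build();
        // Expected: a JSON array of TypeConfig objects
        // (enumName, name, label, supportedAuths, builtIn).
        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body());
    }
}
```
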

This file was deleted: horreum-backend/src/main/java/io/hyperfoil/tools/horreum/datastore/BackendResolver.java (superseded by the new DatastoreResolver below).

horreum-backend/src/main/java/io/hyperfoil/tools/horreum/datastore/CollectorApiDatastore.java
@@ -25,6 +25,7 @@

import io.hyperfoil.tools.horreum.api.data.datastore.CollectorApiDatastoreConfig;
import io.hyperfoil.tools.horreum.api.data.datastore.DatastoreType;
import io.hyperfoil.tools.horreum.api.data.datastore.auth.APIKeyAuth;
import io.hyperfoil.tools.horreum.entity.backend.DatastoreConfigDAO;
import io.hyperfoil.tools.horreum.svc.ServiceException;

@@ -66,8 +67,10 @@ public DatastoreResponse handleRun(JsonNode payload,
+ "&newerThan=" + newerThan
+ "&olderThan=" + olderThan);
HttpRequest.Builder builder = HttpRequest.newBuilder().uri(uri);
builder.header("Content-Type", "application/json")
.header("token", jsonDatastoreConfig.apiKey);
builder.header("Content-Type", "application/json");
if (jsonDatastoreConfig.authentication instanceof APIKeyAuth) {
builder.header("token", ((APIKeyAuth) jsonDatastoreConfig.authentication).apiKey);
}
HttpRequest request = builder.build();
HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
if (response.statusCode() != Response.Status.OK.getStatusCode()) {
@@ -93,10 +96,15 @@ private static void verifyPayload(ObjectMapper mapper, CollectorApiDatastoreConf
// Verify that the tag is in the distinct list of tags
URI tagsUri = URI.create(jsonDatastoreConfig.url + "/tags/distinct");
HttpRequest.Builder tagsBuilder = HttpRequest.newBuilder().uri(tagsUri);
HttpRequest tagsRequest = tagsBuilder
.header("Content-Type", "application/json")
.header("token", jsonDatastoreConfig.apiKey).build();
HttpResponse<String> response = client.send(tagsRequest, HttpResponse.BodyHandlers.ofString());

tagsBuilder
.header("Content-Type", "application/json");

if (jsonDatastoreConfig.authentication instanceof APIKeyAuth) {
tagsBuilder
.header("token", ((APIKeyAuth) jsonDatastoreConfig.authentication).apiKey);
}
HttpResponse<String> response = client.send(tagsBuilder.build(), HttpResponse.BodyHandlers.ofString());
String[] distinctTags;
try {
distinctTags = mapper.readValue(response.body(), String[].class);
@@ -141,7 +149,9 @@ private static CollectorApiDatastoreConfig getCollectorApiDatastoreConfig(Datast
log.error("Could not find collector API datastore: " + configuration.name);
throw ServiceException.serverError("Could not find CollectorAPI datastore: " + configuration.name);
}
assert jsonDatastoreConfig.apiKey != null : "API key must be set";
if (jsonDatastoreConfig.authentication instanceof APIKeyAuth) {
assert ((APIKeyAuth) jsonDatastoreConfig.authentication).apiKey != null : "API key must be set";
}
assert jsonDatastoreConfig.url != null : "URL must be set";
return jsonDatastoreConfig;
}
horreum-backend/src/main/java/io/hyperfoil/tools/horreum/datastore/DatastoreResolver.java
@@ -0,0 +1,46 @@
package io.hyperfoil.tools.horreum.datastore;

import java.util.List;

import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;

import io.hyperfoil.tools.horreum.api.data.datastore.DatastoreType;
import io.hyperfoil.tools.horreum.svc.ServiceException;
import io.quarkus.arc.All;

@ApplicationScoped
public class DatastoreResolver {
@Inject
@All
List<Datastore> datastores;

public Datastore getDatastore(DatastoreType type) {
return datastores.stream()
.filter(store -> store.type().equals(type))
.findFirst()
.orElseThrow(() -> new IllegalStateException("Unknown datastore type: " + type));
}

public void validatedDatastoreConfig(DatastoreType type, Object config) {
io.hyperfoil.tools.horreum.datastore.Datastore datastoreImpl;
try {
datastoreImpl = this.getDatastore(type);
} catch (IllegalStateException e) {
throw ServiceException.badRequest("Unknown datastore type: " + type
+ ". Please try again, if the problem persists please contact the system administrator.");
}

if (datastoreImpl == null) {
throw ServiceException.badRequest("Unknown datastore type: " + type);
}

String error = datastoreImpl.validateConfig(config);

if (error != null) {
throw ServiceException.badRequest(error);
}

}

}
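
The `@All List<Datastore>` injection collects every `Datastore` bean Quarkus discovers, so adding a new backend only requires a new implementation class. A sketch of how a service would consume the resolver (a hypothetical caller, assuming a CDI context; `type` and `config` would come from a client payload):

```java
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;

import com.fasterxml.jackson.databind.JsonNode;

import io.hyperfoil.tools.horreum.api.data.datastore.DatastoreType;
import io.hyperfoil.tools.horreum.datastore.DatastoreResolver;

@ApplicationScoped
public class DatastoreConsumerSketch {
    @Inject
    DatastoreResolver resolver;

    public void create(DatastoreType type, JsonNode config) {
        // Throws ServiceException.badRequest(...) when the type is unknown
        // or the configuration fails the type-specific validateConfig().
        resolver.validatedDatastoreConfig(type, config);
    }
}
```
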
horreum-backend/src/main/java/io/hyperfoil/tools/horreum/datastore/ElasticsearchDatastore.java
@@ -2,9 +2,7 @@

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;

import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;
@@ -32,6 +30,8 @@

import io.hyperfoil.tools.horreum.api.data.datastore.DatastoreType;
import io.hyperfoil.tools.horreum.api.data.datastore.ElasticsearchDatastoreConfig;
import io.hyperfoil.tools.horreum.api.data.datastore.auth.APIKeyAuth;
import io.hyperfoil.tools.horreum.api.data.datastore.auth.UsernamePassAuth;
import io.hyperfoil.tools.horreum.entity.backend.DatastoreConfigDAO;

@ApplicationScoped
@@ -42,8 +42,6 @@ public class ElasticsearchDatastore implements Datastore {
@Inject
ObjectMapper mapper;

Map<String, RestClient> hostCache = new ConcurrentHashMap<>();

@Override
public DatastoreResponse handleRun(JsonNode payload,
JsonNode metaData,
@@ -67,18 +65,27 @@ public DatastoreResponse handleRun(JsonNode payload,
if (elasticsearchDatastoreConfig != null) {

RestClientBuilder builder = RestClient.builder(HttpHost.create(elasticsearchDatastoreConfig.url));
if (elasticsearchDatastoreConfig.apiKey != null) {

if (elasticsearchDatastoreConfig.authentication instanceof APIKeyAuth) {

APIKeyAuth apiKeyAuth = (((APIKeyAuth) elasticsearchDatastoreConfig.authentication));

builder.setDefaultHeaders(new Header[] {
new BasicHeader("Authorization", "ApiKey " + elasticsearchDatastoreConfig.apiKey)
new BasicHeader("Authorization", "ApiKey " + apiKeyAuth.apiKey)
});
} else {

} else if (elasticsearchDatastoreConfig.authentication instanceof UsernamePassAuth) {
final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();

UsernamePassAuth usernamePassAuth = (((UsernamePassAuth) elasticsearchDatastoreConfig.authentication));

credentialsProvider.setCredentials(AuthScope.ANY,
new UsernamePasswordCredentials(elasticsearchDatastoreConfig.username,
elasticsearchDatastoreConfig.password));
new UsernamePasswordCredentials(usernamePassAuth.username,
usernamePassAuth.password));

builder.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder
.setDefaultCredentialsProvider(credentialsProvider));

}

restClient = builder.build();
@@ -146,62 +153,64 @@ public DatastoreResponse handleRun(JsonNode payload,
throw new BadRequestException("Schema is required for search requests");
}

//TODO: error handling
final MultiIndexQuery multiIndexQuery = mapper.treeToValue(apiRequest.query, MultiIndexQuery.class);

//1st retrieve the list of docs from 1st Index
request = new Request(
"GET",
"/" + apiRequest.index + "/_search");
try {
final MultiIndexQuery multiIndexQuery = mapper.treeToValue(apiRequest.query, MultiIndexQuery.class);
//1st retrieve the list of docs from 1st Index
request = new Request(
"GET",
"/" + apiRequest.index + "/_search");

request.setJsonEntity(mapper.writeValueAsString(multiIndexQuery.metaQuery));
finalString = extracted(restClient, request);
request.setJsonEntity(mapper.writeValueAsString(multiIndexQuery.metaQuery));
finalString = extracted(restClient, request);

elasticResults = (ArrayNode) mapper.readTree(finalString).get("hits").get("hits");
extractedResults = mapper.createArrayNode();
elasticResults = (ArrayNode) mapper.readTree(finalString).get("hits").get("hits");
extractedResults = mapper.createArrayNode();

//2nd retrieve the docs from 2nd Index and combine into a single result with metadata and doc contents
final RestClient finalRestClient = restClient; //copy of restClient for use in lambda
//2nd retrieve the docs from 2nd Index and combine into a single result with metadata and doc contents
final RestClient finalRestClient = restClient; //copy of restClient for use in lambda

elasticResults.forEach(jsonNode -> {
elasticResults.forEach(jsonNode -> {

ObjectNode result = ((ObjectNode) jsonNode.get("_source")).put("$schema", schemaUri);
String docString = """
{
"error": "Could not retrieve doc from secondary index"
"msg": "ERR_MSG"
}
""";
ObjectNode result = ((ObjectNode) jsonNode.get("_source")).put("$schema", schemaUri);
String docString = """
{
"error": "Could not retrieve doc from secondary index"
"msg": "ERR_MSG"
}
""";

var subRequest = new Request(
"GET",
"/" + multiIndexQuery.targetIndex + "/_doc/"
+ jsonNode.get("_source").get(multiIndexQuery.docField).textValue());
var subRequest = new Request(
"GET",
"/" + multiIndexQuery.targetIndex + "/_doc/"
+ jsonNode.get("_source").get(multiIndexQuery.docField).textValue());

try {
docString = extracted(finalRestClient, subRequest);
try {
docString = extracted(finalRestClient, subRequest);

} catch (IOException e) {
} catch (IOException e) {

docString.replaceAll("ERR_MSG", e.getMessage());
String msg = String.format("Could not query doc request: index: %s; docID: %s (%s)",
multiIndexQuery.targetIndex, multiIndexQuery.docField, e.getMessage());
log.error(msg);
}
docString = docString.replaceAll("ERR_MSG", e.getMessage());
String msg = String.format("Could not query doc request: index: %s; docID: %s (%s)",
multiIndexQuery.targetIndex, multiIndexQuery.docField, e.getMessage());
log.error(msg);
}

try {
result.put("$doc", mapper.readTree(docString));
} catch (JsonProcessingException e) {
docString.replaceAll("ERR_MSG", e.getMessage());
String msg = String.format("Could not parse doc result: %s, %s", docString, e.getMessage());
log.error(msg);
}
try {
result.put("$doc", mapper.readTree(docString));
} catch (JsonProcessingException e) {
docString = docString.replaceAll("ERR_MSG", e.getMessage());
String msg = String.format("Could not parse doc result: %s, %s", docString, e.getMessage());
log.error(msg);
}

extractedResults.add(result);
extractedResults.add(result);

});
});

return new DatastoreResponse(extractedResults, payload);
return new DatastoreResponse(extractedResults, payload);
} catch (JsonProcessingException e) {
throw new RuntimeException("Could not process json query: " + e.getMessage());
}

default:
throw new BadRequestException("Invalid request type: " + apiRequest.type);
horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/ConfigServiceImpl.java
@@ -1,9 +1,6 @@
package io.hyperfoil.tools.horreum.svc;

import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.*;
import java.util.stream.Collectors;

import jakarta.annotation.security.PermitAll;
@@ -20,8 +17,9 @@
import io.hyperfoil.tools.horreum.api.Version;
import io.hyperfoil.tools.horreum.api.data.Access;
import io.hyperfoil.tools.horreum.api.data.datastore.Datastore;
import io.hyperfoil.tools.horreum.api.data.datastore.DatastoreType;
import io.hyperfoil.tools.horreum.api.services.ConfigService;
import io.hyperfoil.tools.horreum.datastore.BackendResolver;
import io.hyperfoil.tools.horreum.datastore.DatastoreResolver;
import io.hyperfoil.tools.horreum.entity.backend.DatastoreConfigDAO;
import io.hyperfoil.tools.horreum.mapper.DatasourceMapper;
import io.hyperfoil.tools.horreum.server.WithRoles;
@@ -32,6 +30,10 @@ public class ConfigServiceImpl implements ConfigService {

private static final Logger log = Logger.getLogger(ConfigServiceImpl.class);

//cache available dataStore configurations
private static final List<DatastoreType.TypeConfig> datastoreTypes = Arrays.stream(DatastoreType.values())
.map(DatastoreType::getConfig).toList();

@ConfigProperty(name = "horreum.privacy")
Optional<String> privacyStatement;

@@ -42,7 +44,7 @@ public class ConfigServiceImpl implements ConfigService {
EntityManager em;

@Inject
BackendResolver backendResolver;
DatastoreResolver backendResolver;

@Override
public KeycloakConfig keycloak() {
@@ -80,6 +82,11 @@ public List<Datastore> datastores(String team) {
}
}

@Override
public List<DatastoreType.TypeConfig> datastoreTypes() {
return datastoreTypes;
}

@Override
@RolesAllowed(Roles.TESTER)
@WithRoles
@@ -102,29 +109,13 @@ public Integer newDatastore(Datastore datastore) {
} else if (!identity.getRoles().contains(dao.owner)) {
log.debugf("Failed to create datastore %s: requested owner %s, available roles: %s", dao.name, dao.owner,
identity.getRoles());
throw ServiceException.badRequest("This user does not have permissions to upload datastore for owner=" + dao.owner);
throw ServiceException.badRequest("This user does not have permissions to create datastore for owner=" + dao.owner);
}
if (dao.access == null) {
dao.access = Access.PRIVATE;
}

io.hyperfoil.tools.horreum.datastore.Datastore datastoreImpl;
try {
datastoreImpl = backendResolver.getBackend(datastore.type);
} catch (IllegalStateException e) {
throw ServiceException.badRequest("Unknown datastore type: " + datastore.type
+ ". Please try again, if the problem persists please contact the system administrator.");
}

if (datastoreImpl == null) {
throw ServiceException.badRequest("Unknown datastore type: " + datastore.type);
}

String error = datastoreImpl.validateConfig(datastore.config);

if (error != null) {
throw ServiceException.badRequest(error);
}
backendResolver.validatedDatastoreConfig(datastore.type, datastore.config);

log.debugf("Creating new Datastore with owner=%s and access=%s", dao.owner, dao.access);

@@ -144,18 +135,20 @@ public Integer newDatastore(Datastore datastore) {
@RolesAllowed(Roles.TESTER)
@WithRoles
@Transactional
public Integer updateDatastore(Datastore backend) {
DatastoreConfigDAO dao = DatastoreConfigDAO.findById(backend.id);
public Integer updateDatastore(Datastore datastore) {
DatastoreConfigDAO dao = DatastoreConfigDAO.findById(datastore.id);
if (dao == null)
throw ServiceException.notFound("Datastore with id " + backend.id + " does not exist");
throw ServiceException.notFound("Datastore with id " + datastore.id + " does not exist");

DatastoreConfigDAO newDao = DatasourceMapper.to(backend);
DatastoreConfigDAO newDao = DatasourceMapper.to(datastore);

dao.type = newDao.type;
dao.name = newDao.name;
dao.configuration = newDao.configuration;
dao.access = newDao.access;

backendResolver.validatedDatastoreConfig(datastore.type, datastore.config);

dao.persist();

return dao.id;
@@ -166,16 +159,16 @@ public Integer updateDatastore(Datastore backend) {
@RolesAllowed(Roles.TESTER)
@WithRoles
@Transactional
public DatastoreTestResponse testDatastore(String datastoreId) {
public DatastoreTestResponse testDatastore(Integer datastoreId) {
return null;
}

@Override
@RolesAllowed(Roles.TESTER)
@WithRoles
@Transactional
public void deleteDatastore(String datastoreId) {
DatastoreConfigDAO.deleteById(Integer.parseInt(datastoreId));
public void deleteDatastore(Integer datastoreId) {
DatastoreConfigDAO.deleteById(datastoreId);
}

private String getString(String propertyName) {
horreum-backend/src/main/java/io/hyperfoil/tools/horreum/svc/RunServiceImpl.java
@@ -67,8 +67,8 @@
import io.hyperfoil.tools.horreum.api.services.SchemaService;
import io.hyperfoil.tools.horreum.api.services.TestService;
import io.hyperfoil.tools.horreum.bus.AsyncEventChannels;
import io.hyperfoil.tools.horreum.datastore.BackendResolver;
import io.hyperfoil.tools.horreum.datastore.Datastore;
import io.hyperfoil.tools.horreum.datastore.DatastoreResolver;
import io.hyperfoil.tools.horreum.datastore.DatastoreResponse;
import io.hyperfoil.tools.horreum.entity.PersistentLogDAO;
import io.hyperfoil.tools.horreum.entity.alerting.DataPointDAO;
@@ -148,7 +148,7 @@ WHEN jsonb_typeof(data) = 'array' THEN ?1 IN (SELECT jsonb_array_elements(data)-
@Inject
ServiceMediator mediator;
@Inject
BackendResolver backendResolver;
DatastoreResolver backendResolver;

@Inject
Session session;
@@ -440,17 +440,13 @@ public Response addRunFromData(String start, String stop, String test, String ow
}

/**
* Processes and persists a run or multiple runs based on the provided data and metadata.
* It performs the following steps:
* - Validates and parses the input data string into a JSON structure.
* - Resolves the appropriate datastore to handle the run processing.
* - Handles single or multiple runs based on the datastore's response type.
* - Persists runs and their associated datasets in the database.
* - Queues dataset recalculation tasks for further processing.
* Processes and persists a run or multiple runs based on the provided data and metadata. It performs the following steps: -
* Validates and parses the input data string into a JSON structure. - Resolves the appropriate datastore to handle the run
* processing. - Handles single or multiple runs based on the datastore's response type. - Persists runs and their
* associated datasets in the database. - Queues dataset recalculation tasks for further processing.
*
* If the response, in the case of datastore, contains more than 10 runs,
* the processing of the entire run is offloaded to an asynchronous queue.
* For fewer runs, processing occurs synchronously.
* If the response, in the case of datastore, contains more than 10 runs, the processing of the entire run is offloaded to
* an asynchronous queue. For fewer runs, processing occurs synchronously.
*
* @param start the start time for the run
* @param stop the stop time for the run
@@ -490,7 +486,8 @@ Response addRunFromData(String start, String stop, String test,

TestDAO testEntity = testService.ensureTestExists(testNameOrId);

Datastore datastore = backendResolver.getBackend(testEntity.backendConfig.type);
Datastore datastore = backendResolver.getDatastore(testEntity.backendConfig.type);

DatastoreResponse response = datastore.handleRun(data, metadata, testEntity.backendConfig,
Optional.ofNullable(schemaUri));

@@ -619,12 +616,10 @@ private Object findIfNotSet(String value, JsonNode data) {
}

/**
* Adds a new authenticated run to the database with appropriate ownership and access settings.
* This method performs the following tasks:
* - Ensures the run's ID is reset and metadata is correctly handled.
* - Determines the owner of the run, defaulting to a specific uploader role if no owner is provided.
* - Validates ownership permissions against the user's roles.
* - Persists or updates the run in the database and handles related datasets.
* Adds a new authenticated run to the database with appropriate ownership and access settings. This method performs the
* following tasks: - Ensures the run's ID is reset and metadata is correctly handled. - Determines the owner of the run,
* defaulting to a specific uploader role if no owner is provided. - Validates ownership permissions against the user's
* roles. - Persists or updates the run in the database and handles related datasets.
*
* @param run the RunDAO object containing the run details
* @param test the TestDAO object containing the test details
@@ -1134,15 +1129,15 @@ public void recalculateAll(String fromStr, String toStr) {
}

/**
* Transforms the data for a given run by applying applicable schemas and transformers.
* It ensures any existing datasets for the run are removed before creating new ones,
* handles timeouts for ongoing transformations, and creates datasets with the transformed data.
* If the flag {isRecalculation} is set to true the label values recalculation is performed
* right away synchronously otherwise it is completely skipped and let to the caller trigger it
* Transforms the data for a given run by applying applicable schemas and transformers. It ensures any existing datasets for
* the run are removed before creating new ones, handles timeouts for ongoing transformations, and creates datasets with the
* transformed data. If the flag {isRecalculation} is set to true the label values recalculation is performed right away
* synchronously otherwise it is completely skipped and let to the caller trigger it
*
* @param runId the ID of the run to transform
* @param isRecalculation flag indicating if this is a recalculation
* @return the list of datasets ids that have been created, or empty list if the run is invalid or not found or already ongoing
* @return the list of datasets ids that have been created, or empty list if the run is invalid or not found or already
* ongoing
*/
@WithRoles(extras = Roles.HORREUM_SYSTEM)
@Transactional
@@ -1374,9 +1369,9 @@ List<Integer> transform(int runId, boolean isRecalculation) {
}

/**
* Persists a dataset, optionally triggers recalculation events, and validates the dataset.
* The recalculation is getting triggered sync only if the {isRecalculation} is set to true
* otherwise it is completely skipped
* Persists a dataset, optionally triggers recalculation events, and validates the dataset. The recalculation is getting
* triggered sync only if the {isRecalculation} is set to true otherwise it is completely skipped
*
* @param ds the DatasetDAO object to be persisted
* @param isRecalculation whether the dataset is a result of recalculation
* @return the ID of the persisted dataset
@@ -1490,9 +1485,9 @@ static class RunFromUri {
}

/**
* Represents the result of persisting a run, including the run ID and associated dataset IDs.
* This class is used to encapsulate the ID of the newly persisted run and the IDs of the datasets
* connected to the run, providing a structured way to return this data.
* Represents the result of persisting a run, including the run ID and associated dataset IDs. This class is used to
* encapsulate the ID of the newly persisted run and the IDs of the datasets connected to the run, providing a structured
* way to return this data.
*/
public static class RunPersistence {
private final Integer runId;
70 changes: 70 additions & 0 deletions horreum-backend/src/main/resources/db/changeLog.xml
@@ -4688,4 +4688,74 @@
$$ LANGUAGE plpgsql;
</sql>
</changeSet>
<!-- Migrate datastore configuration -->
<changeSet id="126" author="johara">
<validCheckSum>ANY</validCheckSum>
<!-- set default Postgres datastore -->
<sql>
UPDATE backendconfig
SET configuration = '{"builtIn": true, "authentication": {"type": "none"}}'
WHERE id = 1
</sql>
<!-- update datastores with no auth -->
<sql>
UPDATE backendconfig
SET configuration = updated.newConfig
FROM (select id, jsonb_insert(newConfig - 'apiKey' - 'username' - 'password', '{authentication,type}', '"none"') as newConfig
from (select
id,
jsonb_insert(configuration, '{authentication}', '{}') as newConfig
from backendconfig
where jsonb_path_exists(configuration, '$.apiKey')
AND configuration ->> 'apiKey' = ''
AND not jsonb_path_exists(configuration, '$.username'))
)
as updated
WHERE backendconfig.id = updated.id;
</sql>

<!-- update datastores with apiKey auth -->
<sql>
UPDATE backendconfig
SET configuration = updated.newConfig
FROM (select id, jsonb_insert(newConfig, '{authentication,type}', '"api-key"') as newConfig
from (Select rootConfig.id,
jsonb_insert(rootConfig.newConfig - 'apiKey' - 'username' - 'password', '{authentication,apiKey}',
to_jsonb(rootConfig.apiKey)) as newConfig
FROM (select id,
name,
configuration ->> 'apiKey' as apiKey,
configuration,
jsonb_insert(configuration, '{authentication}', '{}') as newConfig
from backendconfig
where jsonb_path_exists(configuration, '$.apiKey')
AND not configuration ->> 'apiKey' = ''
and not jsonb_path_exists(configuration, '$.authentication'))
as rootConfig)
as updated)
as updated
WHERE backendconfig.id = updated.id
</sql>

<!-- update datastores with username & password auth -->
<sql>
UPDATE backendconfig
SET configuration = updated.newConfig
FROM
(select id, jsonb_insert(newConfig, '{authentication,type}', '"username"') as newConfig
FROM
(select id, jsonb_insert(updatedConfig.newConfig - 'password', '{authentication,password}', to_jsonb(updatedConfig.password)) as newConfig
from
(Select rootConfig.id, username, password, jsonb_insert(rootConfig.newConfig - 'apiKey' - 'username', '{authentication,username}', to_jsonb(rootConfig.username)) as newConfig
FROM
(select id, name, configuration ->> 'username' as username, configuration ->> 'password' as password, configuration, jsonb_insert(configuration, '{authentication}', '{}') as newConfig
from backendconfig
where jsonb_path_exists(configuration, '$.username')
) as rootConfig
) as updatedConfig)
) as updated
WHERE backendconfig.id = updated.id;
</sql>
</changeSet>

</databaseChangeLog>
@@ -28,6 +28,7 @@
import io.hyperfoil.tools.horreum.api.data.datastore.Datastore;
import io.hyperfoil.tools.horreum.api.data.datastore.DatastoreType;
import io.hyperfoil.tools.horreum.api.data.datastore.ElasticsearchDatastoreConfig;
import io.hyperfoil.tools.horreum.api.data.datastore.auth.NoAuth;
import io.hyperfoil.tools.horreum.bus.AsyncEventChannels;
import io.hyperfoil.tools.horreum.entity.backend.DatastoreConfigDAO;
import io.hyperfoil.tools.horreum.entity.data.DatasetDAO;
@@ -233,13 +234,12 @@ private TestConfig createNewTestAndDatastores(TestInfo info) {
Datastore newDatastore = new Datastore();
newDatastore.name = info.getDisplayName();
newDatastore.type = DatastoreType.ELASTICSEARCH;
newDatastore.builtIn = false;
newDatastore.access = Access.PRIVATE;
newDatastore.owner = TESTER_ROLES[0];

ElasticsearchDatastoreConfig elasticConfig = new ElasticsearchDatastoreConfig();
elasticConfig.url = hosts.get().get(0);
elasticConfig.apiKey = apiKey.orElse("123");
elasticConfig.authentication = new NoAuth();

newDatastore.config = mapper.valueToTree(elasticConfig);

312 changes: 174 additions & 138 deletions horreum-web/src/domain/admin/Datastores.tsx

Large diffs are not rendered by default.

308 changes: 184 additions & 124 deletions horreum-web/src/domain/admin/datastore/ModifyDatastoreModal.tsx

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion horreum-web/src/domain/runs/ValidationErrorTable.tsx
@@ -26,7 +26,7 @@ export default function ValidationErrorTable(props: ValidationErrorTableProps) {
<NavLink key="schema" to={`/schema/${error.schemaId}`}>
{props.schemas.find(s => s.id === error.schemaId)?.name || "unknown schema " + error.schemaId}
</NavLink>
: "None"}
: "none"}
</Td>
<Td key="Type">{error.error.type}</Td>
<Td key="Path"><code>{error.error.path}</code></Td>
