Allow multiple additive templates (#49)
* Allow multiple additive templates

* Drop fat application jars

* Bug fix: actually skip when not a KafkaTopic Acl
ryannedolan authored Aug 28, 2023
1 parent f81e6ac commit 6e1b6c0
Showing 16 changed files with 128 additions and 104 deletions.
8 changes: 3 additions & 5 deletions Dockerfile
@@ -1,10 +1,8 @@
FROM eclipse-temurin:18
WORKDIR /home/
ADD ./hoptimator-cli/run.sh ./hoptimator
ADD ./hoptimator-operator/run.sh ./hoptimator-operator
ADD ./hoptimator-cli/build/libs/hoptimator-cli-all.jar ./hoptimator-cli-all.jar
ADD ./hoptimator-operator/build/libs/hoptimator-operator-all.jar ./hoptimator-operator-all.jar
ADD ./hoptimator-cli-integration/build/distributions/hoptimator-cli-integration.tar ./
ADD ./hoptimator-operator-integration/build/distributions/hoptimator-operator-integration.tar ./
ADD ./etc/* ./
ENTRYPOINT ["/bin/sh", "-c"]
CMD ["./hoptimator -n '' -p '' -u jdbc:calcite:model=model.yaml"]
CMD ["./hoptimator-cli-integration/bin/hoptimator-cli-integration -n '' -p '' -u jdbc:calcite:model=model.yaml"]

2 changes: 1 addition & 1 deletion bin/hoptimator
@@ -1,3 +1,3 @@
#!/bin/sh

kubectl exec -it hoptimator -c hoptimator -- ./hoptimator -n "" -p "" -u "jdbc:calcite:model=/etc/config/model.yaml" "$@"
kubectl exec -it hoptimator -c hoptimator -- ./hoptimator-cli-integration/bin/hoptimator-cli-integration -n "" -p "" -u "jdbc:calcite:model=/etc/config/model.yaml" "$@"
2 changes: 1 addition & 1 deletion deploy/hoptimator-operator-deployment.yaml
@@ -19,7 +19,7 @@ spec:
- name: hoptimator-operator
image: docker.io/library/hoptimator
imagePullPolicy: Never
command: ["./hoptimator-operator", "/etc/config/model.yaml"]
command: ["./hoptimator-operator-integration/bin/hoptimator-operator-integration", "/etc/config/model.yaml"]
volumeMounts:
- name: config-volume
mountPath: /etc/config
2 changes: 1 addition & 1 deletion deploy/hoptimator-pod.yaml
@@ -13,7 +13,7 @@ spec:
mountPath: /etc/config
readinessProbe:
exec:
command: ["./hoptimator", "--run=./readiness-probe.sql"]
command: ["./hoptimator-cli-integration/bin/hoptimator-cli-integration", "--run=./readiness-probe.sql"]
timeoutSeconds: 30
volumes:
- name: config-volume
@@ -1,21 +1,24 @@
package com.linkedin.hoptimator.catalog;

import java.io.InputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.Scanner;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.UUID;
import java.util.function.Supplier;
import java.util.regex.Pattern;
import java.util.regex.Matcher;
import java.util.Scanner;
import java.util.function.Supplier;
import java.io.InputStream;
import java.util.stream.Collectors;
import java.net.URL;

/**
* Represents something required by a Table.
@@ -96,9 +99,13 @@ public Set<String> keys() {
}

/** Render this Resource using the given TemplateFactory */
public String render(TemplateFactory templateFactory) {
public Collection<String> render(TemplateFactory templateFactory) {
try {
return templateFactory.get(this).render(this);
List<String> res = new ArrayList<>();
for (Template template : templateFactory.find(this)) {
res.add(template.render(this));
}
return res;
} catch (Exception e) {
throw new RuntimeException("Error rendering " + template, e);
}
@@ -306,10 +313,15 @@ public String render(Resource resource) {

/** Locates a Template for a given Resource */
public interface TemplateFactory {
Template get(Resource resource);
Collection<Template> find(Resource resource) throws IOException;

default Collection<String> render(Resource resource) throws IOException {
return find(resource).stream().map(x -> x.render(resource))
.collect(Collectors.toList());
}
}

/** Finds a Template for a given Resource by looking for resource files in the classpath. */
/** Finds Templates for a given Resource by looking for resource files in the classpath. */
public static class SimpleTemplateFactory implements TemplateFactory {
private final Environment env;

@@ -318,20 +330,22 @@ public SimpleTemplateFactory(Environment env) {
}

@Override
public Template get(Resource resource) {
public Collection<Template> find(Resource resource) throws IOException {
String template = resource.template();
InputStream in = getClass().getClassLoader().getResourceAsStream(template + ".yaml.template");
if (in == null) {
throw new IllegalArgumentException("No template '" + template + "' found in jar resources");
}
StringBuilder sb = new StringBuilder();
Scanner scanner = new Scanner(in);
scanner.useDelimiter("\n");
while (scanner.hasNext()) {
sb.append(scanner.next());
sb.append("\n");
List<Template> res = new ArrayList<>();
for (Enumeration<URL> e = getClass().getClassLoader().getResources(template + ".yaml.template");
e.hasMoreElements();) {
InputStream in = e.nextElement().openStream();
StringBuilder sb = new StringBuilder();
Scanner scanner = new Scanner(in);
scanner.useDelimiter("\n");
while (scanner.hasNext()) {
sb.append(scanner.next());
sb.append("\n");
}
res.add(new SimpleTemplate(env, sb.toString()));
}
return new SimpleTemplate(env, sb.toString());
return res;
}
}
}
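
Taken together, the `Resource` and `TemplateFactory` changes above make template rendering additive: `find` may return several templates for one resource, and the default `render` emits one document per match. The sketch below is a minimal, self-contained illustration of that contract using simplified stand-in types, not the actual catalog classes:

```java
import java.util.Arrays;
import java.util.Collection;
import java.util.stream.Collectors;

// Simplified stand-ins for the catalog types touched by this commit.
public class AdditiveTemplateSketch {

  static class Resource { }

  interface Template {
    String render(Resource resource);
  }

  interface TemplateFactory {
    // New contract: a resource may match any number of templates.
    Collection<Template> find(Resource resource);

    // Default render() produces one document per matching template.
    default Collection<String> render(Resource resource) {
      return find(resource).stream()
          .map(t -> t.render(resource))
          .collect(Collectors.toList());
    }
  }

  public static void main(String[] args) {
    // Two templates found for the same resource, e.g. shipped by two different adapter jars.
    TemplateFactory factory = r -> Arrays.<Template>asList(
        x -> "# YAML from adapter one\n",
        x -> "# YAML from adapter two\n");

    // Callers now receive both rendered documents and are expected to apply all of them.
    factory.render(new Resource()).forEach(System.out::print);
  }
}
```

With a single-template factory the behavior is unchanged; with several matches, every rendered document is returned and the caller applies them all.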
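
The reworked `SimpleTemplateFactory.find` relies on `ClassLoader.getResources`, which returns every matching entry on the classpath rather than only the first; that is what lets two adapter jars ship a template under the same name and have both applied. A standalone sketch of that lookup (the template file name here is hypothetical):

```java
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Enumeration;
import java.util.Scanner;

// Demonstrates that getResources(name) can yield multiple URLs for the same
// resource name when several jars on the classpath contain it.
public class ClasspathScanSketch {
  public static void main(String[] args) throws IOException {
    String name = "KafkaTopic.yaml.template";  // hypothetical template file name
    Enumeration<URL> urls = ClasspathScanSketch.class.getClassLoader().getResources(name);
    while (urls.hasMoreElements()) {
      URL url = urls.nextElement();
      try (InputStream in = url.openStream();
           Scanner scanner = new Scanner(in).useDelimiter("\\A")) {
        System.out.println("Found template at " + url);
        System.out.print(scanner.hasNext() ? scanner.next() : "");
      }
    }
  }
}
```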
37 changes: 37 additions & 0 deletions hoptimator-cli-integration/build.gradle
@@ -0,0 +1,37 @@
plugins {
id 'java'
id 'application'
id 'idea'
}

dependencies {
implementation project(path: ':hoptimator-cli', configuration: 'shadow')

// include adapters for integration tests
implementation project(':hoptimator-kafka-adapter')
implementation project(':hoptimator-mysql-adapter')
implementation project(':hoptimator-flink-adapter')
implementation libs.flinkCsv
implementation libs.flinkConnectorKafka
implementation libs.slf4jSimple
}

application {
mainClassName = 'com.linkedin.hoptimator.HoptimatorCliApp'
applicationDefaultJvmArgs = [
"--add-opens", "java.base/java.lang=ALL-UNNAMED",
"--add-opens", "java.base/java.util=ALL-UNNAMED",
"--add-opens", "java.base/java.time=ALL-UNNAMED" ]
}

java {
withJavadocJar()
withSourcesJar()
}

idea {
module {
downloadJavadoc = true
downloadSources = true
}
}
17 changes: 0 additions & 17 deletions hoptimator-cli/build.gradle
@@ -1,27 +1,14 @@
plugins {
id 'com.github.johnrengelman.shadow' version '8.1.1'
id 'java'
id 'application'
id 'idea'
id 'maven-publish'
}

configurations {
integration.extendsFrom implementation
}

dependencies {
implementation project(':hoptimator-planner')
implementation project(':hoptimator-catalog')

// include adapters for integration tests
integration project(':hoptimator-kafka-adapter')
integration project(':hoptimator-mysql-adapter')
integration project(':hoptimator-flink-adapter')
integration libs.flinkCsv
integration libs.flinkConnectorKafka
integration libs.slf4jSimple

implementation libs.avro
implementation libs.sqlline
implementation libs.slf4jApi
@@ -87,15 +74,11 @@ java {
}

shadowJar {
configurations = [project.configurations.integration]

// This is required for Flink and Avatica to play nicely
relocate 'com.google', 'org.apache.flink.calcite.shaded.com.google'

exclude 'META-INF/*.RSA', 'META-INF/*.SF','META-INF/*.DSA'
zip64 true
manifest.attributes 'Main-Class': 'com.linkedin.hoptimator.HoptimatorCliApp'
mainClassName = 'com.linkedin.hoptimator.HoptimatorCliApp'
mergeServiceFiles()
}

10 changes: 0 additions & 10 deletions hoptimator-cli/run.sh

This file was deleted.

@@ -214,7 +214,7 @@ public void execute(String line, DispatchCallback dispatchCallback) {
// TODO provide generated avro schema to environment
Resource.TemplateFactory templateFactory = new Resource.SimpleTemplateFactory(
new Resource.SimpleEnvironment(properties).orIgnore());
sqlline.output(pipeline.render(templateFactory));
pipeline.render(templateFactory).stream().forEach(x -> sqlline.output(x));
dispatchCallback.setToSuccess();
} catch (Exception e) {
sqlline.error(e.toString());
@@ -62,6 +62,7 @@ public Result reconcile(Request request) {

if (!targetKind.equals("KafkaTopic")) {
log.info("Not a KafkaTopic Acl. Skipping.");
return new Result(false);
}

V1alpha1AclSpec.MethodEnum method = object.getSpec().getMethod();
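
The one-line Acl fix above supplies the `return` the log message implied; previously the reconciler logged the skip but still fell through into the KafkaTopic handling. A reduced, self-contained sketch of the guard clause, with simplified stand-ins for the operator's `Request` and `Result` types:

```java
import java.util.logging.Logger;

// Simplified stand-ins; the real reconciler uses the kubernetes-client
// extended controller's Request and Result types.
public class AclGuardSketch {
  static class Request { final String targetKind; Request(String kind) { this.targetKind = kind; } }
  static class Result { final boolean requeue; Result(boolean requeue) { this.requeue = requeue; } }

  private static final Logger log = Logger.getLogger(AclGuardSketch.class.getName());

  Result reconcile(Request request) {
    if (!"KafkaTopic".equals(request.targetKind)) {
      log.info("Not a KafkaTopic Acl. Skipping.");
      return new Result(false);  // the fix: stop here instead of falling through
    }
    // ... KafkaTopic-specific ACL handling would continue here ...
    return new Result(false);
  }

  public static void main(String[] args) {
    System.out.println(new AclGuardSketch().reconcile(new Request("Other")).requeue);  // false
  }
}
```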
36 changes: 36 additions & 0 deletions hoptimator-operator-integration/build.gradle
@@ -0,0 +1,36 @@
plugins {
id 'java'
id 'application'
id 'idea'
}

dependencies {
implementation project(':hoptimator-operator')

// include adapters in integration tests
implementation project(':hoptimator-kafka-adapter')
implementation project(':hoptimator-mysql-adapter')
implementation project(':hoptimator-flink-adapter')
implementation libs.slf4jSimple

implementation libs.calciteCore
implementation libs.kubernetesClient
implementation libs.kubernetesExtendedClient
implementation libs.slf4jApi
implementation libs.commonsCli
implementation libs.avro

testImplementation libs.junit
testImplementation libs.assertj
}

application {
mainClassName = 'com.linkedin.hoptimator.operator.HoptimatorOperatorApp'
}

idea {
module {
downloadJavadoc = true
downloadSources = true
}
}
30 changes: 0 additions & 30 deletions hoptimator-operator/build.gradle
@@ -1,26 +1,14 @@
plugins {
id 'java'
id 'com.github.johnrengelman.shadow' version '8.1.1'
id 'application'
id 'maven-publish'
id 'idea'
}

configurations {
integration.extendsFrom implementation
}

dependencies {
implementation project(':hoptimator-planner')
implementation project(':hoptimator-catalog')
implementation project(':hoptimator-models')

// include adapters in integration tests
integration project(':hoptimator-kafka-adapter')
integration project(':hoptimator-mysql-adapter')
integration project(':hoptimator-flink-adapter')
integration libs.slf4jSimple

implementation libs.calciteCore
implementation libs.kubernetesClient
implementation libs.kubernetesExtendedClient
@@ -32,10 +20,6 @@ dependencies {
testImplementation libs.assertj
}

application {
mainClassName = 'com.linkedin.hoptimator.operator.HoptimatorOperatorApp'
}

publishing {
repositories {
maven {
@@ -81,20 +65,6 @@ publishing {
}
}


shadowJar {
configurations = [project.configurations.integration]

// This is required for Flink and Avatica to play nicely
relocate 'com.google', 'org.apache.flink.calcite.shaded.com.google'

exclude 'META-INF/*.RSA', 'META-INF/*.SF','META-INF/*.DSA'
zip64 true
manifest.attributes 'Main-Class': 'com.linkedin.hoptimator.operator.HoptimatorOperatorApp'
mainClassName = 'com.linkedin.hoptimator.operator.HoptimatorOperatorApp'
mergeServiceFiles()
}

idea {
module {
downloadJavadoc = true
9 changes: 0 additions & 9 deletions hoptimator-operator/run.sh

This file was deleted.

@@ -27,6 +27,7 @@
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
@@ -98,21 +99,21 @@ public Result reconcile(Request request) {

// Render resources related to all source tables.
List<String> upstreamResources = pipeline.upstreamResources().stream()
.map(x -> x.render(templateFactory))
.flatMap(x -> x.render(templateFactory).stream())
.collect(Collectors.toList());

// Render the SQL job
String sqlJob = pipeline.sqlJob().render(templateFactory);
Collection<String> sqlJob = pipeline.sqlJob().render(templateFactory);

// Render resources related to the sink table. For these resources, we pass along any
// "hints" as part of the environment.
List<String> downstreamResources = pipeline.downstreamResources().stream()
.map(x -> x.render(sinkTemplateFactory))
.flatMap(x -> x.render(sinkTemplateFactory).stream())
.collect(Collectors.toList());

List<String> combined = new ArrayList<>();
combined.addAll(upstreamResources);
combined.add(sqlJob);
combined.addAll(sqlJob);
combined.addAll(downstreamResources);

status.setResources(combined);
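
Because each resource can now render to several documents, the reconciler above switches from `map` to `flatMap` and treats the SQL job output as a collection as well. A reduced sketch of that aggregation step, with placeholder strings standing in for rendered YAML:

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;

// Placeholder strings stand in for rendered YAML documents.
public class FlattenSketch {
  public static void main(String[] args) {
    // Each upstream resource now renders to a Collection<String> (one entry per matching template).
    List<Collection<String>> perResource = Arrays.asList(
        Arrays.asList("topic.yaml"),
        Arrays.asList("acl-read.yaml", "acl-write.yaml"));

    // flatMap folds the per-resource collections into one flat list...
    List<String> upstreamResources = perResource.stream()
        .flatMap(Collection::stream)
        .collect(Collectors.toList());

    // ...and the SQL job is a collection too, so addAll replaces add.
    Collection<String> sqlJob = Arrays.asList("flink-job.yaml");

    List<String> combined = new ArrayList<>();
    combined.addAll(upstreamResources);
    combined.addAll(sqlJob);

    System.out.println(combined);  // [topic.yaml, acl-read.yaml, acl-write.yaml, flink-job.yaml]
  }
}
```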