From fb132c40281b7b22c86cbefe676e645754f7eabe Mon Sep 17 00:00:00 2001
From: Scott Hamrick <2623452+cshamrick@users.noreply.github.com>
Date: Thu, 13 Nov 2025 10:07:32 -0600
Subject: [PATCH 1/3] chore(sdk) add checkstyle
Signed-off-by: Scott Hamrick <2623452+cshamrick@users.noreply.github.com>
---
checkstyle-suppressions.xml | 47 +++++++++
checkstyle.xml | 199 ++++++++++++++++++++++++++++++++++++
pom.xml | 18 ++++
3 files changed, 264 insertions(+)
create mode 100644 checkstyle-suppressions.xml
create mode 100644 checkstyle.xml
diff --git a/checkstyle-suppressions.xml b/checkstyle-suppressions.xml
new file mode 100644
index 00000000..00ae34cb
--- /dev/null
+++ b/checkstyle-suppressions.xml
@@ -0,0 +1,47 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/checkstyle.xml b/checkstyle.xml
new file mode 100644
index 00000000..1bf40b3e
--- /dev/null
+++ b/checkstyle.xml
@@ -0,0 +1,199 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index b6860291..62457c7f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -213,6 +213,11 @@
maven-deploy-plugin
3.1.2
+
+ org.apache.maven.plugins
+ maven-checkstyle-plugin
+ 3.6.0
+
@@ -275,6 +280,19 @@
+
+ org.apache.maven.plugins
+ maven-checkstyle-plugin
+
+
+ validate
+ validate
+
+ check
+
+
+
+
From 04df580561f315e531a57fc10dca7e4d7a7fa308 Mon Sep 17 00:00:00 2001
From: Scott Hamrick <2623452+cshamrick@users.noreply.github.com>
Date: Fri, 21 Nov 2025 09:32:44 -0600
Subject: [PATCH 2/3] fix(sdk): encapsulate fields with getters and setters in
sdk classes
Signed-off-by: Scott Hamrick <2623452+cshamrick@users.noreply.github.com>
---
checkstyle-suppressions.xml | 14 +-
.../java/io/opentdf/platform/Command.java | 6 +-
.../platform/DecryptCollectionExample.java | 2 +-
.../platform/EncryptCollectionExample.java | 2 +-
.../io/opentdf/platform/EncryptExample.java | 2 +-
.../opentdf/platform/sdk/Autoconfigure.java | 88 ++-
.../java/io/opentdf/platform/sdk/Config.java | 389 +++++++++--
.../io/opentdf/platform/sdk/KASClient.java | 117 +++-
.../io/opentdf/platform/sdk/KASKeyCache.java | 2 +-
.../java/io/opentdf/platform/sdk/NanoTDF.java | 70 +-
.../java/io/opentdf/platform/sdk/Planner.java | 60 +-
.../java/io/opentdf/platform/sdk/TDF.java | 84 +--
.../java/io/opentdf/platform/sdk/Version.java | 2 +-
.../io/opentdf/platform/sdk/ZipWriter.java | 602 +++++++++++++++---
.../platform/sdk/AutoconfigureTest.java | 83 +--
.../io/opentdf/platform/sdk/ConfigTest.java | 48 +-
.../java/io/opentdf/platform/sdk/Fuzzing.java | 2 +-
.../opentdf/platform/sdk/KASClientTest.java | 12 +-
.../opentdf/platform/sdk/KASKeyCacheTest.java | 48 +-
.../io/opentdf/platform/sdk/NanoTDFTest.java | 40 +-
.../io/opentdf/platform/sdk/PlannerTest.java | 174 +++--
.../io/opentdf/platform/sdk/TDFE2ETest.java | 4 +-
.../java/io/opentdf/platform/sdk/TDFTest.java | 46 +-
23 files changed, 1348 insertions(+), 549 deletions(-)
diff --git a/checkstyle-suppressions.xml b/checkstyle-suppressions.xml
index 00ae34cb..fc926b9f 100644
--- a/checkstyle-suppressions.xml
+++ b/checkstyle-suppressions.xml
@@ -10,15 +10,15 @@
-
+
-
-
-
-
+
+
+
+
@@ -43,5 +43,7 @@
-
+
+
+
\ No newline at end of file
diff --git a/cmdline/src/main/java/io/opentdf/platform/Command.java b/cmdline/src/main/java/io/opentdf/platform/Command.java
index cce6f6be..056b3659 100644
--- a/cmdline/src/main/java/io/opentdf/platform/Command.java
+++ b/cmdline/src/main/java/io/opentdf/platform/Command.java
@@ -165,7 +165,7 @@ void encrypt(
var sdk = buildSDK();
var kasInfos = kas.stream().map(k -> {
var ki = new Config.KASInfo();
- ki.URL = k;
+ ki.setURL(k);
return ki;
}).toArray(Config.KASInfo[]::new);
@@ -274,7 +274,7 @@ void decrypt(@Option(names = { "-f", "--file" }, required = true) Path tdfPath,
}
}
- for (Map.Entry entry : assertionVerificationKeys.keys
+ for (Map.Entry entry : assertionVerificationKeys.getKeys()
.entrySet()) {
try {
Object correctedKey = correctKeyType(entry.getValue().alg, entry.getValue().key, true);
@@ -336,7 +336,7 @@ void createNanoTDF(
var sdk = buildSDK();
var kasInfos = kas.stream().map(k -> {
var ki = new Config.KASInfo();
- ki.URL = k;
+ ki.setURL(k);
return ki;
}).toArray(Config.KASInfo[]::new);
diff --git a/examples/src/main/java/io/opentdf/platform/DecryptCollectionExample.java b/examples/src/main/java/io/opentdf/platform/DecryptCollectionExample.java
index 6e9bcb3e..db2bcd87 100644
--- a/examples/src/main/java/io/opentdf/platform/DecryptCollectionExample.java
+++ b/examples/src/main/java/io/opentdf/platform/DecryptCollectionExample.java
@@ -20,7 +20,7 @@ public static void main(String[] args) throws IOException {
.build();
var kasInfo = new Config.KASInfo();
- kasInfo.URL = "http://localhost:8080/kas";
+ kasInfo.setURL("http://localhost:8080/kas");
// Convert String to InputStream
diff --git a/examples/src/main/java/io/opentdf/platform/EncryptCollectionExample.java b/examples/src/main/java/io/opentdf/platform/EncryptCollectionExample.java
index 5c060450..9f2a3431 100644
--- a/examples/src/main/java/io/opentdf/platform/EncryptCollectionExample.java
+++ b/examples/src/main/java/io/opentdf/platform/EncryptCollectionExample.java
@@ -21,7 +21,7 @@ public static void main(String[] args) throws IOException {
.build();
var kasInfo = new Config.KASInfo();
- kasInfo.URL = "http://localhost:8080/kas";
+ kasInfo.setURL("http://localhost:8080/kas");
var tdfConfig = Config.newNanoTDFConfig(
Config.withNanoKasInformation(kasInfo),
diff --git a/examples/src/main/java/io/opentdf/platform/EncryptExample.java b/examples/src/main/java/io/opentdf/platform/EncryptExample.java
index b2426303..35d8d495 100644
--- a/examples/src/main/java/io/opentdf/platform/EncryptExample.java
+++ b/examples/src/main/java/io/opentdf/platform/EncryptExample.java
@@ -39,7 +39,7 @@ public static void main(String[] args) throws IOException, ParseException {
.build();
var kasInfo = new Config.KASInfo();
- kasInfo.URL = "http://localhost:8080/kas";
+ kasInfo.setURL("http://localhost:8080/kas");
var wrappingKeyType = KeyType.fromString(keyEncapsulationAlgorithm.toLowerCase());
var tdfConfig = Config.newTDFConfig(Config.withKasInformation(kasInfo),
diff --git a/sdk/src/main/java/io/opentdf/platform/sdk/Autoconfigure.java b/sdk/src/main/java/io/opentdf/platform/sdk/Autoconfigure.java
index ffe9e239..f85d819c 100644
--- a/sdk/src/main/java/io/opentdf/platform/sdk/Autoconfigure.java
+++ b/sdk/src/main/java/io/opentdf/platform/sdk/Autoconfigure.java
@@ -63,10 +63,26 @@ private Autoconfigure() {
}
static class KeySplitTemplate {
- final String kas;
- final String splitID;
- final String kid;
- final KeyType keyType;
+ private final String kas;
+ private final String splitID;
+ private final String kid;
+ private final KeyType keyType;
+
+ public String getKas() {
+ return kas;
+ }
+
+ public String getSplitID() {
+ return splitID;
+ }
+
+ public String getKid() {
+ return kid;
+ }
+
+ public KeyType getKeyType() {
+ return keyType;
+ }
@Override
public String toString() {
@@ -99,8 +115,16 @@ public KeySplitTemplate(String kas, String splitID, String kid, KeyType keyType)
}
public static class KeySplitStep {
- final String kas;
- final String splitID;
+ private final String kas;
+ private final String splitID;
+
+ public String getKas() {
+ return kas;
+ }
+
+ public String getSplitID() {
+ return splitID;
+ }
KeySplitStep(String kas, String splitId) {
this.kas = Objects.requireNonNull(kas);
@@ -289,8 +313,16 @@ String name() {
}
static class KeyAccessGrant {
- public Attribute attr;
- public List kases;
+ private Attribute attr;
+ private List kases;
+
+ public Attribute getAttr() {
+ return attr;
+ }
+
+ public List getKases() {
+ return kases;
+ }
public KeyAccessGrant(Attribute attr, List kases) {
this.attr = attr;
@@ -349,11 +381,11 @@ boolean addAllGrants(AttributeValueFQN fqn, List granted, List<
for (var cachedGrantKey: cachedGrantKeys) {
var mappedKey = new Config.KASInfo();
- mappedKey.URL = grantedKey.getUri();
- mappedKey.KID = cachedGrantKey.getKid();
- mappedKey.Algorithm = KeyType.fromPublicKeyAlgorithm(cachedGrantKey.getAlg()).toString();
- mappedKey.PublicKey = cachedGrantKey.getPem();
- mappedKey.Default = false;
+ mappedKey.setURL(grantedKey.getUri());
+ mappedKey.setKID(cachedGrantKey.getKid());
+ mappedKey.setAlgorithm(KeyType.fromPublicKeyAlgorithm(cachedGrantKey.getAlg()).toString());
+ mappedKey.setPublicKey(cachedGrantKey.getPem());
+ mappedKey.setDefault(false);
mappedKeys.computeIfAbsent(fqn.key, k -> new ArrayList<>()).add(mappedKey);
}
}
@@ -512,11 +544,11 @@ BooleanKeyExpression assignKeysTo(AttributeBooleanExpression e) {
continue;
}
for (var kasInfo : mapped) {
- if (kasInfo.URL == null || kasInfo.URL.isEmpty()) {
+ if (kasInfo.getURL() == null || kasInfo.getURL().isEmpty()) {
logger.warn("No KAS URL found for attribute value {}", value);
continue;
}
- keys.add(new PublicKeyInfo(kasInfo.URL, kasInfo.KID, kasInfo.Algorithm));
+ keys.add(new PublicKeyInfo(kasInfo.getURL(), kasInfo.getKID(), kasInfo.getAlgorithm()));
}
}
@@ -615,9 +647,17 @@ public String toString() {
}
static class PublicKeyInfo implements Comparable {
- final String kas;
- final String kid;
- final String algorithm;
+ private final String kas;
+ private final String kid;
+ private final String algorithm;
+
+ public String getKas() {
+ return kas;
+ }
+
+ public String getKid() { return kid; }
+
+ public String getAlgorithm() { return algorithm; }
PublicKeyInfo(String kas) {
this(kas, null, null);
@@ -629,10 +669,6 @@ static class PublicKeyInfo implements Comparable {
this.algorithm = algorithm;
}
- String getKas() {
- return kas;
- }
-
@Override
public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
@@ -884,11 +920,11 @@ static Granter newGranterFromService(AttributesServiceClientInterface as, KASKey
static Autoconfigure.Granter createGranter(SDK.Services services, Config.TDFConfig tdfConfig) {
Autoconfigure.Granter granter = new Autoconfigure.Granter(new ArrayList<>());
- if (tdfConfig.attributeValues != null && !tdfConfig.attributeValues.isEmpty()) {
- granter = Autoconfigure.newGranterFromAttributes(services.kas().getKeyCache(), tdfConfig.attributeValues.toArray(new Value[0]));
- } else if (tdfConfig.attributes != null && !tdfConfig.attributes.isEmpty()) {
+ if (tdfConfig.getAttributeValues() != null && !tdfConfig.getAttributeValues().isEmpty()) {
+ granter = Autoconfigure.newGranterFromAttributes(services.kas().getKeyCache(), tdfConfig.getAttributeValues().toArray(new Value[0]));
+ } else if (tdfConfig.getAttributes() != null && !tdfConfig.getAttributes().isEmpty()) {
granter = Autoconfigure.newGranterFromService(services.attributes(), services.kas().getKeyCache(),
- tdfConfig.attributes.toArray(new Autoconfigure.AttributeValueFQN[0]));
+ tdfConfig.getAttributes().toArray(new Autoconfigure.AttributeValueFQN[0]));
}
return granter;
}
diff --git a/sdk/src/main/java/io/opentdf/platform/sdk/Config.java b/sdk/src/main/java/io/opentdf/platform/sdk/Config.java
index ea49d074..3986b20a 100644
--- a/sdk/src/main/java/io/opentdf/platform/sdk/Config.java
+++ b/sdk/src/main/java/io/opentdf/platform/sdk/Config.java
@@ -40,11 +40,60 @@ public enum IntegrityAlgorithm {
}
public static class KASInfo implements Cloneable {
- public String URL;
- public String PublicKey;
- public String KID;
- public Boolean Default;
- public String Algorithm;
+ private String URL;
+ private String PublicKey;
+ private String KID;
+ private Boolean Default;
+ private String Algorithm;
+
+ public KASInfo() {}
+
+ public KASInfo(String URL, String publicKey, String KID, String algorithm) {
+ this.URL = URL;
+ PublicKey = publicKey;
+ this.KID = KID;
+ Algorithm = algorithm;
+ }
+
+ public String getURL() {
+ return URL;
+ }
+
+ public void setURL(String URL) {
+ this.URL = URL;
+ }
+
+ public String getPublicKey() {
+ return PublicKey;
+ }
+
+ public void setPublicKey(String publicKey) {
+ PublicKey = publicKey;
+ }
+
+ public String getKID() {
+ return KID;
+ }
+
+ public void setKID(String KID) {
+ this.KID = KID;
+ }
+
+ public Boolean getDefault() {
+ return Default;
+ }
+
+ public void setDefault(Boolean aDefault) {
+ Default = aDefault;
+ }
+
+ public String getAlgorithm() {
+ return Algorithm;
+ }
+
+ public void setAlgorithm(String algorithm) {
+ Algorithm = algorithm;
+ }
@Override
public KASInfo clone() {
@@ -108,8 +157,24 @@ public static KASInfo fromSimpleKasKey(SimpleKasKey ki) {
}
public static class AssertionVerificationKeys {
- public AssertionConfig.AssertionKey defaultKey;
- public Map keys = new HashMap<>();
+ private AssertionConfig.AssertionKey defaultKey;
+ private Map keys = new HashMap<>();
+
+ public Map getKeys() {
+ return keys;
+ }
+
+ public void setKeys(Map keys) {
+ this.keys = keys;
+ }
+
+ public AssertionConfig.AssertionKey getDefaultKey() {
+ return defaultKey;
+ }
+
+ public void setDefaultKey(AssertionConfig.AssertionKey defaultKey) {
+ this.defaultKey = defaultKey;
+ }
Boolean isEmpty() {
return this.defaultKey == null && this.keys.isEmpty();
@@ -127,11 +192,51 @@ AssertionConfig.AssertionKey getKey(String key) {
public static class TDFReaderConfig {
// Optional Map of Assertion Verification Keys
- AssertionVerificationKeys assertionVerificationKeys = new AssertionVerificationKeys();
- boolean disableAssertionVerification;
- KeyType sessionKeyType;
- Set kasAllowlist;
- boolean ignoreKasAllowlist;
+ private AssertionVerificationKeys assertionVerificationKeys = new AssertionVerificationKeys();
+ private boolean disableAssertionVerification;
+ private KeyType sessionKeyType;
+ private Set kasAllowlist;
+ private boolean ignoreKasAllowlist;
+
+ public void setAssertionVerificationKeys(AssertionVerificationKeys assertionVerificationKeys) {
+ this.assertionVerificationKeys = assertionVerificationKeys;
+ }
+
+ public void setDisableAssertionVerification(boolean disableAssertionVerification) {
+ this.disableAssertionVerification = disableAssertionVerification;
+ }
+
+ public void setSessionKeyType(KeyType sessionKeyType) {
+ this.sessionKeyType = sessionKeyType;
+ }
+
+ public void setKasAllowlist(Set kasAllowlist) {
+ this.kasAllowlist = kasAllowlist;
+ }
+
+ public void setIgnoreKasAllowlist(boolean ignoreKasAllowlist) {
+ this.ignoreKasAllowlist = ignoreKasAllowlist;
+ }
+
+ public AssertionVerificationKeys getAssertionVerificationKeys() {
+ return assertionVerificationKeys;
+ }
+
+ public boolean isDisableAssertionVerification() {
+ return disableAssertionVerification;
+ }
+
+ public KeyType getSessionKeyType() {
+ return sessionKeyType;
+ }
+
+ public Set getKasAllowlist() {
+ return kasAllowlist;
+ }
+
+ public boolean isIgnoreKasAllowlist() {
+ return ignoreKasAllowlist;
+ }
}
@SafeVarargs
@@ -176,25 +281,177 @@ public static Consumer WithIgnoreKasAllowlist(boolean ignore) {
public static class TDFConfig {
- public Boolean autoconfigure;
- public int defaultSegmentSize;
- public boolean enableEncryption;
- public TDFFormat tdfFormat;
- public String tdfPublicKey;
- public String tdfPrivateKey;
- public String metaData;
- public IntegrityAlgorithm integrityAlgorithm;
- public IntegrityAlgorithm segmentIntegrityAlgorithm;
- public List attributes;
- public List attributeValues;
- public List kasInfoList;
- public List assertionConfigList;
- public String mimeType;
- public List splitPlan;
- public KeyType wrappingKeyType;
- public boolean hexEncodeRootAndSegmentHashes;
- public boolean renderVersionInfoInManifest;
- public boolean systemMetadataAssertion;
+ private Boolean autoconfigure;
+ private int defaultSegmentSize;
+ private boolean enableEncryption;
+ private TDFFormat tdfFormat;
+ private String tdfPublicKey;
+ private String tdfPrivateKey;
+ private String metaData;
+ private IntegrityAlgorithm integrityAlgorithm;
+ private IntegrityAlgorithm segmentIntegrityAlgorithm;
+ private List attributes;
+ private List attributeValues;
+ private List kasInfoList;
+ private List assertionConfigList;
+ private String mimeType;
+ private List splitPlan;
+ private KeyType wrappingKeyType;
+ private boolean hexEncodeRootAndSegmentHashes;
+ private boolean renderVersionInfoInManifest;
+ private boolean systemMetadataAssertion;
+
+ public Boolean getAutoconfigure() {
+ return autoconfigure;
+ }
+
+ public void setAutoconfigure(Boolean autoconfigure) {
+ this.autoconfigure = autoconfigure;
+ }
+
+ public int getDefaultSegmentSize() {
+ return defaultSegmentSize;
+ }
+
+ public void setDefaultSegmentSize(int defaultSegmentSize) {
+ this.defaultSegmentSize = defaultSegmentSize;
+ }
+
+ public boolean isEnableEncryption() {
+ return enableEncryption;
+ }
+
+ public void setEnableEncryption(boolean enableEncryption) {
+ this.enableEncryption = enableEncryption;
+ }
+
+ public TDFFormat getTdfFormat() {
+ return tdfFormat;
+ }
+
+ public void setTdfFormat(TDFFormat tdfFormat) {
+ this.tdfFormat = tdfFormat;
+ }
+
+ public String getTdfPublicKey() {
+ return tdfPublicKey;
+ }
+
+ public void setTdfPublicKey(String tdfPublicKey) {
+ this.tdfPublicKey = tdfPublicKey;
+ }
+
+ public String getTdfPrivateKey() {
+ return tdfPrivateKey;
+ }
+
+ public void setTdfPrivateKey(String tdfPrivateKey) {
+ this.tdfPrivateKey = tdfPrivateKey;
+ }
+
+ public String getMetaData() {
+ return metaData;
+ }
+
+ public void setMetaData(String metaData) {
+ this.metaData = metaData;
+ }
+
+ public IntegrityAlgorithm getIntegrityAlgorithm() {
+ return integrityAlgorithm;
+ }
+
+ public void setIntegrityAlgorithm(IntegrityAlgorithm integrityAlgorithm) {
+ this.integrityAlgorithm = integrityAlgorithm;
+ }
+
+ public IntegrityAlgorithm getSegmentIntegrityAlgorithm() {
+ return segmentIntegrityAlgorithm;
+ }
+
+ public void setSegmentIntegrityAlgorithm(IntegrityAlgorithm segmentIntegrityAlgorithm) {
+ this.segmentIntegrityAlgorithm = segmentIntegrityAlgorithm;
+ }
+
+ public List getAttributes() {
+ return attributes;
+ }
+
+ public void setAttributes(List attributes) {
+ this.attributes = attributes;
+ }
+
+ public List getAttributeValues() {
+ return attributeValues;
+ }
+
+ public void setAttributeValues(List attributeValues) {
+ this.attributeValues = attributeValues;
+ }
+
+ public List getKasInfoList() {
+ return kasInfoList;
+ }
+
+ public void setKasInfoList(List kasInfoList) {
+ this.kasInfoList = kasInfoList;
+ }
+
+ public List getAssertionConfigList() {
+ return assertionConfigList;
+ }
+
+ public void setAssertionConfigList(List assertionConfigList) {
+ this.assertionConfigList = assertionConfigList;
+ }
+
+ public String getMimeType() {
+ return mimeType;
+ }
+
+ public void setMimeType(String mimeType) {
+ this.mimeType = mimeType;
+ }
+
+ public List getSplitPlan() {
+ return splitPlan;
+ }
+
+ public void setSplitPlan(List splitPlan) {
+ this.splitPlan = splitPlan;
+ }
+
+ public KeyType getWrappingKeyType() {
+ return wrappingKeyType;
+ }
+
+ public void setWrappingKeyType(KeyType wrappingKeyType) {
+ this.wrappingKeyType = wrappingKeyType;
+ }
+
+ public boolean isHexEncodeRootAndSegmentHashes() {
+ return hexEncodeRootAndSegmentHashes;
+ }
+
+ public void setHexEncodeRootAndSegmentHashes(boolean hexEncodeRootAndSegmentHashes) {
+ this.hexEncodeRootAndSegmentHashes = hexEncodeRootAndSegmentHashes;
+ }
+
+ public boolean getRenderVersionInfoInManifest() {
+ return renderVersionInfoInManifest;
+ }
+
+ public void setRenderVersionInfoInManifest(boolean renderVersionInfoInManifest) {
+ this.renderVersionInfoInManifest = renderVersionInfoInManifest;
+ }
+
+ public boolean isSystemMetadataAssertion() {
+ return systemMetadataAssertion;
+ }
+
+ public void setSystemMetadataAssertion(boolean systemMetadataAssertion) {
+ this.systemMetadataAssertion = systemMetadataAssertion;
+ }
public TDFConfig() {
this.autoconfigure = true;
@@ -343,13 +600,43 @@ public static Consumer withSystemMetadataAssertion() {
}
public static class NanoTDFConfig {
- public ECCMode eccMode;
- public NanoTDFType.Cipher cipher;
- public SymmetricAndPayloadConfig config;
- public List attributes;
- public List kasInfoList;
- public CollectionConfig collectionConfig;
- public NanoTDFType.PolicyType policyType;
+ private ECCMode eccMode;
+ private NanoTDFType.Cipher cipher;
+ private SymmetricAndPayloadConfig config;
+ private List attributes;
+ private List kasInfoList;
+ private CollectionConfig collectionConfig;
+ private NanoTDFType.PolicyType policyType;
+
+ public ECCMode getEccMode() {
+ return eccMode;
+ }
+
+ public NanoTDFType.Cipher getCipher() {
+ return cipher;
+ }
+
+ public SymmetricAndPayloadConfig getConfig() {
+ return config;
+ }
+
+ public List getAttributes() {
+ return attributes;
+ }
+
+ public List getKasInfoList() {
+ return kasInfoList;
+ }
+
+ public CollectionConfig getCollectionConfig() {
+ return collectionConfig;
+ }
+
+ public NanoTDFType.PolicyType getPolicyType() {
+ return policyType;
+ }
+
+
public NanoTDFConfig() {
this.eccMode = new ECCMode();
@@ -424,8 +711,24 @@ public static Consumer withPolicyType(NanoTDFType.PolicyType poli
}
public static class NanoTDFReaderConfig {
- Set kasAllowlist;
- boolean ignoreKasAllowlist;
+ private Set kasAllowlist;
+ private boolean ignoreKasAllowlist;
+
+ public void setKasAllowlist(Set kasAllowlist) {
+ this.kasAllowlist = kasAllowlist;
+ }
+
+ public void setIgnoreKasAllowlist(boolean ignoreKasAllowlist) {
+ this.ignoreKasAllowlist = ignoreKasAllowlist;
+ }
+
+ public Set getKasAllowlist() {
+ return kasAllowlist;
+ }
+
+ public boolean isIgnoreKasAllowlist() {
+ return ignoreKasAllowlist;
+ }
}
public static NanoTDFReaderConfig newNanoTDFReaderConfig(Consumer... options) {
@@ -482,9 +785,13 @@ public AesGcm getKey() {
public static class CollectionConfig {
private int iterationCounter;
private HeaderInfo headerInfo;
- public final boolean useCollection;
+ private final boolean useCollection;
private Boolean updatedHeaderInfo;
+ public boolean getUseCollection() {
+ return useCollection;
+ }
+
public CollectionConfig(boolean useCollection) {
this.useCollection = useCollection;
diff --git a/sdk/src/main/java/io/opentdf/platform/sdk/KASClient.java b/sdk/src/main/java/io/opentdf/platform/sdk/KASClient.java
index 7ab09283..8f1bce16 100644
--- a/sdk/src/main/java/io/opentdf/platform/sdk/KASClient.java
+++ b/sdk/src/main/java/io/opentdf/platform/sdk/KASClient.java
@@ -76,7 +76,7 @@ public KASInfo getECPublicKey(Config.KASInfo kasInfo, NanoTDFType.ECCurve curve)
log.debug("retrieving public key with kasinfo = [{}]", kasInfo);
var req = PublicKeyRequest.newBuilder().setAlgorithm(curve.getPlatformCurveName()).build();
- var r = getStub(kasInfo.URL).publicKeyBlocking(req, Collections.emptyMap()).execute();
+ var r = getStub(kasInfo.getURL()).publicKeyBlocking(req, Collections.emptyMap()).execute();
PublicKeyResponse res;
try {
res = ResponseMessageKt.getOrThrow(r);
@@ -84,23 +84,23 @@ public KASInfo getECPublicKey(Config.KASInfo kasInfo, NanoTDFType.ECCurve curve)
throw new SDKException("error getting public key", e);
}
var k2 = kasInfo.clone();
- k2.KID = res.getKid();
- k2.PublicKey = res.getPublicKey();
+ k2.setKID(res.getKid());
+ k2.setPublicKey(res.getPublicKey());
return k2;
}
@Override
public Config.KASInfo getPublicKey(Config.KASInfo kasInfo) {
- Config.KASInfo cachedValue = this.kasKeyCache.get(kasInfo.URL, kasInfo.Algorithm, kasInfo.KID);
+ Config.KASInfo cachedValue = this.kasKeyCache.get(kasInfo.getURL(), kasInfo.getAlgorithm(), kasInfo.getKID());
if (cachedValue != null) {
return cachedValue;
}
- PublicKeyRequest request = (kasInfo.Algorithm == null || kasInfo.Algorithm.isEmpty())
+ PublicKeyRequest request = (kasInfo.getAlgorithm() == null || kasInfo.getAlgorithm().isEmpty())
? PublicKeyRequest.getDefaultInstance()
- : PublicKeyRequest.newBuilder().setAlgorithm(kasInfo.Algorithm).build();
+ : PublicKeyRequest.newBuilder().setAlgorithm(kasInfo.getAlgorithm()).build();
- var req = getStub(kasInfo.URL).publicKeyBlocking(request, Collections.emptyMap()).execute();
+ var req = getStub(kasInfo.getURL()).publicKeyBlocking(request, Collections.emptyMap()).execute();
PublicKeyResponse resp;
try {
resp = RequestHelper.getOrThrow(req);
@@ -109,10 +109,10 @@ public Config.KASInfo getPublicKey(Config.KASInfo kasInfo) {
}
var kiCopy = new Config.KASInfo();
- kiCopy.KID = resp.getKid();
- kiCopy.PublicKey = resp.getPublicKey();
- kiCopy.URL = kasInfo.URL;
- kiCopy.Algorithm = kasInfo.Algorithm;
+ kiCopy.setKID(resp.getKid());
+ kiCopy.setPublicKey(resp.getPublicKey());
+ kiCopy.setURL(kasInfo.getURL());
+ kiCopy.setAlgorithm(kasInfo.getAlgorithm());
this.kasKeyCache.store(kiCopy);
return kiCopy;
@@ -130,22 +130,81 @@ public synchronized void close() {
}
static class RewrapRequestBody {
- String policy;
- String clientPublicKey;
- Manifest.KeyAccess keyAccess;
+ private String policy;
+ private String clientPublicKey;
+ private Manifest.KeyAccess keyAccess;
+
+ public String getPolicy() {
+ return policy;
+ }
+
+ public String getClientPublicKey() {
+ return clientPublicKey;
+ }
+
+ public Manifest.KeyAccess getKeyAccess() {
+ return keyAccess;
+ }
+
+ RewrapRequestBody(String policy, String clientPublicKey, Manifest.KeyAccess keyAccess) {
+ this.policy = policy;
+ this.clientPublicKey = clientPublicKey;
+ this.keyAccess = keyAccess;
+ }
}
static class NanoTDFKeyAccess {
- String header;
- String type;
- String url;
- String protocol;
+ private String header;
+ private String type;
+ private String url;
+ private String protocol;
+
+ public String getHeader() {
+ return header;
+ }
+
+ public String getType() {
+ return type;
+ }
+
+ public String getUrl() {
+ return url;
+ }
+
+ public String getProtocol() {
+ return protocol;
+ }
+
+ public NanoTDFKeyAccess(String header, String type, String url, String protocol) {
+ this.header = header;
+ this.type = type;
+ this.url = url;
+ this.protocol = protocol;
+ }
}
static class NanoTDFRewrapRequestBody {
- String algorithm;
- String clientPublicKey;
- NanoTDFKeyAccess keyAccess;
+ private String algorithm;
+ private String clientPublicKey;
+ private NanoTDFKeyAccess keyAccess;
+
+ public String getAlgorithm() {
+ return algorithm;
+ }
+
+ public String getClientPublicKey() {
+ return clientPublicKey;
+ }
+
+ public NanoTDFKeyAccess getKeyAccess() {
+ return keyAccess;
+ }
+
+ public NanoTDFRewrapRequestBody(String algorithm, String clientPublicKey, NanoTDFKeyAccess keyAccess) {
+ this.algorithm = algorithm;
+ this.clientPublicKey = clientPublicKey;
+ this.keyAccess = keyAccess;
+ }
}
private static final Gson gson = new Gson();
@@ -166,10 +225,7 @@ public byte[] unwrap(Manifest.KeyAccess keyAccess, String policy, KeyType sessi
}
}
- RewrapRequestBody body = new RewrapRequestBody();
- body.policy = policy;
- body.clientPublicKey = clientPublicKey;
- body.keyAccess = keyAccess;
+ RewrapRequestBody body = new RewrapRequestBody(policy, clientPublicKey, keyAccess);
var requestBody = gson.toJson(body);
var claims = new JWTClaimsSet.Builder()
@@ -226,16 +282,9 @@ public byte[] unwrap(Manifest.KeyAccess keyAccess, String policy, KeyType sessi
public byte[] unwrapNanoTDF(NanoTDFType.ECCurve curve, String header, String kasURL) {
ECKeyPair keyPair = new ECKeyPair(curve, ECKeyPair.ECAlgorithm.ECDH);
- NanoTDFKeyAccess keyAccess = new NanoTDFKeyAccess();
- keyAccess.header = header;
- keyAccess.type = "remote";
- keyAccess.url = kasURL;
- keyAccess.protocol = "kas";
+ NanoTDFKeyAccess keyAccess = new NanoTDFKeyAccess(header, "remote", kasURL, "kas");
- NanoTDFRewrapRequestBody body = new NanoTDFRewrapRequestBody();
- body.algorithm = format("ec:%s", curve.getCurveName());
- body.clientPublicKey = keyPair.publicKeyInPEMFormat();
- body.keyAccess = keyAccess;
+ NanoTDFRewrapRequestBody body = new NanoTDFRewrapRequestBody(format("ec:%s", curve.getCurveName()), keyPair.publicKeyInPEMFormat(), keyAccess);
var requestBody = gson.toJson(body);
var claims = new JWTClaimsSet.Builder()
diff --git a/sdk/src/main/java/io/opentdf/platform/sdk/KASKeyCache.java b/sdk/src/main/java/io/opentdf/platform/sdk/KASKeyCache.java
index 75bae93c..7d990765 100644
--- a/sdk/src/main/java/io/opentdf/platform/sdk/KASKeyCache.java
+++ b/sdk/src/main/java/io/opentdf/platform/sdk/KASKeyCache.java
@@ -50,7 +50,7 @@ public Config.KASInfo get(String url, String algorithm, String kid) {
public void store(Config.KASInfo kasInfo) {
log.debug("storing kasInfo into the cache {}", kasInfo);
- KASKeyRequest cacheKey = new KASKeyRequest(kasInfo.URL, kasInfo.Algorithm, kasInfo.KID);
+ KASKeyRequest cacheKey = new KASKeyRequest(kasInfo.getURL(), kasInfo.getAlgorithm(), kasInfo.getKID());
cache.put(cacheKey, new TimeStampedKASInfo(kasInfo, LocalDateTime.now()));
}
}
diff --git a/sdk/src/main/java/io/opentdf/platform/sdk/NanoTDF.java b/sdk/src/main/java/io/opentdf/platform/sdk/NanoTDF.java
index 2fe3d70d..b3f195ea 100644
--- a/sdk/src/main/java/io/opentdf/platform/sdk/NanoTDF.java
+++ b/sdk/src/main/java/io/opentdf/platform/sdk/NanoTDF.java
@@ -77,16 +77,16 @@ private static Optional getBaseKey(WellKnownServiceClientInterfa
}
private Optional getKasInfo(Config.NanoTDFConfig nanoTDFConfig) {
- if (nanoTDFConfig.kasInfoList.isEmpty()) {
+ if (nanoTDFConfig.getKasInfoList().isEmpty()) {
logger.debug("no kas info provided in NanoTDFConfig");
return Optional.empty();
}
- return Optional.of(nanoTDFConfig.kasInfoList.get(0));
+ return Optional.of(nanoTDFConfig.getKasInfoList().get(0));
}
private Config.HeaderInfo getHeaderInfo(Config.NanoTDFConfig nanoTDFConfig) throws InvalidNanoTDFConfig, UnsupportedNanoTDFFeature {
- if (nanoTDFConfig.collectionConfig.useCollection) {
- Config.HeaderInfo headerInfo = nanoTDFConfig.collectionConfig.getHeaderInfo();
+ if (nanoTDFConfig.getCollectionConfig().getUseCollection()) {
+ Config.HeaderInfo headerInfo = nanoTDFConfig.getCollectionConfig().getHeaderInfo();
if (headerInfo != null) {
return headerInfo;
}
@@ -97,21 +97,21 @@ private Config.HeaderInfo getHeaderInfo(Config.NanoTDFConfig nanoTDFConfig) thro
.or(() -> NanoTDF.getBaseKey(services.wellknown()))
.orElseThrow(() -> new SDKException("no KAS info provided and couldn't get base key, cannot create NanoTDF"));
- String url = kasInfo.URL;
- if (kasInfo.PublicKey == null || kasInfo.PublicKey.isEmpty()) {
+ String url = kasInfo.getURL();
+ if (kasInfo.getPublicKey() == null || kasInfo.getPublicKey().isEmpty()) {
logger.info("no public key provided for KAS at {}, retrieving", url);
- kasInfo = services.kas().getECPublicKey(kasInfo, nanoTDFConfig.eccMode.getCurve());
+ kasInfo = services.kas().getECPublicKey(kasInfo, nanoTDFConfig.getEccMode().getCurve());
}
// Kas url resource locator
- ResourceLocator kasURL = new ResourceLocator(kasInfo.URL, kasInfo.KID);
+ ResourceLocator kasURL = new ResourceLocator(kasInfo.getURL(), kasInfo.getKID());
assert kasURL.getIdentifier() != null : "Identifier in ResourceLocator cannot be null";
NanoTDFType.ECCurve ecCurve = getEcCurve(nanoTDFConfig, kasInfo);
ECKeyPair keyPair = new ECKeyPair(ecCurve, ECKeyPair.ECAlgorithm.ECDSA);
// Generate symmetric key
- ECPublicKey kasPublicKey = ECKeyPair.publicKeyFromPem(kasInfo.PublicKey);
+ ECPublicKey kasPublicKey = ECKeyPair.publicKeyFromPem(kasInfo.getPublicKey());
byte[] symmetricKey = ECKeyPair.computeECDHKey(kasPublicKey, keyPair.getPrivateKey());
// Generate HKDF key
@@ -125,7 +125,7 @@ private Config.HeaderInfo getHeaderInfo(Config.NanoTDFConfig nanoTDFConfig) thro
byte[] key = ECKeyPair.calculateHKDF(hashOfSalt, symmetricKey);
// Encrypt policy
- PolicyObject policyObject = createPolicyObject(nanoTDFConfig.attributes);
+ PolicyObject policyObject = createPolicyObject(nanoTDFConfig.getAttributes());
String policyObjectAsStr = gson.toJson(policyObject);
logger.debug("createNanoTDF policy object - {}", policyObjectAsStr);
@@ -134,19 +134,19 @@ private Config.HeaderInfo getHeaderInfo(Config.NanoTDFConfig nanoTDFConfig) thro
final byte[] policyBody;
PolicyInfo policyInfo = new PolicyInfo();
AesGcm gcm = new AesGcm(key);
- if (nanoTDFConfig.policyType == NanoTDFType.PolicyType.EMBEDDED_POLICY_PLAIN_TEXT) {
+ if (nanoTDFConfig.getPolicyType() == NanoTDFType.PolicyType.EMBEDDED_POLICY_PLAIN_TEXT) {
policyBody = policyObjectAsStr.getBytes(StandardCharsets.UTF_8);
policyInfo.setEmbeddedPlainTextPolicy(policyBody);
} else {
byte[] policyObjectAsBytes = policyObjectAsStr.getBytes(StandardCharsets.UTF_8);
- int authTagSize = SymmetricAndPayloadConfig.sizeOfAuthTagForCipher(nanoTDFConfig.config.getCipherType());
+ int authTagSize = SymmetricAndPayloadConfig.sizeOfAuthTagForCipher(nanoTDFConfig.getConfig().getCipherType());
byte[] encryptedPolicy = gcm.encrypt(kEmptyIV, authTagSize, policyObjectAsBytes, 0, policyObjectAsBytes.length);
policyBody = Arrays.copyOfRange(encryptedPolicy, kEmptyIV.length, encryptedPolicy.length);
policyInfo.setEmbeddedEncryptedTextPolicy(policyBody);
}
// Set policy binding (GMAC)
- if (nanoTDFConfig.eccMode.isECDSABindingEnabled()) {
+ if (nanoTDFConfig.getEccMode().isECDSABindingEnabled()) {
throw new UnsupportedNanoTDFFeature("ECDSA policy binding is not support");
} else {
byte[] hash = digest.digest(policyBody);
@@ -158,21 +158,21 @@ private Config.HeaderInfo getHeaderInfo(Config.NanoTDFConfig nanoTDFConfig) thro
byte[] compressedPubKey = keyPair.compressECPublickey();
Header header = new Header();
ECCMode mode;
- if (nanoTDFConfig.eccMode.getCurve() != keyPair.getCurve()) {
- mode = new ECCMode(nanoTDFConfig.eccMode.getECCModeAsByte());
+ if (nanoTDFConfig.getEccMode().getCurve() != keyPair.getCurve()) {
+ mode = new ECCMode(nanoTDFConfig.getEccMode().getECCModeAsByte());
mode.setEllipticCurve(keyPair.getCurve());
} else {
- mode = nanoTDFConfig.eccMode;
+ mode = nanoTDFConfig.getEccMode();
}
header.setECCMode(mode);
- header.setPayloadConfig(nanoTDFConfig.config);
+ header.setPayloadConfig(nanoTDFConfig.getConfig());
header.setEphemeralKey(compressedPubKey);
header.setKasLocator(kasURL);
header.setPolicyInfo(policyInfo);
Config.HeaderInfo headerInfo = new Config.HeaderInfo(header, gcm, 0);
- if (nanoTDFConfig.collectionConfig.useCollection) {
- nanoTDFConfig.collectionConfig.updateHeaderInfo(headerInfo);
+ if (nanoTDFConfig.getCollectionConfig().getUseCollection()) {
+ nanoTDFConfig.getCollectionConfig().updateHeaderInfo(headerInfo);
}
return headerInfo;
@@ -181,14 +181,14 @@ private Config.HeaderInfo getHeaderInfo(Config.NanoTDFConfig nanoTDFConfig) thro
private static NanoTDFType.ECCurve getEcCurve(Config.NanoTDFConfig nanoTDFConfig, Config.KASInfo kasInfo) {
// it might be better to pull the curve from the OIDC in the PEM but it looks like we
// are just taking the Algorithm as correct
- Optional<NanoTDFType.ECCurve> specifiedCurve = NanoTDFType.ECCurve.fromAlgorithm(kasInfo.Algorithm);
+ Optional<NanoTDFType.ECCurve> specifiedCurve = NanoTDFType.ECCurve.fromAlgorithm(kasInfo.getAlgorithm());
NanoTDFType.ECCurve ecCurve;
if (specifiedCurve.isEmpty()) {
- logger.info("no curve specified in KASInfo, using the curve from config [{}]", nanoTDFConfig.eccMode.getCurve());
- ecCurve = nanoTDFConfig.eccMode.getCurve();
+ logger.info("no curve specified in KASInfo, using the curve from config [{}]", nanoTDFConfig.getEccMode().getCurve());
+ ecCurve = nanoTDFConfig.getEccMode().getCurve();
} else {
- if (specifiedCurve.get() != nanoTDFConfig.eccMode.getCurve()) {
- logger.warn("ECCurve in NanoTDFConfig [{}] does not match the curve in KASInfo, using KASInfo curve [{}]", nanoTDFConfig.eccMode.getCurve(), specifiedCurve.get());
+ if (specifiedCurve.get() != nanoTDFConfig.getEccMode().getCurve()) {
+ logger.warn("ECCurve in NanoTDFConfig [{}] does not match the curve in KASInfo, using KASInfo curve [{}]", nanoTDFConfig.getEccMode().getCurve(), specifiedCurve.get());
}
ecCurve = specifiedCurve.get();
}
@@ -205,8 +205,8 @@ public int createNanoTDF(ByteBuffer data, OutputStream outputStream,
}
// check the policy type, support only embedded policy
- if (nanoTDFConfig.policyType != NanoTDFType.PolicyType.EMBEDDED_POLICY_PLAIN_TEXT &&
- nanoTDFConfig.policyType != NanoTDFType.PolicyType.EMBEDDED_POLICY_ENCRYPTED) {
+ if (nanoTDFConfig.getPolicyType() != NanoTDFType.PolicyType.EMBEDDED_POLICY_PLAIN_TEXT &&
+ nanoTDFConfig.getPolicyType() != NanoTDFType.PolicyType.EMBEDDED_POLICY_ENCRYPTED) {
throw new UnsupportedNanoTDFFeature("unsupported policy type");
}
@@ -224,10 +224,10 @@ public int createNanoTDF(ByteBuffer data, OutputStream outputStream,
nanoTDFSize += headerSize;
logger.debug("createNanoTDF header length {}", headerSize);
- int authTagSize = SymmetricAndPayloadConfig.sizeOfAuthTagForCipher(nanoTDFConfig.config.getCipherType());
+ int authTagSize = SymmetricAndPayloadConfig.sizeOfAuthTagForCipher(nanoTDFConfig.getConfig().getCipherType());
// Encrypt the data
byte[] actualIV = new byte[kIvPadding + kNanoTDFIvSize];
- if (nanoTDFConfig.collectionConfig.useCollection) {
+ if (nanoTDFConfig.getCollectionConfig().getUseCollection()) {
ByteBuffer b = ByteBuffer.allocate(4);
b.order(ByteOrder.LITTLE_ENDIAN);
b.putInt(iteration);
@@ -272,18 +272,18 @@ public void readNanoTDF(ByteBuffer nanoTDF, OutputStream outputStream, String pl
public void readNanoTDF(ByteBuffer nanoTDF, OutputStream outputStream,
Config.NanoTDFReaderConfig nanoTdfReaderConfig, String platformUrl) throws IOException {
- if (!nanoTdfReaderConfig.ignoreKasAllowlist && (nanoTdfReaderConfig.kasAllowlist == null || nanoTdfReaderConfig.kasAllowlist.isEmpty())) {
+ if (!nanoTdfReaderConfig.isIgnoreKasAllowlist() && (nanoTdfReaderConfig.getKasAllowlist() == null || nanoTdfReaderConfig.getKasAllowlist().isEmpty())) {
ListKeyAccessServersRequest request = ListKeyAccessServersRequest.newBuilder()
.build();
ListKeyAccessServersResponse response = ResponseMessageKt.getOrThrow(services.kasRegistry().listKeyAccessServersBlocking(request, Collections.emptyMap()).execute());
- nanoTdfReaderConfig.kasAllowlist = new HashSet<>();
+ nanoTdfReaderConfig.setKasAllowlist(new HashSet<>());
var kases = response.getKeyAccessServersList();
for (var entry : kases) {
- nanoTdfReaderConfig.kasAllowlist.add(Config.getKasAddress(entry.getUri()));
+ nanoTdfReaderConfig.getKasAllowlist().add(Config.getKasAddress(entry.getUri()));
}
- nanoTdfReaderConfig.kasAllowlist.add(Config.getKasAddress(platformUrl));
+ nanoTdfReaderConfig.getKasAllowlist().add(Config.getKasAddress(platformUrl));
}
readNanoTDF(nanoTDF, outputStream, nanoTdfReaderConfig);
}
@@ -308,12 +308,12 @@ public void readNanoTDF(ByteBuffer nanoTDF, OutputStream outputStream,
String kasUrl = header.getKasLocator().getResourceUrl();
var realAddress = Config.getKasAddress(kasUrl);
- if (nanoTdfReaderConfig.ignoreKasAllowlist) {
+ if (nanoTdfReaderConfig.isIgnoreKasAllowlist()) {
logger.warn("Ignoring KasAllowlist for url {}", realAddress);
- } else if (nanoTdfReaderConfig.kasAllowlist == null || nanoTdfReaderConfig.kasAllowlist.isEmpty()) {
+ } else if (nanoTdfReaderConfig.getKasAllowlist() == null || nanoTdfReaderConfig.getKasAllowlist().isEmpty()) {
logger.error("KasAllowlist: No KAS allowlist provided and no KeyAccessServerRegistry available, {} is not allowed", realAddress);
throw new KasAllowlistException("No KAS allowlist provided and no KeyAccessServerRegistry available");
- } else if (!nanoTdfReaderConfig.kasAllowlist.contains(realAddress)) {
+ } else if (!nanoTdfReaderConfig.getKasAllowlist().contains(realAddress)) {
logger.error("KasAllowlist: kas url {} is not allowed", realAddress);
throw new KasAllowlistException("KasAllowlist: kas url "+realAddress+" is not allowed");
}
diff --git a/sdk/src/main/java/io/opentdf/platform/sdk/Planner.java b/sdk/src/main/java/io/opentdf/platform/sdk/Planner.java
index b07c735c..6bf00890 100644
--- a/sdk/src/main/java/io/opentdf/platform/sdk/Planner.java
+++ b/sdk/src/main/java/io/opentdf/platform/sdk/Planner.java
@@ -46,16 +46,16 @@ private static String getUUID() {
Map<String, List<Config.KASInfo>> getSplits() {
List<Autoconfigure.KeySplitTemplate> splitPlan;
- if (tdfConfig.autoconfigure) {
- if (tdfConfig.splitPlan != null && !tdfConfig.splitPlan.isEmpty()) {
+ if (tdfConfig.getAutoconfigure()) {
+ if (tdfConfig.getSplitPlan() != null && !tdfConfig.getSplitPlan().isEmpty()) {
throw new IllegalArgumentException("cannot use autoconfigure with a split plan provided in the TDFConfig");
}
splitPlan = getAutoconfigurePlan(services, tdfConfig);
- } else if (tdfConfig.splitPlan == null || tdfConfig.splitPlan.isEmpty()) {
- splitPlan = generatePlanFromProvidedKases(tdfConfig.kasInfoList);
+ } else if (tdfConfig.getSplitPlan() == null || tdfConfig.getSplitPlan().isEmpty()) {
+ splitPlan = generatePlanFromProvidedKases(tdfConfig.getKasInfoList());
} else {
- splitPlan = tdfConfig.splitPlan.stream()
- .map(k -> new Autoconfigure.KeySplitTemplate(k.kas, k.splitID, null, null))
+ splitPlan = tdfConfig.getSplitPlan().stream()
+ .map(k -> new Autoconfigure.KeySplitTemplate(k.getKas(), k.getSplitID(), null, null))
.collect(Collectors.toList());
}
@@ -74,12 +74,12 @@ private List getAutoconfigurePlan(SDK.Services s
List<Autoconfigure.KeySplitTemplate> generatePlanFromProvidedKases(List<Config.KASInfo> kases) {
if (kases.size() == 1) {
var kasInfo = kases.get(0);
- return Collections.singletonList(new Autoconfigure.KeySplitTemplate(kasInfo.URL, "", kasInfo.KID, null));
+ return Collections.singletonList(new Autoconfigure.KeySplitTemplate(kasInfo.getURL(), "", kasInfo.getKID(), null));
}
List<Autoconfigure.KeySplitTemplate> splitPlan = new ArrayList<>();
for (var kasInfo : kases) {
- var keyType = kasInfo.Algorithm == null ? null : KeyType.fromString(kasInfo.Algorithm);
- splitPlan.add(new Autoconfigure.KeySplitTemplate(kasInfo.URL, getUUID(), kasInfo.KID, keyType));
+ var keyType = kasInfo.getAlgorithm() == null ? null : KeyType.fromString(kasInfo.getAlgorithm());
+ splitPlan.add(new Autoconfigure.KeySplitTemplate(kasInfo.getURL(), getUUID(), kasInfo.getKID(), keyType));
}
return splitPlan;
}
@@ -133,15 +133,15 @@ static Optional fetchBaseKey(WellKnownServiceClientInterface wellk
private static class BaseKey {
@SerializedName("kas_url")
- String kasUrl;
+ private String kasUrl;
@SerializedName("public_key")
- Key publicKey;
+ private Key publicKey;
private static class Key {
- String kid;
- String pem;
- Algorithm algorithm;
+ private String kid;
+ private String pem;
+ private Algorithm algorithm;
}
}
@@ -149,27 +149,27 @@ Map<String, List<Config.KASInfo>> resolveKeys(List<Autoconfigure.KeySplitTemplate> splitPlan) {
Map<String, List<Config.KASInfo>> conjunction = new HashMap<>();
var latestKASInfo = new HashMap<String, Config.KASInfo>();
// Seed anything passed in manually
- for (Config.KASInfo kasInfo : tdfConfig.kasInfoList) {
- if (kasInfo.PublicKey != null && !kasInfo.PublicKey.isEmpty()) {
- latestKASInfo.put(kasInfo.URL, kasInfo);
+ for (Config.KASInfo kasInfo : tdfConfig.getKasInfoList()) {
+ if (kasInfo.getPublicKey() != null && !kasInfo.getPublicKey().isEmpty()) {
+ latestKASInfo.put(kasInfo.getURL(), kasInfo);
}
}
for (var splitInfo: splitPlan) {
// Public key was passed in with kasInfoList
// TODO First look up in attribute information / add to split plan?
- Config.KASInfo ki = latestKASInfo.get(splitInfo.kas);
- if (ki == null || ki.PublicKey == null || ki.PublicKey.isBlank() || (splitInfo.kid != null && !splitInfo.kid.equals(ki.KID))) {
- logger.info("no public key provided for KAS at {}, retrieving", splitInfo.kas);
+ Config.KASInfo ki = latestKASInfo.get(splitInfo.getKas());
+ if (ki == null || ki.getPublicKey() == null || ki.getPublicKey().isBlank() || (splitInfo.getKid() != null && !splitInfo.getKid().equals(ki.getKID()))) {
+ logger.info("no public key provided for KAS at {}, retrieving", splitInfo.getKas());
var getKI = new Config.KASInfo();
- getKI.URL = splitInfo.kas;
- getKI.Algorithm = splitInfo.keyType == null
- ? (tdfConfig.wrappingKeyType == null ? null : tdfConfig.wrappingKeyType.toString())
- : splitInfo.keyType.toString();
+ getKI.setURL(splitInfo.getKas());
+ getKI.setAlgorithm(splitInfo.getKeyType() == null
+ ? (tdfConfig.getWrappingKeyType() == null ? null : tdfConfig.getWrappingKeyType().toString())
+ : splitInfo.getKeyType().toString());
ki = services.kas().getPublicKey(getKI);
- latestKASInfo.put(splitInfo.kas, ki);
+ latestKASInfo.put(splitInfo.getKas(), ki);
}
- conjunction.computeIfAbsent(splitInfo.splitID, s -> new ArrayList<>()).add(ki);
+ conjunction.computeIfAbsent(splitInfo.getSplitID(), s -> new ArrayList<>()).add(ki);
}
return conjunction;
}
@@ -178,11 +178,11 @@ static List<String> defaultKases(Config.TDFConfig config) {
List<String> allk = new ArrayList<>();
List<String> defk = new ArrayList<>();
- for (Config.KASInfo kasInfo : config.kasInfoList) {
- if (kasInfo.Default != null && kasInfo.Default) {
- defk.add(kasInfo.URL);
+ for (Config.KASInfo kasInfo : config.getKasInfoList()) {
+ if (kasInfo.getDefault() != null && kasInfo.getDefault()) {
+ defk.add(kasInfo.getURL());
} else if (defk.isEmpty()) {
- allk.add(kasInfo.URL);
+ allk.add(kasInfo.getURL());
}
}
return defk.isEmpty() ? allk : defk;
diff --git a/sdk/src/main/java/io/opentdf/platform/sdk/TDF.java b/sdk/src/main/java/io/opentdf/platform/sdk/TDF.java
index 2ae08f5b..1ab732ff 100644
--- a/sdk/src/main/java/io/opentdf/platform/sdk/TDF.java
+++ b/sdk/src/main/java/io/opentdf/platform/sdk/TDF.java
@@ -147,11 +147,11 @@ private PolicyObject createPolicyObject(List at
private static final Base64.Encoder encoder = Base64.getEncoder();
private void prepareManifest(Config.TDFConfig tdfConfig, Map<String, List<Config.KASInfo>> splits) {
- manifest.tdfVersion = tdfConfig.renderVersionInfoInManifest ? TDF_SPEC_VERSION : null;
+ manifest.tdfVersion = tdfConfig.getRenderVersionInfoInManifest() ? TDF_SPEC_VERSION : null;
manifest.encryptionInformation.keyAccessType = kSplitKeyType;
manifest.encryptionInformation.keyAccessObj = new ArrayList<>();
- PolicyObject policyObject = createPolicyObject(tdfConfig.attributes);
+ PolicyObject policyObject = createPolicyObject(tdfConfig.getAttributes());
String base64PolicyObject = encoder
.encodeToString(gson.toJson(policyObject).getBytes(StandardCharsets.UTF_8));
@@ -174,9 +174,9 @@ private void prepareManifest(Config.TDFConfig tdfConfig, Map<String, List<Config.KASInfo>> splits) {
List<Config.KASInfo> kasInfos = split.getValue();
for (Config.KASInfo kasInfo : kasInfos) {
- if (kasInfo.PublicKey == null || kasInfo.PublicKey.isEmpty()) {
+ if (kasInfo.getPublicKey() == null || kasInfo.getPublicKey().isEmpty()) {
throw new SDK.KasPublicKeyMissing("Kas public key is missing in kas information list");
}
@@ -215,17 +215,17 @@ private Manifest.KeyAccess createKeyAccess(Config.TDFConfig tdfConfig, Config.KA
Manifest.PolicyBinding policyBinding, String encryptedMetadata, String splitID) {
Manifest.KeyAccess keyAccess = new Manifest.KeyAccess();
keyAccess.keyType = kWrapped;
- keyAccess.url = kasInfo.URL;
- keyAccess.kid = kasInfo.KID;
+ keyAccess.url = kasInfo.getURL();
+ keyAccess.kid = kasInfo.getKID();
keyAccess.protocol = kKasProtocol;
keyAccess.policyBinding = policyBinding;
keyAccess.encryptedMetadata = encryptedMetadata;
keyAccess.sid = splitID;
keyAccess.schemaVersion = KEY_ACCESS_SCHEMA_VERSION;
- var algorithm = kasInfo.Algorithm == null || kasInfo.Algorithm.isEmpty()
- ? tdfConfig.wrappingKeyType.toString()
- : kasInfo.Algorithm;
+ var algorithm = kasInfo.getAlgorithm() == null || kasInfo.getAlgorithm().isEmpty()
+ ? tdfConfig.getWrappingKeyType().toString()
+ : kasInfo.getAlgorithm();
if (KeyType.fromString(algorithm).isEc()) {
var ecKeyWrappedKeyInfo = createECWrappedKey(tdfConfig, kasInfo, symKey);
@@ -241,10 +241,10 @@ private Manifest.KeyAccess createKeyAccess(Config.TDFConfig tdfConfig, Config.KA
private ECKeyWrappedKeyInfo createECWrappedKey(Config.TDFConfig tdfConfig, Config.KASInfo kasInfo,
byte[] symKey) {
- var curveName = tdfConfig.wrappingKeyType.getECCurve();
+ var curveName = tdfConfig.getWrappingKeyType().getECCurve();
var keyPair = new ECKeyPair(curveName, ECKeyPair.ECAlgorithm.ECDH);
- ECPublicKey kasPubKey = ECKeyPair.publicKeyFromPem(kasInfo.PublicKey);
+ ECPublicKey kasPubKey = ECKeyPair.publicKeyFromPem(kasInfo.getPublicKey());
byte[] symmetricKey = ECKeyPair.computeECDHKey(kasPubKey, keyPair.getPrivateKey());
var sessionKey = ECKeyPair.calculateHKDF(GLOBAL_KEY_SALT, symmetricKey);
@@ -259,7 +259,7 @@ private ECKeyWrappedKeyInfo createECWrappedKey(Config.TDFConfig tdfConfig, Confi
}
private String createRSAWrappedKey(Config.KASInfo kasInfo, byte[] symKey) {
- AsymEncryption asymEncrypt = new AsymEncryption(kasInfo.PublicKey);
+ AsymEncryption asymEncrypt = new AsymEncryption(kasInfo.getPublicKey());
byte[] wrappedKey = asymEncrypt.encrypt(symKey);
return Base64.getEncoder().encodeToString(wrappedKey);
}
@@ -370,19 +370,19 @@ TDFObject createTDF(InputStream payload, OutputStream outputStream, Config.TDFCo
Map<String, List<Config.KASInfo>> splits = planner.getSplits();
// Add System Metadata Assertion if configured
- if (tdfConfig.systemMetadataAssertion) {
+ if (tdfConfig.isSystemMetadataAssertion()) {
AssertionConfig systemAssertion = AssertionConfig.getSystemMetadataAssertionConfig(TDF_SPEC_VERSION);
- tdfConfig.assertionConfigList.add(systemAssertion);
+ tdfConfig.getAssertionConfigList().add(systemAssertion);
}
TDFObject tdfObject = new TDFObject();
tdfObject.prepareManifest(tdfConfig, splits);
- long encryptedSegmentSize = tdfConfig.defaultSegmentSize + kGcmIvSize + kAesBlockSize;
+ long encryptedSegmentSize = tdfConfig.getDefaultSegmentSize() + kGcmIvSize + kAesBlockSize;
TDFWriter tdfWriter = new TDFWriter(outputStream);
ByteArrayOutputStream aggregateHash = new ByteArrayOutputStream();
- byte[] readBuf = new byte[tdfConfig.defaultSegmentSize];
+ byte[] readBuf = new byte[tdfConfig.getDefaultSegmentSize()];
tdfObject.manifest.encryptionInformation.integrityInformation.segments = new ArrayList<>();
long totalSize = 0;
@@ -410,8 +410,8 @@ TDFObject createTDF(InputStream payload, OutputStream outputStream, Config.TDFCo
cipherData = tdfObject.aesGcm.encrypt(readBuf, 0, readThisLoop).asBytes();
payloadOutput.write(cipherData);
- segmentSig = calculateSignature(cipherData, tdfObject.payloadKey, tdfConfig.segmentIntegrityAlgorithm);
- if (tdfConfig.hexEncodeRootAndSegmentHashes) {
+ segmentSig = calculateSignature(cipherData, tdfObject.payloadKey, tdfConfig.getSegmentIntegrityAlgorithm());
+ if (tdfConfig.isHexEncodeRootAndSegmentHashes()) {
segmentSig = Hex.encodeHexString(segmentSig).getBytes(StandardCharsets.UTF_8);
}
segmentInfo.hash = Base64.getEncoder().encodeToString(segmentSig);
@@ -427,24 +427,24 @@ TDFObject createTDF(InputStream payload, OutputStream outputStream, Config.TDFCo
Manifest.RootSignature rootSignature = new Manifest.RootSignature();
byte[] rootSig = calculateSignature(aggregateHash.toByteArray(), tdfObject.payloadKey,
- tdfConfig.integrityAlgorithm);
- byte[] encodedRootSig = tdfConfig.hexEncodeRootAndSegmentHashes
+ tdfConfig.getIntegrityAlgorithm());
+ byte[] encodedRootSig = tdfConfig.isHexEncodeRootAndSegmentHashes()
? Hex.encodeHexString(rootSig).getBytes(StandardCharsets.UTF_8)
: rootSig;
rootSignature.signature = Base64.getEncoder().encodeToString(encodedRootSig);
String alg = kGmacIntegrityAlgorithm;
- if (tdfConfig.integrityAlgorithm == Config.IntegrityAlgorithm.HS256) {
+ if (tdfConfig.getIntegrityAlgorithm() == Config.IntegrityAlgorithm.HS256) {
alg = kHmacIntegrityAlgorithm;
}
rootSignature.algorithm = alg;
tdfObject.manifest.encryptionInformation.integrityInformation.rootSignature = rootSignature;
- tdfObject.manifest.encryptionInformation.integrityInformation.segmentSizeDefault = tdfConfig.defaultSegmentSize;
+ tdfObject.manifest.encryptionInformation.integrityInformation.segmentSizeDefault = tdfConfig.getDefaultSegmentSize();
tdfObject.manifest.encryptionInformation.integrityInformation.encryptedSegmentSizeDefault = (int) encryptedSegmentSize;
tdfObject.manifest.encryptionInformation.integrityInformation.segmentHashAlg = kGmacIntegrityAlgorithm;
- if (tdfConfig.segmentIntegrityAlgorithm == Config.IntegrityAlgorithm.HS256) {
+ if (tdfConfig.getSegmentIntegrityAlgorithm() == Config.IntegrityAlgorithm.HS256) {
tdfObject.manifest.encryptionInformation.integrityInformation.segmentHashAlg = kHmacIntegrityAlgorithm;
}
@@ -452,15 +452,15 @@ TDFObject createTDF(InputStream payload, OutputStream outputStream, Config.TDFCo
// Add payload info
tdfObject.manifest.payload = new Manifest.Payload();
- tdfObject.manifest.payload.mimeType = tdfConfig.mimeType;
+ tdfObject.manifest.payload.mimeType = tdfConfig.getMimeType();
tdfObject.manifest.payload.protocol = kTDFAsZip;
tdfObject.manifest.payload.type = kTDFZipReference;
tdfObject.manifest.payload.url = TDFWriter.TDF_PAYLOAD_FILE_NAME;
tdfObject.manifest.payload.isEncrypted = true;
- List<Manifest.Assertion> signedAssertions = new ArrayList<>(tdfConfig.assertionConfigList.size());
+ List<Manifest.Assertion> signedAssertions = new ArrayList<>(tdfConfig.getAssertionConfigList().size());
- for (var assertionConfig : tdfConfig.assertionConfigList) {
+ for (var assertionConfig : tdfConfig.getAssertionConfigList()) {
var assertion = new Manifest.Assertion();
assertion.id = assertionConfig.id;
assertion.type = assertionConfig.type.toString();
@@ -470,7 +470,7 @@ TDFObject createTDF(InputStream payload, OutputStream outputStream, Config.TDFCo
var assertionHashAsHex = assertion.hash();
byte[] assertionHash;
- if (tdfConfig.hexEncodeRootAndSegmentHashes) {
+ if (tdfConfig.isHexEncodeRootAndSegmentHashes()) {
assertionHash = assertionHashAsHex.getBytes(StandardCharsets.UTF_8);
} else {
try {
@@ -517,8 +517,8 @@ Reader loadTDF(SeekableByteChannel tdf, String platformUrl) throws SDKException,
Reader loadTDF(SeekableByteChannel tdf, Config.TDFReaderConfig tdfReaderConfig, String platformUrl)
throws SDKException, IOException {
- if (!tdfReaderConfig.ignoreKasAllowlist
- && (tdfReaderConfig.kasAllowlist == null || tdfReaderConfig.kasAllowlist.isEmpty())) {
+ if (!tdfReaderConfig.isIgnoreKasAllowlist()
+ && (tdfReaderConfig.getKasAllowlist() == null || tdfReaderConfig.getKasAllowlist().isEmpty())) {
ListKeyAccessServersRequest request = ListKeyAccessServersRequest.newBuilder()
.build();
ListKeyAccessServersResponse response;
@@ -528,12 +528,12 @@ Reader loadTDF(SeekableByteChannel tdf, Config.TDFReaderConfig tdfReaderConfig,
} catch (ConnectException e) {
throw new SDKException("error getting kas servers", e);
}
- tdfReaderConfig.kasAllowlist = new HashSet<>();
+ tdfReaderConfig.setKasAllowlist(new HashSet<>());
for (var entry : response.getKeyAccessServersList()) {
- tdfReaderConfig.kasAllowlist.add(Config.getKasAddress(entry.getUri()));
+ tdfReaderConfig.getKasAllowlist().add(Config.getKasAddress(entry.getUri()));
}
- tdfReaderConfig.kasAllowlist.add(Config.getKasAddress(platformUrl));
+ tdfReaderConfig.getKasAllowlist().add(Config.getKasAddress(platformUrl));
}
return loadTDF(tdf, tdfReaderConfig);
}
@@ -558,26 +558,26 @@ Reader loadTDF(SeekableByteChannel tdf, Config.TDFReaderConfig tdfReaderConfig)
String splitId = keyAccess.sid == null || keyAccess.sid.isEmpty() ? EMPTY_SPLIT_ID : keyAccess.sid;
Autoconfigure.KeySplitStep ss = new Autoconfigure.KeySplitStep(keyAccess.url, splitId);
byte[] unwrappedKey;
- if (foundSplits.contains(ss.splitID)) {
+ if (foundSplits.contains(ss.getSplitID())) {
continue;
}
- knownSplits.add(ss.splitID);
+ knownSplits.add(ss.getSplitID());
try {
var realAddress = Config.getKasAddress(keyAccess.url);
- if (tdfReaderConfig.ignoreKasAllowlist) {
+ if (tdfReaderConfig.isIgnoreKasAllowlist()) {
logger.warn("Ignoring KasAllowlist for url {}", realAddress);
- } else if (tdfReaderConfig.kasAllowlist == null || tdfReaderConfig.kasAllowlist.isEmpty()) {
+ } else if (tdfReaderConfig.getKasAllowlist() == null || tdfReaderConfig.getKasAllowlist().isEmpty()) {
logger.error(
"KasAllowlist: No KAS allowlist provided and no KeyAccessServerRegistry available, {} is not allowed",
realAddress);
throw new SDK.KasAllowlistException(
"No KAS allowlist provided and no KeyAccessServerRegistry available");
- } else if (!tdfReaderConfig.kasAllowlist.contains(realAddress)) {
+ } else if (!tdfReaderConfig.getKasAllowlist().contains(realAddress)) {
logger.error("KasAllowlist: kas url {} is not allowed", realAddress);
throw new SDK.KasAllowlistException("KasAllowlist: kas url " + realAddress + " is not allowed");
}
unwrappedKey = services.kas().unwrap(keyAccess, manifest.encryptionInformation.policy,
- tdfReaderConfig.sessionKeyType);
+ tdfReaderConfig.getSessionKeyType());
} catch (Exception e) {
skippedSplits.put(ss, e);
continue;
@@ -586,7 +586,7 @@ Reader loadTDF(SeekableByteChannel tdf, Config.TDFReaderConfig tdfReaderConfig)
for (int index = 0; index < unwrappedKey.length; index++) {
payloadKey[index] ^= unwrappedKey[index];
}
- foundSplits.add(ss.splitID);
+ foundSplits.add(ss.getSplitID());
if (keyAccess.encryptedMetadata != null && !keyAccess.encryptedMetadata.isEmpty()) {
AesGcm aesGcm = new AesGcm(unwrappedKey);
@@ -678,13 +678,13 @@ Reader loadTDF(SeekableByteChannel tdf, Config.TDFReaderConfig tdfReaderConfig)
// Validate assertions
for (var assertion : manifest.assertions) {
// Skip assertion verification if disabled
- if (tdfReaderConfig.disableAssertionVerification) {
+ if (tdfReaderConfig.isDisableAssertionVerification()) {
break;
}
// Set default to HS256
var assertionKey = new AssertionConfig.AssertionKey(AssertionConfig.AssertionKeyAlg.HS256, payloadKey);
- Config.AssertionVerificationKeys assertionVerificationKeys = tdfReaderConfig.assertionVerificationKeys;
+ Config.AssertionVerificationKeys assertionVerificationKeys = tdfReaderConfig.getAssertionVerificationKeys();
if (!assertionVerificationKeys.isEmpty()) {
var keyForAssertion = assertionVerificationKeys.getKey(assertion.id);
if (keyForAssertion != null) {
diff --git a/sdk/src/main/java/io/opentdf/platform/sdk/Version.java b/sdk/src/main/java/io/opentdf/platform/sdk/Version.java
index 0d881bb8..2abc1286 100644
--- a/sdk/src/main/java/io/opentdf/platform/sdk/Version.java
+++ b/sdk/src/main/java/io/opentdf/platform/sdk/Version.java
@@ -20,7 +20,7 @@ class Version implements Comparable<Version> {
private final String prereleaseAndMetadata;
private static final Logger log = LoggerFactory.getLogger(Version.class);
- Pattern SEMVER_PATTERN = Pattern.compile(
+ private static final Pattern SEMVER_PATTERN = Pattern.compile(
"^(?<major>0|[1-9]\\d*)\\.(?<minor>0|[1-9]\\d*)\\.(?<patch>0|[1-9]\\d*)(?<prereleaseAndMetadata>\\D.*)?$");
@Override
diff --git a/sdk/src/main/java/io/opentdf/platform/sdk/ZipWriter.java b/sdk/src/main/java/io/opentdf/platform/sdk/ZipWriter.java
index 71aea34c..43463a57 100644
--- a/sdk/src/main/java/io/opentdf/platform/sdk/ZipWriter.java
+++ b/sdk/src/main/java/io/opentdf/platform/sdk/ZipWriter.java
@@ -40,14 +40,14 @@ public OutputStream stream(String name) throws IOException {
var nameBytes = name.getBytes(StandardCharsets.UTF_8);
LocalFileHeader localFileHeader = new LocalFileHeader();
- localFileHeader.lastModifiedTime = (int) fileTime;
- localFileHeader.lastModifiedDate = (int) fileDate;
- localFileHeader.filenameLength = (short) nameBytes.length;
- localFileHeader.crc32 = 0;
- localFileHeader.generalPurposeBitFlag = (1 << 3) | (1 << 11); // we are using the data descriptor and we are using UTF-8
- localFileHeader.compressedSize = ZIP_64_MAGIC_VAL;
- localFileHeader.uncompressedSize = ZIP_64_MAGIC_VAL;
- localFileHeader.extraFieldLength = 0;
+ localFileHeader.setLastModifiedTime((int) fileTime);
+ localFileHeader.setLastModifiedDate((int) fileDate);
+ localFileHeader.setFilenameLength((short) nameBytes.length);
+ localFileHeader.setCrc32(0);
+ localFileHeader.setGeneralPurposeBitFlag((1 << 3) | (1 << 11)); // we are using the data descriptor and we are using UTF-8
+ localFileHeader.setCompressedSize(ZIP_64_MAGIC_VAL);
+ localFileHeader.setUncompressedSize(ZIP_64_MAGIC_VAL);
+ localFileHeader.setExtraFieldLength((short) 0);
localFileHeader.write(out, nameBytes);
@@ -75,20 +75,20 @@ public void close() throws IOException {
// Write Zip64 data descriptor
Zip64DataDescriptor dataDescriptor = new Zip64DataDescriptor();
- dataDescriptor.crc32 = crcValue;
- dataDescriptor.compressedSize = fileSize;
- dataDescriptor.uncompressedSize = fileSize;
+ dataDescriptor.setCrc32(crcValue);
+ dataDescriptor.setCompressedSize(fileSize);
+ dataDescriptor.setUncompressedSize(fileSize);
dataDescriptor.write(out);
var fileInfo = new FileInfo();
- fileInfo.offset = startPosition;
- fileInfo.flag = (short) localFileHeader.generalPurposeBitFlag;
- fileInfo.size = fileSize;
- fileInfo.crc = crcValue;
- fileInfo.filename = name;
- fileInfo.fileTime = (short) fileTime;
- fileInfo.fileDate = (short) fileDate;
- fileInfo.isZip64 = true;
+ fileInfo.setOffset(startPosition);
+ fileInfo.setFlag((short) localFileHeader.generalPurposeBitFlag);
+ fileInfo.setSize(fileSize);
+ fileInfo.setCrc(crcValue);
+ fileInfo.setFilename(name);
+ fileInfo.setFileTime((short) fileTime);
+ fileInfo.setFileDate((short) fileDate);
+ fileInfo.setIsZip64(true);
fileInfos.add(fileInfo);
}
@@ -158,14 +158,14 @@ private FileInfo writeByteArray(String name, byte[] data, CountingOutputStream o
var nameBytes = name.getBytes(StandardCharsets.UTF_8);
LocalFileHeader localFileHeader = new LocalFileHeader();
- localFileHeader.lastModifiedTime = (int) fileTime;
- localFileHeader.lastModifiedDate = (int) fileDate;
- localFileHeader.filenameLength = (short) nameBytes.length;
- localFileHeader.generalPurposeBitFlag = 0;
- localFileHeader.crc32 = (int) crcValue;
- localFileHeader.compressedSize = data.length;
- localFileHeader.uncompressedSize = data.length;
- localFileHeader.extraFieldLength = 0;
+ localFileHeader.setLastModifiedTime((int) fileTime);
+ localFileHeader.setLastModifiedDate((int) fileDate);
+ localFileHeader.setFilenameLength((short) nameBytes.length);
+ localFileHeader.setGeneralPurposeBitFlag(0);
+ localFileHeader.setCrc32((int) crcValue);
+ localFileHeader.setCompressedSize(data.length);
+ localFileHeader.setUncompressedSize(data.length);
+ localFileHeader.setExtraFieldLength((short) 0);
localFileHeader.write(out, name.getBytes(StandardCharsets.UTF_8));
@@ -218,8 +218,7 @@ private void writeZip64EndOfCentralDirectory(long numEntries, long startOfCentra
private void writeZip64EndOfCentralDirectoryLocator(long startOfEndOfCD, OutputStream out) throws IOException {
Zip64EndOfCDRecordLocator zip64EndOfCDRecordLocator = new Zip64EndOfCDRecordLocator();
- zip64EndOfCDRecordLocator.CDOffset = startOfEndOfCD;
-
+ zip64EndOfCDRecordLocator.setCDOffset(startOfEndOfCD);
zip64EndOfCDRecordLocator.write(out);
}
@@ -260,18 +259,87 @@ private static long getTimeDateUnMSDosFormat() {
}
private static class LocalFileHeader {
- final int signature = 0x04034b50;
- final int version = ZIP_VERSION;
- int generalPurposeBitFlag;
- final int compressionMethod = 0;
- int lastModifiedTime;
- int lastModifiedDate;
- int crc32;
- int compressedSize;
- int uncompressedSize;
-
- short filenameLength;
- short extraFieldLength = 0;
+ private static final int signature = 0x04034b50;
+ private static final int version = ZIP_VERSION;
+ private int generalPurposeBitFlag;
+ private static final int compressionMethod = 0;
+ private int lastModifiedTime;
+ private int lastModifiedDate;
+ private int crc32;
+ private int compressedSize;
+ private int uncompressedSize;
+
+ private short filenameLength;
+ private short extraFieldLength = 0;
+
+ public void setGeneralPurposeBitFlag(int generalPurposeBitFlag) {
+ this.generalPurposeBitFlag = generalPurposeBitFlag;
+ }
+
+ public int getGeneralPurposeBitFlag() {
+ return generalPurposeBitFlag;
+ }
+
+ public short getFilenameLength() {
+ return filenameLength;
+ }
+
+ public void setFilenameLength(short filenameLength) {
+ this.filenameLength = filenameLength;
+ }
+
+ public short getExtraFieldLength() {
+ return extraFieldLength;
+ }
+
+ public void setExtraFieldLength(short extraFieldLength) {
+ this.extraFieldLength = extraFieldLength;
+ }
+
+ public void setLastModifiedTime(int lastModifiedTime) {
+ this.lastModifiedTime = lastModifiedTime;
+ }
+
+ public int getLastModifiedTime() {
+ return lastModifiedTime;
+ }
+
+ public void setLastModifiedDate(int lastModifiedDate) {
+ this.lastModifiedDate = lastModifiedDate;
+ }
+
+ public int getLastModifiedDate() {
+ return lastModifiedDate;
+ }
+
+ public void setCrc32(int crc32) {
+ this.crc32 = crc32;
+ }
+
+ public int getCrc32() {
+ return crc32;
+ }
+
+ public int getCompressionMethod() {
+ return compressionMethod;
+ }
+
+ public void setCompressedSize(int compressedSize) {
+ this.compressedSize = compressedSize;
+ }
+
+ public int getCompressedSize() {
+ return compressedSize;
+ }
+
+ public void setUncompressedSize(int uncompressedSize) {
+ this.uncompressedSize = uncompressedSize;
+ }
+
+ public int getUncompressedSize() {
+ return uncompressedSize;
+ }
+
void write(OutputStream out, byte[] filename) throws IOException {
ByteBuffer buffer = ByteBuffer.allocate(30 + filename.length);
@@ -295,10 +363,38 @@ void write(OutputStream out, byte[] filename) throws IOException {
}
private static class Zip64DataDescriptor {
- final int signature = 0x08074b50;
- long crc32;
- long compressedSize;
- long uncompressedSize;
+ private final int signature = 0x08074b50;
+ private long crc32;
+ private long compressedSize;
+ private long uncompressedSize;
+
+ public int getSignature() {
+ return signature;
+ }
+
+ public long getCrc32() {
+ return crc32;
+ }
+
+ public void setCrc32(long crc32) {
+ this.crc32 = crc32;
+ }
+
+ public long getCompressedSize() {
+ return compressedSize;
+ }
+
+ public void setCompressedSize(long compressedSize) {
+ this.compressedSize = compressedSize;
+ }
+
+ public long getUncompressedSize() {
+ return uncompressedSize;
+ }
+
+ public void setUncompressedSize(long uncompressedSize) {
+ this.uncompressedSize = uncompressedSize;
+ }
void write(OutputStream out) throws IOException {
ByteBuffer buffer = ByteBuffer.allocate(ZIP_64_DATA_DESCRIPTOR_SIZE);
@@ -314,23 +410,127 @@ void write(OutputStream out) throws IOException {
}
private static class CDFileHeader {
- final int signature = 0x02014b50;
- final short versionCreated = ZIP_VERSION;
- final short versionNeeded = ZIP_VERSION;
- int generalPurposeBitFlag;
- final int compressionMethod = 0;
- int lastModifiedTime;
- int lastModifiedDate;
- int crc32;
- int compressedSize;
- int uncompressedSize;
- short filenameLength;
- short extraFieldLength;
- final short fileCommentLength = 0;
- final short diskNumberStart = 0;
- final short internalFileAttributes = 0;
- final int externalFileAttributes = 0;
- int localHeaderOffset;
+ private final int signature = 0x02014b50;
+ private final short versionCreated = ZIP_VERSION;
+ private final short versionNeeded = ZIP_VERSION;
+ private int generalPurposeBitFlag;
+ private final int compressionMethod = 0;
+ private int lastModifiedTime;
+ private int lastModifiedDate;
+ private int crc32;
+ private int compressedSize;
+ private int uncompressedSize;
+ private short filenameLength;
+ private short extraFieldLength;
+ private final short fileCommentLength = 0;
+ private final short diskNumberStart = 0;
+ private final short internalFileAttributes = 0;
+ private final int externalFileAttributes = 0;
+ private int localHeaderOffset;
+
+ public int getSignature() {
+ return signature;
+ }
+
+ public short getVersionCreated() {
+ return versionCreated;
+ }
+
+ public short getVersionNeeded() {
+ return versionNeeded;
+ }
+
+ public int getGeneralPurposeBitFlag() {
+ return generalPurposeBitFlag;
+ }
+
+ public void setGeneralPurposeBitFlag(int generalPurposeBitFlag) {
+ this.generalPurposeBitFlag = generalPurposeBitFlag;
+ }
+
+ public int getCompressionMethod() {
+ return compressionMethod;
+ }
+
+ public int getLastModifiedTime() {
+ return lastModifiedTime;
+ }
+
+ public void setLastModifiedTime(int lastModifiedTime) {
+ this.lastModifiedTime = lastModifiedTime;
+ }
+
+ public int getLastModifiedDate() {
+ return lastModifiedDate;
+ }
+
+ public void setLastModifiedDate(int lastModifiedDate) {
+ this.lastModifiedDate = lastModifiedDate;
+ }
+
+ public int getCrc32() {
+ return crc32;
+ }
+
+ public void setCrc32(int crc32) {
+ this.crc32 = crc32;
+ }
+
+ public int getCompressedSize() {
+ return compressedSize;
+ }
+
+ public void setCompressedSize(int compressedSize) {
+ this.compressedSize = compressedSize;
+ }
+
+ public int getUncompressedSize() {
+ return uncompressedSize;
+ }
+
+ public void setUncompressedSize(int uncompressedSize) {
+ this.uncompressedSize = uncompressedSize;
+ }
+
+ public short getFilenameLength() {
+ return filenameLength;
+ }
+
+ public void setFilenameLength(short filenameLength) {
+ this.filenameLength = filenameLength;
+ }
+
+ public short getExtraFieldLength() {
+ return extraFieldLength;
+ }
+
+ public void setExtraFieldLength(short extraFieldLength) {
+ this.extraFieldLength = extraFieldLength;
+ }
+
+ public short getFileCommentLength() {
+ return fileCommentLength;
+ }
+
+ public short getDiskNumberStart() {
+ return diskNumberStart;
+ }
+
+ public short getInternalFileAttributes() {
+ return internalFileAttributes;
+ }
+
+ public int getExternalFileAttributes() {
+ return externalFileAttributes;
+ }
+
+ public int getLocalHeaderOffset() {
+ return localHeaderOffset;
+ }
+
+ public void setLocalHeaderOffset(int localHeaderOffset) {
+ this.localHeaderOffset = localHeaderOffset;
+ }
void write(OutputStream out, byte[] filename) throws IOException {
ByteBuffer buffer = ByteBuffer.allocate(46 + filename.length);
@@ -360,11 +560,43 @@ void write(OutputStream out, byte[] filename) throws IOException {
}
private static class Zip64GlobalExtendedInfoExtraField {
- final short signature = 0x0001;
- final short size = ZIP_64_GLOBAL_EXTENDED_INFO_EXTRA_FIELD_SIZE - 4;
- long originalSize;
- long compressedSize;
- long localFileHeaderOffset;
+ private final short signature = 0x0001;
+ private final short size = ZIP_64_GLOBAL_EXTENDED_INFO_EXTRA_FIELD_SIZE - 4;
+ private long originalSize;
+ private long compressedSize;
+ private long localFileHeaderOffset;
+
+ public long getLocalFileHeaderOffset() {
+ return localFileHeaderOffset;
+ }
+
+ public void setLocalFileHeaderOffset(long localFileHeaderOffset) {
+ this.localFileHeaderOffset = localFileHeaderOffset;
+ }
+
+ public long getCompressedSize() {
+ return compressedSize;
+ }
+
+ public void setCompressedSize(long compressedSize) {
+ this.compressedSize = compressedSize;
+ }
+
+ public long getOriginalSize() {
+ return originalSize;
+ }
+
+ public void setOriginalSize(long originalSize) {
+ this.originalSize = originalSize;
+ }
+
+ public short getSize() {
+ return size;
+ }
+
+ public short getSignature() {
+ return signature;
+ }
void write(OutputStream out) throws IOException {
var buffer = ByteBuffer.allocate(ZIP_64_GLOBAL_EXTENDED_INFO_EXTRA_FIELD_SIZE);
@@ -381,14 +613,62 @@ void write(OutputStream out) throws IOException {
}
private static class EndOfCDRecord {
- final int signature = 0x06054b50;
- final short diskNumber = 0;
- final short startDiskNumber = 0;
- short numberOfCDRecordEntries;
- short totalCDRecordEntries;
- int sizeOfCentralDirectory;
- int centralDirectoryOffset;
- final short commentLength = 0;
+ private final int signature = 0x06054b50;
+ private final short diskNumber = 0;
+ private final short startDiskNumber = 0;
+ private short numberOfCDRecordEntries;
+ private short totalCDRecordEntries;
+ private int sizeOfCentralDirectory;
+ private int centralDirectoryOffset;
+ private final short commentLength = 0;
+
+ public int getSignature() {
+ return signature;
+ }
+
+ public short getDiskNumber() {
+ return diskNumber;
+ }
+
+ public short getStartDiskNumber() {
+ return startDiskNumber;
+ }
+
+ public short getNumberOfCDRecordEntries() {
+ return numberOfCDRecordEntries;
+ }
+
+ public void setNumberOfCDRecordEntries(short numberOfCDRecordEntries) {
+ this.numberOfCDRecordEntries = numberOfCDRecordEntries;
+ }
+
+ public short getTotalCDRecordEntries() {
+ return totalCDRecordEntries;
+ }
+
+ public void setTotalCDRecordEntries(short totalCDRecordEntries) {
+ this.totalCDRecordEntries = totalCDRecordEntries;
+ }
+
+ public int getSizeOfCentralDirectory() {
+ return sizeOfCentralDirectory;
+ }
+
+ public void setSizeOfCentralDirectory(int sizeOfCentralDirectory) {
+ this.sizeOfCentralDirectory = sizeOfCentralDirectory;
+ }
+
+ public int getCentralDirectoryOffset() {
+ return centralDirectoryOffset;
+ }
+
+ public void setCentralDirectoryOffset(int centralDirectoryOffset) {
+ this.centralDirectoryOffset = centralDirectoryOffset;
+ }
+
+ public short getCommentLength() {
+ return commentLength;
+ }
void write(OutputStream out) throws IOException {
ByteBuffer buffer = ByteBuffer.allocate(22);
@@ -408,16 +688,72 @@ void write(OutputStream out) throws IOException {
}
private static class Zip64EndOfCDRecord {
- final int signature = 0x06064b50;
- final long recordSize = ZIP_64_END_OF_CD_RECORD_SIZE - 12;
- final short versionMadeBy = ZIP_VERSION;
- final short versionToExtract = ZIP_VERSION;
- final int diskNumber = 0;
- final int startDiskNumber = 0;
- long numberOfCDRecordEntries;
- long totalCDRecordEntries;
- long centralDirectorySize;
- long startingDiskCentralDirectoryOffset;
+ private final int signature = 0x06064b50;
+ private final long recordSize = ZIP_64_END_OF_CD_RECORD_SIZE - 12;
+ private final short versionMadeBy = ZIP_VERSION;
+ private final short versionToExtract = ZIP_VERSION;
+ private final int diskNumber = 0;
+ private final int startDiskNumber = 0;
+ private long numberOfCDRecordEntries;
+ private long totalCDRecordEntries;
+ private long centralDirectorySize;
+ private long startingDiskCentralDirectoryOffset;
+
+ public int getSignature() {
+ return signature;
+ }
+
+ public long getRecordSize() {
+ return recordSize;
+ }
+
+ public short getVersionMadeBy() {
+ return versionMadeBy;
+ }
+
+ public short getVersionToExtract() {
+ return versionToExtract;
+ }
+
+ public int getDiskNumber() {
+ return diskNumber;
+ }
+
+ public int getStartDiskNumber() {
+ return startDiskNumber;
+ }
+
+ public long getNumberOfCDRecordEntries() {
+ return numberOfCDRecordEntries;
+ }
+
+ public void setNumberOfCDRecordEntries(long numberOfCDRecordEntries) {
+ this.numberOfCDRecordEntries = numberOfCDRecordEntries;
+ }
+
+ public long getTotalCDRecordEntries() {
+ return totalCDRecordEntries;
+ }
+
+ public void setTotalCDRecordEntries(long totalCDRecordEntries) {
+ this.totalCDRecordEntries = totalCDRecordEntries;
+ }
+
+ public long getCentralDirectorySize() {
+ return centralDirectorySize;
+ }
+
+ public void setCentralDirectorySize(long centralDirectorySize) {
+ this.centralDirectorySize = centralDirectorySize;
+ }
+
+ public long getStartingDiskCentralDirectoryOffset() {
+ return startingDiskCentralDirectoryOffset;
+ }
+
+ public void setStartingDiskCentralDirectoryOffset(long startingDiskCentralDirectoryOffset) {
+ this.startingDiskCentralDirectoryOffset = startingDiskCentralDirectoryOffset;
+ }
void write(OutputStream out) throws IOException {
ByteBuffer buffer = ByteBuffer.allocate(56);
@@ -439,10 +775,18 @@ void write(OutputStream out) throws IOException {
private static class Zip64EndOfCDRecordLocator {
- final int signature = 0x07064b50;
- final int CDStartDiskNumber = 0;
- long CDOffset;
- final int numberOfDisks = 1;
+ private static final int signature = 0x07064b50;
+ private static final int CDStartDiskNumber = 0;
+ private long CDOffset;
+ private static final int numberOfDisks = 1;
+
+ public long getCDOffset() {
+ return CDOffset;
+ }
+
+ public void setCDOffset(long cdOffset) {
+ this.CDOffset = cdOffset;
+ }
void write(OutputStream out) throws IOException {
ByteBuffer buffer = ByteBuffer.allocate(20);
@@ -457,13 +801,77 @@ void write(OutputStream out) throws IOException {
}
private static class FileInfo {
- long crc;
- long size;
- long offset;
- String filename;
- short fileTime;
- short fileDate;
- short flag;
- boolean isZip64;
+ private long crc;
+ private long size;
+ private long offset;
+ private String filename;
+ private short fileTime;
+ private short fileDate;
+ private short flag;
+ private boolean isZip64;
+
+ public long getCrc() {
+ return crc;
+ }
+
+ public void setCrc(long crc) {
+ this.crc = crc;
+ }
+
+ public long getSize() {
+ return size;
+ }
+
+ public void setSize(long size) {
+ this.size = size;
+ }
+
+ public long getOffset() {
+ return offset;
+ }
+
+ public void setOffset(long offset) {
+ this.offset = offset;
+ }
+
+ public String getFilename() {
+ return filename;
+ }
+
+ public void setFilename(String filename) {
+ this.filename = filename;
+ }
+
+ public short getFileTime() {
+ return fileTime;
+ }
+
+ public void setFileTime(short fileTime) {
+ this.fileTime = fileTime;
+ }
+
+ public short getFileDate() {
+ return fileDate;
+ }
+
+ public void setFileDate(short fileDate) {
+ this.fileDate = fileDate;
+ }
+
+ public short getFlag() {
+ return flag;
+ }
+
+ public void setFlag(short flag) {
+ this.flag = flag;
+ }
+
+ public boolean isZip64() {
+ return isZip64;
+ }
+
+ public void setIsZip64(boolean zip64) {
+ this.isZip64 = zip64;
+ }
}
}
\ No newline at end of file
diff --git a/sdk/src/test/java/io/opentdf/platform/sdk/AutoconfigureTest.java b/sdk/src/test/java/io/opentdf/platform/sdk/AutoconfigureTest.java
index 53fe1c06..2e318c50 100644
--- a/sdk/src/test/java/io/opentdf/platform/sdk/AutoconfigureTest.java
+++ b/sdk/src/test/java/io/opentdf/platform/sdk/AutoconfigureTest.java
@@ -427,7 +427,7 @@ public void testConfigurationServicePutGet() {
Set actualKases = new HashSet<>();
for (Autoconfigure.KeyAccessGrant g : grants.getGrants().values()) {
assertThat(g).isNotNull();
- for (String k : g.kases) {
+ for (String k : g.getKases()) {
actualKases.add(k);
}
}
@@ -863,10 +863,10 @@ void testStoreKeysToCache_WithKeys() {
// Verify that the key was stored in the cache
Config.KASInfo storedKASInfo = keyCache.get("https://example.com/kas", "ec:secp256r1", "test-kid");
assertNotNull(storedKASInfo);
- assertEquals("https://example.com/kas", storedKASInfo.URL);
- assertEquals("test-kid", storedKASInfo.KID);
- assertEquals("ec:secp256r1", storedKASInfo.Algorithm);
- assertEquals("public-key-pem", storedKASInfo.PublicKey);
+ assertEquals("https://example.com/kas", storedKASInfo.getURL());
+ assertEquals("test-kid", storedKASInfo.getKID());
+ assertEquals("ec:secp256r1", storedKASInfo.getAlgorithm());
+ assertEquals("public-key-pem", storedKASInfo.getPublicKey());
}
@Test
@@ -906,17 +906,17 @@ void testStoreKeysToCache_MultipleKasEntries() {
// Verify that the key was stored in the cache
Config.KASInfo storedKASInfo = keyCache.get("https://example.com/kas", "ec:secp256r1", "test-kid");
assertNotNull(storedKASInfo);
- assertEquals("https://example.com/kas", storedKASInfo.URL);
- assertEquals("test-kid", storedKASInfo.KID);
- assertEquals("ec:secp256r1", storedKASInfo.Algorithm);
- assertEquals("public-key-pem", storedKASInfo.PublicKey);
+ assertEquals("https://example.com/kas", storedKASInfo.getURL());
+ assertEquals("test-kid", storedKASInfo.getKID());
+ assertEquals("ec:secp256r1", storedKASInfo.getAlgorithm());
+ assertEquals("public-key-pem", storedKASInfo.getPublicKey());
Config.KASInfo storedKASInfo2 = keyCache.get("https://example.com/kas", "rsa:2048", "test-kid-2");
assertNotNull(storedKASInfo2);
- assertEquals("https://example.com/kas", storedKASInfo2.URL);
- assertEquals("test-kid-2", storedKASInfo2.KID);
- assertEquals("rsa:2048", storedKASInfo2.Algorithm);
- assertEquals("public-key-pem-2", storedKASInfo2.PublicKey);
+ assertEquals("https://example.com/kas", storedKASInfo2.getURL());
+ assertEquals("test-kid-2", storedKASInfo2.getKID());
+ assertEquals("rsa:2048", storedKASInfo2.getAlgorithm());
+ assertEquals("public-key-pem-2", storedKASInfo2.getPublicKey());
}
GetAttributeValuesByFqnsResponse getResponseWithGrants(GetAttributeValuesByFqnsRequest req,
@@ -995,17 +995,17 @@ public void cancel() {
// Verify that the key was stored in the cache
Config.KASInfo storedKASInfo = keyCache.get("https://example.com/kas", "ec:secp256r1", "test-kid");
assertNotNull(storedKASInfo);
- assertEquals("https://example.com/kas", storedKASInfo.URL);
- assertEquals("test-kid", storedKASInfo.KID);
- assertEquals("ec:secp256r1", storedKASInfo.Algorithm);
- assertEquals("public-key-pem", storedKASInfo.PublicKey);
+ assertEquals("https://example.com/kas", storedKASInfo.getURL());
+ assertEquals("test-kid", storedKASInfo.getKID());
+ assertEquals("ec:secp256r1", storedKASInfo.getAlgorithm());
+ assertEquals("public-key-pem", storedKASInfo.getPublicKey());
Config.KASInfo storedKASInfo2 = keyCache.get("https://example.com/kas", "rsa:2048", "test-kid-2");
assertNotNull(storedKASInfo2);
- assertEquals("https://example.com/kas", storedKASInfo2.URL);
- assertEquals("test-kid-2", storedKASInfo2.KID);
- assertEquals("rsa:2048", storedKASInfo2.Algorithm);
- assertEquals("public-key-pem-2", storedKASInfo2.PublicKey);
+ assertEquals("https://example.com/kas", storedKASInfo2.getURL());
+ assertEquals("test-kid-2", storedKASInfo2.getKID());
+ assertEquals("rsa:2048", storedKASInfo2.getAlgorithm());
+ assertEquals("public-key-pem-2", storedKASInfo2.getPublicKey());
}
@Test
@@ -1052,7 +1052,7 @@ void testUsingDefaultKasesWhenNothingElseProvided() {
void createsGranterFromAttributeValues() {
// Arrange
Config.TDFConfig config = new Config.TDFConfig();
- config.attributeValues = List.of(mockValueFor(spk2spk), mockValueFor(rel2gbr));
+ config.setAttributeValues(List.of(mockValueFor(spk2spk), mockValueFor(rel2gbr)));
SDK.Services services = mock(SDK.Services.class);
SDK.KAS kas = mock(SDK.KAS.class);
@@ -1104,11 +1104,12 @@ void createsGranterFromService() {
return TestUtil.successfulUnaryCall(builder.build());
});
+ Config.TDFConfig tdfConfig = new Config.TDFConfig();
+ tdfConfig.setAttributeValues(null); // force use of service
+ tdfConfig.setAttributes(policy);
+
// Act
- Autoconfigure.Granter granter = Autoconfigure.createGranter(services, new Config.TDFConfig() {{
- attributeValues = null; // force use of service
- attributes = policy;
- }});
+ Autoconfigure.Granter granter = Autoconfigure.createGranter(services, tdfConfig);
// Assert
assertThat(granter).isNotNull();
@@ -1120,17 +1121,16 @@ void createsGranterFromService() {
@Test
void getSplits_usesAutoconfigurePlan_whenAutoconfigureTrue() {
var tdfConfig = new Config.TDFConfig();
- tdfConfig.autoconfigure = true;
- tdfConfig.kasInfoList = new ArrayList<>();
- tdfConfig.splitPlan = null;
+ tdfConfig.setAutoconfigure(true);
+ tdfConfig.setKasInfoList(new ArrayList<>());
+ tdfConfig.setSplitPlan(null);
var kas = Mockito.mock(SDK.KAS.class);
Mockito.when(kas.getKeyCache()).thenReturn(new KASKeyCache());
- Config.KASInfo kasInfo = new Config.KASInfo() {{
- URL = "https://kas.example.com";
- Algorithm = "ec:secp256r1";
- KID = "kid";
- }};
+ Config.KASInfo kasInfo = new Config.KASInfo();
+ kasInfo.setURL("https://kas.example.com");
+ kasInfo.setAlgorithm("ec:secp256r1");
+ kasInfo.setKID("kid");
Mockito.when(kas.getPublicKey(any())).thenReturn(kasInfo);
var services = new FakeServicesBuilder().setKas(kas).build();
@@ -1155,22 +1155,25 @@ void getSplits_usesAutoconfigurePlan_whenAutoconfigureTrue() {
// Assert
assertThat(splits).containsKey("");
assertThat(splits.get("")).hasSize(1);
- assertThat(splits.get("").get(0).URL).isEqualTo("https://kas.example.com");
- assertThat(splits.get("").get(0).KID).isEqualTo("kid");
- assertThat(splits.get("").get(0).Algorithm).isEqualTo("ec:secp256r1");
+ assertThat(splits.get("").get(0).getURL()).isEqualTo("https://kas.example.com");
+ assertThat(splits.get("").get(0).getKID()).isEqualTo("kid");
+ assertThat(splits.get("").get(0).getAlgorithm()).isEqualTo("ec:secp256r1");
}
@Test
void testInvalidConfigurations() {
var config = new Config.TDFConfig();
- config.autoconfigure = true;
- config.splitPlan = List.of(new KeySplitStep("kas1", ""));
+ config.setAutoconfigure(true);
+ config.setSplitPlan(List.of(new KeySplitStep("kas1", "")));
Planner planner = new Planner(config, new FakeServicesBuilder().build(), (a, b) -> { throw new IllegalStateException("no way"); });
Exception thrown = assertThrows(IllegalArgumentException.class, () -> planner.getSplits());
assertThat(thrown.getMessage()).contains("cannot use autoconfigure with a split plan provided in the TDFConfig");
- config = new Config.TDFConfig() {{ autoconfigure = false; kasInfoList = Collections.EMPTY_LIST; splitPlan = null; }};
+ config = new Config.TDFConfig();
+ config.setAutoconfigure(false);
+ config.setKasInfoList(Collections.emptyList());
+ config.setSplitPlan(null);
var otherPlanner = new Planner(config, new FakeServicesBuilder().build(), (a, b) -> { throw new IllegalStateException("no way"); });
thrown = assertThrows(SDK.KasInfoMissing.class, () -> otherPlanner.getSplits());
assertThat(thrown.getMessage()).contains("no plan was constructed via autoconfigure, explicit split plan or provided kases");
diff --git a/sdk/src/test/java/io/opentdf/platform/sdk/ConfigTest.java b/sdk/src/test/java/io/opentdf/platform/sdk/ConfigTest.java
index 70527133..76a3392f 100644
--- a/sdk/src/test/java/io/opentdf/platform/sdk/ConfigTest.java
+++ b/sdk/src/test/java/io/opentdf/platform/sdk/ConfigTest.java
@@ -13,46 +13,46 @@ class ConfigTest {
@Test
void newTDFConfig_shouldCreateDefaultConfig() {
Config.TDFConfig config = Config.newTDFConfig();
- assertEquals(Config.DEFAULT_SEGMENT_SIZE, config.defaultSegmentSize);
- assertTrue(config.enableEncryption);
- assertEquals(Config.TDFFormat.JSONFormat, config.tdfFormat);
- assertEquals(Config.IntegrityAlgorithm.HS256, config.integrityAlgorithm);
- assertEquals(Config.IntegrityAlgorithm.GMAC, config.segmentIntegrityAlgorithm);
- assertTrue(config.attributes.isEmpty());
- assertTrue(config.kasInfoList.isEmpty());
- assertTrue(config.renderVersionInfoInManifest);
- assertFalse(config.hexEncodeRootAndSegmentHashes);
+ assertEquals(Config.DEFAULT_SEGMENT_SIZE, config.getDefaultSegmentSize());
+ assertTrue(config.isEnableEncryption());
+ assertEquals(Config.TDFFormat.JSONFormat, config.getTdfFormat());
+ assertEquals(Config.IntegrityAlgorithm.HS256, config.getIntegrityAlgorithm());
+ assertEquals(Config.IntegrityAlgorithm.GMAC, config.getSegmentIntegrityAlgorithm());
+ assertTrue(config.getAttributes().isEmpty());
+ assertTrue(config.getKasInfoList().isEmpty());
+ assertTrue(config.getRenderVersionInfoInManifest());
+ assertFalse(config.isHexEncodeRootAndSegmentHashes());
}
@Test
void withDataAttributes_shouldAddAttributes() throws AutoConfigureException {
Config.TDFConfig config = Config.newTDFConfig(Config.withDataAttributes("https://example.com/attr/attr1/value/value1", "https://example.com/attr/attr2/value/value2"));
- assertEquals(2, config.attributes.size());
- assertTrue(config.attributes.contains(new Autoconfigure.AttributeValueFQN("https://example.com/attr/attr1/value/value1")));
- assertTrue(config.attributes.contains(new Autoconfigure.AttributeValueFQN("https://example.com/attr/attr2/value/value2")));
+ assertEquals(2, config.getAttributes().size());
+ assertTrue(config.getAttributes().contains(new Autoconfigure.AttributeValueFQN("https://example.com/attr/attr1/value/value1")));
+ assertTrue(config.getAttributes().contains(new Autoconfigure.AttributeValueFQN("https://example.com/attr/attr2/value/value2")));
}
@Test
void withKasInformation_shouldAddKasInfo() {
Config.KASInfo kasInfo = new Config.KASInfo();
- kasInfo.URL = "http://example.com";
- kasInfo.PublicKey = "publicKey";
- kasInfo.KID = "r1";
+ kasInfo.setURL("http://example.com");
+ kasInfo.setPublicKey("publicKey");
+ kasInfo.setKID("r1");
Config.TDFConfig config = Config.newTDFConfig(Config.withKasInformation(kasInfo));
- assertEquals(1, config.kasInfoList.size());
- assertEquals(kasInfo, config.kasInfoList.get(0));
+ assertEquals(1, config.getKasInfoList().size());
+ assertEquals(kasInfo, config.getKasInfoList().get(0));
}
@Test
void withMetaData_shouldSetMetaData() {
Config.TDFConfig config = Config.newTDFConfig(Config.withMetaData("metaData"));
- assertEquals("metaData", config.metaData);
+ assertEquals("metaData", config.getMetaData());
}
@Test
void withSegmentSize_shouldSetSegmentSize() {
Config.TDFConfig config = Config.newTDFConfig(Config.withSegmentSize(Config.MIN_SEGMENT_SIZE));
- assertEquals(Config.MIN_SEGMENT_SIZE, config.defaultSegmentSize);
+ assertEquals(Config.MIN_SEGMENT_SIZE, config.getDefaultSegmentSize());
}
@Test
@@ -68,12 +68,12 @@ void withSegmentSize_shouldIgnoreSegmentSize() {
@Test
void withCompatibilityModeShouldSetFieldsCorrectly() {
Config.TDFConfig oldConfig = Config.newTDFConfig(Config.withTargetMode("1.0.1"));
- assertThat(oldConfig.renderVersionInfoInManifest).isFalse();
- assertThat(oldConfig.hexEncodeRootAndSegmentHashes).isTrue();
+ assertThat(oldConfig.getRenderVersionInfoInManifest()).isFalse();
+ assertThat(oldConfig.isHexEncodeRootAndSegmentHashes()).isTrue();
Config.TDFConfig newConfig = Config.newTDFConfig(Config.withTargetMode("100.0.1"));
- assertThat(newConfig.renderVersionInfoInManifest).isTrue();
- assertThat(newConfig.hexEncodeRootAndSegmentHashes).isFalse();
+ assertThat(newConfig.getRenderVersionInfoInManifest()).isTrue();
+ assertThat(newConfig.isHexEncodeRootAndSegmentHashes()).isFalse();
}
@@ -81,6 +81,6 @@ void withCompatibilityModeShouldSetFieldsCorrectly() {
void withMimeType_shouldSetMimeType() {
final String mimeType = "application/pdf";
Config.TDFConfig config = Config.newTDFConfig(Config.withMimeType(mimeType));
- assertEquals(mimeType, config.mimeType);
+ assertEquals(mimeType, config.getMimeType());
}
}
\ No newline at end of file
diff --git a/sdk/src/test/java/io/opentdf/platform/sdk/Fuzzing.java b/sdk/src/test/java/io/opentdf/platform/sdk/Fuzzing.java
index fd9f9afc..fcc8c7dc 100644
--- a/sdk/src/test/java/io/opentdf/platform/sdk/Fuzzing.java
+++ b/sdk/src/test/java/io/opentdf/platform/sdk/Fuzzing.java
@@ -38,7 +38,7 @@ public void fuzzTDF(FuzzedDataProvider data) {
byte[] fuzzBytes = data.consumeRemainingAsBytes();
byte[] key = new byte[32]; // use consistent zero key for performance and so fuzz can relate to seed
var assertionVerificationKeys = new Config.AssertionVerificationKeys();
- assertionVerificationKeys.defaultKey = new AssertionConfig.AssertionKey(AssertionConfig.AssertionKeyAlg.HS256, key);
+ assertionVerificationKeys.setDefaultKey(new AssertionConfig.AssertionKey(AssertionConfig.AssertionKeyAlg.HS256, key));
Config.TDFReaderConfig readerConfig = Config.newTDFReaderConfig(
Config.withAssertionVerificationKeys(assertionVerificationKeys));
TDF tdf = new TDF(new FakeServicesBuilder().setKas(TDFTest.kas).build());
diff --git a/sdk/src/test/java/io/opentdf/platform/sdk/KASClientTest.java b/sdk/src/test/java/io/opentdf/platform/sdk/KASClientTest.java
index a4f85eb0..a4cf8c06 100644
--- a/sdk/src/test/java/io/opentdf/platform/sdk/KASClientTest.java
+++ b/sdk/src/test/java/io/opentdf/platform/sdk/KASClientTest.java
@@ -70,8 +70,8 @@ public void publicKey(PublicKeyRequest request, StreamObserver respons
var req = gson.fromJson(requestBodyJson, KASClient.RewrapRequestBody.class);
var decryptedKey = new AsymDecryption(serverKeypair.getPrivate())
- .decrypt(Base64.getDecoder().decode(req.keyAccess.wrappedKey));
- var encryptedKey = new AsymEncryption(req.clientPublicKey).encrypt(decryptedKey);
+ .decrypt(Base64.getDecoder().decode(req.getKeyAccess().wrappedKey));
+ var encryptedKey = new AsymEncryption(req.getClientPublicKey()).encrypt(decryptedKey);
responseObserver.onNext(
RewrapResponse.newBuilder().setEntityWrappedKey(ByteString.copyFrom(encryptedKey)).build());
diff --git a/sdk/src/test/java/io/opentdf/platform/sdk/KASKeyCacheTest.java b/sdk/src/test/java/io/opentdf/platform/sdk/KASKeyCacheTest.java
index fdee682e..613e9739 100644
--- a/sdk/src/test/java/io/opentdf/platform/sdk/KASKeyCacheTest.java
+++ b/sdk/src/test/java/io/opentdf/platform/sdk/KASKeyCacheTest.java
@@ -18,15 +18,15 @@ class KASKeyCacheTest {
void setUp() {
kasKeyCache = new KASKeyCache();
kasInfo1 = new Config.KASInfo();
- kasInfo1.Algorithm = "rsa:2048";
- kasInfo1.URL = "https://example.com/kas1";
- kasInfo1.KID = "kid1";
- kasInfo1.PublicKey = "publicKey1";
+ kasInfo1.setAlgorithm("rsa:2048");
+ kasInfo1.setURL("https://example.com/kas1");
+ kasInfo1.setKID("kid1");
+ kasInfo1.setPublicKey("publicKey1");
kasInfo2 = new Config.KASInfo();
- kasInfo2.URL = "https://example.com/kas2";
- kasInfo2.Algorithm = "ec:secp256r1";
- kasInfo2.KID = "kid2";
- kasInfo2.PublicKey = "publicKey2";
+ kasInfo2.setURL("https://example.com/kas2");
+ kasInfo2.setAlgorithm("ec:secp256r1");
+ kasInfo2.setKID("kid2");
+ kasInfo2.setPublicKey("publicKey2");
}
@Test
@@ -39,10 +39,10 @@ void testStoreAndGet_WithinTimeLimit() {
// Ensure the item was correctly retrieved
assertNotNull(result);
- assertEquals("https://example.com/kas1", result.URL);
- assertEquals("rsa:2048", result.Algorithm);
- assertEquals("kid1", result.KID);
- assertEquals("publicKey1", result.PublicKey);
+ assertEquals("https://example.com/kas1", result.getURL());
+ assertEquals("rsa:2048", result.getAlgorithm());
+ assertEquals("kid1", result.getKID());
+ assertEquals("publicKey1", result.getPublicKey());
}
@Test
@@ -68,7 +68,7 @@ void testStoreAndGet_DifferentKIDs() {
kasKeyCache.store(kasInfo1);
// Attempt to retrieve the item with a different KID
- Config.KASInfo result = kasKeyCache.get(kasInfo1.URL, kasInfo1.Algorithm, kasInfo1.KID + "different");
+ Config.KASInfo result = kasKeyCache.get(kasInfo1.getURL(), kasInfo1.getAlgorithm(), kasInfo1.getKID() + "different");
// Ensure the item was not retrieved (it should have expired)
assertNull(result);
@@ -78,9 +78,9 @@ void testStoreAndGet_DifferentKIDs() {
void testStoreAndGet_WithNullAlgorithm() {
// Store an item in the cache with a null algorithm
kasInfo1 = new Config.KASInfo();
- kasInfo1.URL = "https://example.com/kas1";
- kasInfo1.KID = "kid1";
- kasInfo1.PublicKey = "publicKey1";
+ kasInfo1.setURL("https://example.com/kas1");
+ kasInfo1.setKID("kid1");
+ kasInfo1.setPublicKey("publicKey1");
kasKeyCache.store(kasInfo1);
// Retrieve the item with a null algorithm
@@ -88,10 +88,10 @@ void testStoreAndGet_WithNullAlgorithm() {
// Ensure the item was correctly retrieved
assertNotNull(result);
- assertEquals("https://example.com/kas1", result.URL);
- assertNull(result.Algorithm);
- assertEquals("kid1", result.KID);
- assertEquals("publicKey1", result.PublicKey);
+ assertEquals("https://example.com/kas1", result.getURL());
+ assertNull(result.getAlgorithm());
+ assertEquals("kid1", result.getKID());
+ assertEquals("publicKey1", result.getPublicKey());
}
@Test
@@ -120,12 +120,12 @@ void testStoreMultipleItemsAndGet() {
Config.KASInfo result2 = kasKeyCache.get("https://example.com/kas2", "ec:secp256r1", "kid2");
assertNotNull(result1);
- assertEquals("https://example.com/kas1", result1.URL);
- assertEquals("rsa:2048", result1.Algorithm);
+ assertEquals("https://example.com/kas1", result1.getURL());
+ assertEquals("rsa:2048", result1.getAlgorithm());
assertNotNull(result2);
- assertEquals("https://example.com/kas2", result2.URL);
- assertEquals("ec:secp256r1", result2.Algorithm);
+ assertEquals("https://example.com/kas2", result2.getURL());
+ assertEquals("ec:secp256r1", result2.getAlgorithm());
}
@Test
diff --git a/sdk/src/test/java/io/opentdf/platform/sdk/NanoTDFTest.java b/sdk/src/test/java/io/opentdf/platform/sdk/NanoTDFTest.java
index 599a52a5..e04b5bb9 100644
--- a/sdk/src/test/java/io/opentdf/platform/sdk/NanoTDFTest.java
+++ b/sdk/src/test/java/io/opentdf/platform/sdk/NanoTDFTest.java
@@ -82,19 +82,19 @@ public void close() {
@Override
public Config.KASInfo getPublicKey(Config.KASInfo kasInfo) {
Config.KASInfo returnKI = new Config.KASInfo();
- returnKI.PublicKey = kasPublicKey;
+ returnKI.setPublicKey(kasPublicKey);
return returnKI;
}
@Override
public KASInfo getECPublicKey(Config.KASInfo kasInfo, NanoTDFType.ECCurve curve) {
- if (kasInfo.Algorithm != null && !"ec:secp256r1".equals(kasInfo.Algorithm)) {
+ if (kasInfo.getAlgorithm() != null && !"ec:secp256r1".equals(kasInfo.getAlgorithm())) {
throw new IllegalArgumentException("Unexpected algorithm: " + kasInfo);
}
var k2 = kasInfo.clone();
- k2.KID = KID;
- k2.PublicKey = kasPublicKey;
- k2.Algorithm = "ec:secp256r1";
+ k2.setKID(KID);
+ k2.setPublicKey(kasPublicKey);
+ k2.setAlgorithm("ec:secp256r1");
return k2;
}
@@ -168,9 +168,9 @@ public void cancel() {
void encryptionAndDecryptionWithValidKey() throws Exception {
var kasInfos = new ArrayList<>();
var kasInfo = new Config.KASInfo();
- kasInfo.URL = "https://api.example.com/kas";
- kasInfo.PublicKey = null;
- kasInfo.KID = KID;
+ kasInfo.setURL("https://api.example.com/kas");
+ kasInfo.setPublicKey(null);
+ kasInfo.setKID(KID);
kasInfos.add(kasInfo);
Config.NanoTDFConfig config = Config.newNanoTDFConfig(
@@ -248,8 +248,8 @@ void encryptionAndDecryptWithBaseKey() throws Exception {
void testWithDifferentConfigAndKeyValues() throws Exception {
var kasInfos = new ArrayList<>();
var kasInfo = new Config.KASInfo();
- kasInfo.URL = "https://api.example.com/kas";
- kasInfo.PublicKey = null;
+ kasInfo.setURL("https://api.example.com/kas");
+ kasInfo.setPublicKey(null);
kasInfos.add(kasInfo);
var config = Config.newNanoTDFConfig(
Config.withNanoKasInformation(kasInfos.toArray(new Config.KASInfo[0])),
@@ -264,8 +264,8 @@ void runBasicTest(String kasUrl, boolean allowed, KeyAccessServerRegistryService
if (writerConfig == null) {
var kasInfos = new ArrayList<>();
var kasInfo = new Config.KASInfo();
- kasInfo.URL = kasUrl;
- kasInfo.PublicKey = null;
+ kasInfo.setURL(kasUrl);
+ kasInfo.setPublicKey(null);
kasInfos.add(kasInfo);
config = Config.newNanoTDFConfig(
Config.withNanoKasInformation(kasInfos.toArray(new Config.KASInfo[0])),
@@ -342,9 +342,9 @@ void kasAllowlistTests() throws Exception {
void collection() throws Exception {
var kasInfos = new ArrayList<>();
var kasInfo = new Config.KASInfo();
- kasInfo.URL = "https://api.example.com/kas";
- kasInfo.PublicKey = null;
- kasInfo.KID = KID;
+ kasInfo.setURL("https://api.example.com/kas");
+ kasInfo.setPublicKey(null);
+ kasInfo.setKID(KID);
kasInfos.add(kasInfo);
Config.NanoTDFConfig config = Config.newNanoTDFConfig(
@@ -359,7 +359,7 @@ void collection() throws Exception {
NanoTDF nanoTDF = new NanoTDF(new FakeServicesBuilder().setKas(kas).build());
ByteBuffer header = getHeaderBuffer(byteBuffer,nanoTDF, config);
for (int i = 0; i < Config.MAX_COLLECTION_ITERATION - 10; i++) {
- config.collectionConfig.getHeaderInfo();
+ config.getCollectionConfig().getHeaderInfo();
}
for (int i = 1; i < 10; i++) {
@@ -379,10 +379,10 @@ public void testNanoTDFWithPlainTextPolicy() throws Exception {
var kasInfos = new ArrayList();
var kasInfo = new Config.KASInfo();
- kasInfo.URL = sampleKasUrl;
- kasInfo.PublicKey = kasPublicKey;
- kasInfo.KID = KID;
- kasInfo.Algorithm = "ec:secp256r1";
+ kasInfo.setURL(sampleKasUrl);
+ kasInfo.setPublicKey(kasPublicKey);
+ kasInfo.setKID(KID);
+ kasInfo.setAlgorithm("ec:secp256r1");
kasInfos.add(kasInfo);
Config.NanoTDFConfig config = Config.newNanoTDFConfig(
diff --git a/sdk/src/test/java/io/opentdf/platform/sdk/PlannerTest.java b/sdk/src/test/java/io/opentdf/platform/sdk/PlannerTest.java
index c7316dd6..dec6187b 100644
--- a/sdk/src/test/java/io/opentdf/platform/sdk/PlannerTest.java
+++ b/sdk/src/test/java/io/opentdf/platform/sdk/PlannerTest.java
@@ -79,31 +79,31 @@ void fetchBaseKeyWithMissingFields() {
@Test
void generatePlanFromProvidedKases() {
var kas1 = new Config.KASInfo();
- kas1.URL = "https://kas1.example.com";
- kas1.KID = "kid1";
+ kas1.setURL("https://kas1.example.com");
+ kas1.setKID("kid1");
var kas2 = new Config.KASInfo();
- kas2.URL = "https://kas2.example.com";
- kas2.KID = "kid2";
- kas2.Algorithm = "ec:secp256r1";
+ kas2.setURL("https://kas2.example.com");
+ kas2.setKID("kid2");
+ kas2.setAlgorithm("ec:secp256r1");
var tdfConfig = new Config.TDFConfig();
- tdfConfig.kasInfoList.add(kas1);
- tdfConfig.kasInfoList.add(kas2);
+ tdfConfig.getKasInfoList().add(kas1);
+ tdfConfig.getKasInfoList().add(kas2);
var planner = new Planner(tdfConfig, new FakeServicesBuilder().build(), (ignore1, ignored2) -> { throw new IllegalArgumentException("no granter needed"); });
- List splitPlan = planner.generatePlanFromProvidedKases(tdfConfig.kasInfoList);
+ List splitPlan = planner.generatePlanFromProvidedKases(tdfConfig.getKasInfoList());
assertThat(splitPlan).asList().hasSize(2);
- assertThat(splitPlan.get(0).kas).isEqualTo("https://kas1.example.com");
- assertThat(splitPlan.get(0).kid).isEqualTo("kid1");
- assertThat(splitPlan.get(0).keyType).isNull();
+ assertThat(splitPlan.get(0).getKas()).isEqualTo("https://kas1.example.com");
+ assertThat(splitPlan.get(0).getKid()).isEqualTo("kid1");
+ assertThat(splitPlan.get(0).getKeyType()).isNull();
- assertThat(splitPlan.get(1).kas).isEqualTo("https://kas2.example.com");
- assertThat(splitPlan.get(1).kid).isEqualTo("kid2");
- assertThat(splitPlan.get(1).keyType).isEqualTo(KeyType.EC256Key);
+ assertThat(splitPlan.get(1).getKas()).isEqualTo("https://kas2.example.com");
+ assertThat(splitPlan.get(1).getKid()).isEqualTo("kid2");
+ assertThat(splitPlan.get(1).getKeyType()).isEqualTo(KeyType.EC256Key);
- assertThat(splitPlan.get(0).splitID).isNotEqualTo(splitPlan.get(1).splitID);
+ assertThat(splitPlan.get(0).getSplitID()).isNotEqualTo(splitPlan.get(1).getSplitID());
}
@Test
@@ -112,36 +112,30 @@ void testFillingInKeysWithAutoConfigure() {
Mockito.when(kas.getPublicKey(Mockito.any())).thenAnswer(invocation -> {
Config.KASInfo kasInfo = invocation.getArgument(0, Config.KASInfo.class);
var ret = new Config.KASInfo();
- ret.URL = kasInfo.URL;
- if (Objects.equals(kasInfo.URL, "https://kas1.example.com")) {
- ret.PublicKey = "pem1";
- ret.Algorithm = "rsa:2048";
- ret.KID = "kid1";
- } else if (Objects.equals(kasInfo.URL, "https://kas2.example.com")) {
- ret.PublicKey = "pem2";
- ret.Algorithm = "ec:secp256r1";
- ret.KID = "kid2";
- } else if (Objects.equals(kasInfo.URL, "https://kas3.example.com")) {
- ret.PublicKey = "pem3";
- ret.Algorithm = "ec:secp384r1";
- ret.KID = "kid3";
- assertThat(kasInfo.Algorithm).isEqualTo("ec:secp384r1");
+ ret.setURL(kasInfo.getURL());
+ if (Objects.equals(kasInfo.getURL(), "https://kas1.example.com")) {
+ ret.setPublicKey("pem1");
+ ret.setAlgorithm("rsa:2048");
+ ret.setKID("kid1");
+ } else if (Objects.equals(kasInfo.getURL(), "https://kas2.example.com")) {
+ ret.setPublicKey("pem2");
+ ret.setAlgorithm("ec:secp256r1");
+ ret.setKID("kid2");
+ } else if (Objects.equals(kasInfo.getURL(), "https://kas3.example.com")) {
+ ret.setPublicKey("pem3");
+ ret.setAlgorithm("ec:secp384r1");
+ ret.setKID("kid3");
+ assertThat(kasInfo.getAlgorithm()).isEqualTo("ec:secp384r1");
} else {
- throw new IllegalArgumentException("Unexpected KAS URL: " + kasInfo.URL);
+ throw new IllegalArgumentException("Unexpected KAS URL: " + kasInfo.getURL());
}
return ret;
});
var tdfConfig = new Config.TDFConfig();
- tdfConfig.autoconfigure = true;
- tdfConfig.wrappingKeyType = KeyType.RSA2048Key;
- tdfConfig.kasInfoList = List.of(
- new Config.KASInfo() {{
- URL = "https://kas4.example.com";
- KID = "kid4";
- Algorithm = "ec:secp384r1";
- PublicKey = "pem4";
- }}
- );
+ tdfConfig.setAutoconfigure(true);
+ tdfConfig.setWrappingKeyType(KeyType.RSA2048Key);
+ tdfConfig.setKasInfoList(List.of(new Config.KASInfo("https://kas4.example.com", "pem4", "kid4", "ec:secp384r1")));
+
var planner = new Planner(tdfConfig, new FakeServicesBuilder().setKas(kas).build(), (ignore1, ignored2) -> { throw new IllegalArgumentException("no granter needed"); });
var plan = List.of(
new Autoconfigure.KeySplitTemplate("https://kas1.example.com", "split1", null, null),
@@ -152,42 +146,42 @@ void testFillingInKeysWithAutoConfigure() {
Map> filledInPlan = planner.resolveKeys(plan);
assertThat(filledInPlan.keySet().stream().collect(Collectors.toList())).asList().containsExactlyInAnyOrder("split1", "split2");
assertThat(filledInPlan.get("split1")).asList().hasSize(2);
- var kasInfo1 = filledInPlan.get("split1").stream().filter(k -> "kid1".equals(k.KID)).findFirst().get();
- assertThat(kasInfo1.URL).isEqualTo("https://kas1.example.com");
- assertThat(kasInfo1.Algorithm).isEqualTo("rsa:2048");
- assertThat(kasInfo1.PublicKey).isEqualTo("pem1");
- var kasInfo4 = filledInPlan.get("split1").stream().filter(k -> "kid4".equals(k.KID)).findFirst().get();
- assertThat(kasInfo4.URL).isEqualTo("https://kas4.example.com");
- assertThat(kasInfo4.Algorithm).isEqualTo("ec:secp384r1");
- assertThat(kasInfo4.PublicKey).isEqualTo("pem4");
+ var kasInfo1 = filledInPlan.get("split1").stream().filter(k -> "kid1".equals(k.getKID())).findFirst().get();
+ assertThat(kasInfo1.getURL()).isEqualTo("https://kas1.example.com");
+ assertThat(kasInfo1.getAlgorithm()).isEqualTo("rsa:2048");
+ assertThat(kasInfo1.getPublicKey()).isEqualTo("pem1");
+ var kasInfo4 = filledInPlan.get("split1").stream().filter(k -> "kid4".equals(k.getKID())).findFirst().get();
+ assertThat(kasInfo4.getURL()).isEqualTo("https://kas4.example.com");
+ assertThat(kasInfo4.getAlgorithm()).isEqualTo("ec:secp384r1");
+ assertThat(kasInfo4.getPublicKey()).isEqualTo("pem4");
assertThat(filledInPlan.get("split2")).asList().hasSize(2);
- var kasInfo2 = filledInPlan.get("split2").stream().filter(kasInfo -> "kid2".equals(kasInfo.KID)).findFirst().get();
- assertThat(kasInfo2.URL).isEqualTo("https://kas2.example.com");
- assertThat(kasInfo2.Algorithm).isEqualTo("ec:secp256r1");
- assertThat(kasInfo2.PublicKey).isEqualTo("pem2");
- var kasInfo3 = filledInPlan.get("split2").stream().filter(kasInfo -> "kid3".equals(kasInfo.KID)).findFirst().get();
- assertThat(kasInfo3.URL).isEqualTo("https://kas3.example.com");
- assertThat(kasInfo3.Algorithm).isEqualTo("ec:secp384r1");
- assertThat(kasInfo3.PublicKey).isEqualTo("pem3");
+ var kasInfo2 = filledInPlan.get("split2").stream().filter(kasInfo -> "kid2".equals(kasInfo.getKID())).findFirst().get();
+ assertThat(kasInfo2.getURL()).isEqualTo("https://kas2.example.com");
+ assertThat(kasInfo2.getAlgorithm()).isEqualTo("ec:secp256r1");
+ assertThat(kasInfo2.getPublicKey()).isEqualTo("pem2");
+ var kasInfo3 = filledInPlan.get("split2").stream().filter(kasInfo -> "kid3".equals(kasInfo.getKID())).findFirst().get();
+ assertThat(kasInfo3.getURL()).isEqualTo("https://kas3.example.com");
+ assertThat(kasInfo3.getAlgorithm()).isEqualTo("ec:secp384r1");
+ assertThat(kasInfo3.getPublicKey()).isEqualTo("pem3");
}
@Test
void returnsOnlyDefaultKasesIfPresent() {
var kas1 = new Config.KASInfo();
- kas1.URL = "https://kas1.example.com";
- kas1.Default = true;
+ kas1.setURL("https://kas1.example.com");
+ kas1.setDefault(true);
var kas2 = new Config.KASInfo();
- kas2.URL = "https://kas2.example.com";
- kas2.Default = false;
+ kas2.setURL("https://kas2.example.com");
+ kas2.setDefault(false);
var kas3 = new Config.KASInfo();
- kas3.URL = "https://kas3.example.com";
- kas3.Default = true;
+ kas3.setURL("https://kas3.example.com");
+ kas3.setDefault(true);
var config = new Config.TDFConfig();
- config.kasInfoList.addAll(List.of(kas1, kas2, kas3));
+ config.getKasInfoList().addAll(List.of(kas1, kas2, kas3));
List result = Planner.defaultKases(config);
@@ -197,15 +191,15 @@ void returnsOnlyDefaultKasesIfPresent() {
@Test
void returnsAllKasesIfNoDefault() {
var kas1 = new Config.KASInfo();
- kas1.URL = "https://kas1.example.com";
- kas1.Default = false;
+ kas1.setURL("https://kas1.example.com");
+ kas1.setDefault(false);
var kas2 = new Config.KASInfo();
- kas2.URL = "https://kas2.example.com";
- kas2.Default = null; // not set
+ kas2.setURL("https://kas2.example.com");
+ kas2.setDefault(null); // not set
var config = new Config.TDFConfig();
- config.kasInfoList.addAll(List.of(kas1, kas2));
+ config.getKasInfoList().addAll(List.of(kas1, kas2));
List result = Planner.defaultKases(config);
Assertions.assertThat(result).containsExactlyInAnyOrder("https://kas1.example.com", "https://kas2.example.com");
@@ -224,39 +218,39 @@ void usesProvidedSplitPlanWhenNotAutoconfigure() {
Mockito.when(kas.getPublicKey(Mockito.any())).thenAnswer(invocation -> {
Config.KASInfo kasInfo = invocation.getArgument(0, Config.KASInfo.class);
var ret = new Config.KASInfo();
- ret.URL = kasInfo.URL;
- if (Objects.equals(kasInfo.URL, "https://kas1.example.com")) {
- ret.PublicKey = "pem1";
- ret.Algorithm = "rsa:2048";
- ret.KID = "kid1";
- } else if (Objects.equals(kasInfo.URL, "https://kas2.example.com")) {
- ret.PublicKey = "pem2";
- ret.Algorithm = "ec:secp256r1";
- ret.KID = "kid2";
+ ret.setURL(kasInfo.getURL());
+ if (Objects.equals(kasInfo.getURL(), "https://kas1.example.com")) {
+ ret.setPublicKey("pem1");
+ ret.setAlgorithm("rsa:2048");
+ ret.setKID("kid1");
+ } else if (Objects.equals(kasInfo.getURL(), "https://kas2.example.com")) {
+ ret.setPublicKey("pem2");
+ ret.setAlgorithm("ec:secp256r1");
+ ret.setKID("kid2");
} else {
- throw new IllegalArgumentException("Unexpected KAS URL: " + kasInfo.URL);
+ throw new IllegalArgumentException("Unexpected KAS URL: " + kasInfo.getURL());
}
return ret;
});
// Arrange
var kas1 = new Config.KASInfo();
- kas1.URL = "https://kas1.example.com";
- kas1.KID = "kid1";
- kas1.Algorithm = "rsa:2048";
+ kas1.setURL("https://kas1.example.com");
+ kas1.setKID("kid1");
+ kas1.setAlgorithm("rsa:2048");
var kas2 = new Config.KASInfo();
- kas2.URL = "https://kas2.example.com";
- kas2.KID = "kid2";
- kas2.Algorithm = "ec:secp256";
+ kas2.setURL("https://kas2.example.com");
+ kas2.setKID("kid2");
+ kas2.setAlgorithm("ec:secp256");
- var splitStep1 = new Autoconfigure.KeySplitStep(kas1.URL, "split1");
- var splitStep2 = new Autoconfigure.KeySplitStep(kas2.URL, "split2");
+ var splitStep1 = new Autoconfigure.KeySplitStep(kas1.getURL(), "split1");
+ var splitStep2 = new Autoconfigure.KeySplitStep(kas2.getURL(), "split2");
var tdfConfig = new Config.TDFConfig();
- tdfConfig.autoconfigure = false;
- tdfConfig.kasInfoList.add(kas1);
- tdfConfig.kasInfoList.add(kas2);
- tdfConfig.splitPlan = List.of(splitStep1, splitStep2);
+ tdfConfig.setAutoconfigure(false);
+ tdfConfig.getKasInfoList().add(kas1);
+ tdfConfig.getKasInfoList().add(kas2);
+ tdfConfig.setSplitPlan(List.of(splitStep1, splitStep2));
var planner = new Planner(tdfConfig, new FakeServicesBuilder().setKas(kas).build(), (ignore1, ignored2) -> { throw new IllegalArgumentException("no granter needed"); });
diff --git a/sdk/src/test/java/io/opentdf/platform/sdk/TDFE2ETest.java b/sdk/src/test/java/io/opentdf/platform/sdk/TDFE2ETest.java
index 08c6b8aa..844a369f 100644
--- a/sdk/src/test/java/io/opentdf/platform/sdk/TDFE2ETest.java
+++ b/sdk/src/test/java/io/opentdf/platform/sdk/TDFE2ETest.java
@@ -35,7 +35,7 @@ public void createAndDecryptTdfIT() throws Exception {
.build();
var kasInfo = new Config.KASInfo();
- kasInfo.URL = "localhost:8080";
+ kasInfo.setURL("localhost:8080");
List tdfConfigPairs = List.of(
new TDFConfigPair(
@@ -75,7 +75,7 @@ public void createAndDecryptNanoTDF() throws Exception {
.services;
var kasInfo = new Config.KASInfo();
- kasInfo.URL = "http://localhost:8080";
+ kasInfo.setURL("http://localhost:8080");
for (NanoTDFType.PolicyType policyType : List.of(
NanoTDFType.PolicyType.EMBEDDED_POLICY_PLAIN_TEXT,
diff --git a/sdk/src/test/java/io/opentdf/platform/sdk/TDFTest.java b/sdk/src/test/java/io/opentdf/platform/sdk/TDFTest.java
index c28bd2bd..88f92c9a 100644
--- a/sdk/src/test/java/io/opentdf/platform/sdk/TDFTest.java
+++ b/sdk/src/test/java/io/opentdf/platform/sdk/TDFTest.java
@@ -57,16 +57,16 @@ public Config.KASInfo getPublicKey(Config.KASInfo kasInfo) {
// handle platform url
int index;
// if the kasinfo url contains the platform url, remove it
- if (kasInfo.URL.startsWith(platformUrl)) {
- index = Integer.parseInt(kasInfo.URL
+ if (kasInfo.getURL().startsWith(platformUrl)) {
+ index = Integer.parseInt(kasInfo.getURL()
.replaceFirst("^" + Pattern.quote(platformUrl) + "/kas", ""));
} else {
- index = Integer.parseInt(kasInfo.URL.replaceFirst("^https://example.com/kas", ""));
+ index = Integer.parseInt(kasInfo.getURL().replaceFirst("^https://example.com/kas", ""));
}
var kiCopy = new Config.KASInfo();
- kiCopy.KID = "r1";
- kiCopy.PublicKey = CryptoUtils.getPublicKeyPEM(keypairs.get(index).getPublic());
- kiCopy.URL = kasInfo.URL;
+ kiCopy.setKID("r1");
+ kiCopy.setPublicKey(CryptoUtils.getPublicKeyPEM(keypairs.get(index).getPublic()));
+ kiCopy.setURL(kasInfo.getURL());
return kiCopy;
}
@@ -138,11 +138,11 @@ static void setupKeyPairsAndMocks() {
List kasRegEntries = new ArrayList<>();
for (Config.KASInfo kasInfo : getRSAKASInfos()) {
kasRegEntries.add(KeyAccessServer.newBuilder()
- .setUri(kasInfo.URL).build());
+ .setUri(kasInfo.getURL()).build());
}
for (Config.KASInfo kasInfo : getECKASInfos()) {
kasRegEntries.add(KeyAccessServer.newBuilder()
- .setUri(kasInfo.URL).build());
+ .setUri(kasInfo.getURL()).build());
}
ListKeyAccessServersResponse mockResponse = ListKeyAccessServersResponse.newBuilder()
.addAllKeyAccessServers(kasRegEntries)
@@ -194,9 +194,9 @@ public TDFConfigPair(Config.TDFConfig tdfConfig, Config.TDFReaderConfig tdfReade
assertion1.signingKey = new AssertionConfig.AssertionKey(AssertionConfig.AssertionKeyAlg.HS256, key);
var assertionVerificationKeys = new Config.AssertionVerificationKeys();
- assertionVerificationKeys.defaultKey = new AssertionConfig.AssertionKey(
- AssertionConfig.AssertionKeyAlg.HS256,
- key);
+ assertionVerificationKeys.setDefaultKey(new AssertionConfig.AssertionKey(
+ AssertionConfig.AssertionKeyAlg.HS256,
+ key));
List tdfConfigPairs = List.of(
new TDFConfigPair(
@@ -286,7 +286,7 @@ void testSimpleTDFWithAssertionWithRS256() throws Exception {
keypair.getPrivate());
var rsaKasInfo = new Config.KASInfo();
- rsaKasInfo.URL = "https://example.com/kas" + 0;
+ rsaKasInfo.setURL("https://example.com/kas" + 0);
Config.TDFConfig config = Config.newTDFConfig(
Config.withAutoconfigure(false),
@@ -304,7 +304,7 @@ void testSimpleTDFWithAssertionWithRS256() throws Exception {
tdf.createTDF(plainTextInputStream, tdfOutputStream, config);
var assertionVerificationKeys = new Config.AssertionVerificationKeys();
- assertionVerificationKeys.keys.put(assertion1Id,
+ assertionVerificationKeys.getKeys().put(assertion1Id,
new AssertionConfig.AssertionKey(AssertionConfig.AssertionKeyAlg.RS256,
keypair.getPublic()));
@@ -351,7 +351,7 @@ void testWithAssertionVerificationDisabled() throws Exception {
tdf.createTDF(plainTextInputStream, tdfOutputStream, config);
var assertionVerificationKeys = new Config.AssertionVerificationKeys();
- assertionVerificationKeys.keys.put(assertion1Id,
+ assertionVerificationKeys.getKeys().put(assertion1Id,
new AssertionConfig.AssertionKey(AssertionConfig.AssertionKeyAlg.RS256,
keypair.getPublic()));
@@ -401,7 +401,7 @@ void testSimpleTDFWithAssertionWithHS256() throws Exception {
assertionConfig2.statement.value = "{\"uuid\":\"f74efb60-4a9a-11ef-a6f1-8ee1a61c148a\",\"body\":{\"dataAttributes\":null,\"dissem\":null}}";
var rsaKasInfo = new Config.KASInfo();
- rsaKasInfo.URL = "https://example.com/kas" + 0;
+ rsaKasInfo.setURL("https://example.com/kas" + 0);
Config.TDFConfig config = Config.newTDFConfig(
Config.withAutoconfigure(false),
@@ -469,7 +469,7 @@ void testSimpleTDFWithAssertionWithHS256Failure() throws Exception {
key);
var rsaKasInfo = new Config.KASInfo();
- rsaKasInfo.URL = "https://example.com/kas" + 0;
+ rsaKasInfo.setURL("https://example.com/kas" + 0);
Config.TDFConfig config = Config.newTDFConfig(
Config.withAutoconfigure(false),
@@ -488,9 +488,9 @@ void testSimpleTDFWithAssertionWithHS256Failure() throws Exception {
byte[] notkey = new byte[32];
secureRandom.nextBytes(notkey);
var assertionVerificationKeys = new Config.AssertionVerificationKeys();
- assertionVerificationKeys.defaultKey = new AssertionConfig.AssertionKey(
- AssertionConfig.AssertionKeyAlg.HS256,
- notkey);
+ assertionVerificationKeys.setDefaultKey(new AssertionConfig.AssertionKey(
+ AssertionConfig.AssertionKeyAlg.HS256,
+ notkey));
Config.TDFReaderConfig readerConfig = Config.newTDFReaderConfig(
Config.withAssertionVerificationKeys(assertionVerificationKeys));
@@ -745,7 +745,7 @@ public void cancel() {
});
var rsaKasInfo = new Config.KASInfo();
- rsaKasInfo.URL = "https://example.com/kas" + Integer.toString(0);
+ rsaKasInfo.setURL("https://example.com/kas" + Integer.toString(0));
Config.TDFConfig config = Config.newTDFConfig(
Config.withAutoconfigure(false),
@@ -790,7 +790,7 @@ public void cancel() {
// use the platform url as kas url, should succeed
var platformKasInfo = new Config.KASInfo();
- platformKasInfo.URL = platformUrl + "/kas" + Integer.toString(0);
+ platformKasInfo.setURL(platformUrl + "/kas" + Integer.toString(0));
config = Config.newTDFConfig(
Config.withAutoconfigure(false),
Config.withKasInformation(platformKasInfo));
@@ -817,8 +817,8 @@ private static Config.KASInfo[] getKASInfos(Predicate filter) {
for (int i = 0; i < keypairs.size(); i++) {
if (filter.test(i)) {
var kasInfo = new Config.KASInfo();
- kasInfo.URL = "https://example.com/kas" + Integer.toString(i);
- kasInfo.PublicKey = null;
+ kasInfo.setURL("https://example.com/kas" + Integer.toString(i));
+ kasInfo.setPublicKey(null);
kasInfos.add(kasInfo);
}
}
From 1b06dfa79e14b3dc92b57cc229db056862662e5a Mon Sep 17 00:00:00 2001
From: Scott Hamrick <2623452+cshamrick@users.noreply.github.com>
Date: Fri, 21 Nov 2025 09:57:31 -0600
Subject: [PATCH 3/3] fix(sdk): configure source directories for
maven-checkstyle-plugin
Signed-off-by: Scott Hamrick <2623452+cshamrick@users.noreply.github.com>
---
pom.xml | 3 +++
1 file changed, 3 insertions(+)
diff --git a/pom.xml b/pom.xml
index 62457c7f..7b2c4083 100644
--- a/pom.xml
+++ b/pom.xml
@@ -283,6 +283,9 @@
org.apache.maven.plugins
maven-checkstyle-plugin
+
+ ${project.build.sourceDirectory}
+
validate