build.gradle
plugins {
    id 'java'
    id 'net.saliman.properties' version '1.5.2'
    id 'com.gradleup.shadow' version '8.3.4'

    // Only used for testing
    id 'com.marklogic.ml-gradle' version '5.0.0'
    id 'jacoco'
    id "org.sonarqube" version "5.1.0.4882"

    // Used to generate Avro classes. This will write classes to build/generated-test-avro-java and also add that folder
    // as a source root. Since this is commented out by default, the generated Avro test class has been added to
    // src/test/java. This only needs to be uncommented when there's a need to regenerate that class, at which point it
    // should be copied over to src/test/java and then this plugin should be commented out again.
    // id "com.github.davidmc24.gradle.plugin.avro" version "1.6.0"
}
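// For reference (an assumption, not verified against this project): when the Avro plugin above is
// uncommented, regeneration is typically driven by the plugin's generated test-source task, e.g.
// `./gradlew generateTestAvroJava`; confirm the exact task name against the plugin's documentation.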
java {
    sourceCompatibility = 1.8
    targetCompatibility = 1.8
}

repositories {
    mavenCentral()
}

configurations {
    documentation
    assets
}
ext {
    // The third-party kafka-junit tool (https://github.com/mguenther/kafka-junit?tab=readme-ov-file) constrains
    // which Kafka version can be used for testing, so keep this value in sync with what that tool supports.
    kafkaVersion = "3.8.1"
}
dependencies {
    compileOnly "org.apache.kafka:connect-runtime:${kafkaVersion}"
    compileOnly "org.slf4j:slf4j-api:1.7.36"

    // Force DHF to use the latest version of ml-app-deployer, which minimizes security vulnerabilities
    implementation "com.marklogic:ml-app-deployer:5.0.0"

    implementation "com.fasterxml.jackson.dataformat:jackson-dataformat-csv:2.17.2"

    // Note that in general, the version of the DHF jar must match that of the deployed DHF instance. Different versions
    // may work together, but that behavior is not guaranteed.
    implementation("com.marklogic:marklogic-data-hub:6.1.1") {
        exclude module: "marklogic-client-api"
        exclude module: "ml-javaclient-util"
        exclude module: "ml-app-deployer"
        // No need for mlcp-util, it's only used in 'legacy' DHF 4 jobs
        exclude module: "mlcp-util"
        // Excluding because it causes Kafka Connect to complain mightily if included
        exclude module: "logback-classic"
    }

    testImplementation 'com.marklogic:marklogic-junit5:1.5.0'
    testImplementation "org.apache.kafka:connect-json:${kafkaVersion}"

    // Can be deleted when the disabled kafka-junit tests are deleted.
    testImplementation 'net.mguenther.kafka:kafka-junit:3.6.0'

    testImplementation "org.apache.avro:avro-compiler:1.12.0"

    // Forcing logback to be used for test logging
    testImplementation "ch.qos.logback:logback-classic:1.3.14"
    testImplementation "org.slf4j:jcl-over-slf4j:2.0.16"

    documentation files('LICENSE.txt')
    documentation files('NOTICE.txt')
    documentation files('README.md')

    assets files('MarkLogic_logo.png')
    assets files('apache_logo.png')
}
// This ensures that the compiler reports "unchecked" warnings.
// This helps us use the compiler to prevent potential problems.
tasks.withType(JavaCompile) {
    options.compilerArgs << '-Xlint:unchecked'
    options.deprecation = true
}

test {
    useJUnitPlatform()
}

// Configures jacoco test coverage to be included when "test" is run
test {
    finalizedBy jacocoTestReport
}

jacocoTestReport {
    dependsOn test
}
// Enabling the XML report allows for sonar to grab coverage data from jacoco
jacocoTestReport {
    reports {
        // This isn't working with Gradle 8. Will replace this soon with the sonar instance in docker-compose.
        // xml.enabled true
    }
}
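// A minimal sketch, in case the XML report needs to be re-enabled under Gradle 8: the boolean `enabled`
// flags on report types were replaced by `required` properties, so the equivalent configuration would be
// the following (left commented out so current behavior is unchanged):
// jacocoTestReport {
//     reports {
//         xml.required = true
//     }
// }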
shadowJar {
    // Exclude DHF source files
    exclude "hub-internal-artifacts/**"
    exclude "hub-internal-config/**"
    exclude "ml-config/**"
    exclude "ml-modules*/**"
    exclude "scaffolding/**"
}
ext {
    confluentArchiveGroup = "Confluent Connector Archive"
    confluentTestingGroup = "Confluent Platform Local Testing"
    baseArchiveBuildDir = "build/connectorArchive"
    baseArchiveName = "${componentOwner}-${componentName}-${version}"
}
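// Note: componentOwner and componentName are not defined in this script; they are assumed to be project
// properties supplied via gradle.properties (loaded with the net.saliman.properties plugin applied above).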
// Tasks for building the archive required for submitting to the Confluent Connector Hub
import org.apache.tools.ant.filters.ReplaceTokens

task connectorArchive_CopyManifestToBuildDirectory(type: Copy, group: confluentArchiveGroup) {
    description = "Copy the project manifest into the root folder"
    from '.'
    include 'manifest.json'
    into "${baseArchiveBuildDir}/${baseArchiveName}"
    filter(ReplaceTokens, tokens: [CONFLUENT_USER: componentOwner, VERSION: version])
}
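// ReplaceTokens uses "@" token delimiters by default, so manifest.json presumably contains
// @CONFLUENT_USER@ and @VERSION@ placeholders that the filter above substitutes.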
task connectorArchive_CopyAssetsToBuildDirectory(type: Copy, group: confluentArchiveGroup) {
    description = "Copy the project assets into the assets folder"
    from configurations.assets
    into "${baseArchiveBuildDir}/${baseArchiveName}/assets"
}

task connectorArchive_CopyEtcToBuildDirectory(type: Copy, group: confluentArchiveGroup) {
    description = "Copy the project support files into the etc folder"
    from 'config'
    include '*'
    into "${baseArchiveBuildDir}/${baseArchiveName}/etc"
}

task connectorArchive_CopyDocumentationToBuildDirectory(type: Copy, group: confluentArchiveGroup) {
    description = "Copy the project documentation into the doc folder"
    from configurations.documentation
    into "${baseArchiveBuildDir}/${baseArchiveName}/doc"
}
task connectorArchive_CopyDependenciesToBuildDirectory(type: Copy, group: confluentArchiveGroup, dependsOn: jar) {
    description = "Copy the dependency jars into the lib folder"
    from jar

    // Confluent already includes the Jackson dependencies that this connector depends on. If the connector includes any
    // itself, and the DHF integration is used with the sink connector, then the following error will occur when DHF
    // tries to connect to the Manage API of MarkLogic:
    // java.lang.ClassCastException: com.fasterxml.jackson.datatype.jdk8.Jdk8Module cannot be cast to com.fasterxml.jackson.databind.Module
    //   at org.springframework.http.converter.json.Jackson2ObjectMapperBuilder.registerWellKnownModulesIfAvailable(Jackson2ObjectMapperBuilder.java:849)
    // Stack Overflow indicates this may be due to multiple copies of Jackson being on the classpath, as Jdk8Module
    // otherwise should be castable to Module.
    // Testing has verified that excluding all "jackson-" jars still results in the connector working properly with
    // Confluent 7.3.1. This has no impact on using the connector with plain Apache Kafka, which does not rely on
    // constructing this connector archive.
    from configurations.runtimeClasspath.findAll { it.name.endsWith('jar') && !it.name.startsWith("jackson-") }
    into "${baseArchiveBuildDir}/${baseArchiveName}/lib"
}
task connectorArchive_BuildDirectory(group: confluentArchiveGroup) {
    description = "Build the directory that will be used to create the Kafka Connector Archive"
    dependsOn = [
        connectorArchive_CopyManifestToBuildDirectory,
        connectorArchive_CopyDependenciesToBuildDirectory,
        connectorArchive_CopyDocumentationToBuildDirectory,
        connectorArchive_CopyEtcToBuildDirectory,
        connectorArchive_CopyAssetsToBuildDirectory
    ]
}
task connectorArchive(type: Zip, dependsOn: connectorArchive_BuildDirectory, group: confluentArchiveGroup) {
    description = 'Build a connector archive for submission to the Confluent Connector Hub'
    from "${baseArchiveBuildDir}"
    include '**/*'
    archiveFileName = "${baseArchiveName}.zip"
    destinationDirectory = file('build/distro')
}
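// Typical usage (assumed): `./gradlew connectorArchive` produces the archive at
// build/distro/${componentOwner}-${componentName}-${version}.zip, ready for upload to the Confluent Connector Hub.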
// Tasks for using the connector with Confluent Platform on Docker
task copyConnectorToDockerVolume(type: Copy, dependsOn: connectorArchive, group: confluentTestingGroup) {
    description = "Copies the connector's archive directory to the Docker volume shared with the Connect server"
    from "build/connectorArchive"
    into "test-app/docker/confluent-marklogic-components"
}
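// Typical usage (assumed): run `./gradlew copyConnectorToDockerVolume` while the project's Docker-based
// Confluent Platform stack is running, so the Connect server can pick up the connector from the shared
// test-app/docker/confluent-marklogic-components volume.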