diff --git a/.editorconfig b/.editorconfig index 270e69fe..1ae05cec 100644 --- a/.editorconfig +++ b/.editorconfig @@ -8,8 +8,11 @@ insert_final_newline = true max_line_length = 120 trim_trailing_whitespace = true +[build.gradle] +indent_size = 4 + # Override for Makefile -[{Makefile, makefile, GNUmakefile}] +[{Makefile, makefile}] indent_style = tab indent_size = 4 diff --git a/Makefile b/Makefile index 2595332e..b70f9acc 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,4 @@ -VERSION=1.19.0 +VERSION=1.20.0 default: versioncheck @@ -71,4 +71,4 @@ refresh: ./gradlew --refresh-dependencies upgrade-wrapper: - ./gradlew wrapper --gradle-version=8.4 --distribution-type=bin \ No newline at end of file + ./gradlew wrapper --gradle-version=8.5 --distribution-type=bin diff --git a/README.md b/README.md index 0412c1c7..85368023 100644 --- a/README.md +++ b/README.md @@ -113,8 +113,8 @@ scrape_configs: The docker images are available via: ```bash -docker pull pambrose/prometheus-proxy:1.19.0 -docker pull pambrose/prometheus-agent:1.19.0 +docker pull pambrose/prometheus-proxy:1.20.0 +docker pull pambrose/prometheus-agent:1.20.0 ``` Start a proxy container with: @@ -123,7 +123,7 @@ Start a proxy container with: docker run --rm -p 8082:8082 -p 8092:8092 -p 50051:50051 -p 8080:8080 \ --env ADMIN_ENABLED=true \ --env METRICS_ENABLED=true \ - pambrose/prometheus-proxy:1.19.0 + pambrose/prometheus-proxy:1.20.0 ``` Start an agent container with: @@ -131,7 +131,7 @@ Start an agent container with: ```bash docker run --rm -p 8083:8083 -p 8093:8093 \ --env AGENT_CONFIG='https://raw.githubusercontent.com/pambrose/prometheus-proxy/master/examples/simple.conf' \ - pambrose/prometheus-agent:1.19.0 + pambrose/prometheus-agent:1.20.0 ``` Using the config @@ -149,7 +149,7 @@ is in your current directory, run an agent container with: docker run --rm -p 8083:8083 -p 8093:8093 \ --mount type=bind,source="$(pwd)"/prom-agent.conf,target=/app/prom-agent.conf \ --env AGENT_CONFIG=prom-agent.conf \ - pambrose/prometheus-agent:1.19.0 + pambrose/prometheus-agent:1.20.0 ``` **Note:** The `WORKDIR` of the proxy and agent images is `/app`, so make sure to use `/app` as the base directory in the @@ -297,7 +297,7 @@ docker run --rm -p 8082:8082 -p 8092:8092 -p 50440:50440 -p 8080:8080 \ --env PROXY_CONFIG=tls-no-mutual-auth.conf \ --env ADMIN_ENABLED=true \ --env METRICS_ENABLED=true \ - pambrose/prometheus-proxy:1.19.0 + pambrose/prometheus-proxy:1.20.0 docker run --rm -p 8083:8083 -p 8093:8093 \ --mount type=bind,source="$(pwd)"/testing/certs,target=/app/testing/certs \ @@ -305,7 +305,7 @@ docker run --rm -p 8083:8083 -p 8093:8093 \ --env AGENT_CONFIG=tls-no-mutual-auth.conf \ --env PROXY_HOSTNAME=mymachine.lan:50440 \ --name docker-agent \ - pambrose/prometheus-agent:1.19.0 + pambrose/prometheus-agent:1.20.0 ``` **Note:** The `WORKDIR` of the proxy and agent images is `/app`, so make sure to use `/app` as the base directory in the diff --git a/bin/docker-agent.sh b/bin/docker-agent.sh index d2dabdab..3b7f9862 100755 --- a/bin/docker-agent.sh +++ b/bin/docker-agent.sh @@ -3,4 +3,4 @@ docker run --rm -p 8083:8083 -p 8093:8093 \ --env AGENT_CONFIG='https://raw.githubusercontent.com/pambrose/prometheus-proxy/master/examples/simple.conf' \ --env PROXY_HOSTNAME=mymachine.lan \ - pambrose/prometheus-agent:1.19.0 + pambrose/prometheus-agent:1.20.0 diff --git a/bin/docker-proxy.sh b/bin/docker-proxy.sh index 3f079697..7ca2c95f 100755 --- a/bin/docker-proxy.sh +++ b/bin/docker-proxy.sh @@ -2,4 +2,4 @@ docker run --rm -p 8082:8082 -p 
8092:8092 -p 50051:50051 -p 8080:8080 \ --env PROXY_CONFIG='https://raw.githubusercontent.com/pambrose/prometheus-proxy/master/examples/simple.conf' \ - pambrose/prometheus-proxy:1.19.0 + pambrose/prometheus-proxy:1.20.0 diff --git a/build.gradle b/build.gradle index 226b066c..3f7a7f7a 100644 --- a/build.gradle +++ b/build.gradle @@ -1,151 +1,151 @@ plugins { - id 'idea' - id 'java' - id 'maven-publish' - id 'org.jetbrains.kotlin.jvm' version '1.9.20' - id 'com.google.protobuf' version '0.9.4' // Keep in sync with grpc - id 'org.jmailen.kotlinter' version "4.0.0" - id "com.github.ben-manes.versions" version '0.49.0' - id 'com.github.johnrengelman.shadow' version '8.1.1' - id 'com.github.gmazzo.buildconfig' version '4.1.2' - id 'org.jetbrains.kotlinx.kover' version '0.7.4' - // Turn these off until jacoco fixes their kotlin 1.5.0 SMAP issue - // id 'jacoco' - // id 'com.github.kt3k.coveralls' version '2.12.0' + id 'idea' + id 'java' + id 'maven-publish' + id 'org.jetbrains.kotlin.jvm' version '1.9.21' + id 'com.google.protobuf' version '0.9.4' // Keep in sync with grpc + id 'org.jmailen.kotlinter' version "4.1.0" + id "com.github.ben-manes.versions" version '0.50.0' + id 'com.github.johnrengelman.shadow' version '8.1.1' + id 'com.github.gmazzo.buildconfig' version '4.2.0' + id 'org.jetbrains.kotlinx.kover' version '0.7.5' + // Turn these off until jacoco fixes their kotlin 1.5.0 SMAP issue + // id 'jacoco' + // id 'com.github.kt3k.coveralls' version '2.12.0' } group = 'io.prometheus' -version = '1.19.0' +version = '1.20.0' sourceCompatibility = JavaVersion.VERSION_17 targetCompatibility = JavaVersion.VERSION_17 buildConfig { - packageName("io.prometheus") + packageName("io.prometheus") - buildConfigField('String', 'APP_NAME', "\"${project.name}\"") - buildConfigField('String', 'APP_VERSION', "\"${project.version}\"") - buildConfigField('String', 'APP_RELEASE_DATE', "\"11/02/2023\"") + buildConfigField('String', 'APP_NAME', "\"${project.name}\"") + buildConfigField('String', 'APP_VERSION', "\"${project.version}\"") + buildConfigField('String', 'APP_RELEASE_DATE', "\"12/11/2023\"") } repositories { - google() - mavenCentral() - maven { url = 'https://jitpack.io' } + google() + mavenCentral() + maven { url = 'https://jitpack.io' } } dependencies { - implementation "org.jetbrains.kotlin:kotlin-reflect:$kotlin_version" + implementation "org.jetbrains.kotlin:kotlin-reflect:$kotlin_version" - implementation "org.jetbrains.kotlinx:kotlinx-serialization-json:$serialization_version" + implementation "org.jetbrains.kotlinx:kotlinx-serialization-json:$serialization_version" - implementation "io.grpc:grpc-netty:$grpc_version" - implementation "io.grpc:grpc-protobuf:$grpc_version" - implementation "io.grpc:grpc-stub:$grpc_version" - implementation "io.grpc:grpc-services:$grpc_version" + implementation "io.grpc:grpc-netty:$grpc_version" + implementation "io.grpc:grpc-protobuf:$grpc_version" + implementation "io.grpc:grpc-stub:$grpc_version" + implementation "io.grpc:grpc-services:$grpc_version" - implementation "io.grpc:grpc-kotlin-stub:$gengrpc_version" + implementation "io.grpc:grpc-kotlin-stub:$gengrpc_version" - implementation "io.github.mscheong01:krotoDC-core:$krotodc_version" + implementation "io.github.mscheong01:krotoDC-core:$krotodc_version" - // Required - implementation "io.netty:netty-tcnative-boringssl-static:$tcnative_version" + // Required + implementation "io.netty:netty-tcnative-boringssl-static:$tcnative_version" - implementation 
"com.github.pambrose.common-utils:core-utils:$utils_version" - implementation "com.github.pambrose.common-utils:corex-utils:$utils_version" - implementation "com.github.pambrose.common-utils:dropwizard-utils:$utils_version" - implementation "com.github.pambrose.common-utils:guava-utils:$utils_version" - implementation "com.github.pambrose.common-utils:grpc-utils:$utils_version" - implementation "com.github.pambrose.common-utils:jetty-utils:$utils_version" - implementation "com.github.pambrose.common-utils:ktor-client-utils:$utils_version" - implementation "com.github.pambrose.common-utils:prometheus-utils:$utils_version" - implementation "com.github.pambrose.common-utils:service-utils:$utils_version" - implementation "com.github.pambrose.common-utils:zipkin-utils:$utils_version" + implementation "com.github.pambrose.common-utils:core-utils:$utils_version" + implementation "com.github.pambrose.common-utils:corex-utils:$utils_version" + implementation "com.github.pambrose.common-utils:dropwizard-utils:$utils_version" + implementation "com.github.pambrose.common-utils:guava-utils:$utils_version" + implementation "com.github.pambrose.common-utils:grpc-utils:$utils_version" + implementation "com.github.pambrose.common-utils:jetty-utils:$utils_version" + implementation "com.github.pambrose.common-utils:ktor-client-utils:$utils_version" + implementation "com.github.pambrose.common-utils:prometheus-utils:$utils_version" + implementation "com.github.pambrose.common-utils:service-utils:$utils_version" + implementation "com.github.pambrose.common-utils:zipkin-utils:$utils_version" - implementation "org.eclipse.jetty:jetty-servlet:$jetty_version" + implementation "org.eclipse.jetty:jetty-servlet:$jetty_version" - implementation "javax.annotation:javax.annotation-api:$annotation_version" - implementation "com.beust:jcommander:$jcommander_version" - implementation "com.typesafe:config:$typesafe_version" + implementation "javax.annotation:javax.annotation-api:$annotation_version" + implementation "com.beust:jcommander:$jcommander_version" + implementation "com.typesafe:config:$typesafe_version" - implementation "io.prometheus:simpleclient:$prometheus_version" + implementation "io.prometheus:simpleclient:$prometheus_version" - implementation "io.ktor:ktor-client:$ktor_version" - implementation "io.ktor:ktor-client-cio:$ktor_version" - implementation "io.ktor:ktor-client-auth:$ktor_version" - implementation "io.ktor:ktor-network:$ktor_version" - implementation "io.ktor:ktor-network-tls:$ktor_version" + implementation 'io.ktor:ktor-client-jvm:2.3.6' + implementation 'io.ktor:ktor-client-cio-jvm:2.3.6' + implementation 'io.ktor:ktor-client-auth-jvm:2.3.6' + implementation 'io.ktor:ktor-network-jvm:2.3.6' + implementation 'io.ktor:ktor-network-tls-jvm:2.3.6' - implementation "io.ktor:ktor-server:$ktor_version" - implementation "io.ktor:ktor-server-cio:$ktor_version" - implementation "io.ktor:ktor-server-call-logging:$ktor_version" - implementation "io.ktor:ktor-server-compression:$ktor_version" + implementation 'io.ktor:ktor-server-jvm:2.3.6' + implementation 'io.ktor:ktor-server-cio-jvm:2.3.6' + implementation "io.ktor:ktor-server-call-logging:$ktor_version" + implementation "io.ktor:ktor-server-compression:$ktor_version" - implementation "io.dropwizard.metrics:metrics-healthchecks:$dropwizard_version" + implementation "io.dropwizard.metrics:metrics-healthchecks:$dropwizard_version" - implementation "io.zipkin.brave:brave-instrumentation-grpc:$zipkin_version" + implementation 
"io.zipkin.brave:brave-instrumentation-grpc:$zipkin_version" - implementation "io.github.microutils:kotlin-logging:$logging_version" - implementation "ch.qos.logback:logback-classic:$logback_version" - implementation "org.slf4j:jul-to-slf4j:$slf4j_version" + implementation "io.github.microutils:kotlin-logging:$logging_version" + implementation "ch.qos.logback:logback-classic:$logback_version" + implementation "org.slf4j:jul-to-slf4j:$slf4j_version" - testImplementation "org.amshove.kluent:kluent:$kluent_version" - testImplementation "org.junit.jupiter:junit-jupiter-api:$junit_version" - testRuntimeOnly "org.junit.jupiter:junit-jupiter-engine:$junit_version" + testImplementation "org.amshove.kluent:kluent:$kluent_version" + testImplementation "org.junit.jupiter:junit-jupiter-api:$junit_version" + testRuntimeOnly "org.junit.jupiter:junit-jupiter-engine:$junit_version" } publishing { - publications { - mavenJava(MavenPublication) { - from components.java - versionMapping { - usage('java-api') { - fromResolutionOf('runtimeClasspath') + publications { + mavenJava(MavenPublication) { + from components.java + versionMapping { + usage('java-api') { + fromResolutionOf('runtimeClasspath') + } + usage('java-runtime') { + fromResolutionResult() + } + } } - usage('java-runtime') { - fromResolutionResult() - } - } } - } } compileKotlin.dependsOn ':generateProto' protobuf { - protoc { - artifact = "com.google.protobuf:protoc:$protoc_version" - } - plugins { - grpc { - artifact = "io.grpc:protoc-gen-grpc-java:$grpc_version" + protoc { + artifact = "com.google.protobuf:protoc:$protoc_version" } + plugins { + grpc { + artifact = "io.grpc:protoc-gen-grpc-java:$grpc_version" + } - // Specify protoc to generate using our grpc kotlin plugin - grpckt { - artifact = "io.grpc:protoc-gen-grpc-kotlin:$gengrpc_version:jdk8@jar" - } + // Specify protoc to generate using our grpc kotlin plugin + grpckt { + artifact = "io.grpc:protoc-gen-grpc-kotlin:$gengrpc_version:jdk8@jar" + } - krotoDC { - artifact = "io.github.mscheong01:protoc-gen-krotoDC:$krotodc_version:jdk8@jar" + krotoDC { + artifact = "io.github.mscheong01:protoc-gen-krotoDC:$krotodc_version:jdk8@jar" + } } - } - generateProtoTasks { - all().each { task -> - task.plugins { - grpc {} // Generate Java gRPC classes - grpckt {} // Generate Kotlin gRPC using the custom plugin from library - krotoDC {} // Generate Kotlin data classes - } + generateProtoTasks { + all().each { task -> + task.plugins { + grpc {} // Generate Java gRPC classes + grpckt {} // Generate Kotlin gRPC using the custom plugin from library + krotoDC {} // Generate Kotlin data classes + } // task.builtins { // kotlin {} // } + } } - } } configurations.all { - resolutionStrategy.cacheChangingModulesFor 0, 'seconds' + resolutionStrategy.cacheChangingModulesFor 0, 'seconds' } //startScripts.enabled = false @@ -154,24 +154,24 @@ configurations.all { project.getTasks().getByName("jar").setProperty("duplicatesStrategy", DuplicatesStrategy.INCLUDE); tasks.register('sourcesJar', Jar) { - dependsOn classes - from sourceSets.main.allSource - archiveClassifier = 'sources' + dependsOn classes + from sourceSets.main.allSource + archiveClassifier = 'sources' } tasks.register('javadocJar', Jar) { - dependsOn javadoc - archiveClassifier = 'javadoc' - from javadoc.destinationDir + dependsOn javadoc + archiveClassifier = 'javadoc' + from javadoc.destinationDir } artifacts { - archives sourcesJar - //archives javadocJar + archives sourcesJar + //archives javadocJar } java { - withSourcesJar() + withSourcesJar() } 
@@ -195,129 +195,129 @@ java { //check.dependsOn jacocoTestReport tasks.withType(JavaCompile) { - options.encoding = 'UTF-8' + options.encoding = 'UTF-8' } // Required for multiple uberjar targets shadowJar { - mergeServiceFiles() + mergeServiceFiles() } task agentJar(type: Jar, dependsOn: shadowJar) { - archiveFileName = 'prometheus-agent.jar' - manifest { - attributes('Main-Class': 'io.prometheus.Agent') - } - from zipTree(shadowJar.archiveFile) + archiveFileName = 'prometheus-agent.jar' + manifest { + attributes('Main-Class': 'io.prometheus.Agent') + } + from zipTree(shadowJar.archiveFile) } task proxyJar(type: Jar, dependsOn: shadowJar) { - archiveFileName = 'prometheus-proxy.jar' - manifest { - attributes('Main-Class': 'io.prometheus.Proxy') - } - from zipTree(shadowJar.archiveFile) + archiveFileName = 'prometheus-proxy.jar' + manifest { + attributes('Main-Class': 'io.prometheus.Proxy') + } + from zipTree(shadowJar.archiveFile) } compileKotlin.dependsOn ':generateProto' kotlin { - jvmToolchain(17) + jvmToolchain(17) } compileKotlin { - kotlinOptions.freeCompilerArgs += ['-Xbackend-threads=8', - "-opt-in=kotlin.time.ExperimentalTime", - "-opt-in=kotlin.contracts.ExperimentalContracts", - "-opt-in=kotlin.ExperimentalUnsignedTypes", - "-opt-in=kotlinx.coroutines.ExperimentalCoroutinesApi", - "-opt-in=kotlinx.coroutines.InternalCoroutinesApi", - "-opt-in=kotlinx.coroutines.DelicateCoroutinesApi"] + kotlinOptions.freeCompilerArgs += ['-Xbackend-threads=8', + "-opt-in=kotlin.time.ExperimentalTime", + "-opt-in=kotlin.contracts.ExperimentalContracts", + "-opt-in=kotlin.ExperimentalUnsignedTypes", + "-opt-in=kotlinx.coroutines.ExperimentalCoroutinesApi", + "-opt-in=kotlinx.coroutines.InternalCoroutinesApi", + "-opt-in=kotlinx.coroutines.DelicateCoroutinesApi"] } compileTestKotlin { - kotlinOptions.freeCompilerArgs += ['-Xbackend-threads=8', - "-opt-in=kotlinx.coroutines.InternalCoroutinesApi", - "-opt-in=kotlinx.coroutines.DelicateCoroutinesApi"] + kotlinOptions.freeCompilerArgs += ['-Xbackend-threads=8', + "-opt-in=kotlinx.coroutines.InternalCoroutinesApi", + "-opt-in=kotlinx.coroutines.DelicateCoroutinesApi"] } test { - useJUnitPlatform() + useJUnitPlatform() - // finalizedBy jacocoTestReport + // finalizedBy jacocoTestReport - testLogging { - events "passed", "skipped", "failed", "standardOut", "standardError" - exceptionFormat "full" - showStandardStreams = true - } + testLogging { + events "passed", "skipped", "failed", "standardOut", "standardError" + exceptionFormat "full" + showStandardStreams = true + } } // This will keep generated code out of the kotlinter checks tasks.named("lintKotlinMain") { - source = source - fileTree("$buildDir/generated") + source = source - fileTree("$buildDir/generated") } kotlinter { - ignoreFailures = false - reporters = ['checkstyle', 'plain'] + ignoreFailures = false + reporters = ['checkstyle', 'plain'] } koverMerged { - enable() // create Kover merged reports + enable() // create Kover merged reports - filters { // common filters for all default Kover merged tasks - classes { // common class filter for all default Kover merged tasks - includes.add("io.prometheus.*") // class inclusion rules - //excludes.addAll("io.prometheus.subpackage.*") // class exclusion rules - } + filters { // common filters for all default Kover merged tasks + classes { // common class filter for all default Kover merged tasks + includes.add("io.prometheus.*") // class inclusion rules + //excludes.addAll("io.prometheus.subpackage.*") // class exclusion rules + } - projects { // 
common projects filter for all default Kover merged tasks - //excludes.addAll("project1", ":child:project") // Specifies the projects excluded in the merged tasks + projects { // common projects filter for all default Kover merged tasks + //excludes.addAll("project1", ":child:project") // Specifies the projects excluded in the merged tasks + } } - } - xmlReport { - onCheck.set(true) - // true to run koverMergedXmlReport task during the execution of the check task (if it exists) of the current project - reportFile.set(layout.buildDirectory.file("my-merged-report/result.xml")) // change report file name - overrideClassFilter { // override common class filter - includes.add("io.prometheus.*") // override class inclusion rules - //excludes.addAll("io.prometheus.subpackage.*") // override class exclusion rules + xmlReport { + onCheck.set(true) + // true to run koverMergedXmlReport task during the execution of the check task (if it exists) of the current project + reportFile.set(layout.buildDirectory.file("my-merged-report/result.xml")) // change report file name + overrideClassFilter { // override common class filter + includes.add("io.prometheus.*") // override class inclusion rules + //excludes.addAll("io.prometheus.subpackage.*") // override class exclusion rules + } } - } - - htmlReport { - onCheck.set(true) - // true to run koverMergedHtmlReport task during the execution of the check task (if it exists) of the current project - reportDir.set(layout.buildDirectory.dir("my-merged-report/html-result")) // change report directory - overrideClassFilter { // override common class filter - includes.add("io.prometheus.*") // override class inclusion rules - //excludes.addAll("io.prometheus.subpackage.*") // override class exclusion rules + + htmlReport { + onCheck.set(true) + // true to run koverMergedHtmlReport task during the execution of the check task (if it exists) of the current project + reportDir.set(layout.buildDirectory.dir("my-merged-report/html-result")) // change report directory + overrideClassFilter { // override common class filter + includes.add("io.prometheus.*") // override class inclusion rules + //excludes.addAll("io.prometheus.subpackage.*") // override class exclusion rules + } } - } - - verify { - onCheck.set(true) - // true to run koverMergedVerify task during the execution of the check task (if it exists) of the current project - rule { // add verification rule - //isEnabled = true // false to disable rule checking - name = null // custom name for the rule - target = 'ALL' // specify by which entity the code for separate coverage evaluation will be grouped - - overrideClassFilter { // override common class filter - includes.add("io.prometheus.verify.*") // override class inclusion rules - // excludes.addAll("io.prometheus.verify.subpackage.*") // override class exclusion rules - } - - bound { // add rule bound - minValue = 10 - maxValue = 20 - counter = 'LINE' // change coverage metric to evaluate (LINE, INSTRUCTION, BRANCH) - valueType = 'COVERED_PERCENTAGE' - // change counter value (COVERED_COUNT, MISSED_COUNT, COVERED_PERCENTAGE, MISSED_PERCENTAGE) - } + + verify { + onCheck.set(true) + // true to run koverMergedVerify task during the execution of the check task (if it exists) of the current project + rule { // add verification rule + //isEnabled = true // false to disable rule checking + name = null // custom name for the rule + target = 'ALL' // specify by which entity the code for separate coverage evaluation will be grouped + + overrideClassFilter { // override 
common class filter + includes.add("io.prometheus.verify.*") // override class inclusion rules + // excludes.addAll("io.prometheus.verify.subpackage.*") // override class exclusion rules + } + + bound { // add rule bound + minValue = 10 + maxValue = 20 + counter = 'LINE' // change coverage metric to evaluate (LINE, INSTRUCTION, BRANCH) + valueType = 'COVERED_PERCENTAGE' + // change counter value (COVERED_COUNT, MISSED_COUNT, COVERED_PERCENTAGE, MISSED_PERCENTAGE) + } + } } - } } diff --git a/etc/compose/proxy.yml b/etc/compose/proxy.yml index 3ce5d28b..f7988483 100644 --- a/etc/compose/proxy.yml +++ b/etc/compose/proxy.yml @@ -1,6 +1,6 @@ prometheus-proxy: autoredeploy: true - image: 'pambrose/prometheus-proxy:1.19.0' + image: 'pambrose/prometheus-proxy:1.20.0' ports: - '8080:8080' - '8082:8082' @@ -18,4 +18,4 @@ prometheus-test: #zipkin: # image: 'openzipkin/zipkin' # ports: -# - '9411:9411' \ No newline at end of file +# - '9411:9411' diff --git a/gradle.properties b/gradle.properties index 0fff69fc..333efe84 100644 --- a/gradle.properties +++ b/gradle.properties @@ -9,25 +9,25 @@ org.gradle.caching=true org.gradle.jvmargs=-Xmx4096m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8 # Jars annotation_version=1.3.2 -dropwizard_version=4.2.21 -gengrpc_version=1.4.0 -grpc_version=1.59.0 +dropwizard_version=4.2.23 +gengrpc_version=1.4.1 +grpc_version=1.60.0 jcommander_version=1.82 jetty_version=10.0.18 -junit_version=5.10.0 +junit_version=5.10.1 kluent_version=1.73 -kotlin_version=1.9.20 +kotlin_version=1.9.21 krotodc_version=1.0.6 -ktor_version=2.3.5 -logback_version=1.4.11 +ktor_version=2.3.6 +logback_version=1.4.14 logging_version=4.0.0-beta-2 # Keep in sync with grpc tcnative_version=2.0.61.Final prometheus_version=0.16.0 # Keep in sync with grpc protoc_version=3.24.0 -serialization_version=1.6.0 +serialization_version=1.6.2 slf4j_version=2.0.9 typesafe_version=1.4.3 -utils_version=1.42.1 -zipkin_version=5.16.0 \ No newline at end of file +utils_version=1.43.0 +zipkin_version=5.16.0 diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 033e24c4..7f93135c 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 3fa8f862..1af9e093 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-8.4-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-bin.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/gradlew b/gradlew index fcb6fca1..1aa94a42 100755 --- a/gradlew +++ b/gradlew @@ -83,7 +83,8 @@ done # This is normally unused # shellcheck disable=SC2034 APP_BASE_NAME=${0##*/} -APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit +# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) +APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD=maximum @@ -144,7 +145,7 @@ if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then case $MAX_FD in #( max*) # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. 
- # shellcheck disable=SC3045 + # shellcheck disable=SC2039,SC3045 MAX_FD=$( ulimit -H -n ) || warn "Could not query maximum file descriptor limit" esac @@ -152,7 +153,7 @@ if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then '' | soft) :;; #( *) # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. - # shellcheck disable=SC3045 + # shellcheck disable=SC2039,SC3045 ulimit -n "$MAX_FD" || warn "Could not set maximum file descriptor limit to $MAX_FD" esac @@ -201,11 +202,11 @@ fi # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' -# Collect all arguments for the java command; -# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of -# shell script including quotes and variable substitutions, so put them in -# double quotes to make sure that they get re-expanded; and -# * put everything else in single quotes, so that it's not re-expanded. +# Collect all arguments for the java command: +# * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments, +# and any embedded shellness will be escaped. +# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be +# treated as '${Hostname}' itself on the command line. set -- \ "-Dorg.gradle.appname=$APP_BASE_NAME" \ diff --git a/src/main/kotlin/io/prometheus/Agent.kt b/src/main/kotlin/io/prometheus/Agent.kt index 5cae7de6..86079857 100644 --- a/src/main/kotlin/io/prometheus/Agent.kt +++ b/src/main/kotlin/io/prometheus/Agent.kt @@ -71,22 +71,20 @@ class Agent( inProcessServerName: String = "", testMode: Boolean = false, initBlock: (Agent.() -> Unit)? = null, -) : - GenericService( - configVals = options.configVals, - adminConfig = newAdminConfig(options.adminEnabled, options.adminPort, options.configVals.agent.admin), - metricsConfig = newMetricsConfig(options.metricsEnabled, options.metricsPort, options.configVals.agent.metrics), - zipkinConfig = newZipkinConfig(options.configVals.agent.internal.zipkin), - versionBlock = { getVersionDesc(true) }, - isTestMode = testMode, - ) { - private val agentConfigVals = configVals.agent.internal +) : GenericService( + configVals = options.configVals, + adminConfig = newAdminConfig(options.adminEnabled, options.adminPort, options.configVals.agent.admin), + metricsConfig = newMetricsConfig(options.metricsEnabled, options.metricsPort, options.configVals.agent.metrics), + zipkinConfig = newZipkinConfig(options.configVals.agent.internal.zipkin), + versionBlock = { getVersionDesc(true) }, + isTestMode = testMode, +) { private val clock = Monotonic private val agentHttpService = AgentHttpService(this) private val initialConnectionLatch = CountDownLatch(1) // Prime the limiter - private val reconnectLimiter = RateLimiter.create(1.0 / agentConfigVals.reconnectPauseSecs).apply { acquire() } + private val reconnectLimiter: RateLimiter private var lastMsgSentMark: TimeMark by nonNullableReference(clock.markNow()) internal val agentName = options.agentName.ifBlank { "Unnamed-${hostInfo.hostName}" } @@ -97,7 +95,11 @@ class Agent( internal val launchId = randomId(15) internal val metrics by lazy { AgentMetrics(this) } + val agentConfigVals: ConfigVals.Agent get() = configVals.agent + init { + reconnectLimiter = RateLimiter.create(1.0 / agentConfigVals.internal.reconnectPauseSecs).apply { acquire() } + fun toPlainText() = """ Prometheus Agent Info [${getVersionDesc(false)}] 
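Aside on the `Agent` changes above: `reconnectLimiter` is now a plain `val` assigned in `init`, once the config values it depends on are reachable through `agentConfigVals.internal`. A self-contained sketch of the primed Guava `RateLimiter` pattern it uses, with a hypothetical 3-second pause standing in for `reconnectPauseSecs`:

```kotlin
// Sketch of the primed-RateLimiter pattern from Agent above; 3.0 s is a stand-in for
// agent.internal.reconnectPauseSecs, not necessarily the project's actual default.
import com.google.common.util.concurrent.RateLimiter

fun main() {
  val reconnectPauseSecs = 3.0

  // One permit every reconnectPauseSecs. The acquire() at construction ("priming")
  // consumes the permit a fresh limiter hands out immediately, so every later
  // acquire() waits roughly reconnectPauseSecs.
  val reconnectLimiter = RateLimiter.create(1.0 / reconnectPauseSecs).apply { acquire() }

  repeat(3) { attempt ->
    reconnectLimiter.acquire() // paces the reconnect attempts
    println("reconnect attempt $attempt")
  }
}
```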
@@ -116,7 +118,7 @@ class Agent( """.trimIndent() logger.info { "Agent name: $agentName" } - logger.info { "Proxy reconnect pause time: ${agentConfigVals.reconnectPauseSecs.seconds}" } + logger.info { "Proxy reconnect pause time: ${agentConfigVals.internal.reconnectPauseSecs.seconds}" } logger.info { "Scrape timeout time: ${options.scrapeTimeoutSecs.seconds}" } initServletService { @@ -193,9 +195,7 @@ class Agent( }.onFailure { e -> when (e) { is RequestFailureException -> - logger.info { - "Disconnected from proxy at $proxyHost after invalid response ${e.message}" - } + logger.info { "Disconnected from proxy at $proxyHost after invalid response ${e.message}" } is StatusRuntimeException -> logger.info { "Disconnected from proxy at $proxyHost" } @@ -226,29 +226,32 @@ class Agent( "scrape_request_backlog_check", newBacklogHealthCheck( backlogSize = scrapeRequestBacklogSize.get(), - size = agentConfigVals.scrapeRequestBacklogUnhealthySize, + size = agentConfigVals.internal.scrapeRequestBacklogUnhealthySize, ), ) } - private suspend fun startHeartBeat(connectionContext: AgentConnectionContext) = - if (agentConfigVals.heartbeatEnabled) { - val heartbeatPauseTime = agentConfigVals.heartbeatCheckPauseMillis.milliseconds - val maxInactivityTime = agentConfigVals.heartbeatMaxInactivitySecs.seconds - logger.info { "Heartbeat scheduled to fire after $maxInactivityTime of inactivity" } - - while (isRunning && connectionContext.connected) { - val timeSinceLastWrite = lastMsgSentMark.elapsedNow() - if (timeSinceLastWrite > maxInactivityTime) { - logger.debug { "Sending heartbeat" } - grpcService.sendHeartBeat() + private suspend fun startHeartBeat(connectionContext: AgentConnectionContext) { + with(agentConfigVals.internal) { + if (heartbeatEnabled) { + val heartbeatPauseTime = heartbeatCheckPauseMillis.milliseconds + val maxInactivityTime = heartbeatMaxInactivitySecs.seconds + logger.info { "Heartbeat scheduled to fire after $maxInactivityTime of inactivity" } + + while (isRunning && connectionContext.connected) { + val timeSinceLastWrite = lastMsgSentMark.elapsedNow() + if (timeSinceLastWrite > maxInactivityTime) { + logger.debug { "Sending heartbeat" } + grpcService.sendHeartBeat() + } + delay(heartbeatPauseTime) } - delay(heartbeatPauseTime) + logger.info { "Heartbeat completed" } + } else { + logger.info { "Heartbeat disabled" } } - logger.info { "Heartbeat completed" } - } else { - logger.info { "Heartbeat disabled" } } + } internal fun updateScrapeCounter( agent: Agent, diff --git a/src/main/kotlin/io/prometheus/Proxy.kt b/src/main/kotlin/io/prometheus/Proxy.kt index 0b6320b5..d1002209 100644 --- a/src/main/kotlin/io/prometheus/Proxy.kt +++ b/src/main/kotlin/io/prometheus/Proxy.kt @@ -47,6 +47,12 @@ import io.prometheus.proxy.ProxyOptions import io.prometheus.proxy.ProxyPathManager import io.prometheus.proxy.ScrapeRequestManager import kotlinx.coroutines.runBlocking +import kotlinx.serialization.json.JsonArray +import kotlinx.serialization.json.JsonPrimitive +import kotlinx.serialization.json.addJsonObject +import kotlinx.serialization.json.buildJsonArray +import kotlinx.serialization.json.putJsonArray +import kotlinx.serialization.json.putJsonObject import mu.two.KLogging import java.time.LocalDateTime import java.time.format.DateTimeFormatter @@ -59,31 +65,33 @@ class Proxy( inProcessServerName: String = "", testMode: Boolean = false, initBlock: (Proxy.() -> Unit)? 
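Aside on the `startHeartBeat` rewrite above: the config reads are now scoped with `with(agentConfigVals.internal)`, but the loop itself is unchanged. Stripped of the gRPC and config plumbing, it looks roughly like this; the 250 ms check interval and 3 s inactivity limit are made-up values:

```kotlin
// Standalone sketch of the inactivity loop in startHeartBeat above. In the real Agent,
// lastMsgSentMark is refreshed when a message goes out to the proxy, and the println
// stands in for grpcService.sendHeartBeat().
import kotlinx.coroutines.delay
import kotlinx.coroutines.runBlocking
import kotlin.time.Duration.Companion.milliseconds
import kotlin.time.Duration.Companion.seconds
import kotlin.time.TimeSource.Monotonic

fun main() = runBlocking {
  val heartbeatPauseTime = 250.milliseconds // heartbeatCheckPauseMillis (assumed value)
  val maxInactivityTime = 3.seconds         // heartbeatMaxInactivitySecs (assumed value)
  var lastMsgSentMark = Monotonic.markNow()

  repeat(20) {
    if (lastMsgSentMark.elapsedNow() > maxInactivityTime) {
      println("send heartbeat")
      lastMsgSentMark = Monotonic.markNow()
    }
    delay(heartbeatPauseTime)
  }
}
```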
= null, -) : - GenericService( - configVals = options.configVals, - adminConfig = newAdminConfig(options.adminEnabled, options.adminPort, options.configVals.proxy.admin), - metricsConfig = newMetricsConfig(options.metricsEnabled, options.metricsPort, options.configVals.proxy.metrics), - zipkinConfig = newZipkinConfig(options.configVals.proxy.internal.zipkin), - versionBlock = { getVersionDesc(true) }, - isTestMode = testMode, - ) { - private val proxyConfigVals: ConfigVals.Proxy2.Internal2 = configVals.proxy.internal +) : GenericService( + configVals = options.configVals, + adminConfig = newAdminConfig(options.adminEnabled, options.adminPort, options.configVals.proxy.admin), + metricsConfig = newMetricsConfig(options.metricsEnabled, options.metricsPort, options.configVals.proxy.metrics), + zipkinConfig = newZipkinConfig(options.configVals.proxy.internal.zipkin), + versionBlock = { getVersionDesc(true) }, + isTestMode = testMode, +) { private val httpService = ProxyHttpService(this, proxyHttpPort, isTestMode) - private val recentReqs: EvictingQueue = EvictingQueue.create(configVals.proxy.admin.recentRequestsQueueSize) + private val recentReqs: EvictingQueue = EvictingQueue.create(proxyConfigVals.admin.recentRequestsQueueSize) private val grpcService = if (inProcessServerName.isEmpty()) - ProxyGrpcService(this, port = options.proxyAgentPort) + ProxyGrpcService(proxy = this, port = options.proxyAgentPort) else - ProxyGrpcService(this, inProcessName = inProcessServerName) + ProxyGrpcService(proxy = this, inProcessName = inProcessServerName) - private val agentCleanupService by lazy { AgentContextCleanupService(this, proxyConfigVals) { addServices(this) } } + private val agentCleanupService by lazy { + AgentContextCleanupService(this, proxyConfigVals.internal) { addServices(this) } + } internal val metrics by lazy { ProxyMetrics(this) } internal val pathManager by lazy { ProxyPathManager(this, isTestMode) } internal val agentContextManager = AgentContextManager(isTestMode) internal val scrapeRequestManager = ScrapeRequestManager() + val proxyConfigVals: ConfigVals.Proxy2 get() = configVals.proxy + init { fun toPlainText() = """ @@ -113,8 +121,7 @@ class Proxy( pathManager.toPlainText(), if (recentReqs.size > 0) "\n${recentReqs.size} most recent requests:" else "", recentReqs.reversed().joinToString("\n"), - ) - .joinToString("\n") + ).joinToString("\n") }, ) } else { @@ -131,7 +138,7 @@ class Proxy( grpcService.startSync() httpService.startSync() - if (proxyConfigVals.staleAgentCheckEnabled) + if (proxyConfigVals.internal.staleAgentCheckEnabled) agentCleanupService.startSync() else logger.info { "Agent eviction thread not started" } @@ -140,7 +147,7 @@ class Proxy( override fun shutDown() { grpcService.stopSync() httpService.stopSync() - if (proxyConfigVals.staleAgentCheckEnabled) + if (proxyConfigVals.internal.staleAgentCheckEnabled) agentCleanupService.stopSync() super.shutDown() } @@ -161,21 +168,23 @@ class Proxy( "chunking_map_check", newMapHealthCheck( agentContextManager.chunkedContextMap, - proxyConfigVals.chunkContextMapUnhealthySize, + proxyConfigVals.internal.chunkContextMapUnhealthySize, ), ) register( "scrape_response_map_check", newMapHealthCheck( scrapeRequestManager.scrapeRequestMap, - proxyConfigVals.scrapeRequestMapUnhealthySize, + proxyConfigVals.internal.scrapeRequestMapUnhealthySize, ), ) register( "agent_scrape_request_backlog", healthCheck { agentContextManager.agentContextMap.entries - .filter { it.value.scrapeRequestBacklogSize >= 
proxyConfigVals.scrapeRequestBacklogUnhealthySize } + .filter { + it.value.scrapeRequestBacklogSize >= proxyConfigVals.internal.scrapeRequestBacklogUnhealthySize + } .map { "${it.value} ${it.value.scrapeRequestBacklogSize}" } .let { vals -> if (vals.isEmpty()) { @@ -215,6 +224,26 @@ class Proxy( } } + fun isBlitzRequest(path: String) = with(proxyConfigVals.internal) { blitz.enabled && path == blitz.path } + + fun buildSdJson(): JsonArray = + buildJsonArray { + pathManager.allPaths.forEach { path -> + addJsonObject { + putJsonArray("targets") { + add(JsonPrimitive(options.sdTargetPrefix)) + } + putJsonObject("labels") { + put("__metrics_path__", JsonPrimitive(path)) + + val agentContexts = pathManager.getAgentContextInfo(path)?.agentContexts + put("agentName", JsonPrimitive(agentContexts?.joinToString { it.agentName })) + put("hostName", JsonPrimitive(agentContexts?.joinToString { it.hostName })) + } + } + } + } + override fun toString() = toStringElements { add("proxyPort", httpService.httpPort) diff --git a/src/main/kotlin/io/prometheus/agent/AgentPathManager.kt b/src/main/kotlin/io/prometheus/agent/AgentPathManager.kt index b7e15f27..4130b7ef 100644 --- a/src/main/kotlin/io/prometheus/agent/AgentPathManager.kt +++ b/src/main/kotlin/io/prometheus/agent/AgentPathManager.kt @@ -83,8 +83,8 @@ internal class AgentPathManager(private val agent: Agent) { } fun toPlainText(): String { - val maxName = pathConfigs.maxOfOrNull { it[NAME]?.length ?: 0 } ?: 0 - val maxPath = pathConfigs.maxOfOrNull { it[PATH]?.length ?: 0 } ?: 0 + val maxName = pathConfigs.maxOfOrNull { it[NAME].orEmpty().length } ?: 0 + val maxPath = pathConfigs.maxOfOrNull { it[PATH].orEmpty().length } ?: 0 return "Agent Path Configs:\n" + "Name".padEnd(maxName + 1) + "Path".padEnd(maxPath + 2) + "URL\n" + pathConfigs.joinToString("\n") { c -> "${c[NAME]?.padEnd(maxName)} /${c[PATH]?.padEnd(maxPath)} ${c[URL]}" } } diff --git a/src/main/kotlin/io/prometheus/proxy/ProxyConstants.kt b/src/main/kotlin/io/prometheus/proxy/ProxyConstants.kt new file mode 100644 index 00000000..e555abd0 --- /dev/null +++ b/src/main/kotlin/io/prometheus/proxy/ProxyConstants.kt @@ -0,0 +1,23 @@ +/* + * Copyright © 2023 Paul Ambrose (pambrose@mac.com) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
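Aside on `buildSdJson()` above: it assembles the array served from the proxy's service-discovery endpoint, one object per registered path, in the `targets`/`labels` shape used by Prometheus HTTP service discovery. A runnable sketch of the same shape with made-up target and path values:

```kotlin
// Sketch of the JSON shape produced by buildSdJson() above. "proxy:8080" and
// "app1_metrics" are hypothetical stand-ins for sdTargetPrefix and a registered path.
import kotlinx.serialization.json.Json
import kotlinx.serialization.json.JsonArray
import kotlinx.serialization.json.JsonPrimitive
import kotlinx.serialization.json.addJsonObject
import kotlinx.serialization.json.buildJsonArray
import kotlinx.serialization.json.putJsonArray
import kotlinx.serialization.json.putJsonObject

fun main() {
  val sd = buildJsonArray {
    addJsonObject {
      putJsonArray("targets") { add(JsonPrimitive("proxy:8080")) }
      putJsonObject("labels") {
        put("__metrics_path__", JsonPrimitive("app1_metrics"))
        put("agentName", JsonPrimitive("agent-1"))
        put("hostName", JsonPrimitive("host-1"))
      }
    }
  }
  println(Json { prettyPrint = true }.encodeToString(JsonArray.serializer(), sd))
}
```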
+ */ + +package io.prometheus.proxy + +object ProxyConstants { + const val MISSING_PATH_MSG = "Request missing path" + const val CACHE_CONTROL_VALUE = "must-revalidate,no-store" + const val FAVICON_FILENAME = "favicon.ico" +} diff --git a/src/main/kotlin/io/prometheus/proxy/ProxyGrpcService.kt b/src/main/kotlin/io/prometheus/proxy/ProxyGrpcService.kt index 66ef37d8..163bca0e 100644 --- a/src/main/kotlin/io/prometheus/proxy/ProxyGrpcService.kt +++ b/src/main/kotlin/io/prometheus/proxy/ProxyGrpcService.kt @@ -25,6 +25,7 @@ import com.github.pambrose.common.concurrent.genericServiceListener import com.github.pambrose.common.dsl.GrpcDsl.server import com.github.pambrose.common.dsl.GuavaDsl.toStringElements import com.github.pambrose.common.dsl.MetricsDsl.healthCheck +import com.github.pambrose.common.utils.TlsContext import com.github.pambrose.common.utils.TlsContext.Companion.PLAINTEXT_CONTEXT import com.github.pambrose.common.utils.TlsUtils.buildServerTlsContext import com.github.pambrose.common.utils.shutdownGracefully @@ -42,18 +43,10 @@ internal class ProxyGrpcService( private val port: Int = -1, private val inProcessName: String = "", ) : GenericIdleService() { - val healthCheck = - healthCheck { - if (grpcServer.isShutdown || grpcServer.isTerminated) - HealthCheck.Result.unhealthy("gRPC server is not running") - else - HealthCheck.Result.healthy() - } - - private val grpcServer: Server - private val tracing by lazy { proxy.zipkinReporterService.newTracing("grpc_server") } private val grpcTracing by lazy { GrpcTracing.create(tracing) } + private val grpcServer: Server + val healthCheck: HealthCheck init { val options = proxy.options @@ -67,32 +60,42 @@ internal class ProxyGrpcService( else PLAINTEXT_CONTEXT - grpcServer = - server( - port = port, - tlsContext = tlsContext, - inProcessServerName = inProcessName, - ) { - val proxyService = ProxyServiceImpl(proxy) - val interceptors = - buildList { - if (!options.transportFilterDisabled) - add(ProxyServerInterceptor()) - if (proxy.isZipkinEnabled) - add(grpcTracing.newServerInterceptor()) - } - - addService(ServerInterceptors.intercept(proxyService.bindService(), interceptors)) - - if (!options.transportFilterDisabled) - addTransportFilter(ProxyServerTransportFilter(proxy)) - } - + grpcServer = createGrpcServer(tlsContext, options) grpcServer.shutdownWithJvm(2.seconds) - addListener(genericServiceListener(logger), MoreExecutors.directExecutor()) + + healthCheck = healthCheck { + if (grpcServer.isShutdown || grpcServer.isTerminated) + HealthCheck.Result.unhealthy("gRPC server is not running") + else + HealthCheck.Result.healthy() + } } + private fun createGrpcServer( + tlsContext: TlsContext, + options: ProxyOptions, + ): Server = + server( + port = port, + tlsContext = tlsContext, + inProcessServerName = inProcessName, + ) { + val proxyService = ProxyServiceImpl(proxy) + val interceptors: List = + buildList { + if (!options.transportFilterDisabled) + add(ProxyServerInterceptor()) + if (proxy.isZipkinEnabled) + add(grpcTracing.newServerInterceptor()) + } + + addService(ServerInterceptors.intercept(proxyService.bindService(), interceptors)) + + if (!options.transportFilterDisabled) + addTransportFilter(ProxyServerTransportFilter(proxy)) + } + override fun startUp() { grpcServer.start() } diff --git a/src/main/kotlin/io/prometheus/proxy/ProxyHttpConfig.kt b/src/main/kotlin/io/prometheus/proxy/ProxyHttpConfig.kt index 6ec1a8ae..c6470b72 100644 --- a/src/main/kotlin/io/prometheus/proxy/ProxyHttpConfig.kt +++ 
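Aside on the `ProxyGrpcService` change above: `healthCheck` is now declared up front and assigned in `init` after `grpcServer` is built, because its lambda inspects the server's shutdown state. A sketch of the same ordering with Dropwizard's `HealthCheck` API used directly (the project itself goes through the `healthCheck {}` DSL from common-utils):

```kotlin
// Sketch only: plain Dropwizard HealthCheck plus grpc-java ServerBuilder, mirroring the
// "build the server first, then the check that inspects it" ordering used above.
import com.codahale.metrics.health.HealthCheck
import io.grpc.Server
import io.grpc.ServerBuilder

class GrpcServerHealthCheck(private val server: Server) : HealthCheck() {
  override fun check(): Result =
    if (server.isShutdown || server.isTerminated)
      Result.unhealthy("gRPC server is not running")
    else
      Result.healthy()
}

fun main() {
  val server: Server = ServerBuilder.forPort(50051).build().start()
  val healthCheck = GrpcServerHealthCheck(server) // constructed only once the server exists
  println("healthy: ${healthCheck.execute().isHealthy}")
  server.shutdownNow()
}
```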
b/src/main/kotlin/io/prometheus/proxy/ProxyHttpConfig.kt @@ -16,15 +16,10 @@ package io.prometheus.proxy -import com.github.pambrose.common.util.isNotNull -import com.github.pambrose.common.util.isNull import com.github.pambrose.common.util.simpleClassName -import com.github.pambrose.common.util.unzip import io.ktor.http.* -import io.ktor.http.ContentType.Application.Json import io.ktor.http.ContentType.Text.Plain import io.ktor.http.HttpStatusCode.Companion.NotFound -import io.ktor.http.HttpStatusCode.Companion.OK import io.ktor.http.content.* import io.ktor.server.application.* import io.ktor.server.logging.* @@ -35,24 +30,12 @@ import io.ktor.server.plugins.defaultheaders.* import io.ktor.server.plugins.statuspages.* import io.ktor.server.request.* import io.ktor.server.response.* -import io.ktor.server.routing.* import io.prometheus.Proxy -import kotlinx.coroutines.async -import kotlinx.serialization.encodeToString -import kotlinx.serialization.json.Json -import kotlinx.serialization.json.JsonPrimitive -import kotlinx.serialization.json.addJsonObject -import kotlinx.serialization.json.buildJsonArray -import kotlinx.serialization.json.putJsonArray -import kotlinx.serialization.json.putJsonObject import mu.two.KLogging import org.slf4j.event.Level -import kotlin.time.Duration -import kotlin.time.Duration.Companion.milliseconds -import kotlin.time.Duration.Companion.seconds internal object ProxyHttpConfig : KLogging() { - fun Application.configServer( + fun Application.configureKtorServer( proxy: Proxy, isTestMode: Boolean, ) { @@ -60,299 +43,57 @@ internal object ProxyHttpConfig : KLogging() { header("X-Engine", "Ktor") } - if (!isTestMode && proxy.options.configVals.proxy.http.requestLoggingEnabled) { + if (!isTestMode && proxy.options.configVals.proxy.http.requestLoggingEnabled) install(CallLogging) { - level = Level.INFO - filter { call -> call.request.path().startsWith("/") } - format { call -> - when (val status = call.response.status()) { - HttpStatusCode.Found -> { - val str = call.request.toLogString() - "$status: $str -> ${call.response.headers[HttpHeaders.Location]} - ${call.request.origin.remoteHost}" - } - - else -> "$status: ${call.request.toLogString()} - ${call.request.origin.remoteHost}" - } - } + configureCallLogging() } - } install(Compression) { - gzip { - priority = 1.0 - } - deflate { - priority = 10.0 - minimumSize(1024) // condition - } + configureCompression() } install(StatusPages) { - // Catch all - exception { call, cause -> - logger.info(cause) { " Throwable caught: ${cause.simpleClassName}" } - call.respond(NotFound) - } - - status(NotFound) { call, cause -> - call.respond(TextContent("${cause.value} ${cause.description}", Plain.withCharset(Charsets.UTF_8), cause)) - } + configureStatusPages() } + } - routing { -// get("/__test__") { -// delay(30.seconds) -// call.respondWith("Test value", Plain, OK) -// } - - if (proxy.options.sdEnabled) { - logger.info { "Adding /${proxy.options.sdPath} service discovery endpoint" } - val format = Json { prettyPrint = true } - - get(proxy.options.sdPath) { - val json = buildJsonArray { - proxy.pathManager.allPaths.forEach { path -> - addJsonObject { - putJsonArray("targets") { - add(JsonPrimitive(proxy.options.sdTargetPrefix)) - } - putJsonObject("labels") { - put("__metrics_path__", JsonPrimitive(path)) - - val agentContexts = proxy.pathManager.getAgentContextInfo(path)?.agentContexts - put("agentName", JsonPrimitive(agentContexts?.joinToString { it.agentName })) - put("hostName", JsonPrimitive(agentContexts?.joinToString 
{ it.hostName })) - } - } - } - } - val prettyPrint = format.encodeToString(json) - call.respondWith(prettyPrint, Json) - } - } else { - logger.info { "Not adding /${proxy.options.sdPath} service discovery endpoint" } - } - - get("/*") { - call.response.header(HttpHeaders.CacheControl, "must-revalidate,no-store") - - val proxyConfigVals = proxy.configVals.proxy - val path = call.request.path().drop(1) - val queryParams = call.request.queryParameters.formUrlEncode() - val responseResults = ResponseResults() - val logger = ProxyHttpService.logger - - logger.debug { - "Servicing request for path: $path${if (queryParams.isNotEmpty()) " with query params $queryParams" else ""}" - } - - when { - !proxy.isRunning -> { - logger.error { "Proxy stopped" } - responseResults.apply { - updateMsg = "proxy_stopped" - statusCode = HttpStatusCode.ServiceUnavailable - } - } - - path.isEmpty() || path.isBlank() -> { - val msg = "Request missing path" - proxy.logActivity(msg) - logger.info { msg } - responseResults.apply { - updateMsg = "missing_path" - statusCode = NotFound - } - } - - path == "favicon.ico" -> { - responseResults.apply { - updateMsg = "invalid_path" - statusCode = NotFound - } - } - - proxyConfigVals.internal.blitz.enabled && path == proxyConfigVals.internal.blitz.path -> - responseResults.contentText = "42" - - else -> { - val agentContextInfo = proxy.pathManager.getAgentContextInfo(path) - if (agentContextInfo.isNull()) { - val msg = "Invalid path request /$path" - proxy.logActivity(msg) - logger.info { msg } - responseResults.apply { - updateMsg = "invalid_path" - statusCode = NotFound - } - } else { - if (!agentContextInfo.consolidated && agentContextInfo.agentContexts[0].isNotValid()) { - val msg = "Invalid AgentContext for /$path" - proxy.logActivity(msg) - logger.error { msg } - responseResults.apply { - updateMsg = "invalid_agent_context" - statusCode = NotFound - } - } else { - val jobs = - agentContextInfo.agentContexts - .map { - async { - submitScrapeRequest(it, proxy, path, queryParams, call.request, call.response) - } - } - .map { it.await() } - .onEach { response -> - var status = "/$path - ${response.updateMsg} - ${response.statusCode}" - if (!response.statusCode.isSuccess()) status += " reason: [${response.failureReason}]" - status += " time: ${response.fetchDuration} url: ${response.url}" - - proxy.logActivity(status) - } - - val statusCodes = jobs.map { it.statusCode }.toSet().toList() - val contentTypes = jobs.map { it.contentType }.toSet().toList() - val updateMsgs = jobs.joinToString("\n") { it.updateMsg } - // Grab the contentType of the first OK in the lit - val okContentType = jobs.firstOrNull { it.statusCode == OK }?.contentType + private fun CallLoggingConfig.configureCallLogging() { + level = Level.INFO + filter { call -> call.request.path().startsWith("/") } + format { call -> getFormattedLog(call) } + } - responseResults.apply { - statusCode = if (statusCodes.contains(OK)) OK else statusCodes[0] - contentType = if (okContentType.isNotNull()) okContentType else contentTypes[0] - contentText = jobs.joinToString("\n") { it.contentText } - updateMsg = updateMsgs - } - } - } - } + private fun getFormattedLog(call: ApplicationCall) = + with(call) { + when (val status = response.status()) { + HttpStatusCode.Found -> { + val logMsg = request.toLogString() + "$status: $logMsg -> ${response.headers[HttpHeaders.Location]} - ${request.origin.remoteHost}" } - responseResults.apply { - updateScrapeRequests(proxy, updateMsg) - call.respondWith(contentText, contentType, 
statusCode) - } + else -> "$status: ${request.toLogString()} - ${request.origin.remoteHost}" } } - } - - private fun updateScrapeRequests( - proxy: Proxy, - type: String, - ) { - if (type.isNotEmpty()) proxy.metrics { scrapeRequestCount.labels(type).inc() } - } - private suspend fun ApplicationCall.respondWith( - text: String, - contentType: ContentType = Plain, - status: HttpStatusCode = OK, - ) { - response.header(HttpHeaders.CacheControl, "must-revalidate,no-store") - response.status(status) - respondText(text, contentType, status) + private fun CompressionConfig.configureCompression() { + gzip { + priority = 1.0 + } + deflate { + priority = 10.0 + minimumSize(1024) // condition + } } - private suspend fun submitScrapeRequest( - agentContext: AgentContext, - proxy: Proxy, - path: String, - encodedQueryParams: String, - request: ApplicationRequest, - response: ApplicationResponse, - ): ScrapeRequestResponse { - val scrapeRequest = ScrapeRequestWrapper( - agentContext = agentContext, - proxy = proxy, - path = path, - encodedQueryParams = encodedQueryParams, - authHeader = request.header(HttpHeaders.Authorization) ?: "", - accept = request.header(HttpHeaders.Accept), - debugEnabled = proxy.options.debugEnabled, - ) - val logger = ProxyHttpService.logger - - try { - val proxyConfigVals = proxy.configVals.proxy - val timeoutTime = proxyConfigVals.internal.scrapeRequestTimeoutSecs.seconds - val checkTime = proxyConfigVals.internal.scrapeRequestCheckMillis.milliseconds - - proxy.scrapeRequestManager.addToScrapeRequestMap(scrapeRequest) - agentContext.writeScrapeRequest(scrapeRequest) - - // Returns false if timed out - while (!scrapeRequest.suspendUntilComplete(checkTime)) { - // Check if agent is disconnected or agent is hung - if (scrapeRequest.ageDuration() >= timeoutTime || !scrapeRequest.agentContext.isValid() || !proxy.isRunning) - return ScrapeRequestResponse( - statusCode = HttpStatusCode.ServiceUnavailable, - updateMsg = "timed_out", - fetchDuration = scrapeRequest.ageDuration(), - ) - } - } finally { - val scrapeId = scrapeRequest.scrapeId - proxy.scrapeRequestManager.removeFromScrapeRequestMap(scrapeId) - ?: logger.error { "Scrape request $scrapeId missing in map" } + private fun StatusPagesConfig.configureStatusPages() { + // Catch all + exception { call, cause -> + logger.info(cause) { " Throwable caught: ${cause.simpleClassName}" } + call.respond(NotFound) } - logger.debug { "Results returned from $agentContext for $scrapeRequest" } - - scrapeRequest.scrapeResults.also { scrapeResults -> - HttpStatusCode.fromValue(scrapeResults.statusCode).also { statusCode -> - scrapeResults.contentType.split("/").also { contentTypeElems -> - - val contentType = - if (contentTypeElems.size == 2) - ContentType(contentTypeElems[0], contentTypeElems[1]) - else - Plain - - // Do not return content on error status codes - return if (!statusCode.isSuccess()) { - scrapeRequest.scrapeResults.run { - ScrapeRequestResponse( - statusCode = statusCode, - contentType = contentType, - failureReason = failureReason, - url = url, - updateMsg = "path_not_found", - fetchDuration = scrapeRequest.ageDuration(), - ) - } - } else { - scrapeRequest.scrapeResults.run { - // Unzip content here - ScrapeRequestResponse( - statusCode = statusCode, - contentType = contentType, - contentText = if (zipped) contentAsZipped.unzip() else contentAsText, - failureReason = failureReason, - url = url, - updateMsg = "success", - fetchDuration = scrapeRequest.ageDuration(), - ) - } - } - } - } + status(NotFound) { call, cause -> + 
call.respond(TextContent("${cause.value} ${cause.description}", Plain.withCharset(Charsets.UTF_8), cause)) } } } - -private class ScrapeRequestResponse( - val statusCode: HttpStatusCode, - val updateMsg: String, - var contentType: ContentType = Plain, - var contentText: String = "", - val failureReason: String = "", - val url: String = "", - val fetchDuration: Duration, -) - -private class ResponseResults( - var statusCode: HttpStatusCode = OK, - var contentType: ContentType = Plain, - var contentText: String = "", - var updateMsg: String = "", -) diff --git a/src/main/kotlin/io/prometheus/proxy/ProxyHttpRoutes.kt b/src/main/kotlin/io/prometheus/proxy/ProxyHttpRoutes.kt new file mode 100644 index 00000000..7e9490b5 --- /dev/null +++ b/src/main/kotlin/io/prometheus/proxy/ProxyHttpRoutes.kt @@ -0,0 +1,278 @@ +/* + * Copyright © 2023 Paul Ambrose (pambrose@mac.com) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.prometheus.proxy + +import com.github.pambrose.common.util.isNull +import com.github.pambrose.common.util.unzip +import io.ktor.http.* +import io.ktor.server.application.* +import io.ktor.server.request.* +import io.ktor.server.response.* +import io.ktor.server.routing.* +import io.ktor.util.pipeline.* +import io.prometheus.Proxy +import io.prometheus.proxy.ProxyConstants.CACHE_CONTROL_VALUE +import io.prometheus.proxy.ProxyConstants.FAVICON_FILENAME +import io.prometheus.proxy.ProxyUtils.emptyPathResponse +import io.prometheus.proxy.ProxyUtils.incrementScrapeRequestCount +import io.prometheus.proxy.ProxyUtils.invalidAgentContextResponse +import io.prometheus.proxy.ProxyUtils.invalidPathResponse +import io.prometheus.proxy.ProxyUtils.proxyNotRunningResponse +import io.prometheus.proxy.ProxyUtils.respondWith +import kotlinx.coroutines.async +import kotlinx.coroutines.coroutineScope +import kotlinx.serialization.encodeToString +import kotlinx.serialization.json.Json +import mu.two.KLogging +import kotlin.time.Duration +import kotlin.time.Duration.Companion.milliseconds +import kotlin.time.Duration.Companion.seconds + +object ProxyHttpRoutes : KLogging() { + fun Application.configureHttpRoutes(proxy: Proxy) { + routing { + handleRequests(proxy) + } + } + + private fun Routing.handleRequests(proxy: Proxy) { + // get("/__test__") { + // delay(30.seconds) + // call.respondWith("Test value", Plain, OK) + // } + handleServiceDiscoveryEndpoint(proxy) + handleClientRequests(proxy) + } + + private fun Routing.handleServiceDiscoveryEndpoint(proxy: Proxy) { + if (proxy.options.sdEnabled) { + logger.info { "Adding /${proxy.options.sdPath} service discovery endpoint" } + get(proxy.options.sdPath) { + val json = proxy.buildSdJson() + val format = Json { prettyPrint = true } + val prettyPrint = format.encodeToString(json) + call.respondWith(prettyPrint, ContentType.Application.Json) + } + } else { + logger.info { "Not adding /${proxy.options.sdPath} service discovery endpoint" } + } + } + + private fun Routing.handleClientRequests(proxy: Proxy) { + get("/*") { + 
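Aside on the `ProxyHttpConfig` refactor above: each `install {}` block now delegates to a small extension function on the plugin's config class, which keeps `configureKtorServer` readable. The same pattern in isolation, assuming the Ktor 2.3.x call-logging and compression artifacts from build.gradle are on the classpath:

```kotlin
// Compile-only sketch of the "extension function per plugin config" pattern used in
// ProxyHttpConfig above; install this module into any Ktor 2.3.x server application.
import io.ktor.server.application.*
import io.ktor.server.plugins.callloging.*
import io.ktor.server.plugins.compression.*
import io.ktor.server.request.*
import org.slf4j.event.Level

fun Application.configureKtorServer() {
  install(CallLogging) { configureCallLogging() }
  install(Compression) { configureCompression() }
}

private fun CallLoggingConfig.configureCallLogging() {
  level = Level.INFO
  filter { call -> call.request.path().startsWith("/") }
}

private fun CompressionConfig.configureCompression() {
  gzip { priority = 1.0 }
  deflate {
    priority = 10.0
    minimumSize(1024)
  }
}
```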
call.response.header(HttpHeaders.CacheControl, CACHE_CONTROL_VALUE) + + val path = call.request.path().drop(1) + val queryParams = call.request.queryParameters.formUrlEncode() + val responseResults = ResponseResults() + + logger.debug { + "Servicing request for path: $path${if (queryParams.isNotEmpty()) " with query params $queryParams" else ""}" + } + + when { + !proxy.isRunning -> proxyNotRunningResponse(logger, responseResults) + path.isBlank() -> emptyPathResponse(proxy, logger, responseResults) + path == FAVICON_FILENAME -> invalidPathResponse(path, proxy, logger, responseResults) + proxy.isBlitzRequest(path) -> responseResults.contentText = "42" + else -> processRequestsBasedOnPath(proxy, path, responseResults, queryParams) + } + + responseResults.apply { + incrementScrapeRequestCount(proxy, updateMsg) + call.respondWith(contentText, contentType, statusCode) + } + } + } + + private suspend fun PipelineContext.processRequestsBasedOnPath( + proxy: Proxy, + path: String, + responseResults: ResponseResults, + queryParams: String, + ) { + val agentContextInfo = proxy.pathManager.getAgentContextInfo(path) + when { + agentContextInfo.isNull() -> invalidPathResponse(path, proxy, logger, responseResults) + agentContextInfo.isNotValid() -> invalidAgentContextResponse(path, proxy, logger, responseResults) + else -> processRequests(agentContextInfo, proxy, path, queryParams, responseResults) + } + } + + private suspend fun PipelineCall.processRequests( + agentContextInfo: ProxyPathManager.AgentContextInfo, + proxy: Proxy, + path: String, + queryParams: String, + responseResults: ResponseResults, + ) { + val results: List = executeScrapeRequests(agentContextInfo, proxy, path, queryParams) + val statusCodes: List = results.map { it.statusCode }.toSet().toList() + val contentTypes: List = results.map { it.contentType }.toSet().toList() + val updateMsgs: String = results.joinToString("\n") { it.updateMsg } + // Grab the contentType of the first OK in the list + val okContentType: ContentType? 
= results.firstOrNull { it.statusCode == HttpStatusCode.OK }?.contentType + + responseResults.apply { + statusCode = if (statusCodes.contains(HttpStatusCode.OK)) HttpStatusCode.OK else statusCodes[0] + contentType = okContentType ?: contentTypes[0] + contentText = results.joinToString("\n") { it.contentText } + updateMsg = updateMsgs + } + } + + private suspend fun PipelineCall.executeScrapeRequests( + agentContextInfo: ProxyPathManager.AgentContextInfo, + proxy: Proxy, + path: String, + queryParams: String, + ): List = + coroutineScope { + agentContextInfo.agentContexts + .map { agentContext -> + async { + submitScrapeRequest(agentContext, proxy, path, queryParams, call.request, call.response) + } + } + .map { deferred -> deferred.await() } + .onEach { response -> logActivityForResponse(path, response, proxy) } + } + + private fun logActivityForResponse( + path: String, + response: ScrapeRequestResponse, + proxy: Proxy, + ) { + var status = "/$path - ${response.updateMsg} - ${response.statusCode}" + if (!response.statusCode.isSuccess()) + status += " reason: [${response.failureReason}]" + status += " time: ${response.fetchDuration} url: ${response.url}" + proxy.logActivity(status) + } + + private suspend fun submitScrapeRequest( + agentContext: AgentContext, + proxy: Proxy, + path: String, + encodedQueryParams: String, + request: ApplicationRequest, + response: ApplicationResponse, + ): ScrapeRequestResponse { + val scrapeRequest = createScrapeRequest(agentContext, proxy, path, encodedQueryParams, request) + + try { + val proxyConfigVals = proxy.proxyConfigVals + val timeoutTime = proxyConfigVals.internal.scrapeRequestTimeoutSecs.seconds + val checkTime = proxyConfigVals.internal.scrapeRequestCheckMillis.milliseconds + + proxy.scrapeRequestManager.addToScrapeRequestMap(scrapeRequest) + agentContext.writeScrapeRequest(scrapeRequest) + + // Returns false if timed out + while (!scrapeRequest.suspendUntilComplete(checkTime)) { + // Check if agent is disconnected or agent is hung + if (scrapeRequest.ageDuration() >= timeoutTime || !scrapeRequest.agentContext.isValid() || !proxy.isRunning) + return ScrapeRequestResponse( + statusCode = HttpStatusCode.ServiceUnavailable, + updateMsg = "timed_out", + fetchDuration = scrapeRequest.ageDuration(), + ) + } + } finally { + val scrapeId = scrapeRequest.scrapeId + proxy.scrapeRequestManager.removeFromScrapeRequestMap(scrapeId) + ?: logger.error { "Scrape request $scrapeId missing in map" } + } + + logger.debug { "Results returned from $agentContext for $scrapeRequest" } + + scrapeRequest.scrapeResults.also { scrapeResults -> + HttpStatusCode.fromValue(scrapeResults.statusCode).also { statusCode -> + scrapeResults.contentType.split("/").also { contentTypeElems -> + + val contentType = + if (contentTypeElems.size == 2) + ContentType(contentTypeElems[0], contentTypeElems[1]) + else + ContentType.Text.Plain + + // Do not return content on error status codes + return if (!statusCode.isSuccess()) + scrapeRequest.scrapeResults.run { + ScrapeRequestResponse( + statusCode = statusCode, + contentType = contentType, + failureReason = failureReason, + url = url, + updateMsg = "path_not_found", + fetchDuration = scrapeRequest.ageDuration(), + ) + } + else + scrapeRequest.scrapeResults.run { + // Unzip content here + ScrapeRequestResponse( + statusCode = statusCode, + contentType = contentType, + contentText = if (zipped) contentAsZipped.unzip() else contentAsText, + failureReason = failureReason, + url = url, + updateMsg = "success", + fetchDuration = 
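`executeScrapeRequests` and `processRequests` above implement a fan-out/fan-in: one inbound Prometheus scrape is dispatched concurrently to every agent registered on the path, and the per-agent results are folded into a single response (OK wins if any agent returned OK, the content type comes from the first OK result, bodies are joined with newlines). A reduced sketch of both steps, assuming a hypothetical `scrapeAgent` call and a simplified result type rather than the project's gRPC plumbing:

```kotlin
import io.ktor.http.ContentType
import io.ktor.http.HttpStatusCode
import kotlinx.coroutines.async
import kotlinx.coroutines.awaitAll
import kotlinx.coroutines.coroutineScope
import kotlinx.coroutines.delay
import kotlinx.coroutines.runBlocking

// Simplified stand-in for ScrapeRequestResponse.
data class AgentResult(
  val statusCode: HttpStatusCode,
  val contentType: ContentType = ContentType.Text.Plain,
  val contentText: String = "",
)

// Hypothetical per-agent scrape; the real code writes a ScrapeRequestWrapper to the agent
// and suspends until the agent reports back or the request times out.
suspend fun scrapeAgent(agentId: String): AgentResult {
  delay(50) // simulate the agent round trip
  return AgentResult(HttpStatusCode.OK, contentText = "metric{agent=\"$agentId\"} 1")
}

// Fan-out: one coroutine per agent, so total latency is bounded by the slowest agent.
suspend fun scrapeAll(agentIds: List<String>): List<AgentResult> =
  coroutineScope {
    agentIds.map { id -> async { scrapeAgent(id) } }.awaitAll()
  }

// Fan-in: the merge rule used for consolidated paths.
fun merge(results: List<AgentResult>): AgentResult {
  val okContentType = results.firstOrNull { it.statusCode == HttpStatusCode.OK }?.contentType
  return AgentResult(
    statusCode = if (results.any { it.statusCode == HttpStatusCode.OK }) HttpStatusCode.OK else results.first().statusCode,
    contentType = okContentType ?: results.first().contentType,
    contentText = results.joinToString("\n") { it.contentText },
  )
}

fun main() = runBlocking {
  val merged = merge(scrapeAll(listOf("agent-1", "agent-2")))
  println("${merged.statusCode}\n${merged.contentText}")
}
```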
scrapeRequest.ageDuration(), + ) + } + } + } + } + } + + private fun createScrapeRequest( + agentContext: AgentContext, + proxy: Proxy, + path: String, + encodedQueryParams: String, + request: ApplicationRequest, + ): ScrapeRequestWrapper = + ScrapeRequestWrapper( + agentContext = agentContext, + proxy = proxy, + path = path, + encodedQueryParams = encodedQueryParams, + authHeader = request.header(HttpHeaders.Authorization) ?: "", + accept = request.header(HttpHeaders.Accept), + debugEnabled = proxy.options.debugEnabled, + ) +} + +typealias PipelineCall = PipelineContext<*, ApplicationCall> + +class ScrapeRequestResponse( + val statusCode: HttpStatusCode, + val updateMsg: String, + var contentType: ContentType = ContentType.Text.Plain, + var contentText: String = "", + val failureReason: String = "", + val url: String = "", + val fetchDuration: Duration, +) + +class ResponseResults( + var statusCode: HttpStatusCode = HttpStatusCode.OK, + var contentType: ContentType = ContentType.Text.Plain, + var contentText: String = "", + var updateMsg: String = "", +) diff --git a/src/main/kotlin/io/prometheus/proxy/ProxyHttpService.kt b/src/main/kotlin/io/prometheus/proxy/ProxyHttpService.kt index 34e6538f..f784d3a3 100644 --- a/src/main/kotlin/io/prometheus/proxy/ProxyHttpService.kt +++ b/src/main/kotlin/io/prometheus/proxy/ProxyHttpService.kt @@ -27,21 +27,28 @@ import io.ktor.server.cio.* import io.ktor.server.cio.CIOApplicationEngine.* import io.ktor.server.engine.* import io.prometheus.Proxy -import io.prometheus.proxy.ProxyHttpConfig.configServer +import io.prometheus.proxy.ProxyHttpConfig.configureKtorServer +import io.prometheus.proxy.ProxyHttpRoutes.configureHttpRoutes import mu.two.KLogging import kotlin.time.Duration.Companion.seconds import kotlin.time.DurationUnit.SECONDS -internal class ProxyHttpService(private val proxy: Proxy, val httpPort: Int, isTestMode: Boolean) : - GenericIdleService() { - private val proxyConfigVals = proxy.configVals.proxy +internal class ProxyHttpService( + private val proxy: Proxy, + val httpPort: Int, + isTestMode: Boolean, +) : GenericIdleService() { private val idleTimeout = - if (proxyConfigVals.http.idleTimeoutSecs == -1) 45.seconds else proxyConfigVals.http.idleTimeoutSecs.seconds + with(proxy.proxyConfigVals.http) { (if (idleTimeoutSecs == -1) 45 else idleTimeoutSecs).seconds } private val tracing by lazy { proxy.zipkinReporterService.newTracing("proxy-http") } private val config: Configuration.() -> Unit = { connectionIdleTimeoutSeconds = idleTimeout.toInt(SECONDS) } - private val httpServer = embeddedServer(CIO, port = httpPort, configure = config) { configServer(proxy, isTestMode) } + private val httpServer = + embeddedServer(CIO, port = httpPort, configure = config) { + configureKtorServer(proxy, isTestMode) + configureHttpRoutes(proxy) + } init { addListener(genericServiceListener(logger), MoreExecutors.directExecutor()) diff --git a/src/main/kotlin/io/prometheus/proxy/ProxyOptions.kt b/src/main/kotlin/io/prometheus/proxy/ProxyOptions.kt index 41019174..7a39f3b9 100644 --- a/src/main/kotlin/io/prometheus/proxy/ProxyOptions.kt +++ b/src/main/kotlin/io/prometheus/proxy/ProxyOptions.kt @@ -51,49 +51,51 @@ class ProxyOptions(argv: Array) : BaseOptions(Proxy::class.java.simpleNa } override fun assignConfigVals() { - if (proxyHttpPort == -1) - proxyHttpPort = PROXY_PORT.getEnv(configVals.proxy.http.port) - logger.info { "proxyHttpPort: $proxyHttpPort" } - - if (proxyAgentPort == -1) - proxyAgentPort = AGENT_PORT.getEnv(configVals.proxy.agent.port) - 
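`ProxyHttpService` above resolves the HTTP idle timeout with a `-1` sentinel meaning "use the 45-second default" and then hands it to the engine as whole seconds. A small sketch of just that resolution, using standalone values instead of the proxy's config tree:

```kotlin
import kotlin.time.Duration
import kotlin.time.Duration.Companion.seconds
import kotlin.time.DurationUnit.SECONDS

// Mirrors the rule in ProxyHttpService: -1 in the config selects the 45s default.
fun resolveIdleTimeout(idleTimeoutSecs: Int): Duration =
  (if (idleTimeoutSecs == -1) 45 else idleTimeoutSecs).seconds

fun main() {
  println(resolveIdleTimeout(-1))   // 45s
  println(resolveIdleTimeout(120))  // 2m
  // The engine configuration wants plain seconds, hence the toInt(SECONDS) conversion.
  println(resolveIdleTimeout(120).toInt(SECONDS)) // 120
}
```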
logger.info { "proxyAgentPort: $proxyAgentPort" } - - configVals.proxy - .also { proxyConfigVals -> - - if (!sdEnabled) - sdEnabled = SD_ENABLED.getEnv(false) - logger.info { "sdEnabled: $sdEnabled" } - - if (sdPath.isEmpty()) - sdPath = SD_PATH.getEnv(proxyConfigVals.service.discovery.path) - if (sdEnabled) - require(sdPath.isNotEmpty()) { "sdPath is empty" } - else - logger.info { "sdPath: $sdPath" } - - if (sdTargetPrefix.isEmpty()) - sdTargetPrefix = SD_TARGET_PREFIX.getEnv(proxyConfigVals.service.discovery.targetPrefix) - if (sdEnabled) - require(sdTargetPrefix.isNotEmpty()) { "sdTargetPrefix is empty" } - else - logger.info { "sdTargetPrefix: $sdTargetPrefix" } - - assignAdminEnabled(proxyConfigVals.admin.enabled) - assignAdminPort(proxyConfigVals.admin.port) - assignMetricsEnabled(proxyConfigVals.metrics.enabled) - assignMetricsPort(proxyConfigVals.metrics.port) - assignTransportFilterDisabled(proxyConfigVals.transportFilterDisabled) - assignDebugEnabled(proxyConfigVals.admin.debugEnabled) - - assignCertChainFilePath(proxyConfigVals.tls.certChainFilePath) - assignPrivateKeyFilePath(proxyConfigVals.tls.privateKeyFilePath) - assignTrustCertCollectionFilePath(proxyConfigVals.tls.trustCertCollectionFilePath) - - logger.info { "proxy.internal.scrapeRequestTimeoutSecs: ${proxyConfigVals.internal.scrapeRequestTimeoutSecs}" } - logger.info { "proxy.internal.staleAgentCheckPauseSecs: ${proxyConfigVals.internal.staleAgentCheckPauseSecs}" } - logger.info { "proxy.internal.maxAgentInactivitySecs: ${proxyConfigVals.internal.maxAgentInactivitySecs}" } + with(configVals.proxy) { + if (proxyHttpPort == -1) + proxyHttpPort = PROXY_PORT.getEnv(http.port) + logger.info { "proxyHttpPort: $proxyHttpPort" } + + if (proxyAgentPort == -1) + proxyAgentPort = AGENT_PORT.getEnv(agent.port) + logger.info { "proxyAgentPort: $proxyAgentPort" } + + if (!sdEnabled) + sdEnabled = SD_ENABLED.getEnv(false) + logger.info { "sdEnabled: $sdEnabled" } + + if (sdPath.isEmpty()) + sdPath = SD_PATH.getEnv(service.discovery.path) + if (sdEnabled) + require(sdPath.isNotEmpty()) { "sdPath is empty" } + else + logger.info { "sdPath: $sdPath" } + + if (sdTargetPrefix.isEmpty()) + sdTargetPrefix = SD_TARGET_PREFIX.getEnv(service.discovery.targetPrefix) + if (sdEnabled) + require(sdTargetPrefix.isNotEmpty()) { "sdTargetPrefix is empty" } + else + logger.info { "sdTargetPrefix: $sdTargetPrefix" } + + assignAdminEnabled(admin.enabled) + assignAdminPort(admin.port) + assignMetricsEnabled(metrics.enabled) + assignMetricsPort(metrics.port) + assignTransportFilterDisabled(transportFilterDisabled) + assignDebugEnabled(admin.debugEnabled) + + with(tls) { + assignCertChainFilePath(certChainFilePath) + assignPrivateKeyFilePath(privateKeyFilePath) + assignTrustCertCollectionFilePath(trustCertCollectionFilePath) } + + with(internal) { + logger.info { "proxy.internal.scrapeRequestTimeoutSecs: $scrapeRequestTimeoutSecs" } + logger.info { "proxy.internal.staleAgentCheckPauseSecs: $staleAgentCheckPauseSecs" } + logger.info { "proxy.internal.maxAgentInactivitySecs: $maxAgentInactivitySecs" } + } + } } } diff --git a/src/main/kotlin/io/prometheus/proxy/ProxyPathManager.kt b/src/main/kotlin/io/prometheus/proxy/ProxyPathManager.kt index d505d667..d7118292 100644 --- a/src/main/kotlin/io/prometheus/proxy/ProxyPathManager.kt +++ b/src/main/kotlin/io/prometheus/proxy/ProxyPathManager.kt @@ -28,9 +28,11 @@ import io.prometheus.grpc.krotodc.UnregisterPathResponse import mu.two.KLogging internal class ProxyPathManager(private val proxy: Proxy, private val 
isTestMode: Boolean) { - class AgentContextInfo(var consolidated: Boolean, val agentContexts: MutableList<AgentContext>) { + class AgentContextInfo(val isConsolidated: Boolean, val agentContexts: MutableList<AgentContext>) { + fun isNotValid() = !isConsolidated && agentContexts[0].isNotValid() + override fun toString(): String { - return "AgentContextInfo(consolidated=$consolidated, agentContexts=$agentContexts)" + return "AgentContextInfo(consolidated=$isConsolidated, agentContexts=$agentContexts)" } } @@ -58,9 +60,9 @@ internal class ProxyPathManager(private val proxy: Proxy, private val isTestMode if (agentInfo.isNull()) { pathMap[path] = AgentContextInfo(true, mutableListOf(agentContext)) } else { - if (agentContext.consolidated != agentInfo.consolidated) + if (agentContext.consolidated != agentInfo.isConsolidated) logger.warn { - "Mismatch of agent context types: ${agentContext.consolidated} and ${agentInfo.consolidated}" + "Mismatch of agent context types: ${agentContext.consolidated} and ${agentInfo.isConsolidated}" } else agentInfo.agentContexts += agentContext @@ -96,7 +98,7 @@ internal class ProxyPathManager(private val proxy: Proxy, private val isTestMode logger.error { msg } false to msg } else { - if (agentInfo.consolidated && agentInfo.agentContexts.size > 1) { + if (agentInfo.isConsolidated && agentInfo.agentContexts.size > 1) { agentInfo.agentContexts.remove(agentContext) if (!isTestMode) logger.info { "Removed element of path /$path for $agentInfo" } diff --git a/src/main/kotlin/io/prometheus/proxy/ProxyUtils.kt b/src/main/kotlin/io/prometheus/proxy/ProxyUtils.kt new file mode 100644 index 00000000..7f0abc79 --- /dev/null +++ b/src/main/kotlin/io/prometheus/proxy/ProxyUtils.kt @@ -0,0 +1,126 @@ +/* + * Copyright © 2023 Paul Ambrose (pambrose@mac.com) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
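`ProxyPathManager` above keys agents by scrape path and tracks whether a path is consolidated, i.e. served by several agents whose results are merged. A reduced sketch of such a registry, using a hypothetical `PathRegistry` and simplified stand-ins for `AgentContext` / `AgentContextInfo` (not the project's classes), showing the add/mismatch/remove logic:

```kotlin
import java.util.concurrent.ConcurrentHashMap

// Hypothetical, simplified stand-ins for AgentContext / AgentContextInfo.
data class Agent(val id: String, val consolidated: Boolean)

class PathEntry(val isConsolidated: Boolean, val agents: MutableList<Agent>)

class PathRegistry {
  private val pathMap = ConcurrentHashMap<String, PathEntry>()

  fun register(path: String, agent: Agent) {
    val entry = pathMap[path]
    when {
      entry == null ->
        pathMap[path] = PathEntry(agent.consolidated, mutableListOf(agent))
      entry.isConsolidated != agent.consolidated ->
        println("Mismatch of agent context types for /$path: ${agent.consolidated} vs ${entry.isConsolidated}")
      else ->
        entry.agents += agent
    }
  }

  fun unregister(path: String, agent: Agent) {
    val entry = pathMap[path] ?: return
    if (entry.isConsolidated && entry.agents.size > 1)
      entry.agents.remove(agent)   // other agents keep serving the path
    else
      pathMap.remove(path)         // last agent gone, drop the path
  }

  fun agentsFor(path: String): List<Agent> = pathMap[path]?.agents.orEmpty()
}

fun main() {
  val registry = PathRegistry()
  registry.register("app_metrics", Agent("a1", consolidated = true))
  registry.register("app_metrics", Agent("a2", consolidated = true))
  println(registry.agentsFor("app_metrics").map { it.id }) // [a1, a2]
}
```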
+ */ + +package io.prometheus.proxy + +import io.ktor.http.* +import io.ktor.server.application.* +import io.ktor.server.response.* +import io.prometheus.Proxy +import io.prometheus.proxy.ProxyConstants.CACHE_CONTROL_VALUE +import io.prometheus.proxy.ProxyConstants.MISSING_PATH_MSG +import mu.two.KLogger + +object ProxyUtils { + fun invalidAgentContextResponse( + path: String, + proxy: Proxy, + logger: KLogger, + responseResults: ResponseResults, + ) { + updateResponse( + message = "Invalid AgentContext for /$path", + proxy = proxy, + logger = logger, + logLevel = KLogger::error, + responseResults = responseResults, + updateMsg = "invalid_agent_context", + statusCode = HttpStatusCode.NotFound, + ) + } + + fun invalidPathResponse( + path: String, + proxy: Proxy, + logger: KLogger, + responseResults: ResponseResults, + ) { + updateResponse( + message = "Invalid path request /$path", + proxy = proxy, + logger = logger, + logLevel = KLogger::info, + responseResults = responseResults, + updateMsg = "invalid_path", + statusCode = HttpStatusCode.NotFound, + ) + } + + fun emptyPathResponse( + proxy: Proxy, + logger: KLogger, + responseResults: ResponseResults, + ) { + updateResponse( + message = MISSING_PATH_MSG, + proxy = proxy, + logger = logger, + logLevel = KLogger::info, + responseResults = responseResults, + updateMsg = "missing_path", + statusCode = HttpStatusCode.NotFound, + ) + } + + fun proxyNotRunningResponse( + logger: KLogger, + responseResults: ResponseResults, + ) { + updateResponse( + message = "Proxy stopped", + proxy = null, + logger = logger, + logLevel = KLogger::error, + responseResults = responseResults, + updateMsg = "proxy_stopped", + statusCode = HttpStatusCode.ServiceUnavailable, + ) + } + + private fun updateResponse( + message: String, + proxy: Proxy?, + logger: KLogger, + logLevel: (KLogger, String) -> Unit, + responseResults: ResponseResults, + updateMsg: String, + statusCode: HttpStatusCode, + ) { + proxy?.logActivity(message) + logLevel(logger, message) + responseResults.apply { + this.updateMsg = updateMsg + this.statusCode = statusCode + } + } + + fun incrementScrapeRequestCount( + proxy: Proxy, + type: String, + ) { + if (type.isNotEmpty()) proxy.metrics { scrapeRequestCount.labels(type).inc() } + } + + suspend fun ApplicationCall.respondWith( + text: String, + contentType: ContentType = ContentType.Text.Plain, + status: HttpStatusCode = HttpStatusCode.OK, + ) { + response.header(HttpHeaders.CacheControl, CACHE_CONTROL_VALUE) + response.status(status) + respondText(text, contentType, status) + } +} diff --git a/src/test/kotlin/io/prometheus/AdminDefaultPathTest.kt b/src/test/kotlin/io/prometheus/AdminDefaultPathTest.kt index 182a5328..92142f43 100644 --- a/src/test/kotlin/io/prometheus/AdminDefaultPathTest.kt +++ b/src/test/kotlin/io/prometheus/AdminDefaultPathTest.kt @@ -32,88 +32,83 @@ import org.junit.jupiter.api.BeforeAll import org.junit.jupiter.api.Test class AdminDefaultPathTest { - private val agentConfigVals = agent.configVals.agent - private val proxyConfigVals = proxy.configVals.proxy + private val agentConfigVals = agent.agentConfigVals + private val proxyConfigVals = proxy.proxyConfigVals @Test fun proxyPingPathTest() { - proxyConfigVals.admin - .also { admin -> - blockingGet("${admin.port}/${admin.pingPath}".withPrefix()) { response -> - response.status shouldBeEqualTo HttpStatusCode.OK - response.bodyAsText() shouldStartWith "pong" - } + with(proxyConfigVals.admin) { + blockingGet("$port/$pingPath".withPrefix()) { response -> + response.status 
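`ProxyUtils.updateResponse` above takes the log level as a function value (`logLevel: (KLogger, String) -> Unit`), so callers can pass `KLogger::info` or `KLogger::error` and keep one code path for all the error responses. A dependency-free sketch of the same idea with a plain logger interface (hypothetical names, not the kotlin-logging API):

```kotlin
import io.ktor.http.HttpStatusCode

// Minimal stand-ins: a logger with two levels and a mutable response holder.
interface SimpleLogger {
  fun info(msg: String)
  fun error(msg: String)
}

class StdoutLogger : SimpleLogger {
  override fun info(msg: String) = println("INFO  $msg")
  override fun error(msg: String) = println("ERROR $msg")
}

class Result(var statusCode: HttpStatusCode = HttpStatusCode.OK, var updateMsg: String = "")

// One helper, parameterized by the log level, mirroring updateResponse().
fun updateResponse(
  message: String,
  logger: SimpleLogger,
  logLevel: (SimpleLogger, String) -> Unit,
  result: Result,
  updateMsg: String,
  statusCode: HttpStatusCode,
) {
  logLevel(logger, message)
  result.apply {
    this.updateMsg = updateMsg
    this.statusCode = statusCode
  }
}

fun main() {
  val result = Result()
  // Method references select the level, just like KLogger::info / KLogger::error above.
  updateResponse("Invalid path request /foo", StdoutLogger(), SimpleLogger::info, result, "invalid_path", HttpStatusCode.NotFound)
  println("${result.statusCode} ${result.updateMsg}")
}
```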
shouldBeEqualTo HttpStatusCode.OK + response.bodyAsText() shouldStartWith "pong" } + } } @Test fun agentPingPathTest() { - agentConfigVals.admin - .also { admin -> - blockingGet("${admin.port}/${admin.pingPath}".withPrefix()) { response -> - response.status shouldBeEqualTo HttpStatusCode.OK - response.bodyAsText() shouldStartWith "pong" - } + with(agentConfigVals.admin) { + blockingGet("$port/$pingPath".withPrefix()) { response -> + response.status shouldBeEqualTo HttpStatusCode.OK + response.bodyAsText() shouldStartWith "pong" } + } } @Test fun proxyVersionPathTest() { - agentConfigVals.admin - .also { admin -> - blockingGet("${admin.port}/${admin.versionPath}".withPrefix()) { response -> - response.status shouldBeEqualTo HttpStatusCode.OK - response.bodyAsText() shouldContain "Version" - } + with(agentConfigVals.admin) { + blockingGet("$port/$versionPath".withPrefix()) { response -> + response.status shouldBeEqualTo HttpStatusCode.OK + response.bodyAsText() shouldContain "Version" } + } } @Test fun agentVersionPathTest() { - agentConfigVals.admin - .also { admin -> - blockingGet("${admin.port}/${admin.versionPath}".withPrefix()) { response -> - response.status shouldBeEqualTo HttpStatusCode.OK - response.bodyAsText() shouldContain "Version" - } + with(agentConfigVals.admin) { + blockingGet("$port/$versionPath".withPrefix()) { response -> + response.status shouldBeEqualTo HttpStatusCode.OK + response.bodyAsText() shouldContain "Version" } + } } @Test fun proxyHealthCheckPathTest() { - proxyConfigVals.admin - .also { admin -> - blockingGet("${admin.port}/${admin.healthCheckPath}".withPrefix()) { response -> - response.status shouldBeEqualTo HttpStatusCode.OK - response.bodyAsText().length shouldBeGreaterThan 10 - } + with(proxyConfigVals.admin) { + blockingGet("$port/$healthCheckPath".withPrefix()) { response -> + response.status shouldBeEqualTo HttpStatusCode.OK + response.bodyAsText().length shouldBeGreaterThan 10 } + } } @Test fun agentHealthCheckPathTest() { - agentConfigVals.admin - .also { admin -> - blockingGet("${admin.port}/${admin.healthCheckPath}".withPrefix()) { response -> - response.bodyAsText().length shouldBeGreaterThan 10 - } + with(agentConfigVals.admin) { + blockingGet("$port/$healthCheckPath".withPrefix()) { response -> + response.bodyAsText().length shouldBeGreaterThan 10 } + } } @Test fun proxyThreadDumpPathTest() { - proxyConfigVals.admin - .also { admin -> - blockingGet("${admin.port}/${admin.threadDumpPath}".withPrefix()) { response -> - response.bodyAsText().length shouldBeGreaterThan 10 - } + with(proxyConfigVals.admin) { + blockingGet("$port/$threadDumpPath".withPrefix()) { response -> + response.bodyAsText().length shouldBeGreaterThan 10 } + } } @Test fun agentThreadDumpPathTest() { - blockingGet("${agentConfigVals.admin.port}/${agentConfigVals.admin.threadDumpPath}".withPrefix()) { response -> - response.bodyAsText().length shouldBeGreaterThan 10 + with(agentConfigVals.admin) { + blockingGet("$port/$threadDumpPath".withPrefix()) { response -> + response.bodyAsText().length shouldBeGreaterThan 10 + } } } diff --git a/src/test/kotlin/io/prometheus/AdminEmptyPathTest.kt b/src/test/kotlin/io/prometheus/AdminEmptyPathTest.kt index 32c52e4d..97b1d2a0 100644 --- a/src/test/kotlin/io/prometheus/AdminEmptyPathTest.kt +++ b/src/test/kotlin/io/prometheus/AdminEmptyPathTest.kt @@ -33,52 +33,48 @@ class AdminEmptyPathTest { @Test fun proxyPingPathTest() { - proxyConfigVals.admin - .also { admin -> - admin.port shouldBeEqualTo 8098 - admin.pingPath shouldBeEqualTo "" + 
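The admin-path tests above boil down to issuing a GET against `port/path` and asserting on the status and body. A hedged sketch of such a check with the plain Ktor client, assuming a hypothetical admin listener on port 8092 with a `/ping` endpoint and using stdlib `check` instead of the project's `blockingGet` helper and matcher library:

```kotlin
import io.ktor.client.HttpClient
import io.ktor.client.engine.cio.CIO
import io.ktor.client.request.get
import io.ktor.client.statement.bodyAsText
import io.ktor.http.HttpStatusCode
import kotlinx.coroutines.runBlocking

fun main() = runBlocking {
  HttpClient(CIO).use { client ->
    // Assumed values: admin listener on 8092 exposing a /ping endpoint that returns "pong".
    val response = client.get("http://localhost:8092/ping")
    check(response.status == HttpStatusCode.OK) { "unexpected status ${response.status}" }
    check(response.bodyAsText().startsWith("pong")) { "unexpected body" }
    println("admin ping OK")
  }
}
```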
with(proxyConfigVals.admin) { + port shouldBeEqualTo 8098 + pingPath shouldBeEqualTo "" - blockingGet("${admin.port}/${admin.pingPath}".withPrefix()) { response -> - response.status shouldBeEqualTo HttpStatusCode.NotFound - } + blockingGet("$port/$pingPath".withPrefix()) { response -> + response.status shouldBeEqualTo HttpStatusCode.NotFound } + } } @Test fun proxyVersionPathTest() { - proxyConfigVals.admin - .also { admin -> - admin.port shouldBeEqualTo 8098 - admin.versionPath shouldBeEqualTo "" + with(proxyConfigVals.admin) { + port shouldBeEqualTo 8098 + versionPath shouldBeEqualTo "" - blockingGet("${admin.port}/${admin.versionPath}".withPrefix()) { response -> - response.status shouldBeEqualTo HttpStatusCode.NotFound - } + blockingGet("$port/$versionPath".withPrefix()) { response -> + response.status shouldBeEqualTo HttpStatusCode.NotFound } + } } @Test fun proxyHealthCheckPathTest() { - proxyConfigVals.admin - .also { admin -> - admin.healthCheckPath shouldBeEqualTo "" + with(proxyConfigVals.admin) { + healthCheckPath shouldBeEqualTo "" - blockingGet("${admin.port}/${admin.healthCheckPath}".withPrefix()) { response -> - response.status shouldBeEqualTo HttpStatusCode.NotFound - } + blockingGet("$port/$healthCheckPath".withPrefix()) { response -> + response.status shouldBeEqualTo HttpStatusCode.NotFound } + } } @Test fun proxyThreadDumpPathTest() { - proxyConfigVals.admin - .also { admin -> - admin.threadDumpPath shouldBeEqualTo "" + with(proxyConfigVals.admin) { + threadDumpPath shouldBeEqualTo "" - blockingGet("${admin.port}/${admin.threadDumpPath}".withPrefix()) { response -> - response.status shouldBeEqualTo HttpStatusCode.NotFound - } + blockingGet("$port/$threadDumpPath".withPrefix()) { response -> + response.status shouldBeEqualTo HttpStatusCode.NotFound } + } } companion object : CommonCompanion() { diff --git a/src/test/kotlin/io/prometheus/AdminNonDefaultPathTest.kt b/src/test/kotlin/io/prometheus/AdminNonDefaultPathTest.kt index bff3136b..9e5e4204 100644 --- a/src/test/kotlin/io/prometheus/AdminNonDefaultPathTest.kt +++ b/src/test/kotlin/io/prometheus/AdminNonDefaultPathTest.kt @@ -37,55 +37,51 @@ class AdminNonDefaultPathTest { @Test fun proxyPingPathTest() { - proxyConfigVals.admin - .also { admin -> - admin.port shouldBeEqualTo 8099 - admin.pingPath shouldBeEqualTo "pingPath2" + with(proxyConfigVals.admin) { + port shouldBeEqualTo 8099 + pingPath shouldBeEqualTo "pingPath2" - blockingGet("${admin.port}/${admin.pingPath}".withPrefix()) { response -> - response.status shouldBeEqualTo HttpStatusCode.OK - response.bodyAsText() shouldStartWith "pong" - } + blockingGet("$port/$pingPath".withPrefix()) { response -> + response.status shouldBeEqualTo HttpStatusCode.OK + response.bodyAsText() shouldStartWith "pong" } + } } @Test fun proxyVersionPathTest() { - proxyConfigVals.admin - .also { admin -> - admin.port shouldBeEqualTo 8099 - admin.versionPath shouldBeEqualTo "versionPath2" + with(proxyConfigVals.admin) { + port shouldBeEqualTo 8099 + versionPath shouldBeEqualTo "versionPath2" - blockingGet("${admin.port}/${admin.versionPath}".withPrefix()) { response -> - response.status shouldBeEqualTo HttpStatusCode.OK - response.bodyAsText() shouldContain "Version" - } + blockingGet("$port/$versionPath".withPrefix()) { response -> + response.status shouldBeEqualTo HttpStatusCode.OK + response.bodyAsText() shouldContain "Version" } + } } @Test fun proxyHealthCheckPathTest() { - proxyConfigVals.admin - .also { admin -> - admin.healthCheckPath shouldBeEqualTo "healthCheckPath2" + 
with(proxyConfigVals.admin) { + healthCheckPath shouldBeEqualTo "healthCheckPath2" - blockingGet("${admin.port}/${admin.healthCheckPath}".withPrefix()) { response -> - response.status shouldBeEqualTo HttpStatusCode.OK - response.bodyAsText().length shouldBeGreaterThan 10 - } + blockingGet("$port/$healthCheckPath".withPrefix()) { response -> + response.status shouldBeEqualTo HttpStatusCode.OK + response.bodyAsText().length shouldBeGreaterThan 10 } + } } @Test fun proxyThreadDumpPathTest() { - proxyConfigVals.admin - .also { admin -> - admin.threadDumpPath shouldBeEqualTo "threadDumpPath2" + with(proxyConfigVals.admin) { + threadDumpPath shouldBeEqualTo "threadDumpPath2" - blockingGet("${admin.port}/${admin.threadDumpPath}".withPrefix()) { response -> - response.bodyAsText().length shouldBeGreaterThan 10 - } + blockingGet("$port/$threadDumpPath".withPrefix()) { response -> + response.bodyAsText().length shouldBeGreaterThan 10 } + } } companion object : CommonCompanion() {
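The empty-path tests above expect 404 once an admin path is configured as an empty string, while the non-default tests exercise custom ports and path names. One way to get the behavior the tests assert, sketched with a plain Ktor server standing in for the project's admin service and a hypothetical `AdminPaths` holder:

```kotlin
import io.ktor.server.application.call
import io.ktor.server.cio.CIO
import io.ktor.server.engine.embeddedServer
import io.ktor.server.response.respondText
import io.ktor.server.routing.get
import io.ktor.server.routing.routing

// Hypothetical admin settings; empty strings model the AdminEmptyPathTest configuration.
data class AdminPaths(val pingPath: String = "ping", val versionPath: String = "version")

fun main() {
  val admin = AdminPaths(pingPath = "pingPath2", versionPath = "")
  embeddedServer(CIO, port = 8099) {
    routing {
      // Only non-blank paths get a route; a blank path therefore answers 404 Not Found,
      // which is what the empty-path tests assert.
      if (admin.pingPath.isNotBlank())
        get("/${admin.pingPath}") { call.respondText("pong") }
      if (admin.versionPath.isNotBlank())
        get("/${admin.versionPath}") { call.respondText("Version: 1.20.0") }
    }
  }.start(wait = true)
}
```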