entry : rowIndexToErrorMessage.entrySet()) {
+ logger.severe(
+ String.format(
+ "Row error at index %d: %s", entry.getKey(), entry.getValue()));
+ }
+ } else {
+ logger.severe(
+ "AppendSerializationError occurred, but no row-specific errors were"
+ + " provided.");
+ }
+ }
+ }
+
+ @Override
+ public void onSuccess(AppendRowsResponse result) {
+ if (result.hasError()) {
+ logger.severe("BigQuery append error: " + result.getError().getMessage());
+ for (var error : result.getRowErrorsList()) {
+ logger.severe(
+ String.format(
+ "Row error at index %d: %s", error.getIndex(), error.getMessage()));
+ }
+ } else {
+ logger.fine("Successfully wrote " + batch.size() + " rows to BigQuery.");
+ }
+ }
+ },
+ directExecutor());
+ }
+ } catch (RuntimeException e) {
+ logger.log(Level.SEVERE, "Failed to append rows to StreamWriter", e);
+ }
+ } finally {
+ flushLock.set(false);
+ if (queue.size() >= batchSize && !flushLock.get()) {
+ executor.execute(this::flush);
+ }
+ }
+ }
+
+ private void populateVector(FieldVector vector, int index, Object value) {
+ if (value == null || (value instanceof JsonNode jsonNode && jsonNode.isNull())) {
+ vector.setNull(index);
+ return;
+ }
+
+ if (vector instanceof VarCharVector varCharVector) {
+ String strValue = (value instanceof JsonNode jsonNode) ? jsonNode.asText() : value.toString();
+ varCharVector.setSafe(index, strValue.getBytes(UTF_8));
+ } else if (vector instanceof BigIntVector bigIntVector) {
+ long longValue;
+ if (value instanceof JsonNode jsonNode) {
+ longValue = jsonNode.asLong();
+ } else if (value instanceof Number number) {
+ longValue = number.longValue();
+ } else {
+ longValue = Long.parseLong(value.toString());
+ }
+ bigIntVector.setSafe(index, longValue);
+ } else if (vector instanceof BitVector bitVector) {
+ boolean boolValue =
+ (value instanceof JsonNode jsonNode) ? jsonNode.asBoolean() : (Boolean) value;
+ bitVector.setSafe(index, boolValue ? 1 : 0);
+ } else if (vector instanceof TimeStampVector timeStampVector) {
+ if (value instanceof Instant instant) {
+ long micros =
+ SECONDS.toMicros(instant.getEpochSecond()) + NANOSECONDS.toMicros(instant.getNano());
+ timeStampVector.setSafe(index, micros);
+ } else if (value instanceof JsonNode jsonNode) {
+ timeStampVector.setSafe(index, jsonNode.asLong());
+ } else if (value instanceof Long longValue) {
+ timeStampVector.setSafe(index, longValue);
+ }
+ } else if (vector instanceof ListVector listVector) {
+ int start = listVector.startNewValue(index);
+ if (value instanceof ArrayNode arrayNode) {
+ for (int i = 0; i < arrayNode.size(); i++) {
+ populateVector(listVector.getDataVector(), start + i, arrayNode.get(i));
+ }
+ listVector.endValue(index, arrayNode.size());
+ } else if (value instanceof List) {
+ List> list = (List>) value;
+ for (int i = 0; i < list.size(); i++) {
+ populateVector(listVector.getDataVector(), start + i, list.get(i));
+ }
+ listVector.endValue(index, list.size());
+ }
+ } else if (vector instanceof StructVector structVector) {
+ structVector.setIndexDefined(index);
+ if (value instanceof ObjectNode objectNode) {
+ for (FieldVector child : structVector.getChildrenFromFields()) {
+ populateVector(child, index, objectNode.get(child.getName()));
+ }
+ } else if (value instanceof Map) {
+ Map, ?> map = (Map, ?>) value;
+ for (FieldVector child : structVector.getChildrenFromFields()) {
+ populateVector(child, index, map.get(child.getName()));
+ }
+ }
+ }
+ }
+
+ @Override
+ public void close() {
+ flush();
+ if (writer != null) {
+ writer.close();
+ }
+ if (allocator != null) {
+ allocator.close();
+ }
+ }
+}
diff --git a/core/src/main/java/com/google/adk/plugins/agentanalytics/BigQueryAgentAnalyticsPlugin.java b/core/src/main/java/com/google/adk/plugins/agentanalytics/BigQueryAgentAnalyticsPlugin.java
new file mode 100644
index 000000000..5d486f31e
--- /dev/null
+++ b/core/src/main/java/com/google/adk/plugins/agentanalytics/BigQueryAgentAnalyticsPlugin.java
@@ -0,0 +1,435 @@
+/*
+ * Copyright 2026 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.adk.plugins.agentanalytics;
+
+import static java.util.concurrent.TimeUnit.MILLISECONDS;
+
+import com.google.adk.agents.BaseAgent;
+import com.google.adk.agents.CallbackContext;
+import com.google.adk.agents.InvocationContext;
+import com.google.adk.events.Event;
+import com.google.adk.models.LlmRequest;
+import com.google.adk.models.LlmResponse;
+import com.google.adk.plugins.BasePlugin;
+import com.google.adk.tools.BaseTool;
+import com.google.adk.tools.ToolContext;
+import com.google.api.gax.core.FixedCredentialsProvider;
+import com.google.api.gax.retrying.RetrySettings;
+import com.google.auth.oauth2.GoogleCredentials;
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQueryException;
+import com.google.cloud.bigquery.BigQueryOptions;
+import com.google.cloud.bigquery.Clustering;
+import com.google.cloud.bigquery.Schema;
+import com.google.cloud.bigquery.StandardTableDefinition;
+import com.google.cloud.bigquery.Table;
+import com.google.cloud.bigquery.TableId;
+import com.google.cloud.bigquery.TableInfo;
+import com.google.cloud.bigquery.storage.v1.BigQueryWriteClient;
+import com.google.cloud.bigquery.storage.v1.BigQueryWriteSettings;
+import com.google.cloud.bigquery.storage.v1.StreamWriter;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.VerifyException;
+import com.google.common.collect.ImmutableList;
+import com.google.genai.types.Content;
+import io.opentelemetry.api.trace.Span;
+import io.opentelemetry.api.trace.SpanContext;
+import io.reactivex.rxjava3.core.Completable;
+import io.reactivex.rxjava3.core.Maybe;
+import java.io.IOException;
+import java.time.Instant;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Optional;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import org.threeten.bp.Duration;
+
+/**
+ * BigQuery Agent Analytics Plugin for Java.
+ *
+ * Logs agent execution events directly to a BigQuery table using the Storage Write API.
+ */
+public class BigQueryAgentAnalyticsPlugin extends BasePlugin {
+ private static final Logger logger =
+ Logger.getLogger(BigQueryAgentAnalyticsPlugin.class.getName());
+ private static final ImmutableList DEFAULT_AUTH_SCOPES =
+ ImmutableList.of("https://www.googleapis.com/auth/cloud-platform");
+ private static final AtomicLong threadCounter = new AtomicLong(0);
+
+ private final BigQueryLoggerConfig config;
+ private final BigQuery bigQuery;
+ private final BigQueryWriteClient writeClient;
+ private final ScheduledExecutorService executor;
+ private final Object tableEnsuredLock = new Object();
+ @VisibleForTesting final BatchProcessor batchProcessor;
+ private volatile boolean tableEnsured = false;
+
  /**
   * Creates the plugin with a {@link BigQuery} client built from the supplied config — using
   * its credentials when set, otherwise Application Default Credentials.
   *
   * @param config logger configuration (project/dataset/table, batching, credentials)
   * @throws IOException if Application Default Credentials cannot be loaded
   */
  public BigQueryAgentAnalyticsPlugin(BigQueryLoggerConfig config) throws IOException {
    this(config, createBigQuery(config));
  }
+
  /**
   * Creates the plugin with an externally supplied {@link BigQuery} client.
   *
   * <p>A single-threaded scheduled executor drives batching. When {@code config.enabled()} is
   * false, no {@link StreamWriter} or {@link BatchProcessor} is created and all logging calls
   * become no-ops ({@code batchProcessor} stays null).
   *
   * @param config logger configuration (batch size, flush interval, queue bound, credentials)
   * @param bigQuery client used for table existence checks and creation
   * @throws IOException if the BigQuery Storage write client cannot be created
   */
  public BigQueryAgentAnalyticsPlugin(BigQueryLoggerConfig config, BigQuery bigQuery)
      throws IOException {
    super("bigquery_agent_analytics");
    this.config = config;
    this.bigQuery = bigQuery;
    ThreadFactory threadFactory =
        r -> new Thread(r, "bq-analytics-plugin-" + threadCounter.getAndIncrement());
    this.executor = Executors.newScheduledThreadPool(1, threadFactory);
    // NOTE(review): createWriteClient/createWriter are overridable and invoked from the
    // constructor; subclasses must not rely on their own (not-yet-initialized) state here.
    this.writeClient = createWriteClient(config);

    if (config.enabled()) {
      // NOTE(review): if createWriter throws, the executor and writeClient created above are
      // not released — consider cleanup on construction failure.
      StreamWriter writer = createWriter(config);
      this.batchProcessor =
          new BatchProcessor(
              writer,
              config.batchSize(),
              config.batchFlushInterval(),
              config.queueMaxSize(),
              executor);
      this.batchProcessor.start();
    } else {
      this.batchProcessor = null;
    }
  }
+
+ private static BigQuery createBigQuery(BigQueryLoggerConfig config) throws IOException {
+ BigQueryOptions.Builder builder = BigQueryOptions.newBuilder();
+ if (config.credentials() != null) {
+ builder.setCredentials(config.credentials());
+ } else {
+ builder.setCredentials(
+ GoogleCredentials.getApplicationDefault().createScoped(DEFAULT_AUTH_SCOPES));
+ }
+ return builder.build().getService();
+ }
+
  /**
   * Ensures the destination table exists, performing the check at most once per plugin
   * instance (classic double-checked locking on the volatile {@code tableEnsured} flag).
   *
   * <p>The flag is deliberately set <em>before</em> {@link #ensureTableExists} runs, so a
   * failed attempt is not retried on later events — creation errors are logged, not thrown.
   */
  private void ensureTableExistsOnce() {
    if (!tableEnsured) {
      synchronized (tableEnsuredLock) {
        if (!tableEnsured) {
          // Table creation is expensive, so we only do it once per plugin instance.
          tableEnsured = true;
          ensureTableExists(bigQuery, config);
        }
      }
    }
  }
+
+ private void ensureTableExists(BigQuery bigQuery, BigQueryLoggerConfig config) {
+ TableId tableId = TableId.of(config.projectId(), config.datasetId(), config.tableName());
+ Schema schema = BigQuerySchema.getEventsSchema();
+ try {
+ Table table = bigQuery.getTable(tableId);
+ logger.info("BigQuery table: " + tableId);
+ if (table == null) {
+ logger.info("Creating BigQuery table: " + tableId);
+ StandardTableDefinition.Builder tableDefinitionBuilder =
+ StandardTableDefinition.newBuilder().setSchema(schema);
+ if (!config.clusteringFields().isEmpty()) {
+ tableDefinitionBuilder.setClustering(
+ Clustering.newBuilder().setFields(config.clusteringFields()).build());
+ }
+ TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinitionBuilder.build()).build();
+ bigQuery.create(tableInfo);
+ } else if (config.autoSchemaUpgrade()) {
+ // TODO(vmaliuta): Implement auto-schema upgrade.
+ logger.info("BigQuery table already exists and auto-schema upgrade is enabled: " + tableId);
+ logger.info("Auto-schema upgrade is not implemented yet.");
+ }
+ } catch (BigQueryException e) {
+ if (e.getMessage().contains("invalid_grant")) {
+ logger.log(
+ Level.SEVERE,
+ "Failed to authenticate with BigQuery. Please run 'gcloud auth application-default"
+ + " login' to refresh your credentials or provide valid credentials in"
+ + " BigQueryLoggerConfig.",
+ e);
+ } else {
+ logger.log(
+ Level.WARNING, "Failed to check or create/upgrade BigQuery table: " + tableId, e);
+ }
+ } catch (RuntimeException e) {
+ logger.log(Level.WARNING, "Failed to check or create/upgrade BigQuery table: " + tableId, e);
+ }
+ }
+
+ protected BigQueryWriteClient createWriteClient(BigQueryLoggerConfig config) throws IOException {
+ if (config.credentials() != null) {
+ return BigQueryWriteClient.create(
+ BigQueryWriteSettings.newBuilder()
+ .setCredentialsProvider(FixedCredentialsProvider.create(config.credentials()))
+ .build());
+ }
+ return BigQueryWriteClient.create();
+ }
+
+ protected String getStreamName(BigQueryLoggerConfig config) {
+ return String.format(
+ "projects/%s/datasets/%s/tables/%s/streams/_default",
+ config.projectId(), config.datasetId(), config.tableName());
+ }
+
+ protected StreamWriter createWriter(BigQueryLoggerConfig config) {
+ BigQueryLoggerConfig.RetryConfig rc = config.retryConfig();
+ RetrySettings retrySettings =
+ RetrySettings.newBuilder()
+ .setMaxAttempts(rc.maxRetries())
+ .setInitialRetryDelay(Duration.ofMillis(rc.initialDelay().toMillis()))
+ .setRetryDelayMultiplier(rc.multiplier())
+ .setMaxRetryDelay(Duration.ofMillis(rc.maxDelay().toMillis()))
+ .build();
+
+ String streamName = getStreamName(config);
+ try {
+ return StreamWriter.newBuilder(streamName, writeClient)
+ .setRetrySettings(retrySettings)
+ .setWriterSchema(BigQuerySchema.getArrowSchema())
+ .build();
+ } catch (Exception e) {
+ throw new VerifyException("Failed to create StreamWriter for " + streamName, e);
+ }
+ }
+
+ private void logEvent(
+ String eventType,
+ InvocationContext invocationContext,
+ Optional callbackContext,
+ Object content,
+ Map extraAttributes) {
+ if (batchProcessor == null) {
+ return;
+ }
+
+ ensureTableExistsOnce();
+
+ Map row = new HashMap<>();
+ row.put("timestamp", Instant.now());
+ row.put("event_type", eventType);
+ row.put(
+ "agent",
+ callbackContext.map(CallbackContext::agentName).orElse(invocationContext.agent().name()));
+ row.put("session_id", invocationContext.session().id());
+ row.put("invocation_id", invocationContext.invocationId());
+ row.put("user_id", invocationContext.userId());
+
+ if (content instanceof Content contentParts) {
+ row.put(
+ "content_parts",
+ JsonFormatter.formatContentParts(Optional.of(contentParts), config.maxContentLength()));
+ row.put(
+ "content", JsonFormatter.smartTruncate(content, config.maxContentLength()).toString());
+ } else if (content != null) {
+ row.put(
+ "content", JsonFormatter.smartTruncate(content, config.maxContentLength()).toString());
+ }
+
+ Map attributes = new HashMap<>(config.customTags());
+ if (extraAttributes != null) {
+ attributes.putAll(extraAttributes);
+ }
+ row.put(
+ "attributes",
+ JsonFormatter.smartTruncate(attributes, config.maxContentLength()).toString());
+
+ addTraceDetails(row);
+ batchProcessor.append(row);
+ }
+
+ private void addTraceDetails(Map row) {
+ SpanContext spanContext = Span.current().getSpanContext();
+ if (spanContext.isValid()) {
+ row.put("trace_id", spanContext.getTraceId());
+ row.put("span_id", spanContext.getSpanId());
+ }
+ }
+
+ @Override
+ public Completable close() {
+ if (batchProcessor != null) {
+ batchProcessor.close();
+ }
+ if (writeClient != null) {
+ writeClient.close();
+ }
+ try {
+ executor.shutdown();
+ if (!executor.awaitTermination(config.shutdownTimeout().toMillis(), MILLISECONDS)) {
+ executor.shutdownNow();
+ }
+ } catch (InterruptedException e) {
+ executor.shutdownNow();
+ Thread.currentThread().interrupt();
+ }
+ return Completable.complete();
+ }
+
+ @Override
+ public Maybe onUserMessageCallback(
+ InvocationContext invocationContext, Content userMessage) {
+ return Maybe.fromAction(
+ () -> logEvent("USER_MESSAGE", invocationContext, Optional.empty(), userMessage, null));
+ }
+
+ @Override
+ public Maybe beforeRunCallback(InvocationContext invocationContext) {
+ return Maybe.fromAction(
+ () -> logEvent("INVOCATION_START", invocationContext, Optional.empty(), null, null));
+ }
+
+ @Override
+ public Maybe onEventCallback(InvocationContext invocationContext, Event event) {
+ return Maybe.fromAction(
+ () -> {
+ Map attrs = new HashMap<>();
+ attrs.put("event_author", event.author());
+ logEvent(
+ "EVENT", invocationContext, Optional.empty(), event.content().orElse(null), attrs);
+ });
+ }
+
+ @Override
+ public Completable afterRunCallback(InvocationContext invocationContext) {
+ return Completable.fromAction(
+ () -> {
+ logEvent("INVOCATION_END", invocationContext, Optional.empty(), null, null);
+ batchProcessor.flush();
+ });
+ }
+
+ @Override
+ public Maybe beforeAgentCallback(BaseAgent agent, CallbackContext callbackContext) {
+ return Maybe.fromAction(
+ () ->
+ logEvent(
+ "AGENT_START",
+ callbackContext.invocationContext(),
+ Optional.of(callbackContext),
+ null,
+ null));
+ }
+
+ @Override
+ public Maybe afterAgentCallback(BaseAgent agent, CallbackContext callbackContext) {
+ return Maybe.fromAction(
+ () ->
+ logEvent(
+ "AGENT_END",
+ callbackContext.invocationContext(),
+ Optional.of(callbackContext),
+ null,
+ null));
+ }
+
+ @Override
+ public Maybe beforeModelCallback(
+ CallbackContext callbackContext, LlmRequest.Builder llmRequest) {
+ return Maybe.fromAction(
+ () -> {
+ Map attrs = new HashMap<>();
+ LlmRequest req = llmRequest.build();
+ attrs.put("model", req.model().orElse("unknown"));
+ logEvent(
+ "MODEL_REQUEST",
+ callbackContext.invocationContext(),
+ Optional.of(callbackContext),
+ req,
+ attrs);
+ });
+ }
+
+ @Override
+ public Maybe afterModelCallback(
+ CallbackContext callbackContext, LlmResponse llmResponse) {
+ return Maybe.fromAction(
+ () -> {
+ Map attrs = new HashMap<>();
+ llmResponse.usageMetadata().ifPresent(u -> attrs.put("usage_metadata", u));
+ logEvent(
+ "MODEL_RESPONSE",
+ callbackContext.invocationContext(),
+ Optional.of(callbackContext),
+ llmResponse,
+ attrs);
+ });
+ }
+
+ @Override
+ public Maybe onModelErrorCallback(
+ CallbackContext callbackContext, LlmRequest.Builder llmRequest, Throwable error) {
+ return Maybe.fromAction(
+ () -> {
+ Map attrs = new HashMap<>();
+ attrs.put("error_message", error.getMessage());
+ logEvent(
+ "MODEL_ERROR",
+ callbackContext.invocationContext(),
+ Optional.of(callbackContext),
+ null,
+ attrs);
+ });
+ }
+
+ @Override
+ public Maybe