From 183da34bfba4f57e372fb336182f45429a26da4a Mon Sep 17 00:00:00 2001 From: Benen Harrington Date: Wed, 15 Oct 2025 10:55:29 +0100 Subject: [PATCH 01/14] Add details on allowing audit log access to non-Owners --- reference/audit-log.mdx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/reference/audit-log.mdx b/reference/audit-log.mdx index f460edc0..2c705371 100644 --- a/reference/audit-log.mdx +++ b/reference/audit-log.mdx @@ -18,7 +18,7 @@ The audit log also make it easier to manage your Axiom organization. They allow - Track changes made by your team to your observability posture. - Track monitoring performance. -The audit log is available to all users. By default, you can query the audit log for the previous three days. You can request the ability to query the audit log for the full time range as an add-on on the Axiom Cloud plan. For more information, see [Manage add-ons](reference/usage-billing#manage-add-ons). +The audit log is available to all organizations. By default, you can query the audit log for the previous three days. You can purchase full access to the audit log as an add-on on the Axiom Cloud plan. For more information, see [Manage add-ons](reference/usage-billing#manage-add-ons). ## Explore audit log @@ -36,7 +36,7 @@ The `action` field specifies the type of activity that happened in your Axiom or ## Restrict access to audit log -To restrict access to the audit log, use Axiom’s role-based access control to define who can access the `axiom-audit` dataset. For more information, see [Access](/reference/settings). +The audit log is only accessible to users with the Owner role. To allow other users to access certain events in the audit log, first create a [view](/query-data/views) that defines the desired scope, then grant access to the view using Axiom's role-based access control. For more information, see [Access](/reference/settings). ## List of trackable actions From c27f78a743454af97c87cf47ebab8e619010cc0e Mon Sep 17 00:00:00 2001 From: Benen Harrington Date: Mon, 3 Nov 2025 16:23:01 +0000 Subject: [PATCH 02/14] Update endpoints for edge ingest --- apps/hex.mdx | 2 +- guides/send-logs-from-apache-log4j.mdx | 2 +- guides/send-logs-from-dotnet.mdx | 10 +++++----- guides/send-logs-from-laravel.mdx | 12 +++++++----- guides/send-logs-from-ruby-on-rails.mdx | 2 +- reference/regions.mdx | 4 ++-- restapi/ingest.mdx | 16 ++++++++-------- restapi/introduction.mdx | 2 +- send-data/aws-firehose.mdx | 2 +- send-data/aws-firelens.mdx | 6 +++--- send-data/aws-iot-rules.mdx | 8 ++++---- send-data/aws-lambda-dot.mdx | 2 +- send-data/aws-s3.mdx | 6 +++--- send-data/cribl.mdx | 2 +- send-data/fluent-bit.mdx | 4 ++-- send-data/fluentd.mdx | 8 ++++---- send-data/heroku-log-drains.mdx | 2 +- send-data/kubernetes.mdx | 18 +++++++++--------- send-data/methods.mdx | 2 +- send-data/tremor.mdx | 2 +- snippets/replace-domain.mdx | 2 +- 21 files changed, 58 insertions(+), 56 deletions(-) diff --git a/apps/hex.mdx b/apps/hex.mdx index 0bcebd0c..a7239565 100644 --- a/apps/hex.mdx +++ b/apps/hex.mdx @@ -21,7 +21,7 @@ This page explains how to integrate Hex with Axiom to visualize geospatial data Send your sample location data to Axiom using the API endpoint. For example, the following HTTP request sends sample robot location data with latitude, longitude, status, and satellite information. 
```bash -curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \ +curl -X 'POST' 'https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/json' \ -d '[ diff --git a/guides/send-logs-from-apache-log4j.mdx b/guides/send-logs-from-apache-log4j.mdx index 5a4e9be1..78366f2b 100644 --- a/guides/send-logs-from-apache-log4j.mdx +++ b/guides/send-logs-from-apache-log4j.mdx @@ -354,7 +354,7 @@ To configure Fluentd, create a configuration file. Create a new file named `flue @type http - endpoint https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest + endpoint https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET} headers {"Authorization":"Bearer API_TOKEN"} json_array true diff --git a/guides/send-logs-from-dotnet.mdx b/guides/send-logs-from-dotnet.mdx index c154b0ea..2861be1a 100644 --- a/guides/send-logs-from-dotnet.mdx +++ b/guides/send-logs-from-dotnet.mdx @@ -51,8 +51,8 @@ public static class AxiomLogger // Specify the Axiom dataset name and construct the API endpoint URL var datasetName = "DATASET_NAME"; - var axiomDomain = "AXIOM_DOMAIN"; - var axiomUri = $"https://{axiomDomain}/v1/datasets/{datasetName}/ingest"; + var region = "us-east-1.aws"; // Replace with "eu-central-1.aws" for EU region + var axiomUri = $"https://{region}.edge.axiom.co/v1/ingest/{datasetName}"; // Replace with your Axiom API token var apiToken = "API_TOKEN"; // Ensure your API token is correct @@ -244,7 +244,7 @@ public class AxiomConfig { public const string DatasetName = "DATASET_NAME"; public const string ApiToken = "API_TOKEN"; - public const string ApiUrl = "https://AXIOM_DOMAIN/v1/datasets"; + public const string ApiUrl = "https://{REGION}.edge.axiom.co/v1"; } public class AxiomHttpClient : IHttpClient @@ -283,7 +283,7 @@ public class Program .MinimumLevel.Debug() .WriteTo.Console() .WriteTo.Http( - requestUri: $"{AxiomConfig.ApiUrl}/{AxiomConfig.DatasetName}/ingest", + requestUri: $"{AxiomConfig.ApiUrl}/ingest/{AxiomConfig.DatasetName}", queueLimitBytes: null, textFormatter: new ElasticsearchJsonFormatter(renderMessageTemplate: false, inlineFields: true), httpClient: new AxiomHttpClient() @@ -415,7 +415,7 @@ Set up NLog by creating an `NLog.config` file or configuring it programmatically flushTimeout="5000"> diff --git a/guides/send-logs-from-laravel.mdx b/guides/send-logs-from-laravel.mdx index 710c2b4e..f6b48e3a 100644 --- a/guides/send-logs-from-laravel.mdx +++ b/guides/send-logs-from-laravel.mdx @@ -74,6 +74,7 @@ return [ 'with' => [ 'apiToken' => env('AXIOM_API_TOKEN'), 'dataset' => env('AXIOM_DATASET'), + 'region' => env('AXIOM_REGION', 'us-east-1.aws'), // Default to US region ], ], @@ -132,7 +133,7 @@ The `default` configuration specifies the primary channel Laravel uses for loggi LOG_CHANNEL=axiom AXIOM_API_TOKEN=API_TOKEN AXIOM_DATASET=DATASET_NAME -AXIOM_URL=AXIOM_DOMAIN +AXIOM_REGION=us-east-1.aws LOG_LEVEL=debug LOG_DEPRECATIONS_CHANNEL=null ``` @@ -172,7 +173,7 @@ The heart of the `logging.php` file lies within the **`channels`** array where y 'with' => [ 'apiToken' => env('AXIOM_API_TOKEN'), 'dataset' => env('AXIOM_DATASET'), - 'axiomUrl' => env('AXIOM_URL'), + 'region' => env('AXIOM_REGION', 'us-east-1.aws'), ], ], ``` @@ -248,18 +249,19 @@ class AxiomHandler extends AbstractProcessingHandler { private $apiToken; private $dataset; + private $region; - public function __construct($level = Logger::DEBUG, bool $bubble = true, $apiToken = null, $dataset = null) + public function __construct($level = Logger::DEBUG, 
bool $bubble = true, $apiToken = null, $dataset = null, $region = null) { parent::__construct($level, $bubble); $this->apiToken = $apiToken; $this->dataset = $dataset; - $this->axiomUrl = $axiomUrl; + $this->region = $region ?? 'us-east-1.aws'; // Default to US region } private function initializeCurl(): \CurlHandle { - $endpoint = "https://{$this->axiomUrl}/v1/datasets/{$this->dataset}/ingest"; + $endpoint = "https://{$this->region}.edge.axiom.co/v1/ingest/{$this->dataset}"; $ch = curl_init($endpoint); curl_setopt($ch, CURLOPT_RETURNTRANSFER, true); diff --git a/guides/send-logs-from-ruby-on-rails.mdx b/guides/send-logs-from-ruby-on-rails.mdx index cc93172d..31b99dcc 100644 --- a/guides/send-logs-from-ruby-on-rails.mdx +++ b/guides/send-logs-from-ruby-on-rails.mdx @@ -48,7 +48,7 @@ require 'json' class AxiomLogger def self.send_log(log_data) dataset_name = "DATASET_NAME" - axiom_ingest_api_url = "https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest" + axiom_ingest_api_url = "https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}" ingest_token = "API_TOKEN" conn = Faraday.new(url: axiom_ingest_api_url) do |faraday| diff --git a/reference/regions.mdx b/reference/regions.mdx index 317c0f6a..36a1ef63 100644 --- a/reference/regions.mdx +++ b/reference/regions.mdx @@ -34,8 +34,8 @@ If your organization uses the EU region, change the base domain in the examples | Region | Base domain of API endpoints | Example | | ---------- | ------------------------------------ | ------------------------------------------------- | -| US | `https://api.axiom.co` | `https://api.axiom.co/v1/datasets/DATASET_NAME/ingest` | -| EU | `https://api.eu.axiom.co` | `https://api.eu.axiom.co/v1/datasets/DATASET_NAME/ingest` | +| US | `https://us-east-1.aws.edge.axiom.co` | `https://us-east-1.aws.edge.axiom.co/v1/ingest/{AXIOM_DATASET}` | +| EU | `https://eu-central-1.aws.edge.axiom.co` | `https://eu-central-1.aws.edge.axiom.co/v1/ingest/{AXIOM_DATASET}` | ## Add new region diff --git a/restapi/ingest.mdx b/restapi/ingest.mdx index dad43470..90201d42 100644 --- a/restapi/ingest.mdx +++ b/restapi/ingest.mdx @@ -35,7 +35,7 @@ To send data to Axiom in JSON format: 1. Optional: In the body of the request, set optional parameters such as `timestamp-field` and `timestamp-format`. For more information, see the [ingest data API reference](/restapi/endpoints/ingestIntoDataset). 1. Set the `Content-Type` header to `application/json`. 1. Set the `Authorization` header to `Bearer API_TOKEN`. -1. Send the POST request to `https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest`. +1. Send the POST request to `https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}`. @@ -49,7 +49,7 @@ The following example request contains grouped events. 
The structure of the JSON **Example request** ```bash -curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \ +curl -X 'POST' 'https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/json' \ -d '[ @@ -87,7 +87,7 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \ **Example request** ```bash -curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \ +curl -X 'POST' 'https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/json' \ -d '[ @@ -155,7 +155,7 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \ **Example request** ```bash -curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \ +curl -X 'POST' 'https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/json' \ -d '[{ "axiom": { @@ -202,7 +202,7 @@ To send data to Axiom in NDJSON format: 1. Optional: In the body of the request, set optional parameters such as `timestamp-field` and `timestamp-format`. For more information, see the [ingest data API reference](/restapi/endpoints/ingestIntoDataset). 1. Set the `Content-Type` header to either `application/json` or `application/x-ndjson`. 1. Set the `Authorization` header to `Bearer API_TOKEN`. Replace `API_TOKEN` with the Axiom API token you have generated. -1. Send the POST request to `https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest`. Replace `DATASET_NAME` with the name of the Axiom dataset where you want to send data. +1. Send the POST request to `https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}`. Replace `DATASET_NAME` with the name of the Axiom dataset where you want to send data. @@ -212,7 +212,7 @@ To send data to Axiom in NDJSON format: **Example request** ```bash -curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \ +curl -X 'POST' 'https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/x-ndjson' \ -d '{"id":1,"name":"machala"} @@ -249,7 +249,7 @@ To send data to Axiom in JSON format: 1. Optional: In the body of the request, set optional parameters such as `timestamp-field` and `timestamp-format`. For more information, see the [ingest data API reference](/restapi/endpoints/ingestIntoDataset). 1. Set the `Content-Type` header to `text/csv`. 1. Set the `Authorization` header to `Bearer API_TOKEN`. Replace `API_TOKEN` with the Axiom API token you have generated. -1. Send the POST request to `https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest`. Replace `DATASET_NAME` with the name of the Axiom dataset where you want to send data. +1. Send the POST request to `https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}`. Replace `DATASET_NAME` with the name of the Axiom dataset where you want to send data. @@ -259,7 +259,7 @@ To send data to Axiom in JSON format: **Example request** ```bash -curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \ +curl -X 'POST' 'https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: text/csv' \ -d 'user, name diff --git a/restapi/introduction.mdx b/restapi/introduction.mdx index c88a47dc..8b43b2b9 100644 --- a/restapi/introduction.mdx +++ b/restapi/introduction.mdx @@ -25,7 +25,7 @@ Axiom API follows the REST architectural style and uses JSON for serialization. 
For example, the following curl command ingests data to an Axiom dataset: ```bash -curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \ +curl -X 'POST' 'https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/json' \ -d '[ diff --git a/send-data/aws-firehose.mdx b/send-data/aws-firehose.mdx index d88f850a..b3db21fb 100644 --- a/send-data/aws-firehose.mdx +++ b/send-data/aws-firehose.mdx @@ -23,7 +23,7 @@ Amazon Data Firehose is a service for delivering real-time streaming data to dif 1. In Axiom, determine the ID of the dataset you’ve created. 1. In Amazon Data Firehose, create an HTTP endpoint destination. For more information, see the [Amazon Data Firehose documentation](https://docs.aws.amazon.com/firehose/latest/dev/create-destination.html#create-destination-http). -1. Set HTTP endpoint URL to `https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest/firehose`. +1. Set HTTP endpoint URL to `https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}/firehose`. diff --git a/send-data/aws-firelens.mdx b/send-data/aws-firelens.mdx index 8781d0e6..814cad8e 100644 --- a/send-data/aws-firelens.mdx +++ b/send-data/aws-firelens.mdx @@ -40,9 +40,9 @@ You’ll typically define this in a file called `fluent-bit.conf`: [OUTPUT] Name http Match * - Host AXIOM_DOMAIN + Host {REGION}.edge.axiom.co Port 443 - URI /v1/datasets/DATASET_NAME/ingest + URI /v1/ingest/DATASET_NAME Format json_lines tls On format json @@ -104,7 +104,7 @@ Create the `fluentd.conf` file and add your configuration: @type http headers {"Authorization": "Bearer API_TOKEN"} data_type json - endpoint https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest + endpoint https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET} sourcetype ecs ``` diff --git a/send-data/aws-iot-rules.mdx b/send-data/aws-iot-rules.mdx index fc1a0b95..07b733e0 100644 --- a/send-data/aws-iot-rules.mdx +++ b/send-data/aws-iot-rules.mdx @@ -27,12 +27,12 @@ import json # Import the json module to handle JSON data import requests # Import the requests module to make HTTP requests def lambda_handler(event, context): - # Retrieve the dataset name and the Axiom domain from the environment variables + # Retrieve the dataset name and the Axiom region from the environment variables dataset_name = os.environ['DATASET_NAME'] - axiom_domain = os.environ['AXIOM_DOMAIN'] + region = os.environ['REGION'] # Construct the Axiom API URL using the dataset name - axiom_api_url = f"https://{axiom_domain}/v1/datasets/{dataset_name}/ingest" + axiom_api_url = f"https://{region}.edge.axiom.co/v1/ingest/{dataset_name}" # Retrieve the Axiom API token from the environment variable api_token = os.environ['API_TOKEN'] @@ -62,7 +62,7 @@ def lambda_handler(event, context): In the environment variables section of the Lambda function configuration, add the following environment variables: - `DATASET_NAME` is the name of the Axiom dataset where you want to send data. -- `AXIOM_DOMAIN` is the Axiom domain that your organization uses. For more information, see [Regions](/reference/regions). +- `REGION` is the Axiom region that your organization uses. Replace `REGION` with `us-east-1.aws` for the US region, or `eu-central-1.aws` for the EU region. For more information, see [Regions](/reference/regions). - `API_TOKEN` is the Axiom API token you have generated. For added security, store the API token in an environment variable. 
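If you manage the function with the AWS CLI rather than the console, a minimal sketch for setting these variables might look like the following. The function name `axiom-iot-forwarder` is an assumption, and the values follow the placeholder conventions above — substitute your own.

```bash
# Sketch: set the forwarder’s environment variables with the AWS CLI.
# The function name and values are placeholders — replace them with your own.
aws lambda update-function-configuration \
  --function-name axiom-iot-forwarder \
  --environment "Variables={DATASET_NAME=DATASET_NAME,REGION=us-east-1.aws,API_TOKEN=API_TOKEN}"
```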
diff --git a/send-data/aws-lambda-dot.mdx b/send-data/aws-lambda-dot.mdx index 7b0b50ea..4472ebae 100644 --- a/send-data/aws-lambda-dot.mdx +++ b/send-data/aws-lambda-dot.mdx @@ -80,7 +80,7 @@ receivers: exporters: otlphttp: compression: gzip - endpoint: https://AXIOM_DOMAIN + endpoint: https://{REGION}.edge.axiom.co headers: authorization: Bearer API_TOKEN x-axiom-dataset: DATASET_NAME diff --git a/send-data/aws-s3.mdx b/send-data/aws-s3.mdx index 7ba6ef3f..6a711614 100644 --- a/send-data/aws-s3.mdx +++ b/send-data/aws-s3.mdx @@ -102,8 +102,8 @@ def lambda_handler(event, context): # Prepare Axiom API request dataset_name = os.environ['DATASET_NAME'] - axiom_domain = os.environ['AXIOM_DOMAIN'] - axiom_api_url = f"https://{axiom_domain}/v1/datasets/{dataset_name}/ingest" + region = os.environ['REGION'] + axiom_api_url = f"https://{region}.edge.axiom.co/v1/ingest/{dataset_name}" api_token = os.environ['API_TOKEN'] axiom_headers = { "Authorization": f"Bearer {api_token}", @@ -125,7 +125,7 @@ def lambda_handler(event, context): In the environment variables section of the Lambda function configuration, add the following environment variables: - `DATASET_NAME` is the name of the Axiom dataset where you want to send data. -- `AXIOM_DOMAIN` is the Axiom domain that your organization uses. For more information, see [Regions](/reference/regions). +- `REGION` is the Axiom region that your organization uses. Replace `REGION` with `us-east-1.aws` for the US region, or `eu-central-1.aws` for the EU region. For more information, see [Regions](/reference/regions). - `API_TOKEN` is the Axiom API token you have generated. For added security, store the API token in an environment variable. diff --git a/send-data/cribl.mdx b/send-data/cribl.mdx index 67e5f0b5..ad50f54e 100644 --- a/send-data/cribl.mdx +++ b/send-data/cribl.mdx @@ -36,7 +36,7 @@ Open Cribl’s UI and navigate to **Destinations > HTTP**. Click on `+` Add New - **Name:** Choose a name for the destination. -- **Endpoint URL:** The URL of your Axiom log ingest endpoint `https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest`. +- **Endpoint URL:** The URL of your Axiom log ingest endpoint `https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}`. 
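Before pointing Cribl at this destination, it can help to confirm that the endpoint URL and token accept events with a one-off request. A minimal sketch using the same placeholders (replace them as described in [Regions](/reference/regions)):

```bash
# Sketch: send a single test event to verify the ingest endpoint and token.
curl -X 'POST' 'https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}' \
  -H 'Authorization: Bearer API_TOKEN' \
  -H 'Content-Type: application/json' \
  -d '[{ "source": "cribl-test", "message": "hello from curl" }]'
```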
diff --git a/send-data/fluent-bit.mdx b/send-data/fluent-bit.mdx index ecfcfe3e..a944a6dc 100644 --- a/send-data/fluent-bit.mdx +++ b/send-data/fluent-bit.mdx @@ -27,9 +27,9 @@ Fluent Bit is an open-source log processor and forwarder that allows you to coll [OUTPUT] Name http Match * - Host AXIOM_DOMAIN + Host {REGION}.edge.axiom.co Port 443 - URI /v1/datasets/DATASET_NAME/ingest + URI /v1/ingest/DATASET_NAME Header Authorization Bearer API_TOKEN Compress gzip Format json diff --git a/send-data/fluentd.mdx b/send-data/fluentd.mdx index d5748a49..76e8b2b8 100644 --- a/send-data/fluentd.mdx +++ b/send-data/fluentd.mdx @@ -49,7 +49,7 @@ The example below shows a Fluentd configuration that sends data to Axiom using t @type http - endpoint https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest + endpoint https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET} # Authorization Bearer should be an ingest token headers {"Authorization": "Bearer API_TOKEN"} json_array false @@ -257,7 +257,7 @@ The example below shows a Fluentd configuration that sends Scala data to Axiom # Send Scala logs using HTTP plugin to Axiom @type http - endpoint "#{ENV['FLUENT_HTTP_ENDPOINT'] || 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest'}" + endpoint "#{ENV['FLUENT_HTTP_ENDPOINT'] || 'https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}'}" headers {"Authorization": "Bearer #{ENV['FLUENT_HTTP_TOKEN'] || ''}"} @type json @@ -309,7 +309,7 @@ The example below shows a Fluentd configuration that sends data from your virtua @id out_http_axiom @log_level info endpoint "#{ENV['AXIOM_URL'] || 'https://api.axiom.co'}" - path "/v1/datasets/DATASET_NAME/ingest" + path "/v1/ingest/DATASET_NAME" ssl_verify "#{ENV['AXIOM_SSL_VERIFY'] || 'true'}" Authorization "Bearer API_TOKEN" @@ -360,7 +360,7 @@ The example below shows a Fluentd configuration that sends data from your virtua @id out_http_axiom @log_level info endpoint "#{ENV['AXIOM_URL'] || 'https://api.axiom.co'}" - path "/v1/datasets/DATASET_NAME/ingest" + path "/v1/ingest/DATASET_NAME" ssl_verify "#{ENV['AXIOM_SSL_VERIFY'] || 'true'}" Authorization "Bearer API_TOKEN" diff --git a/send-data/heroku-log-drains.mdx b/send-data/heroku-log-drains.mdx index 5538b3fb..d364a648 100644 --- a/send-data/heroku-log-drains.mdx +++ b/send-data/heroku-log-drains.mdx @@ -28,7 +28,7 @@ Sign up and login to your account on [Heroku](https://heroku.com/), and download Heroku log drains configuration consists of three main components ```bash -heroku drains:add https://axiom:API_TOKEN@AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest -a HEROKU_APPLICATION_NAME +heroku drains:add https://axiom:API_TOKEN@{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET} -a HEROKU_APPLICATION_NAME ``` diff --git a/send-data/kubernetes.mdx b/send-data/kubernetes.mdx index 80849b00..a577e20a 100644 --- a/send-data/kubernetes.mdx +++ b/send-data/kubernetes.mdx @@ -78,7 +78,7 @@ data: processors: - add_cloud_metadata: output.elasticsearch: - hosts: ['${AXIOM_HOST}/v1/datasets/${AXIOM_DATASET_NAME}/elastic'] + hosts: ['${REGION}.edge.axiom.co/v1/datasets/${AXIOM_DATASET_NAME}/elastic'] api_key: 'axiom:${AXIOM_API_TOKEN}' setup.ilm.enabled: false kind: ConfigMap @@ -112,8 +112,8 @@ spec: - /etc/filebeat.yml - -e env: - - name: AXIOM_HOST - value: AXIOM_DOMAIN + - name: REGION + value: us-east-1.aws - name: AXIOM_DATASET_NAME value: DATASET_NAME - name: AXIOM_API_TOKEN @@ -275,8 +275,8 @@ spec: - --config-dir - /etc/vector/ env: - - name: AXIOM_HOST - value: AXIOM_DOMAIN + - name: REGION + value: us-east-1.aws - name: 
AXIOM_DATASET_NAME value: DATASET_NAME - name: AXIOM_API_TOKEN @@ -424,9 +424,9 @@ data: [OUTPUT] Name http Match * - Host ${AXIOM_HOST} + Host ${REGION}.edge.axiom.co Port 443 - URI /v1/datasets/${AXIOM_DATASET_NAME}/ingest + URI /v1/ingest/${AXIOM_DATASET_NAME} Header Authorization Bearer ${AXIOM_API_TOKEN} Format json Json_date_key time @@ -463,8 +463,8 @@ spec: - name: fluent-bit image: fluent/fluent-bit:1.9.9 env: - - name: AXIOM_HOST - value: AXIOM_DOMAIN + - name: REGION + value: us-east-1.aws - name: AXIOM_DATASET_NAME value: DATASET_NAME - name: AXIOM_API_TOKEN diff --git a/send-data/methods.mdx b/send-data/methods.mdx index 1b3ca08f..1ec7ea14 100644 --- a/send-data/methods.mdx +++ b/send-data/methods.mdx @@ -11,7 +11,7 @@ import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx" The easiest way to send your first event data to Axiom is with a direct HTTP request using a tool like `cURL`. ```shell -curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \ +curl -X 'POST' 'https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/x-ndjson' \ -d '{ "http": { "request": { "method": "GET", "duration_ms": 231 }, "response": { "body": { "size": 3012 } } }, "url": { "path": "/download" } }' diff --git a/send-data/tremor.mdx b/send-data/tremor.mdx index eb48db36..6e1a3b7f 100644 --- a/send-data/tremor.mdx +++ b/send-data/tremor.mdx @@ -45,7 +45,7 @@ flow token with config = { - "url": "https://AXIOM_DOMAIN/v1/datasets/#{args.dataset}/ingest", + "url": "https://{REGION}.edge.axiom.co/v1/ingest/#{args.dataset}", "tls": true, "method": "POST", "headers": { diff --git a/snippets/replace-domain.mdx b/snippets/replace-domain.mdx index 7dafb84f..186252d7 100644 --- a/snippets/replace-domain.mdx +++ b/snippets/replace-domain.mdx @@ -1 +1 @@ -Replace `AXIOM_DOMAIN` with `api.axiom.co` if your organization uses the US region, and with `api.eu.axiom.co` if your organization uses the EU region. For more information, see [Regions](/reference/regions). \ No newline at end of file +Replace `{REGION}` with `us-east-1.aws` if your organization uses the US region, and with `eu-central-1.aws` if your organization uses the EU region. Replace `{AXIOM_DATASET}` with the name of your Axiom dataset. For more information, see [Regions](/reference/regions). 
\ No newline at end of file From ac82ea7534eb44374b16a0361f7ff5e4fb489414 Mon Sep 17 00:00:00 2001 From: Benen Harrington Date: Mon, 3 Nov 2025 16:53:56 +0000 Subject: [PATCH 03/14] Stick with existing AXIOM_DOMAIN pattern --- apps/hex.mdx | 2 +- guides/send-logs-from-apache-log4j.mdx | 2 +- guides/send-logs-from-dotnet.mdx | 8 ++++---- guides/send-logs-from-laravel.mdx | 12 ++++++------ guides/send-logs-from-ruby-on-rails.mdx | 2 +- reference/regions.mdx | 3 +-- restapi/ingest.mdx | 16 ++++++++-------- restapi/introduction.mdx | 2 +- send-data/aws-firehose.mdx | 2 +- send-data/aws-firelens.mdx | 4 ++-- send-data/aws-iot-rules.mdx | 8 ++++---- send-data/aws-lambda-dot.mdx | 2 +- send-data/aws-s3.mdx | 6 +++--- send-data/cribl.mdx | 2 +- send-data/fluent-bit.mdx | 2 +- send-data/fluentd.mdx | 4 ++-- send-data/heroku-log-drains.mdx | 2 +- send-data/kubernetes.mdx | 16 ++++++++-------- send-data/methods.mdx | 2 +- send-data/tremor.mdx | 2 +- snippets/replace-domain.mdx | 2 +- 21 files changed, 50 insertions(+), 51 deletions(-) diff --git a/apps/hex.mdx b/apps/hex.mdx index a7239565..6b7b8e9d 100644 --- a/apps/hex.mdx +++ b/apps/hex.mdx @@ -21,7 +21,7 @@ This page explains how to integrate Hex with Axiom to visualize geospatial data Send your sample location data to Axiom using the API endpoint. For example, the following HTTP request sends sample robot location data with latitude, longitude, status, and satellite information. ```bash -curl -X 'POST' 'https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}' \ +curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/json' \ -d '[ diff --git a/guides/send-logs-from-apache-log4j.mdx b/guides/send-logs-from-apache-log4j.mdx index 78366f2b..6a568588 100644 --- a/guides/send-logs-from-apache-log4j.mdx +++ b/guides/send-logs-from-apache-log4j.mdx @@ -354,7 +354,7 @@ To configure Fluentd, create a configuration file. 
Create a new file named `flue @type http - endpoint https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET} + endpoint https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET} headers {"Authorization":"Bearer API_TOKEN"} json_array true diff --git a/guides/send-logs-from-dotnet.mdx b/guides/send-logs-from-dotnet.mdx index 2861be1a..818d1029 100644 --- a/guides/send-logs-from-dotnet.mdx +++ b/guides/send-logs-from-dotnet.mdx @@ -51,8 +51,8 @@ public static class AxiomLogger // Specify the Axiom dataset name and construct the API endpoint URL var datasetName = "DATASET_NAME"; - var region = "us-east-1.aws"; // Replace with "eu-central-1.aws" for EU region - var axiomUri = $"https://{region}.edge.axiom.co/v1/ingest/{datasetName}"; + var axiomDomain = "AXIOM_DOMAIN"; + var axiomUri = $"https://{axiomDomain}/v1/ingest/{datasetName}"; // Replace with your Axiom API token var apiToken = "API_TOKEN"; // Ensure your API token is correct @@ -244,7 +244,7 @@ public class AxiomConfig { public const string DatasetName = "DATASET_NAME"; public const string ApiToken = "API_TOKEN"; - public const string ApiUrl = "https://{REGION}.edge.axiom.co/v1"; + public const string ApiUrl = "https://AXIOM_DOMAIN/v1"; } public class AxiomHttpClient : IHttpClient @@ -415,7 +415,7 @@ Set up NLog by creating an `NLog.config` file or configuring it programmatically flushTimeout="5000"> diff --git a/guides/send-logs-from-laravel.mdx b/guides/send-logs-from-laravel.mdx index f6b48e3a..a6fe146d 100644 --- a/guides/send-logs-from-laravel.mdx +++ b/guides/send-logs-from-laravel.mdx @@ -74,7 +74,7 @@ return [ 'with' => [ 'apiToken' => env('AXIOM_API_TOKEN'), 'dataset' => env('AXIOM_DATASET'), - 'region' => env('AXIOM_REGION', 'us-east-1.aws'), // Default to US region + 'axiomUrl' => env('AXIOM_DOMAIN'), ], ], @@ -133,7 +133,7 @@ The `default` configuration specifies the primary channel Laravel uses for loggi LOG_CHANNEL=axiom AXIOM_API_TOKEN=API_TOKEN AXIOM_DATASET=DATASET_NAME -AXIOM_REGION=us-east-1.aws +AXIOM_DOMAIN=us-east-1.aws.edge.axiom.co LOG_LEVEL=debug LOG_DEPRECATIONS_CHANNEL=null ``` @@ -249,19 +249,19 @@ class AxiomHandler extends AbstractProcessingHandler { private $apiToken; private $dataset; - private $region; + private $axiomUrl; - public function __construct($level = Logger::DEBUG, bool $bubble = true, $apiToken = null, $dataset = null, $region = null) + public function __construct($level = Logger::DEBUG, bool $bubble = true, $apiToken = null, $dataset = null, $axiomUrl = null) { parent::__construct($level, $bubble); $this->apiToken = $apiToken; $this->dataset = $dataset; - $this->region = $region ?? 
'us-east-1.aws'; // Default to US region + $this->axiomUrl = $axiomUrl; } private function initializeCurl(): \CurlHandle { - $endpoint = "https://{$this->region}.edge.axiom.co/v1/ingest/{$this->dataset}"; + $endpoint = "https://{$this->axiomUrl}/v1/ingest/{$this->dataset}"; $ch = curl_init($endpoint); curl_setopt($ch, CURLOPT_RETURNTRANSFER, true); diff --git a/guides/send-logs-from-ruby-on-rails.mdx b/guides/send-logs-from-ruby-on-rails.mdx index 31b99dcc..920f48a6 100644 --- a/guides/send-logs-from-ruby-on-rails.mdx +++ b/guides/send-logs-from-ruby-on-rails.mdx @@ -48,7 +48,7 @@ require 'json' class AxiomLogger def self.send_log(log_data) dataset_name = "DATASET_NAME" - axiom_ingest_api_url = "https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}" + axiom_ingest_api_url = "https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}" ingest_token = "API_TOKEN" conn = Faraday.new(url: axiom_ingest_api_url) do |faraday| diff --git a/reference/regions.mdx b/reference/regions.mdx index f94088d1..20459933 100644 --- a/reference/regions.mdx +++ b/reference/regions.mdx @@ -25,8 +25,7 @@ All examples in the documentation use the default US base domain `https://api.ax | Region | Base domain of API endpoints | Example | | ---------- | ------------------------------------ | ------------------------------------------------- | -| US | `https://us-east-1.aws.edge.axiom.co` | `https://us-east-1.aws.edge.axiom.co/v1/ingest/{AXIOM_DATASET}` | -| EU | `https://eu-central-1.aws.edge.axiom.co` | `https://eu-central-1.aws.edge.axiom.co/v1/ingest/{AXIOM_DATASET}` | +| US | `https://us-east-1.aws.edge.axiom.co` | `https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}` | ## Add new region diff --git a/restapi/ingest.mdx b/restapi/ingest.mdx index 90201d42..d51b3d87 100644 --- a/restapi/ingest.mdx +++ b/restapi/ingest.mdx @@ -35,7 +35,7 @@ To send data to Axiom in JSON format: 1. Optional: In the body of the request, set optional parameters such as `timestamp-field` and `timestamp-format`. For more information, see the [ingest data API reference](/restapi/endpoints/ingestIntoDataset). 1. Set the `Content-Type` header to `application/json`. 1. Set the `Authorization` header to `Bearer API_TOKEN`. -1. Send the POST request to `https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}`. +1. Send the POST request to `https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}`. @@ -49,7 +49,7 @@ The following example request contains grouped events. The structure of the JSON **Example request** ```bash -curl -X 'POST' 'https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}' \ +curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/json' \ -d '[ @@ -87,7 +87,7 @@ curl -X 'POST' 'https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}' \ **Example request** ```bash -curl -X 'POST' 'https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}' \ +curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/json' \ -d '[ @@ -155,7 +155,7 @@ curl -X 'POST' 'https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}' \ **Example request** ```bash -curl -X 'POST' 'https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}' \ +curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/json' \ -d '[{ "axiom": { @@ -202,7 +202,7 @@ To send data to Axiom in NDJSON format: 1. 
Optional: In the body of the request, set optional parameters such as `timestamp-field` and `timestamp-format`. For more information, see the [ingest data API reference](/restapi/endpoints/ingestIntoDataset). 1. Set the `Content-Type` header to either `application/json` or `application/x-ndjson`. 1. Set the `Authorization` header to `Bearer API_TOKEN`. Replace `API_TOKEN` with the Axiom API token you have generated. -1. Send the POST request to `https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}`. Replace `DATASET_NAME` with the name of the Axiom dataset where you want to send data. +1. Send the POST request to `https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}`. Replace `DATASET_NAME` with the name of the Axiom dataset where you want to send data. @@ -212,7 +212,7 @@ To send data to Axiom in NDJSON format: **Example request** ```bash -curl -X 'POST' 'https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}' \ +curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/x-ndjson' \ -d '{"id":1,"name":"machala"} @@ -249,7 +249,7 @@ To send data to Axiom in JSON format: 1. Optional: In the body of the request, set optional parameters such as `timestamp-field` and `timestamp-format`. For more information, see the [ingest data API reference](/restapi/endpoints/ingestIntoDataset). 1. Set the `Content-Type` header to `text/csv`. 1. Set the `Authorization` header to `Bearer API_TOKEN`. Replace `API_TOKEN` with the Axiom API token you have generated. -1. Send the POST request to `https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}`. Replace `DATASET_NAME` with the name of the Axiom dataset where you want to send data. +1. Send the POST request to `https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}`. Replace `DATASET_NAME` with the name of the Axiom dataset where you want to send data. @@ -259,7 +259,7 @@ To send data to Axiom in JSON format: **Example request** ```bash -curl -X 'POST' 'https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}' \ +curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: text/csv' \ -d 'user, name diff --git a/restapi/introduction.mdx b/restapi/introduction.mdx index 6de2b416..2087a5bb 100644 --- a/restapi/introduction.mdx +++ b/restapi/introduction.mdx @@ -25,7 +25,7 @@ Axiom API follows the REST architectural style and uses JSON for serialization. For example, the following curl command ingests data to an Axiom dataset: ```bash -curl -X 'POST' 'https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}' \ +curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/json' \ -d '[ diff --git a/send-data/aws-firehose.mdx b/send-data/aws-firehose.mdx index b3db21fb..119cd910 100644 --- a/send-data/aws-firehose.mdx +++ b/send-data/aws-firehose.mdx @@ -23,7 +23,7 @@ Amazon Data Firehose is a service for delivering real-time streaming data to dif 1. In Axiom, determine the ID of the dataset you’ve created. 1. In Amazon Data Firehose, create an HTTP endpoint destination. For more information, see the [Amazon Data Firehose documentation](https://docs.aws.amazon.com/firehose/latest/dev/create-destination.html#create-destination-http). -1. Set HTTP endpoint URL to `https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}/firehose`. +1. Set HTTP endpoint URL to `https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}/firehose`. 
diff --git a/send-data/aws-firelens.mdx b/send-data/aws-firelens.mdx index 814cad8e..7227d33c 100644 --- a/send-data/aws-firelens.mdx +++ b/send-data/aws-firelens.mdx @@ -40,7 +40,7 @@ You’ll typically define this in a file called `fluent-bit.conf`: [OUTPUT] Name http Match * - Host {REGION}.edge.axiom.co + Host AXIOM_DOMAIN Port 443 URI /v1/ingest/DATASET_NAME Format json_lines @@ -104,7 +104,7 @@ Create the `fluentd.conf` file and add your configuration: @type http headers {"Authorization": "Bearer API_TOKEN"} data_type json - endpoint https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET} + endpoint https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET} sourcetype ecs ``` diff --git a/send-data/aws-iot-rules.mdx b/send-data/aws-iot-rules.mdx index 07b733e0..901eecc4 100644 --- a/send-data/aws-iot-rules.mdx +++ b/send-data/aws-iot-rules.mdx @@ -27,12 +27,12 @@ import json # Import the json module to handle JSON data import requests # Import the requests module to make HTTP requests def lambda_handler(event, context): - # Retrieve the dataset name and the Axiom region from the environment variables + # Retrieve the dataset name and the Axiom domain from the environment variables dataset_name = os.environ['DATASET_NAME'] - region = os.environ['REGION'] + axiom_domain = os.environ['AXIOM_DOMAIN'] # Construct the Axiom API URL using the dataset name - axiom_api_url = f"https://{region}.edge.axiom.co/v1/ingest/{dataset_name}" + axiom_api_url = f"https://{axiom_domain}/v1/ingest/{dataset_name}" # Retrieve the Axiom API token from the environment variable api_token = os.environ['API_TOKEN'] @@ -62,7 +62,7 @@ def lambda_handler(event, context): In the environment variables section of the Lambda function configuration, add the following environment variables: - `DATASET_NAME` is the name of the Axiom dataset where you want to send data. -- `REGION` is the Axiom region that your organization uses. Replace `REGION` with `us-east-1.aws` for the US region, or `eu-central-1.aws` for the EU region. For more information, see [Regions](/reference/regions). +- `AXIOM_DOMAIN` is the Axiom domain that your organization uses. Replace `AXIOM_DOMAIN` with `us-east-1.aws.edge.axiom.co` for the US region, or `eu-central-1.aws.edge.axiom.co` for the EU region. For more information, see [Regions](/reference/regions). - `API_TOKEN` is the Axiom API token you have generated. For added security, store the API token in an environment variable. 
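With the environment variables in place, one way to exercise the function before wiring up the IoT rule is to invoke it directly. A minimal sketch, assuming AWS CLI v2 and a function named `axiom-iot-forwarder` (both assumptions — adjust to your setup):

```bash
# Sketch: invoke the forwarder with a sample payload to confirm events reach Axiom.
# --cli-binary-format is required in AWS CLI v2 to pass raw JSON in --payload.
aws lambda invoke \
  --function-name axiom-iot-forwarder \
  --cli-binary-format raw-in-base64-out \
  --payload '{"device_id": "robot-1", "temperature": 22.5}' \
  response.json && cat response.json
```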
diff --git a/send-data/aws-lambda-dot.mdx b/send-data/aws-lambda-dot.mdx index 4472ebae..7b0b50ea 100644 --- a/send-data/aws-lambda-dot.mdx +++ b/send-data/aws-lambda-dot.mdx @@ -80,7 +80,7 @@ receivers: exporters: otlphttp: compression: gzip - endpoint: https://{REGION}.edge.axiom.co + endpoint: https://AXIOM_DOMAIN headers: authorization: Bearer API_TOKEN x-axiom-dataset: DATASET_NAME diff --git a/send-data/aws-s3.mdx b/send-data/aws-s3.mdx index 6a711614..7a3d93b3 100644 --- a/send-data/aws-s3.mdx +++ b/send-data/aws-s3.mdx @@ -102,8 +102,8 @@ def lambda_handler(event, context): # Prepare Axiom API request dataset_name = os.environ['DATASET_NAME'] - region = os.environ['REGION'] - axiom_api_url = f"https://{region}.edge.axiom.co/v1/ingest/{dataset_name}" + axiom_domain = os.environ['AXIOM_DOMAIN'] + axiom_api_url = f"https://{axiom_domain}/v1/ingest/{dataset_name}" api_token = os.environ['API_TOKEN'] axiom_headers = { "Authorization": f"Bearer {api_token}", @@ -125,7 +125,7 @@ def lambda_handler(event, context): In the environment variables section of the Lambda function configuration, add the following environment variables: - `DATASET_NAME` is the name of the Axiom dataset where you want to send data. -- `REGION` is the Axiom region that your organization uses. Replace `REGION` with `us-east-1.aws` for the US region, or `eu-central-1.aws` for the EU region. For more information, see [Regions](/reference/regions). +- `AXIOM_DOMAIN` is the Axiom domain that your organization uses. Replace `AXIOM_DOMAIN` with `us-east-1.aws.edge.axiom.co` for the US region, or `eu-central-1.aws.edge.axiom.co` for the EU region. For more information, see [Regions](/reference/regions). - `API_TOKEN` is the Axiom API token you have generated. For added security, store the API token in an environment variable. diff --git a/send-data/cribl.mdx b/send-data/cribl.mdx index ad50f54e..85b51242 100644 --- a/send-data/cribl.mdx +++ b/send-data/cribl.mdx @@ -36,7 +36,7 @@ Open Cribl’s UI and navigate to **Destinations > HTTP**. Click on `+` Add New - **Name:** Choose a name for the destination. -- **Endpoint URL:** The URL of your Axiom log ingest endpoint `https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}`. +- **Endpoint URL:** The URL of your Axiom log ingest endpoint `https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}`. 
diff --git a/send-data/fluent-bit.mdx b/send-data/fluent-bit.mdx index a944a6dc..5bef3e19 100644 --- a/send-data/fluent-bit.mdx +++ b/send-data/fluent-bit.mdx @@ -27,7 +27,7 @@ Fluent Bit is an open-source log processor and forwarder that allows you to coll [OUTPUT] Name http Match * - Host {REGION}.edge.axiom.co + Host AXIOM_DOMAIN Port 443 URI /v1/ingest/DATASET_NAME Header Authorization Bearer API_TOKEN diff --git a/send-data/fluentd.mdx b/send-data/fluentd.mdx index 76e8b2b8..21eb81a5 100644 --- a/send-data/fluentd.mdx +++ b/send-data/fluentd.mdx @@ -49,7 +49,7 @@ The example below shows a Fluentd configuration that sends data to Axiom using t @type http - endpoint https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET} + endpoint https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET} # Authorization Bearer should be an ingest token headers {"Authorization": "Bearer API_TOKEN"} json_array false @@ -257,7 +257,7 @@ The example below shows a Fluentd configuration that sends Scala data to Axiom # Send Scala logs using HTTP plugin to Axiom @type http - endpoint "#{ENV['FLUENT_HTTP_ENDPOINT'] || 'https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}'}" + endpoint "#{ENV['FLUENT_HTTP_ENDPOINT'] || 'https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}'}" headers {"Authorization": "Bearer #{ENV['FLUENT_HTTP_TOKEN'] || ''}"} @type json diff --git a/send-data/heroku-log-drains.mdx b/send-data/heroku-log-drains.mdx index d364a648..c15554bd 100644 --- a/send-data/heroku-log-drains.mdx +++ b/send-data/heroku-log-drains.mdx @@ -28,7 +28,7 @@ Sign up and login to your account on [Heroku](https://heroku.com/), and download Heroku log drains configuration consists of three main components ```bash -heroku drains:add https://axiom:API_TOKEN@{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET} -a HEROKU_APPLICATION_NAME +heroku drains:add https://axiom:API_TOKEN@AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET} -a HEROKU_APPLICATION_NAME ``` diff --git a/send-data/kubernetes.mdx b/send-data/kubernetes.mdx index a577e20a..57c691dd 100644 --- a/send-data/kubernetes.mdx +++ b/send-data/kubernetes.mdx @@ -78,7 +78,7 @@ data: processors: - add_cloud_metadata: output.elasticsearch: - hosts: ['${REGION}.edge.axiom.co/v1/datasets/${AXIOM_DATASET_NAME}/elastic'] + hosts: ['${AXIOM_HOST}/v1/datasets/${AXIOM_DATASET_NAME}/elastic'] api_key: 'axiom:${AXIOM_API_TOKEN}' setup.ilm.enabled: false kind: ConfigMap @@ -112,8 +112,8 @@ spec: - /etc/filebeat.yml - -e env: - - name: REGION - value: us-east-1.aws + - name: AXIOM_HOST + value: AXIOM_DOMAIN - name: AXIOM_DATASET_NAME value: DATASET_NAME - name: AXIOM_API_TOKEN @@ -275,8 +275,8 @@ spec: - --config-dir - /etc/vector/ env: - - name: REGION - value: us-east-1.aws + - name: AXIOM_HOST + value: AXIOM_DOMAIN - name: AXIOM_DATASET_NAME value: DATASET_NAME - name: AXIOM_API_TOKEN @@ -424,7 +424,7 @@ data: [OUTPUT] Name http Match * - Host ${REGION}.edge.axiom.co + Host ${AXIOM_HOST} Port 443 URI /v1/ingest/${AXIOM_DATASET_NAME} Header Authorization Bearer ${AXIOM_API_TOKEN} @@ -463,8 +463,8 @@ spec: - name: fluent-bit image: fluent/fluent-bit:1.9.9 env: - - name: REGION - value: us-east-1.aws + - name: AXIOM_HOST + value: AXIOM_DOMAIN - name: AXIOM_DATASET_NAME value: DATASET_NAME - name: AXIOM_API_TOKEN diff --git a/send-data/methods.mdx b/send-data/methods.mdx index f850360e..b7aec26c 100644 --- a/send-data/methods.mdx +++ b/send-data/methods.mdx @@ -11,7 +11,7 @@ import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx" The easiest way to send your first event data to Axiom is with 
a direct HTTP request using a tool like `cURL`. ```shell -curl -X 'POST' 'https://{REGION}.edge.axiom.co/v1/ingest/{AXIOM_DATASET}' \ +curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/x-ndjson' \ -d '{ "http": { "request": { "method": "GET", "duration_ms": 231 }, "response": { "body": { "size": 3012 } } }, "url": { "path": "/download" } }' diff --git a/send-data/tremor.mdx b/send-data/tremor.mdx index 6e1a3b7f..73fdc0f3 100644 --- a/send-data/tremor.mdx +++ b/send-data/tremor.mdx @@ -45,7 +45,7 @@ flow token with config = { - "url": "https://{REGION}.edge.axiom.co/v1/ingest/#{args.dataset}", + "url": "https://AXIOM_DOMAIN/v1/ingest/#{args.dataset}", "tls": true, "method": "POST", "headers": { diff --git a/snippets/replace-domain.mdx b/snippets/replace-domain.mdx index a81921fc..297c5a1b 100644 --- a/snippets/replace-domain.mdx +++ b/snippets/replace-domain.mdx @@ -1 +1 @@ -Replace `{REGION}` with `us-east-1.aws` if your organization uses the US region, and with `eu-central-1.aws` if your organization uses the EU region. Replace `{AXIOM_DATASET}` with the name of your Axiom dataset. For more information, see [Regions](/reference/regions). +Replace `AXIOM_DOMAIN` with `us-east-1.aws.edge.axiom.co` if your organization uses the US region, and with `eu-central-1.aws.edge.axiom.co` if your organization uses the EU region. Replace `{AXIOM_DATASET}` with the name of your Axiom dataset. For more information, see [Regions](/reference/regions). From 0a48e31b57eab78c823116321d48e3c065c94fb8 Mon Sep 17 00:00:00 2001 From: Benen Harrington Date: Wed, 5 Nov 2025 10:31:58 +0000 Subject: [PATCH 04/14] Use correct placeholder name --- apps/hex.mdx | 2 +- guides/send-logs-from-apache-log4j.mdx | 2 +- guides/send-logs-from-dotnet.mdx | 2 +- guides/send-logs-from-ruby-on-rails.mdx | 2 +- reference/regions.mdx | 2 +- restapi/ingest.mdx | 16 ++++++++-------- restapi/introduction.mdx | 2 +- send-data/aws-firehose.mdx | 2 +- send-data/aws-firelens.mdx | 2 +- send-data/cribl.mdx | 2 +- send-data/fluentd.mdx | 4 ++-- send-data/heroku-log-drains.mdx | 2 +- send-data/methods.mdx | 2 +- snippets/replace-domain.mdx | 2 +- 14 files changed, 22 insertions(+), 22 deletions(-) diff --git a/apps/hex.mdx b/apps/hex.mdx index 6b7b8e9d..6c293bd5 100644 --- a/apps/hex.mdx +++ b/apps/hex.mdx @@ -21,7 +21,7 @@ This page explains how to integrate Hex with Axiom to visualize geospatial data Send your sample location data to Axiom using the API endpoint. For example, the following HTTP request sends sample robot location data with latitude, longitude, status, and satellite information. ```bash -curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}' \ +curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/json' \ -d '[ diff --git a/guides/send-logs-from-apache-log4j.mdx b/guides/send-logs-from-apache-log4j.mdx index 6a568588..523662a5 100644 --- a/guides/send-logs-from-apache-log4j.mdx +++ b/guides/send-logs-from-apache-log4j.mdx @@ -354,7 +354,7 @@ To configure Fluentd, create a configuration file. 
Create a new file named `flue @type http - endpoint https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET} + endpoint https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME headers {"Authorization":"Bearer API_TOKEN"} json_array true diff --git a/guides/send-logs-from-dotnet.mdx b/guides/send-logs-from-dotnet.mdx index 818d1029..4feefe59 100644 --- a/guides/send-logs-from-dotnet.mdx +++ b/guides/send-logs-from-dotnet.mdx @@ -415,7 +415,7 @@ Set up NLog by creating an `NLog.config` file or configuring it programmatically flushTimeout="5000"> diff --git a/guides/send-logs-from-ruby-on-rails.mdx b/guides/send-logs-from-ruby-on-rails.mdx index 920f48a6..46c5e945 100644 --- a/guides/send-logs-from-ruby-on-rails.mdx +++ b/guides/send-logs-from-ruby-on-rails.mdx @@ -48,7 +48,7 @@ require 'json' class AxiomLogger def self.send_log(log_data) dataset_name = "DATASET_NAME" - axiom_ingest_api_url = "https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}" + axiom_ingest_api_url = "https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME" ingest_token = "API_TOKEN" conn = Faraday.new(url: axiom_ingest_api_url) do |faraday| diff --git a/reference/regions.mdx b/reference/regions.mdx index 20459933..0660f68a 100644 --- a/reference/regions.mdx +++ b/reference/regions.mdx @@ -25,7 +25,7 @@ All examples in the documentation use the default US base domain `https://api.ax | Region | Base domain of API endpoints | Example | | ---------- | ------------------------------------ | ------------------------------------------------- | -| US | `https://us-east-1.aws.edge.axiom.co` | `https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}` | +| US | `https://us-east-1.aws.edge.axiom.co` | `https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME` | ## Add new region diff --git a/restapi/ingest.mdx b/restapi/ingest.mdx index d51b3d87..6eb44dcd 100644 --- a/restapi/ingest.mdx +++ b/restapi/ingest.mdx @@ -35,7 +35,7 @@ To send data to Axiom in JSON format: 1. Optional: In the body of the request, set optional parameters such as `timestamp-field` and `timestamp-format`. For more information, see the [ingest data API reference](/restapi/endpoints/ingestIntoDataset). 1. Set the `Content-Type` header to `application/json`. 1. Set the `Authorization` header to `Bearer API_TOKEN`. -1. Send the POST request to `https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}`. +1. Send the POST request to `https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME`. @@ -49,7 +49,7 @@ The following example request contains grouped events. The structure of the JSON **Example request** ```bash -curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}' \ +curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/json' \ -d '[ @@ -87,7 +87,7 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}' \ **Example request** ```bash -curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}' \ +curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/json' \ -d '[ @@ -155,7 +155,7 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}' \ **Example request** ```bash -curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}' \ +curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/json' \ -d '[{ "axiom": { @@ -202,7 +202,7 @@ To send data to Axiom in NDJSON format: 1. 
Optional: In the body of the request, set optional parameters such as `timestamp-field` and `timestamp-format`. For more information, see the [ingest data API reference](/restapi/endpoints/ingestIntoDataset). 1. Set the `Content-Type` header to either `application/json` or `application/x-ndjson`. 1. Set the `Authorization` header to `Bearer API_TOKEN`. Replace `API_TOKEN` with the Axiom API token you have generated. -1. Send the POST request to `https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}`. Replace `DATASET_NAME` with the name of the Axiom dataset where you want to send data. +1. Send the POST request to `https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME`. Replace `DATASET_NAME` with the name of the Axiom dataset where you want to send data. @@ -212,7 +212,7 @@ To send data to Axiom in NDJSON format: **Example request** ```bash -curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}' \ +curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/x-ndjson' \ -d '{"id":1,"name":"machala"} @@ -249,7 +249,7 @@ To send data to Axiom in JSON format: 1. Optional: In the body of the request, set optional parameters such as `timestamp-field` and `timestamp-format`. For more information, see the [ingest data API reference](/restapi/endpoints/ingestIntoDataset). 1. Set the `Content-Type` header to `text/csv`. 1. Set the `Authorization` header to `Bearer API_TOKEN`. Replace `API_TOKEN` with the Axiom API token you have generated. -1. Send the POST request to `https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}`. Replace `DATASET_NAME` with the name of the Axiom dataset where you want to send data. +1. Send the POST request to `https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME`. Replace `DATASET_NAME` with the name of the Axiom dataset where you want to send data. @@ -259,7 +259,7 @@ To send data to Axiom in JSON format: **Example request** ```bash -curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}' \ +curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: text/csv' \ -d 'user, name diff --git a/restapi/introduction.mdx b/restapi/introduction.mdx index 2087a5bb..9bccc5ce 100644 --- a/restapi/introduction.mdx +++ b/restapi/introduction.mdx @@ -25,7 +25,7 @@ Axiom API follows the REST architectural style and uses JSON for serialization. For example, the following curl command ingests data to an Axiom dataset: ```bash -curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}' \ +curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/json' \ -d '[ diff --git a/send-data/aws-firehose.mdx b/send-data/aws-firehose.mdx index 119cd910..82ee80b7 100644 --- a/send-data/aws-firehose.mdx +++ b/send-data/aws-firehose.mdx @@ -23,7 +23,7 @@ Amazon Data Firehose is a service for delivering real-time streaming data to dif 1. In Axiom, determine the ID of the dataset you’ve created. 1. In Amazon Data Firehose, create an HTTP endpoint destination. For more information, see the [Amazon Data Firehose documentation](https://docs.aws.amazon.com/firehose/latest/dev/create-destination.html#create-destination-http). -1. Set HTTP endpoint URL to `https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}/firehose`. +1. Set HTTP endpoint URL to `https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME/firehose`. 
diff --git a/send-data/aws-firelens.mdx b/send-data/aws-firelens.mdx index 7227d33c..f4cfe104 100644 --- a/send-data/aws-firelens.mdx +++ b/send-data/aws-firelens.mdx @@ -104,7 +104,7 @@ Create the `fluentd.conf` file and add your configuration: @type http headers {"Authorization": "Bearer API_TOKEN"} data_type json - endpoint https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET} + endpoint https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME sourcetype ecs ``` diff --git a/send-data/cribl.mdx b/send-data/cribl.mdx index 85b51242..d1aa465a 100644 --- a/send-data/cribl.mdx +++ b/send-data/cribl.mdx @@ -36,7 +36,7 @@ Open Cribl’s UI and navigate to **Destinations > HTTP**. Click on `+` Add New - **Name:** Choose a name for the destination. -- **Endpoint URL:** The URL of your Axiom log ingest endpoint `https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}`. +- **Endpoint URL:** The URL of your Axiom log ingest endpoint `https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME`. diff --git a/send-data/fluentd.mdx b/send-data/fluentd.mdx index 21eb81a5..0f3ccee0 100644 --- a/send-data/fluentd.mdx +++ b/send-data/fluentd.mdx @@ -49,7 +49,7 @@ The example below shows a Fluentd configuration that sends data to Axiom using t @type http - endpoint https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET} + endpoint https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME # Authorization Bearer should be an ingest token headers {"Authorization": "Bearer API_TOKEN"} json_array false @@ -257,7 +257,7 @@ The example below shows a Fluentd configuration that sends Scala data to Axiom # Send Scala logs using HTTP plugin to Axiom @type http - endpoint "#{ENV['FLUENT_HTTP_ENDPOINT'] || 'https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}'}" + endpoint "#{ENV['FLUENT_HTTP_ENDPOINT'] || 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME'}" headers {"Authorization": "Bearer #{ENV['FLUENT_HTTP_TOKEN'] || ''}"} @type json diff --git a/send-data/heroku-log-drains.mdx b/send-data/heroku-log-drains.mdx index c15554bd..24ba82ce 100644 --- a/send-data/heroku-log-drains.mdx +++ b/send-data/heroku-log-drains.mdx @@ -28,7 +28,7 @@ Sign up and login to your account on [Heroku](https://heroku.com/), and download Heroku log drains configuration consists of three main components ```bash -heroku drains:add https://axiom:API_TOKEN@AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET} -a HEROKU_APPLICATION_NAME +heroku drains:add https://axiom:API_TOKEN@AXIOM_DOMAIN/v1/ingest/DATASET_NAME -a HEROKU_APPLICATION_NAME ``` diff --git a/send-data/methods.mdx b/send-data/methods.mdx index b7aec26c..46ba0e30 100644 --- a/send-data/methods.mdx +++ b/send-data/methods.mdx @@ -11,7 +11,7 @@ import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx" The easiest way to send your first event data to Axiom is with a direct HTTP request using a tool like `cURL`. ```shell -curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/{AXIOM_DATASET}' \ +curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/x-ndjson' \ -d '{ "http": { "request": { "method": "GET", "duration_ms": 231 }, "response": { "body": { "size": 3012 } } }, "url": { "path": "/download" } }' diff --git a/snippets/replace-domain.mdx b/snippets/replace-domain.mdx index 297c5a1b..e62a43c0 100644 --- a/snippets/replace-domain.mdx +++ b/snippets/replace-domain.mdx @@ -1 +1 @@ -Replace `AXIOM_DOMAIN` with `us-east-1.aws.edge.axiom.co` if your organization uses the US region, and with `eu-central-1.aws.edge.axiom.co` if your organization uses the EU region. 
Replace `{AXIOM_DATASET}` with the name of your Axiom dataset. For more information, see [Regions](/reference/regions). +Replace `AXIOM_DOMAIN` with `us-east-1.aws.edge.axiom.co` if your organization uses the US region, and with `eu-central-1.aws.edge.axiom.co` if your organization uses the EU region. Replace `DATASET_NAME` with the name of your Axiom dataset. For more information, see [Regions](/reference/regions). From 7eab1f18bbcd15c398a3d9b5fe3b95fa3bb3d34f Mon Sep 17 00:00:00 2001 From: benenharrington <42470628+benenharrington@users.noreply.github.com> Date: Wed, 5 Nov 2025 10:42:12 +0000 Subject: [PATCH 05/14] Update snippets/replace-domain.mdx Co-authored-by: Mano Toth <71388581+manototh@users.noreply.github.com> --- snippets/replace-domain.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/snippets/replace-domain.mdx b/snippets/replace-domain.mdx index e62a43c0..1d74cafc 100644 --- a/snippets/replace-domain.mdx +++ b/snippets/replace-domain.mdx @@ -1 +1 @@ -Replace `AXIOM_DOMAIN` with `us-east-1.aws.edge.axiom.co` if your organization uses the US region, and with `eu-central-1.aws.edge.axiom.co` if your organization uses the EU region. Replace `DATASET_NAME` with the name of your Axiom dataset. For more information, see [Regions](/reference/regions). +Replace `AXIOM_DOMAIN` with `us-east-1.aws.edge.axiom.co` if your organization uses the US region, and with `eu-central-1.aws.edge.axiom.co` if your organization uses the EU region. For more information, see [Regions](/reference/regions). From 3a33a72e7eac643c97d7c2ac77ce5319b3b056a4 Mon Sep 17 00:00:00 2001 From: benenharrington <42470628+benenharrington@users.noreply.github.com> Date: Wed, 5 Nov 2025 10:42:23 +0000 Subject: [PATCH 06/14] Update reference/regions.mdx Co-authored-by: Mano Toth <71388581+manototh@users.noreply.github.com> --- reference/regions.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/reference/regions.mdx b/reference/regions.mdx index 0660f68a..8d862cb3 100644 --- a/reference/regions.mdx +++ b/reference/regions.mdx @@ -25,7 +25,7 @@ All examples in the documentation use the default US base domain `https://api.ax | Region | Base domain of API endpoints | Example | | ---------- | ------------------------------------ | ------------------------------------------------- | -| US | `https://us-east-1.aws.edge.axiom.co` | `https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME` | +| US | `https://us-east-1.aws.edge.axiom.co` | `https://us-east-1.aws.edge.axiom.co/v1/ingest/DATASET_NAME` | ## Add new region From 0b612846f2fc7fe340827c166d45d1a5779edaf9 Mon Sep 17 00:00:00 2001 From: Benen Harrington Date: Mon, 10 Nov 2025 16:51:12 +0000 Subject: [PATCH 07/14] Update region info --- reference/regions.mdx | 76 ++++++++++++++++++++++++++++++++++--------- 1 file changed, 60 insertions(+), 16 deletions(-) diff --git a/reference/regions.mdx b/reference/regions.mdx index 8d862cb3..9030c009 100644 --- a/reference/regions.mdx +++ b/reference/regions.mdx @@ -1,32 +1,76 @@ --- title: 'Regions' -description: 'This page explains how to work with Axiom based on your organization’s region.' +description: 'This page explains how to work with Axiom’s unified multi-region platform.' --- +Axiom is available in multiple regions to help you comply with data residency requirements and optimize performance. 
+ +## Available regions + +Axiom currently supports the following cloud regions for data ingest: + +| Region | Base domain of edge ingest API endpoints | Example | Cloud provider | +| ---------- | ------------------------------------ | ------------------------------------------------- | -------------- | +| US | `https://us-east-1.aws.edge.axiom.co` | `https://us-east-1.aws.edge.axiom.co/v1/ingest/DATASET_NAME` | AWS | +| EU | `https://eu-central-1.aws.edge.axiom.co` | `https://eu-central-1.aws.edge.axiom.co/v1/ingest/DATASET_NAME` | AWS | + +The base domain of other API endpoints is `https://api.axiom.co`. + -Axiom will support a unified multi-region model by early November 2025. This will allow you to manage data across different regions from a single organization. [Contact Axiom](https://www.axiom.co/contact) to learn more. +Axiom is developing support for additional regions. [Contact Axiom](https://www.axiom.co/contact) if you need a region that’s not currently supported. -## Check your region + +Query execution currently happens in the US region only. This means that data in the EU region can leave the region when queried. As we build out support for multiple regions, query execution will always happen in the same region as the storage region of the data being queried. + + +## Default region + +The default region of your organization determines where the data in your datasets are stored. + +To determine your organization’s default region: + +1. Click Settings icon **Settings > General**. +1. Find your organization’s default region in the **Region** section. + + +Axiom is building support for storing data outside your default region on a dataset-by-dataset basis. + + +## Choose region + +When you create a new organization, choose the region that best meets your data residency requirements and where your infrastructure is located for optimal performance. + +When choosing a region, consider the following factors: + +- **Data residency requirements**: If you need to comply with regulations like GDPR that require data to be stored in specific regions, choose the appropriate region. +- **Performance**: Choose the region closest to your infrastructure for lower latency. +- **Team location**: Choose the region that your team is primarily located in for better performance. + +## Roadmap -Determine the region your organization uses in one of the following ways: +### Legacy infrastructure: Dedicated regional instances -- Go to the [Axiom app](https://app.axiom.co/) and check the URL. Match the base domain in the Axiom web app with the table below: +Until November 2025, Axiom operated completely separate infrastructures for different regions: - | Region | Base domain in web app | - | ---------- | ------------------------------ | - | US | `https://app.axiom.co` | +- **US instance**: `app.axiom.co` hosted in AWS `us-east-1` +- **EU instance**: `app.eu.axiom.co` hosted in AWS `eu-central-1` -- Click Settings icon **Settings > General**, and then find the **Region** section. +Each instance was entirely independent with separate: +- Organizations and user accounts +- Billing and subscription plans +- API endpoints and ingestion URLs +- Data storage and query infrastructure -## Axiom API reference +### Towards a unified multi-region platform -All examples in the documentation use the default US base domain `https://api.axiom.co`. 
+Starting in November 2025, Axiom is rolling out support for a unified platform where a single organization can manage datasets across multiple regions: -| Region | Base domain of API endpoints | Example | -| ---------- | ------------------------------------ | ------------------------------------------------- | -| US | `https://us-east-1.aws.edge.axiom.co` | `https://us-east-1.aws.edge.axiom.co/v1/ingest/DATASET_NAME` | +- **One organization, multiple regions**: Create and manage datasets in different geographic regions from a single account. +- **Unified billing**: Single subscription covering all your regions. +- **Cross-region visibility**: View and manage all your data from one console. +- **Flexible data residency**: Place each dataset in the region that best meets your compliance and performance requirements. -## Add new region +### Migration from legacy EU instance -For more information on managing additional regions, see [Manage add-ons](reference/usage-billing#manage-add-ons). +The unified multi-region model is rolling out progressively. If your organization currently uses the legacy EU instance, Axiom will contact you about the opportunity to migrate to the unified platform. From b3f354e036991e85fc8bfcab941fa97c1fa522b6 Mon Sep 17 00:00:00 2001 From: Benen Harrington Date: Mon, 10 Nov 2025 16:53:28 +0000 Subject: [PATCH 08/14] Clarify query execution language --- reference/regions.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/reference/regions.mdx b/reference/regions.mdx index 9030c009..3378a82f 100644 --- a/reference/regions.mdx +++ b/reference/regions.mdx @@ -21,7 +21,7 @@ Axiom is developing support for additional regions. [Contact Axiom](https://www. -Query execution currently happens in the US region only. This means that data in the EU region can leave the region when queried. As we build out support for multiple regions, query execution will always happen in the same region as the storage region of the data being queried. +Query execution currently happens in the US region only. This means that data in the EU region can leave the region when queried. As we continue to build out support for multiple regions, we will update query behavior to ensure that query execution happens in the same region as the storage region of the data being queried. 
## Default region From f16e8b6962632f770a893ccb120894aa35f2d128 Mon Sep 17 00:00:00 2001 From: Mano Toth Date: Tue, 11 Nov 2025 11:23:20 +0100 Subject: [PATCH 09/14] Add different snippet for edge --- apps/hex.mdx | 6 +++--- guides/send-logs-from-apache-log4j.mdx | 6 +++--- guides/send-logs-from-dotnet.mdx | 8 ++++---- guides/send-logs-from-ruby-on-rails.mdx | 6 +++--- send-data/aws-firehose.mdx | 6 +++--- send-data/aws-firelens.mdx | 6 +++--- send-data/aws-iot-rules.mdx | 11 +++++------ send-data/aws-s3.mdx | 12 +++++++----- send-data/cribl.mdx | 6 +++--- send-data/fluent-bit.mdx | 6 +++--- send-data/fluentd.mdx | 6 +++--- send-data/heroku-log-drains.mdx | 6 +++--- send-data/kubernetes.mdx | 10 +++++----- send-data/methods.mdx | 6 +++--- send-data/tremor.mdx | 6 +++--- snippets/replace-domain.mdx | 2 +- snippets/replace-edge-domain.mdx | 1 + 17 files changed, 56 insertions(+), 54 deletions(-) create mode 100644 snippets/replace-edge-domain.mdx diff --git a/apps/hex.mdx b/apps/hex.mdx index 6c293bd5..6fcb1a9c 100644 --- a/apps/hex.mdx +++ b/apps/hex.mdx @@ -6,7 +6,7 @@ sidebarTitle: 'Hex' import Prerequisites from "/snippets/standard-prerequisites.mdx" import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx" -import ReplaceDomain from "/snippets/replace-domain.mdx" +import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx" import ReplaceDataset from "/snippets/replace-dataset.mdx" Hex is a powerful collaborative data platform that allows you to create notebooks with Python/SQL code and interactive visualizations. @@ -21,7 +21,7 @@ This page explains how to integrate Hex with Axiom to visualize geospatial data Send your sample location data to Axiom using the API endpoint. For example, the following HTTP request sends sample robot location data with latitude, longitude, status, and satellite information. ```bash -curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ +curl -X 'POST' 'https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/json' \ -d '[ @@ -39,7 +39,7 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ - + Verify that your data has been ingested correctly by running an APL query in the Axiom UI. diff --git a/guides/send-logs-from-apache-log4j.mdx b/guides/send-logs-from-apache-log4j.mdx index 523662a5..7d237f1d 100644 --- a/guides/send-logs-from-apache-log4j.mdx +++ b/guides/send-logs-from-apache-log4j.mdx @@ -10,7 +10,7 @@ enlargeLogo: true import Prerequisites from "/snippets/standard-prerequisites.mdx" import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx" -import ReplaceDomain from "/snippets/replace-domain.mdx" +import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx" Log4j is a Java logging framework developed by the Apache Software Foundation and widely used in the Java community. This page covers how to get started with Log4j, configure it to forward log messages to Fluentd, and send logs to Axiom. @@ -354,7 +354,7 @@ To configure Fluentd, create a configuration file. Create a new file named `flue @type http - endpoint https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME + endpoint https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME headers {"Authorization":"Bearer API_TOKEN"} json_array true @@ -371,7 +371,7 @@ To configure Fluentd, create a configuration file. 
Create a new file named `flue - + This configuration does the following: diff --git a/guides/send-logs-from-dotnet.mdx b/guides/send-logs-from-dotnet.mdx index 4feefe59..be3b92f1 100644 --- a/guides/send-logs-from-dotnet.mdx +++ b/guides/send-logs-from-dotnet.mdx @@ -9,7 +9,7 @@ logoId: 'dotnet' import Prerequisites from "/snippets/standard-prerequisites.mdx" import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx" -import ReplaceDomain from "/snippets/replace-domain.mdx" +import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx" - [Install the .NET SDK](https://dotnet.microsoft.com/download). @@ -51,8 +51,8 @@ public static class AxiomLogger // Specify the Axiom dataset name and construct the API endpoint URL var datasetName = "DATASET_NAME"; - var axiomDomain = "AXIOM_DOMAIN"; - var axiomUri = $"https://{axiomDomain}/v1/ingest/{datasetName}"; + var axiomEdgeDomain = "AXIOM_EDGE_DOMAIN"; + var axiomUri = $"https://{axiomEdgeDomain}/v1/ingest/{datasetName}"; // Replace with your Axiom API token var apiToken = "API_TOKEN"; // Ensure your API token is correct @@ -95,7 +95,7 @@ public static class AxiomLogger - + ### Configure the main program diff --git a/guides/send-logs-from-ruby-on-rails.mdx b/guides/send-logs-from-ruby-on-rails.mdx index 46c5e945..92c4dcbf 100644 --- a/guides/send-logs-from-ruby-on-rails.mdx +++ b/guides/send-logs-from-ruby-on-rails.mdx @@ -8,7 +8,7 @@ logoId: 'ruby' --- import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx" -import ReplaceDomain from "/snippets/replace-domain.mdx" +import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx" import Prerequisites from "/snippets/standard-prerequisites.mdx" This guide provides step-by-step instructions on how to send logs from a Ruby on Rails app to Axiom using the Faraday library. By following this guide, you configure your Rails app to send logs to Axiom, allowing you to monitor and analyze your app logs effectively. @@ -48,7 +48,7 @@ require 'json' class AxiomLogger def self.send_log(log_data) dataset_name = "DATASET_NAME" - axiom_ingest_api_url = "https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME" + axiom_ingest_api_url = "https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME" ingest_token = "API_TOKEN" conn = Faraday.new(url: axiom_ingest_api_url) do |faraday| @@ -75,7 +75,7 @@ end - + ## Test with the Axiom logger diff --git a/send-data/aws-firehose.mdx b/send-data/aws-firehose.mdx index 82ee80b7..2acf6a1c 100644 --- a/send-data/aws-firehose.mdx +++ b/send-data/aws-firehose.mdx @@ -10,7 +10,7 @@ logoId: 'firehose' import AwsDisclaimer from "/snippets/aws-disclaimer.mdx" import Prerequisites from "/snippets/standard-prerequisites.mdx" import ReplaceDataset from "/snippets/replace-dataset.mdx" -import ReplaceDomain from "/snippets/replace-domain.mdx" +import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx" Amazon Data Firehose is a service for delivering real-time streaming data to different destinations. Send event data from Amazon Data Firehose to Axiom to analyse and monitor your data efficiently. @@ -23,9 +23,9 @@ Amazon Data Firehose is a service for delivering real-time streaming data to dif 1. In Axiom, determine the ID of the dataset you’ve created. 1. In Amazon Data Firehose, create an HTTP endpoint destination. For more information, see the [Amazon Data Firehose documentation](https://docs.aws.amazon.com/firehose/latest/dev/create-destination.html#create-destination-http). -1. Set HTTP endpoint URL to `https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME/firehose`. +1. 
Set HTTP endpoint URL to `https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME/firehose`. - + 1. Set the access key to the Axiom API token. diff --git a/send-data/aws-firelens.mdx b/send-data/aws-firelens.mdx index f4cfe104..a78bf043 100644 --- a/send-data/aws-firelens.mdx +++ b/send-data/aws-firelens.mdx @@ -8,7 +8,7 @@ logoId: 'awsEC2' --- import Prerequisites from "/snippets/standard-prerequisites.mdx" -import ReplaceDomain from "/snippets/replace-domain.mdx" +import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx" import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx" import AwsDisclaimer from "/snippets/aws-disclaimer.mdx" @@ -40,7 +40,7 @@ You’ll typically define this in a file called `fluent-bit.conf`: [OUTPUT] Name http Match * - Host AXIOM_DOMAIN + Host AXIOM_EDGE_DOMAIN Port 443 URI /v1/ingest/DATASET_NAME Format json_lines @@ -52,7 +52,7 @@ You’ll typically define this in a file called `fluent-bit.conf`: ``` - + diff --git a/send-data/aws-iot-rules.mdx b/send-data/aws-iot-rules.mdx index 901eecc4..14222113 100644 --- a/send-data/aws-iot-rules.mdx +++ b/send-data/aws-iot-rules.mdx @@ -9,7 +9,7 @@ logoId: 'awsIotCore' import AwsDisclaimer from "/snippets/aws-disclaimer.mdx" import Prerequisites from "/snippets/standard-prerequisites.mdx" -import ReplaceDomain from "/snippets/replace-domain.mdx" +import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx" import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx" @@ -29,10 +29,10 @@ import requests # Import the requests module to make HTTP requests def lambda_handler(event, context): # Retrieve the dataset name and the Axiom domain from the environment variables dataset_name = os.environ['DATASET_NAME'] - axiom_domain = os.environ['AXIOM_DOMAIN'] + axiom_edge_domain = os.environ['AXIOM_EDGE_DOMAIN'] # Construct the Axiom API URL using the dataset name - axiom_api_url = f"https://{axiom_domain}/v1/ingest/{dataset_name}" + axiom_api_url = f"https://{axiom_edge_domain}/v1/ingest/{dataset_name}" # Retrieve the Axiom API token from the environment variable api_token = os.environ['API_TOKEN'] @@ -61,9 +61,8 @@ def lambda_handler(event, context): ``` In the environment variables section of the Lambda function configuration, add the following environment variables: -- `DATASET_NAME` is the name of the Axiom dataset where you want to send data. -- `AXIOM_DOMAIN` is the Axiom domain that your organization uses. Replace `AXIOM_DOMAIN` with `us-east-1.aws.edge.axiom.co` for the US region, or `eu-central-1.aws.edge.axiom.co` for the EU region. For more information, see [Regions](/reference/regions). -- `API_TOKEN` is the Axiom API token you have generated. For added security, store the API token in an environment variable. + + This example uses Python for the Lambda function. To use another language, change the code above accordingly. diff --git a/send-data/aws-s3.mdx b/send-data/aws-s3.mdx index 7a3d93b3..7d3a6427 100644 --- a/send-data/aws-s3.mdx +++ b/send-data/aws-s3.mdx @@ -9,6 +9,9 @@ logoId: 'awsbucket' import AwsDisclaimer from "/snippets/aws-disclaimer.mdx" import Prerequisites from "/snippets/standard-prerequisites.mdx" +import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx" +import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx" + This page explains how to set up an AWS Lambda function to send logs from an S3 bucket to Axiom. The Lambda function triggers when a new log file is uploaded to an S3 bucket, processes the log data, and sends it to Axiom. 
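Note: the Lambda handlers in the aws-iot-rules.mdx and aws-s3.mdx hunks read `DATASET_NAME`, `AXIOM_EDGE_DOMAIN`, and `API_TOKEN` from the function’s environment. A minimal sketch of wiring those up outside the console (the function name `axiom-forwarder` is a placeholder, not part of the docs):

```bash
# Hypothetical example: set the environment variables the Lambda handler expects.
# Replace the function name, edge domain, dataset, and token with real values.
aws lambda update-function-configuration \
  --function-name axiom-forwarder \
  --environment "Variables={DATASET_NAME=DATASET_NAME,AXIOM_EDGE_DOMAIN=us-east-1.aws.edge.axiom.co,API_TOKEN=API_TOKEN}"
```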
@@ -102,8 +105,8 @@ def lambda_handler(event, context): # Prepare Axiom API request dataset_name = os.environ['DATASET_NAME'] - axiom_domain = os.environ['AXIOM_DOMAIN'] - axiom_api_url = f"https://{axiom_domain}/v1/ingest/{dataset_name}" + axiom_edge_domain = os.environ['AXIOM_EDGE_DOMAIN'] + axiom_api_url = f"https://{axiom_edge_domain}/v1/ingest/{dataset_name}" api_token = os.environ['API_TOKEN'] axiom_headers = { "Authorization": f"Bearer {api_token}", @@ -124,9 +127,8 @@ def lambda_handler(event, context): In the environment variables section of the Lambda function configuration, add the following environment variables: -- `DATASET_NAME` is the name of the Axiom dataset where you want to send data. -- `AXIOM_DOMAIN` is the Axiom domain that your organization uses. Replace `AXIOM_DOMAIN` with `us-east-1.aws.edge.axiom.co` for the US region, or `eu-central-1.aws.edge.axiom.co` for the EU region. For more information, see [Regions](/reference/regions). -- `API_TOKEN` is the Axiom API token you have generated. For added security, store the API token in an environment variable. + + This example uses Python for the Lambda function. To use another language, change the code above accordingly. diff --git a/send-data/cribl.mdx b/send-data/cribl.mdx index d1aa465a..5a410849 100644 --- a/send-data/cribl.mdx +++ b/send-data/cribl.mdx @@ -9,7 +9,7 @@ isPopular: true popularityOrder: 10 --- -import ReplaceDomain from "/snippets/replace-domain.mdx" +import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx" import ReplaceDataset from "/snippets/replace-dataset.mdx" import Prerequisites from "/snippets/standard-prerequisites.mdx" import ReplaceToken from "/snippets/replace-token.mdx" @@ -36,10 +36,10 @@ Open Cribl’s UI and navigate to **Destinations > HTTP**. Click on `+` Add New - **Name:** Choose a name for the destination. -- **Endpoint URL:** The URL of your Axiom log ingest endpoint `https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME`. +- **Endpoint URL:** The URL of your Axiom log ingest endpoint `https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME`. - + diff --git a/send-data/fluent-bit.mdx b/send-data/fluent-bit.mdx index 5bef3e19..7ce18585 100644 --- a/send-data/fluent-bit.mdx +++ b/send-data/fluent-bit.mdx @@ -10,7 +10,7 @@ popularityOrder: 11 --- import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx" -import ReplaceDomain from "/snippets/replace-domain.mdx" +import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx" import Prerequisites from "/snippets/standard-prerequisites.mdx" Fluent Bit is an open-source log processor and forwarder that allows you to collect any data like metrics and logs from different sources, enrich them with filters, and send them to multiple destinations like Axiom. 
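Note: before pointing Cribl or Fluent Bit at the edge endpoint used in these hunks, a one-off request can confirm the URL and token; a successful call returns the `IngestStatus` object (`ingested`, `failed`, `processedBytes`, and so on) defined in the `v1-edge.json` spec added later in this series. This is a sketch only: the region and dataset name are placeholders.

```bash
# Illustrative smoke test of the edge ingest endpoint targeted by the Cribl and Fluent Bit configs.
curl -s -X 'POST' 'https://us-east-1.aws.edge.axiom.co/v1/ingest/DATASET_NAME' \
  -H 'Authorization: Bearer API_TOKEN' \
  -H 'Content-Type: application/x-ndjson' \
  -d '{"source":"edge-smoke-test","message":"hello from the edge"}'
```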
@@ -27,7 +27,7 @@ Fluent Bit is an open-source log processor and forwarder that allows you to coll [OUTPUT] Name http Match * - Host AXIOM_DOMAIN + Host AXIOM_EDGE_DOMAIN Port 443 URI /v1/ingest/DATASET_NAME Header Authorization Bearer API_TOKEN @@ -39,6 +39,6 @@ Fluent Bit is an open-source log processor and forwarder that allows you to coll ``` - + diff --git a/send-data/fluentd.mdx b/send-data/fluentd.mdx index 0f3ccee0..f0c2249c 100644 --- a/send-data/fluentd.mdx +++ b/send-data/fluentd.mdx @@ -12,7 +12,7 @@ popularityOrder: 12 import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx" import ReplaceDataset from "/snippets/replace-dataset.mdx" import Prerequisites from "/snippets/standard-prerequisites.mdx" -import ReplaceDomain from "/snippets/replace-domain.mdx" +import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx" Fluentd is an open-source log collector that allows you to collect, aggregate, process, analyze, and route log files. @@ -49,7 +49,7 @@ The example below shows a Fluentd configuration that sends data to Axiom using t @type http - endpoint https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME + endpoint https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME # Authorization Bearer should be an ingest token headers {"Authorization": "Bearer API_TOKEN"} json_array false @@ -66,7 +66,7 @@ The example below shows a Fluentd configuration that sends data to Axiom using t ``` - + diff --git a/send-data/heroku-log-drains.mdx b/send-data/heroku-log-drains.mdx index 24ba82ce..23a2413b 100644 --- a/send-data/heroku-log-drains.mdx +++ b/send-data/heroku-log-drains.mdx @@ -9,7 +9,7 @@ logoId: 'heroku' import Prerequisites from "/snippets/standard-prerequisites.mdx" import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx" -import ReplaceDomain from "/snippets/replace-domain.mdx" +import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx" Log Drains make it easy to collect logs from your deployments and forward them to archival, search, and alerting services by sending them via HTTPS, HTTP, TLS, and TCP. @@ -28,12 +28,12 @@ Sign up and login to your account on [Heroku](https://heroku.com/), and download Heroku log drains configuration consists of three main components ```bash -heroku drains:add https://axiom:API_TOKEN@AXIOM_DOMAIN/v1/ingest/DATASET_NAME -a HEROKU_APPLICATION_NAME +heroku drains:add https://axiom:API_TOKEN@AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME -a HEROKU_APPLICATION_NAME ``` - + Replace `HEROKU_APPLICATION_NAME` with the name of the app that you created on the Heroku dashboard or the Heroku CLI. diff --git a/send-data/kubernetes.mdx b/send-data/kubernetes.mdx index 57c691dd..8ff21f9f 100644 --- a/send-data/kubernetes.mdx +++ b/send-data/kubernetes.mdx @@ -8,7 +8,7 @@ logoId: 'kubernetes' --- import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx" -import ReplaceDomain from "/snippets/replace-domain.mdx" +import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx" import Prerequisites from "/snippets/standard-prerequisites.mdx" Axiom makes it easy to collect, analyze, and monitor logs from your Kubernetes clusters. Integrate popular tools like Filebeat, Vector, or Fluent Bit with Axiom to send your cluster logs. 
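Note: after running the updated `heroku drains:add` command from the heroku-log-drains.mdx hunk, the drain can be checked or replaced from the CLI. A sketch, assuming the same placeholders as the docs; the drain URL passed to `drains:remove` must match the one that was added.

```bash
# Confirm the Axiom drain was registered for the app.
heroku drains -a HEROKU_APPLICATION_NAME

# Remove it again if the dataset, token, or region needs to change.
heroku drains:remove 'https://axiom:API_TOKEN@us-east-1.aws.edge.axiom.co/v1/ingest/DATASET_NAME' -a HEROKU_APPLICATION_NAME
```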
@@ -113,7 +113,7 @@ spec: - -e env: - name: AXIOM_HOST - value: AXIOM_DOMAIN + value: AXIOM_EDGE_DOMAIN - name: AXIOM_DATASET_NAME value: DATASET_NAME - name: AXIOM_API_TOKEN @@ -276,7 +276,7 @@ spec: - /etc/vector/ env: - name: AXIOM_HOST - value: AXIOM_DOMAIN + value: AXIOM_EDGE_DOMAIN - name: AXIOM_DATASET_NAME value: DATASET_NAME - name: AXIOM_API_TOKEN @@ -464,7 +464,7 @@ spec: image: fluent/fluent-bit:1.9.9 env: - name: AXIOM_HOST - value: AXIOM_DOMAIN + value: AXIOM_EDGE_DOMAIN - name: AXIOM_DATASET_NAME value: DATASET_NAME - name: AXIOM_API_TOKEN @@ -495,7 +495,7 @@ spec: ``` - + diff --git a/send-data/methods.mdx b/send-data/methods.mdx index 46ba0e30..e9929c21 100644 --- a/send-data/methods.mdx +++ b/send-data/methods.mdx @@ -5,20 +5,20 @@ sidebarTitle: "Methods" keywords: ["send data", "ingest", "methods", "integrations", "opentelemetry", "vector", "fluentd"] --- -import ReplaceDomain from "/snippets/replace-domain.mdx" +import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx" import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx" The easiest way to send your first event data to Axiom is with a direct HTTP request using a tool like `cURL`. ```shell -curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ +curl -X 'POST' 'https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/x-ndjson' \ -d '{ "http": { "request": { "method": "GET", "duration_ms": 231 }, "response": { "body": { "size": 3012 } } }, "url": { "path": "/download" } }' ``` - + diff --git a/send-data/tremor.mdx b/send-data/tremor.mdx index 73fdc0f3..83a83552 100644 --- a/send-data/tremor.mdx +++ b/send-data/tremor.mdx @@ -8,7 +8,7 @@ logoId: 'tremor' --- import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx" -import ReplaceDomain from "/snippets/replace-domain.mdx" +import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx" Axiom provides a unique way of ingesting [Tremor logs](https://www.tremor.rs/) into Axiom. With your connector definitions, you can configure Tremor connectors and events components to interact with your external systems, such as databases, message queues, or APIs, and eventually ingest data from these sources into Axiom. @@ -45,7 +45,7 @@ flow token with config = { - "url": "https://AXIOM_DOMAIN/v1/ingest/#{args.dataset}", + "url": "https://AXIOM_EDGE_DOMAIN/v1/ingest/#{args.dataset}", "tls": true, "method": "POST", "headers": { @@ -77,7 +77,7 @@ This assumes you have set `TREMOR_PATH` in your environment pointing to `tremor- - + ## Configuration using Syslog diff --git a/snippets/replace-domain.mdx b/snippets/replace-domain.mdx index 1d74cafc..3f138f33 100644 --- a/snippets/replace-domain.mdx +++ b/snippets/replace-domain.mdx @@ -1 +1 @@ -Replace `AXIOM_DOMAIN` with `us-east-1.aws.edge.axiom.co` if your organization uses the US region, and with `eu-central-1.aws.edge.axiom.co` if your organization uses the EU region. For more information, see [Regions](/reference/regions). +Replace `AXIOM_DOMAIN` with `api.axiom.co` if your organization uses the US region. For more information, see [Regions](/reference/regions). 
\ No newline at end of file diff --git a/snippets/replace-edge-domain.mdx b/snippets/replace-edge-domain.mdx new file mode 100644 index 00000000..39cb9659 --- /dev/null +++ b/snippets/replace-edge-domain.mdx @@ -0,0 +1 @@ +Replace `AXIOM_EDGE_DOMAIN` with `us-east-1.aws.edge.axiom.co` if your organization uses the US region, and with `eu-central-1.aws.edge.axiom.co` if your organization uses the EU region. For more information, see [Regions](/reference/regions). From e4f2c42f802e446f4375b71937eb16e07eeb8a81 Mon Sep 17 00:00:00 2001 From: Mano Toth Date: Tue, 11 Nov 2025 11:57:22 +0100 Subject: [PATCH 10/14] Add Edge v1 endpoint, update links to it --- docs.json | 6 + reference/limits.mdx | 2 +- reference/optimize-usage.mdx | 2 +- restapi/endpoints/ingestToDataset.mdx | 4 + restapi/ingest.mdx | 8 +- restapi/introduction.mdx | 2 +- restapi/versions/v1-edge.json | 345 ++++++++++++++++++++++++++ 7 files changed, 362 insertions(+), 7 deletions(-) create mode 100644 restapi/endpoints/ingestToDataset.mdx create mode 100644 restapi/versions/v1-edge.json diff --git a/docs.json b/docs.json index 175d6c34..1121ffd0 100644 --- a/docs.json +++ b/docs.json @@ -621,6 +621,12 @@ "restapi/endpoints/deleteDataset" ] }, + { + "group": "Edge endpoints", + "pages": [ + "restapi/endpoints/ingestToDataset" + ] + }, { "group": "Map field endpoints", "pages": [ diff --git a/reference/limits.mdx b/reference/limits.mdx index c72ecdd0..b9e98506 100644 --- a/reference/limits.mdx +++ b/reference/limits.mdx @@ -74,7 +74,7 @@ If you try to ingest data that exceeds these limits, Axiom does the following: Axiom creates the following two fields automatically for a new dataset: -- `_time` is the timestamp of the event. If the data you ingest doesn’t have a `_time` field, Axiom assigns the time of the data ingest to the events. If you ingest data using the [Ingest data](/restapi/endpoints/ingestIntoDataset) API endpoint, you can specify the timestamp field with the [timestamp-field](/restapi/endpoints/ingestIntoDataset#parameter-timestamp-field) parameter. +- `_time` is the timestamp of the event. If the data you ingest doesn’t have a `_time` field, Axiom assigns the time of the data ingest to the events. If you ingest data using the [Ingest data](/restapi/endpoints/ingestToDataset) API endpoint, you can specify the timestamp field with the [timestamp-field](/restapi/endpoints/ingestToDataset#parameter-timestamp-field) parameter. - `_sysTime` is the time when you ingested the data. In most cases, use `_time` to define the timestamp of events. In rare cases, if you experience clock skews on your event-producing systems, `_sysTime` can be useful. diff --git a/reference/optimize-usage.mdx b/reference/optimize-usage.mdx index 0033d4ff..32e2c464 100644 --- a/reference/optimize-usage.mdx +++ b/reference/optimize-usage.mdx @@ -6,7 +6,7 @@ keywords: ['query', 'gb hours'] ## Optimize data loading -When you send data via the [Ingest data](/restapi/endpoints/ingestIntoDataset) API endpoint, ingest volume is based on the uncompressed size of the HTTP request. In this case, body size dominates and metadata overhead is usually negligible. +When you send data via the [Ingest data](/restapi/endpoints/ingestToDataset) API endpoint, ingest volume is based on the uncompressed size of the HTTP request. In this case, body size dominates and metadata overhead is usually negligible. 
If you ingest data via API, using the CSV format comes with the following trade-offs: diff --git a/restapi/endpoints/ingestToDataset.mdx b/restapi/endpoints/ingestToDataset.mdx new file mode 100644 index 00000000..3733d2f9 --- /dev/null +++ b/restapi/endpoints/ingestToDataset.mdx @@ -0,0 +1,4 @@ +--- +title: Ingest data to dataset +openapi: "v1-edge post /ingest/{dataset-id}" +--- diff --git a/restapi/ingest.mdx b/restapi/ingest.mdx index 6eb44dcd..74caca51 100644 --- a/restapi/ingest.mdx +++ b/restapi/ingest.mdx @@ -23,7 +23,7 @@ For more information on other ingest options, see [Send data](send-data/methods) For an introduction to the basics of the Axiom API and to the authentication options, see [Introduction to Axiom API](/restapi/introduction). -The API requests on this page use the ingest data endpoint. For more information, see the [API reference](/restapi/endpoints/ingestIntoDataset). +The API requests on this page use the ingest data endpoint. For more information, see the [API reference](/restapi/endpoints/ingestToDataset). @@ -32,7 +32,7 @@ The API requests on this page use the ingest data endpoint. For more information To send data to Axiom in JSON format: 1. Encode the events as JSON objects. 1. Enter the array of JSON objects into the body of the API request. -1. Optional: In the body of the request, set optional parameters such as `timestamp-field` and `timestamp-format`. For more information, see the [ingest data API reference](/restapi/endpoints/ingestIntoDataset). +1. Optional: In the body of the request, set optional parameters such as `timestamp-field` and `timestamp-format`. For more information, see the [ingest data API reference](/restapi/endpoints/ingestToDataset). 1. Set the `Content-Type` header to `application/json`. 1. Set the `Authorization` header to `Bearer API_TOKEN`. 1. Send the POST request to `https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME`. @@ -199,7 +199,7 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ To send data to Axiom in NDJSON format: 1. Encode the events as JSON objects. 1. Enter each JSON object in a separate line into the body of the API request. -1. Optional: In the body of the request, set optional parameters such as `timestamp-field` and `timestamp-format`. For more information, see the [ingest data API reference](/restapi/endpoints/ingestIntoDataset). +1. Optional: In the body of the request, set optional parameters such as `timestamp-field` and `timestamp-format`. For more information, see the [ingest data API reference](/restapi/endpoints/ingestToDataset). 1. Set the `Content-Type` header to either `application/json` or `application/x-ndjson`. 1. Set the `Authorization` header to `Bearer API_TOKEN`. Replace `API_TOKEN` with the Axiom API token you have generated. 1. Send the POST request to `https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME`. Replace `DATASET_NAME` with the name of the Axiom dataset where you want to send data. @@ -246,7 +246,7 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ To send data to Axiom in JSON format: 1. Encode the events in CSV format. The first line specifies the field names separated by commas. Subsequent new lines specify the values separated by commas. 1. Enter the CSV representation in the body of the API request. -1. Optional: In the body of the request, set optional parameters such as `timestamp-field` and `timestamp-format`. For more information, see the [ingest data API reference](/restapi/endpoints/ingestIntoDataset). +1. 
Optional: In the body of the request, set optional parameters such as `timestamp-field` and `timestamp-format`. For more information, see the [ingest data API reference](/restapi/endpoints/ingestToDataset). 1. Set the `Content-Type` header to `text/csv`. 1. Set the `Authorization` header to `Bearer API_TOKEN`. Replace `API_TOKEN` with the Axiom API token you have generated. 1. Send the POST request to `https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME`. Replace `DATASET_NAME` with the name of the Axiom dataset where you want to send data. diff --git a/restapi/introduction.mdx b/restapi/introduction.mdx index 9bccc5ce..d6429d05 100644 --- a/restapi/introduction.mdx +++ b/restapi/introduction.mdx @@ -40,7 +40,7 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ -For more information, see [Send data to Axiom via API](/restapi/ingest) and [Ingest data endpoint](/restapi/endpoints/ingestIntoDataset). +For more information, see [Send data to Axiom via API](/restapi/ingest) and [Ingest data endpoint](/restapi/endpoints/ingestToDataset). ## Regions diff --git a/restapi/versions/v1-edge.json b/restapi/versions/v1-edge.json new file mode 100644 index 00000000..0b4ac663 --- /dev/null +++ b/restapi/versions/v1-edge.json @@ -0,0 +1,345 @@ +{ + "openapi": "3.0.0", + "info": { + "description": "A public and stable API for interacting with axiom services", + "title": "Axiom", + "termsOfService": "http://axiom.co/terms", + "contact": { + "name": "Axiom support team", + "url": "https://axiom.co", + "email": "hello@axiom.co" + }, + "license": { + "name": "Apache 2.0", + "url": "https://www.apache.org/licenses/LICENSE-2.0.html" + }, + "version": "2.0.0" + }, + "servers": [ + { + "url": "https://{axiom-domain}/v1/" + } + ], + "paths": { + "/ingest/{dataset-id}": { + "post": { + "security": [ + { + "Auth": [ + "ingest|create" + ] + } + ], + "tags": [ + "ingest" + ], + "operationId": "ingestToDataset", + "parameters": [ + { + "name": "dataset-id", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "description": "The name of the field to use as the timestamp. If not specified, the timestamp will be the time the event was received by Axiom.", + "name": "timestamp-field", + "in": "query", + "schema": { + "type": "string" + } + }, + { + "description": "The date-time format of the timestamp field. The reference time is `Mon Jan 2 15:04:05 -0700 MST 2006`, as specified in https://pkg.go.dev/time/?tab=doc#Parse", + "name": "timestamp-format", + "in": "query", + "schema": { + "type": "string" + } + }, + { + "description": "The delimiter to use when parsing CSV data. If not specified, the default delimiter is `,`.", + "name": "csv-delimiter", + "in": "query", + "schema": { + "type": "string" + } + }, + { + "description": "A list of optional comma separated fields to use for CSV ingestion. 
If not specified, the first line of the CSV will be used as the field names.", + "name": "X-Axiom-CSV-Fields", + "in": "header", + "style": "simple", + "schema": { + "type": "array", + "items": { + "type": "string" + } + } + }, + { + "description": "An optional JSON encoded object with additional labels to add to all events in the request", + "name": "X-Axiom-Event-Labels", + "in": "header", + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "description": "Data you want to send to Axiom.\nSupported formats: JSON, NDJSON, CSV.\nUpload the data in a file or send it in the payload of the request.", + "type": "string", + "format": "binary" + } + }, + "application/nd-json": { + "schema": { + "description": "Data you want to send to Axiom.\nSupported formats: JSON, NDJSON, CSV.\nUpload the data in a file or send it in the payload of the request.", + "type": "string", + "format": "binary" + } + }, + "text/csv": { + "schema": { + "description": "Data you want to send to Axiom.\nSupported formats: JSON, NDJSON, CSV.\nUpload the data in a file or send it in the payload of the request.", + "type": "string", + "format": "binary" + } + }, + "application/x-ndjson": { + "schema": { + "description": "Data you want to send to Axiom.\nSupported formats: JSON, NDJSON, CSV.\nUpload the data in a file or send it in the payload of the request.", + "type": "string", + "format": "binary" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "IngestResult", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/IngestStatus" + } + } + } + } + }, + "x-axiom-limit-requests": true, + "x-axiom-nocompress": true, + "x-axiom-not-suspended": true, + "x-axiom-trace-sampling": "0.01" + } + } + }, + "components": { + "parameters": { + "Limit": { + "name": "limit", + "in": "query", + "schema": { + "type": "integer", + "format": "int64", + "minimum": 1, + "maximum": 1000, + "default": 100 + } + }, + "Offset": { + "name": "offset", + "in": "query", + "schema": { + "type": "integer", + "format": "int64", + "minimum": 0, + "default": 0 + } + } + }, + "requestBodies": { + "ingestDatasetOpenTelemetryLogsPayload": { + "content": { + "application/json": { + "schema": { + "description": "Data you want to send to Axiom.\nSupported formats: JSON, Protobuf.\nUpload the data in a file or send it in the payload of the request.", + "type": "string", + "format": "binary" + } + }, + "application/x-protobuf": { + "schema": { + "description": "Data you want to send to Axiom.\nSupported formats: JSON, Protobuf.\nUpload the data in a file or send it in the payload of the request.", + "type": "string", + "format": "binary" + } + } + }, + "required": true + } + }, + "securitySchemes": { + "Auth": { + "type": "oauth2", + "flows": { + "authorizationCode": { + "authorizationUrl": "https://accounts.google.com/o/oauth2/v2/auth", + "tokenUrl": "https://www.googleapis.com/oauth2/v4/token", + "scopes": {} + } + } + }, + "Shared": { + "type": "apiKey", + "name": "tk", + "in": "query" + } + }, + "schemas": { + "IngestFailure": { + "type": "object", + "required": [ + "error", + "timestamp" + ], + "properties": { + "error": { + "type": "string" + }, + "timestamp": { + "type": "string", + "format": "date-time" + } + } + }, + "IngestStatus": { + "type": "object", + "required": [ + "ingested", + "failed", + "processedBytes", + "blocksCreated", + "walLength" + ], + "properties": { + "blocksCreated": { + "type": "integer", + "format": 
"uint32" + }, + "failed": { + "type": "integer", + "format": "uint64" + }, + "failures": { + "type": "array", + "items": { + "$ref": "#/components/schemas/IngestFailure" + } + }, + "ingested": { + "type": "integer", + "format": "uint64" + }, + "processedBytes": { + "type": "integer", + "format": "uint64" + }, + "walLength": { + "type": "integer", + "format": "uint32" + } + } + }, + "OTELResponse": { + "description": "The response from the ingest API according to the OpenTelemetry specification.", + "type": "string", + "format": "binary" + }, + "elasticHealth": { + "type": "object", + "properties": { + "cluster_name": { + "type": "string" + }, + "status": { + "type": "string" + } + } + }, + "elasticLicenseInfo": { + "type": "object", + "properties": { + "expiry_date": { + "type": "string", + "format": "date-time", + "nullable": true + }, + "expiry_date_in_millis": { + "type": "integer", + "format": "int64", + "nullable": true + }, + "issue_date": { + "type": "string", + "format": "date-time" + }, + "issue_date_in_millis": { + "type": "integer", + "format": "int64" + }, + "issued_to": { + "type": "string" + }, + "issuer": { + "type": "string" + }, + "max_nodes": { + "type": "integer" + }, + "start_date_in_millis": { + "type": "integer", + "format": "int64" + }, + "status": { + "type": "string" + }, + "type": { + "type": "string" + }, + "uid": { + "type": "string" + } + } + }, + "elasticLicenseResponse": { + "type": "object", + "properties": { + "license": { + "$ref": "#/components/schemas/elasticLicenseInfo" + } + } + }, + "elasticVersion": { + "type": "object", + "properties": { + "version": { + "type": "object", + "properties": { + "number": { + "type": "string" + } + } + } + } + } + } + } + } + \ No newline at end of file From fa562e49acaab72642a4f6dfe317d3a992007c2e Mon Sep 17 00:00:00 2001 From: Benen Harrington Date: Tue, 11 Nov 2025 11:05:06 +0000 Subject: [PATCH 11/14] Vale feedback --- reference/regions.mdx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/reference/regions.mdx b/reference/regions.mdx index 3378a82f..2cb57096 100644 --- a/reference/regions.mdx +++ b/reference/regions.mdx @@ -21,7 +21,7 @@ Axiom is developing support for additional regions. [Contact Axiom](https://www. -Query execution currently happens in the US region only. This means that data in the EU region can leave the region when queried. As we continue to build out support for multiple regions, we will update query behavior to ensure that query execution happens in the same region as the storage region of the data being queried. +Query execution currently happens in the US region only. This means that data in the EU region can leave the region when queried. As Axiom continues to build out support for multiple regions, query behavior will be updated to ensure that query execution happens in the same region as the storage region of the data being queried. ## Default region @@ -34,7 +34,7 @@ To determine your organization’s default region: 1. Find your organization’s default region in the **Region** section. -Axiom is building support for storing data outside your default region on a dataset-by-dataset basis. +Axiom is building support for storing data outside your default region on a dataset-by-dataset basis. 
## Choose region From c0302edc1ddd06f889878c3793065b5649fa6ed4 Mon Sep 17 00:00:00 2001 From: Mano Toth Date: Tue, 11 Nov 2025 12:05:44 +0100 Subject: [PATCH 12/14] Fix Laravel --- guides/send-logs-from-laravel.mdx | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/guides/send-logs-from-laravel.mdx b/guides/send-logs-from-laravel.mdx index a6fe146d..f4aad715 100644 --- a/guides/send-logs-from-laravel.mdx +++ b/guides/send-logs-from-laravel.mdx @@ -9,7 +9,7 @@ logoId: 'laravel' import Prerequisites from "/snippets/standard-prerequisites.mdx" import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx" -import ReplaceDomain from "/snippets/replace-domain.mdx" +import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx" This guide explains integrating Axiom as a logging solution in a Laravel app. Using Axiom’s capabilities with a custom log channel, you can efficiently send your app’s logs to Axiom for storage, analysis, and monitoring. This integration uses Monolog, Laravel’s underlying logging library, to create a custom logging handler that forwards logs to Axiom. @@ -133,14 +133,14 @@ The `default` configuration specifies the primary channel Laravel uses for loggi LOG_CHANNEL=axiom AXIOM_API_TOKEN=API_TOKEN AXIOM_DATASET=DATASET_NAME -AXIOM_DOMAIN=us-east-1.aws.edge.axiom.co +AXIOM_DOMAIN=AXIOM_EDGE_DOMAIN LOG_LEVEL=debug LOG_DEPRECATIONS_CHANNEL=null ``` - + ### Deprecations log channel @@ -173,7 +173,7 @@ The heart of the `logging.php` file lies within the **`channels`** array where y 'with' => [ 'apiToken' => env('AXIOM_API_TOKEN'), 'dataset' => env('AXIOM_DATASET'), - 'region' => env('AXIOM_REGION', 'us-east-1.aws'), + 'axiomUrl' => env('AXIOM_DOMAIN'), ], ], ``` From 3b2f056ad48823631279860500d70c8eb758bd92 Mon Sep 17 00:00:00 2001 From: Mano Toth Date: Tue, 11 Nov 2025 12:07:47 +0100 Subject: [PATCH 13/14] Fix examples in V1 api --- restapi/versions/v1.json | 55 +++++++--------------------------------- 1 file changed, 9 insertions(+), 46 deletions(-) diff --git a/restapi/versions/v1.json b/restapi/versions/v1.json index 3fccfd51..7f07e5a7 100644 --- a/restapi/versions/v1.json +++ b/restapi/versions/v1.json @@ -447,63 +447,26 @@ } ], "requestBody": { - "description": "Data you want to send to Axiom in a supported format.", "content": { "application/json": { "schema": { - "type": "array", - "description": "a JSON or ND-JSON array of events to ingest", - "example": [ - { - "message": "Hello, World!", - "foo": "bar" - }, - { - "bar": "foz" - } - ], - "items": { - "type": "object", - "properties": {} - } + "type": "string", + "format": "binary", + "description": "Data you want to send to Axiom.\nSupported formats: JSON, NDJSON, CSV.\nUpload the data in a file or send it in the payload of the request." } }, "text/csv": { "schema": { - "type": "array", - "description": "a JSON or ND-JSON array of events to ingest", - "example": [ - { - "message": "Hello, World!", - "foo": "bar" - }, - { - "bar": "foz" - } - ], - "items": { - "type": "object", - "properties": {} - } + "type": "string", + "format": "binary", + "description": "Data you want to send to Axiom.\nSupported formats: JSON, NDJSON, CSV.\nUpload the data in a file or send it in the payload of the request." 
} }, "application/x-ndjson": { "schema": { - "type": "array", - "description": "a JSON or ND-JSON array of events to ingest", - "example": [ - { - "message": "Hello, World!", - "foo": "bar" - }, - { - "bar": "foz" - } - ], - "items": { - "type": "object", - "properties": {} - } + "type": "string", + "format": "binary", + "description": "Data you want to send to Axiom.\nSupported formats: JSON, NDJSON, CSV.\nUpload the data in a file or send it in the payload of the request." } } }, From 9eca50190ecc3da8d5dc84f55b9813315e603f8f Mon Sep 17 00:00:00 2001 From: Mano Toth Date: Tue, 11 Nov 2025 12:16:09 +0100 Subject: [PATCH 14/14] Fix more snippets --- guides/send-logs-from-dotnet.mdx | 4 ++-- restapi/ingest.mdx | 36 ++++++++++++++++---------------- restapi/introduction.mdx | 6 +++--- send-data/aws-firelens.mdx | 4 ++-- send-data/fluentd.mdx | 4 ++-- 5 files changed, 27 insertions(+), 27 deletions(-) diff --git a/guides/send-logs-from-dotnet.mdx b/guides/send-logs-from-dotnet.mdx index be3b92f1..9786ec4b 100644 --- a/guides/send-logs-from-dotnet.mdx +++ b/guides/send-logs-from-dotnet.mdx @@ -415,7 +415,7 @@ Set up NLog by creating an `NLog.config` file or configuring it programmatically flushTimeout="5000"> @@ -436,7 +436,7 @@ Set up NLog by creating an `NLog.config` file or configuring it programmatically - + ### Configure the main program diff --git a/restapi/ingest.mdx b/restapi/ingest.mdx index 74caca51..9fd6d405 100644 --- a/restapi/ingest.mdx +++ b/restapi/ingest.mdx @@ -7,7 +7,7 @@ keywords: ['axiom documentation', 'documentation', 'axiom', 'axiom api', 'rest a import Prerequisites from "/snippets/standard-prerequisites.mdx" import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx" -import ReplaceDomain from "/snippets/replace-domain.mdx" +import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx" The Axiom REST API accepts the following data formats: @@ -35,10 +35,10 @@ To send data to Axiom in JSON format: 1. Optional: In the body of the request, set optional parameters such as `timestamp-field` and `timestamp-format`. For more information, see the [ingest data API reference](/restapi/endpoints/ingestToDataset). 1. Set the `Content-Type` header to `application/json`. 1. Set the `Authorization` header to `Bearer API_TOKEN`. -1. Send the POST request to `https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME`. +1. Send the POST request to `https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME`. - + @@ -49,7 +49,7 @@ The following example request contains grouped events. 
The structure of the JSON **Example request** ```bash -curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ +curl -X 'POST' 'https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/json' \ -d '[ @@ -65,7 +65,7 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ ``` - + @@ -87,7 +87,7 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ **Example request** ```bash -curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ +curl -X 'POST' 'https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/json' \ -d '[ @@ -133,7 +133,7 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ ``` - + @@ -155,7 +155,7 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ **Example request** ```bash -curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ +curl -X 'POST' 'https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/json' \ -d '[{ "axiom": { @@ -177,7 +177,7 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ ``` - + @@ -202,17 +202,17 @@ To send data to Axiom in NDJSON format: 1. Optional: In the body of the request, set optional parameters such as `timestamp-field` and `timestamp-format`. For more information, see the [ingest data API reference](/restapi/endpoints/ingestToDataset). 1. Set the `Content-Type` header to either `application/json` or `application/x-ndjson`. 1. Set the `Authorization` header to `Bearer API_TOKEN`. Replace `API_TOKEN` with the Axiom API token you have generated. -1. Send the POST request to `https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME`. Replace `DATASET_NAME` with the name of the Axiom dataset where you want to send data. +1. Send the POST request to `https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME`. Replace `DATASET_NAME` with the name of the Axiom dataset where you want to send data. - + **Example request** ```bash -curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ +curl -X 'POST' 'https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/x-ndjson' \ -d '{"id":1,"name":"machala"} @@ -224,7 +224,7 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ ``` - + @@ -249,17 +249,17 @@ To send data to Axiom in JSON format: 1. Optional: In the body of the request, set optional parameters such as `timestamp-field` and `timestamp-format`. For more information, see the [ingest data API reference](/restapi/endpoints/ingestToDataset). 1. Set the `Content-Type` header to `text/csv`. 1. Set the `Authorization` header to `Bearer API_TOKEN`. Replace `API_TOKEN` with the Axiom API token you have generated. -1. Send the POST request to `https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME`. Replace `DATASET_NAME` with the name of the Axiom dataset where you want to send data. +1. Send the POST request to `https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME`. Replace `DATASET_NAME` with the name of the Axiom dataset where you want to send data. 
- + **Example request** ```bash -curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ +curl -X 'POST' 'https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: text/csv' \ -d 'user, name @@ -267,7 +267,7 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ ``` - + @@ -296,7 +296,7 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ ``` - + diff --git a/restapi/introduction.mdx b/restapi/introduction.mdx index d6429d05..df727f4f 100644 --- a/restapi/introduction.mdx +++ b/restapi/introduction.mdx @@ -11,7 +11,7 @@ popularityOrder: 1 --- import Prerequisites from "/snippets/minimal-prerequisites.mdx" -import ReplaceDomain from "/snippets/replace-domain.mdx" +import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx" import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx" You can use the Axiom API (Application Programming Interface) to send data to Axiom, query data, and manage resources programmatically. This page covers the basics for interacting with the Axiom API. @@ -25,7 +25,7 @@ Axiom API follows the REST architectural style and uses JSON for serialization. For example, the following curl command ingests data to an Axiom dataset: ```bash -curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ +curl -X 'POST' 'https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME' \ -H 'Authorization: Bearer API_TOKEN' \ -H 'Content-Type: application/json' \ -d '[ @@ -36,7 +36,7 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME' \ ``` - + diff --git a/send-data/aws-firelens.mdx b/send-data/aws-firelens.mdx index a78bf043..151b3259 100644 --- a/send-data/aws-firelens.mdx +++ b/send-data/aws-firelens.mdx @@ -104,14 +104,14 @@ Create the `fluentd.conf` file and add your configuration: @type http headers {"Authorization": "Bearer API_TOKEN"} data_type json - endpoint https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME + endpoint https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME sourcetype ecs ``` - + Read more about [Fluentd configuration here](/send-data/fluentd) diff --git a/send-data/fluentd.mdx b/send-data/fluentd.mdx index f0c2249c..bbb37789 100644 --- a/send-data/fluentd.mdx +++ b/send-data/fluentd.mdx @@ -257,7 +257,7 @@ The example below shows a Fluentd configuration that sends Scala data to Axiom # Send Scala logs using HTTP plugin to Axiom @type http - endpoint "#{ENV['FLUENT_HTTP_ENDPOINT'] || 'https://AXIOM_DOMAIN/v1/ingest/DATASET_NAME'}" + endpoint "#{ENV['FLUENT_HTTP_ENDPOINT'] || 'https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME'}" headers {"Authorization": "Bearer #{ENV['FLUENT_HTTP_TOKEN'] || ''}"} @type json @@ -275,7 +275,7 @@ The example below shows a Fluentd configuration that sends Scala data to Axiom ``` - +
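Note: the Fluentd output in the last hunk falls back to the `FLUENT_HTTP_ENDPOINT` and `FLUENT_HTTP_TOKEN` environment variables. An illustrative sketch of providing them when launching the agent (the config path and the EU region are assumptions for the example):

```bash
# Supply the endpoint and token the Fluentd config reads from the environment, then start the agent.
export FLUENT_HTTP_ENDPOINT='https://eu-central-1.aws.edge.axiom.co/v1/ingest/DATASET_NAME'
export FLUENT_HTTP_TOKEN='API_TOKEN'
fluentd -c /etc/fluent/fluent.conf
```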