diff --git a/apps/hex.mdx b/apps/hex.mdx
index 0bcebd0c..6fcb1a9c 100644
--- a/apps/hex.mdx
+++ b/apps/hex.mdx
@@ -6,7 +6,7 @@ sidebarTitle: 'Hex'
import Prerequisites from "/snippets/standard-prerequisites.mdx"
import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx"
-import ReplaceDomain from "/snippets/replace-domain.mdx"
+import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx"
import ReplaceDataset from "/snippets/replace-dataset.mdx"
Hex is a powerful collaborative data platform that allows you to create notebooks with Python/SQL code and interactive visualizations.
@@ -21,7 +21,7 @@ This page explains how to integrate Hex with Axiom to visualize geospatial data
Send your sample location data to Axiom using the API endpoint. For example, the following HTTP request sends sample robot location data with latitude, longitude, status, and satellite information.
```bash
-curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \
+curl -X 'POST' 'https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME' \
-H 'Authorization: Bearer API_TOKEN' \
-H 'Content-Type: application/json' \
-d '[
@@ -39,7 +39,7 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \
-<ReplaceDomain />
+<ReplaceEdgeDomain />
Verify that your data has been ingested correctly by running an APL query in the Axiom UI.
diff --git a/docs.json b/docs.json
index 175d6c34..1121ffd0 100644
--- a/docs.json
+++ b/docs.json
@@ -621,6 +621,12 @@
"restapi/endpoints/deleteDataset"
]
},
+ {
+ "group": "Edge endpoints",
+ "pages": [
+ "restapi/endpoints/ingestToDataset"
+ ]
+ },
{
"group": "Map field endpoints",
"pages": [
diff --git a/guides/send-logs-from-apache-log4j.mdx b/guides/send-logs-from-apache-log4j.mdx
index 5a4e9be1..7d237f1d 100644
--- a/guides/send-logs-from-apache-log4j.mdx
+++ b/guides/send-logs-from-apache-log4j.mdx
@@ -10,7 +10,7 @@ enlargeLogo: true
import Prerequisites from "/snippets/standard-prerequisites.mdx"
import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx"
-import ReplaceDomain from "/snippets/replace-domain.mdx"
+import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx"
Log4j is a Java logging framework developed by the Apache Software Foundation and widely used in the Java community. This page covers how to get started with Log4j, configure it to forward log messages to Fluentd, and send logs to Axiom.
@@ -354,7 +354,7 @@ To configure Fluentd, create a configuration file. Create a new file named `flue
@type http
- endpoint https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest
+ endpoint https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME
headers {"Authorization":"Bearer API_TOKEN"}
json_array true
@@ -371,7 +371,7 @@ To configure Fluentd, create a configuration file. Create a new file named `flue
-<ReplaceDomain />
+<ReplaceEdgeDomain />
This configuration does the following:
diff --git a/guides/send-logs-from-dotnet.mdx b/guides/send-logs-from-dotnet.mdx
index c154b0ea..9786ec4b 100644
--- a/guides/send-logs-from-dotnet.mdx
+++ b/guides/send-logs-from-dotnet.mdx
@@ -9,7 +9,7 @@ logoId: 'dotnet'
import Prerequisites from "/snippets/standard-prerequisites.mdx"
import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx"
-import ReplaceDomain from "/snippets/replace-domain.mdx"
+import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx"
- [Install the .NET SDK](https://dotnet.microsoft.com/download).
@@ -51,8 +51,8 @@ public static class AxiomLogger
// Specify the Axiom dataset name and construct the API endpoint URL
var datasetName = "DATASET_NAME";
- var axiomDomain = "AXIOM_DOMAIN";
- var axiomUri = $"https://{axiomDomain}/v1/datasets/{datasetName}/ingest";
+ var axiomEdgeDomain = "AXIOM_EDGE_DOMAIN";
+ var axiomUri = $"https://{axiomEdgeDomain}/v1/ingest/{datasetName}";
// Replace with your Axiom API token
var apiToken = "API_TOKEN"; // Ensure your API token is correct
@@ -95,7 +95,7 @@ public static class AxiomLogger
-<ReplaceDomain />
+<ReplaceEdgeDomain />
### Configure the main program
@@ -244,7 +244,7 @@ public class AxiomConfig
{
public const string DatasetName = "DATASET_NAME";
public const string ApiToken = "API_TOKEN";
- public const string ApiUrl = "https://AXIOM_DOMAIN/v1/datasets";
+    public const string ApiUrl = "https://AXIOM_EDGE_DOMAIN/v1";
}
public class AxiomHttpClient : IHttpClient
@@ -283,7 +283,7 @@ public class Program
.MinimumLevel.Debug()
.WriteTo.Console()
.WriteTo.Http(
- requestUri: $"{AxiomConfig.ApiUrl}/{AxiomConfig.DatasetName}/ingest",
+ requestUri: $"{AxiomConfig.ApiUrl}/ingest/{AxiomConfig.DatasetName}",
queueLimitBytes: null,
textFormatter: new ElasticsearchJsonFormatter(renderMessageTemplate: false, inlineFields: true),
httpClient: new AxiomHttpClient()
@@ -415,7 +415,7 @@ Set up NLog by creating an `NLog.config` file or configuring it programmatically
flushTimeout="5000">
@@ -436,7 +436,7 @@ Set up NLog by creating an `NLog.config` file or configuring it programmatically
-<ReplaceDomain />
+<ReplaceEdgeDomain />
### Configure the main program
diff --git a/guides/send-logs-from-laravel.mdx b/guides/send-logs-from-laravel.mdx
index 710c2b4e..f4aad715 100644
--- a/guides/send-logs-from-laravel.mdx
+++ b/guides/send-logs-from-laravel.mdx
@@ -9,7 +9,7 @@ logoId: 'laravel'
import Prerequisites from "/snippets/standard-prerequisites.mdx"
import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx"
-import ReplaceDomain from "/snippets/replace-domain.mdx"
+import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx"
This guide explains integrating Axiom as a logging solution in a Laravel app. Using Axiom’s capabilities with a custom log channel, you can efficiently send your app’s logs to Axiom for storage, analysis, and monitoring. This integration uses Monolog, Laravel’s underlying logging library, to create a custom logging handler that forwards logs to Axiom.
@@ -74,6 +74,7 @@ return [
'with' => [
'apiToken' => env('AXIOM_API_TOKEN'),
'dataset' => env('AXIOM_DATASET'),
+ 'axiomUrl' => env('AXIOM_DOMAIN'),
],
],
@@ -132,14 +133,14 @@ The `default` configuration specifies the primary channel Laravel uses for loggi
LOG_CHANNEL=axiom
AXIOM_API_TOKEN=API_TOKEN
AXIOM_DATASET=DATASET_NAME
-AXIOM_URL=AXIOM_DOMAIN
+AXIOM_DOMAIN=AXIOM_EDGE_DOMAIN
LOG_LEVEL=debug
LOG_DEPRECATIONS_CHANNEL=null
```
-<ReplaceDomain />
+<ReplaceEdgeDomain />
### Deprecations log channel
@@ -172,7 +173,7 @@ The heart of the `logging.php` file lies within the **`channels`** array where y
'with' => [
'apiToken' => env('AXIOM_API_TOKEN'),
'dataset' => env('AXIOM_DATASET'),
- 'axiomUrl' => env('AXIOM_URL'),
+ 'axiomUrl' => env('AXIOM_DOMAIN'),
],
],
```
@@ -248,8 +249,9 @@ class AxiomHandler extends AbstractProcessingHandler
{
private $apiToken;
private $dataset;
+ private $axiomUrl;
- public function __construct($level = Logger::DEBUG, bool $bubble = true, $apiToken = null, $dataset = null)
+ public function __construct($level = Logger::DEBUG, bool $bubble = true, $apiToken = null, $dataset = null, $axiomUrl = null)
{
parent::__construct($level, $bubble);
$this->apiToken = $apiToken;
@@ -259,7 +261,7 @@ class AxiomHandler extends AbstractProcessingHandler
private function initializeCurl(): \CurlHandle
{
- $endpoint = "https://{$this->axiomUrl}/v1/datasets/{$this->dataset}/ingest";
+ $endpoint = "https://{$this->axiomUrl}/v1/ingest/{$this->dataset}";
$ch = curl_init($endpoint);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
diff --git a/guides/send-logs-from-ruby-on-rails.mdx b/guides/send-logs-from-ruby-on-rails.mdx
index cc93172d..92c4dcbf 100644
--- a/guides/send-logs-from-ruby-on-rails.mdx
+++ b/guides/send-logs-from-ruby-on-rails.mdx
@@ -8,7 +8,7 @@ logoId: 'ruby'
---
import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx"
-import ReplaceDomain from "/snippets/replace-domain.mdx"
+import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx"
import Prerequisites from "/snippets/standard-prerequisites.mdx"
This guide provides step-by-step instructions on how to send logs from a Ruby on Rails app to Axiom using the Faraday library. By following this guide, you configure your Rails app to send logs to Axiom, allowing you to monitor and analyze your app logs effectively.
@@ -48,7 +48,7 @@ require 'json'
class AxiomLogger
def self.send_log(log_data)
dataset_name = "DATASET_NAME"
- axiom_ingest_api_url = "https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest"
+ axiom_ingest_api_url = "https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME"
ingest_token = "API_TOKEN"
conn = Faraday.new(url: axiom_ingest_api_url) do |faraday|
@@ -75,7 +75,7 @@ end
-<ReplaceDomain />
+<ReplaceEdgeDomain />
## Test with the Axiom logger
diff --git a/reference/limits.mdx b/reference/limits.mdx
index c72ecdd0..b9e98506 100644
--- a/reference/limits.mdx
+++ b/reference/limits.mdx
@@ -74,7 +74,7 @@ If you try to ingest data that exceeds these limits, Axiom does the following:
Axiom creates the following two fields automatically for a new dataset:
-- `_time` is the timestamp of the event. If the data you ingest doesn’t have a `_time` field, Axiom assigns the time of the data ingest to the events. If you ingest data using the [Ingest data](/restapi/endpoints/ingestIntoDataset) API endpoint, you can specify the timestamp field with the [timestamp-field](/restapi/endpoints/ingestIntoDataset#parameter-timestamp-field) parameter.
+- `_time` is the timestamp of the event. If the data you ingest doesn’t have a `_time` field, Axiom assigns the time of the data ingest to the events. If you ingest data using the [Ingest data](/restapi/endpoints/ingestToDataset) API endpoint, you can specify the timestamp field with the [timestamp-field](/restapi/endpoints/ingestToDataset#parameter-timestamp-field) parameter.
- `_sysTime` is the time when you ingested the data.
In most cases, use `_time` to define the timestamp of events. In rare cases, if you experience clock skews on your event-producing systems, `_sysTime` can be useful.
diff --git a/reference/optimize-usage.mdx b/reference/optimize-usage.mdx
index 0033d4ff..32e2c464 100644
--- a/reference/optimize-usage.mdx
+++ b/reference/optimize-usage.mdx
@@ -6,7 +6,7 @@ keywords: ['query', 'gb hours']
## Optimize data loading
-When you send data via the [Ingest data](/restapi/endpoints/ingestIntoDataset) API endpoint, ingest volume is based on the uncompressed size of the HTTP request. In this case, body size dominates and metadata overhead is usually negligible.
+When you send data via the [Ingest data](/restapi/endpoints/ingestToDataset) API endpoint, ingest volume is based on the uncompressed size of the HTTP request. In this case, body size dominates and metadata overhead is usually negligible.
If you ingest data via API, using the CSV format comes with the following trade-offs:
diff --git a/reference/regions.mdx b/reference/regions.mdx
index 75d4f624..2cb57096 100644
--- a/reference/regions.mdx
+++ b/reference/regions.mdx
@@ -1,32 +1,76 @@
---
title: 'Regions'
-description: 'This page explains how to work with Axiom based on your organization’s region.'
+description: 'This page explains how to work with Axiom’s unified multi-region platform.'
---
+Axiom is available in multiple regions to help you comply with data residency requirements and optimize performance.
+
+## Available regions
+
+Axiom currently supports the following cloud regions for data ingest:
+
+| Region | Base domain of edge ingest API endpoints | Example | Cloud provider |
+| ---------- | ------------------------------------ | ------------------------------------------------- | -------------- |
+| US | `https://us-east-1.aws.edge.axiom.co` | `https://us-east-1.aws.edge.axiom.co/v1/ingest/DATASET_NAME` | AWS |
+| EU | `https://eu-central-1.aws.edge.axiom.co` | `https://eu-central-1.aws.edge.axiom.co/v1/ingest/DATASET_NAME` | AWS |
+
+The base domain of other API endpoints is `https://api.axiom.co`.
+
-Axiom will support a unified multi-region model by early November 2025. This will allow you to manage data across different regions from a single organization. [Contact Axiom](https://www.axiom.co/contact) to learn more.
+Axiom is developing support for additional regions. [Contact Axiom](https://www.axiom.co/contact) if you need a region that’s not currently supported.
-## Check your region
+
+Query execution currently happens in the US region only. This means that data in the EU region can leave the region when queried. As Axiom continues to build out support for multiple regions, query behavior will be updated to ensure that query execution happens in the same region as the storage region of the data being queried.
+
+
+## Default region
+
+The default region of your organization determines where the data in your datasets are stored.
+
+To determine your organization’s default region:
+
+1. Click **Settings > General**.
+1. Find your organization’s default region in the **Region** section.
+
+
+Axiom is building support for storing data outside your default region on a dataset-by-dataset basis.
+
+
+## Choose region
+
+When you create a new organization, choose the region that best meets your data residency requirements and where your infrastructure is located for optimal performance.
+
+When choosing a region, consider the following factors:
+
+- **Data residency requirements**: If you need to comply with regulations like GDPR that require data to be stored in specific regions, choose the appropriate region.
+- **Performance**: Choose the region closest to your infrastructure for lower latency.
+- **Team location**: Choose the region that your team is primarily located in for better performance.
+
+## Roadmap
-Determine the region your organization uses in one of the following ways:
+### Legacy infrastructure: Dedicated regional instances
-- Go to the [Axiom app](https://app.axiom.co/) and check the URL. Match the base domain in the Axiom web app with the table below:
+Until November 2025, Axiom operated completely separate infrastructures for different regions:
- | Region | Base domain in web app |
- | ---------- | ------------------------------ |
- | US | `https://app.axiom.co` |
+- **US instance**: `app.axiom.co` hosted in AWS `us-east-1`
+- **EU instance**: `app.eu.axiom.co` hosted in AWS `eu-central-1`
-- Click **Settings > General**, and then find the **Region** section.
+Each instance was entirely independent with separate:
+- Organizations and user accounts
+- Billing and subscription plans
+- API endpoints and ingestion URLs
+- Data storage and query infrastructure
-## Axiom API reference
+### Towards a unified multi-region platform
-All examples in the documentation use the default US base domain `https://api.axiom.co`.
+Starting in November 2025, Axiom is rolling out support for a unified platform where a single organization can manage datasets across multiple regions:
-| Region | Base domain of API endpoints | Example |
-| ---------- | ------------------------------------ | ------------------------------------------------- |
-| US | `https://api.axiom.co` | `https://api.axiom.co/v1/datasets/DATASET_NAME/ingest` |
+- **One organization, multiple regions**: Create and manage datasets in different geographic regions from a single account.
+- **Unified billing**: Single subscription covering all your regions.
+- **Cross-region visibility**: View and manage all your data from one console.
+- **Flexible data residency**: Place each dataset in the region that best meets your compliance and performance requirements.
-## Add new region
+### Migration from legacy EU instance
-For more information on managing additional regions, see [Manage add-ons](reference/usage-billing#manage-add-ons).
+The unified multi-region model is rolling out progressively. If your organization currently uses the legacy EU instance, Axiom will contact you about the opportunity to migrate to the unified platform.
diff --git a/restapi/endpoints/ingestToDataset.mdx b/restapi/endpoints/ingestToDataset.mdx
new file mode 100644
index 00000000..3733d2f9
--- /dev/null
+++ b/restapi/endpoints/ingestToDataset.mdx
@@ -0,0 +1,4 @@
+---
+title: Ingest data to dataset
+openapi: "v1-edge post /ingest/{dataset-id}"
+---
diff --git a/restapi/ingest.mdx b/restapi/ingest.mdx
index dad43470..9fd6d405 100644
--- a/restapi/ingest.mdx
+++ b/restapi/ingest.mdx
@@ -7,7 +7,7 @@ keywords: ['axiom documentation', 'documentation', 'axiom', 'axiom api', 'rest a
import Prerequisites from "/snippets/standard-prerequisites.mdx"
import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx"
-import ReplaceDomain from "/snippets/replace-domain.mdx"
+import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx"
The Axiom REST API accepts the following data formats:
@@ -23,7 +23,7 @@ For more information on other ingest options, see [Send data](send-data/methods)
For an introduction to the basics of the Axiom API and to the authentication options, see [Introduction to Axiom API](/restapi/introduction).
-The API requests on this page use the ingest data endpoint. For more information, see the [API reference](/restapi/endpoints/ingestIntoDataset).
+The API requests on this page use the ingest data endpoint. For more information, see the [API reference](/restapi/endpoints/ingestToDataset).
@@ -32,13 +32,13 @@ The API requests on this page use the ingest data endpoint. For more information
To send data to Axiom in JSON format:
1. Encode the events as JSON objects.
1. Enter the array of JSON objects into the body of the API request.
-1. Optional: In the body of the request, set optional parameters such as `timestamp-field` and `timestamp-format`. For more information, see the [ingest data API reference](/restapi/endpoints/ingestIntoDataset).
+1. Optional: In the body of the request, set optional parameters such as `timestamp-field` and `timestamp-format`. For more information, see the [ingest data API reference](/restapi/endpoints/ingestToDataset).
1. Set the `Content-Type` header to `application/json`.
1. Set the `Authorization` header to `Bearer API_TOKEN`.
-1. Send the POST request to `https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest`.
+1. Send the POST request to `https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME`.
-<ReplaceDomain />
+<ReplaceEdgeDomain />
@@ -49,7 +49,7 @@ The following example request contains grouped events. The structure of the JSON
**Example request**
```bash
-curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \
+curl -X 'POST' 'https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME' \
-H 'Authorization: Bearer API_TOKEN' \
-H 'Content-Type: application/json' \
-d '[
@@ -65,7 +65,7 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \
```
-<ReplaceDomain />
+<ReplaceEdgeDomain />
@@ -87,7 +87,7 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \
**Example request**
```bash
-curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \
+curl -X 'POST' 'https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME' \
-H 'Authorization: Bearer API_TOKEN' \
-H 'Content-Type: application/json' \
-d '[
@@ -133,7 +133,7 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \
```
-<ReplaceDomain />
+<ReplaceEdgeDomain />
@@ -155,7 +155,7 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \
**Example request**
```bash
-curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \
+curl -X 'POST' 'https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME' \
-H 'Authorization: Bearer API_TOKEN' \
-H 'Content-Type: application/json' \
-d '[{ "axiom": {
@@ -177,7 +177,7 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \
```
-<ReplaceDomain />
+<ReplaceEdgeDomain />
@@ -199,20 +199,20 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \
To send data to Axiom in NDJSON format:
1. Encode the events as JSON objects.
1. Enter each JSON object in a separate line into the body of the API request.
-1. Optional: In the body of the request, set optional parameters such as `timestamp-field` and `timestamp-format`. For more information, see the [ingest data API reference](/restapi/endpoints/ingestIntoDataset).
+1. Optional: In the body of the request, set optional parameters such as `timestamp-field` and `timestamp-format`. For more information, see the [ingest data API reference](/restapi/endpoints/ingestToDataset).
1. Set the `Content-Type` header to either `application/json` or `application/x-ndjson`.
1. Set the `Authorization` header to `Bearer API_TOKEN`. Replace `API_TOKEN` with the Axiom API token you have generated.
-1. Send the POST request to `https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest`. Replace `DATASET_NAME` with the name of the Axiom dataset where you want to send data.
+1. Send the POST request to `https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME`. Replace `DATASET_NAME` with the name of the Axiom dataset where you want to send data.
-<ReplaceDomain />
+<ReplaceEdgeDomain />
**Example request**
```bash
-curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \
+curl -X 'POST' 'https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME' \
-H 'Authorization: Bearer API_TOKEN' \
-H 'Content-Type: application/x-ndjson' \
-d '{"id":1,"name":"machala"}
@@ -224,7 +224,7 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \
```
-<ReplaceDomain />
+<ReplaceEdgeDomain />
@@ -246,20 +246,20 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \
To send data to Axiom in JSON format:
1. Encode the events in CSV format. The first line specifies the field names separated by commas. Subsequent new lines specify the values separated by commas.
1. Enter the CSV representation in the body of the API request.
-1. Optional: In the body of the request, set optional parameters such as `timestamp-field` and `timestamp-format`. For more information, see the [ingest data API reference](/restapi/endpoints/ingestIntoDataset).
+1. Optional: In the body of the request, set optional parameters such as `timestamp-field` and `timestamp-format`. For more information, see the [ingest data API reference](/restapi/endpoints/ingestToDataset).
1. Set the `Content-Type` header to `text/csv`.
1. Set the `Authorization` header to `Bearer API_TOKEN`. Replace `API_TOKEN` with the Axiom API token you have generated.
-1. Send the POST request to `https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest`. Replace `DATASET_NAME` with the name of the Axiom dataset where you want to send data.
+1. Send the POST request to `https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME`. Replace `DATASET_NAME` with the name of the Axiom dataset where you want to send data.
-<ReplaceDomain />
+<ReplaceEdgeDomain />
**Example request**
```bash
-curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \
+curl -X 'POST' 'https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME' \
-H 'Authorization: Bearer API_TOKEN' \
-H 'Content-Type: text/csv' \
-d 'user, name
@@ -267,7 +267,7 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \
```
-<ReplaceDomain />
+<ReplaceEdgeDomain />
@@ -296,7 +296,7 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \
```
-<ReplaceDomain />
+<ReplaceEdgeDomain />
diff --git a/restapi/introduction.mdx b/restapi/introduction.mdx
index e18e1efc..df727f4f 100644
--- a/restapi/introduction.mdx
+++ b/restapi/introduction.mdx
@@ -11,7 +11,7 @@ popularityOrder: 1
---
import Prerequisites from "/snippets/minimal-prerequisites.mdx"
-import ReplaceDomain from "/snippets/replace-domain.mdx"
+import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx"
import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx"
You can use the Axiom API (Application Programming Interface) to send data to Axiom, query data, and manage resources programmatically. This page covers the basics for interacting with the Axiom API.
@@ -25,7 +25,7 @@ Axiom API follows the REST architectural style and uses JSON for serialization.
For example, the following curl command ingests data to an Axiom dataset:
```bash
-curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \
+curl -X 'POST' 'https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME' \
-H 'Authorization: Bearer API_TOKEN' \
-H 'Content-Type: application/json' \
-d '[
@@ -36,11 +36,11 @@ curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \
```
-<ReplaceDomain />
+<ReplaceEdgeDomain />
-For more information, see [Send data to Axiom via API](/restapi/ingest) and [Ingest data endpoint](/restapi/endpoints/ingestIntoDataset).
+For more information, see [Send data to Axiom via API](/restapi/ingest) and [Ingest data endpoint](/restapi/endpoints/ingestToDataset).
## Regions
diff --git a/restapi/versions/v1-edge.json b/restapi/versions/v1-edge.json
new file mode 100644
index 00000000..0b4ac663
--- /dev/null
+++ b/restapi/versions/v1-edge.json
@@ -0,0 +1,345 @@
+{
+ "openapi": "3.0.0",
+ "info": {
+ "description": "A public and stable API for interacting with axiom services",
+ "title": "Axiom",
+ "termsOfService": "http://axiom.co/terms",
+ "contact": {
+ "name": "Axiom support team",
+ "url": "https://axiom.co",
+ "email": "hello@axiom.co"
+ },
+ "license": {
+ "name": "Apache 2.0",
+ "url": "https://www.apache.org/licenses/LICENSE-2.0.html"
+ },
+ "version": "2.0.0"
+ },
+ "servers": [
+ {
+      "url": "https://{axiom-domain}/v1"
+ }
+ ],
+ "paths": {
+ "/ingest/{dataset-id}": {
+ "post": {
+ "security": [
+ {
+ "Auth": [
+ "ingest|create"
+ ]
+ }
+ ],
+ "tags": [
+ "ingest"
+ ],
+ "operationId": "ingestToDataset",
+ "parameters": [
+ {
+ "name": "dataset-id",
+ "in": "path",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "description": "The name of the field to use as the timestamp. If not specified, the timestamp will be the time the event was received by Axiom.",
+ "name": "timestamp-field",
+ "in": "query",
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "description": "The date-time format of the timestamp field. The reference time is `Mon Jan 2 15:04:05 -0700 MST 2006`, as specified in https://pkg.go.dev/time/?tab=doc#Parse",
+ "name": "timestamp-format",
+ "in": "query",
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "description": "The delimiter to use when parsing CSV data. If not specified, the default delimiter is `,`.",
+ "name": "csv-delimiter",
+ "in": "query",
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "description": "A list of optional comma separated fields to use for CSV ingestion. If not specified, the first line of the CSV will be used as the field names.",
+ "name": "X-Axiom-CSV-Fields",
+ "in": "header",
+ "style": "simple",
+ "schema": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ },
+ {
+ "description": "An optional JSON encoded object with additional labels to add to all events in the request",
+ "name": "X-Axiom-Event-Labels",
+ "in": "header",
+ "schema": {
+ "type": "string"
+ }
+ }
+ ],
+ "requestBody": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "description": "Data you want to send to Axiom.\nSupported formats: JSON, NDJSON, CSV.\nUpload the data in a file or send it in the payload of the request.",
+ "type": "string",
+ "format": "binary"
+ }
+ },
+ "application/nd-json": {
+ "schema": {
+ "description": "Data you want to send to Axiom.\nSupported formats: JSON, NDJSON, CSV.\nUpload the data in a file or send it in the payload of the request.",
+ "type": "string",
+ "format": "binary"
+ }
+ },
+ "text/csv": {
+ "schema": {
+ "description": "Data you want to send to Axiom.\nSupported formats: JSON, NDJSON, CSV.\nUpload the data in a file or send it in the payload of the request.",
+ "type": "string",
+ "format": "binary"
+ }
+ },
+ "application/x-ndjson": {
+ "schema": {
+ "description": "Data you want to send to Axiom.\nSupported formats: JSON, NDJSON, CSV.\nUpload the data in a file or send it in the payload of the request.",
+ "type": "string",
+ "format": "binary"
+ }
+ }
+ },
+ "required": true
+ },
+ "responses": {
+ "200": {
+ "description": "IngestResult",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/IngestStatus"
+ }
+ }
+ }
+ }
+ },
+ "x-axiom-limit-requests": true,
+ "x-axiom-nocompress": true,
+ "x-axiom-not-suspended": true,
+ "x-axiom-trace-sampling": "0.01"
+ }
+ }
+ },
+ "components": {
+ "parameters": {
+ "Limit": {
+ "name": "limit",
+ "in": "query",
+ "schema": {
+ "type": "integer",
+ "format": "int64",
+ "minimum": 1,
+ "maximum": 1000,
+ "default": 100
+ }
+ },
+ "Offset": {
+ "name": "offset",
+ "in": "query",
+ "schema": {
+ "type": "integer",
+ "format": "int64",
+ "minimum": 0,
+ "default": 0
+ }
+ }
+ },
+ "requestBodies": {
+ "ingestDatasetOpenTelemetryLogsPayload": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "description": "Data you want to send to Axiom.\nSupported formats: JSON, Protobuf.\nUpload the data in a file or send it in the payload of the request.",
+ "type": "string",
+ "format": "binary"
+ }
+ },
+ "application/x-protobuf": {
+ "schema": {
+ "description": "Data you want to send to Axiom.\nSupported formats: JSON, Protobuf.\nUpload the data in a file or send it in the payload of the request.",
+ "type": "string",
+ "format": "binary"
+ }
+ }
+ },
+ "required": true
+ }
+ },
+ "securitySchemes": {
+ "Auth": {
+ "type": "oauth2",
+ "flows": {
+ "authorizationCode": {
+ "authorizationUrl": "https://accounts.google.com/o/oauth2/v2/auth",
+ "tokenUrl": "https://www.googleapis.com/oauth2/v4/token",
+ "scopes": {}
+ }
+ }
+ },
+ "Shared": {
+ "type": "apiKey",
+ "name": "tk",
+ "in": "query"
+ }
+ },
+ "schemas": {
+ "IngestFailure": {
+ "type": "object",
+ "required": [
+ "error",
+ "timestamp"
+ ],
+ "properties": {
+ "error": {
+ "type": "string"
+ },
+ "timestamp": {
+ "type": "string",
+ "format": "date-time"
+ }
+ }
+ },
+ "IngestStatus": {
+ "type": "object",
+ "required": [
+ "ingested",
+ "failed",
+ "processedBytes",
+ "blocksCreated",
+ "walLength"
+ ],
+ "properties": {
+ "blocksCreated": {
+ "type": "integer",
+ "format": "uint32"
+ },
+ "failed": {
+ "type": "integer",
+ "format": "uint64"
+ },
+ "failures": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/IngestFailure"
+ }
+ },
+ "ingested": {
+ "type": "integer",
+ "format": "uint64"
+ },
+ "processedBytes": {
+ "type": "integer",
+ "format": "uint64"
+ },
+ "walLength": {
+ "type": "integer",
+ "format": "uint32"
+ }
+ }
+ },
+ "OTELResponse": {
+ "description": "The response from the ingest API according to the OpenTelemetry specification.",
+ "type": "string",
+ "format": "binary"
+ },
+ "elasticHealth": {
+ "type": "object",
+ "properties": {
+ "cluster_name": {
+ "type": "string"
+ },
+ "status": {
+ "type": "string"
+ }
+ }
+ },
+ "elasticLicenseInfo": {
+ "type": "object",
+ "properties": {
+ "expiry_date": {
+ "type": "string",
+ "format": "date-time",
+ "nullable": true
+ },
+ "expiry_date_in_millis": {
+ "type": "integer",
+ "format": "int64",
+ "nullable": true
+ },
+ "issue_date": {
+ "type": "string",
+ "format": "date-time"
+ },
+ "issue_date_in_millis": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "issued_to": {
+ "type": "string"
+ },
+ "issuer": {
+ "type": "string"
+ },
+ "max_nodes": {
+ "type": "integer"
+ },
+ "start_date_in_millis": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "status": {
+ "type": "string"
+ },
+ "type": {
+ "type": "string"
+ },
+ "uid": {
+ "type": "string"
+ }
+ }
+ },
+ "elasticLicenseResponse": {
+ "type": "object",
+ "properties": {
+ "license": {
+ "$ref": "#/components/schemas/elasticLicenseInfo"
+ }
+ }
+ },
+ "elasticVersion": {
+ "type": "object",
+ "properties": {
+ "version": {
+ "type": "object",
+ "properties": {
+ "number": {
+ "type": "string"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
\ No newline at end of file
diff --git a/restapi/versions/v1.json b/restapi/versions/v1.json
index 3fccfd51..7f07e5a7 100644
--- a/restapi/versions/v1.json
+++ b/restapi/versions/v1.json
@@ -447,63 +447,26 @@
}
],
"requestBody": {
- "description": "Data you want to send to Axiom in a supported format.",
"content": {
"application/json": {
"schema": {
- "type": "array",
- "description": "a JSON or ND-JSON array of events to ingest",
- "example": [
- {
- "message": "Hello, World!",
- "foo": "bar"
- },
- {
- "bar": "foz"
- }
- ],
- "items": {
- "type": "object",
- "properties": {}
- }
+ "type": "string",
+ "format": "binary",
+ "description": "Data you want to send to Axiom.\nSupported formats: JSON, NDJSON, CSV.\nUpload the data in a file or send it in the payload of the request."
}
},
"text/csv": {
"schema": {
- "type": "array",
- "description": "a JSON or ND-JSON array of events to ingest",
- "example": [
- {
- "message": "Hello, World!",
- "foo": "bar"
- },
- {
- "bar": "foz"
- }
- ],
- "items": {
- "type": "object",
- "properties": {}
- }
+ "type": "string",
+ "format": "binary",
+ "description": "Data you want to send to Axiom.\nSupported formats: JSON, NDJSON, CSV.\nUpload the data in a file or send it in the payload of the request."
}
},
"application/x-ndjson": {
"schema": {
- "type": "array",
- "description": "a JSON or ND-JSON array of events to ingest",
- "example": [
- {
- "message": "Hello, World!",
- "foo": "bar"
- },
- {
- "bar": "foz"
- }
- ],
- "items": {
- "type": "object",
- "properties": {}
- }
+ "type": "string",
+ "format": "binary",
+ "description": "Data you want to send to Axiom.\nSupported formats: JSON, NDJSON, CSV.\nUpload the data in a file or send it in the payload of the request."
}
}
},
diff --git a/send-data/aws-firehose.mdx b/send-data/aws-firehose.mdx
index d88f850a..2acf6a1c 100644
--- a/send-data/aws-firehose.mdx
+++ b/send-data/aws-firehose.mdx
@@ -10,7 +10,7 @@ logoId: 'firehose'
import AwsDisclaimer from "/snippets/aws-disclaimer.mdx"
import Prerequisites from "/snippets/standard-prerequisites.mdx"
import ReplaceDataset from "/snippets/replace-dataset.mdx"
-import ReplaceDomain from "/snippets/replace-domain.mdx"
+import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx"
Amazon Data Firehose is a service for delivering real-time streaming data to different destinations. Send event data from Amazon Data Firehose to Axiom to analyze and monitor your data efficiently.
@@ -23,9 +23,9 @@ Amazon Data Firehose is a service for delivering real-time streaming data to dif
1. In Axiom, determine the ID of the dataset you’ve created.
1. In Amazon Data Firehose, create an HTTP endpoint destination. For more information, see the [Amazon Data Firehose documentation](https://docs.aws.amazon.com/firehose/latest/dev/create-destination.html#create-destination-http).
-1. Set HTTP endpoint URL to `https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest/firehose`.
+1. Set HTTP endpoint URL to `https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME/firehose`.
-
+
1. Set the access key to the Axiom API token.
diff --git a/send-data/aws-firelens.mdx b/send-data/aws-firelens.mdx
index 8781d0e6..151b3259 100644
--- a/send-data/aws-firelens.mdx
+++ b/send-data/aws-firelens.mdx
@@ -8,7 +8,7 @@ logoId: 'awsEC2'
---
import Prerequisites from "/snippets/standard-prerequisites.mdx"
-import ReplaceDomain from "/snippets/replace-domain.mdx"
+import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx"
import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx"
import AwsDisclaimer from "/snippets/aws-disclaimer.mdx"
@@ -40,9 +40,9 @@ You’ll typically define this in a file called `fluent-bit.conf`:
[OUTPUT]
Name http
Match *
- Host AXIOM_DOMAIN
+ Host AXIOM_EDGE_DOMAIN
Port 443
- URI /v1/datasets/DATASET_NAME/ingest
+ URI /v1/ingest/DATASET_NAME
Format json_lines
tls On
format json
@@ -52,7 +52,7 @@ You’ll typically define this in a file called `fluent-bit.conf`:
```
-
+
@@ -104,14 +104,14 @@ Create the `fluentd.conf` file and add your configuration:
@type http
headers {"Authorization": "Bearer API_TOKEN"}
data_type json
- endpoint https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest
+ endpoint https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME
sourcetype ecs
```
-
+
Read more about [Fluentd configuration](/send-data/fluentd).
diff --git a/send-data/aws-iot-rules.mdx b/send-data/aws-iot-rules.mdx
index fc1a0b95..14222113 100644
--- a/send-data/aws-iot-rules.mdx
+++ b/send-data/aws-iot-rules.mdx
@@ -9,7 +9,7 @@ logoId: 'awsIotCore'
import AwsDisclaimer from "/snippets/aws-disclaimer.mdx"
import Prerequisites from "/snippets/standard-prerequisites.mdx"
-import ReplaceDomain from "/snippets/replace-domain.mdx"
+import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx"
import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx"
@@ -29,10 +29,10 @@ import requests # Import the requests module to make HTTP requests
def lambda_handler(event, context):
# Retrieve the dataset name and the Axiom domain from the environment variables
dataset_name = os.environ['DATASET_NAME']
- axiom_domain = os.environ['AXIOM_DOMAIN']
+ axiom_edge_domain = os.environ['AXIOM_EDGE_DOMAIN']
# Construct the Axiom API URL using the dataset name
- axiom_api_url = f"https://{axiom_domain}/v1/datasets/{dataset_name}/ingest"
+ axiom_api_url = f"https://{axiom_edge_domain}/v1/ingest/{dataset_name}"
# Retrieve the Axiom API token from the environment variable
api_token = os.environ['API_TOKEN']
@@ -61,9 +61,8 @@ def lambda_handler(event, context):
```
In the environment variables section of the Lambda function configuration, add the following environment variables:
-- `DATASET_NAME` is the name of the Axiom dataset where you want to send data.
-- `AXIOM_DOMAIN` is the Axiom domain that your organization uses. For more information, see [Regions](/reference/regions).
-- `API_TOKEN` is the Axiom API token you have generated. For added security, store the API token in an environment variable.
+
+
This example uses Python for the Lambda function. To use another language, change the code above accordingly.
diff --git a/send-data/aws-s3.mdx b/send-data/aws-s3.mdx
index 7ba6ef3f..7d3a6427 100644
--- a/send-data/aws-s3.mdx
+++ b/send-data/aws-s3.mdx
@@ -9,6 +9,9 @@ logoId: 'awsbucket'
import AwsDisclaimer from "/snippets/aws-disclaimer.mdx"
import Prerequisites from "/snippets/standard-prerequisites.mdx"
+import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx"
+import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx"
+
This page explains how to set up an AWS Lambda function to send logs from an S3 bucket to Axiom. The Lambda function triggers when a new log file is uploaded to an S3 bucket, processes the log data, and sends it to Axiom.
@@ -102,8 +105,8 @@ def lambda_handler(event, context):
# Prepare Axiom API request
dataset_name = os.environ['DATASET_NAME']
- axiom_domain = os.environ['AXIOM_DOMAIN']
- axiom_api_url = f"https://{axiom_domain}/v1/datasets/{dataset_name}/ingest"
+ axiom_edge_domain = os.environ['AXIOM_EDGE_DOMAIN']
+ axiom_api_url = f"https://{axiom_edge_domain}/v1/ingest/{dataset_name}"
api_token = os.environ['API_TOKEN']
axiom_headers = {
"Authorization": f"Bearer {api_token}",
@@ -124,9 +127,8 @@ def lambda_handler(event, context):
In the environment variables section of the Lambda function configuration, add the following environment variables:
-- `DATASET_NAME` is the name of the Axiom dataset where you want to send data.
-- `AXIOM_DOMAIN` is the Axiom domain that your organization uses. For more information, see [Regions](/reference/regions).
-- `API_TOKEN` is the Axiom API token you have generated. For added security, store the API token in an environment variable.
+
+
This example uses Python for the Lambda function. To use another language, change the code above accordingly.
diff --git a/send-data/cribl.mdx b/send-data/cribl.mdx
index 67e5f0b5..5a410849 100644
--- a/send-data/cribl.mdx
+++ b/send-data/cribl.mdx
@@ -9,7 +9,7 @@ isPopular: true
popularityOrder: 10
---
-import ReplaceDomain from "/snippets/replace-domain.mdx"
+import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx"
import ReplaceDataset from "/snippets/replace-dataset.mdx"
import Prerequisites from "/snippets/standard-prerequisites.mdx"
import ReplaceToken from "/snippets/replace-token.mdx"
@@ -36,10 +36,10 @@ Open Cribl’s UI and navigate to **Destinations > HTTP**. Click on `+` Add New
- **Name:** Choose a name for the destination.
-- **Endpoint URL:** The URL of your Axiom log ingest endpoint `https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest`.
+- **Endpoint URL:** The URL of your Axiom log ingest endpoint `https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME`.
-
+
diff --git a/send-data/fluent-bit.mdx b/send-data/fluent-bit.mdx
index ecfcfe3e..7ce18585 100644
--- a/send-data/fluent-bit.mdx
+++ b/send-data/fluent-bit.mdx
@@ -10,7 +10,7 @@ popularityOrder: 11
---
import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx"
-import ReplaceDomain from "/snippets/replace-domain.mdx"
+import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx"
import Prerequisites from "/snippets/standard-prerequisites.mdx"
Fluent Bit is an open-source log processor and forwarder that allows you to collect any data like metrics and logs from different sources, enrich them with filters, and send them to multiple destinations like Axiom.
@@ -27,9 +27,9 @@ Fluent Bit is an open-source log processor and forwarder that allows you to coll
[OUTPUT]
Name http
Match *
- Host AXIOM_DOMAIN
+ Host AXIOM_EDGE_DOMAIN
Port 443
- URI /v1/datasets/DATASET_NAME/ingest
+ URI /v1/ingest/DATASET_NAME
Header Authorization Bearer API_TOKEN
Compress gzip
Format json
@@ -39,6 +39,6 @@ Fluent Bit is an open-source log processor and forwarder that allows you to coll
```
-
+
diff --git a/send-data/fluentd.mdx b/send-data/fluentd.mdx
index d5748a49..bbb37789 100644
--- a/send-data/fluentd.mdx
+++ b/send-data/fluentd.mdx
@@ -12,7 +12,7 @@ popularityOrder: 12
import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx"
import ReplaceDataset from "/snippets/replace-dataset.mdx"
import Prerequisites from "/snippets/standard-prerequisites.mdx"
-import ReplaceDomain from "/snippets/replace-domain.mdx"
+import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx"
Fluentd is an open-source log collector that allows you to collect, aggregate, process, analyze, and route log files.
@@ -49,7 +49,7 @@ The example below shows a Fluentd configuration that sends data to Axiom using t
@type http
- endpoint https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest
+ endpoint https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME
# Authorization Bearer should be an ingest token
headers {"Authorization": "Bearer API_TOKEN"}
json_array false
@@ -66,7 +66,7 @@ The example below shows a Fluentd configuration that sends data to Axiom using t
```
-
+
@@ -257,7 +257,7 @@ The example below shows a Fluentd configuration that sends Scala data to Axiom
# Send Scala logs using HTTP plugin to Axiom
@type http
- endpoint "#{ENV['FLUENT_HTTP_ENDPOINT'] || 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest'}"
+ endpoint "#{ENV['FLUENT_HTTP_ENDPOINT'] || 'https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME'}"
headers {"Authorization": "Bearer #{ENV['FLUENT_HTTP_TOKEN'] || ''}"}
@type json
@@ -275,7 +275,7 @@ The example below shows a Fluentd configuration that sends Scala data to Axiom
```
-
+
@@ -309,7 +309,7 @@ The example below shows a Fluentd configuration that sends data from your virtua
@id out_http_axiom
@log_level info
endpoint "#{ENV['AXIOM_URL'] || 'https://api.axiom.co'}"
- path "/v1/datasets/DATASET_NAME/ingest"
+ path "/v1/ingest/DATASET_NAME"
ssl_verify "#{ENV['AXIOM_SSL_VERIFY'] || 'true'}"
Authorization "Bearer API_TOKEN"
@@ -360,7 +360,7 @@ The example below shows a Fluentd configuration that sends data from your virtua
@id out_http_axiom
@log_level info
endpoint "#{ENV['AXIOM_URL'] || 'https://api.axiom.co'}"
- path "/v1/datasets/DATASET_NAME/ingest"
+ path "/v1/ingest/DATASET_NAME"
ssl_verify "#{ENV['AXIOM_SSL_VERIFY'] || 'true'}"
Authorization "Bearer API_TOKEN"
diff --git a/send-data/heroku-log-drains.mdx b/send-data/heroku-log-drains.mdx
index 5538b3fb..23a2413b 100644
--- a/send-data/heroku-log-drains.mdx
+++ b/send-data/heroku-log-drains.mdx
@@ -9,7 +9,7 @@ logoId: 'heroku'
import Prerequisites from "/snippets/standard-prerequisites.mdx"
import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx"
-import ReplaceDomain from "/snippets/replace-domain.mdx"
+import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx"
Log Drains make it easy to collect logs from your deployments and forward them to archival, search, and alerting services by sending them via HTTPS, HTTP, TLS, and TCP.
@@ -28,12 +28,12 @@ Sign up and login to your account on [Heroku](https://heroku.com/), and download
Heroku log drains configuration consists of three main components
```bash
-heroku drains:add https://axiom:API_TOKEN@AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest -a HEROKU_APPLICATION_NAME
+heroku drains:add https://axiom:API_TOKEN@AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME -a HEROKU_APPLICATION_NAME
```
-
+
Replace `HEROKU_APPLICATION_NAME` with the name of the app that you created on the Heroku dashboard or the Heroku CLI.
diff --git a/send-data/kubernetes.mdx b/send-data/kubernetes.mdx
index 80849b00..8ff21f9f 100644
--- a/send-data/kubernetes.mdx
+++ b/send-data/kubernetes.mdx
@@ -8,7 +8,7 @@ logoId: 'kubernetes'
---
import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx"
-import ReplaceDomain from "/snippets/replace-domain.mdx"
+import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx"
import Prerequisites from "/snippets/standard-prerequisites.mdx"
Axiom makes it easy to collect, analyze, and monitor logs from your Kubernetes clusters. Integrate popular tools like Filebeat, Vector, or Fluent Bit with Axiom to send your cluster logs.
@@ -113,7 +113,7 @@ spec:
- -e
env:
- name: AXIOM_HOST
- value: AXIOM_DOMAIN
+ value: AXIOM_EDGE_DOMAIN
- name: AXIOM_DATASET_NAME
value: DATASET_NAME
- name: AXIOM_API_TOKEN
@@ -276,7 +276,7 @@ spec:
- /etc/vector/
env:
- name: AXIOM_HOST
- value: AXIOM_DOMAIN
+ value: AXIOM_EDGE_DOMAIN
- name: AXIOM_DATASET_NAME
value: DATASET_NAME
- name: AXIOM_API_TOKEN
@@ -426,7 +426,7 @@ data:
Match *
Host ${AXIOM_HOST}
Port 443
- URI /v1/datasets/${AXIOM_DATASET_NAME}/ingest
+ URI /v1/ingest/${AXIOM_DATASET_NAME}
Header Authorization Bearer ${AXIOM_API_TOKEN}
Format json
Json_date_key time
@@ -464,7 +464,7 @@ spec:
image: fluent/fluent-bit:1.9.9
env:
- name: AXIOM_HOST
- value: AXIOM_DOMAIN
+ value: AXIOM_EDGE_DOMAIN
- name: AXIOM_DATASET_NAME
value: DATASET_NAME
- name: AXIOM_API_TOKEN
@@ -495,7 +495,7 @@ spec:
```
-
+
diff --git a/send-data/methods.mdx b/send-data/methods.mdx
index b16032f7..e9929c21 100644
--- a/send-data/methods.mdx
+++ b/send-data/methods.mdx
@@ -5,20 +5,20 @@ sidebarTitle: "Methods"
keywords: ["send data", "ingest", "methods", "integrations", "opentelemetry", "vector", "fluentd"]
---
-import ReplaceDomain from "/snippets/replace-domain.mdx"
+import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx"
import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx"
The easiest way to send your first event data to Axiom is with a direct HTTP request using a tool like `cURL`.
```shell
-curl -X 'POST' 'https://AXIOM_DOMAIN/v1/datasets/DATASET_NAME/ingest' \
+curl -X 'POST' 'https://AXIOM_EDGE_DOMAIN/v1/ingest/DATASET_NAME' \
-H 'Authorization: Bearer API_TOKEN' \
-H 'Content-Type: application/x-ndjson' \
-d '{ "http": { "request": { "method": "GET", "duration_ms": 231 }, "response": { "body": { "size": 3012 } } }, "url": { "path": "/download" } }'
```
-
+
diff --git a/send-data/tremor.mdx b/send-data/tremor.mdx
index eb48db36..83a83552 100644
--- a/send-data/tremor.mdx
+++ b/send-data/tremor.mdx
@@ -8,7 +8,7 @@ logoId: 'tremor'
---
import ReplaceDatasetToken from "/snippets/replace-dataset-token.mdx"
-import ReplaceDomain from "/snippets/replace-domain.mdx"
+import ReplaceEdgeDomain from "/snippets/replace-edge-domain.mdx"
Axiom provides a unique way of ingesting [Tremor logs](https://www.tremor.rs/) into Axiom. With your connector definitions, you can configure Tremor connectors and events components to interact with your external systems, such as databases, message queues, or APIs, and eventually ingest data from these sources into Axiom.
@@ -45,7 +45,7 @@ flow
token
with
config = {
- "url": "https://AXIOM_DOMAIN/v1/datasets/#{args.dataset}/ingest",
+ "url": "https://AXIOM_EDGE_DOMAIN/v1/ingest/#{args.dataset}",
"tls": true,
"method": "POST",
"headers": {
@@ -77,7 +77,7 @@ This assumes you have set `TREMOR_PATH` in your environment pointing to `tremor-
-
+
## Configuration using Syslog
diff --git a/snippets/replace-edge-domain.mdx b/snippets/replace-edge-domain.mdx
new file mode 100644
index 00000000..39cb9659
--- /dev/null
+++ b/snippets/replace-edge-domain.mdx
@@ -0,0 +1 @@
+Replace `AXIOM_EDGE_DOMAIN` with `us-east-1.aws.edge.axiom.co` if your organization uses the US region, and with `eu-central-1.aws.edge.axiom.co` if your organization uses the EU region. For more information, see [Regions](/reference/regions).