diff --git a/internal/pkg/agent/application/actions/handlers/handler_action_policy_change.go b/internal/pkg/agent/application/actions/handlers/handler_action_policy_change.go
index c4f2c10dc96..af76e83f6d1 100644
--- a/internal/pkg/agent/application/actions/handlers/handler_action_policy_change.go
+++ b/internal/pkg/agent/application/actions/handlers/handler_action_policy_change.go
@@ -37,7 +37,7 @@ const (
 // PolicyChangeHandler is a handler for POLICY_CHANGE action.
 type PolicyChangeHandler struct {
 	log       *logger.Logger
-	agentInfo *info.AgentInfo
+	agentInfo info.Agent
 	config    *configuration.Configuration
 	store     storage.Store
 	ch        chan coordinator.ConfigChange
@@ -52,7 +52,7 @@ type PolicyChangeHandler struct {
 // NewPolicyChangeHandler creates a new PolicyChange handler.
 func NewPolicyChangeHandler(
 	log *logger.Logger,
-	agentInfo *info.AgentInfo,
+	agentInfo info.Agent,
 	config *configuration.Configuration,
 	store storage.Store,
 	ch chan coordinator.ConfigChange,
@@ -264,7 +264,7 @@ func clientEqual(k1 remote.Config, k2 remote.Config) bool {
 	return true
 }
 
-func fleetToReader(agentInfo *info.AgentInfo, cfg *configuration.Configuration) (io.Reader, error) {
+func fleetToReader(agentInfo info.Agent, cfg *configuration.Configuration) (io.Reader, error) {
 	configToStore := map[string]interface{}{
 		"fleet": cfg.Fleet,
 		"agent": map[string]interface{}{
diff --git a/internal/pkg/agent/application/actions/handlers/handler_action_policy_change_test.go b/internal/pkg/agent/application/actions/handlers/handler_action_policy_change_test.go
index cdfc2a7110a..b56bb1a1ba4 100644
--- a/internal/pkg/agent/application/actions/handlers/handler_action_policy_change_test.go
+++ b/internal/pkg/agent/application/actions/handlers/handler_action_policy_change_test.go
@@ -34,10 +34,7 @@ func TestPolicyChange(t *testing.T) {
 	log, _ := logger.New("", false)
 	ack := noopacker.New()
 
-	ctx, cancel := context.WithCancel(context.Background())
-	defer cancel()
-
-	agentInfo, _ := info.NewAgentInfo(ctx, true)
+	agentInfo := &info.AgentInfo{}
 	nullStore := &storage.NullStore{}
 
 	t.Run("Receive a config change and successfully emits a raw configuration", func(t *testing.T) {
@@ -63,10 +60,8 @@ func TestPolicyChange(t *testing.T) {
 
 func TestPolicyAcked(t *testing.T) {
 	log, _ := logger.New("", false)
-	ctx, cancel := context.WithCancel(context.Background())
-	defer cancel()
 
-	agentInfo, _ := info.NewAgentInfo(ctx, true)
+	agentInfo := &info.AgentInfo{}
 	nullStore := &storage.NullStore{}
 
 	t.Run("Config change should ACK", func(t *testing.T) {
diff --git a/internal/pkg/agent/application/actions/handlers/handler_action_settings.go b/internal/pkg/agent/application/actions/handlers/handler_action_settings.go
index 6fd348baca6..68be5715dab 100644
--- a/internal/pkg/agent/application/actions/handlers/handler_action_settings.go
+++ b/internal/pkg/agent/application/actions/handlers/handler_action_settings.go
@@ -20,14 +20,14 @@ import (
 // Settings handles settings change coming from fleet and updates log level.
 type Settings struct {
 	log       *logger.Logger
-	agentInfo *info.AgentInfo
+	agentInfo info.Agent
 	coord     *coordinator.Coordinator
 }
 
 // NewSettings creates a new Settings handler.
 func NewSettings(
 	log *logger.Logger,
-	agentInfo *info.AgentInfo,
+	agentInfo info.Agent,
 	coord *coordinator.Coordinator,
 ) *Settings {
 	return &Settings{
diff --git a/internal/pkg/agent/application/actions/handlers/handler_action_upgrade_test.go b/internal/pkg/agent/application/actions/handlers/handler_action_upgrade_test.go
index 01377e43313..14f4a02c571 100644
--- a/internal/pkg/agent/application/actions/handlers/handler_action_upgrade_test.go
+++ b/internal/pkg/agent/application/actions/handlers/handler_action_upgrade_test.go
@@ -63,7 +63,8 @@ func TestUpgradeHandler(t *testing.T) {
 	defer cancel()
 
 	log, _ := logger.New("", false)
-	agentInfo, _ := info.NewAgentInfo(ctx, true)
+
+	agentInfo := &info.AgentInfo{}
 	msgChan := make(chan string)
 
 	// Create and start the coordinator
@@ -95,7 +96,8 @@ func TestUpgradeHandlerSameVersion(t *testing.T) {
 	defer cancel()
 
 	log, _ := logger.New("", false)
-	agentInfo, _ := info.NewAgentInfo(ctx, true)
+
+	agentInfo := &info.AgentInfo{}
 	msgChan := make(chan string)
 
 	// Create and start the Coordinator
@@ -129,7 +131,8 @@ func TestUpgradeHandlerNewVersion(t *testing.T) {
 	defer cancel()
 
 	log, _ := logger.New("", false)
-	agentInfo, _ := info.NewAgentInfo(ctx, true)
+
+	agentInfo := &info.AgentInfo{}
 	msgChan := make(chan string)
 
 	// Create and start the Coordinator
diff --git a/internal/pkg/agent/application/application.go b/internal/pkg/agent/application/application.go
index a369924c802..8a37991f636 100644
--- a/internal/pkg/agent/application/application.go
+++ b/internal/pkg/agent/application/application.go
@@ -41,7 +41,7 @@ func New(
 	log *logger.Logger,
 	baseLogger *logger.Logger,
 	logLevel logp.Level,
-	agentInfo *info.AgentInfo,
+	agentInfo info.Agent,
 	reexec coordinator.ReExecManager,
 	tracer *apm.Tracer,
 	testingMode bool,
diff --git a/internal/pkg/agent/application/coordinator/coordinator.go b/internal/pkg/agent/application/coordinator/coordinator.go
index 0a5450ddada..10be2cc2aa8 100644
--- a/internal/pkg/agent/application/coordinator/coordinator.go
+++ b/internal/pkg/agent/application/coordinator/coordinator.go
@@ -173,7 +173,7 @@ type configReloader interface {
 // All configuration changes, update variables, and upgrade actions are managed and controlled by the coordinator.
 type Coordinator struct {
 	logger    *logger.Logger
-	agentInfo *info.AgentInfo
+	agentInfo info.Agent
 	isManaged bool
 
 	cfg   *configuration.Configuration
@@ -322,7 +322,7 @@ type UpdateComponentChange struct {
 }
 
 // New creates a new coordinator.
-func New(logger *logger.Logger, cfg *configuration.Configuration, logLevel logp.Level, agentInfo *info.AgentInfo, specs component.RuntimeSpecs, reexecMgr ReExecManager, upgradeMgr UpgradeManager, runtimeMgr RuntimeManager, configMgr ConfigManager, varsMgr VarsManager, caps capabilities.Capabilities, monitorMgr MonitorManager, isManaged bool, modifiers ...ComponentsModifier) *Coordinator {
+func New(logger *logger.Logger, cfg *configuration.Configuration, logLevel logp.Level, agentInfo info.Agent, specs component.RuntimeSpecs, reexecMgr ReExecManager, upgradeMgr UpgradeManager, runtimeMgr RuntimeManager, configMgr ConfigManager, varsMgr VarsManager, caps capabilities.Capabilities, monitorMgr MonitorManager, isManaged bool, modifiers ...ComponentsModifier) *Coordinator {
 	var fleetState cproto.State
 	var fleetMessage string
 	if !isManaged {
diff --git a/internal/pkg/agent/application/coordinator/diagnostics_test.go b/internal/pkg/agent/application/coordinator/diagnostics_test.go
index a5cdaf5a100..1e23a7e8e40 100644
--- a/internal/pkg/agent/application/coordinator/diagnostics_test.go
+++ b/internal/pkg/agent/application/coordinator/diagnostics_test.go
@@ -409,10 +409,9 @@ components:
 	assert.YAMLEq(t, expected, string(result), "components-actual diagnostic returned unexpected value")
 }
 
+// TestDiagnosticState creates a coordinator with a test state and verifies
+// that the state diagnostic reports it.
 func TestDiagnosticState(t *testing.T) {
-	// Create a coordinator with a test state and verify that the state
-	// diagnostic reports it
-
 	now := time.Now().UTC()
 	state := State{
 		State:        agentclient.Starting,
@@ -427,7 +426,8 @@ func TestDiagnosticState(t *testing.T) {
 					State:   client.UnitStateDegraded,
 					Message: "degraded message",
 					VersionInfo: runtime.ComponentVersionInfo{
-						Name: "version name",
+						Name:      "version name",
+						BuildHash: "a-build-hash",
 					},
 				},
 			},
@@ -461,6 +461,7 @@ components:
       units: {}
       version_info:
         name: "version name"
+        build_hash: "a-build-hash"
 upgrade_details:
   target_version: 8.12.0
   state: UPG_DOWNLOADING
@@ -503,7 +504,8 @@ func TestDiagnosticStateForAPM(t *testing.T) {
 					State:   client.UnitStateDegraded,
 					Message: "degraded message",
 					VersionInfo: runtime.ComponentVersionInfo{
-						Name: "version name",
+						Name:      "version name",
+						BuildHash: "a-build-hash",
 					},
 					Component: &proto.Component{
 						ApmConfig: &proto.APMConfig{
@@ -540,6 +542,7 @@ components:
       units: {}
       version_info:
         name: "version name"
+        build_hash: "a-build-hash"
       component:
         apmconfig:
           elastic:
diff --git a/internal/pkg/agent/application/fleet_server_bootstrap.go b/internal/pkg/agent/application/fleet_server_bootstrap.go
index aa38ae001b5..51ac633e972 100644
--- a/internal/pkg/agent/application/fleet_server_bootstrap.go
+++ b/internal/pkg/agent/application/fleet_server_bootstrap.go
@@ -102,7 +102,7 @@ func FleetServerComponentModifier(serverCfg *configuration.FleetServerConfig) co
 
 // InjectFleetConfigComponentModifier The modifier that injects the fleet configuration for the components
 // that need to be able to connect to fleet server.
-func InjectFleetConfigComponentModifier(fleetCfg *configuration.FleetAgentConfig, agentInfo *info.AgentInfo) coordinator.ComponentsModifier {
+func InjectFleetConfigComponentModifier(fleetCfg *configuration.FleetAgentConfig, agentInfo info.Agent) coordinator.ComponentsModifier {
 	return func(comps []component.Component, cfg map[string]interface{}) ([]component.Component, error) {
 		hostsStr := fleetCfg.Client.GetHosts()
 		fleetHosts := make([]interface{}, 0, len(hostsStr))
diff --git a/internal/pkg/agent/application/info/agent_info.go b/internal/pkg/agent/application/info/agent_info.go
index f335b2eb76e..5e6b07f0ef8 100644
--- a/internal/pkg/agent/application/info/agent_info.go
+++ b/internal/pkg/agent/application/info/agent_info.go
@@ -11,6 +11,29 @@ import (
 	"github.com/elastic/elastic-agent/pkg/core/logger"
 )
 
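+// Agent exposes the identity and settings of the running Elastic Agent.
+// *AgentInfo implements this interface.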
+type Agent interface {
+	// AgentID returns the agent identifier.
+	AgentID() string
+
+	// Headers returns custom headers used to communicate with Elasticsearch.
+	Headers() map[string]string
+
+	// LogLevel returns the agent's log level.
+	LogLevel() string
+
+	// ReloadID reloads the agent ID from the configuration file.
+	ReloadID(ctx context.Context) error
+
+	// SetLogLevel updates the agent's log level.
+	SetLogLevel(ctx context.Context, level string) error
+
+	// Snapshot reports whether this version is a snapshot build.
+	Snapshot() bool
+
+	// Version returns the version for this Agent.
+	Version() string
+}
+
 // AgentInfo is a collection of information about agent.
 type AgentInfo struct {
 	agentID  string
diff --git a/internal/pkg/agent/application/managed_mode.go b/internal/pkg/agent/application/managed_mode.go
index 524500c661a..7f8d22a16ca 100644
--- a/internal/pkg/agent/application/managed_mode.go
+++ b/internal/pkg/agent/application/managed_mode.go
@@ -41,7 +41,7 @@ const dispatchFlushInterval = time.Minute * 5
 
 type managedConfigManager struct {
 	log                  *logger.Logger
-	agentInfo            *info.AgentInfo
+	agentInfo            info.Agent
 	cfg                  *configuration.Configuration
 	client               *remote.Client
 	store                storage.Store
@@ -60,7 +60,7 @@ type managedConfigManager struct {
 func newManagedConfigManager(
 	ctx context.Context,
 	log *logger.Logger,
-	agentInfo *info.AgentInfo,
+	agentInfo info.Agent,
 	cfg *configuration.Configuration,
 	storeSaver storage.Store,
 	runtime *runtime.Manager,
diff --git a/internal/pkg/agent/application/monitoring/v1_monitor.go b/internal/pkg/agent/application/monitoring/v1_monitor.go
index 1b60fcfb679..485595eda19 100644
--- a/internal/pkg/agent/application/monitoring/v1_monitor.go
+++ b/internal/pkg/agent/application/monitoring/v1_monitor.go
@@ -69,7 +69,7 @@ type BeatsMonitor struct {
 	enabled         bool // feature flag disabling whole v1 monitoring story
 	config          *monitoringConfig
 	operatingSystem string
-	agentInfo       *info.AgentInfo
+	agentInfo       info.Agent
 }
 
 type monitoringConfig struct {
@@ -77,7 +77,7 @@ type monitoringConfig struct {
 }
 
 // New creates a new BeatsMonitor instance.
-func New(enabled bool, operatingSystem string, cfg *monitoringCfg.MonitoringConfig, agentInfo *info.AgentInfo) *BeatsMonitor {
+func New(enabled bool, operatingSystem string, cfg *monitoringCfg.MonitoringConfig, agentInfo info.Agent) *BeatsMonitor {
 	return &BeatsMonitor{
 		enabled: enabled,
 		config: &monitoringConfig{
@@ -914,7 +914,7 @@ func (b *BeatsMonitor) injectMetricsInput(cfg map[string]interface{}, componentI
 	return nil
 }
 
-func createProcessorsForJSONInput(name string, compID, monitoringNamespace string, agentInfo *info.AgentInfo) []interface{} {
+func createProcessorsForJSONInput(name string, compID, monitoringNamespace string, agentInfo info.Agent) []interface{} {
 	return []interface{}{
 		map[string]interface{}{
 			"add_fields": map[string]interface{}{
diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go
index 46cf449f963..e154251f552 100644
--- a/internal/pkg/agent/application/upgrade/upgrade.go
+++ b/internal/pkg/agent/application/upgrade/upgrade.go
@@ -53,7 +53,7 @@ var ErrSameVersion = errors.New("upgrade did not occur because its the same vers
 type Upgrader struct {
 	log            *logger.Logger
 	settings       *artifact.Config
-	agentInfo      *info.AgentInfo
+	agentInfo      info.Agent
 	upgradeable    bool
 	fleetServerURI string
 	markerWatcher  MarkerWatcher
@@ -67,7 +67,7 @@ func IsUpgradeable() bool {
 }
 
 // NewUpgrader creates an upgrader which is capable of performing upgrade operation
-func NewUpgrader(log *logger.Logger, settings *artifact.Config, agentInfo *info.AgentInfo) (*Upgrader, error) {
+func NewUpgrader(log *logger.Logger, settings *artifact.Config, agentInfo info.Agent) (*Upgrader, error) {
 	return &Upgrader{
 		log:           log,
 		settings:      settings,
diff --git a/internal/pkg/agent/storage/encrypted_disk_store.go b/internal/pkg/agent/storage/encrypted_disk_store.go
index edad97f19ef..280d1c8b3ba 100644
--- a/internal/pkg/agent/storage/encrypted_disk_store.go
+++ b/internal/pkg/agent/storage/encrypted_disk_store.go
@@ -164,7 +164,7 @@ func (d *EncryptedDiskStore) Load() (rc io.ReadCloser, err error) {
 	fd, err := os.OpenFile(d.target, os.O_RDONLY, perms)
 	if err != nil {
 		if errors.Is(err, os.ErrNotExist) {
-			// If file doesn't exists, return empty reader closer
+			// If file doesn't exist, return empty reader closer
 			return io.NopCloser(bytes.NewReader([]byte{})), nil
 		}
 		return nil, errors.New(err,
diff --git a/internal/pkg/agent/vault/vault_notdarwin.go b/internal/pkg/agent/vault/vault_notdarwin.go
index 43f65bd4197..4b0e86a8a2e 100644
--- a/internal/pkg/agent/vault/vault_notdarwin.go
+++ b/internal/pkg/agent/vault/vault_notdarwin.go
@@ -96,18 +96,25 @@ func New(ctx context.Context, path string, opts ...OptionFunc) (v *Vault, err er
 func (v *Vault) Set(ctx context.Context, key string, data []byte) (err error) {
 	enc, err := v.encrypt(data)
 	if err != nil {
-		return err
+		return fmt.Errorf("vault Set: could not encrypt data: %w", err)
 	}
 
 	err = v.tryLock(ctx)
 	if err != nil {
-		return err
+		return fmt.Errorf("vault Set: could not acquire lock: %w", err)
 	}
 	defer func() {
 		err = v.unlockAndJoinErrors(err)
+		if err != nil {
+			err = fmt.Errorf("vault Set: unlockAndJoinErrors failed: %w", err)
+		}
 	}()
 
-	return writeFile(v.filepathFromKey(key), enc)
+	err = writeFile(v.filepathFromKey(key), enc)
+	if err != nil {
+		return fmt.Errorf("vault Set: could not write key to file: %w", err)
+	}
+	return nil
 }
 
 // Get retrieves the key from the vault store
diff --git a/pkg/component/runtime/command.go b/pkg/component/runtime/command.go
index 140e9d22557..c422371c2a3 100644
--- a/pkg/component/runtime/command.go
+++ b/pkg/component/runtime/command.go
@@ -420,7 +420,7 @@ func (c *commandRuntime) stop(ctx context.Context) error {
 
 func (c *commandRuntime) startWatcher(info *process.Info, comm Communicator) {
 	go func() {
-		err := comm.WriteConnInfo(info.Stdin)
+		err := comm.WriteStartUpInfo(info.Stdin)
 		if err != nil {
 			_, _ = c.logErr.Write([]byte(fmt.Sprintf("Failed: failed to provide connection information to spawned pid '%d': %s", info.PID, err)))
 			// kill instantly
diff --git a/pkg/component/runtime/conn_info_server.go b/pkg/component/runtime/conn_info_server.go
index f3e2a0a6fe0..d6a55c4abb7 100644
--- a/pkg/component/runtime/conn_info_server.go
+++ b/pkg/component/runtime/conn_info_server.go
@@ -46,7 +46,7 @@ func newConnInfoServer(log *logger.Logger, comm Communicator, port int) (*connIn
 				break
 			}
 			log.Debugf("client connected, sending connection info")
-			err = comm.WriteConnInfo(conn)
+			err = comm.WriteStartUpInfo(conn)
 			if err != nil {
 				if !errors.Is(err, io.EOF) {
 					log.Errorf("failed write conn info: %v", err)
diff --git a/pkg/component/runtime/conn_info_server_test.go b/pkg/component/runtime/conn_info_server_test.go
index 7c2b3ee1a79..299ef323509 100644
--- a/pkg/component/runtime/conn_info_server_test.go
+++ b/pkg/component/runtime/conn_info_server_test.go
@@ -44,7 +44,7 @@ func newMockCommunicator() *mockCommunicator {
 	}
 }
 
-func (c *mockCommunicator) WriteConnInfo(w io.Writer, services ...client.Service) error {
+func (c *mockCommunicator) WriteStartUpInfo(w io.Writer, services ...client.Service) error {
 	infoBytes, err := protobuf.Marshal(c.startupInfo)
 	if err != nil {
 		return fmt.Errorf("failed to marshal connection information: %w", err)
diff --git a/pkg/component/runtime/manager.go b/pkg/component/runtime/manager.go
index 0756b663149..a3731673b3e 100644
--- a/pkg/component/runtime/manager.go
+++ b/pkg/component/runtime/manager.go
@@ -97,7 +97,7 @@ type Manager struct {
 	ca         *authority.CertificateAuthority
 	listenAddr string
 	listenPort int
-	agentInfo  *info.AgentInfo
+	agentInfo  info.Agent
 	tracer     *apm.Tracer
 	monitor    MonitoringManager
 	grpcConfig *configuration.GRPCConfig
@@ -150,7 +150,7 @@ func NewManager(
 	logger,
 	baseLogger *logger.Logger,
 	listenAddr string,
-	agentInfo *info.AgentInfo,
+	agentInfo info.Agent,
 	tracer *apm.Tracer,
 	monitor MonitoringManager,
 	grpcConfig *configuration.GRPCConfig,
@@ -159,6 +159,10 @@ func NewManager(
 	if err != nil {
 		return nil, err
 	}
+
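+	// The manager passes the agent identity on to every component it starts, so agentInfo must not be nil.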
+	if agentInfo == nil {
+		return nil, errors.New("agentInfo cannot be nil")
+	}
 	m := &Manager{
 		logger:         logger,
 		baseLogger:     baseLogger,
diff --git a/pkg/component/runtime/manager_fake_input_test.go b/pkg/component/runtime/manager_fake_input_test.go
index 5a61fcde1ac..8b0931def51 100644
--- a/pkg/component/runtime/manager_fake_input_test.go
+++ b/pkg/component/runtime/manager_fake_input_test.go
@@ -74,10 +74,10 @@ type FakeInputSuite struct {
 	suite.Suite
 }
 
-func (suite *FakeInputSuite) SetupTest() {
+func (suite *FakeInputSuite) SetupSuite() {
 	// Tests using the fake input / shipper need to override the
-	// versionedHome and topPath globals to reference the temporary directory
-	// the test is running in.
+	// versionedHome and topPath globals to reference the temporary
+	// directory the test is running in.
 	// That's why these tests run in their own suite: it's hard to properly
 	// clean up these global changes after a test without setting off the
 	// data race detector, so they all run together and reset at the start of
@@ -93,16 +93,9 @@ func (suite *FakeInputSuite) setupTestPaths() {
 	t := suite.T()
 	t.Helper()
 
-	tmpDir, err := os.MkdirTemp("", "at-*")
-	if err != nil {
-		t.Fatalf("failed to create temp directory: %s", err)
-	}
-	paths.SetVersionHome(false)
+	tmpDir := t.TempDir()
 	paths.SetTop(tmpDir)
-
-	t.Cleanup(func() {
-		_ = os.RemoveAll(tmpDir)
-	})
+	paths.SetVersionHome(false)
 }
 
 func (suite *FakeInputSuite) TestManager_StartStop() {
@@ -111,7 +104,7 @@ func (suite *FakeInputSuite) TestManager_StartStop() {
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()
 
-	ai, _ := info.NewAgentInfo(ctx, true)
+	ai := &info.AgentInfo{}
 	m, err := NewManager(newDebugLogger(t), newDebugLogger(t), "localhost:0", ai, apmtest.DiscardTracer, newTestMonitoringMgr(), configuration.DefaultGRPCConfig())
 	require.NoError(t, err)
 	errCh := make(chan error)
@@ -232,7 +225,7 @@ func (suite *FakeInputSuite) TestManager_Features() {
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()
 
-	agentInfo, _ := info.NewAgentInfo(ctx, true)
+	agentInfo := &info.AgentInfo{}
 	m, err := NewManager(
 		newDebugLogger(t),
 		newDebugLogger(t),
@@ -433,7 +426,7 @@ func (suite *FakeInputSuite) TestManager_APM() {
 	ctx, cancel := context.WithTimeout(context.Background(), timeout)
 	defer cancel()
 
-	agentInfo, _ := info.NewAgentInfo(ctx, true)
+	agentInfo := &info.AgentInfo{}
 	m, err := NewManager(
 		newDebugLogger(t),
 		newDebugLogger(t),
@@ -668,7 +661,7 @@ func (suite *FakeInputSuite) TestManager_Limits() {
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()
 
-	agentInfo, _ := info.NewAgentInfo(ctx, true)
+	agentInfo := &info.AgentInfo{}
 	m, err := NewManager(
 		newDebugLogger(t),
 		newDebugLogger(t),
@@ -832,7 +825,7 @@ func (suite *FakeInputSuite) TestManager_ShipperLimits() {
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()
 
-	agentInfo, _ := info.NewAgentInfo(ctx, true)
+	agentInfo := &info.AgentInfo{}
 	m, err := NewManager(
 		newDebugLogger(t),
 		newDebugLogger(t),
@@ -996,7 +989,7 @@ func (suite *FakeInputSuite) TestManager_BadUnitToGood() {
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()
 
-	ai, _ := info.NewAgentInfo(ctx, true)
+	ai := &info.AgentInfo{}
 	m, err := NewManager(newDebugLogger(t), newDebugLogger(t), "localhost:0", ai, apmtest.DiscardTracer, newTestMonitoringMgr(), configuration.DefaultGRPCConfig())
 	require.NoError(t, err)
 	errCh := make(chan error)
@@ -1165,7 +1158,7 @@ func (suite *FakeInputSuite) TestManager_GoodUnitToBad() {
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()
 
-	ai, _ := info.NewAgentInfo(ctx, true)
+	ai := &info.AgentInfo{}
 	m, err := NewManager(newDebugLogger(t), newDebugLogger(t), "localhost:0", ai, apmtest.DiscardTracer, newTestMonitoringMgr(), configuration.DefaultGRPCConfig())
 	require.NoError(t, err)
 	runResultChan := make(chan error, 1)
@@ -1347,7 +1340,7 @@ func (suite *FakeInputSuite) TestManager_NoDeadlock() {
 	maxUpdateInterval := 15 * time.Second
 
 	// Create the runtime manager
-	ai, _ := info.NewAgentInfo(context.Background(), true)
+	ai := &info.AgentInfo{}
 	m, err := NewManager(newDebugLogger(t), newDebugLogger(t), "localhost:0", ai, apmtest.DiscardTracer, newTestMonitoringMgr(), configuration.DefaultGRPCConfig())
 	require.NoError(t, err)
 
@@ -1421,7 +1414,7 @@ func (suite *FakeInputSuite) TestManager_Configure() {
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()
 
-	ai, _ := info.NewAgentInfo(ctx, true)
+	ai := &info.AgentInfo{}
 	m, err := NewManager(newDebugLogger(t), newDebugLogger(t), "localhost:0", ai, apmtest.DiscardTracer, newTestMonitoringMgr(), configuration.DefaultGRPCConfig())
 	require.NoError(t, err)
 	errCh := make(chan error)
@@ -1543,7 +1536,7 @@ func (suite *FakeInputSuite) TestManager_RemoveUnit() {
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()
 
-	ai, _ := info.NewAgentInfo(ctx, true)
+	ai := &info.AgentInfo{}
 	m, err := NewManager(newDebugLogger(t), newDebugLogger(t), "localhost:0", ai, apmtest.DiscardTracer, newTestMonitoringMgr(), configuration.DefaultGRPCConfig())
 	require.NoError(t, err)
 	errCh := make(chan error)
@@ -1698,7 +1691,7 @@ func (suite *FakeInputSuite) TestManager_ActionState() {
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()
 
-	ai, _ := info.NewAgentInfo(ctx, true)
+	ai := &info.AgentInfo{}
 	m, err := NewManager(newDebugLogger(t), newDebugLogger(t), "localhost:0", ai, apmtest.DiscardTracer, newTestMonitoringMgr(), configuration.DefaultGRPCConfig())
 	require.NoError(t, err)
 	errCh := make(chan error)
@@ -1823,7 +1816,7 @@ func (suite *FakeInputSuite) TestManager_Restarts() {
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()
 
-	ai, _ := info.NewAgentInfo(ctx, true)
+	ai := &info.AgentInfo{}
 	m, err := NewManager(newDebugLogger(t), newDebugLogger(t), "localhost:0", ai, apmtest.DiscardTracer, newTestMonitoringMgr(), configuration.DefaultGRPCConfig())
 	require.NoError(t, err)
 	errCh := make(chan error)
@@ -1959,7 +1952,7 @@ func (suite *FakeInputSuite) TestManager_Restarts_ConfigKill() {
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()
 
-	ai, _ := info.NewAgentInfo(ctx, true)
+	ai := &info.AgentInfo{}
 	m, err := NewManager(newDebugLogger(t), newDebugLogger(t), "localhost:0", ai, apmtest.DiscardTracer, newTestMonitoringMgr(), configuration.DefaultGRPCConfig())
 	require.NoError(t, err)
 	errCh := make(chan error)
@@ -2103,7 +2096,7 @@ func (suite *FakeInputSuite) TestManager_KeepsRestarting() {
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()
 
-	ai, _ := info.NewAgentInfo(ctx, true)
+	ai := &info.AgentInfo{}
 	m, err := NewManager(newDebugLogger(t), newDebugLogger(t), "localhost:0", ai, apmtest.DiscardTracer, newTestMonitoringMgr(), configuration.DefaultGRPCConfig())
 	require.NoError(t, err)
 	errCh := make(chan error)
@@ -2247,7 +2240,7 @@ func (suite *FakeInputSuite) TestManager_RestartsOnMissedCheckins() {
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()
 
-	ai, _ := info.NewAgentInfo(ctx, true)
+	ai := &info.AgentInfo{}
 	m, err := NewManager(newDebugLogger(t), newDebugLogger(t), "localhost:0", ai, apmtest.DiscardTracer, newTestMonitoringMgr(), configuration.DefaultGRPCConfig())
 	require.NoError(t, err)
 	errCh := make(chan error)
@@ -2366,7 +2359,7 @@ func (suite *FakeInputSuite) TestManager_InvalidAction() {
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()
 
-	ai, _ := info.NewAgentInfo(ctx, true)
+	ai := &info.AgentInfo{}
 	m, err := NewManager(newDebugLogger(t), newDebugLogger(t), "localhost:0", ai, apmtest.DiscardTracer, newTestMonitoringMgr(), configuration.DefaultGRPCConfig())
 	require.NoError(t, err)
 	errCh := make(chan error)
@@ -2484,7 +2477,7 @@ func (suite *FakeInputSuite) TestManager_MultiComponent() {
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()
 
-	agentInfo, _ := info.NewAgentInfo(ctx, true)
+	agentInfo := &info.AgentInfo{}
 	m, err := NewManager(
 		newDebugLogger(t),
 		newDebugLogger(t),
@@ -2698,7 +2691,7 @@ func (suite *FakeInputSuite) TestManager_LogLevel() {
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()
 
-	ai, _ := info.NewAgentInfo(ctx, true)
+	ai := &info.AgentInfo{}
 	m, err := NewManager(
 		newDebugLogger(t),
 		newDebugLogger(t),
@@ -2851,7 +2844,7 @@ func (suite *FakeInputSuite) TestManager_Shipper() {
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()
 
-	ai, _ := info.NewAgentInfo(ctx, true)
+	ai := &info.AgentInfo{}
 	m, err := NewManager(newDebugLogger(t), newDebugLogger(t), "localhost:0", ai, apmtest.DiscardTracer, newTestMonitoringMgr(), configuration.DefaultGRPCConfig())
 	require.NoError(t, err)
 	errCh := make(chan error)
@@ -3147,7 +3140,7 @@ func (suite *FakeInputSuite) TestManager_OutputChange() {
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()
 
-	ai, _ := info.NewAgentInfo(ctx, true)
+	ai := &info.AgentInfo{}
 	m, err := NewManager(
 		newDebugLogger(t),
 		newDebugLogger(t),
@@ -3393,7 +3386,7 @@ func (suite *FakeInputSuite) TestManager_Chunk() {
 	grpcConfig := configuration.DefaultGRPCConfig()
 	grpcConfig.MaxMsgSize = grpcDefaultSize * 2 // set to double the default size
 
-	ai, _ := info.NewAgentInfo(ctx, true)
+	ai := &info.AgentInfo{}
 	m, err := NewManager(newDebugLogger(t), newDebugLogger(t), "localhost:0", ai, apmtest.DiscardTracer, newTestMonitoringMgr(), grpcConfig)
 	require.NoError(t, err)
 	errCh := make(chan error)
diff --git a/pkg/component/runtime/manager_test.go b/pkg/component/runtime/manager_test.go
index 5ce50b9557b..996fe0cc1d9 100644
--- a/pkg/component/runtime/manager_test.go
+++ b/pkg/component/runtime/manager_test.go
@@ -17,7 +17,6 @@ import (
 
 	"github.com/elastic/elastic-agent-client/v7/pkg/client"
 	"github.com/elastic/elastic-agent-libs/logp"
-
 	"github.com/elastic/elastic-agent/internal/pkg/agent/application/info"
 	"github.com/elastic/elastic-agent/internal/pkg/agent/configuration"
 	"github.com/elastic/elastic-agent/pkg/component"
@@ -28,7 +27,7 @@ func TestManager_SimpleComponentErr(t *testing.T) {
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()
 
-	ai, _ := info.NewAgentInfo(ctx, true)
+	ai := &info.AgentInfo{}
 	m, err := NewManager(
 		newDebugLogger(t),
 		newDebugLogger(t),
diff --git a/pkg/component/runtime/runtime_comm.go b/pkg/component/runtime/runtime_comm.go
index 47322f82108..79eeb9f89c6 100644
--- a/pkg/component/runtime/runtime_comm.go
+++ b/pkg/component/runtime/runtime_comm.go
@@ -29,7 +29,7 @@ import (
 type Communicator interface {
-	// WriteConnInfo writes the connection information to the writer, informing the component it has access
-	// to the provided services.
+	// WriteStartUpInfo writes the startup information to the writer, informing the component it has access
+	// to the provided services.
-	WriteConnInfo(w io.Writer, services ...client.Service) error
+	WriteStartUpInfo(w io.Writer, services ...client.Service) error
 	// CheckinExpected sends the expected state to the component.
 	//
 	// observed is the observed message received from the component and what was used to compute the provided
@@ -44,7 +44,7 @@ type runtimeComm struct {
 	logger     *logger.Logger
 	listenAddr string
 	ca         *authority.CertificateAuthority
-	agentInfo  *info.AgentInfo
+	agentInfo  info.Agent
 
 	name  string
 	token string
@@ -71,7 +71,7 @@ type runtimeComm struct {
 	actionsResponse chan *proto.ActionResponse
 }
 
-func newRuntimeComm(logger *logger.Logger, listenAddr string, ca *authority.CertificateAuthority, agentInfo *info.AgentInfo, maxMessageSize int) (*runtimeComm, error) {
+func newRuntimeComm(logger *logger.Logger, listenAddr string, ca *authority.CertificateAuthority, agentInfo info.Agent, maxMessageSize int) (*runtimeComm, error) {
 	token, err := uuid.NewV4()
 	if err != nil {
 		return nil, err
@@ -103,7 +103,7 @@ func newRuntimeComm(logger *logger.Logger, listenAddr string, ca *authority.Cert
 	}, nil
 }
 
-func (c *runtimeComm) WriteConnInfo(w io.Writer, services ...client.Service) error {
+func (c *runtimeComm) WriteStartUpInfo(w io.Writer, services ...client.Service) error {
 	hasV2 := false
 	srvs := make([]proto.ConnInfoServices, 0, len(services))
 	for _, srv := range services {
@@ -129,6 +129,11 @@ func (c *runtimeComm) WriteConnInfo(w io.Writer, services ...client.Service) err
 		// chunking is always allowed if the client supports it
 		Supports:       []proto.ConnectionSupports{proto.ConnectionSupports_CheckinChunking},
 		MaxMessageSize: uint32(c.maxMessageSize),
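+		// Propagate the agent identity (ID, version, snapshot flag) to the spawned component.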
+		AgentInfo: &proto.AgentInfo{
+			Id:       c.agentInfo.AgentID(),
+			Version:  c.agentInfo.Version(),
+			Snapshot: c.agentInfo.Snapshot(),
+		},
 	}
 	infoBytes, err := protobuf.Marshal(startupInfo)
 	if err != nil {
diff --git a/pkg/component/runtime/runtime_comm_test.go b/pkg/component/runtime/runtime_comm_test.go
new file mode 100644
index 00000000000..933ab7c8cbb
--- /dev/null
+++ b/pkg/component/runtime/runtime_comm_test.go
@@ -0,0 +1,80 @@
+// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+// or more contributor license agreements. Licensed under the Elastic License;
+// you may not use this file except in compliance with the Elastic License.
+
+package runtime
+
+import (
+	"bytes"
+	"context"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+
+	"github.com/elastic/elastic-agent-client/v7/pkg/client"
+	"github.com/elastic/elastic-agent/internal/pkg/core/authority"
+)
+
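+// agentInfoMock is a minimal info.Agent implementation for this test; the
+// methods not exercised by WriteStartUpInfo panic if called.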
+type agentInfoMock struct {
+	agentID  string
+	snapshot bool
+	version  string
+}
+
+func (a agentInfoMock) AgentID() string {
+	return a.agentID
+}
+func (a agentInfoMock) Snapshot() bool {
+	return a.snapshot
+}
+
+func (a agentInfoMock) Version() string {
+	return a.version
+}
+
+func (a agentInfoMock) Headers() map[string]string                          { panic("implement me") }
+func (a agentInfoMock) LogLevel() string                                    { panic("implement me") }
+func (a agentInfoMock) ReloadID(ctx context.Context) error                  { panic("implement me") }
+func (a agentInfoMock) SetLogLevel(ctx context.Context, level string) error { panic("implement me") }
+
+func TestRuntimeComm_WriteStartUpInfo_packageVersion(t *testing.T) {
+	agentInfo := agentInfoMock{
+		agentID:  "NCC-1701",
+		snapshot: true,
+		version:  "8.13.0+build1966-09-6",
+	}
+
+	want := client.AgentInfo{
+		ID:       agentInfo.AgentID(),
+		Version:  agentInfo.Version(),
+		Snapshot: agentInfo.Snapshot(),
+	}
+
+	ca, err := authority.NewCA()
+	require.NoError(t, err, "could not create CA")
+	pair, err := ca.GeneratePair()
+	require.NoError(t, err, "could not create certificate pair from CA")
+
+	c := runtimeComm{
+		listenAddr: "localhost",
+		ca:         ca,
+		name:       "a_name",
+		token:      "a_token",
+		cert:       pair,
+		agentInfo:  agentInfo,
+	}
+
+	buff := bytes.Buffer{}
+	err = c.WriteStartUpInfo(&buff)
+	require.NoError(t, err, "failed to write startup info")
+
+	clientv2, _, err := client.NewV2FromReader(&buff, client.VersionInfo{
+		Name: "TestRuntimeComm_WriteStartUpInfo",
+		Meta: nil,
+	})
+	require.NoError(t, err, "failed creating V2 client")
+
+	assert.Equal(t, &want, clientv2.AgentInfo(),
+		"agent info returned by client must match what has been written on command input")
+}
diff --git a/pkg/component/runtime/state.go b/pkg/component/runtime/state.go
index f376a4e184a..67dc9c028d3 100644
--- a/pkg/component/runtime/state.go
+++ b/pkg/component/runtime/state.go
@@ -54,6 +54,8 @@ type ComponentVersionInfo struct {
 	Name string `yaml:"name"`
 	// Additional metadata about the binary.
 	Meta map[string]string `yaml:"meta,omitempty"`
+	// BuildHash is the VCS commit hash the program was built from.
+	BuildHash string `yaml:"build_hash"`
 }
 
 // ComponentState is the overall state of the component.
@@ -345,6 +347,10 @@ func (s *ComponentState) syncCheckin(checkin *proto.CheckinObserved) bool {
 			s.VersionInfo.Name = checkin.VersionInfo.Name
 			changed = true
 		}
+		if checkin.VersionInfo.BuildHash != "" && s.VersionInfo.BuildHash != checkin.VersionInfo.BuildHash {
+			s.VersionInfo.BuildHash = checkin.VersionInfo.BuildHash
+			changed = true
+		}
 		if checkin.VersionInfo.Meta != nil && diffMeta(s.VersionInfo.Meta, checkin.VersionInfo.Meta) {
 			s.VersionInfo.Meta = checkin.VersionInfo.Meta
 			changed = true
diff --git a/pkg/control/v2/server/server.go b/pkg/control/v2/server/server.go
index cea7712623e..2fa845792bc 100644
--- a/pkg/control/v2/server/server.go
+++ b/pkg/control/v2/server/server.go
@@ -45,7 +45,7 @@ type Server struct {
 	cproto.UnimplementedElasticAgentControlServer
 
 	logger     *logger.Logger
-	agentInfo  *info.AgentInfo
+	agentInfo  info.Agent
 	coord      *coordinator.Coordinator
 	listener   net.Listener
 	server     *grpc.Server
@@ -57,7 +57,7 @@ type Server struct {
 }
 
 // New creates a new control protocol server.
-func New(log *logger.Logger, agentInfo *info.AgentInfo, coord *coordinator.Coordinator, tracer *apm.Tracer, diagHooks diagnostics.Hooks, grpcConfig *configuration.GRPCConfig) *Server {
+func New(log *logger.Logger, agentInfo info.Agent, coord *coordinator.Coordinator, tracer *apm.Tracer, diagHooks diagnostics.Hooks, grpcConfig *configuration.GRPCConfig) *Server {
 	return &Server{
 		logger:     log,
 		agentInfo:  agentInfo,
@@ -329,7 +329,7 @@ func (s *Server) Configure(ctx context.Context, req *cproto.ConfigureRequest) (*
 	return &cproto.Empty{}, nil
 }
 
-func stateToProto(state *coordinator.State, agentInfo *info.AgentInfo) (*cproto.StateResponse, error) {
+func stateToProto(state *coordinator.State, agentInfo info.Agent) (*cproto.StateResponse, error) {
 	var err error
 	components := make([]*cproto.ComponentState, 0, len(state.Components))
 	for _, comp := range state.Components {
diff --git a/pkg/testing/fixture.go b/pkg/testing/fixture.go
index 2f4a8da7518..ad932f306ae 100644
--- a/pkg/testing/fixture.go
+++ b/pkg/testing/fixture.go
@@ -20,6 +20,7 @@ import (
 
 	"github.com/cenkalti/backoff/v4"
 	"github.com/otiai10/copy"
+	"github.com/stretchr/testify/require"
 	"gopkg.in/yaml.v2"
 
 	"github.com/elastic/elastic-agent/internal/pkg/agent/application/paths"
@@ -740,6 +741,34 @@ func (f *Fixture) ExecVersion(ctx context.Context, opts ...process.CmdOption) (A
 	return version, err
 }
 
+// ExecDiagnostics runs the agent diagnostics command and returns the path to
+// the generated zip archive. If no cmd is provided, `diagnostics` is used as
+// the default. The command runs in a temporary working directory.
+// Use extractZipArchive to extract the diagnostics archive.
+func (f *Fixture) ExecDiagnostics(ctx context.Context, cmd ...string) (string, error) {
+	t := f.t
+	t.Helper()
+
+	if len(cmd) == 0 {
+		cmd = []string{"diagnostics"}
+	}
+
+	wd := t.TempDir()
+	diagnosticCmdOutput, err := f.Exec(ctx, cmd, process.WithWorkDir(wd))
+
+	t.Logf("diagnostic command completed with output \n%q\n", diagnosticCmdOutput)
+	require.NoErrorf(t, err, "error running diagnostic command: %v", err)
+
+	t.Logf("checking directory %q for the generated diagnostics archive", wd)
+	files, err := filepath.Glob(filepath.Join(wd, "elastic-agent-diagnostics-*.zip"))
+	require.NoError(t, err)
+	require.Len(t, files, 1)
+
+	t.Logf("Found %q diagnostic archive.", files[0])
+
+	return files[0], err
+}
+
 // IsHealthy checks whether the prepared Elastic Agent reports itself as healthy.
 // It returns an error if either the reported state isn't healthy or if it fails
 // to fetch the current state. If the status is successfully fetched, but it
diff --git a/pkg/testing/tools/fleettools/fleet.go b/pkg/testing/tools/fleettools/fleet.go
index a95eeac971d..8b83f691d62 100644
--- a/pkg/testing/tools/fleettools/fleet.go
+++ b/pkg/testing/tools/fleettools/fleet.go
@@ -11,9 +11,17 @@ import (
 	"os"
 	"strings"
 
+	"github.com/google/uuid"
+
 	"github.com/elastic/elastic-agent-libs/kibana"
 )
 
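+// EnrollParams holds the parameters needed to enroll an agent in Fleet.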
+type EnrollParams struct {
+	EnrollmentToken string `json:"api_key"`
+	FleetURL        string `json:"fleet_url"`
+	PolicyID        string `json:"policy_id"`
+}
+
 // GetAgentByPolicyIDAndHostnameFromList get an agent by the local_metadata.host.name property, reading from the agents list
 func GetAgentByPolicyIDAndHostnameFromList(ctx context.Context, client *kibana.Client, policyID, hostname string) (*kibana.AgentExisting, error) {
 	listAgentsResp, err := client.ListAgents(ctx, kibana.ListAgentsRequest{})
@@ -144,3 +152,43 @@ func DefaultURL(ctx context.Context, client *kibana.Client) (string, error) {
 
 	return "", errors.New("unable to determine default fleet server URL")
 }
+
+// NewEnrollParams creates a new policy with logs and metrics monitoring
+// enabled, creates an enrollment token for it, and returns an EnrollParams
+// with the information needed to enroll an agent. If an error happens, it
+// returns nil and a non-nil error.
+func NewEnrollParams(ctx context.Context, client *kibana.Client) (*EnrollParams, error) {
+	policyUUID := uuid.New().String()
+	policy := kibana.AgentPolicy{
+		Name:        "test-policy-" + policyUUID,
+		Namespace:   "default",
+		Description: "Test policy " + policyUUID,
+		MonitoringEnabled: []kibana.MonitoringEnabledOption{
+			kibana.MonitoringEnabledLogs,
+			kibana.MonitoringEnabledMetrics,
+		},
+	}
+
+	policyResp, err := client.CreatePolicy(ctx, policy)
+	if err != nil {
+		return nil, fmt.Errorf("failed creating policy: %w", err)
+	}
+
+	createEnrollmentApiKeyReq := kibana.CreateEnrollmentAPIKeyRequest{
+		PolicyID: policyResp.ID,
+	}
+	enrollmentToken, err := client.CreateEnrollmentAPIKey(ctx, createEnrollmentApiKeyReq)
+	if err != nil {
+		return nil, fmt.Errorf("failed creating enrollment API key: %w", err)
+	}
+
+	fleetServerURL, err := DefaultURL(ctx, client)
+	if err != nil {
+		return nil, fmt.Errorf("failed getting Fleet Server URL: %w", err)
+	}
+
+	return &EnrollParams{
+		EnrollmentToken: enrollmentToken.APIKey,
+		FleetURL:        fleetServerURL,
+		PolicyID:        policyResp.ID,
+	}, nil
+}
diff --git a/testing/integration/diagnostics_test.go b/testing/integration/diagnostics_test.go
index b2d1b79250e..de10c1dc761 100644
--- a/testing/integration/diagnostics_test.go
+++ b/testing/integration/diagnostics_test.go
@@ -22,7 +22,6 @@ import (
 	"github.com/stretchr/testify/require"
 
 	"github.com/elastic/elastic-agent/pkg/control/v2/client"
-	"github.com/elastic/elastic-agent/pkg/core/process"
 	integrationtest "github.com/elastic/elastic-agent/pkg/testing"
 	"github.com/elastic/elastic-agent/pkg/testing/define"
 	"github.com/elastic/elastic-agent/pkg/testing/tools/testcontext"
@@ -139,23 +138,13 @@ func TestDiagnosticsCommand(t *testing.T) {
 
 func testDiagnosticsFactory(t *testing.T, diagFiles []string, diagCompFiles []string, fix *integrationtest.Fixture, cmd []string) func(ctx context.Context) error {
 	return func(ctx context.Context) error {
-		diagnosticCommandWD := t.TempDir()
-		diagnosticCmdOutput, err := fix.Exec(ctx, cmd, process.WithWorkDir(diagnosticCommandWD))
-
-		t.Logf("diagnostic command completed with output \n%q\n", diagnosticCmdOutput)
-		require.NoErrorf(t, err, "error running diagnostic command: %v", err)
-
-		t.Logf("checking directory %q for the generated archive", diagnosticCommandWD)
-		files, err := filepath.Glob(filepath.Join(diagnosticCommandWD, diagnosticsArchiveGlobPattern))
-		require.NoError(t, err)
-		require.Len(t, files, 1)
-		t.Logf("Found %q diagnostic archive.", files[0])
+		diagZip, err := fix.ExecDiagnostics(ctx, cmd...)
 
 		// get the version of the running agent
 		avi, err := getRunningAgentVersion(ctx, fix)
 		require.NoError(t, err)
 
-		verifyDiagnosticArchive(t, files[0], diagFiles, diagCompFiles, avi)
+		verifyDiagnosticArchive(t, diagZip, diagFiles, diagCompFiles, avi)
 
 		return nil
 	}
@@ -217,6 +206,8 @@ func verifyDiagnosticArchive(t *testing.T, diagArchive string, diagFiles []strin
 }
 
 func extractZipArchive(t *testing.T, zipFile string, dst string) {
+	t.Helper()
+
 	zReader, err := zip.OpenReader(zipFile)
 	require.NoErrorf(t, err, "file %q is not a valid zip archive", zipFile)
 	defer zReader.Close()
diff --git a/testing/integration/package_version_test.go b/testing/integration/package_version_test.go
index c05c35c6d4d..10d6deb85a5 100644
--- a/testing/integration/package_version_test.go
+++ b/testing/integration/package_version_test.go
@@ -7,17 +7,24 @@
 package integration
 
 import (
+	"bytes"
 	"context"
 	"os"
+	"os/exec"
+	"path/filepath"
+	"runtime"
+	"strings"
 	"testing"
 	"time"
 
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
+	"gopkg.in/yaml.v2"
 
 	"github.com/elastic/elastic-agent/pkg/control/v2/client"
 	atesting "github.com/elastic/elastic-agent/pkg/testing"
 	"github.com/elastic/elastic-agent/pkg/testing/define"
+	"github.com/elastic/elastic-agent/pkg/testing/tools/fleettools"
 	"github.com/elastic/elastic-agent/pkg/testing/tools/testcontext"
 	"github.com/elastic/elastic-agent/version"
 )
@@ -50,6 +57,119 @@ func TestPackageVersion(t *testing.T) {
 	t.Run("remove package versions file and test version again", testAfterRemovingPkgVersionFiles(ctx, f))
 }
 
+func TestComponentBuildHashInDiagnostics(t *testing.T) {
+	info := define.Require(t, define.Requirements{
+		Group: Fleet,
+		Stack: &define.Stack{},
+		Local: false, // requires Agent installation
+		Sudo:  true,  // requires Agent installation
+	})
+	ctx := context.Background()
+
+	f, err := define.NewFixture(t, define.Version())
+	require.NoError(t, err, "could not create new fixture")
+
+	err = f.Prepare(ctx)
+	require.NoError(t, err, "could not prepare fixture")
+
+	enrollParams, err := fleettools.NewEnrollParams(ctx, info.KibanaClient)
+	require.NoError(t, err, "failed preparing Agent enrollment")
+
+	t.Log("Installing Elastic Agent...")
+	installOpts := atesting.InstallOpts{
+		NonInteractive: true,
+		Force:          true,
+		EnrollOpts: atesting.EnrollOpts{
+			URL:             enrollParams.FleetURL,
+			EnrollmentToken: enrollParams.EnrollmentToken,
+		},
+	}
+	output, err := f.Install(ctx, &installOpts)
+	require.NoError(t, err,
+		"failed to install the agent [output: %s]", string(output))
+
+	stateBuff := bytes.Buffer{}
+	isHealth := func() bool {
+		stateBuff.Reset()
+
+		err := f.IsHealthy(ctx)
+		if err != nil {
+			stateBuff.WriteString(err.Error())
+			return false
+		}
+
+		return true
+	}
+	require.Eventuallyf(t,
+		isHealth,
+		1*time.Minute, 10*time.Second,
+		"agent did not become healthy. Last status: %v", &stateBuff)
+
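+	// Locate the installed filebeat binary and parse its build hash from its
+	// version output so it can be compared with what the agent reports.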
+	filebeat := "filebeat"
+	if runtime.GOOS == "windows" {
+		filebeat += ".exe"
+	}
+	wd := f.WorkDir()
+	glob := filepath.Join(wd, "data", "elastic-agent-*", "components", filebeat)
+	compPaths, err := filepath.Glob(glob)
+	require.NoErrorf(t, err, "failed to glob filebeat path pattern %q", glob)
+	require.Lenf(t, compPaths, 1,
+		"glob pattern \"%s\": found %d paths to filebeat, can only have 1",
+		glob, len(compPaths))
+
+	cmdVer := exec.Command(compPaths[0], "version")
+	output, err = cmdVer.CombinedOutput()
+	require.NoError(t, err, "failed to get filebeat version")
+	outStr := string(output)
+
+	// version output example:
+	// filebeat version 8.13.0 (amd64), libbeat 8.13.0 [0baedd2518bd7e5b78e2280684580cbfdcab5ae8 built 2024-01-23 06:57:37 +0000 UTC]
+	t.Log("parsing commit hash from filebeat version: ", outStr)
+	splits := strings.Split(outStr, "[")
+	require.Lenf(t, splits, 2,
+		"expected beats output version to be split into 2, it was split into %q",
+		strings.Join(splits, "|"))
+	splits = strings.Split(splits[1], " built")
+	require.Lenf(t, splits, 2,
+		"expected split of beats output version to be split into 2, it was split into %q",
+		strings.Join(splits, "|"))
+	wantBuildHash := splits[0]
+
+	diagZip, err := f.ExecDiagnostics(ctx)
+	require.NoError(t, err, "failed collecting diagnostics")
+
+	diag := t.TempDir()
+	extractZipArchive(t, diagZip, diag)
+
+	stateYAML, err := os.Open(filepath.Join(diag, "state.yaml"))
+	require.NoError(t, err, "could not open diagnostics state.yaml")
+	defer stateYAML.Close()
+
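+	// state.yaml contains one entry per component; only the version_info
+	// fields matter for this check.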
+	state := struct {
+		Components []struct {
+			ID    string `yaml:"id"`
+			State struct {
+				VersionInfo struct {
+					BuildHash string `yaml:"build_hash"`
+					Meta      struct {
+						BuildTime string `yaml:"build_time"`
+						Commit    string `yaml:"commit"`
+					} `yaml:"meta"`
+					Name string `yaml:"name"`
+				} `yaml:"version_info"`
+			} `yaml:"state"`
+		} `yaml:"components"`
+	}{}
+	err = yaml.NewDecoder(stateYAML).Decode(&state)
+	require.NoError(t, err, "could not parse state.yaml (%s)", stateYAML.Name())
+
+	for _, c := range state.Components {
+		assert.Equalf(t, wantBuildHash, c.State.VersionInfo.BuildHash,
+			"component %s: VersionInfo.BuildHash mismatch", c.ID)
+		assert.Equalf(t, wantBuildHash, c.State.VersionInfo.Meta.Commit,
+			"component %s: VersionInfo.Meta.Commit mismatch", c.ID)
+	}
+}
+
 func testVersionWithRunningAgent(runCtx context.Context, f *atesting.Fixture) func(*testing.T) {
 
 	return func(t *testing.T) {
@@ -138,3 +258,18 @@ func runAgentWithAfterTest(runCtx context.Context, f *atesting.Fixture, t *testi
 	require.NoError(t, err)
 
 }
+
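+// StateComponentVersion mirrors the component version information found in
+// the state.yaml file of a diagnostics archive.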
+type StateComponentVersion struct {
+	Components []struct {
+		ID    string `yaml:"id"`
+		State struct {
+			VersionInfo struct {
+				Meta struct {
+					BuildTime string `yaml:"build_time"`
+					Commit    string `yaml:"commit"`
+				} `yaml:"meta"`
+				Name string `yaml:"name"`
+			} `yaml:"version_info"`
+		} `yaml:"state"`
+	} `yaml:"components"`
+}
diff --git a/testing/integration/pkgversion_common_test.go b/testing/integration/pkgversion_common_test.go
index a8c4285eabc..30ec07200b2 100644
--- a/testing/integration/pkgversion_common_test.go
+++ b/testing/integration/pkgversion_common_test.go
@@ -109,7 +109,7 @@ func unmarshalVersionOutput(t *testing.T, cmdOutput []byte, binaryOrDaemonKey st
 // findPkgVersionFiles scans recursively a root directory and returns all the package version files encountered
 func findPkgVersionFiles(t *testing.T, rootDir string) []string {
 	t.Helper()
-	//find the package version file
+	// find the package version file
 	installFS := os.DirFS(rootDir)
 	matches := []string{}
 	err := fs.WalkDir(installFS, ".", func(path string, d fs.DirEntry, err error) error {
@@ -122,7 +122,7 @@ func findPkgVersionFiles(t *testing.T, rootDir string) []string {
 		}
 		return nil
 	})
-	require.NoError(t, err)
+	require.NoError(t, err, "could not find package version files, fs.WalkDir failed")
 
 	t.Logf("package version files found: %v", matches)
 	return matches