From 94178d1fb5c91f12a4d305f5b45b1bf2d25cd272 Mon Sep 17 00:00:00 2001
From: tbogosavljevic
Date: Tue, 17 Jun 2025 15:38:29 +0200
Subject: [PATCH 01/87] fix(front): replace link in ssh debug dropdown (#398)
---
front/lib/front_web/templates/job/_ssh_popup.html.eex | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/front/lib/front_web/templates/job/_ssh_popup.html.eex b/front/lib/front_web/templates/job/_ssh_popup.html.eex
index 9d762a1bb..ba96bccf9 100644
--- a/front/lib/front_web/templates/job/_ssh_popup.html.eex
+++ b/front/lib/front_web/templates/job/_ssh_popup.html.eex
@@ -10,7 +10,7 @@
/>
<%= render FrontWeb.SharedView, "_connect_cli_instructions.html", organization: @organization, user: @user %>
From 1daa423bdd4df1d4a6788e67b6fa539ac0a55127 Mon Sep 17 00:00:00 2001
From: Emmanuel Ferdman
Date: Wed, 18 Jun 2025 17:30:11 +0300
Subject: [PATCH 02/87] docs: fix RFCs guide reference (#348)
Signed-off-by: Emmanuel Ferdman
Co-authored-by: Lucas Pinheiro
---
GOVERNANCE.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/GOVERNANCE.md b/GOVERNANCE.md
index 19a4f157b..1b85e3c2c 100644
--- a/GOVERNANCE.md
+++ b/GOVERNANCE.md
@@ -93,7 +93,7 @@ Semaphore follows a governance model that balances commercial stewardship with c
### Decision Visibility
- Technical decisions documented in issues/PRs
-- Architecture decisions documented in [RFCs](../rfcs/README.md)
+- Architecture decisions documented in [RFCs](rfcs/README.md)
- Roadmap publicly available
## Code of Conduct
From 8406924ef6ee16afdf40755684b2889723ead66a Mon Sep 17 00:00:00 2001
From: Amir Hasanbasic <43892661+hamir-suspect@users.noreply.github.com>
Date: Thu, 19 Jun 2025 11:57:46 +0200
Subject: [PATCH 03/87] chore(guard): reduce guard logging volume and make logs
more useful (#403)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
We do not need log-based metrics, so the log level of the "started" and
"finished" logs in the API is reduced to debug.
This change also adds context metadata to gRPC error logs.
## ✅ Checklist
- [x] I have tested this change
- [ ] ~This change requires documentation update~
---
guard/.tool-versions | 2 +
guard/lib/guard/grpc_servers/user_server.ex | 297 +++++++++++---------
2 files changed, 167 insertions(+), 132 deletions(-)
create mode 100644 guard/.tool-versions
diff --git a/guard/.tool-versions b/guard/.tool-versions
new file mode 100644
index 000000000..643d12ee0
--- /dev/null
+++ b/guard/.tool-versions
@@ -0,0 +1,2 @@
+elixir 1.14.3-otp-24
+erlang 24.3.4.9
diff --git a/guard/lib/guard/grpc_servers/user_server.ex b/guard/lib/guard/grpc_servers/user_server.ex
index d9db094af..0fccf23f8 100644
--- a/guard/lib/guard/grpc_servers/user_server.ex
+++ b/guard/lib/guard/grpc_servers/user_server.ex
@@ -16,7 +16,7 @@ defmodule Guard.GrpcServers.UserServer do
@spec describe(User.DescribeRequest.t(), GRPC.Server.Stream.t()) :: User.DescribeResponse.t()
def describe(%User.DescribeRequest{user_id: user_id}, _stream) do
- observe_and_log("grpc.user.describe", fn ->
+ observe_and_log("grpc.user.describe", %{user_id: user_id}, fn ->
result = Front.fetch_user_with_repo_account_details(user_id)
case result do
@@ -34,7 +34,7 @@ defmodule Guard.GrpcServers.UserServer do
GRPC.Server.Stream.t()
) :: User.User.t()
def describe_by_email(%User.DescribeByEmailRequest{email: email}, _stream) do
- observe_and_log("grpc.user.describe_by_email", fn ->
+ observe_and_log("grpc.user.describe_by_email", %{email: email}, fn ->
case Front.fetch_user_by_email(email) do
nil -> grpc_error!(:not_found, "User not found.")
user -> map_user(user)
@@ -55,7 +55,7 @@ defmodule Guard.GrpcServers.UserServer do
},
_stream
) do
- observe_and_log("grpc.user.describe_by_repository_provider", fn ->
+ observe_and_log("grpc.user.describe_by_repository_provider", %{uid: uid, type: type}, fn ->
result =
Front.fetch_user_with_repository_provider(%{
type: User.RepositoryProvider.Type.key(type),
@@ -72,7 +72,7 @@ defmodule Guard.GrpcServers.UserServer do
@spec search_users(User.SearchUsersRequest.t(), GRPC.Server.Stream.t()) ::
User.SearchUsersResponse
def search_users(%User.SearchUsersRequest{query: query, limit: limit}, _stream) do
- observe_and_log("grpc.user.search_users", fn ->
+ observe_and_log("grpc.user.search_users", %{query: query, limit: limit}, fn ->
query = String.trim(query)
limit = abs(limit)
@@ -91,7 +91,7 @@ defmodule Guard.GrpcServers.UserServer do
@spec describe_many(User.DescribeManyRequest.t(), GRPC.Server.Stream.t()) ::
User.DescribeManyResponse.t()
def describe_many(%User.DescribeManyRequest{user_ids: user_ids}, _stream) do
- observe_and_log("grpc.user.describe_many", fn ->
+ observe_and_log("grpc.user.describe_many", %{user_ids: user_ids}, fn ->
user_ids
|> Enum.filter(&valid_uuid?/1)
|> handle_describe_many_response()
@@ -109,33 +109,37 @@ defmodule Guard.GrpcServers.UserServer do
},
_stream
) do
- observe_and_log("grpc.user.create_favorite", fn ->
- kind =
- User.Favorite.Kind.key(kind)
- |> to_string()
-
- validate_uuid!(user_id)
- validate_uuid!(organization_id)
- validate_uuid!(favorite_id)
-
- case FrontRepo.Favorite.find_or_create(%{
- user_id: user_id,
- organization_id: organization_id,
- favorite_id: favorite_id,
- kind: kind
- }) do
- {:ok, favorite, :created} ->
- favorite_pb = map_favorite(favorite)
- Guard.Events.FavoriteCreated.publish(favorite_pb, @user_exchange)
- favorite_pb
-
- {:ok, favorite, :found} ->
- map_favorite(favorite)
-
- {:error, _changeset} ->
- grpc_error!(:invalid_argument, "Invalid favorite.")
+ observe_and_log(
+ "grpc.user.create_favorite",
+ %{user_id: user_id, organization_id: organization_id, favorite_id: favorite_id, kind: kind},
+ fn ->
+ kind =
+ User.Favorite.Kind.key(kind)
+ |> to_string()
+
+ validate_uuid!(user_id)
+ validate_uuid!(organization_id)
+ validate_uuid!(favorite_id)
+
+ case FrontRepo.Favorite.find_or_create(%{
+ user_id: user_id,
+ organization_id: organization_id,
+ favorite_id: favorite_id,
+ kind: kind
+ }) do
+ {:ok, favorite, :created} ->
+ favorite_pb = map_favorite(favorite)
+ Guard.Events.FavoriteCreated.publish(favorite_pb, @user_exchange)
+ favorite_pb
+
+ {:ok, favorite, :found} ->
+ map_favorite(favorite)
+
+ {:error, _changeset} ->
+ grpc_error!(:invalid_argument, "Invalid favorite.")
+ end
end
- end)
+ )
end
@spec delete_favorite(User.Favorite.t(), GRPC.Server.Stream.t()) :: User.Favorite.t()
@@ -148,37 +152,41 @@ defmodule Guard.GrpcServers.UserServer do
},
_stream
) do
- observe_and_log("grpc.user.delete_favorite", fn ->
- kind =
- User.Favorite.Kind.key(kind)
- |> to_string()
-
- validate_uuid!(user_id)
- validate_uuid!(organization_id)
- validate_uuid!(favorite_id)
-
- favorite =
- FrontRepo.Favorite.find_by(%{
- user_id: user_id,
- organization_id: organization_id,
- favorite_id: favorite_id,
- kind: kind
- })
+ observe_and_log(
+ "grpc.user.delete_favorite",
+ %{user_id: user_id, organization_id: organization_id, favorite_id: favorite_id, kind: kind},
+ fn ->
+ kind =
+ User.Favorite.Kind.key(kind)
+ |> to_string()
+
+ validate_uuid!(user_id)
+ validate_uuid!(organization_id)
+ validate_uuid!(favorite_id)
+
+ favorite =
+ FrontRepo.Favorite.find_by(%{
+ user_id: user_id,
+ organization_id: organization_id,
+ favorite_id: favorite_id,
+ kind: kind
+ })
- if is_nil(favorite) do
- grpc_error!(:not_found, "Favorite not found.")
- end
+ if is_nil(favorite) do
+ grpc_error!(:not_found, "Favorite not found.")
+ end
- case FrontRepo.Favorite.delete_favorite(favorite) do
- {:ok, favorite} ->
- favorite_pb = map_favorite(favorite)
- Guard.Events.FavoriteDeleted.publish(favorite_pb, @user_exchange)
- favorite_pb
+ case FrontRepo.Favorite.delete_favorite(favorite) do
+ {:ok, favorite} ->
+ favorite_pb = map_favorite(favorite)
+ Guard.Events.FavoriteDeleted.publish(favorite_pb, @user_exchange)
+ favorite_pb
- {:error, _changeset} ->
- grpc_error!(:invalid_argument, "Invalid favorite.")
+ {:error, _changeset} ->
+ grpc_error!(:invalid_argument, "Invalid favorite.")
+ end
end
- end)
+ )
end
@spec list_favorites(User.ListFavoritesRequest.t(), GRPC.Server.Stream.t()) ::
@@ -187,21 +195,25 @@ defmodule Guard.GrpcServers.UserServer do
%User.ListFavoritesRequest{user_id: user_id, organization_id: organization_id},
_stream
) do
- observe_and_log("grpc.user.list_favorites", fn ->
- validate_uuid!(user_id)
- if organization_id != "", do: validate_uuid!(organization_id)
+ observe_and_log(
+ "grpc.user.list_favorites",
+ %{user_id: user_id, organization_id: organization_id},
+ fn ->
+ validate_uuid!(user_id)
+ if organization_id != "", do: validate_uuid!(organization_id)
- favorites =
- FrontRepo.Favorite.list_favorite_by_user_id(user_id, organization_id: organization_id)
+ favorites =
+ FrontRepo.Favorite.list_favorite_by_user_id(user_id, organization_id: organization_id)
- User.ListFavoritesResponse.new(favorites: Enum.map(favorites, &map_favorite/1))
- end)
+ User.ListFavoritesResponse.new(favorites: Enum.map(favorites, &map_favorite/1))
+ end
+ )
end
@spec block_account(User.BlockAccountRequest.t(), GRPC.Server.Stream.t()) ::
User.User.t()
def block_account(%User.BlockAccountRequest{user_id: user_id}, _stream) do
- observe_and_log("grpc.user.block_account", fn ->
+ observe_and_log("grpc.user.block_account", %{user_id: user_id}, fn ->
result = FrontRepo.User.active_user_by_id(user_id)
case result do
@@ -214,7 +226,7 @@ defmodule Guard.GrpcServers.UserServer do
@spec unblock_account(User.UnblockAccountRequest.t(), GRPC.Server.Stream.t()) ::
User.User.t()
def unblock_account(%User.UnblockAccountRequest{user_id: user_id}, _stream) do
- observe_and_log("grpc.user.unblock_account", fn ->
+ observe_and_log("grpc.user.unblock_account", %{user_id: user_id}, fn ->
result = FrontRepo.User.blocked_user_by_id(user_id)
case result do
@@ -232,34 +244,38 @@ defmodule Guard.GrpcServers.UserServer do
%User.RefreshRepositoryProviderRequest{user_id: user_id, type: type},
_stream
) do
- observe_and_log("grpc.user.refresh_repository_provider", fn ->
- validate_uuid!(user_id)
-
- user =
- case Front.find(user_id) do
- {:error, :not_found} -> grpc_error!(:not_found, "User #{user_id} not found.")
- {:ok, user} -> user
- end
+ observe_and_log(
+ "grpc.user.refresh_repository_provider",
+ %{user_id: user_id, type: type},
+ fn ->
+ validate_uuid!(user_id)
+
+ user =
+ case Front.find(user_id) do
+ {:error, :not_found} -> grpc_error!(:not_found, "User #{user_id} not found.")
+ {:ok, user} -> user
+ end
- provider =
- User.RepositoryProvider.Type.key(type)
- |> to_string()
- |> String.downcase()
+ provider =
+ User.RepositoryProvider.Type.key(type)
+ |> to_string()
+ |> String.downcase()
- case FrontRepo.RepoHostAccount.get_for_user_by_repo_host(user.id, provider) do
- {:error, :not_found} ->
- Logger.error("User #{user_id} not found")
- grpc_error!(:not_found, "User not found.")
+ case FrontRepo.RepoHostAccount.get_for_user_by_repo_host(user.id, provider) do
+ {:error, :not_found} ->
+ Logger.error("User #{user_id} not found")
+ grpc_error!(:not_found, "User not found.")
- {:ok, account} ->
- handle_update_repo_status(user, account)
+ {:ok, account} ->
+ handle_update_repo_status(user, account)
+ end
end
- end)
+ )
end
@spec update(User.UpdateRequest.t(), GRPC.Server.Stream.t()) :: User.UpdateResponse.t()
def update(%User.UpdateRequest{user: user}, _stream) do
- observe_and_log("grpc.user.update", fn ->
+ observe_and_log("grpc.user.update", %{user: user}, fn ->
if is_nil(user) do
grpc_error!(:invalid_argument, "Invalid user.")
end
@@ -297,7 +313,7 @@ defmodule Guard.GrpcServers.UserServer do
GRPC.Server.Stream.t()
) :: User.User.t()
def delete_with_owned_orgs(%User.DeleteWithOwnedOrgsRequest{user_id: user_id}, _stream) do
- observe_and_log("grpc.user.delete_with_owned_orgs", fn ->
+ observe_and_log("grpc.user.delete_with_owned_orgs", %{user_id: user_id}, fn ->
validate_uuid!(user_id)
case Front.find(user_id) do
@@ -326,23 +342,33 @@ defmodule Guard.GrpcServers.UserServer do
},
_stream
) do
- observe_and_log("grpc.user.create", fn ->
- case Guard.User.Actions.create(%{
- email: email,
- name: name,
- password: password,
- repository_providers: providers,
- skip_password_change: skip_password_change
- }) do
- {:ok, user} ->
- Front.fetch_user_with_repo_account_details(user.id)
- |> map_user()
-
- {:error, errors} ->
- Logger.error("Failed to create user: #{inspect(errors)}")
- grpc_error!(:invalid_argument, "Failed to create user")
+ observe_and_log(
+ "grpc.user.create",
+ %{
+ email: email,
+ name: name,
+ password: password,
+ repository_providers: providers,
+ skip_password_change: skip_password_change
+ },
+ fn ->
+ case Guard.User.Actions.create(%{
+ email: email,
+ name: name,
+ password: password,
+ repository_providers: providers,
+ skip_password_change: skip_password_change
+ }) do
+ {:ok, user} ->
+ Front.fetch_user_with_repo_account_details(user.id)
+ |> map_user()
+
+ {:error, errors} ->
+ Logger.error("Failed to create user: #{inspect(errors)}")
+ grpc_error!(:invalid_argument, "Failed to create user")
+ end
end
- end)
+ )
end
# ---------------------
@@ -693,41 +719,45 @@ defmodule Guard.GrpcServers.UserServer do
%User.GetRepositoryTokenRequest{user_id: user_id, integration_type: integration_type},
_stream
) do
- observe_and_log("grpc.user.get_repository_token", fn ->
- parsed_integration_type = RepositoryIntegrator.IntegrationType.key(integration_type)
- check_integration!(parsed_integration_type)
-
- user =
- case Front.find(user_id) do
- {:error, :not_found} -> grpc_error!(:not_found, "User not found.")
- {:ok, user} -> user
- end
+ observe_and_log(
+ "grpc.user.get_repository_token",
+ %{user_id: user_id, integration_type: integration_type},
+ fn ->
+ parsed_integration_type = RepositoryIntegrator.IntegrationType.key(integration_type)
+ check_integration!(parsed_integration_type)
+
+ user =
+ case Front.find(user_id) do
+ {:error, :not_found} -> grpc_error!(:not_found, "User not found.")
+ {:ok, user} -> user
+ end
- provider = get_provider(parsed_integration_type)
+ provider = get_provider(parsed_integration_type)
- repo_host_account =
- case FrontRepo.RepoHostAccount.get_for_user_by_repo_host(user.id, provider) do
- {:error, :not_found} ->
- Logger.error(
- "Integration for User: '#{user.id}' and '#{parsed_integration_type}' not found."
- )
+ repo_host_account =
+ case FrontRepo.RepoHostAccount.get_for_user_by_repo_host(user.id, provider) do
+ {:error, :not_found} ->
+ Logger.error(
+ "Integration for User: '#{user.id}' and '#{parsed_integration_type}' not found."
+ )
- grpc_error!(:not_found, "Integration '#{parsed_integration_type}' not found.")
+ grpc_error!(:not_found, "Integration '#{parsed_integration_type}' not found.")
- {:ok, account} ->
- account
- end
+ {:ok, account} ->
+ account
+ end
- {token, expires_at} = get_token(repo_host_account, user_id: user_id)
+ {token, expires_at} = get_token(repo_host_account, user_id: user_id)
- User.GetRepositoryTokenResponse.new(token: token, expires_at: grpc_timestamp(expires_at))
- end)
+ User.GetRepositoryTokenResponse.new(token: token, expires_at: grpc_timestamp(expires_at))
+ end
+ )
end
@spec regenerate_token(User.RegenerateTokenRequest.t(), GRPC.Stream.t()) ::
User.RegenerateTokenResponse.t()
def regenerate_token(%User.RegenerateTokenRequest{user_id: user_id}, _stream) do
- observe_and_log("grpc.user.regenerate_token", fn ->
+ observe_and_log("grpc.user.regenerate_token", %{user_id: user_id}, fn ->
validate_uuid!(user_id)
user =
@@ -820,18 +850,21 @@ defmodule Guard.GrpcServers.UserServer do
defp grpc_timestamp(_), do: nil
- defp observe_and_log(name, f) do
+ defp observe_and_log(name, request, f) do
Watchman.benchmark(name, fn ->
try do
- Logger.info("Service #{name} - Started")
+ Logger.debug(fn -> "Service #{name} - request: #{inspect(request)} - Started" end)
result = f.()
- Logger.info("Service #{name} - Finished")
+ Logger.debug(fn -> "Service #{name} - request: #{inspect(request)} - Finished" end)
Watchman.increment({name, ["OK"]})
result
rescue
e ->
- Logger.error("Service #{name} - Exited with an error: #{inspect(e)}")
+ Logger.error(
+ "Service #{name} - request: #{inspect(request)} - Exited with an error: #{inspect(e)}"
+ )
+
Watchman.increment({name, ["ERROR"]})
reraise e, __STACKTRACE__
end
From 3628b4eb2e3a26cf7725a45bbb025180f199a650 Mon Sep 17 00:00:00 2001
From: Amir Hasanbasic <43892661+hamir-suspect@users.noreply.github.com>
Date: Thu, 19 Jun 2025 12:27:39 +0200
Subject: [PATCH 04/87] feat(security-toolbox): add flag for scanners (#404)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
Sometimes we do not want to scan licenses, so this change makes it
possible to configure which scanners Trivy uses.
## ✅ Checklist
- [x] I have tested this change
- [ ] ~This change requires documentation update~
---
security-toolbox/docker | 4 ++++
security-toolbox/policies/docker/trivy_image.rb | 3 ++-
2 files changed, 6 insertions(+), 1 deletion(-)
diff --git a/security-toolbox/docker b/security-toolbox/docker
index 7333c3411..633b71608 100755
--- a/security-toolbox/docker
+++ b/security-toolbox/docker
@@ -24,6 +24,10 @@ OptionParser.new do |parser|
args[:severity] = severity
end
+ parser.on("-c", "--scanners SCANNERS", "Comma-separated list of scanners to use (vuln,secret,license,misconfig)") do |scanners|
+ args[:scanners] = scanners
+ end
+
parser.on("-p", "--ignore-policy IGNORE_POLICY_PATH", "Ignore policy to use when scanning docker image") do |ignore_policy|
args[:ignore_policy] = ignore_policy
end
diff --git a/security-toolbox/policies/docker/trivy_image.rb b/security-toolbox/policies/docker/trivy_image.rb
index 744ff9d44..c75423361 100644
--- a/security-toolbox/policies/docker/trivy_image.rb
+++ b/security-toolbox/policies/docker/trivy_image.rb
@@ -11,6 +11,7 @@ def initialize(args)
@image = args[:image]
@severity = args[:severity] || "HIGH,CRITICAL"
@ignore_policy = args[:ignore_policy] || nil
+ @scanners = args[:scanners] || "vuln,secret,license,misconfig"
@skip_files = args[:skip_files].to_s.split(",") || []
@skip_dirs = args[:skip_dirs].to_s.split(",") || []
@@ -24,7 +25,7 @@ def test
"--severity #{@severity}",
"--exit-on-eol 1",
"--ignore-unfixed",
- "--scanners vuln,secret,license,misconfig",
+ "--scanners #{@scanners}",
"--format json",
"--output out/docker-scan-trivy.json"
]
From bf965b81bac7cab4603217297e8c84456a63fbf1 Mon Sep 17 00:00:00 2001
From: Tomas Fernandez
Date: Thu, 19 Jun 2025 12:47:24 +0100
Subject: [PATCH 05/87] docs: add info note about Okta usage (#401)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
Add info note about Okta integration to the Cloud and EE versions
Fixes: https://github.com/semaphoreio/semaphore/issues/383
## ✅ Checklist
- [X] I have tested this change
- [ ] This change requires documentation update
---
docs/default.conf | 1 +
docs/docs/using-semaphore/okta.md | 6 ++++++
docs/versioned_docs/version-EE/using-semaphore/okta.md | 6 ++++++
3 files changed, 13 insertions(+)
diff --git a/docs/default.conf b/docs/default.conf
index 75310df38..014c65573 100644
--- a/docs/default.conf
+++ b/docs/default.conf
@@ -84,6 +84,7 @@ server {
location ~ ^/ci-cd-environment/ubuntu-22.04-arm-image/?$ {return 301 /reference/os-ubuntu#ubuntu2204-arm;}
location ~ ^/ci-cd-environment/ubuntu-22.04-image/?$ {return 301 /reference/os-ubuntu#ubuntu2204-x86;}
location ~ ^/ci-cd-environment/working-with-docker/?$ {return 301 /using-semaphore/optimization/docker;}
+ location ~ ^/article/75-debugging-with-ssh-access/?$ {return 301 /using-semaphore/jobs#ssh-into-agent;}
location ~ ^/essentials/artifacts/?$ {return 301 /using-semaphore/artifacts;}
location ~ ^/essentials/auto-cancel-previous-pipelines-on-a-new-push/?$ {return 301 /using-semaphore/pipelines#auto-cancel;}
location ~ ^/essentials/build-matrix/?$ {return 301 /using-semaphore/jobs#matrix;}
diff --git a/docs/docs/using-semaphore/okta.md b/docs/docs/using-semaphore/okta.md
index f071fad31..11aa9438c 100644
--- a/docs/docs/using-semaphore/okta.md
+++ b/docs/docs/using-semaphore/okta.md
@@ -237,6 +237,12 @@ Semaphore asks new users logging in via SSO to [connect their GitHub](./connect-

+:::info
+
+Once enforced, Okta is the **only login method allowed** for all users in the organization. Dual authentication methods like Okta + GitHub/BitBucket/GitLab are not supported.
+
+:::
+
## Troubleshooting duplicated users {#troubleshooting}
Semaphore tries to match new users provisioned via SCIM to existing Semaphore users by email address. If the email address associated with the SCIM request matches the email address of existing Semaphore users, the two accounts will be connected, and no new account will be provisioned. Email associated with Semaphore is the primary email from GitHub or BitBucket.
diff --git a/docs/versioned_docs/version-EE/using-semaphore/okta.md b/docs/versioned_docs/version-EE/using-semaphore/okta.md
index 22b392918..038f528a8 100644
--- a/docs/versioned_docs/version-EE/using-semaphore/okta.md
+++ b/docs/versioned_docs/version-EE/using-semaphore/okta.md
@@ -235,6 +235,12 @@ Semaphore asks new users logging in via SSO to [connect their GitHub](./connect-

+:::info
+
+Once enforced, Okta is the **only login method allowed** for all users in the server. Dual authentication methods like Okta + GitHub/BitBucket/GitLab are not supported.
+
+:::
+
## Troubleshooting duplicated users {#troubleshooting}
Semaphore tries to match new users provisioned via SCIM to existing Semaphore users by email address. If the email address associated with the SCIM request matches the email address of existing Semaphore users, the two accounts will be connected, and no new account will be provisioned. Email associated with Semaphore is the primary email from GitHub or BitBucket.
From 71dd864afcee2003795cb679b7cd2d71b98db4e9 Mon Sep 17 00:00:00 2001
From: Dejan K
Date: Fri, 20 Jun 2025 11:50:32 +0200
Subject: [PATCH 06/87] fix(projecthub-rest-api): drop page size (#406)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
Removed the `page_size` query parameter due to inconsistent behavior.
Pagination now relies on the default `page_size` and the `page`
parameter only.
Retained only the `x-page` and `x-has-more` response headers, which are
sufficient for paginated iteration.
Related [task](https://github.com/renderedtext/tasks/issues/7953).
## ✅ Checklist
- [x] I have tested this change
- [x] ~This change requires documentation update~ - N/A
---
projecthub-rest-api/config/config.exs | 3 +-
projecthub-rest-api/config/runtime.exs | 3 +-
projecthub-rest-api/config/test.exs | 2 +
.../lib/projecthub/http_api.ex | 28 +++----
.../test/projecthub/http_api_test.exs | 78 ++++++++-----------
5 files changed, 53 insertions(+), 61 deletions(-)
diff --git a/projecthub-rest-api/config/config.exs b/projecthub-rest-api/config/config.exs
index 983c395ea..d64d3bb3f 100644
--- a/projecthub-rest-api/config/config.exs
+++ b/projecthub-rest-api/config/config.exs
@@ -10,7 +10,8 @@ config :projecthub, http_port: 4000
config :projecthub,
projecthub_grpc_endpoint: "0.0.0.0:50051",
organization_grpc_endpoint: "0.0.0.0:50051",
- rbac_grpc_endpoint: "0.0.0.0:50051"
+ rbac_grpc_endpoint: "0.0.0.0:50051",
+ projects_page_size: 500
config :projecthub, :enviroment, config_env()
diff --git a/projecthub-rest-api/config/runtime.exs b/projecthub-rest-api/config/runtime.exs
index 28f9120ba..65f5f04a8 100644
--- a/projecthub-rest-api/config/runtime.exs
+++ b/projecthub-rest-api/config/runtime.exs
@@ -17,5 +17,6 @@ if config_env() == :prod do
config :projecthub,
projecthub_grpc_endpoint: System.fetch_env!("INTERNAL_API_URL_PROJECT"),
organization_grpc_endpoint: System.fetch_env!("INTERNAL_API_URL_ORGANIZATION"),
- rbac_grpc_endpoint: System.fetch_env!("INTERNAL_API_URL_RBAC")
+ rbac_grpc_endpoint: System.fetch_env!("INTERNAL_API_URL_RBAC"),
+ projects_page_size: System.get_env("PROJECTS_PAGE_SIZE", "500") |> String.to_integer()
end
diff --git a/projecthub-rest-api/config/test.exs b/projecthub-rest-api/config/test.exs
index 3882cea0d..a99e75113 100644
--- a/projecthub-rest-api/config/test.exs
+++ b/projecthub-rest-api/config/test.exs
@@ -7,3 +7,5 @@ config :junit_formatter,
print_report_file: true,
include_filename?: true,
include_file_line?: true
+
+config :projecthub, :projects_page_size, 2
diff --git a/projecthub-rest-api/lib/projecthub/http_api.ex b/projecthub-rest-api/lib/projecthub/http_api.ex
index 2999762ff..2c771c1e7 100644
--- a/projecthub-rest-api/lib/projecthub/http_api.ex
+++ b/projecthub-rest-api/lib/projecthub/http_api.ex
@@ -40,11 +40,9 @@ defmodule Projecthub.HttpApi do
get "/api/#{@version}/projects" do
case list_projects(conn) do
- {:ok, {projects, page, page_size, total, has_more}} ->
+ {:ok, {projects, page, has_more}} ->
conn
|> put_resp_header("x-page", Integer.to_string(page))
- |> put_resp_header("x-page-size", Integer.to_string(page_size))
- |> put_resp_header("x-total-count", Integer.to_string(total))
|> put_resp_header("x-has-more", to_string(has_more))
|> send_resp(200, Poison.encode!(projects))
@@ -694,23 +692,22 @@ defmodule Projecthub.HttpApi do
org_id = conn.assigns.org_id
restricted = Organization.restricted?(org_id)
- with {:ok, page} <- parse_int(conn.params, "page", 1, 1_000_000, 1),
- {:ok, page_size} <- parse_int(conn.params, "page_size", 1, 500, 500) do
- do_list_projects(conn, org_id, restricted, page, page_size)
- else
- {:error, reason} ->
- {:error, reason}
+ case parse_int(conn.params, "page", 1, 100, 1) do
+ {:ok, page} -> do_list_projects(conn, org_id, restricted, page)
+ {:error, reason} -> {:error, reason}
end
end
- defp do_list_projects(conn, org_id, restricted, page, page_size) do
+ defp page_size, do: Application.get_env(:projecthub, :projects_page_size, 500)
+
+ defp do_list_projects(conn, org_id, restricted, page) do
req =
InternalApi.Projecthub.ListRequest.new(
metadata: Utils.construct_req_meta(conn),
pagination:
InternalApi.Projecthub.PaginationRequest.new(
page: page,
- page_size: page_size
+ page_size: page_size()
)
)
@@ -728,8 +725,13 @@ defmodule Projecthub.HttpApi do
|> Enum.map(&Map.merge(&1, %{"apiVersion" => @version, "kind" => "Project"}))
total = Map.get(res.pagination || %{}, :total_entries, 0)
- has_more = (page - 1) * page_size + length(projects) < total
- {:ok, {projects, page, page_size, total, has_more}}
+ has_more = (page - 1) * page_size() + length(res.projects) < total
+
+ if total < (page - 1) * page_size() do
+ {:ok, {[], page, false}}
+ else
+ {:ok, {projects, page, has_more}}
+ end
:NOT_FOUND ->
{:error, :not_found}
diff --git a/projecthub-rest-api/test/projecthub/http_api_test.exs b/projecthub-rest-api/test/projecthub/http_api_test.exs
index 638f2dde1..986653c16 100644
--- a/projecthub-rest-api/test/projecthub/http_api_test.exs
+++ b/projecthub-rest-api/test/projecthub/http_api_test.exs
@@ -292,6 +292,7 @@ defmodule Projecthub.HttpApi.Test do
p1 = create("project1", p1_id)
p2 = create("project2", p2_id)
p3 = create("project3", p3_id)
+ page_size = Application.get_env(:projecthub, :projects_page_size)
FunRegistry.set!(FakeServices.RbacService, :list_accessible_projects, fn _, _ ->
InternalApi.RBAC.ListAccessibleProjectsResponse.new(project_ids: [p1_id, p2_id, p3_id])
@@ -299,9 +300,7 @@ defmodule Projecthub.HttpApi.Test do
FunRegistry.set!(FakeServices.ProjectService, :list, fn req, _ ->
alias InternalApi.Projecthub, as: PH
- # Simulate pagination
page = req.pagination.page
- page_size = req.pagination.page_size
all_projects = [p1, p2, p3]
projects = Enum.slice(all_projects, (page - 1) * page_size, page_size)
@@ -327,30 +326,50 @@ defmodule Projecthub.HttpApi.Test do
test "returns correct pagination headers for /projects" do
{:ok, response} =
HTTPoison.get(
- "http://localhost:#{@port}/api/#{@version}/projects?page=1&page_size=2",
+ "http://localhost:#{@port}/api/#{@version}/projects?page=1",
@headers
)
assert response.status_code == 200
assert response.headers |> Enum.any?(fn {k, v} -> k == "x-page" and v == "1" end)
- assert response.headers |> Enum.any?(fn {k, v} -> k == "x-page-size" and v == "2" end)
- assert response.headers |> Enum.any?(fn {k, v} -> k == "x-total-count" and v == "3" end)
assert response.headers |> Enum.any?(fn {k, v} -> k == "x-has-more" and v == "true" end)
projects = Poison.decode!(response.body)
assert length(projects) == 2
+
+ {:ok, response2} =
+ HTTPoison.get(
+ "http://localhost:#{@port}/api/#{@version}/projects?page=2",
+ @headers
+ )
+
+ assert response2.status_code == 200
+ assert response2.headers |> Enum.any?(fn {k, v} -> k == "x-page" and v == "2" end)
+ assert response2.headers |> Enum.any?(fn {k, v} -> k == "x-has-more" and v == "false" end)
+ projects2 = Poison.decode!(response2.body)
+ assert length(projects2) == 1
+
+ {:ok, response3} =
+ HTTPoison.get(
+ "http://localhost:#{@port}/api/#{@version}/projects?page=3",
+ @headers
+ )
+
+ assert response3.status_code == 200
+ assert response3.headers |> Enum.any?(fn {k, v} -> k == "x-page" and v == "3" end)
+ assert response3.headers |> Enum.any?(fn {k, v} -> k == "x-has-more" and v == "false" end)
+ projects3 = Poison.decode!(response3.body)
+ assert Enum.empty?(projects3)
end
test "returns correct pagination headers for /projects when there are no more projects" do
{:ok, response} =
HTTPoison.get(
- "http://localhost:#{@port}/api/#{@version}/projects?page=2&page_size=2",
+ "http://localhost:#{@port}/api/#{@version}/projects?page=2",
@headers
)
assert response.status_code == 200
assert response.headers |> Enum.any?(fn {k, v} -> k == "x-page" and v == "2" end)
- assert response.headers |> Enum.any?(fn {k, v} -> k == "x-page-size" and v == "2" end)
- assert response.headers |> Enum.any?(fn {k, v} -> k == "x-total-count" and v == "3" end)
assert response.headers |> Enum.any?(fn {k, v} -> k == "x-has-more" and v == "false" end)
projects = Poison.decode!(response.body)
assert length(projects) == 1
@@ -371,7 +390,7 @@ defmodule Projecthub.HttpApi.Test do
{:ok, response} =
HTTPoison.get(
- "http://localhost:#{@port}/api/#{@version}/projects?page=10&page_size=2",
+ "http://localhost:#{@port}/api/#{@version}/projects?page=10",
@headers
)
@@ -401,7 +420,7 @@ defmodule Projecthub.HttpApi.Test do
{:ok, response} =
HTTPoison.get(
- "http://localhost:#{@port}/api/#{@version}/projects?page=foo&page_size=bar",
+ "http://localhost:#{@port}/api/#{@version}/projects?page=foo",
@headers
)
@@ -411,7 +430,7 @@ defmodule Projecthub.HttpApi.Test do
test "returns 400 on negative page" do
{:ok, response} =
HTTPoison.get(
- "http://localhost:#{@port}/api/#{@version}/projects?page=-1&page_size=2",
+ "http://localhost:#{@port}/api/#{@version}/projects?page=-1",
@headers
)
@@ -419,21 +438,10 @@ defmodule Projecthub.HttpApi.Test do
assert Poison.decode!(response.body)["message"] =~ "page must be at least 1"
end
- test "returns 400 on zero page_size" do
- {:ok, response} =
- HTTPoison.get(
- "http://localhost:#{@port}/api/#{@version}/projects?page=1&page_size=0",
- @headers
- )
-
- assert response.status_code == 400
- assert Poison.decode!(response.body)["message"] =~ "page_size must be at least 1"
- end
-
test "returns 400 on too large page" do
{:ok, response} =
HTTPoison.get(
- "http://localhost:#{@port}/api/#{@version}/projects?page=9999&page_size=2",
+ "http://localhost:#{@port}/api/#{@version}/projects?page=9999",
@headers
)
@@ -443,28 +451,6 @@ defmodule Projecthub.HttpApi.Test do
assert Poison.decode!(response.body)["message"] =~ "page must be at most"
end
end
-
- test "returns 400 on too large page_size" do
- {:ok, response} =
- HTTPoison.get(
- "http://localhost:#{@port}/api/#{@version}/projects?page=1&page_size=9999",
- @headers
- )
-
- assert response.status_code == 400
- assert Poison.decode!(response.body)["message"] =~ "page_size must be at most"
- end
-
- test "returns 400 on non-numeric page_size" do
- {:ok, response} =
- HTTPoison.get(
- "http://localhost:#{@port}/api/#{@version}/projects?page=1&page_size=abc",
- @headers
- )
-
- assert response.status_code == 400
- assert Poison.decode!(response.body)["message"] =~ "page_size must be a number"
- end
end
describe "GET /api//projects/:name with authorized user" do
@@ -1828,7 +1814,7 @@ defmodule Projecthub.HttpApi.Test do
def create(name, id) do
alias InternalApi.Projecthub.Project
- alias InternalApi.Projecthub.Project.Spec.{Repository, Visibility, PermissionType}
+ alias InternalApi.Projecthub.Project.Spec.{PermissionType, Repository, Visibility}
Project.new(
metadata: Project.Metadata.new(name: name, id: id),
From a9b69312ed7346f5de67cc8f1a2d263ed6fdfe05 Mon Sep 17 00:00:00 2001
From: Veljko Maksimovic <45179708+VeljkoMaksimovic@users.noreply.github.com>
Date: Fri, 20 Jun 2025 15:01:51 +0200
Subject: [PATCH 07/87] fix(guard): update allowed id providers (#405)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
To support SAML login links without making changes to the database, this change is required.
See https://github.com/renderedtext/tasks/issues/8141
## ✅ Checklist
- [x] I have tested this change
- [ ] This change requires documentation update
---
.gitignore | 1 +
..._allowed_id_providers_for_organizations.rb | 11 +++++
guard/.tool-versions | 2 -
guard/docker-compose.yml | 2 +-
.../guard/grpc_servers/organization_server.ex | 13 +++++
.../grpc_servers/organization_server_test.exs | 49 +++++++++++++++++++
guard/test/support/factories/organization.ex | 3 +-
7 files changed, 77 insertions(+), 4 deletions(-)
create mode 100644 github_hooks/db/migrate/20250619173719_set_default_allowed_id_providers_for_organizations.rb
delete mode 100644 guard/.tool-versions
diff --git a/.gitignore b/.gitignore
index ed1c6b423..3061b6b51 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
*/out/*
*/*/out/*
**/_wildcard*
+**/.tool-versions
\ No newline at end of file
diff --git a/github_hooks/db/migrate/20250619173719_set_default_allowed_id_providers_for_organizations.rb b/github_hooks/db/migrate/20250619173719_set_default_allowed_id_providers_for_organizations.rb
new file mode 100644
index 000000000..e6247c2a4
--- /dev/null
+++ b/github_hooks/db/migrate/20250619173719_set_default_allowed_id_providers_for_organizations.rb
@@ -0,0 +1,11 @@
+class SetDefaultAllowedIdProvidersForOrganizations < ActiveRecord::Migration[5.1]
+ def up
+ # Set existing null values to the default
+ execute("UPDATE organizations SET allowed_id_providers = 'api_token,oidc' WHERE allowed_id_providers IS NULL OR allowed_id_providers = ''")
+ change_column_default :organizations, :allowed_id_providers, "api_token,oidc"
+ end
+
+ def down
+ change_column_default :organizations, :allowed_id_providers, nil
+ end
+end
diff --git a/guard/.tool-versions b/guard/.tool-versions
deleted file mode 100644
index 643d12ee0..000000000
--- a/guard/.tool-versions
+++ /dev/null
@@ -1,2 +0,0 @@
-elixir 1.14.3-otp-24
-erlang 24.3.4.9
diff --git a/guard/docker-compose.yml b/guard/docker-compose.yml
index eb181ea5b..a8f807e43 100644
--- a/guard/docker-compose.yml
+++ b/guard/docker-compose.yml
@@ -3,7 +3,7 @@ version: '3.6'
services:
app:
container_name: guard
- image: ${IMAGE:-guard}:${TAG:-latest}
+ image: ${IMAGE:-guard}:${TAG:-test}
build:
context: ..
cache_from:
diff --git a/guard/lib/guard/grpc_servers/organization_server.ex b/guard/lib/guard/grpc_servers/organization_server.ex
index e3fbdd1b3..77254e188 100644
--- a/guard/lib/guard/grpc_servers/organization_server.ex
+++ b/guard/lib/guard/grpc_servers/organization_server.ex
@@ -485,6 +485,19 @@ defmodule Guard.GrpcServers.OrganizationServer do
ip_allow_list: Enum.join(proto_org.ip_allow_list, ",")
}
+ attrs =
+ case proto_org.allowed_id_providers do
+ [_head | _tail] ->
+ Map.put(
+ attrs,
+ :allowed_id_providers,
+ Enum.join(proto_org.allowed_id_providers, ",")
+ )
+
+ _ ->
+ attrs
+ end
+
case Guard.Store.Organization.update(organization, attrs) do
{:ok, updated_org} ->
%Organization.UpdateResponse{
diff --git a/guard/test/guard/grpc_servers/organization_server_test.exs b/guard/test/guard/grpc_servers/organization_server_test.exs
index 648372f71..d4fb2f077 100644
--- a/guard/test/guard/grpc_servers/organization_server_test.exs
+++ b/guard/test/guard/grpc_servers/organization_server_test.exs
@@ -1530,6 +1530,55 @@ defmodule Guard.GrpcServers.OrganizationServerTest do
assert org.ip_allow_list == "192.168.1.1,192.168.1.2"
end
+ test "updates allowed_id_providers when non-empty list is provided", %{
+ grpc_channel: channel,
+ organization: organization
+ } do
+ assert organization.allowed_id_providers == "api_token,oidc"
+
+ req =
+ Organization.UpdateRequest.new(
+ organization:
+ Organization.Organization.new(
+ org_id: organization.id,
+ name: organization.name,
+ org_username: organization.username,
+ allowed_id_providers: ["okta"]
+ )
+ )
+
+ {:ok, response} = channel |> Organization.OrganizationService.Stub.update(req)
+
+ assert response.organization.allowed_id_providers == ["okta"]
+
+ updated_org = Guard.FrontRepo.get!(Guard.FrontRepo.Organization, organization.id)
+ assert updated_org.allowed_id_providers == "okta"
+ end
+
+ test "doesn't update allowed_id_providers when empty list is provided", %{
+ grpc_channel: channel,
+ organization: organization
+ } do
+ assert organization.allowed_id_providers == "api_token,oidc"
+
+ # Update with empty allowed_id_providers
+ request =
+ Organization.UpdateRequest.new(
+ organization:
+ Organization.Organization.new(
+ org_id: organization.id,
+ name: "Updated Organization",
+ org_username: "updated-org"
+ )
+ )
+
+ {:ok, response} = channel |> Organization.OrganizationService.Stub.update(request)
+
+ assert response.organization.allowed_id_providers == ["api_token", "oidc"]
+ updated_org = Guard.FrontRepo.get!(Guard.FrontRepo.Organization, organization.id)
+ assert updated_org.allowed_id_providers == "api_token,oidc"
+ end
+
test "returns error with invalid params", %{
grpc_channel: channel,
organization: organization
diff --git a/guard/test/support/factories/organization.ex b/guard/test/support/factories/organization.ex
index 9ca773718..7de65160c 100644
--- a/guard/test/support/factories/organization.ex
+++ b/guard/test/support/factories/organization.ex
@@ -8,7 +8,8 @@ defmodule Support.Factories.Organization do
open_source: false,
description: "Test Organization Description",
website: "https://example.com",
- avatar_url: "https://example.com/avatar.png"
+ avatar_url: "https://example.com/avatar.png",
+ allowed_id_providers: "api_token,oidc"
]
attrs = Keyword.merge(defaults, options) |> Enum.into(%{})
From 8260866ef4c9ac540f834c24430db68078747e39 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pedro=20Le=C3=A3o?=
<60622592+forestileao@users.noreply.github.com>
Date: Fri, 20 Jun 2025 10:52:28 -0300
Subject: [PATCH 08/87] refactor: regenerate protobuf modules (#408)
---
.../lib/internal_api/artifacthub.pb.ex | 44 ++-
.../v1alpha/lib/internal_api/gofer.dt.pb.ex | 82 ++++-
.../lib/internal_api/gofer.switch.pb.ex | 83 ++++-
.../lib/internal_api/organization.pb.ex | 307 ++++++++++++------
.../lib/internal_api/periodic_scheduler.pb.ex | 7 +-
.../internal_api/plumber_w_f.workflow.pb.ex | 69 +++-
.../v1alpha/lib/internal_api/projecthub.pb.ex | 301 ++++++++++++++++-
.../v1alpha/lib/internal_api/repo_proxy.pb.ex | 114 +++++++
.../internal_api/repository_integrator.pb.ex | 22 ++
.../v1alpha/lib/internal_api/secrethub.pb.ex | 130 +++++++-
.../lib/internal_api/self_hosted.pb.ex | 43 ++-
.../lib/internal_api/server_farm.job.pb.ex | 225 ++++++++++++-
12 files changed, 1263 insertions(+), 164 deletions(-)
diff --git a/public-api/v1alpha/lib/internal_api/artifacthub.pb.ex b/public-api/v1alpha/lib/internal_api/artifacthub.pb.ex
index b07398785..d952ea7c3 100644
--- a/public-api/v1alpha/lib/internal_api/artifacthub.pb.ex
+++ b/public-api/v1alpha/lib/internal_api/artifacthub.pb.ex
@@ -175,12 +175,14 @@ defmodule InternalApi.Artifacthub.ListPathRequest do
@type t :: %__MODULE__{
artifact_id: String.t(),
- path: String.t()
+ path: String.t(),
+ unwrap_directories: boolean
}
- defstruct [:artifact_id, :path]
+ defstruct [:artifact_id, :path, :unwrap_directories]
field(:artifact_id, 1, type: :string)
field(:path, 2, type: :string)
+ field(:unwrap_directories, 3, type: :bool)
end
defmodule InternalApi.Artifacthub.ListPathResponse do
@@ -410,6 +412,38 @@ defmodule InternalApi.Artifacthub.Artifact do
field(:artifact_token, 4, type: :string)
end
+defmodule InternalApi.Artifacthub.GenerateTokenRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ artifact_id: String.t(),
+ job_id: String.t(),
+ workflow_id: String.t(),
+ project_id: String.t(),
+ duration: non_neg_integer
+ }
+ defstruct [:artifact_id, :job_id, :workflow_id, :project_id, :duration]
+
+ field(:artifact_id, 1, type: :string)
+ field(:job_id, 2, type: :string)
+ field(:workflow_id, 3, type: :string)
+ field(:project_id, 4, type: :string)
+ field(:duration, 5, type: :uint32)
+end
+
+defmodule InternalApi.Artifacthub.GenerateTokenResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ token: String.t()
+ }
+ defstruct [:token]
+
+ field(:token, 1, type: :string)
+end
+
defmodule InternalApi.Artifacthub.ArtifactService.Service do
@moduledoc false
use GRPC.Service, name: "InternalApi.Artifacthub.ArtifactService"
@@ -448,6 +482,12 @@ defmodule InternalApi.Artifacthub.ArtifactService.Service do
InternalApi.Artifacthub.UpdateRetentionPolicyResponse
)
+ rpc(
+ :GenerateToken,
+ InternalApi.Artifacthub.GenerateTokenRequest,
+ InternalApi.Artifacthub.GenerateTokenResponse
+ )
+
rpc(:Cleanup, InternalApi.Artifacthub.CleanupRequest, InternalApi.Artifacthub.CleanupResponse)
rpc(
diff --git a/public-api/v1alpha/lib/internal_api/gofer.dt.pb.ex b/public-api/v1alpha/lib/internal_api/gofer.dt.pb.ex
index c5e92b6cc..c56c33c79 100644
--- a/public-api/v1alpha/lib/internal_api/gofer.dt.pb.ex
+++ b/public-api/v1alpha/lib/internal_api/gofer.dt.pb.ex
@@ -3,11 +3,13 @@ defmodule InternalApi.Gofer.DeploymentTargets.ListRequest do
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
- project_id: String.t()
+ project_id: String.t(),
+ requester_id: String.t()
}
- defstruct [:project_id]
+ defstruct [:project_id, :requester_id]
field(:project_id, 1, type: :string)
+ field(:requester_id, 2, type: :string)
end
defmodule InternalApi.Gofer.DeploymentTargets.ListResponse do
@@ -50,6 +52,62 @@ defmodule InternalApi.Gofer.DeploymentTargets.DescribeResponse do
field(:target, 1, type: InternalApi.Gofer.DeploymentTargets.DeploymentTarget)
end
+defmodule InternalApi.Gofer.DeploymentTargets.VerifyRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ target_id: String.t(),
+ triggerer: String.t(),
+ git_ref_type: integer,
+ git_ref_label: String.t()
+ }
+ defstruct [:target_id, :triggerer, :git_ref_type, :git_ref_label]
+
+ field(:target_id, 1, type: :string)
+ field(:triggerer, 2, type: :string)
+
+ field(:git_ref_type, 3,
+ type: InternalApi.Gofer.DeploymentTargets.VerifyRequest.GitRefType,
+ enum: true
+ )
+
+ field(:git_ref_label, 4, type: :string)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.VerifyRequest.GitRefType do
+ @moduledoc false
+ use Protobuf, enum: true, syntax: :proto3
+
+ field(:BRANCH, 0)
+ field(:TAG, 1)
+ field(:PR, 2)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.VerifyResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ status: integer
+ }
+ defstruct [:status]
+
+ field(:status, 1, type: InternalApi.Gofer.DeploymentTargets.VerifyResponse.Status, enum: true)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.VerifyResponse.Status do
+ @moduledoc false
+ use Protobuf, enum: true, syntax: :proto3
+
+ field(:SYNCING_TARGET, 0)
+ field(:ACCESS_GRANTED, 1)
+ field(:BANNED_SUBJECT, 2)
+ field(:BANNED_OBJECT, 3)
+ field(:CORDONED_TARGET, 4)
+ field(:CORRUPTED_TARGET, 5)
+end
+
defmodule InternalApi.Gofer.DeploymentTargets.HistoryRequest do
@moduledoc false
use Protobuf, syntax: :proto3
@@ -58,9 +116,10 @@ defmodule InternalApi.Gofer.DeploymentTargets.HistoryRequest do
target_id: String.t(),
cursor_type: integer,
cursor_value: non_neg_integer,
- filters: InternalApi.Gofer.DeploymentTargets.HistoryRequest.Filters.t()
+ filters: InternalApi.Gofer.DeploymentTargets.HistoryRequest.Filters.t(),
+ requester_id: String.t()
}
- defstruct [:target_id, :cursor_type, :cursor_value, :filters]
+ defstruct [:target_id, :cursor_type, :cursor_value, :filters, :requester_id]
field(:target_id, 1, type: :string)
@@ -71,6 +130,7 @@ defmodule InternalApi.Gofer.DeploymentTargets.HistoryRequest do
field(:cursor_value, 3, type: :uint64)
field(:filters, 4, type: InternalApi.Gofer.DeploymentTargets.HistoryRequest.Filters)
+ field(:requester_id, 5, type: :string)
end
defmodule InternalApi.Gofer.DeploymentTargets.HistoryRequest.Filters do
@@ -332,7 +392,8 @@ defmodule InternalApi.Gofer.DeploymentTargets.Deployment do
state_message: String.t(),
switch_id: String.t(),
target_name: String.t(),
- env_vars: [InternalApi.Gofer.DeploymentTargets.Deployment.EnvVar.t()]
+ env_vars: [InternalApi.Gofer.DeploymentTargets.Deployment.EnvVar.t()],
+ can_requester_rerun: boolean
}
defstruct [
:id,
@@ -345,7 +406,8 @@ defmodule InternalApi.Gofer.DeploymentTargets.Deployment do
:state_message,
:switch_id,
:target_name,
- :env_vars
+ :env_vars,
+ :can_requester_rerun
]
field(:id, 1, type: :string)
@@ -360,6 +422,8 @@ defmodule InternalApi.Gofer.DeploymentTargets.Deployment do
field(:target_name, 10, type: :string)
field(:env_vars, 11, repeated: true, type: InternalApi.Gofer.DeploymentTargets.Deployment.EnvVar)
+
+ field(:can_requester_rerun, 12, type: :bool)
end
defmodule InternalApi.Gofer.DeploymentTargets.Deployment.EnvVar do
@@ -478,6 +542,12 @@ defmodule InternalApi.Gofer.DeploymentTargets.DeploymentTargets.Service do
InternalApi.Gofer.DeploymentTargets.DescribeResponse
)
+ rpc(
+ :Verify,
+ InternalApi.Gofer.DeploymentTargets.VerifyRequest,
+ InternalApi.Gofer.DeploymentTargets.VerifyResponse
+ )
+
rpc(
:History,
InternalApi.Gofer.DeploymentTargets.HistoryRequest,
diff --git a/public-api/v1alpha/lib/internal_api/gofer.switch.pb.ex b/public-api/v1alpha/lib/internal_api/gofer.switch.pb.ex
index ddfff457d..649fe0c13 100644
--- a/public-api/v1alpha/lib/internal_api/gofer.switch.pb.ex
+++ b/public-api/v1alpha/lib/internal_api/gofer.switch.pb.ex
@@ -57,15 +57,24 @@ defmodule InternalApi.Gofer.Target do
pipeline_path: String.t(),
auto_trigger_on: [InternalApi.Gofer.AutoTriggerCond.t()],
parameter_env_vars: [InternalApi.Gofer.ParamEnvVar.t()],
- auto_promote_when: String.t()
+ auto_promote_when: String.t(),
+ deployment_target: String.t()
}
- defstruct [:name, :pipeline_path, :auto_trigger_on, :parameter_env_vars, :auto_promote_when]
+ defstruct [
+ :name,
+ :pipeline_path,
+ :auto_trigger_on,
+ :parameter_env_vars,
+ :auto_promote_when,
+ :deployment_target
+ ]
field(:name, 1, type: :string)
field(:pipeline_path, 2, type: :string)
field(:auto_trigger_on, 5, repeated: true, type: InternalApi.Gofer.AutoTriggerCond)
field(:parameter_env_vars, 6, repeated: true, type: InternalApi.Gofer.ParamEnvVar)
field(:auto_promote_when, 7, type: :string)
+ field(:deployment_target, 8, type: :string)
end
defmodule InternalApi.Gofer.ParamEnvVar do
@@ -128,12 +137,14 @@ defmodule InternalApi.Gofer.DescribeRequest do
@type t :: %__MODULE__{
switch_id: String.t(),
- events_per_target: integer
+ events_per_target: integer,
+ requester_id: String.t()
}
- defstruct [:switch_id, :events_per_target]
+ defstruct [:switch_id, :events_per_target, :requester_id]
field(:switch_id, 1, type: :string)
field(:events_per_target, 2, type: :int32)
+ field(:requester_id, 3, type: :string)
end
defmodule InternalApi.Gofer.DescribeResponse do
@@ -177,15 +188,69 @@ defmodule InternalApi.Gofer.TargetDescription do
pipeline_path: String.t(),
trigger_events: [InternalApi.Gofer.TriggerEvent.t()],
auto_trigger_on: [InternalApi.Gofer.AutoTriggerCond.t()],
- parameter_env_vars: [InternalApi.Gofer.ParamEnvVar.t()]
+ parameter_env_vars: [InternalApi.Gofer.ParamEnvVar.t()],
+ dt_description: InternalApi.Gofer.DeploymentTargetDescription.t()
}
- defstruct [:name, :pipeline_path, :trigger_events, :auto_trigger_on, :parameter_env_vars]
+ defstruct [
+ :name,
+ :pipeline_path,
+ :trigger_events,
+ :auto_trigger_on,
+ :parameter_env_vars,
+ :dt_description
+ ]
field(:name, 1, type: :string)
field(:pipeline_path, 2, type: :string)
field(:trigger_events, 4, repeated: true, type: InternalApi.Gofer.TriggerEvent)
field(:auto_trigger_on, 6, repeated: true, type: InternalApi.Gofer.AutoTriggerCond)
field(:parameter_env_vars, 7, repeated: true, type: InternalApi.Gofer.ParamEnvVar)
+ field(:dt_description, 8, type: InternalApi.Gofer.DeploymentTargetDescription)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargetDescription do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ target_id: String.t(),
+ target_name: String.t(),
+ access: InternalApi.Gofer.DeploymentTargetDescription.Access.t()
+ }
+ defstruct [:target_id, :target_name, :access]
+
+ field(:target_id, 1, type: :string)
+ field(:target_name, 2, type: :string)
+ field(:access, 3, type: InternalApi.Gofer.DeploymentTargetDescription.Access)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargetDescription.Access do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ allowed: boolean,
+ reason: integer,
+ message: String.t()
+ }
+ defstruct [:allowed, :reason, :message]
+
+ field(:allowed, 1, type: :bool)
+ field(:reason, 2, type: InternalApi.Gofer.DeploymentTargetDescription.Access.Reason, enum: true)
+ field(:message, 3, type: :string)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargetDescription.Access.Reason do
+ @moduledoc false
+ use Protobuf, enum: true, syntax: :proto3
+
+ field(:INTERNAL_ERROR, 0)
+ field(:NO_REASON, 1)
+ field(:SYNCING_TARGET, 2)
+ field(:CORRUPTED_TARGET, 3)
+ field(:BANNED_SUBJECT, 4)
+ field(:BANNED_OBJECT, 5)
+ field(:CORDONED_TARGET, 6)
end
defmodule InternalApi.Gofer.TriggerEvent do
@@ -246,12 +311,14 @@ defmodule InternalApi.Gofer.DescribeManyRequest do
@type t :: %__MODULE__{
switch_ids: [String.t()],
- events_per_target: integer
+ events_per_target: integer,
+ requester_id: String.t()
}
- defstruct [:switch_ids, :events_per_target]
+ defstruct [:switch_ids, :events_per_target, :requester_id]
field(:switch_ids, 1, repeated: true, type: :string)
field(:events_per_target, 2, type: :int32)
+ field(:requester_id, 3, type: :string)
end
defmodule InternalApi.Gofer.DescribeManyResponse do
diff --git a/public-api/v1alpha/lib/internal_api/organization.pb.ex b/public-api/v1alpha/lib/internal_api/organization.pb.ex
index dabc8dcc1..741f05b53 100644
--- a/public-api/v1alpha/lib/internal_api/organization.pb.ex
+++ b/public-api/v1alpha/lib/internal_api/organization.pb.ex
@@ -5,13 +5,15 @@ defmodule InternalApi.Organization.DescribeRequest do
@type t :: %__MODULE__{
org_id: String.t(),
org_username: String.t(),
- include_quotas: boolean
+ include_quotas: boolean,
+ soft_deleted: boolean
}
- defstruct [:org_id, :org_username, :include_quotas]
+ defstruct [:org_id, :org_username, :include_quotas, :soft_deleted]
field(:org_id, 1, type: :string)
field(:org_username, 2, type: :string)
field(:include_quotas, 3, type: :bool)
+ field(:soft_deleted, 4, type: :bool)
end
defmodule InternalApi.Organization.DescribeResponse do
@@ -28,6 +30,32 @@ defmodule InternalApi.Organization.DescribeResponse do
field(:organization, 2, type: InternalApi.Organization.Organization)
end
+defmodule InternalApi.Organization.DescribeManyRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ org_ids: [String.t()],
+ soft_deleted: boolean
+ }
+ defstruct [:org_ids, :soft_deleted]
+
+ field(:org_ids, 1, repeated: true, type: :string)
+ field(:soft_deleted, 2, type: :bool)
+end
+
+defmodule InternalApi.Organization.DescribeManyResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ organizations: [InternalApi.Organization.Organization.t()]
+ }
+ defstruct [:organizations]
+
+ field(:organizations, 1, repeated: true, type: InternalApi.Organization.Organization)
+end
+
defmodule InternalApi.Organization.ListRequest do
@moduledoc false
use Protobuf, syntax: :proto3
@@ -37,15 +65,17 @@ defmodule InternalApi.Organization.ListRequest do
created_at_gt: Google.Protobuf.Timestamp.t(),
order: integer,
page_size: integer,
- page_token: String.t()
+ page_token: String.t(),
+ soft_deleted: boolean
}
- defstruct [:user_id, :created_at_gt, :order, :page_size, :page_token]
+ defstruct [:user_id, :created_at_gt, :order, :page_size, :page_token, :soft_deleted]
field(:user_id, 2, type: :string)
field(:created_at_gt, 3, type: Google.Protobuf.Timestamp)
field(:order, 4, type: InternalApi.Organization.ListRequest.Order, enum: true)
field(:page_size, 5, type: :int32)
field(:page_token, 6, type: :string)
+ field(:soft_deleted, 7, type: :bool)
end
defmodule InternalApi.Organization.ListRequest.Order do
@@ -102,32 +132,6 @@ defmodule InternalApi.Organization.CreateResponse do
field(:organization, 2, type: InternalApi.Organization.Organization)
end
-defmodule InternalApi.Organization.CreateWithQuotasRequest do
- @moduledoc false
- use Protobuf, syntax: :proto3
-
- @type t :: %__MODULE__{
- organization: InternalApi.Organization.Organization.t(),
- quotas: [InternalApi.Organization.Quota.t()]
- }
- defstruct [:organization, :quotas]
-
- field(:organization, 1, type: InternalApi.Organization.Organization)
- field(:quotas, 2, repeated: true, type: InternalApi.Organization.Quota)
-end
-
-defmodule InternalApi.Organization.CreateWithQuotasResponse do
- @moduledoc false
- use Protobuf, syntax: :proto3
-
- @type t :: %__MODULE__{
- organization: InternalApi.Organization.Organization.t()
- }
- defstruct [:organization]
-
- field(:organization, 1, type: InternalApi.Organization.Organization)
-end
-
defmodule InternalApi.Organization.UpdateRequest do
@moduledoc false
use Protobuf, syntax: :proto3
@@ -514,6 +518,18 @@ defmodule InternalApi.Organization.DestroyRequest do
field(:org_id, 1, type: :string)
end
+defmodule InternalApi.Organization.RestoreRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ org_id: String.t()
+ }
+ defstruct [:org_id]
+
+ field(:org_id, 1, type: :string)
+end
+
defmodule InternalApi.Organization.Organization do
@moduledoc false
use Protobuf, syntax: :proto3
@@ -531,7 +547,9 @@ defmodule InternalApi.Organization.Organization do
restricted: boolean,
ip_allow_list: [String.t()],
allowed_id_providers: [String.t()],
- quotas: [InternalApi.Organization.Quota.t()]
+ deny_member_workflows: boolean,
+ deny_non_member_workflows: boolean,
+ settings: [InternalApi.Organization.OrganizationSetting.t()]
}
defstruct [
:org_username,
@@ -546,7 +564,9 @@ defmodule InternalApi.Organization.Organization do
:restricted,
:ip_allow_list,
:allowed_id_providers,
- :quotas
+ :deny_member_workflows,
+ :deny_non_member_workflows,
+ :settings
]
field(:org_username, 1, type: :string)
@@ -561,7 +581,9 @@ defmodule InternalApi.Organization.Organization do
field(:restricted, 11, type: :bool)
field(:ip_allow_list, 12, repeated: true, type: :string)
field(:allowed_id_providers, 13, repeated: true, type: :string)
- field(:quotas, 8, repeated: true, type: InternalApi.Organization.Quota)
+ field(:deny_member_workflows, 14, type: :bool)
+ field(:deny_non_member_workflows, 15, type: :bool)
+ field(:settings, 16, repeated: true, type: InternalApi.Organization.OrganizationSetting)
end
defmodule InternalApi.Organization.Suspension do
@@ -635,87 +657,132 @@ defmodule InternalApi.Organization.Member.Role do
field(:ADMIN, 2)
end
-defmodule InternalApi.Organization.Quota do
+defmodule InternalApi.Organization.OrganizationSetting do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
- type: integer,
- value: non_neg_integer
+ key: String.t(),
+ value: String.t()
}
- defstruct [:type, :value]
+ defstruct [:key, :value]
- field(:type, 1, type: InternalApi.Organization.Quota.Type, enum: true)
- field(:value, 2, type: :uint32)
+ field(:key, 1, type: :string)
+ field(:value, 2, type: :string)
end
-defmodule InternalApi.Organization.Quota.Type do
+defmodule InternalApi.Organization.RepositoryIntegratorsRequest do
@moduledoc false
- use Protobuf, enum: true, syntax: :proto3
+ use Protobuf, syntax: :proto3
- field(:MAX_PEOPLE_IN_ORG, 0)
- field(:MAX_PARALELLISM_IN_ORG, 1)
- field(:MAX_PROJECTS_IN_ORG, 7)
- field(:MAX_PARALLEL_E1_STANDARD_2, 2)
- field(:MAX_PARALLEL_E1_STANDARD_4, 3)
- field(:MAX_PARALLEL_E1_STANDARD_8, 4)
- field(:MAX_PARALLEL_A1_STANDARD_4, 5)
- field(:MAX_PARALLEL_A1_STANDARD_8, 6)
+ @type t :: %__MODULE__{
+ org_id: String.t()
+ }
+ defstruct [:org_id]
+
+ field(:org_id, 1, type: :string)
end
-defmodule InternalApi.Organization.GetQuotasRequest do
+defmodule InternalApi.Organization.RepositoryIntegratorsResponse do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
- org_id: String.t(),
- types: [integer]
+ primary: integer,
+ enabled: [integer],
+ available: [integer]
}
- defstruct [:org_id, :types]
+ defstruct [:primary, :enabled, :available]
+
+ field(:primary, 1, type: InternalApi.RepositoryIntegrator.IntegrationType, enum: true)
+
+ field(:enabled, 2,
+ repeated: true,
+ type: InternalApi.RepositoryIntegrator.IntegrationType,
+ enum: true
+ )
+
+ field(:available, 3,
+ repeated: true,
+ type: InternalApi.RepositoryIntegrator.IntegrationType,
+ enum: true
+ )
+end
+
+defmodule InternalApi.Organization.FetchOrganizationContactsRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ org_id: String.t()
+ }
+ defstruct [:org_id]
field(:org_id, 1, type: :string)
- field(:types, 2, repeated: true, type: InternalApi.Organization.Quota.Type, enum: true)
end
-defmodule InternalApi.Organization.GetQuotaResponse do
+defmodule InternalApi.Organization.FetchOrganizationContactsResponse do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
- quotas: [InternalApi.Organization.Quota.t()]
+ org_contacts: [InternalApi.Organization.OrganizationContact.t()]
}
- defstruct [:quotas]
+ defstruct [:org_contacts]
- field(:quotas, 1, repeated: true, type: InternalApi.Organization.Quota)
+ field(:org_contacts, 1, repeated: true, type: InternalApi.Organization.OrganizationContact)
end
-defmodule InternalApi.Organization.UpdateQuotasRequest do
+defmodule InternalApi.Organization.ModifyOrganizationContactRequest do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
- org_id: String.t(),
- quotas: [InternalApi.Organization.Quota.t()]
+ org_contact: InternalApi.Organization.OrganizationContact.t()
}
- defstruct [:org_id, :quotas]
+ defstruct [:org_contact]
- field(:org_id, 1, type: :string)
- field(:quotas, 2, repeated: true, type: InternalApi.Organization.Quota)
+ field(:org_contact, 1, type: InternalApi.Organization.OrganizationContact)
end
-defmodule InternalApi.Organization.UpdateQuotasResponse do
+defmodule InternalApi.Organization.ModifyOrganizationContactResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ defstruct []
+end
+
+defmodule InternalApi.Organization.OrganizationContact do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
- quotas: [InternalApi.Organization.Quota.t()]
+ org_id: String.t(),
+ type: integer,
+ name: String.t(),
+ email: String.t(),
+ phone: String.t()
}
- defstruct [:quotas]
+ defstruct [:org_id, :type, :name, :email, :phone]
- field(:quotas, 1, repeated: true, type: InternalApi.Organization.Quota)
+ field(:org_id, 1, type: :string)
+ field(:type, 2, type: InternalApi.Organization.OrganizationContact.ContactType, enum: true)
+ field(:name, 3, type: :string)
+ field(:email, 4, type: :string)
+ field(:phone, 5, type: :string)
end
-defmodule InternalApi.Organization.RepositoryIntegratorsRequest do
+defmodule InternalApi.Organization.OrganizationContact.ContactType do
+ @moduledoc false
+ use Protobuf, enum: true, syntax: :proto3
+
+ field(:CONTACT_TYPE_UNSPECIFIED, 0)
+ field(:CONTACT_TYPE_MAIN, 1)
+ field(:CONTACT_TYPE_FINANCES, 2)
+ field(:CONTACT_TYPE_SECURITY, 3)
+end
+
+defmodule InternalApi.Organization.FetchOrganizationSettingsRequest do
@moduledoc false
use Protobuf, syntax: :proto3
@@ -727,30 +794,42 @@ defmodule InternalApi.Organization.RepositoryIntegratorsRequest do
field(:org_id, 1, type: :string)
end
-defmodule InternalApi.Organization.RepositoryIntegratorsResponse do
+defmodule InternalApi.Organization.FetchOrganizationSettingsResponse do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
- primary: integer,
- enabled: [integer],
- available: [integer]
+ settings: [InternalApi.Organization.OrganizationSetting.t()]
}
- defstruct [:primary, :enabled, :available]
+ defstruct [:settings]
- field(:primary, 1, type: InternalApi.RepositoryIntegrator.IntegrationType, enum: true)
+ field(:settings, 1, repeated: true, type: InternalApi.Organization.OrganizationSetting)
+end
- field(:enabled, 2,
- repeated: true,
- type: InternalApi.RepositoryIntegrator.IntegrationType,
- enum: true
- )
+defmodule InternalApi.Organization.ModifyOrganizationSettingsRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
- field(:available, 3,
- repeated: true,
- type: InternalApi.RepositoryIntegrator.IntegrationType,
- enum: true
- )
+ @type t :: %__MODULE__{
+ org_id: String.t(),
+ settings: [InternalApi.Organization.OrganizationSetting.t()]
+ }
+ defstruct [:org_id, :settings]
+
+ field(:org_id, 1, type: :string)
+ field(:settings, 2, repeated: true, type: InternalApi.Organization.OrganizationSetting)
+end
+
+defmodule InternalApi.Organization.ModifyOrganizationSettingsResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ settings: [InternalApi.Organization.OrganizationSetting.t()]
+ }
+ defstruct [:settings]
+
+ field(:settings, 1, repeated: true, type: InternalApi.Organization.OrganizationSetting)
end
defmodule InternalApi.Organization.OrganizationCreated do
@@ -901,6 +980,20 @@ defmodule InternalApi.Organization.OrganizationDailyUpdate do
field(:timestamp, 11, type: Google.Protobuf.Timestamp)
end
+defmodule InternalApi.Organization.OrganizationRestored do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ org_id: String.t(),
+ timestamp: Google.Protobuf.Timestamp.t()
+ }
+ defstruct [:org_id, :timestamp]
+
+ field(:org_id, 1, type: :string)
+ field(:timestamp, 2, type: Google.Protobuf.Timestamp)
+end
+
defmodule InternalApi.Organization.OrganizationService.Service do
@moduledoc false
use GRPC.Service, name: "InternalApi.Organization.OrganizationService"
@@ -911,15 +1004,14 @@ defmodule InternalApi.Organization.OrganizationService.Service do
InternalApi.Organization.DescribeResponse
)
- rpc(:List, InternalApi.Organization.ListRequest, InternalApi.Organization.ListResponse)
- rpc(:Create, InternalApi.Organization.CreateRequest, InternalApi.Organization.CreateResponse)
-
rpc(
- :CreateWithQuotas,
- InternalApi.Organization.CreateWithQuotasRequest,
- InternalApi.Organization.CreateWithQuotasResponse
+ :DescribeMany,
+ InternalApi.Organization.DescribeManyRequest,
+ InternalApi.Organization.DescribeManyResponse
)
+ rpc(:List, InternalApi.Organization.ListRequest, InternalApi.Organization.ListResponse)
+ rpc(:Create, InternalApi.Organization.CreateRequest, InternalApi.Organization.CreateResponse)
rpc(:Update, InternalApi.Organization.UpdateRequest, InternalApi.Organization.UpdateResponse)
rpc(:IsValid, InternalApi.Organization.Organization, InternalApi.Organization.IsValidResponse)
@@ -973,24 +1065,37 @@ defmodule InternalApi.Organization.OrganizationService.Service do
InternalApi.Organization.ListSuspensionsResponse
)
+ rpc(:Destroy, InternalApi.Organization.DestroyRequest, Google.Protobuf.Empty)
+ rpc(:Restore, InternalApi.Organization.RestoreRequest, Google.Protobuf.Empty)
+
+ rpc(
+ :RepositoryIntegrators,
+ InternalApi.Organization.RepositoryIntegratorsRequest,
+ InternalApi.Organization.RepositoryIntegratorsResponse
+ )
+
rpc(
- :UpdateQuotas,
- InternalApi.Organization.UpdateQuotasRequest,
- InternalApi.Organization.UpdateQuotasResponse
+ :FetchOrganizationContacts,
+ InternalApi.Organization.FetchOrganizationContactsRequest,
+ InternalApi.Organization.FetchOrganizationContactsResponse
)
rpc(
- :GetQuotas,
- InternalApi.Organization.GetQuotasRequest,
- InternalApi.Organization.GetQuotaResponse
+ :ModifyOrganizationContact,
+ InternalApi.Organization.ModifyOrganizationContactRequest,
+ InternalApi.Organization.ModifyOrganizationContactResponse
)
- rpc(:Destroy, InternalApi.Organization.DestroyRequest, Google.Protobuf.Empty)
+ rpc(
+ :FetchOrganizationSettings,
+ InternalApi.Organization.FetchOrganizationSettingsRequest,
+ InternalApi.Organization.FetchOrganizationSettingsResponse
+ )
rpc(
- :RepositoryIntegrators,
- InternalApi.Organization.RepositoryIntegratorsRequest,
- InternalApi.Organization.RepositoryIntegratorsResponse
+ :ModifyOrganizationSettings,
+ InternalApi.Organization.ModifyOrganizationSettingsRequest,
+ InternalApi.Organization.ModifyOrganizationSettingsResponse
)
end
diff --git a/public-api/v1alpha/lib/internal_api/periodic_scheduler.pb.ex b/public-api/v1alpha/lib/internal_api/periodic_scheduler.pb.ex
index 1106f3239..2422e3081 100644
--- a/public-api/v1alpha/lib/internal_api/periodic_scheduler.pb.ex
+++ b/public-api/v1alpha/lib/internal_api/periodic_scheduler.pb.ex
@@ -239,7 +239,8 @@ defmodule InternalApi.PeriodicScheduler.Periodic do
inserted_at: Google.Protobuf.Timestamp.t(),
recurring: boolean,
parameters: [InternalApi.PeriodicScheduler.Periodic.Parameter.t()],
- description: String.t()
+ description: String.t(),
+ organization_id: String.t()
}
defstruct [
:id,
@@ -257,7 +258,8 @@ defmodule InternalApi.PeriodicScheduler.Periodic do
:inserted_at,
:recurring,
:parameters,
- :description
+ :description,
+ :organization_id
]
field(:id, 1, type: :string)
@@ -276,6 +278,7 @@ defmodule InternalApi.PeriodicScheduler.Periodic do
field(:recurring, 14, type: :bool)
field(:parameters, 15, repeated: true, type: InternalApi.PeriodicScheduler.Periodic.Parameter)
field(:description, 16, type: :string)
+ field(:organization_id, 17, type: :string)
end
defmodule InternalApi.PeriodicScheduler.Periodic.Parameter do
diff --git a/public-api/v1alpha/lib/internal_api/plumber_w_f.workflow.pb.ex b/public-api/v1alpha/lib/internal_api/plumber_w_f.workflow.pb.ex
index 592ee6cc7..97d0606a8 100644
--- a/public-api/v1alpha/lib/internal_api/plumber_w_f.workflow.pb.ex
+++ b/public-api/v1alpha/lib/internal_api/plumber_w_f.workflow.pb.ex
@@ -5,7 +5,6 @@ defmodule InternalApi.PlumberWF.ScheduleRequest do
@type t :: %__MODULE__{
service: integer,
repo: InternalApi.PlumberWF.ScheduleRequest.Repo.t(),
- auth: InternalApi.PlumberWF.ScheduleRequest.Auth.t(),
project_id: String.t(),
branch_id: String.t(),
hook_id: String.t(),
@@ -15,12 +14,13 @@ defmodule InternalApi.PlumberWF.ScheduleRequest do
requester_id: String.t(),
organization_id: String.t(),
label: String.t(),
- triggered_by: integer
+ triggered_by: integer,
+ scheduler_task_id: String.t(),
+ env_vars: [InternalApi.PlumberWF.ScheduleRequest.EnvVar.t()]
}
defstruct [
:service,
:repo,
- :auth,
:project_id,
:branch_id,
:hook_id,
@@ -30,12 +30,13 @@ defmodule InternalApi.PlumberWF.ScheduleRequest do
:requester_id,
:organization_id,
:label,
- :triggered_by
+ :triggered_by,
+ :scheduler_task_id,
+ :env_vars
]
field(:service, 2, type: InternalApi.PlumberWF.ScheduleRequest.ServiceType, enum: true)
field(:repo, 3, type: InternalApi.PlumberWF.ScheduleRequest.Repo)
- field(:auth, 4, type: InternalApi.PlumberWF.ScheduleRequest.Auth)
field(:project_id, 6, type: :string)
field(:branch_id, 7, type: :string)
field(:hook_id, 8, type: :string)
@@ -46,6 +47,8 @@ defmodule InternalApi.PlumberWF.ScheduleRequest do
field(:organization_id, 13, type: :string)
field(:label, 14, type: :string)
field(:triggered_by, 15, type: InternalApi.PlumberWF.TriggeredBy, enum: true)
+ field(:scheduler_task_id, 16, type: :string)
+ field(:env_vars, 17, repeated: true, type: InternalApi.PlumberWF.ScheduleRequest.EnvVar)
end
defmodule InternalApi.PlumberWF.ScheduleRequest.Repo do
@@ -56,30 +59,30 @@ defmodule InternalApi.PlumberWF.ScheduleRequest.Repo do
owner: String.t(),
repo_name: String.t(),
branch_name: String.t(),
- commit_sha: String.t()
+ commit_sha: String.t(),
+ repository_id: String.t()
}
- defstruct [:owner, :repo_name, :branch_name, :commit_sha]
+ defstruct [:owner, :repo_name, :branch_name, :commit_sha, :repository_id]
field(:owner, 1, type: :string)
field(:repo_name, 2, type: :string)
field(:branch_name, 4, type: :string)
field(:commit_sha, 5, type: :string)
+ field(:repository_id, 6, type: :string)
end
-defmodule InternalApi.PlumberWF.ScheduleRequest.Auth do
+defmodule InternalApi.PlumberWF.ScheduleRequest.EnvVar do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
- client_id: String.t(),
- client_secret: String.t(),
- access_token: String.t()
+ name: String.t(),
+ value: String.t()
}
- defstruct [:client_id, :client_secret, :access_token]
+ defstruct [:name, :value]
- field(:client_id, 1, type: :string)
- field(:client_secret, 2, type: :string)
- field(:access_token, 3, type: :string)
+ field(:name, 1, type: :string)
+ field(:value, 2, type: :string)
end
defmodule InternalApi.PlumberWF.ScheduleRequest.ServiceType do
@@ -89,6 +92,9 @@ defmodule InternalApi.PlumberWF.ScheduleRequest.ServiceType do
field(:GIT_HUB, 0)
field(:LOCAL, 1)
field(:SNAPSHOT, 2)
+ field(:BITBUCKET, 3)
+ field(:GITLAB, 4)
+ field(:GIT, 5)
end
defmodule InternalApi.PlumberWF.ScheduleResponse do
@@ -570,6 +576,32 @@ defmodule InternalApi.PlumberWF.DescribeResponse do
field(:workflow, 2, type: InternalApi.PlumberWF.WorkflowDetails)
end
+defmodule InternalApi.PlumberWF.DescribeManyRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ wf_ids: [String.t()]
+ }
+ defstruct [:wf_ids]
+
+ field(:wf_ids, 1, repeated: true, type: :string)
+end
+
+defmodule InternalApi.PlumberWF.DescribeManyResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ status: InternalApi.Status.t(),
+ workflows: [InternalApi.PlumberWF.WorkflowDetails.t()]
+ }
+ defstruct [:status, :workflows]
+
+ field(:status, 1, type: InternalApi.Status)
+ field(:workflows, 2, repeated: true, type: InternalApi.PlumberWF.WorkflowDetails)
+end
+
defmodule InternalApi.PlumberWF.TerminateRequest do
@moduledoc false
use Protobuf, syntax: :proto3
@@ -766,6 +798,13 @@ defmodule InternalApi.PlumberWF.WorkflowService.Service do
)
rpc(:Describe, InternalApi.PlumberWF.DescribeRequest, InternalApi.PlumberWF.DescribeResponse)
+
+ rpc(
+ :DescribeMany,
+ InternalApi.PlumberWF.DescribeManyRequest,
+ InternalApi.PlumberWF.DescribeManyResponse
+ )
+
rpc(:Terminate, InternalApi.PlumberWF.TerminateRequest, InternalApi.PlumberWF.TerminateResponse)
rpc(
diff --git a/public-api/v1alpha/lib/internal_api/projecthub.pb.ex b/public-api/v1alpha/lib/internal_api/projecthub.pb.ex
index 0615de758..097c4b7d7 100644
--- a/public-api/v1alpha/lib/internal_api/projecthub.pb.ex
+++ b/public-api/v1alpha/lib/internal_api/projecthub.pb.ex
@@ -148,7 +148,8 @@ defmodule InternalApi.Projecthub.Project.Spec do
custom_permissions: boolean,
artifact_store_id: String.t(),
cache_id: String.t(),
- docker_registry_id: String.t()
+ docker_registry_id: String.t(),
+ tasks: [InternalApi.Projecthub.Project.Spec.Task.t()]
}
defstruct [
:repository,
@@ -161,7 +162,8 @@ defmodule InternalApi.Projecthub.Project.Spec do
:custom_permissions,
:artifact_store_id,
:cache_id,
- :docker_registry_id
+ :docker_registry_id,
+ :tasks
]
field(:repository, 1, type: InternalApi.Projecthub.Project.Spec.Repository)
@@ -186,6 +188,7 @@ defmodule InternalApi.Projecthub.Project.Spec do
field(:artifact_store_id, 9, type: :string)
field(:cache_id, 10, type: :string)
field(:docker_registry_id, 11, type: :string)
+ field(:tasks, 12, repeated: true, type: InternalApi.Projecthub.Project.Spec.Task)
end
defmodule InternalApi.Projecthub.Project.Spec.Repository do
@@ -362,6 +365,73 @@ defmodule InternalApi.Projecthub.Project.Spec.Scheduler.Status do
field(:STATUS_ACTIVE, 2)
end
+defmodule InternalApi.Projecthub.Project.Spec.Task do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ id: String.t(),
+ name: String.t(),
+ branch: String.t(),
+ at: String.t(),
+ pipeline_file: String.t(),
+ status: integer,
+ recurring: boolean,
+ parameters: [InternalApi.Projecthub.Project.Spec.Task.Parameter.t()],
+ description: String.t()
+ }
+ defstruct [
+ :id,
+ :name,
+ :branch,
+ :at,
+ :pipeline_file,
+ :status,
+ :recurring,
+ :parameters,
+ :description
+ ]
+
+ field(:id, 1, type: :string)
+ field(:name, 2, type: :string)
+ field(:branch, 3, type: :string)
+ field(:at, 4, type: :string)
+ field(:pipeline_file, 5, type: :string)
+ field(:status, 6, type: InternalApi.Projecthub.Project.Spec.Task.Status, enum: true)
+ field(:recurring, 7, type: :bool)
+ field(:parameters, 8, repeated: true, type: InternalApi.Projecthub.Project.Spec.Task.Parameter)
+ field(:description, 9, type: :string)
+end
+
+defmodule InternalApi.Projecthub.Project.Spec.Task.Parameter do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ name: String.t(),
+ required: boolean,
+ description: String.t(),
+ default_value: String.t(),
+ options: [String.t()]
+ }
+ defstruct [:name, :required, :description, :default_value, :options]
+
+ field(:name, 1, type: :string)
+ field(:required, 2, type: :bool)
+ field(:description, 3, type: :string)
+ field(:default_value, 4, type: :string)
+ field(:options, 5, repeated: true, type: :string)
+end
+
+defmodule InternalApi.Projecthub.Project.Spec.Task.Status do
+ @moduledoc false
+ use Protobuf, enum: true, syntax: :proto3
+
+ field(:STATUS_UNSPECIFIED, 0)
+ field(:STATUS_INACTIVE, 1)
+ field(:STATUS_ACTIVE, 2)
+end
+
defmodule InternalApi.Projecthub.Project.Spec.Visibility do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
@@ -392,9 +462,10 @@ defmodule InternalApi.Projecthub.Project.Status do
cache: InternalApi.Projecthub.Project.Status.Cache.t(),
artifact_store: InternalApi.Projecthub.Project.Status.ArtifactStore.t(),
repository: InternalApi.Projecthub.Project.Status.Repository.t(),
- analysis: InternalApi.Projecthub.Project.Status.Analysis.t()
+ analysis: InternalApi.Projecthub.Project.Status.Analysis.t(),
+ permissions: InternalApi.Projecthub.Project.Status.Permissions.t()
}
- defstruct [:state, :state_reason, :cache, :artifact_store, :repository, :analysis]
+ defstruct [:state, :state_reason, :cache, :artifact_store, :repository, :analysis, :permissions]
field(:state, 1, type: InternalApi.Projecthub.Project.Status.State, enum: true)
field(:state_reason, 2, type: :string)
@@ -402,6 +473,7 @@ defmodule InternalApi.Projecthub.Project.Status do
field(:artifact_store, 4, type: InternalApi.Projecthub.Project.Status.ArtifactStore)
field(:repository, 5, type: InternalApi.Projecthub.Project.Status.Repository)
field(:analysis, 6, type: InternalApi.Projecthub.Project.Status.Analysis)
+ field(:permissions, 7, type: InternalApi.Projecthub.Project.Status.Permissions)
end
defmodule InternalApi.Projecthub.Project.Status.Cache do
@@ -452,6 +524,18 @@ defmodule InternalApi.Projecthub.Project.Status.Analysis do
field(:state, 1, type: InternalApi.Projecthub.Project.Status.State, enum: true)
end
+defmodule InternalApi.Projecthub.Project.Status.Permissions do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ state: integer
+ }
+ defstruct [:state]
+
+ field(:state, 1, type: InternalApi.Projecthub.Project.Status.State, enum: true)
+end
+
defmodule InternalApi.Projecthub.Project.Status.State do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
@@ -459,6 +543,7 @@ defmodule InternalApi.Projecthub.Project.Status.State do
field(:INITIALIZING, 0)
field(:READY, 1)
field(:ERROR, 2)
+ field(:ONBOARDING, 3)
end
defmodule InternalApi.Projecthub.ListRequest do
@@ -469,14 +554,16 @@ defmodule InternalApi.Projecthub.ListRequest do
metadata: InternalApi.Projecthub.RequestMeta.t(),
pagination: InternalApi.Projecthub.PaginationRequest.t(),
owner_id: String.t(),
- repo_url: String.t()
+ repo_url: String.t(),
+ soft_deleted: boolean
}
- defstruct [:metadata, :pagination, :owner_id, :repo_url]
+ defstruct [:metadata, :pagination, :owner_id, :repo_url, :soft_deleted]
field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
field(:pagination, 2, type: InternalApi.Projecthub.PaginationRequest)
field(:owner_id, 3, type: :string)
field(:repo_url, 4, type: :string)
+ field(:soft_deleted, 5, type: :bool)
end
defmodule InternalApi.Projecthub.ListResponse do
@@ -495,6 +582,56 @@ defmodule InternalApi.Projecthub.ListResponse do
field(:projects, 3, repeated: true, type: InternalApi.Projecthub.Project)
end
+defmodule InternalApi.Projecthub.ListKeysetRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ metadata: InternalApi.Projecthub.RequestMeta.t(),
+ page_size: integer,
+ page_token: String.t(),
+ direction: integer,
+ owner_id: String.t(),
+ repo_url: String.t(),
+ created_after: Google.Protobuf.Timestamp.t()
+ }
+ defstruct [:metadata, :page_size, :page_token, :direction, :owner_id, :repo_url, :created_after]
+
+ field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
+ field(:page_size, 2, type: :int32)
+ field(:page_token, 3, type: :string)
+ field(:direction, 4, type: InternalApi.Projecthub.ListKeysetRequest.Direction, enum: true)
+ field(:owner_id, 5, type: :string)
+ field(:repo_url, 6, type: :string)
+ field(:created_after, 7, type: Google.Protobuf.Timestamp)
+end
+
+defmodule InternalApi.Projecthub.ListKeysetRequest.Direction do
+ @moduledoc false
+ use Protobuf, enum: true, syntax: :proto3
+
+ field(:NEXT, 0)
+ field(:PREVIOUS, 1)
+end
+
+defmodule InternalApi.Projecthub.ListKeysetResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ metadata: InternalApi.Projecthub.ResponseMeta.t(),
+ projects: [InternalApi.Projecthub.Project.t()],
+ next_page_token: String.t(),
+ previous_page_token: String.t()
+ }
+ defstruct [:metadata, :projects, :next_page_token, :previous_page_token]
+
+ field(:metadata, 1, type: InternalApi.Projecthub.ResponseMeta)
+ field(:projects, 2, repeated: true, type: InternalApi.Projecthub.Project)
+ field(:next_page_token, 3, type: :string)
+ field(:previous_page_token, 4, type: :string)
+end
+
defmodule InternalApi.Projecthub.DescribeRequest do
@moduledoc false
use Protobuf, syntax: :proto3
@@ -503,14 +640,16 @@ defmodule InternalApi.Projecthub.DescribeRequest do
metadata: InternalApi.Projecthub.RequestMeta.t(),
id: String.t(),
name: String.t(),
- detailed: boolean
+ detailed: boolean,
+ soft_deleted: boolean
}
- defstruct [:metadata, :id, :name, :detailed]
+ defstruct [:metadata, :id, :name, :detailed, :soft_deleted]
field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
field(:id, 2, type: :string)
field(:name, 3, type: :string)
field(:detailed, 4, type: :bool)
+ field(:soft_deleted, 5, type: :bool)
end
defmodule InternalApi.Projecthub.DescribeResponse do
@@ -533,12 +672,14 @@ defmodule InternalApi.Projecthub.DescribeManyRequest do
@type t :: %__MODULE__{
metadata: InternalApi.Projecthub.RequestMeta.t(),
- ids: [String.t()]
+ ids: [String.t()],
+ soft_deleted: boolean
}
- defstruct [:metadata, :ids]
+ defstruct [:metadata, :ids, :soft_deleted]
field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
field(:ids, 2, repeated: true, type: :string)
+ field(:soft_deleted, 3, type: :bool)
end
defmodule InternalApi.Projecthub.DescribeManyResponse do
@@ -561,12 +702,14 @@ defmodule InternalApi.Projecthub.CreateRequest do
@type t :: %__MODULE__{
metadata: InternalApi.Projecthub.RequestMeta.t(),
- project: InternalApi.Projecthub.Project.t()
+ project: InternalApi.Projecthub.Project.t(),
+ skip_onboarding: boolean
}
- defstruct [:metadata, :project]
+ defstruct [:metadata, :project, :skip_onboarding]
field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
field(:project, 2, type: InternalApi.Projecthub.Project)
+ field(:skip_onboarding, 3, type: :bool)
end
defmodule InternalApi.Projecthub.CreateResponse do
@@ -589,12 +732,14 @@ defmodule InternalApi.Projecthub.UpdateRequest do
@type t :: %__MODULE__{
metadata: InternalApi.Projecthub.RequestMeta.t(),
- project: InternalApi.Projecthub.Project.t()
+ project: InternalApi.Projecthub.Project.t(),
+ omit_schedulers_and_tasks: boolean
}
- defstruct [:metadata, :project]
+ defstruct [:metadata, :project, :omit_schedulers_and_tasks]
field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
field(:project, 2, type: InternalApi.Projecthub.Project)
+ field(:omit_schedulers_and_tasks, 3, type: :bool)
end
defmodule InternalApi.Projecthub.UpdateResponse do
@@ -639,6 +784,32 @@ defmodule InternalApi.Projecthub.DestroyResponse do
field(:metadata, 1, type: InternalApi.Projecthub.ResponseMeta)
end
+defmodule InternalApi.Projecthub.RestoreRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ metadata: InternalApi.Projecthub.RequestMeta.t(),
+ id: String.t()
+ }
+ defstruct [:metadata, :id]
+
+ field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
+ field(:id, 2, type: :string)
+end
+
+defmodule InternalApi.Projecthub.RestoreResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ metadata: InternalApi.Projecthub.ResponseMeta.t()
+ }
+ defstruct [:metadata]
+
+ field(:metadata, 1, type: InternalApi.Projecthub.ResponseMeta)
+end
+
defmodule InternalApi.Projecthub.UsersRequest do
@moduledoc false
use Protobuf, syntax: :proto3
@@ -702,13 +873,15 @@ defmodule InternalApi.Projecthub.CheckDeployKeyResponse.DeployKey do
@type t :: %__MODULE__{
title: String.t(),
fingerprint: String.t(),
- created_at: Google.Protobuf.Timestamp.t()
+ created_at: Google.Protobuf.Timestamp.t(),
+ public_key: String.t()
}
- defstruct [:title, :fingerprint, :created_at]
+ defstruct [:title, :fingerprint, :created_at, :public_key]
field(:title, 1, type: :string)
field(:fingerprint, 2, type: :string)
field(:created_at, 3, type: Google.Protobuf.Timestamp)
+ field(:public_key, 4, type: :string)
end
defmodule InternalApi.Projecthub.RegenerateDeployKeyRequest do
@@ -746,13 +919,15 @@ defmodule InternalApi.Projecthub.RegenerateDeployKeyResponse.DeployKey do
@type t :: %__MODULE__{
title: String.t(),
fingerprint: String.t(),
- created_at: Google.Protobuf.Timestamp.t()
+ created_at: Google.Protobuf.Timestamp.t(),
+ public_key: String.t()
}
- defstruct [:title, :fingerprint, :created_at]
+ defstruct [:title, :fingerprint, :created_at, :public_key]
field(:title, 1, type: :string)
field(:fingerprint, 2, type: :string)
field(:created_at, 3, type: Google.Protobuf.Timestamp)
+ field(:public_key, 4, type: :string)
end
defmodule InternalApi.Projecthub.CheckWebhookRequest do
@@ -905,6 +1080,60 @@ defmodule InternalApi.Projecthub.GithubAppSwitchResponse do
field(:metadata, 1, type: InternalApi.Projecthub.ResponseMeta)
end
+defmodule InternalApi.Projecthub.FinishOnboardingRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ metadata: InternalApi.Projecthub.RequestMeta.t(),
+ id: String.t()
+ }
+ defstruct [:metadata, :id]
+
+ field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
+ field(:id, 2, type: :string)
+end
+
+defmodule InternalApi.Projecthub.FinishOnboardingResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ metadata: InternalApi.Projecthub.ResponseMeta.t()
+ }
+ defstruct [:metadata]
+
+ field(:metadata, 1, type: InternalApi.Projecthub.ResponseMeta)
+end
+
+defmodule InternalApi.Projecthub.RegenerateWebhookSecretRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ metadata: InternalApi.Projecthub.RequestMeta.t(),
+ id: String.t()
+ }
+ defstruct [:metadata, :id]
+
+ field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
+ field(:id, 2, type: :string)
+end
+
+defmodule InternalApi.Projecthub.RegenerateWebhookSecretResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ metadata: InternalApi.Projecthub.ResponseMeta.t(),
+ secret: String.t()
+ }
+ defstruct [:metadata, :secret]
+
+ field(:metadata, 1, type: InternalApi.Projecthub.ResponseMeta)
+ field(:secret, 2, type: :string)
+end
+
defmodule InternalApi.Projecthub.ProjectCreated do
@moduledoc false
use Protobuf, syntax: :proto3
@@ -937,6 +1166,22 @@ defmodule InternalApi.Projecthub.ProjectDeleted do
field(:org_id, 3, type: :string)
end
+defmodule InternalApi.Projecthub.ProjectRestored do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ project_id: String.t(),
+ timestamp: Google.Protobuf.Timestamp.t(),
+ org_id: String.t()
+ }
+ defstruct [:project_id, :timestamp, :org_id]
+
+ field(:project_id, 1, type: :string)
+ field(:timestamp, 2, type: Google.Protobuf.Timestamp)
+ field(:org_id, 3, type: :string)
+end
+
defmodule InternalApi.Projecthub.ProjectUpdated do
@moduledoc false
use Protobuf, syntax: :proto3
@@ -972,6 +1217,13 @@ defmodule InternalApi.Projecthub.ProjectService.Service do
use GRPC.Service, name: "InternalApi.Projecthub.ProjectService"
rpc(:List, InternalApi.Projecthub.ListRequest, InternalApi.Projecthub.ListResponse)
+
+ rpc(
+ :ListKeyset,
+ InternalApi.Projecthub.ListKeysetRequest,
+ InternalApi.Projecthub.ListKeysetResponse
+ )
+
rpc(:Describe, InternalApi.Projecthub.DescribeRequest, InternalApi.Projecthub.DescribeResponse)
rpc(
@@ -983,6 +1235,7 @@ defmodule InternalApi.Projecthub.ProjectService.Service do
rpc(:Create, InternalApi.Projecthub.CreateRequest, InternalApi.Projecthub.CreateResponse)
rpc(:Update, InternalApi.Projecthub.UpdateRequest, InternalApi.Projecthub.UpdateResponse)
rpc(:Destroy, InternalApi.Projecthub.DestroyRequest, InternalApi.Projecthub.DestroyResponse)
+ rpc(:Restore, InternalApi.Projecthub.RestoreRequest, InternalApi.Projecthub.RestoreResponse)
rpc(:Users, InternalApi.Projecthub.UsersRequest, InternalApi.Projecthub.UsersResponse)
rpc(
@@ -1009,6 +1262,12 @@ defmodule InternalApi.Projecthub.ProjectService.Service do
InternalApi.Projecthub.RegenerateWebhookResponse
)
+ rpc(
+ :RegenerateWebhookSecret,
+ InternalApi.Projecthub.RegenerateWebhookSecretRequest,
+ InternalApi.Projecthub.RegenerateWebhookSecretResponse
+ )
+
rpc(
:ChangeProjectOwner,
InternalApi.Projecthub.ChangeProjectOwnerRequest,
@@ -1026,6 +1285,12 @@ defmodule InternalApi.Projecthub.ProjectService.Service do
InternalApi.Projecthub.GithubAppSwitchRequest,
InternalApi.Projecthub.GithubAppSwitchResponse
)
+
+ rpc(
+ :FinishOnboarding,
+ InternalApi.Projecthub.FinishOnboardingRequest,
+ InternalApi.Projecthub.FinishOnboardingResponse
+ )
end
defmodule InternalApi.Projecthub.ProjectService.Stub do
diff --git a/public-api/v1alpha/lib/internal_api/repo_proxy.pb.ex b/public-api/v1alpha/lib/internal_api/repo_proxy.pb.ex
index 3dce538bc..0f0f6c774 100644
--- a/public-api/v1alpha/lib/internal_api/repo_proxy.pb.ex
+++ b/public-api/v1alpha/lib/internal_api/repo_proxy.pb.ex
@@ -38,6 +38,7 @@ defmodule InternalApi.RepoProxy.Hook do
repo_host_username: String.t(),
repo_host_email: String.t(),
repo_host_avatar_url: String.t(),
+ repo_host_uid: String.t(),
user_id: String.t(),
semaphore_email: String.t(),
repo_slug: String.t(),
@@ -62,6 +63,7 @@ defmodule InternalApi.RepoProxy.Hook do
:repo_host_username,
:repo_host_email,
:repo_host_avatar_url,
+ :repo_host_uid,
:user_id,
:semaphore_email,
:repo_slug,
@@ -86,6 +88,7 @@ defmodule InternalApi.RepoProxy.Hook do
field(:repo_host_username, 7, type: :string)
field(:repo_host_email, 8, type: :string)
field(:repo_host_avatar_url, 10, type: :string)
+ field(:repo_host_uid, 25, type: :string)
field(:user_id, 9, type: :string)
field(:semaphore_email, 6, type: :string)
field(:repo_slug, 17, type: :string)
@@ -246,6 +249,111 @@ defmodule InternalApi.RepoProxy.CreateResponse do
field(:pipeline_id, 3, type: :string)
end
+defmodule InternalApi.RepoProxy.CreateBlankRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ request_token: String.t(),
+ project_id: String.t(),
+ requester_id: String.t(),
+ definition_file: String.t(),
+ pipeline_id: String.t(),
+ wf_id: String.t(),
+ triggered_by: integer,
+ git: InternalApi.RepoProxy.CreateBlankRequest.Git.t()
+ }
+ defstruct [
+ :request_token,
+ :project_id,
+ :requester_id,
+ :definition_file,
+ :pipeline_id,
+ :wf_id,
+ :triggered_by,
+ :git
+ ]
+
+ field(:request_token, 1, type: :string)
+ field(:project_id, 2, type: :string)
+ field(:requester_id, 3, type: :string)
+ field(:definition_file, 4, type: :string)
+ field(:pipeline_id, 5, type: :string)
+ field(:wf_id, 6, type: :string)
+ field(:triggered_by, 7, type: InternalApi.PlumberWF.TriggeredBy, enum: true)
+ field(:git, 8, type: InternalApi.RepoProxy.CreateBlankRequest.Git)
+end
+
+defmodule InternalApi.RepoProxy.CreateBlankRequest.Git do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ reference: String.t(),
+ commit_sha: String.t()
+ }
+ defstruct [:reference, :commit_sha]
+
+ field(:reference, 1, type: :string)
+ field(:commit_sha, 2, type: :string)
+end
+
+defmodule InternalApi.RepoProxy.CreateBlankResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ hook_id: String.t(),
+ wf_id: String.t(),
+ pipeline_id: String.t(),
+ branch_id: String.t(),
+ repo: InternalApi.RepoProxy.CreateBlankResponse.Repo.t()
+ }
+ defstruct [:hook_id, :wf_id, :pipeline_id, :branch_id, :repo]
+
+ field(:hook_id, 1, type: :string)
+ field(:wf_id, 2, type: :string)
+ field(:pipeline_id, 3, type: :string)
+ field(:branch_id, 4, type: :string)
+ field(:repo, 5, type: InternalApi.RepoProxy.CreateBlankResponse.Repo)
+end
+
+defmodule InternalApi.RepoProxy.CreateBlankResponse.Repo do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ owner: String.t(),
+ repo_name: String.t(),
+ branch_name: String.t(),
+ commit_sha: String.t(),
+ repository_id: String.t()
+ }
+ defstruct [:owner, :repo_name, :branch_name, :commit_sha, :repository_id]
+
+ field(:owner, 1, type: :string)
+ field(:repo_name, 2, type: :string)
+ field(:branch_name, 3, type: :string)
+ field(:commit_sha, 4, type: :string)
+ field(:repository_id, 5, type: :string)
+end
+
+defmodule InternalApi.RepoProxy.PullRequestUnmergeable do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ project_id: String.t(),
+ branch_name: String.t(),
+ timestamp: Google.Protobuf.Timestamp.t()
+ }
+ defstruct [:project_id, :branch_name, :timestamp]
+
+ field(:project_id, 1, type: :string)
+ field(:branch_name, 2, type: :string)
+ field(:timestamp, 3, type: Google.Protobuf.Timestamp)
+end
+
defmodule InternalApi.RepoProxy.RepoProxyService.Service do
@moduledoc false
use GRPC.Service, name: "InternalApi.RepoProxy.RepoProxyService"
@@ -271,6 +379,12 @@ defmodule InternalApi.RepoProxy.RepoProxyService.Service do
)
rpc(:Create, InternalApi.RepoProxy.CreateRequest, InternalApi.RepoProxy.CreateResponse)
+
+ rpc(
+ :CreateBlank,
+ InternalApi.RepoProxy.CreateBlankRequest,
+ InternalApi.RepoProxy.CreateBlankResponse
+ )
end
defmodule InternalApi.RepoProxy.RepoProxyService.Stub do
diff --git a/public-api/v1alpha/lib/internal_api/repository_integrator.pb.ex b/public-api/v1alpha/lib/internal_api/repository_integrator.pb.ex
index f9e57fe76..6d778f449 100644
--- a/public-api/v1alpha/lib/internal_api/repository_integrator.pb.ex
+++ b/public-api/v1alpha/lib/internal_api/repository_integrator.pb.ex
@@ -129,6 +129,20 @@ defmodule InternalApi.RepositoryIntegrator.GithubInstallationInfoResponse do
field(:installation_url, 3, type: :string)
end
+defmodule InternalApi.RepositoryIntegrator.InitGithubInstallationRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ defstruct []
+end
+
+defmodule InternalApi.RepositoryIntegrator.InitGithubInstallationResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ defstruct []
+end
+
defmodule InternalApi.RepositoryIntegrator.GetRepositoriesRequest do
@moduledoc false
use Protobuf, syntax: :proto3
@@ -182,6 +196,8 @@ defmodule InternalApi.RepositoryIntegrator.IntegrationType do
field(:GITHUB_OAUTH_TOKEN, 0)
field(:GITHUB_APP, 1)
field(:BITBUCKET, 2)
+ field(:GITLAB, 3)
+ field(:GIT, 4)
end
defmodule InternalApi.RepositoryIntegrator.IntegrationScope do
@@ -227,6 +243,12 @@ defmodule InternalApi.RepositoryIntegrator.RepositoryIntegratorService.Service d
InternalApi.RepositoryIntegrator.GithubInstallationInfoResponse
)
+ rpc(
+ :InitGithubInstallation,
+ InternalApi.RepositoryIntegrator.InitGithubInstallationRequest,
+ InternalApi.RepositoryIntegrator.InitGithubInstallationResponse
+ )
+
rpc(
:GetRepositories,
InternalApi.RepositoryIntegrator.GetRepositoriesRequest,
diff --git a/public-api/v1alpha/lib/internal_api/secrethub.pb.ex b/public-api/v1alpha/lib/internal_api/secrethub.pb.ex
index e8cc38c8c..26691d2ef 100644
--- a/public-api/v1alpha/lib/internal_api/secrethub.pb.ex
+++ b/public-api/v1alpha/lib/internal_api/secrethub.pb.ex
@@ -348,7 +348,8 @@ defmodule InternalApi.Secrethub.ListKeysetRequest do
order: integer,
secret_level: integer,
project_id: String.t(),
- deployment_target_id: String.t()
+ deployment_target_id: String.t(),
+ ignore_contents: boolean
}
defstruct [
:metadata,
@@ -357,7 +358,8 @@ defmodule InternalApi.Secrethub.ListKeysetRequest do
:order,
:secret_level,
:project_id,
- :deployment_target_id
+ :deployment_target_id,
+ :ignore_contents
]
field(:metadata, 1, type: InternalApi.Secrethub.RequestMeta)
@@ -367,6 +369,7 @@ defmodule InternalApi.Secrethub.ListKeysetRequest do
field(:secret_level, 5, type: InternalApi.Secrethub.Secret.SecretLevel, enum: true)
field(:project_id, 6, type: :string)
field(:deployment_target_id, 7, type: :string)
+ field(:ignore_contents, 8, type: :bool)
end
defmodule InternalApi.Secrethub.ListKeysetRequest.Order do
@@ -638,11 +641,13 @@ defmodule InternalApi.Secrethub.DestroyResponse do
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
- metadata: InternalApi.Secrethub.ResponseMeta.t()
+ metadata: InternalApi.Secrethub.ResponseMeta.t(),
+ id: String.t()
}
- defstruct [:metadata]
+ defstruct [:metadata, :id]
field(:metadata, 1, type: InternalApi.Secrethub.ResponseMeta)
+ field(:id, 2, type: :string)
end
defmodule InternalApi.Secrethub.GenerateOpenIDConnectTokenRequest do
@@ -664,7 +669,12 @@ defmodule InternalApi.Secrethub.GenerateOpenIDConnectTokenRequest do
git_ref_type: String.t(),
git_branch_name: String.t(),
git_pull_request_number: String.t(),
- org_username: String.t()
+ org_username: String.t(),
+ job_type: String.t(),
+ git_pull_request_branch: String.t(),
+ repo_slug: String.t(),
+ triggerer: String.t(),
+ project_name: String.t()
}
defstruct [
:org_id,
@@ -681,7 +691,12 @@ defmodule InternalApi.Secrethub.GenerateOpenIDConnectTokenRequest do
:git_ref_type,
:git_branch_name,
:git_pull_request_number,
- :org_username
+ :org_username,
+ :job_type,
+ :git_pull_request_branch,
+ :repo_slug,
+ :triggerer,
+ :project_name
]
field(:org_id, 1, type: :string)
@@ -699,6 +714,11 @@ defmodule InternalApi.Secrethub.GenerateOpenIDConnectTokenRequest do
field(:git_branch_name, 13, type: :string)
field(:git_pull_request_number, 14, type: :string)
field(:org_username, 15, type: :string)
+ field(:job_type, 16, type: :string)
+ field(:git_pull_request_branch, 17, type: :string)
+ field(:repo_slug, 18, type: :string)
+ field(:triggerer, 19, type: :string)
+ field(:project_name, 20, type: :string)
end
defmodule InternalApi.Secrethub.GenerateOpenIDConnectTokenResponse do
@@ -798,6 +818,92 @@ defmodule InternalApi.Secrethub.UpdateEncryptedResponse do
field(:encrypted_data, 3, type: InternalApi.Secrethub.EncryptedData)
end
+defmodule InternalApi.Secrethub.GetJWTConfigRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ org_id: String.t(),
+ project_id: String.t()
+ }
+ defstruct [:org_id, :project_id]
+
+ field(:org_id, 1, type: :string)
+ field(:project_id, 2, type: :string)
+end
+
+defmodule InternalApi.Secrethub.GetJWTConfigResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ org_id: String.t(),
+ project_id: String.t(),
+ claims: [InternalApi.Secrethub.ClaimConfig.t()],
+ is_active: boolean
+ }
+ defstruct [:org_id, :project_id, :claims, :is_active]
+
+ field(:org_id, 1, type: :string)
+ field(:project_id, 2, type: :string)
+ field(:claims, 3, repeated: true, type: InternalApi.Secrethub.ClaimConfig)
+ field(:is_active, 4, type: :bool)
+end
+
+defmodule InternalApi.Secrethub.UpdateJWTConfigRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ org_id: String.t(),
+ project_id: String.t(),
+ claims: [InternalApi.Secrethub.ClaimConfig.t()],
+ is_active: boolean
+ }
+ defstruct [:org_id, :project_id, :claims, :is_active]
+
+ field(:org_id, 1, type: :string)
+ field(:project_id, 2, type: :string)
+ field(:claims, 3, repeated: true, type: InternalApi.Secrethub.ClaimConfig)
+ field(:is_active, 4, type: :bool)
+end
+
+defmodule InternalApi.Secrethub.UpdateJWTConfigResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ org_id: String.t(),
+ project_id: String.t()
+ }
+ defstruct [:org_id, :project_id]
+
+ field(:org_id, 1, type: :string)
+ field(:project_id, 2, type: :string)
+end
+
+defmodule InternalApi.Secrethub.ClaimConfig do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ name: String.t(),
+ description: String.t(),
+ is_active: boolean,
+ is_mandatory: boolean,
+ is_aws_tag: boolean,
+ is_system_claim: boolean
+ }
+ defstruct [:name, :description, :is_active, :is_mandatory, :is_aws_tag, :is_system_claim]
+
+ field(:name, 1, type: :string)
+ field(:description, 2, type: :string)
+ field(:is_active, 3, type: :bool)
+ field(:is_mandatory, 4, type: :bool)
+ field(:is_aws_tag, 5, type: :bool)
+ field(:is_system_claim, 6, type: :bool)
+end
+
defmodule InternalApi.Secrethub.SecretService.Service do
@moduledoc false
use GRPC.Service, name: "InternalApi.Secrethub.SecretService"
@@ -848,6 +954,18 @@ defmodule InternalApi.Secrethub.SecretService.Service do
InternalApi.Secrethub.CheckoutManyRequest,
InternalApi.Secrethub.CheckoutManyResponse
)
+
+ rpc(
+ :GetJWTConfig,
+ InternalApi.Secrethub.GetJWTConfigRequest,
+ InternalApi.Secrethub.GetJWTConfigResponse
+ )
+
+ rpc(
+ :UpdateJWTConfig,
+ InternalApi.Secrethub.UpdateJWTConfigRequest,
+ InternalApi.Secrethub.UpdateJWTConfigResponse
+ )
end
defmodule InternalApi.Secrethub.SecretService.Stub do
diff --git a/public-api/v1alpha/lib/internal_api/self_hosted.pb.ex b/public-api/v1alpha/lib/internal_api/self_hosted.pb.ex
index 61261d4ad..68476e598 100644
--- a/public-api/v1alpha/lib/internal_api/self_hosted.pb.ex
+++ b/public-api/v1alpha/lib/internal_api/self_hosted.pb.ex
@@ -47,7 +47,8 @@ defmodule InternalApi.SelfHosted.Agent do
arch: String.t(),
disabled_at: Google.Protobuf.Timestamp.t(),
disabled: boolean,
- type_name: String.t()
+ type_name: String.t(),
+ organization_id: String.t()
}
defstruct [
:name,
@@ -62,7 +63,8 @@ defmodule InternalApi.SelfHosted.Agent do
:arch,
:disabled_at,
:disabled,
- :type_name
+ :type_name,
+ :organization_id
]
field(:name, 1, type: :string)
@@ -78,6 +80,7 @@ defmodule InternalApi.SelfHosted.Agent do
field(:disabled_at, 11, type: Google.Protobuf.Timestamp)
field(:disabled, 12, type: :bool)
field(:type_name, 13, type: :string)
+ field(:organization_id, 14, type: :string)
end
defmodule InternalApi.SelfHosted.Agent.State do
@@ -277,6 +280,36 @@ defmodule InternalApi.SelfHosted.ListResponse do
field(:page, 4, type: :int32)
end
+defmodule InternalApi.SelfHosted.ListKeysetRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ organization_id: String.t(),
+ cursor: String.t(),
+ page_size: integer
+ }
+ defstruct [:organization_id, :cursor, :page_size]
+
+ field(:organization_id, 1, type: :string)
+ field(:cursor, 2, type: :string)
+ field(:page_size, 3, type: :int32)
+end
+
+defmodule InternalApi.SelfHosted.ListKeysetResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ agent_types: [InternalApi.SelfHosted.AgentType.t()],
+ next_page_cursor: String.t()
+ }
+ defstruct [:agent_types, :next_page_cursor]
+
+ field(:agent_types, 1, repeated: true, type: InternalApi.SelfHosted.AgentType)
+ field(:next_page_cursor, 2, type: :string)
+end
+
defmodule InternalApi.SelfHosted.ListAgentsRequest do
@moduledoc false
use Protobuf, syntax: :proto3
@@ -506,6 +539,12 @@ defmodule InternalApi.SelfHosted.SelfHostedAgents.Service do
rpc(:List, InternalApi.SelfHosted.ListRequest, InternalApi.SelfHosted.ListResponse)
+ rpc(
+ :ListKeyset,
+ InternalApi.SelfHosted.ListKeysetRequest,
+ InternalApi.SelfHosted.ListKeysetResponse
+ )
+
rpc(
:ListAgents,
InternalApi.SelfHosted.ListAgentsRequest,
diff --git a/public-api/v1alpha/lib/internal_api/server_farm.job.pb.ex b/public-api/v1alpha/lib/internal_api/server_farm.job.pb.ex
index efc5ffbd5..9a5cdc765 100644
--- a/public-api/v1alpha/lib/internal_api/server_farm.job.pb.ex
+++ b/public-api/v1alpha/lib/internal_api/server_farm.job.pb.ex
@@ -39,7 +39,8 @@ defmodule InternalApi.ServerFarm.Job.Job do
self_hosted: boolean,
organization_id: String.t(),
build_req_id: String.t(),
- agent_name: String.t()
+ agent_name: String.t(),
+ agent_id: String.t()
}
defstruct [
:id,
@@ -66,7 +67,8 @@ defmodule InternalApi.ServerFarm.Job.Job do
:self_hosted,
:organization_id,
:build_req_id,
- :agent_name
+ :agent_name,
+ :agent_id
]
field(:id, 1, type: :string)
@@ -94,6 +96,7 @@ defmodule InternalApi.ServerFarm.Job.Job do
field(:organization_id, 23, type: :string)
field(:build_req_id, 24, type: :string)
field(:agent_name, 25, type: :string)
+ field(:agent_id, 27, type: :string)
end
defmodule InternalApi.ServerFarm.Job.Job.Timeline do
@@ -173,7 +176,11 @@ defmodule InternalApi.ServerFarm.Job.ListRequest do
finished_at_gte: Google.Protobuf.Timestamp.t(),
organization_id: String.t(),
only_debug_jobs: boolean,
- ppl_ids: [String.t()]
+ ppl_ids: [String.t()],
+ created_at_gte: Google.Protobuf.Timestamp.t(),
+ created_at_lte: Google.Protobuf.Timestamp.t(),
+ project_ids: [String.t()],
+ machine_types: [String.t()]
}
defstruct [
:page_size,
@@ -184,7 +191,11 @@ defmodule InternalApi.ServerFarm.Job.ListRequest do
:finished_at_gte,
:organization_id,
:only_debug_jobs,
- :ppl_ids
+ :ppl_ids,
+ :created_at_gte,
+ :created_at_lte,
+ :project_ids,
+ :machine_types
]
field(:page_size, 1, type: :int32)
@@ -196,6 +207,10 @@ defmodule InternalApi.ServerFarm.Job.ListRequest do
field(:organization_id, 7, type: :string)
field(:only_debug_jobs, 8, type: :bool)
field(:ppl_ids, 9, repeated: true, type: :string)
+ field(:created_at_gte, 10, type: Google.Protobuf.Timestamp)
+ field(:created_at_lte, 11, type: Google.Protobuf.Timestamp)
+ field(:project_ids, 13, repeated: true, type: :string)
+ field(:machine_types, 14, repeated: true, type: :string)
end
defmodule InternalApi.ServerFarm.Job.ListRequest.Order do
@@ -527,6 +542,202 @@ defmodule InternalApi.ServerFarm.Job.CanAttachResponse do
field(:message, 2, type: :string)
end
+defmodule InternalApi.ServerFarm.Job.CreateRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ requester_id: String.t(),
+ organization_id: String.t(),
+ project_id: String.t(),
+ branch_name: String.t(),
+ commit_sha: String.t(),
+ job_spec: InternalApi.ServerFarm.Job.JobSpec.t(),
+ restricted_job: boolean
+ }
+ defstruct [
+ :requester_id,
+ :organization_id,
+ :project_id,
+ :branch_name,
+ :commit_sha,
+ :job_spec,
+ :restricted_job
+ ]
+
+ field(:requester_id, 1, type: :string)
+ field(:organization_id, 2, type: :string)
+ field(:project_id, 3, type: :string)
+ field(:branch_name, 4, type: :string)
+ field(:commit_sha, 5, type: :string)
+ field(:job_spec, 6, type: InternalApi.ServerFarm.Job.JobSpec)
+ field(:restricted_job, 7, type: :bool)
+end
+
+defmodule InternalApi.ServerFarm.Job.CreateResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ status: InternalApi.ResponseStatus.t(),
+ job: InternalApi.ServerFarm.Job.Job.t()
+ }
+ defstruct [:status, :job]
+
+ field(:status, 1, type: InternalApi.ResponseStatus)
+ field(:job, 2, type: InternalApi.ServerFarm.Job.Job)
+end
+
+defmodule InternalApi.ServerFarm.Job.JobSpec do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ job_name: String.t(),
+ agent: InternalApi.ServerFarm.Job.JobSpec.Agent.t(),
+ secrets: [InternalApi.ServerFarm.Job.JobSpec.Secret.t()],
+ env_vars: [InternalApi.ServerFarm.Job.JobSpec.EnvVar.t()],
+ files: [InternalApi.ServerFarm.Job.JobSpec.File.t()],
+ commands: [String.t()],
+ epilogue_always_commands: [String.t()],
+ epilogue_on_pass_commands: [String.t()],
+ epilogue_on_fail_commands: [String.t()],
+ priority: integer,
+ execution_time_limit: integer
+ }
+ defstruct [
+ :job_name,
+ :agent,
+ :secrets,
+ :env_vars,
+ :files,
+ :commands,
+ :epilogue_always_commands,
+ :epilogue_on_pass_commands,
+ :epilogue_on_fail_commands,
+ :priority,
+ :execution_time_limit
+ ]
+
+ field(:job_name, 1, type: :string)
+ field(:agent, 2, type: InternalApi.ServerFarm.Job.JobSpec.Agent)
+ field(:secrets, 3, repeated: true, type: InternalApi.ServerFarm.Job.JobSpec.Secret)
+ field(:env_vars, 4, repeated: true, type: InternalApi.ServerFarm.Job.JobSpec.EnvVar)
+ field(:files, 5, repeated: true, type: InternalApi.ServerFarm.Job.JobSpec.File)
+ field(:commands, 6, repeated: true, type: :string)
+ field(:epilogue_always_commands, 7, repeated: true, type: :string)
+ field(:epilogue_on_pass_commands, 8, repeated: true, type: :string)
+ field(:epilogue_on_fail_commands, 9, repeated: true, type: :string)
+ field(:priority, 10, type: :int32)
+ field(:execution_time_limit, 11, type: :int32)
+end
+
+defmodule InternalApi.ServerFarm.Job.JobSpec.Agent do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ machine: InternalApi.ServerFarm.Job.JobSpec.Agent.Machine.t(),
+ containers: [InternalApi.ServerFarm.Job.JobSpec.Agent.Container.t()],
+ image_pull_secrets: [InternalApi.ServerFarm.Job.JobSpec.Agent.ImagePullSecret.t()]
+ }
+ defstruct [:machine, :containers, :image_pull_secrets]
+
+ field(:machine, 1, type: InternalApi.ServerFarm.Job.JobSpec.Agent.Machine)
+ field(:containers, 2, repeated: true, type: InternalApi.ServerFarm.Job.JobSpec.Agent.Container)
+
+ field(:image_pull_secrets, 3,
+ repeated: true,
+ type: InternalApi.ServerFarm.Job.JobSpec.Agent.ImagePullSecret
+ )
+end
+
+defmodule InternalApi.ServerFarm.Job.JobSpec.Agent.Machine do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ type: String.t(),
+ os_image: String.t()
+ }
+ defstruct [:type, :os_image]
+
+ field(:type, 1, type: :string)
+ field(:os_image, 2, type: :string)
+end
+
+defmodule InternalApi.ServerFarm.Job.JobSpec.Agent.Container do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ name: String.t(),
+ image: String.t(),
+ command: String.t(),
+ env_vars: [InternalApi.ServerFarm.Job.JobSpec.EnvVar.t()],
+ secrets: [InternalApi.ServerFarm.Job.JobSpec.Secret.t()]
+ }
+ defstruct [:name, :image, :command, :env_vars, :secrets]
+
+ field(:name, 1, type: :string)
+ field(:image, 2, type: :string)
+ field(:command, 3, type: :string)
+ field(:env_vars, 4, repeated: true, type: InternalApi.ServerFarm.Job.JobSpec.EnvVar)
+ field(:secrets, 5, repeated: true, type: InternalApi.ServerFarm.Job.JobSpec.Secret)
+end
+
+defmodule InternalApi.ServerFarm.Job.JobSpec.Agent.ImagePullSecret do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ name: String.t()
+ }
+ defstruct [:name]
+
+ field(:name, 1, type: :string)
+end
+
+defmodule InternalApi.ServerFarm.Job.JobSpec.Secret do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ name: String.t()
+ }
+ defstruct [:name]
+
+ field(:name, 1, type: :string)
+end
+
+defmodule InternalApi.ServerFarm.Job.JobSpec.EnvVar do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ name: String.t(),
+ value: String.t()
+ }
+ defstruct [:name, :value]
+
+ field(:name, 1, type: :string)
+ field(:value, 2, type: :string)
+end
+
+defmodule InternalApi.ServerFarm.Job.JobSpec.File do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ path: String.t(),
+ content: String.t()
+ }
+ defstruct [:path, :content]
+
+ field(:path, 1, type: :string)
+ field(:content, 2, type: :string)
+end
+
defmodule InternalApi.ServerFarm.Job.DebugSessionType do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
@@ -586,6 +797,12 @@ defmodule InternalApi.ServerFarm.Job.JobService.Service do
InternalApi.ServerFarm.Job.CanAttachRequest,
InternalApi.ServerFarm.Job.CanAttachResponse
)
+
+ rpc(
+ :Create,
+ InternalApi.ServerFarm.Job.CreateRequest,
+ InternalApi.ServerFarm.Job.CreateResponse
+ )
end
defmodule InternalApi.ServerFarm.Job.JobService.Stub do
From 2e5bbf3fbb1293a804f49dc82aea9750ef8b3ce9 Mon Sep 17 00:00:00 2001
From: Dejan K
Date: Mon, 23 Jun 2025 12:44:20 +0200
Subject: [PATCH 09/87] feat: audit logs for "Stop Job" action (#392)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
Added auditing for stop job operation via Web and API/CLI.
Introduced audit middleware to the `public-api-gateway` service to
capture and log these actions. The middleware is designed to be
extensible for future auditing of other API operations.
Related [task](https://github.com/renderedtext/tasks/issues/8123).
## ✅ Checklist
- [x] I have tested this change
- [x] ~This change requires documentation update~ - N/A
---
.semaphore/daily-builds.yml | 3 +
.semaphore/semaphore.yml | 3 +
front/lib/front/audit/events_decorator.ex | 8 +
.../front/audit/events_decorator/preloader.ex | 17 +-
.../front_web/controllers/job_controller.ex | 19 +-
.../front_web/templates/audit/index.html.eex | 3 +
public-api-gateway/Dockerfile | 28 +-
public-api-gateway/Makefile | 5 +-
.../api/clients/audit_client.go | 102 +
.../api/middleware/audit_middleware.go | 202 ++
.../api/middleware/audit_middleware_test.go | 261 ++
.../api/middleware/response_recorder.go | 29 +
.../api/middleware/response_recorder_test.go | 69 +
...docker-compose.yaml => docker-compose.yml} | 23 +-
public-api-gateway/go.mod | 9 +-
public-api-gateway/go.sum | 39 +-
public-api-gateway/main.go | 12 +-
public-api-gateway/protos/audit/audit.pb.go | 2353 +++++++++++++++++
.../protos/audit/audit_grpc.pb.go | 428 +++
.../test/jobs_v1alpha_server.go | 144 +
.../test/secrets_v1beta_server.go | 2 +
public-api-gateway/test/test.sh | 57 +-
22 files changed, 3787 insertions(+), 29 deletions(-)
create mode 100644 public-api-gateway/api/clients/audit_client.go
create mode 100644 public-api-gateway/api/middleware/audit_middleware.go
create mode 100644 public-api-gateway/api/middleware/audit_middleware_test.go
create mode 100644 public-api-gateway/api/middleware/response_recorder.go
create mode 100644 public-api-gateway/api/middleware/response_recorder_test.go
rename public-api-gateway/{docker-compose.yaml => docker-compose.yml} (68%)
create mode 100644 public-api-gateway/protos/audit/audit.pb.go
create mode 100644 public-api-gateway/protos/audit/audit_grpc.pb.go
create mode 100644 public-api-gateway/test/jobs_v1alpha_server.go
diff --git a/.semaphore/daily-builds.yml b/.semaphore/daily-builds.yml
index aa418446e..308cdf05f 100644
--- a/.semaphore/daily-builds.yml
+++ b/.semaphore/daily-builds.yml
@@ -1993,6 +1993,9 @@ blocks:
- name: "Test"
commands:
- make test
+ - name: "E2E Test"
+ commands:
+ - make test.e2e
- name: "Lint"
commands:
- make lint
diff --git a/.semaphore/semaphore.yml b/.semaphore/semaphore.yml
index 44f623116..bd4b0ac4d 100644
--- a/.semaphore/semaphore.yml
+++ b/.semaphore/semaphore.yml
@@ -2191,6 +2191,9 @@ blocks:
- name: "Test"
commands:
- make test
+ - name: "E2E Test"
+ commands:
+ - make test.e2e
- name: "Lint"
commands:
- make lint
diff --git a/front/lib/front/audit/events_decorator.ex b/front/lib/front/audit/events_decorator.ex
index fdb6c729e..729684ac8 100644
--- a/front/lib/front/audit/events_decorator.ex
+++ b/front/lib/front/audit/events_decorator.ex
@@ -33,6 +33,10 @@ defmodule Front.Audit.EventsDecorator do
field(:pipeline, Front.Models.Pipeline.t(), enforce: false)
field(:has_pipeline, String.t(), enforce: false, default: false)
+ field(:job_id, String.t(), enforce: false)
+ field(:job, Front.Models.Job.t(), enforce: false)
+ field(:has_job, String.t(), enforce: false, default: false)
+
field(:agent, Map.t(), enforce: false)
end
end
@@ -78,6 +82,10 @@ defmodule Front.Audit.EventsDecorator do
has_workflow: false,
pipeline_id: Map.get(metadata, "pipeline_id", nil),
+ # initially setting it to false, later the preloader can change it
+ has_job: false,
+ job_id: Map.get(metadata, "job_id", nil),
+
# inject agent data if the event is related to a self-hosted agent
agent: decorate_agent(event, metadata)
)
diff --git a/front/lib/front/audit/events_decorator/preloader.ex b/front/lib/front/audit/events_decorator/preloader.ex
index 7e5e1bd3a..713d67fcf 100644
--- a/front/lib/front/audit/events_decorator/preloader.ex
+++ b/front/lib/front/audit/events_decorator/preloader.ex
@@ -19,15 +19,18 @@ defmodule Front.Audit.EventsDecorator.Preloader do
project_ids = extract_unique_id_list(events, :project_id)
workflow_ids = extract_unique_id_list(events, :workflow_id)
pipeline_ids = extract_unique_id_list(events, :pipeline_id)
+ job_ids = extract_unique_id_list(events, :job_id)
projects = Front.Models.Project.find_many_by_ids(project_ids)
workflows = Front.Models.Workflow.find_many_by_ids(workflow_ids)
pipelines = Front.Models.Pipeline.find_many(pipeline_ids)
+ jobs = find_jobs_by_ids(job_ids)
inject(events, %{
projects: remove_nils(projects),
workflows: remove_nils(workflows),
- pipelines: remove_nils(pipelines)
+ pipelines: remove_nils(pipelines),
+ jobs: remove_nils(jobs)
})
end
@@ -36,11 +39,13 @@ defmodule Front.Audit.EventsDecorator.Preloader do
project = Enum.find(data.projects, fn p -> p.id == event.project_id end)
workflow = Enum.find(data.workflows, fn w -> w.id == event.workflow_id end)
pipeline = Enum.find(data.pipelines, fn p -> p.id == event.pipeline_id end)
+ job = Enum.find(data.jobs, fn j -> j.id == event.job_id end)
event
|> add_if_not_nil(project, :project, :has_project)
|> add_if_not_nil(workflow, :workflow, :has_workflow)
|> add_if_not_nil(pipeline, :pipeline, :has_pipeline)
+ |> add_if_not_nil(job, :job, :has_job)
end)
end
@@ -58,4 +63,14 @@ defmodule Front.Audit.EventsDecorator.Preloader do
end
defp remove_nils(arr), do: Enum.filter(arr, fn e -> e != nil end)
+
+ defp find_jobs_by_ids([]), do: []
+
+ defp find_jobs_by_ids([id | ids]) do
+ Front.Models.Job.find(id)
+ |> case do
+ nil -> find_jobs_by_ids(ids)
+ job -> [job | find_jobs_by_ids(ids)]
+ end
+ end
end
diff --git a/front/lib/front_web/controllers/job_controller.ex b/front/lib/front_web/controllers/job_controller.ex
index e6b2b63e1..33cf2213d 100644
--- a/front/lib/front_web/controllers/job_controller.ex
+++ b/front/lib/front_web/controllers/job_controller.ex
@@ -2,7 +2,7 @@ defmodule FrontWeb.JobController do
require Logger
use FrontWeb, :controller
- alias Front.Async
+ alias Front.{Async, Audit}
alias Front.MemoryCookie
alias Front.Models
alias FrontWeb.Plugs.{FetchPermissions, Header, PageAccess, PublicPageAccess, PutProjectAssigns}
@@ -216,6 +216,8 @@ defmodule FrontWeb.JobController do
case Front.Models.Job.stop(job_id, user_id) do
{:ok, _} ->
+ audit_log(conn, :Stopped, user_id, job_id)
+
conn
|> put_flash(:notice, "Job will be stopped shortly.")
|> redirect(to: job_path(conn, :show, job_id))
@@ -311,6 +313,21 @@ defmodule FrontWeb.JobController do
end)
end
+ def audit_log(conn, action, user_id, job_id) do
+ conn
+ |> Audit.new(:Job, action)
+ |> Audit.add(description: audit_desc(action))
+ |> Audit.add(resource_id: job_id)
+ |> Audit.metadata(requester_id: user_id)
+ |> Audit.metadata(project_id: conn.assigns.project.id)
+ |> Audit.metadata(project_name: conn.assigns.project.name)
+ |> Audit.metadata(pipeline_id: conn.assigns.job.ppl_id)
+ |> Audit.metadata(job_id: conn.assigns.job.id)
+ |> Audit.log()
+ end
+
+ defp audit_desc(:Stopped), do: "Stopped the job"
+
# Private
defp send_first_chunk(conn, next) do
diff --git a/front/lib/front_web/templates/audit/index.html.eex b/front/lib/front_web/templates/audit/index.html.eex
index 55f8784a0..df0c53a6a 100644
--- a/front/lib/front_web/templates/audit/index.html.eex
+++ b/front/lib/front_web/templates/audit/index.html.eex
@@ -34,6 +34,9 @@
Pipeline: <%= link event.pipeline.name, to: workflow_path(@conn, :show, event.workflow.id, pipeline_id: event.pipeline.id) %>
<% end %>
<% end %>
+ <%= if event.has_job do %>
+ Job: <%= link event.job.name, to: job_path(@conn, :show, event.job.id) %>
+ <% end %>
<% end %>
<%= event.description %>
diff --git a/public-api-gateway/Dockerfile b/public-api-gateway/Dockerfile
index bf9379f77..423c96df5 100644
--- a/public-api-gateway/Dockerfile
+++ b/public-api-gateway/Dockerfile
@@ -13,30 +13,34 @@ RUN echo "Build of $APP_NAME started"
RUN apt-get update -y && apt-get install --no-install-recommends -y ca-certificates unzip curl libc-bin libc6 \
&& apt-get clean && rm -f /var/lib/apt/lists/*_*
-WORKDIR /app
-COPY api api
-COPY go.mod go.mod
-COPY go.sum go.sum
-COPY main.go main.go
-
-FROM base AS dev
-
-COPY test test
-COPY scripts scripts
-COPY lint.toml lint.toml
-
WORKDIR /tmp
RUN curl -sL https://github.com/google/protobuf/releases/download/v3.3.0/protoc-3.3.0-linux-x86_64.zip -o protoc && \
unzip protoc && \
mv bin/protoc /usr/local/bin/protoc
WORKDIR /app
+
RUN go install github.com/mgechev/revive@v1.7.0
RUN go install gotest.tools/gotestsum@v1.12.1
RUN go install google.golang.org/protobuf/cmd/protoc-gen-go@latest
RUN go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@latest
+
+FROM base AS dev
+
+WORKDIR /app
+
+COPY api api
+COPY protos protos
+COPY go.mod go.mod
+COPY go.sum go.sum
+COPY main.go main.go
+
RUN rm -rf build && CGO_ENABLED=0 go build -o build/server main.go
+COPY test test
+COPY scripts scripts
+COPY lint.toml lint.toml
+
CMD [ "/bin/bash", "-c \"while sleep 1000; do :; done\"" ]
FROM ${RUNNER_IMAGE} AS runner
diff --git a/public-api-gateway/Makefile b/public-api-gateway/Makefile
index 6a90812d1..d68b2c271 100644
--- a/public-api-gateway/Makefile
+++ b/public-api-gateway/Makefile
@@ -29,8 +29,11 @@ api.checkout:
bin.build:
docker compose run --remove-orphans --rm $(VOLUME_BIND) app sh -c "rm -rf build && CGO_ENABLED=0 go build -o build/server main.go"
-test: bin.build
+test.e2e: bin.build
docker compose run --remove-orphans --rm $(VOLUME_BIND) app bash ./test/test.sh
+test:
+ docker compose run --remove-orphans --rm $(VOLUME_BIND) app gotestsum --format short-verbose --junitfile out/test-reports.xml --packages="./..." -- -p 1
+
lint:
docker compose run --remove-orphans --rm $(VOLUME_BIND) app revive -formatter friendly -config lint.toml ./...
diff --git a/public-api-gateway/api/clients/audit_client.go b/public-api-gateway/api/clients/audit_client.go
new file mode 100644
index 000000000..0a761e154
--- /dev/null
+++ b/public-api-gateway/api/clients/audit_client.go
@@ -0,0 +1,102 @@
+package clients
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "time"
+
+ "github.com/golang/glog"
+ "github.com/renderedtext/go-tackle"
+ "google.golang.org/protobuf/proto"
+
+ auditProto "github.com/semaphoreio/semaphore/public-api-gateway/protos/audit"
+)
+
+// AuditClient provides methods for sending audit events
+type AuditClient struct {
+ tacklePublisher *tackle.Publisher
+ amqpURL string
+}
+
+// AuditEventOptions contains options for creating an audit event
+type AuditEventOptions struct {
+ // UserID of the user performing the action
+ UserID string
+ // OrgID of the organization where the action is performed
+ OrgID string
+ // Resource type that is being audited
+ Resource auditProto.Event_Resource
+ // Operation being performed
+ Operation auditProto.Event_Operation
+ // Description of the audit event
+ Description string
+ // ResourceID of the affected resource
+ ResourceID string
+ // ResourceName of the affected resource
+ ResourceName string
+ // Medium through which the action was performed (e.g. API, CLI)
+ Medium auditProto.Event_Medium
+ // Additional metadata
+ Metadata map[string]string
+ // IP address of the client
+ IPAddress string
+ // Username of the user
+ Username string
+}
+
+// NewAuditClient creates a new audit client
+func NewAuditClient(amqpURL string) (*AuditClient, error) {
+ client := &AuditClient{
+ amqpURL: amqpURL,
+ }
+
+ if amqpURL == "" {
+ return nil, fmt.Errorf("AMQP URL is required")
+ }
+
+ tacklePublisher, err := tackle.NewPublisher(amqpURL, tackle.PublisherOptions{
+ ConnectionName: clientConnectionName(),
+ ConnectionTimeout: 5 * time.Second,
+ })
+
+ if err != nil {
+ return nil, fmt.Errorf("failed to create AMQP publisher: %w", err)
+ }
+
+ client.tacklePublisher = tacklePublisher
+
+ return client, nil
+}
+
+// SendAuditEvent sends an audit event via AMQP
+func (c *AuditClient) SendAuditEvent(ctx context.Context, event *auditProto.Event) error {
+ data, err := proto.Marshal(event)
+ if err != nil {
+ return fmt.Errorf("error marshaling audit event: %w", err)
+ }
+
+ err = c.tacklePublisher.PublishWithContext(ctx, &tackle.PublishParams{
+ Body: data,
+ Exchange: "audit",
+ RoutingKey: "log",
+ })
+
+ if err != nil {
+ glog.Errorf("Error publishing audit event: %v", err)
+ return fmt.Errorf("error publishing audit event: %w", err)
+ }
+
+ glog.Infof("Audit event published via AMQP: resource=%s, operation=%s, resource_id=%s, operation_id=%s", event.Resource.String(), event.Operation.String(), event.ResourceId, event.OperationId)
+
+ return nil
+}
+
+func clientConnectionName() string {
+ hostname := os.Getenv("HOSTNAME")
+ if hostname == "" {
+ return "public-api-gateway"
+ }
+
+ return hostname
+}
diff --git a/public-api-gateway/api/middleware/audit_middleware.go b/public-api-gateway/api/middleware/audit_middleware.go
new file mode 100644
index 000000000..b7306ae08
--- /dev/null
+++ b/public-api-gateway/api/middleware/audit_middleware.go
@@ -0,0 +1,202 @@
+package middleware
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "regexp"
+ "strings"
+
+ "github.com/golang/glog"
+ "github.com/google/uuid"
+ "github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
+ "google.golang.org/protobuf/types/known/timestamppb"
+
+ "github.com/semaphoreio/semaphore/public-api-gateway/api/clients"
+ auditProto "github.com/semaphoreio/semaphore/public-api-gateway/protos/audit"
+)
+
+type auditor func(r *http.Request, pathParams map[string]string) (auditProto.Event, error)
+
+var (
+ // errNotAuditable is returned when a request is not auditable
+ errNotAuditable = fmt.Errorf("path is not auditable")
+
+ // auditPaths maps regular expressions to auditors
+ // regular expressions are used to match request URL path
+ auditPaths = map[*regexp.Regexp]auditor{
+ regexp.MustCompile("/api/v1alpha/jobs/[0-9a-fA-F-]+/stop"): createStopJobAuditEvent,
+ }
+)
+
+// AuditMiddleware creates a new audit middleware function that implements runtime.Middleware.
+// This middleware will audit some requests to the API.
+func AuditMiddleware(auditClient *clients.AuditClient) runtime.Middleware {
+ // Return the middleware function that wraps the handler
+ return func(next runtime.HandlerFunc) runtime.HandlerFunc {
+ return auditMiddleware(next, auditClient)
+ }
+}
+
+func auditMiddleware(next runtime.HandlerFunc, auditClient *clients.AuditClient) runtime.HandlerFunc {
+ return func(w http.ResponseWriter, r *http.Request, pathParams map[string]string) {
+ // Check if this path should be audited
+ if !shouldAudit(r) {
+ next(w, r, pathParams)
+ return
+ }
+
+ // Create a response recorder to capture the response
+ rw := NewResponseRecorder(w)
+
+ // Serve the request with the response recorder
+ next(rw, r, pathParams)
+
+ // Log the status code
+ statusCode := rw.Status
+
+ // Only audit successful responses
+ if statusCode < 200 || statusCode >= 300 {
+ return
+ }
+
+ // Extract audit information
+ auditEvent, err := createAuditEvent(r, pathParams)
+ if err == errNotAuditable {
+ return
+ }
+ if err != nil {
+ glog.Errorf("Failed to create audit event: %v", err)
+ errResponse := fmt.Errorf("failed to create audit event: %v", err)
+ respondWithJSON(w, http.StatusInternalServerError, map[string]interface{}{
+ "error": errResponse.Error(),
+ })
+ return
+ }
+
+ err = auditClient.SendAuditEvent(r.Context(), &auditEvent)
+ if err != nil {
+ glog.Errorf("Failed to send audit event: %v", err)
+ errResponse := fmt.Errorf("failed to send audit event: %v", err)
+ respondWithJSON(w, http.StatusInternalServerError, map[string]interface{}{
+ "error": errResponse.Error(),
+ })
+ return
+ }
+ }
+}
+
+// shouldAudit determines if this handler should process the request
+func shouldAudit(r *http.Request) bool {
+ for rePath := range auditPaths {
+ if rePath.MatchString(r.URL.Path) {
+ return true
+ }
+ }
+ return false
+}
+
+// createAuditEvent creates an AuditEvent from a request
+func createAuditEvent(r *http.Request, pathParams map[string]string) (auditProto.Event, error) {
+ for rePath, auditor := range auditPaths {
+ if rePath.MatchString(r.URL.Path) {
+ return auditor(r, pathParams)
+ }
+ }
+ return auditProto.Event{}, errNotAuditable
+}
+
+func createStopJobAuditEvent(r *http.Request, pathParams map[string]string) (auditEvent auditProto.Event, err error) {
+ if r.Method != http.MethodPost {
+ return auditProto.Event{}, errNotAuditable
+ }
+ resourceID, ok := pathParams["job_id"]
+ if !ok {
+ return auditProto.Event{}, errNotAuditable
+ }
+
+ metadataMap := map[string]string{
+ "job_id": resourceID,
+ "requester_id": r.Header.Get("x-semaphore-user-id"),
+ }
+
+ metadata, err := json.Marshal(metadataMap)
+ if err != nil {
+ err = fmt.Errorf("error marshaling metadata: %v", err)
+ return
+ }
+ auditEvent = createDefaultAuditEvent(r, auditProto.Event_Job, "")
+ auditEvent.Operation = auditProto.Event_Stopped
+ auditEvent.Description = "Stopped the job"
+ auditEvent.ResourceId = resourceID
+ auditEvent.Metadata = string(metadata)
+
+ return
+}
+
+func createDefaultAuditEvent(r *http.Request, resource auditProto.Event_Resource, resourceName string) auditProto.Event {
+ // Extract user information from request
+ userID := r.Header.Get("x-semaphore-user-id")
+ orgID := r.Header.Get("x-semaphore-org-id")
+ ipAddress := detectRemoteAddress(r)
+ medium := detectEventMedium(r)
+ return auditProto.Event{
+ UserId: userID,
+ OrgId: orgID,
+ Resource: resource,
+ ResourceName: resourceName,
+ OperationId: uuid.NewString(),
+ Description: "",
+ Timestamp: timestamppb.Now(),
+ Medium: medium,
+ IpAddress: ipAddress,
+ Metadata: "{}",
+ }
+}
+
+// detectRemoteAddress extracts the client IP address from an HTTP request,
+// taking into account various proxy headers
+func detectRemoteAddress(r *http.Request) string {
+ // Check for X-Forwarded-For header
+ if xff := r.Header.Get("X-Forwarded-For"); xff != "" {
+ // X-Forwarded-For can contain multiple IPs, the second from the end is the original client
+ ips := strings.Split(xff, ",")
+ if len(ips) >= 2 {
+ ip := strings.TrimSpace(ips[len(ips)-2])
+ if ip != "" {
+ return ip
+ }
+ }
+ }
+
+ // Check for X-Real-IP header
+ if xrip := r.Header.Get("X-Real-IP"); xrip != "" {
+ return xrip
+ }
+
+ // Fall back to RemoteAddr if no proxy headers are found
+ return r.RemoteAddr
+}
+
+func detectEventMedium(r *http.Request) auditProto.Event_Medium {
+ if strings.Contains(r.UserAgent(), "SemaphoreCLI") {
+ return auditProto.Event_CLI
+ }
+ return auditProto.Event_API
+}
+
+// respondWithJSON writes a JSON response with the given status code and payload
+func respondWithJSON(w http.ResponseWriter, statusCode int, payload interface{}) {
+ response, err := json.Marshal(payload)
+ if err != nil {
+ return
+ }
+
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(statusCode)
+
+ _, err = w.Write(response)
+ if err != nil {
+ glog.Errorf("Failed to write response: %v", err)
+ }
+}
diff --git a/public-api-gateway/api/middleware/audit_middleware_test.go b/public-api-gateway/api/middleware/audit_middleware_test.go
new file mode 100644
index 000000000..32f9cb3ac
--- /dev/null
+++ b/public-api-gateway/api/middleware/audit_middleware_test.go
@@ -0,0 +1,261 @@
+package middleware
+
+import (
+ "context"
+ "errors"
+ "net/http"
+ "net/http/httptest"
+ "testing"
+
+ "github.com/google/uuid"
+ "github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
+
+ auditProto "github.com/semaphoreio/semaphore/public-api-gateway/protos/audit"
+)
+
+// testAuditClient is the minimal surface of the audit client that the
+// middleware under test needs; it lets the tests substitute MockAuditClient
+// for the real AMQP-backed client.
+type testAuditClient interface {
+	SendAuditEvent(ctx context.Context, event *auditProto.Event) error
+	Close() error
+}
+
+// MockAuditClient is a mock implementation of the audit client used to
+// observe (or force the failure of) audit-event delivery in tests.
+type MockAuditClient struct {
+	sentEvents []*auditProto.Event // every event successfully "delivered"
+	shouldFail bool                // when true, SendAuditEvent returns an error
+}
+
+// NewMockAuditClient builds a mock client with an empty, non-nil event log
+// and delivery failures disabled.
+func NewMockAuditClient() *MockAuditClient {
+	client := &MockAuditClient{}
+	client.sentEvents = []*auditProto.Event{}
+	return client
+}
+
+// SendAuditEvent records the event in sentEvents, or returns an error when
+// the mock has been configured to fail (shouldFail).
+func (m *MockAuditClient) SendAuditEvent(ctx context.Context, event *auditProto.Event) error {
+	if !m.shouldFail {
+		m.sentEvents = append(m.sentEvents, event)
+		return nil
+	}
+	return errors.New("mock audit client error")
+}
+
+// Close satisfies testAuditClient; the mock holds no resources to release.
+func (m *MockAuditClient) Close() error {
+	return nil
+}
+
+// testAuditMiddleware is a test version of auditMiddleware that accepts our mock client
+// via the narrower testAuditClient interface. It mirrors the production flow:
+// pass through when no client is configured or the request is not auditable,
+// record the downstream status code, and emit an audit event only for 2xx
+// responses.
+func testAuditMiddleware(next runtime.HandlerFunc, client testAuditClient) runtime.HandlerFunc {
+	return func(w http.ResponseWriter, r *http.Request, pathParams map[string]string) {
+		// No client configured: behave as a transparent pass-through.
+		if client == nil {
+			next(w, r, pathParams)
+			return
+		}
+		// shouldAudit decides from the request whether this call is audit-worthy.
+		if !shouldAudit(r) {
+			next(w, r, pathParams)
+			return
+		}
+
+		// Wrap the writer so the handler's status code can be inspected after it runs.
+		rw := NewResponseRecorder(w)
+
+		next(rw, r, pathParams)
+
+		// Only successful (2xx) operations are audited.
+		statusCode := rw.Status
+		if statusCode < 200 || statusCode >= 300 {
+			return
+		}
+
+		auditEvent, err := createAuditEvent(r, pathParams)
+		if err != nil {
+			return
+		}
+
+		// Delivery errors are deliberately ignored — fire-and-forget, matching
+		// the production middleware (see "handle audit client errors" subtest).
+		client.SendAuditEvent(r.Context(), &auditEvent)
+	}
+}
+
+// TestAuditMiddleware tests the audit middleware functionality end-to-end
+// through testAuditMiddleware: auditable requests emit exactly one event,
+// non-job routes and failed responses emit none, and client failures never
+// break the request.
+func TestAuditMiddleware(t *testing.T) {
+	// Default downstream handler: succeeds with a small JSON body.
+	testHandler := func(w http.ResponseWriter, r *http.Request, pathParams map[string]string) {
+		w.WriteHeader(http.StatusOK)
+		w.Write([]byte(`{"id": "test-job-id"}`))
+	}
+
+	// Happy path: a 2xx job-stop request produces one Job/Stopped event
+	// carrying the job id from pathParams.
+	t.Run("Should audit job stop requests", func(t *testing.T) {
+		mockClient := NewMockAuditClient()
+
+		middleware := func(next runtime.HandlerFunc) runtime.HandlerFunc {
+			return testAuditMiddleware(next, mockClient)
+		}
+
+		handlerWithMiddleware := middleware(testHandler)
+		jobID := uuid.NewString()
+		req := httptest.NewRequest("POST", "/api/v1alpha/jobs/"+jobID+"/stop", nil)
+		req.Header.Set("x-semaphore-user-id", "user-123")
+		req.Header.Set("x-semaphore-org-id", "org-123")
+
+		rr := httptest.NewRecorder()
+
+		pathParams := map[string]string{
+			"job_id": jobID,
+		}
+		handlerWithMiddleware(rr, req, pathParams)
+
+		if rr.Code != http.StatusOK {
+			t.Errorf("Expected status code %d, got %d", http.StatusOK, rr.Code)
+		}
+
+		if len(mockClient.sentEvents) != 1 {
+			t.Errorf("Expected 1 audit event to be sent, got %d", len(mockClient.sentEvents))
+			return
+		}
+
+		event := mockClient.sentEvents[0]
+		if event.Resource != auditProto.Event_Job {
+			t.Errorf("Expected resource to be Job, got %v", event.Resource)
+		}
+		if event.Operation != auditProto.Event_Stopped {
+			t.Errorf("Expected operation to be Stopped, got %v", event.Operation)
+		}
+		if event.ResourceId != jobID {
+			t.Errorf("Expected resource ID to be %s, got %s", jobID, event.ResourceId)
+		}
+	})
+
+	// Routes that shouldAudit rejects must pass through without any event.
+	t.Run("Should not audit non-job requests", func(t *testing.T) {
+		mockClient := NewMockAuditClient()
+
+		middleware := func(next runtime.HandlerFunc) runtime.HandlerFunc {
+			return testAuditMiddleware(next, mockClient)
+		}
+
+		handlerWithMiddleware := middleware(testHandler)
+
+		req := httptest.NewRequest("GET", "/api/v1alpha/dashboards", nil)
+
+		rr := httptest.NewRecorder()
+
+		handlerWithMiddleware(rr, req, map[string]string{})
+
+		if rr.Code != http.StatusOK {
+			t.Errorf("Expected status code %d, got %d", http.StatusOK, rr.Code)
+		}
+
+		if len(mockClient.sentEvents) != 0 {
+			t.Errorf("Expected no audit events to be sent, got %d", len(mockClient.sentEvents))
+		}
+	})
+
+	// Non-2xx responses are not audited (status gate in the middleware).
+	t.Run("Should not audit failed requests", func(t *testing.T) {
+		mockClient := NewMockAuditClient()
+
+		errorHandler := func(w http.ResponseWriter, r *http.Request, pathParams map[string]string) {
+			w.WriteHeader(http.StatusBadRequest)
+		}
+
+		middleware := func(next runtime.HandlerFunc) runtime.HandlerFunc {
+			return testAuditMiddleware(next, mockClient)
+		}
+
+		handlerWithError := middleware(errorHandler)
+
+		jobID := uuid.NewString()
+		req := httptest.NewRequest("POST", "/api/v1alpha/jobs/"+jobID+"/stop", nil)
+
+		rr := httptest.NewRecorder()
+
+		pathParams := map[string]string{
+			"job_id": jobID,
+		}
+		handlerWithError(rr, req, pathParams)
+
+		if rr.Code != http.StatusBadRequest {
+			t.Errorf("Expected status code %d, got %d", http.StatusBadRequest, rr.Code)
+		}
+
+		if len(mockClient.sentEvents) != 0 {
+			t.Errorf("Expected no audit events to be sent, got %d", len(mockClient.sentEvents))
+		}
+	})
+
+	// Audit delivery is fire-and-forget: a failing client must not change
+	// the HTTP response the caller sees.
+	t.Run("Should handle audit client errors gracefully", func(t *testing.T) {
+		mockClient := NewMockAuditClient()
+		mockClient.shouldFail = true
+
+		middleware := func(next runtime.HandlerFunc) runtime.HandlerFunc {
+			return testAuditMiddleware(next, mockClient)
+		}
+
+		handlerWithMiddleware := middleware(testHandler)
+
+		jobID := uuid.NewString()
+		req := httptest.NewRequest("POST", "/api/v1alpha/jobs/"+jobID+"/stop", nil)
+		req.Header.Set("x-semaphore-user-id", "user-123")
+		req.Header.Set("x-semaphore-org-id", "org-123")
+
+		rr := httptest.NewRecorder()
+
+		pathParams := map[string]string{
+			"job_id": jobID,
+		}
+		handlerWithMiddleware(rr, req, pathParams)
+
+		if rr.Code != http.StatusOK {
+			t.Errorf("Expected status code %d, got %d", http.StatusOK, rr.Code)
+		}
+	})
+}
+
+// TestDetectRemoteAddress tests the detectRemoteAddress function across its
+// three resolution paths: X-Forwarded-For, X-Real-IP, and the RemoteAddr
+// fallback.
+func TestDetectRemoteAddress(t *testing.T) {
+	// With two XFF entries, the second-from-last ("10.0.0.1") is the client
+	// as seen by the trusted edge proxy.
+	t.Run("Should detect IP from X-Forwarded-For", func(t *testing.T) {
+		req := httptest.NewRequest("GET", "/", nil)
+		req.Header.Set("X-Forwarded-For", "10.0.0.1, 192.168.1.1")
+
+		ip := detectRemoteAddress(req)
+		if ip != "10.0.0.1" {
+			t.Errorf("Expected IP 10.0.0.1, got %s", ip)
+		}
+	})
+
+	t.Run("Should detect IP from X-Real-IP", func(t *testing.T) {
+		req := httptest.NewRequest("GET", "/", nil)
+		req.Header.Set("X-Real-IP", "192.168.1.1")
+
+		ip := detectRemoteAddress(req)
+		if ip != "192.168.1.1" {
+			t.Errorf("Expected IP 192.168.1.1, got %s", ip)
+		}
+	})
+
+	// Without proxy headers the raw RemoteAddr (including port) is returned.
+	t.Run("Should fall back to RemoteAddr", func(t *testing.T) {
+		req := httptest.NewRequest("GET", "/", nil)
+		req.RemoteAddr = "127.0.0.1:1234"
+
+		ip := detectRemoteAddress(req)
+		if ip != "127.0.0.1:1234" {
+			t.Errorf("Expected IP 127.0.0.1:1234, got %s", ip)
+		}
+	})
+}
+
+// TestDetectEventMedium tests the detectEventMedium function: the CLI medium
+// is chosen only when the User-Agent contains "SemaphoreCLI"; everything
+// else is classified as API.
+func TestDetectEventMedium(t *testing.T) {
+	t.Run("Should detect CLI medium", func(t *testing.T) {
+		req := httptest.NewRequest("GET", "/", nil)
+		req.Header.Set("User-Agent", "SemaphoreCLI/1.0")
+
+		medium := detectEventMedium(req)
+		if medium != auditProto.Event_CLI {
+			t.Errorf("Expected medium CLI, got %v", medium)
+		}
+	})
+
+	t.Run("Should default to API medium", func(t *testing.T) {
+		req := httptest.NewRequest("GET", "/", nil)
+		req.Header.Set("User-Agent", "Mozilla/5.0")
+
+		medium := detectEventMedium(req)
+		if medium != auditProto.Event_API {
+			t.Errorf("Expected medium API, got %v", medium)
+		}
+	})
+}
diff --git a/public-api-gateway/api/middleware/response_recorder.go b/public-api-gateway/api/middleware/response_recorder.go
new file mode 100644
index 000000000..6b0897ac9
--- /dev/null
+++ b/public-api-gateway/api/middleware/response_recorder.go
@@ -0,0 +1,29 @@
+package middleware
+
+import (
+ "net/http"
+)
+
+// ResponseRecorder is an http.ResponseWriter that records its status code
+// while delegating all other behavior to the wrapped writer.
+//
+// NOTE(review): the wrapper does not forward optional interfaces such as
+// http.Flusher or http.Hijacker — confirm no downstream handler relies on
+// them before using this in streaming/upgrade paths.
+type ResponseRecorder struct {
+	http.ResponseWriter
+	Status int // last status passed to WriteHeader; defaults to 200 OK
+}
+
+// NewResponseRecorder wraps w, pre-setting the recorded status to 200 OK so
+// handlers that write a body without calling WriteHeader are still recorded
+// with net/http's implicit success status.
+func NewResponseRecorder(w http.ResponseWriter) *ResponseRecorder {
+	rec := &ResponseRecorder{Status: http.StatusOK}
+	rec.ResponseWriter = w
+	return rec
+}
+
+// WriteHeader records the status code before forwarding it to the wrapped
+// writer, so middleware can inspect the result after the handler returns.
+func (r *ResponseRecorder) WriteHeader(status int) {
+	r.Status = status
+	r.ResponseWriter.WriteHeader(status)
+}
+
+// Write forwards the body to the underlying ResponseWriter. If WriteHeader
+// was never called, Status keeps its 200 default, which matches net/http's
+// implicit-200 behavior on first write.
+func (r *ResponseRecorder) Write(b []byte) (int, error) {
+	return r.ResponseWriter.Write(b)
+}
diff --git a/public-api-gateway/api/middleware/response_recorder_test.go b/public-api-gateway/api/middleware/response_recorder_test.go
new file mode 100644
index 000000000..4fc57fe8d
--- /dev/null
+++ b/public-api-gateway/api/middleware/response_recorder_test.go
@@ -0,0 +1,69 @@
+package middleware
+
+import (
+ "net/http"
+ "net/http/httptest"
+ "testing"
+)
+
+// TestResponseRecorder verifies that the recorder captures status codes,
+// forwards bodies verbatim to the wrapped writer, and defaults to 200 OK.
+func TestResponseRecorder(t *testing.T) {
+	// WriteHeader must be visible both on the recorder and downstream.
+	t.Run("Should record status code", func(t *testing.T) {
+		w := httptest.NewRecorder()
+
+		rr := NewResponseRecorder(w)
+
+		rr.WriteHeader(http.StatusCreated)
+
+		if rr.Status != http.StatusCreated {
+			t.Errorf("Expected status code %d, got %d", http.StatusCreated, rr.Status)
+		}
+
+		if w.Code != http.StatusCreated {
+			t.Errorf("Expected underlying writer status code %d, got %d", http.StatusCreated, w.Code)
+		}
+	})
+
+	// Write is a pure pass-through: byte count and body reach the wrapped writer.
+	t.Run("Should record response body", func(t *testing.T) {
+		w := httptest.NewRecorder()
+
+		rr := NewResponseRecorder(w)
+
+		testData := []byte("test response body")
+		n, err := rr.Write(testData)
+
+		if err != nil {
+			t.Errorf("Expected no error, got %v", err)
+		}
+		if n != len(testData) {
+			t.Errorf("Expected %d bytes written, got %d", len(testData), n)
+		}
+
+		if w.Body.String() != "test response body" {
+			t.Errorf("Expected underlying writer body 'test response body', got '%s'", w.Body.String())
+		}
+	})
+
+	t.Run("Should default to 200 OK status", func(t *testing.T) {
+		w := httptest.NewRecorder()
+
+		rr := NewResponseRecorder(w)
+
+		if rr.Status != http.StatusOK {
+			t.Errorf("Expected default status code %d, got %d", http.StatusOK, rr.Status)
+		}
+	})
+
+	// Multiple writes must concatenate in order on the wrapped writer.
+	t.Run("Should handle multiple writes", func(t *testing.T) {
+		w := httptest.NewRecorder()
+
+		rr := NewResponseRecorder(w)
+
+		rr.Write([]byte("first "))
+		rr.Write([]byte("second "))
+		rr.Write([]byte("third"))
+
+		if w.Body.String() != "first second third" {
+			t.Errorf("Expected underlying writer body 'first second third', got '%s'", w.Body.String())
+		}
+	})
+}
diff --git a/public-api-gateway/docker-compose.yaml b/public-api-gateway/docker-compose.yml
similarity index 68%
rename from public-api-gateway/docker-compose.yaml
rename to public-api-gateway/docker-compose.yml
index c25023a97..cb1f9b2b3 100644
--- a/public-api-gateway/docker-compose.yaml
+++ b/public-api-gateway/docker-compose.yml
@@ -11,9 +11,12 @@ services:
ssh:
- default
cache_from:
- - ${MAIN_IMAGE:-public-api-gateway}:${IMAGE_TAG:-latest}
+ - ${IMAGE:-public-api-gateway}:${IMAGE_TAG:-latest}
ports:
- "8080:8080"
+ volumes:
+ - .:/app
+ - go-pkg-cache:/go
environment:
SSH_AUTH_SOCK: /ssh-agent
@@ -24,5 +27,21 @@ services:
JOBS_V1ALPHA_PUBLIC_GRPC_API_ENDPOINT: ${JOBS_V1ALPHA_PUBLIC_GRPC_API_ENDPOINT:-jobs:8080}
NOTIFICATIONS_V1ALPHA_PUBLIC_GRPC_API_ENDPOINT: ${NOTIFICATIONS_V1ALPHA_PUBLIC_GRPC_API_ENDPOINT:-notifications:8080}
ARTIFACTS_V1_PUBLIC_GRPC_API_ENDPOINT: ${ARTIFACTS_V1_PUBLIC_GRPC_API_ENDPOINT:-artifacts:8080}
-
+ AMQP_URL: ${AMQP_URL:-amqp://guest:guest@rabbitmq:5672}
+ links:
+ - rabbitmq:rabbitmq
+ depends_on:
+ - rabbitmq
tty: true
+
+ rabbitmq:
+ image: rabbitmq:3-management
+ ports:
+ - "5672:5672"
+ - "15672:15672"
+ environment:
+ RABBITMQ_DEFAULT_USER: guest
+ RABBITMQ_DEFAULT_PASS: guest
+
+volumes:
+ go-pkg-cache:
diff --git a/public-api-gateway/go.mod b/public-api-gateway/go.mod
index d26109770..1e7faa0db 100644
--- a/public-api-gateway/go.mod
+++ b/public-api-gateway/go.mod
@@ -5,15 +5,18 @@ go 1.22
require (
github.com/golang/glog v1.2.4
github.com/golang/protobuf v1.5.4
- github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0
+ github.com/google/uuid v1.6.0
+ github.com/grpc-ecosystem/grpc-gateway/v2 v2.21.0
+ github.com/renderedtext/go-tackle v0.0.0-20231226193542-c913a4af4f94
golang.org/x/net v0.33.0
google.golang.org/grpc v1.64.1
google.golang.org/protobuf v1.34.2
)
require (
+ github.com/rabbitmq/amqp091-go v1.9.0 // indirect
golang.org/x/sys v0.28.0 // indirect
golang.org/x/text v0.21.0 // indirect
- google.golang.org/genproto/googleapis/api v0.0.0-20240610135401-a8a62080eff3 // indirect
- google.golang.org/genproto/googleapis/rpc v0.0.0-20240610135401-a8a62080eff3 // indirect
+ google.golang.org/genproto/googleapis/api v0.0.0-20240723171418-e6d459c13d2a // indirect
+ google.golang.org/genproto/googleapis/rpc v0.0.0-20240723171418-e6d459c13d2a // indirect
)
diff --git a/public-api-gateway/go.sum b/public-api-gateway/go.sum
index cd359ac9b..a518354ed 100644
--- a/public-api-gateway/go.sum
+++ b/public-api-gateway/go.sum
@@ -1,22 +1,49 @@
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/golang/glog v1.2.4 h1:CNNw5U8lSiiBk7druxtSHHTsRWcxKoac6kZKm2peBBc=
github.com/golang/glog v1.2.4/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
-github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0 h1:bkypFPDjIYGfCYD5mRBvpqxfYX1YCS1PXdKYWi8FsN0=
-github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0/go.mod h1:P+Lt/0by1T8bfcF3z737NnSbmxQAppXMRziHUxPOC8k=
+github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
+github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/grpc-ecosystem/grpc-gateway/v2 v2.21.0 h1:CWyXh/jylQWp2dtiV33mY4iSSp6yf4lmn+c7/tN+ObI=
+github.com/grpc-ecosystem/grpc-gateway/v2 v2.21.0/go.mod h1:nCLIt0w3Ept2NwF8ThLmrppXsfT07oC8k0XNDxd8sVU=
+github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
+github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
+github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/rabbitmq/amqp091-go v1.9.0 h1:qrQtyzB4H8BQgEuJwhmVQqVHB9O4+MNDJCCAcpc3Aoo=
+github.com/rabbitmq/amqp091-go v1.9.0/go.mod h1:+jPrT9iY2eLjRaMSRHUhc3z14E/l85kv/f+6luSD3pc=
+github.com/renderedtext/go-tackle v0.0.0-20231226193542-c913a4af4f94 h1:XynJJlfKWESMTlCM1fc7LDlPiQTvOPrRDQTiX6nyQiY=
+github.com/renderedtext/go-tackle v0.0.0-20231226193542-c913a4af4f94/go.mod h1:IfWH6x6erQ2Y4C7+BdP/fzOCN+8Szs2atOc6vUDOnvY=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
+github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
+github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
+github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
+go.uber.org/goleak v1.2.1 h1:NBol2c7O1ZokfZ0LEU9K6Whx/KnwvepVetCUhtKja4A=
+go.uber.org/goleak v1.2.1/go.mod h1:qlT2yGI9QafXHhZZLxlSuNsMw3FFLxBr+tBRlmO1xH4=
golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
-google.golang.org/genproto/googleapis/api v0.0.0-20240610135401-a8a62080eff3 h1:QW9+G6Fir4VcRXVH8x3LilNAb6cxBGLa6+GM4hRwexE=
-google.golang.org/genproto/googleapis/api v0.0.0-20240610135401-a8a62080eff3/go.mod h1:kdrSS/OiLkPrNUpzD4aHgCq2rVuC/YRxok32HXZ4vRE=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20240610135401-a8a62080eff3 h1:9Xyg6I9IWQZhRVfCWjKK+l6kI0jHcPesVlMnT//aHNo=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20240610135401-a8a62080eff3/go.mod h1:EfXuqaE1J41VCDicxHzUDm+8rk+7ZdXzHV0IhO/I6s0=
+google.golang.org/genproto/googleapis/api v0.0.0-20240723171418-e6d459c13d2a h1:YIa/rzVqMEokBkPtydCkx1VLmv3An1Uw7w1P1m6EhOY=
+google.golang.org/genproto/googleapis/api v0.0.0-20240723171418-e6d459c13d2a/go.mod h1:AHT0dDg3SoMOgZGnZk29b5xTbPHMoEC8qthmBLJCpys=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20240723171418-e6d459c13d2a h1:hqK4+jJZXCU4pW7jsAdGOVFIfLHQeV7LaizZKnZ84HI=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20240723171418-e6d459c13d2a/go.mod h1:Ue6ibwXGpU+dqIcODieyLOcgj7z8+IcskoNIgZxtrFY=
google.golang.org/grpc v1.64.1 h1:LKtvyfbX3UGVPFcGqJ9ItpVWW6oN/2XqTxfAnwRRXiA=
google.golang.org/grpc v1.64.1/go.mod h1:hiQF4LFZelK2WKaP6W0L92zGHtiQdZxk8CrSdvyjeP0=
google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg=
google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/public-api-gateway/main.go b/public-api-gateway/main.go
index a019379c5..4d5b990e6 100644
--- a/public-api-gateway/main.go
+++ b/public-api-gateway/main.go
@@ -2,6 +2,7 @@ package main
import (
"flag"
+ "fmt"
"net/http"
"net/textproto"
"os"
@@ -17,8 +18,10 @@ import (
"google.golang.org/protobuf/encoding/protojson"
artifacts "github.com/semaphoreio/semaphore/public-api-gateway/api/artifacts.v1"
+ "github.com/semaphoreio/semaphore/public-api-gateway/api/clients"
dashboards "github.com/semaphoreio/semaphore/public-api-gateway/api/dashboards.v1alpha"
jobs "github.com/semaphoreio/semaphore/public-api-gateway/api/jobs.v1alpha"
+ middleware "github.com/semaphoreio/semaphore/public-api-gateway/api/middleware"
notifications "github.com/semaphoreio/semaphore/public-api-gateway/api/notifications.v1alpha"
projectSecrets "github.com/semaphoreio/semaphore/public-api-gateway/api/project_secrets.v1"
secrets "github.com/semaphoreio/semaphore/public-api-gateway/api/secrets.v1beta"
@@ -63,7 +66,14 @@ func run() error {
var err error
- mux := runtime.NewServeMux(runtime.WithIncomingHeaderMatcher(headerMatcher),
+ auditClient, err := clients.NewAuditClient(os.Getenv("AMQP_URL"))
+ if err != nil {
+ return fmt.Errorf("failed to initialize audit client: %v", err)
+ }
+
+ mux := runtime.NewServeMux(
+ runtime.WithMiddlewares(middleware.AuditMiddleware(auditClient)),
+ runtime.WithIncomingHeaderMatcher(headerMatcher),
runtime.WithMarshalerOption(runtime.MIMEWildcard, &runtime.HTTPBodyMarshaler{
Marshaler: &runtime.JSONPb{
MarshalOptions: protojson.MarshalOptions{
diff --git a/public-api-gateway/protos/audit/audit.pb.go b/public-api-gateway/protos/audit/audit.pb.go
new file mode 100644
index 000000000..452b0dd7d
--- /dev/null
+++ b/public-api-gateway/protos/audit/audit.pb.go
@@ -0,0 +1,2353 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// versions:
+// protoc-gen-go v1.35.2
+// protoc v3.3.0
+// source: audit.proto
+
+package audit
+
+import (
+ empty "github.com/golang/protobuf/ptypes/empty"
+ timestamp "github.com/golang/protobuf/ptypes/timestamp"
+ protoreflect "google.golang.org/protobuf/reflect/protoreflect"
+ protoimpl "google.golang.org/protobuf/runtime/protoimpl"
+ reflect "reflect"
+ sync "sync"
+)
+
+const (
+ // Verify that this generated code is sufficiently up-to-date.
+ _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
+ // Verify that runtime/protoimpl is sufficiently up-to-date.
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
+)
+
+// export stream provider in audit service
+type StreamProvider int32
+
+const (
+ StreamProvider_S3 StreamProvider = 0
+)
+
+// Enum value maps for StreamProvider.
+var (
+ StreamProvider_name = map[int32]string{
+ 0: "S3",
+ }
+ StreamProvider_value = map[string]int32{
+ "S3": 0,
+ }
+)
+
+func (x StreamProvider) Enum() *StreamProvider {
+ p := new(StreamProvider)
+ *p = x
+ return p
+}
+
+func (x StreamProvider) String() string {
+ return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
+}
+
+func (StreamProvider) Descriptor() protoreflect.EnumDescriptor {
+ return file_audit_proto_enumTypes[0].Descriptor()
+}
+
+func (StreamProvider) Type() protoreflect.EnumType {
+ return &file_audit_proto_enumTypes[0]
+}
+
+func (x StreamProvider) Number() protoreflect.EnumNumber {
+ return protoreflect.EnumNumber(x)
+}
+
+// Deprecated: Use StreamProvider.Descriptor instead.
+func (StreamProvider) EnumDescriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{0}
+}
+
+type StreamStatus int32
+
+const (
+ StreamStatus_ACTIVE StreamStatus = 0
+ StreamStatus_PAUSED StreamStatus = 1
+)
+
+// Enum value maps for StreamStatus.
+var (
+ StreamStatus_name = map[int32]string{
+ 0: "ACTIVE",
+ 1: "PAUSED",
+ }
+ StreamStatus_value = map[string]int32{
+ "ACTIVE": 0,
+ "PAUSED": 1,
+ }
+)
+
+func (x StreamStatus) Enum() *StreamStatus {
+ p := new(StreamStatus)
+ *p = x
+ return p
+}
+
+func (x StreamStatus) String() string {
+ return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
+}
+
+func (StreamStatus) Descriptor() protoreflect.EnumDescriptor {
+ return file_audit_proto_enumTypes[1].Descriptor()
+}
+
+func (StreamStatus) Type() protoreflect.EnumType {
+ return &file_audit_proto_enumTypes[1]
+}
+
+func (x StreamStatus) Number() protoreflect.EnumNumber {
+ return protoreflect.EnumNumber(x)
+}
+
+// Deprecated: Use StreamStatus.Descriptor instead.
+func (StreamStatus) EnumDescriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{1}
+}
+
+type PaginatedListRequest_Direction int32
+
+const (
+ PaginatedListRequest_NEXT PaginatedListRequest_Direction = 0
+ PaginatedListRequest_PREVIOUS PaginatedListRequest_Direction = 1
+)
+
+// Enum value maps for PaginatedListRequest_Direction.
+var (
+ PaginatedListRequest_Direction_name = map[int32]string{
+ 0: "NEXT",
+ 1: "PREVIOUS",
+ }
+ PaginatedListRequest_Direction_value = map[string]int32{
+ "NEXT": 0,
+ "PREVIOUS": 1,
+ }
+)
+
+func (x PaginatedListRequest_Direction) Enum() *PaginatedListRequest_Direction {
+ p := new(PaginatedListRequest_Direction)
+ *p = x
+ return p
+}
+
+func (x PaginatedListRequest_Direction) String() string {
+ return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
+}
+
+func (PaginatedListRequest_Direction) Descriptor() protoreflect.EnumDescriptor {
+ return file_audit_proto_enumTypes[2].Descriptor()
+}
+
+func (PaginatedListRequest_Direction) Type() protoreflect.EnumType {
+ return &file_audit_proto_enumTypes[2]
+}
+
+func (x PaginatedListRequest_Direction) Number() protoreflect.EnumNumber {
+ return protoreflect.EnumNumber(x)
+}
+
+// Deprecated: Use PaginatedListRequest_Direction.Descriptor instead.
+func (PaginatedListRequest_Direction) EnumDescriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{2, 0}
+}
+
+type ListStreamLogsRequest_Direction int32
+
+const (
+ ListStreamLogsRequest_NEXT ListStreamLogsRequest_Direction = 0
+ ListStreamLogsRequest_PREVIOUS ListStreamLogsRequest_Direction = 1
+)
+
+// Enum value maps for ListStreamLogsRequest_Direction.
+var (
+ ListStreamLogsRequest_Direction_name = map[int32]string{
+ 0: "NEXT",
+ 1: "PREVIOUS",
+ }
+ ListStreamLogsRequest_Direction_value = map[string]int32{
+ "NEXT": 0,
+ "PREVIOUS": 1,
+ }
+)
+
+func (x ListStreamLogsRequest_Direction) Enum() *ListStreamLogsRequest_Direction {
+ p := new(ListStreamLogsRequest_Direction)
+ *p = x
+ return p
+}
+
+func (x ListStreamLogsRequest_Direction) String() string {
+ return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
+}
+
+func (ListStreamLogsRequest_Direction) Descriptor() protoreflect.EnumDescriptor {
+ return file_audit_proto_enumTypes[3].Descriptor()
+}
+
+func (ListStreamLogsRequest_Direction) Type() protoreflect.EnumType {
+ return &file_audit_proto_enumTypes[3]
+}
+
+func (x ListStreamLogsRequest_Direction) Number() protoreflect.EnumNumber {
+ return protoreflect.EnumNumber(x)
+}
+
+// Deprecated: Use ListStreamLogsRequest_Direction.Descriptor instead.
+func (ListStreamLogsRequest_Direction) EnumDescriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{4, 0}
+}
+
+// Describes the type of S3StreamConfig
+// - USER - uses key_id and key_secret as credential
+// - INSTANCE_ROLE - uses AWS instance role to access S3
+type S3StreamConfig_Type int32
+
+const (
+ S3StreamConfig_USER S3StreamConfig_Type = 0
+ S3StreamConfig_INSTANCE_ROLE S3StreamConfig_Type = 1
+)
+
+// Enum value maps for S3StreamConfig_Type.
+var (
+ S3StreamConfig_Type_name = map[int32]string{
+ 0: "USER",
+ 1: "INSTANCE_ROLE",
+ }
+ S3StreamConfig_Type_value = map[string]int32{
+ "USER": 0,
+ "INSTANCE_ROLE": 1,
+ }
+)
+
+func (x S3StreamConfig_Type) Enum() *S3StreamConfig_Type {
+ p := new(S3StreamConfig_Type)
+ *p = x
+ return p
+}
+
+func (x S3StreamConfig_Type) String() string {
+ return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
+}
+
+func (S3StreamConfig_Type) Descriptor() protoreflect.EnumDescriptor {
+ return file_audit_proto_enumTypes[4].Descriptor()
+}
+
+func (S3StreamConfig_Type) Type() protoreflect.EnumType {
+ return &file_audit_proto_enumTypes[4]
+}
+
+func (x S3StreamConfig_Type) Number() protoreflect.EnumNumber {
+ return protoreflect.EnumNumber(x)
+}
+
+// Deprecated: Use S3StreamConfig_Type.Descriptor instead.
+func (S3StreamConfig_Type) EnumDescriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{9, 0}
+}
+
+type Event_Resource int32
+
+const (
+ Event_Project Event_Resource = 0
+ Event_User Event_Resource = 1
+ Event_Workflow Event_Resource = 2
+ Event_Pipeline Event_Resource = 3
+ Event_DebugSession Event_Resource = 4
+ Event_PeriodicScheduler Event_Resource = 5
+ Event_Secret Event_Resource = 6
+ Event_Notification Event_Resource = 7
+ Event_Dashboard Event_Resource = 8
+ Event_Job Event_Resource = 9
+ Event_Artifact Event_Resource = 10
+ Event_Organization Event_Resource = 11
+ Event_SelfHostedAgentType Event_Resource = 12
+ Event_SelfHostedAgent Event_Resource = 13
+ Event_CustomDashboard Event_Resource = 14
+ Event_CustomDashboardItem Event_Resource = 15
+ Event_ProjectInsightsSettings Event_Resource = 16
+ Event_Okta Event_Resource = 17
+ Event_FlakyTests Event_Resource = 18
+ Event_RBACRole Event_Resource = 19
+)
+
+// Enum value maps for Event_Resource.
+var (
+ Event_Resource_name = map[int32]string{
+ 0: "Project",
+ 1: "User",
+ 2: "Workflow",
+ 3: "Pipeline",
+ 4: "DebugSession",
+ 5: "PeriodicScheduler",
+ 6: "Secret",
+ 7: "Notification",
+ 8: "Dashboard",
+ 9: "Job",
+ 10: "Artifact",
+ 11: "Organization",
+ 12: "SelfHostedAgentType",
+ 13: "SelfHostedAgent",
+ 14: "CustomDashboard",
+ 15: "CustomDashboardItem",
+ 16: "ProjectInsightsSettings",
+ 17: "Okta",
+ 18: "FlakyTests",
+ 19: "RBACRole",
+ }
+ Event_Resource_value = map[string]int32{
+ "Project": 0,
+ "User": 1,
+ "Workflow": 2,
+ "Pipeline": 3,
+ "DebugSession": 4,
+ "PeriodicScheduler": 5,
+ "Secret": 6,
+ "Notification": 7,
+ "Dashboard": 8,
+ "Job": 9,
+ "Artifact": 10,
+ "Organization": 11,
+ "SelfHostedAgentType": 12,
+ "SelfHostedAgent": 13,
+ "CustomDashboard": 14,
+ "CustomDashboardItem": 15,
+ "ProjectInsightsSettings": 16,
+ "Okta": 17,
+ "FlakyTests": 18,
+ "RBACRole": 19,
+ }
+)
+
+func (x Event_Resource) Enum() *Event_Resource {
+ p := new(Event_Resource)
+ *p = x
+ return p
+}
+
+func (x Event_Resource) String() string {
+ return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
+}
+
+func (Event_Resource) Descriptor() protoreflect.EnumDescriptor {
+ return file_audit_proto_enumTypes[5].Descriptor()
+}
+
+func (Event_Resource) Type() protoreflect.EnumType {
+ return &file_audit_proto_enumTypes[5]
+}
+
+func (x Event_Resource) Number() protoreflect.EnumNumber {
+ return protoreflect.EnumNumber(x)
+}
+
+// Deprecated: Use Event_Resource.Descriptor instead.
+func (Event_Resource) EnumDescriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{20, 0}
+}
+
+type Event_Operation int32
+
+const (
+ Event_Added Event_Operation = 0
+ Event_Removed Event_Operation = 1
+ Event_Modified Event_Operation = 2
+ Event_Started Event_Operation = 3
+ Event_Stopped Event_Operation = 4
+ Event_Promoted Event_Operation = 5
+ Event_Demoted Event_Operation = 6
+ Event_Rebuild Event_Operation = 7
+ Event_Download Event_Operation = 8
+ Event_Disabled Event_Operation = 9
+)
+
+// Enum value maps for Event_Operation.
+var (
+ Event_Operation_name = map[int32]string{
+ 0: "Added",
+ 1: "Removed",
+ 2: "Modified",
+ 3: "Started",
+ 4: "Stopped",
+ 5: "Promoted",
+ 6: "Demoted",
+ 7: "Rebuild",
+ 8: "Download",
+ 9: "Disabled",
+ }
+ Event_Operation_value = map[string]int32{
+ "Added": 0,
+ "Removed": 1,
+ "Modified": 2,
+ "Started": 3,
+ "Stopped": 4,
+ "Promoted": 5,
+ "Demoted": 6,
+ "Rebuild": 7,
+ "Download": 8,
+ "Disabled": 9,
+ }
+)
+
+func (x Event_Operation) Enum() *Event_Operation {
+ p := new(Event_Operation)
+ *p = x
+ return p
+}
+
+func (x Event_Operation) String() string {
+ return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
+}
+
+func (Event_Operation) Descriptor() protoreflect.EnumDescriptor {
+ return file_audit_proto_enumTypes[6].Descriptor()
+}
+
+func (Event_Operation) Type() protoreflect.EnumType {
+ return &file_audit_proto_enumTypes[6]
+}
+
+func (x Event_Operation) Number() protoreflect.EnumNumber {
+ return protoreflect.EnumNumber(x)
+}
+
+// Deprecated: Use Event_Operation.Descriptor instead.
+func (Event_Operation) EnumDescriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{20, 1}
+}
+
+type Event_Medium int32
+
+const (
+ Event_Web Event_Medium = 0
+ Event_API Event_Medium = 1
+ Event_CLI Event_Medium = 2
+)
+
+// Enum value maps for Event_Medium.
+var (
+ Event_Medium_name = map[int32]string{
+ 0: "Web",
+ 1: "API",
+ 2: "CLI",
+ }
+ Event_Medium_value = map[string]int32{
+ "Web": 0,
+ "API": 1,
+ "CLI": 2,
+ }
+)
+
+func (x Event_Medium) Enum() *Event_Medium {
+ p := new(Event_Medium)
+ *p = x
+ return p
+}
+
+func (x Event_Medium) String() string {
+ return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
+}
+
+func (Event_Medium) Descriptor() protoreflect.EnumDescriptor {
+ return file_audit_proto_enumTypes[7].Descriptor()
+}
+
+func (Event_Medium) Type() protoreflect.EnumType {
+ return &file_audit_proto_enumTypes[7]
+}
+
+func (x Event_Medium) Number() protoreflect.EnumNumber {
+ return protoreflect.EnumNumber(x)
+}
+
+// Deprecated: Use Event_Medium.Descriptor instead.
+func (Event_Medium) EnumDescriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{20, 2}
+}
+
+type ListRequest struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ OrgId string `protobuf:"bytes,1,opt,name=org_id,json=orgId,proto3" json:"org_id,omitempty"`
+ // [optional]
+ //
+ // By default, only the last event for an operation is returned.
+ // If you need every event, pass true.
+ AllEventsInOperation bool `protobuf:"varint,2,opt,name=all_events_in_operation,json=allEventsInOperation,proto3" json:"all_events_in_operation,omitempty"`
+}
+
+func (x *ListRequest) Reset() {
+ *x = ListRequest{}
+ mi := &file_audit_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *ListRequest) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*ListRequest) ProtoMessage() {}
+
+func (x *ListRequest) ProtoReflect() protoreflect.Message {
+ mi := &file_audit_proto_msgTypes[0]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use ListRequest.ProtoReflect.Descriptor instead.
+func (*ListRequest) Descriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{0}
+}
+
+func (x *ListRequest) GetOrgId() string {
+ if x != nil {
+ return x.OrgId
+ }
+ return ""
+}
+
+func (x *ListRequest) GetAllEventsInOperation() bool {
+ if x != nil {
+ return x.AllEventsInOperation
+ }
+ return false
+}
+
+type ListResponse struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ Events []*Event `protobuf:"bytes,1,rep,name=events,proto3" json:"events,omitempty"`
+}
+
+func (x *ListResponse) Reset() {
+ *x = ListResponse{}
+ mi := &file_audit_proto_msgTypes[1]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *ListResponse) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*ListResponse) ProtoMessage() {}
+
+func (x *ListResponse) ProtoReflect() protoreflect.Message {
+ mi := &file_audit_proto_msgTypes[1]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use ListResponse.ProtoReflect.Descriptor instead.
+func (*ListResponse) Descriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{1}
+}
+
+func (x *ListResponse) GetEvents() []*Event {
+ if x != nil {
+ return x.Events
+ }
+ return nil
+}
+
+type PaginatedListRequest struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ OrgId string `protobuf:"bytes,1,opt,name=org_id,json=orgId,proto3" json:"org_id,omitempty"`
+ PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"`
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"`
+ Direction PaginatedListRequest_Direction `protobuf:"varint,4,opt,name=direction,proto3,enum=InternalApi.Audit.PaginatedListRequest_Direction" json:"direction,omitempty"`
+}
+
+func (x *PaginatedListRequest) Reset() {
+ *x = PaginatedListRequest{}
+ mi := &file_audit_proto_msgTypes[2]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *PaginatedListRequest) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*PaginatedListRequest) ProtoMessage() {}
+
+func (x *PaginatedListRequest) ProtoReflect() protoreflect.Message {
+ mi := &file_audit_proto_msgTypes[2]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use PaginatedListRequest.ProtoReflect.Descriptor instead.
+func (*PaginatedListRequest) Descriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{2}
+}
+
+func (x *PaginatedListRequest) GetOrgId() string {
+ if x != nil {
+ return x.OrgId
+ }
+ return ""
+}
+
+func (x *PaginatedListRequest) GetPageSize() int32 {
+ if x != nil {
+ return x.PageSize
+ }
+ return 0
+}
+
+func (x *PaginatedListRequest) GetPageToken() string {
+ if x != nil {
+ return x.PageToken
+ }
+ return ""
+}
+
+func (x *PaginatedListRequest) GetDirection() PaginatedListRequest_Direction {
+ if x != nil {
+ return x.Direction
+ }
+ return PaginatedListRequest_NEXT
+}
+
+type PaginatedListResponse struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ Events []*Event `protobuf:"bytes,1,rep,name=events,proto3" json:"events,omitempty"`
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"`
+ PreviousPageToken string `protobuf:"bytes,3,opt,name=previous_page_token,json=previousPageToken,proto3" json:"previous_page_token,omitempty"`
+}
+
+func (x *PaginatedListResponse) Reset() {
+ *x = PaginatedListResponse{}
+ mi := &file_audit_proto_msgTypes[3]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *PaginatedListResponse) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*PaginatedListResponse) ProtoMessage() {}
+
+func (x *PaginatedListResponse) ProtoReflect() protoreflect.Message {
+ mi := &file_audit_proto_msgTypes[3]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use PaginatedListResponse.ProtoReflect.Descriptor instead.
+func (*PaginatedListResponse) Descriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{3}
+}
+
+func (x *PaginatedListResponse) GetEvents() []*Event {
+ if x != nil {
+ return x.Events
+ }
+ return nil
+}
+
+func (x *PaginatedListResponse) GetNextPageToken() string {
+ if x != nil {
+ return x.NextPageToken
+ }
+ return ""
+}
+
+func (x *PaginatedListResponse) GetPreviousPageToken() string {
+ if x != nil {
+ return x.PreviousPageToken
+ }
+ return ""
+}
+
+type ListStreamLogsRequest struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ OrgId string `protobuf:"bytes,1,opt,name=org_id,json=orgId,proto3" json:"org_id,omitempty"`
+ PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"`
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"`
+ Direction ListStreamLogsRequest_Direction `protobuf:"varint,4,opt,name=direction,proto3,enum=InternalApi.Audit.ListStreamLogsRequest_Direction" json:"direction,omitempty"`
+}
+
+func (x *ListStreamLogsRequest) Reset() {
+ *x = ListStreamLogsRequest{}
+ mi := &file_audit_proto_msgTypes[4]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *ListStreamLogsRequest) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*ListStreamLogsRequest) ProtoMessage() {}
+
+func (x *ListStreamLogsRequest) ProtoReflect() protoreflect.Message {
+ mi := &file_audit_proto_msgTypes[4]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use ListStreamLogsRequest.ProtoReflect.Descriptor instead.
+func (*ListStreamLogsRequest) Descriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{4}
+}
+
+func (x *ListStreamLogsRequest) GetOrgId() string {
+ if x != nil {
+ return x.OrgId
+ }
+ return ""
+}
+
+func (x *ListStreamLogsRequest) GetPageSize() int32 {
+ if x != nil {
+ return x.PageSize
+ }
+ return 0
+}
+
+func (x *ListStreamLogsRequest) GetPageToken() string {
+ if x != nil {
+ return x.PageToken
+ }
+ return ""
+}
+
+func (x *ListStreamLogsRequest) GetDirection() ListStreamLogsRequest_Direction {
+ if x != nil {
+ return x.Direction
+ }
+ return ListStreamLogsRequest_NEXT
+}
+
+type ListStreamLogsResponse struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ StreamLogs []*StreamLog `protobuf:"bytes,1,rep,name=stream_logs,json=streamLogs,proto3" json:"stream_logs,omitempty"`
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"`
+ PreviousPageToken string `protobuf:"bytes,3,opt,name=previous_page_token,json=previousPageToken,proto3" json:"previous_page_token,omitempty"`
+}
+
+func (x *ListStreamLogsResponse) Reset() {
+ *x = ListStreamLogsResponse{}
+ mi := &file_audit_proto_msgTypes[5]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *ListStreamLogsResponse) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*ListStreamLogsResponse) ProtoMessage() {}
+
+func (x *ListStreamLogsResponse) ProtoReflect() protoreflect.Message {
+ mi := &file_audit_proto_msgTypes[5]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use ListStreamLogsResponse.ProtoReflect.Descriptor instead.
+func (*ListStreamLogsResponse) Descriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{5}
+}
+
+func (x *ListStreamLogsResponse) GetStreamLogs() []*StreamLog {
+ if x != nil {
+ return x.StreamLogs
+ }
+ return nil
+}
+
+func (x *ListStreamLogsResponse) GetNextPageToken() string {
+ if x != nil {
+ return x.NextPageToken
+ }
+ return ""
+}
+
+func (x *ListStreamLogsResponse) GetPreviousPageToken() string {
+ if x != nil {
+ return x.PreviousPageToken
+ }
+ return ""
+}
+
+type StreamLog struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ Timestamp *timestamp.Timestamp `protobuf:"bytes,1,opt,name=timestamp,proto3" json:"timestamp,omitempty"`
+ ErrorMessage string `protobuf:"bytes,2,opt,name=error_message,json=errorMessage,proto3" json:"error_message,omitempty"`
+ FileSize int32 `protobuf:"varint,3,opt,name=file_size,json=fileSize,proto3" json:"file_size,omitempty"`
+ FileName string `protobuf:"bytes,4,opt,name=file_name,json=fileName,proto3" json:"file_name,omitempty"`
+ FirstEventTimestamp *timestamp.Timestamp `protobuf:"bytes,5,opt,name=first_event_timestamp,json=firstEventTimestamp,proto3" json:"first_event_timestamp,omitempty"`
+ LastEventTimestamp *timestamp.Timestamp `protobuf:"bytes,6,opt,name=last_event_timestamp,json=lastEventTimestamp,proto3" json:"last_event_timestamp,omitempty"`
+}
+
+func (x *StreamLog) Reset() {
+ *x = StreamLog{}
+ mi := &file_audit_proto_msgTypes[6]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *StreamLog) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*StreamLog) ProtoMessage() {}
+
+func (x *StreamLog) ProtoReflect() protoreflect.Message {
+ mi := &file_audit_proto_msgTypes[6]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use StreamLog.ProtoReflect.Descriptor instead.
+func (*StreamLog) Descriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{6}
+}
+
+func (x *StreamLog) GetTimestamp() *timestamp.Timestamp {
+ if x != nil {
+ return x.Timestamp
+ }
+ return nil
+}
+
+func (x *StreamLog) GetErrorMessage() string {
+ if x != nil {
+ return x.ErrorMessage
+ }
+ return ""
+}
+
+func (x *StreamLog) GetFileSize() int32 {
+ if x != nil {
+ return x.FileSize
+ }
+ return 0
+}
+
+func (x *StreamLog) GetFileName() string {
+ if x != nil {
+ return x.FileName
+ }
+ return ""
+}
+
+func (x *StreamLog) GetFirstEventTimestamp() *timestamp.Timestamp {
+ if x != nil {
+ return x.FirstEventTimestamp
+ }
+ return nil
+}
+
+func (x *StreamLog) GetLastEventTimestamp() *timestamp.Timestamp {
+ if x != nil {
+ return x.LastEventTimestamp
+ }
+ return nil
+}
+
+// Stream is message used to describe one stream
+type Stream struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ OrgId string `protobuf:"bytes,1,opt,name=org_id,json=orgId,proto3" json:"org_id,omitempty"`
+ Provider StreamProvider `protobuf:"varint,2,opt,name=provider,proto3,enum=InternalApi.Audit.StreamProvider" json:"provider,omitempty"`
+ Status StreamStatus `protobuf:"varint,3,opt,name=status,proto3,enum=InternalApi.Audit.StreamStatus" json:"status,omitempty"`
+ // based on provider one of the following fields should be set
+ S3Config *S3StreamConfig `protobuf:"bytes,4,opt,name=s3_config,json=s3Config,proto3" json:"s3_config,omitempty"`
+}
+
+func (x *Stream) Reset() {
+ *x = Stream{}
+ mi := &file_audit_proto_msgTypes[7]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *Stream) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*Stream) ProtoMessage() {}
+
+func (x *Stream) ProtoReflect() protoreflect.Message {
+ mi := &file_audit_proto_msgTypes[7]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use Stream.ProtoReflect.Descriptor instead.
+func (*Stream) Descriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{7}
+}
+
+func (x *Stream) GetOrgId() string {
+ if x != nil {
+ return x.OrgId
+ }
+ return ""
+}
+
+func (x *Stream) GetProvider() StreamProvider {
+ if x != nil {
+ return x.Provider
+ }
+ return StreamProvider_S3
+}
+
+func (x *Stream) GetStatus() StreamStatus {
+ if x != nil {
+ return x.Status
+ }
+ return StreamStatus_ACTIVE
+}
+
+func (x *Stream) GetS3Config() *S3StreamConfig {
+ if x != nil {
+ return x.S3Config
+ }
+ return nil
+}
+
+// EditMeta contains Stream data
+type EditMeta struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ CreatedAt *timestamp.Timestamp `protobuf:"bytes,1,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"`
+ UpdatedAt *timestamp.Timestamp `protobuf:"bytes,2,opt,name=updated_at,json=updatedAt,proto3" json:"updated_at,omitempty"`
+ ActivityToggledAt *timestamp.Timestamp `protobuf:"bytes,3,opt,name=activity_toggled_at,json=activityToggledAt,proto3" json:"activity_toggled_at,omitempty"`
+ UpdatedBy string `protobuf:"bytes,4,opt,name=updated_by,json=updatedBy,proto3" json:"updated_by,omitempty"`
+ ActivityToggledBy string `protobuf:"bytes,5,opt,name=activity_toggled_by,json=activityToggledBy,proto3" json:"activity_toggled_by,omitempty"`
+}
+
+func (x *EditMeta) Reset() {
+ *x = EditMeta{}
+ mi := &file_audit_proto_msgTypes[8]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *EditMeta) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*EditMeta) ProtoMessage() {}
+
+func (x *EditMeta) ProtoReflect() protoreflect.Message {
+ mi := &file_audit_proto_msgTypes[8]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use EditMeta.ProtoReflect.Descriptor instead.
+func (*EditMeta) Descriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{8}
+}
+
+func (x *EditMeta) GetCreatedAt() *timestamp.Timestamp {
+ if x != nil {
+ return x.CreatedAt
+ }
+ return nil
+}
+
+func (x *EditMeta) GetUpdatedAt() *timestamp.Timestamp {
+ if x != nil {
+ return x.UpdatedAt
+ }
+ return nil
+}
+
+func (x *EditMeta) GetActivityToggledAt() *timestamp.Timestamp {
+ if x != nil {
+ return x.ActivityToggledAt
+ }
+ return nil
+}
+
+func (x *EditMeta) GetUpdatedBy() string {
+ if x != nil {
+ return x.UpdatedBy
+ }
+ return ""
+}
+
+func (x *EditMeta) GetActivityToggledBy() string {
+ if x != nil {
+ return x.ActivityToggledBy
+ }
+ return ""
+}
+
+type S3StreamConfig struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ Bucket string `protobuf:"bytes,1,opt,name=bucket,proto3" json:"bucket,omitempty"`
+ KeyId string `protobuf:"bytes,2,opt,name=key_id,json=keyId,proto3" json:"key_id,omitempty"`
+ KeySecret string `protobuf:"bytes,3,opt,name=key_secret,json=keySecret,proto3" json:"key_secret,omitempty"`
+ // specify host name of s3 server, defaults to aws
+ Host string `protobuf:"bytes,4,opt,name=host,proto3" json:"host,omitempty"`
+ Region string `protobuf:"bytes,5,opt,name=region,proto3" json:"region,omitempty"`
+ Type S3StreamConfig_Type `protobuf:"varint,6,opt,name=type,proto3,enum=InternalApi.Audit.S3StreamConfig_Type" json:"type,omitempty"`
+}
+
+func (x *S3StreamConfig) Reset() {
+ *x = S3StreamConfig{}
+ mi := &file_audit_proto_msgTypes[9]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *S3StreamConfig) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*S3StreamConfig) ProtoMessage() {}
+
+func (x *S3StreamConfig) ProtoReflect() protoreflect.Message {
+ mi := &file_audit_proto_msgTypes[9]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use S3StreamConfig.ProtoReflect.Descriptor instead.
+func (*S3StreamConfig) Descriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{9}
+}
+
+func (x *S3StreamConfig) GetBucket() string {
+ if x != nil {
+ return x.Bucket
+ }
+ return ""
+}
+
+func (x *S3StreamConfig) GetKeyId() string {
+ if x != nil {
+ return x.KeyId
+ }
+ return ""
+}
+
+func (x *S3StreamConfig) GetKeySecret() string {
+ if x != nil {
+ return x.KeySecret
+ }
+ return ""
+}
+
+func (x *S3StreamConfig) GetHost() string {
+ if x != nil {
+ return x.Host
+ }
+ return ""
+}
+
+func (x *S3StreamConfig) GetRegion() string {
+ if x != nil {
+ return x.Region
+ }
+ return ""
+}
+
+func (x *S3StreamConfig) GetType() S3StreamConfig_Type {
+ if x != nil {
+ return x.Type
+ }
+ return S3StreamConfig_USER
+}
+
+type TestStreamRequest struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ Stream *Stream `protobuf:"bytes,1,opt,name=stream,proto3" json:"stream,omitempty"`
+}
+
+func (x *TestStreamRequest) Reset() {
+ *x = TestStreamRequest{}
+ mi := &file_audit_proto_msgTypes[10]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *TestStreamRequest) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*TestStreamRequest) ProtoMessage() {}
+
+func (x *TestStreamRequest) ProtoReflect() protoreflect.Message {
+ mi := &file_audit_proto_msgTypes[10]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use TestStreamRequest.ProtoReflect.Descriptor instead.
+func (*TestStreamRequest) Descriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{10}
+}
+
+func (x *TestStreamRequest) GetStream() *Stream {
+ if x != nil {
+ return x.Stream
+ }
+ return nil
+}
+
+type TestStreamResponse struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ Success bool `protobuf:"varint,1,opt,name=success,proto3" json:"success,omitempty"`
+ Message string `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"`
+}
+
+func (x *TestStreamResponse) Reset() {
+ *x = TestStreamResponse{}
+ mi := &file_audit_proto_msgTypes[11]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *TestStreamResponse) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*TestStreamResponse) ProtoMessage() {}
+
+func (x *TestStreamResponse) ProtoReflect() protoreflect.Message {
+ mi := &file_audit_proto_msgTypes[11]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use TestStreamResponse.ProtoReflect.Descriptor instead.
+func (*TestStreamResponse) Descriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{11}
+}
+
+func (x *TestStreamResponse) GetSuccess() bool {
+ if x != nil {
+ return x.Success
+ }
+ return false
+}
+
+func (x *TestStreamResponse) GetMessage() string {
+ if x != nil {
+ return x.Message
+ }
+ return ""
+}
+
+type CreateStreamRequest struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ Stream *Stream `protobuf:"bytes,1,opt,name=stream,proto3" json:"stream,omitempty"`
+ UserId string `protobuf:"bytes,2,opt,name=user_id,json=userId,proto3" json:"user_id,omitempty"`
+}
+
+func (x *CreateStreamRequest) Reset() {
+ *x = CreateStreamRequest{}
+ mi := &file_audit_proto_msgTypes[12]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *CreateStreamRequest) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*CreateStreamRequest) ProtoMessage() {}
+
+func (x *CreateStreamRequest) ProtoReflect() protoreflect.Message {
+ mi := &file_audit_proto_msgTypes[12]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use CreateStreamRequest.ProtoReflect.Descriptor instead.
+func (*CreateStreamRequest) Descriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{12}
+}
+
+func (x *CreateStreamRequest) GetStream() *Stream {
+ if x != nil {
+ return x.Stream
+ }
+ return nil
+}
+
+func (x *CreateStreamRequest) GetUserId() string {
+ if x != nil {
+ return x.UserId
+ }
+ return ""
+}
+
+type CreateStreamResponse struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ Stream *Stream `protobuf:"bytes,1,opt,name=stream,proto3" json:"stream,omitempty"`
+ Meta *EditMeta `protobuf:"bytes,2,opt,name=meta,proto3" json:"meta,omitempty"`
+}
+
+func (x *CreateStreamResponse) Reset() {
+ *x = CreateStreamResponse{}
+ mi := &file_audit_proto_msgTypes[13]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *CreateStreamResponse) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*CreateStreamResponse) ProtoMessage() {}
+
+func (x *CreateStreamResponse) ProtoReflect() protoreflect.Message {
+ mi := &file_audit_proto_msgTypes[13]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use CreateStreamResponse.ProtoReflect.Descriptor instead.
+func (*CreateStreamResponse) Descriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{13}
+}
+
+func (x *CreateStreamResponse) GetStream() *Stream {
+ if x != nil {
+ return x.Stream
+ }
+ return nil
+}
+
+func (x *CreateStreamResponse) GetMeta() *EditMeta {
+ if x != nil {
+ return x.Meta
+ }
+ return nil
+}
+
+type DescribeStreamRequest struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ OrgId string `protobuf:"bytes,1,opt,name=org_id,json=orgId,proto3" json:"org_id,omitempty"`
+}
+
+func (x *DescribeStreamRequest) Reset() {
+ *x = DescribeStreamRequest{}
+ mi := &file_audit_proto_msgTypes[14]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *DescribeStreamRequest) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*DescribeStreamRequest) ProtoMessage() {}
+
+func (x *DescribeStreamRequest) ProtoReflect() protoreflect.Message {
+ mi := &file_audit_proto_msgTypes[14]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use DescribeStreamRequest.ProtoReflect.Descriptor instead.
+func (*DescribeStreamRequest) Descriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{14}
+}
+
+func (x *DescribeStreamRequest) GetOrgId() string {
+ if x != nil {
+ return x.OrgId
+ }
+ return ""
+}
+
+type DescribeStreamResponse struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ Stream *Stream `protobuf:"bytes,1,opt,name=stream,proto3" json:"stream,omitempty"`
+ Meta *EditMeta `protobuf:"bytes,2,opt,name=meta,proto3" json:"meta,omitempty"`
+}
+
+func (x *DescribeStreamResponse) Reset() {
+ *x = DescribeStreamResponse{}
+ mi := &file_audit_proto_msgTypes[15]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *DescribeStreamResponse) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*DescribeStreamResponse) ProtoMessage() {}
+
+func (x *DescribeStreamResponse) ProtoReflect() protoreflect.Message {
+ mi := &file_audit_proto_msgTypes[15]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use DescribeStreamResponse.ProtoReflect.Descriptor instead.
+func (*DescribeStreamResponse) Descriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{15}
+}
+
+func (x *DescribeStreamResponse) GetStream() *Stream {
+ if x != nil {
+ return x.Stream
+ }
+ return nil
+}
+
+func (x *DescribeStreamResponse) GetMeta() *EditMeta {
+ if x != nil {
+ return x.Meta
+ }
+ return nil
+}
+
+type UpdateStreamRequest struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ Stream *Stream `protobuf:"bytes,1,opt,name=stream,proto3" json:"stream,omitempty"`
+ UserId string `protobuf:"bytes,2,opt,name=user_id,json=userId,proto3" json:"user_id,omitempty"`
+}
+
+func (x *UpdateStreamRequest) Reset() {
+ *x = UpdateStreamRequest{}
+ mi := &file_audit_proto_msgTypes[16]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *UpdateStreamRequest) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*UpdateStreamRequest) ProtoMessage() {}
+
+func (x *UpdateStreamRequest) ProtoReflect() protoreflect.Message {
+ mi := &file_audit_proto_msgTypes[16]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use UpdateStreamRequest.ProtoReflect.Descriptor instead.
+func (*UpdateStreamRequest) Descriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{16}
+}
+
+func (x *UpdateStreamRequest) GetStream() *Stream {
+ if x != nil {
+ return x.Stream
+ }
+ return nil
+}
+
+func (x *UpdateStreamRequest) GetUserId() string {
+ if x != nil {
+ return x.UserId
+ }
+ return ""
+}
+
+type UpdateStreamResponse struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ Stream *Stream `protobuf:"bytes,1,opt,name=stream,proto3" json:"stream,omitempty"`
+ Meta *EditMeta `protobuf:"bytes,2,opt,name=meta,proto3" json:"meta,omitempty"`
+}
+
+func (x *UpdateStreamResponse) Reset() {
+ *x = UpdateStreamResponse{}
+ mi := &file_audit_proto_msgTypes[17]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *UpdateStreamResponse) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*UpdateStreamResponse) ProtoMessage() {}
+
+func (x *UpdateStreamResponse) ProtoReflect() protoreflect.Message {
+ mi := &file_audit_proto_msgTypes[17]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use UpdateStreamResponse.ProtoReflect.Descriptor instead.
+func (*UpdateStreamResponse) Descriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{17}
+}
+
+func (x *UpdateStreamResponse) GetStream() *Stream {
+ if x != nil {
+ return x.Stream
+ }
+ return nil
+}
+
+func (x *UpdateStreamResponse) GetMeta() *EditMeta {
+ if x != nil {
+ return x.Meta
+ }
+ return nil
+}
+
+type DestroyStreamRequest struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ OrgId string `protobuf:"bytes,1,opt,name=org_id,json=orgId,proto3" json:"org_id,omitempty"`
+}
+
+func (x *DestroyStreamRequest) Reset() {
+ *x = DestroyStreamRequest{}
+ mi := &file_audit_proto_msgTypes[18]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *DestroyStreamRequest) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*DestroyStreamRequest) ProtoMessage() {}
+
+func (x *DestroyStreamRequest) ProtoReflect() protoreflect.Message {
+ mi := &file_audit_proto_msgTypes[18]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use DestroyStreamRequest.ProtoReflect.Descriptor instead.
+func (*DestroyStreamRequest) Descriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{18}
+}
+
+func (x *DestroyStreamRequest) GetOrgId() string {
+ if x != nil {
+ return x.OrgId
+ }
+ return ""
+}
+
+type SetStreamStateRequest struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ OrgId string `protobuf:"bytes,1,opt,name=org_id,json=orgId,proto3" json:"org_id,omitempty"`
+ Status StreamStatus `protobuf:"varint,2,opt,name=status,proto3,enum=InternalApi.Audit.StreamStatus" json:"status,omitempty"`
+ UserId string `protobuf:"bytes,3,opt,name=user_id,json=userId,proto3" json:"user_id,omitempty"`
+}
+
+func (x *SetStreamStateRequest) Reset() {
+ *x = SetStreamStateRequest{}
+ mi := &file_audit_proto_msgTypes[19]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *SetStreamStateRequest) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*SetStreamStateRequest) ProtoMessage() {}
+
+func (x *SetStreamStateRequest) ProtoReflect() protoreflect.Message {
+ mi := &file_audit_proto_msgTypes[19]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use SetStreamStateRequest.ProtoReflect.Descriptor instead.
+func (*SetStreamStateRequest) Descriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{19}
+}
+
+func (x *SetStreamStateRequest) GetOrgId() string {
+ if x != nil {
+ return x.OrgId
+ }
+ return ""
+}
+
+func (x *SetStreamStateRequest) GetStatus() StreamStatus {
+ if x != nil {
+ return x.Status
+ }
+ return StreamStatus_ACTIVE
+}
+
+func (x *SetStreamStateRequest) GetUserId() string {
+ if x != nil {
+ return x.UserId
+ }
+ return ""
+}
+
+// Cloud AMQP messages.
+//
+// Listening for incoming logs on:
+//
+// exchange: 'audit'
+// routing-key: 'log'
+type Event struct {
+ state protoimpl.MessageState
+ sizeCache protoimpl.SizeCache
+ unknownFields protoimpl.UnknownFields
+
+ Resource Event_Resource `protobuf:"varint,1,opt,name=resource,proto3,enum=InternalApi.Audit.Event_Resource" json:"resource,omitempty"` // [required]
+ Operation Event_Operation `protobuf:"varint,2,opt,name=operation,proto3,enum=InternalApi.Audit.Event_Operation" json:"operation,omitempty"` // [required]
+ UserId string `protobuf:"bytes,3,opt,name=user_id,json=userId,proto3" json:"user_id,omitempty"` // [required] The user who initiated this action.
+ OrgId string `protobuf:"bytes,4,opt,name=org_id,json=orgId,proto3" json:"org_id,omitempty"` // [required] Organization ID in which this event happened.
+ IpAddress string `protobuf:"bytes,5,opt,name=ip_address,json=ipAddress,proto3" json:"ip_address,omitempty"` // [optional] The user's IP address when he run the operation.
+ // [optional]
+ //
+ // If username is not provided, the Audit system will use the user_id to look
+ // up the username. This allows the sender to not worry about fetching the
+ // username and the performance implications that this might add to the system.
+ //
+ // However, on user delete, the username of the user might no longer be
+ // available. Providing the username in regular audit logs is not
+ // necessary, as it will be loaded by the Audit service.
+ //
+ // Not providing the username (only the user_id) is cheaper
+ // performance wise to some services.
+ Username string `protobuf:"bytes,6,opt,name=username,proto3" json:"username,omitempty"`
+ // [optinal] Short textual description of the action.
+ Description string `protobuf:"bytes,7,opt,name=description,proto3" json:"description,omitempty"`
+ // [optinal] JSON Metadata. Must be a valid JSON.
+ Metadata string `protobuf:"bytes,8,opt,name=metadata,proto3" json:"metadata,omitempty"`
+ Timestamp *timestamp.Timestamp `protobuf:"bytes,9,opt,name=timestamp,proto3" json:"timestamp,omitempty"` // [required]
+ // [required]
+ //
+ // An ID that uniquly identifies this operation. Usually the x-request-id
+ // HTTP header of the request.
+ //
+ // Multiple events can belong to a single operation.
+ //
+ // When is this useful?
+ //
+ // Consider a Web request in a controller:
+ //
+ // def start_job do
+ // # running start operation ...
+ // end
+ //
+ // To add an Audit log to this operation, we can send it either before or
+ // after the processing has been executed.
+ //
+ // 1) If we send it after the operation:
+ //
+ // def start_job do
+ // # running start operation ...
+ //
+ // Audit.log(:job, :start)
+ // end
+ //
+ // We can risk the posibility that the processing is executed, but there is no
+ // Audit log saved. This can happen, for example, if the process shuts down
+ // just after the processing is done, but right before the log is sent out.
+ //
+ // 2) If we send it before the operation:
+ //
+ // def start_job do
+ // Audit.log(:job, :start)
+ //
+ // # running stop operation ...
+ // end
+ //
+ // Now, we are certain that the Audit log is saved if the operation is
+ // executed. However, we would also like to save the ID of the job in the
+ // Audit log.
+ //
+ // 3) If we send both the start and the end.
+ //
+ // def start_job do
+ // Audit.log(:job, :start)
+ //
+ // # running stop operation ...
+ //
+ // Audit.log(:job, :start, job_id: id)
+ // end
+ //
+ // This covers all the cases, however, the two events could be hard to
+ // connect while reviwing it.
+ //
+ // 4) Sending both at the start and the end with an operation_id.
+ //
+ // def start_job do
+ // Audit.log(:job, :start, operation_id: req.id)
+ //
+ // # running stop operation ...
+ //
+ // Audit.log(:job, :start, operation_id: req.id, job_id: id)
+ // end
+ //
+ // By specifying the same operation_id, we have all the data and we can
+ // connect it.
+ //
+ // While generating the Audit Log dump, we can merge the events in the backend
+ // and dump a single one to the customer.
+ OperationId string `protobuf:"bytes,10,opt,name=operation_id,json=operationId,proto3" json:"operation_id,omitempty"`
+ ResourceId string `protobuf:"bytes,11,opt,name=resource_id,json=resourceId,proto3" json:"resource_id,omitempty"` // [optional] ID of the resource that was modified.
+ ResourceName string `protobuf:"bytes,12,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` // [optional] Name of the resource that was modified.
+ Medium Event_Medium `protobuf:"varint,13,opt,name=medium,proto3,enum=InternalApi.Audit.Event_Medium" json:"medium,omitempty"` // [required] Web, API, or CLI
+}
+
+func (x *Event) Reset() {
+ *x = Event{}
+ mi := &file_audit_proto_msgTypes[20]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *Event) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*Event) ProtoMessage() {}
+
+func (x *Event) ProtoReflect() protoreflect.Message {
+ mi := &file_audit_proto_msgTypes[20]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use Event.ProtoReflect.Descriptor instead.
+func (*Event) Descriptor() ([]byte, []int) {
+ return file_audit_proto_rawDescGZIP(), []int{20}
+}
+
+func (x *Event) GetResource() Event_Resource {
+ if x != nil {
+ return x.Resource
+ }
+ return Event_Project
+}
+
+func (x *Event) GetOperation() Event_Operation {
+ if x != nil {
+ return x.Operation
+ }
+ return Event_Added
+}
+
+func (x *Event) GetUserId() string {
+ if x != nil {
+ return x.UserId
+ }
+ return ""
+}
+
+func (x *Event) GetOrgId() string {
+ if x != nil {
+ return x.OrgId
+ }
+ return ""
+}
+
+func (x *Event) GetIpAddress() string {
+ if x != nil {
+ return x.IpAddress
+ }
+ return ""
+}
+
+func (x *Event) GetUsername() string {
+ if x != nil {
+ return x.Username
+ }
+ return ""
+}
+
+func (x *Event) GetDescription() string {
+ if x != nil {
+ return x.Description
+ }
+ return ""
+}
+
+func (x *Event) GetMetadata() string {
+ if x != nil {
+ return x.Metadata
+ }
+ return ""
+}
+
+func (x *Event) GetTimestamp() *timestamp.Timestamp {
+ if x != nil {
+ return x.Timestamp
+ }
+ return nil
+}
+
+func (x *Event) GetOperationId() string {
+ if x != nil {
+ return x.OperationId
+ }
+ return ""
+}
+
+func (x *Event) GetResourceId() string {
+ if x != nil {
+ return x.ResourceId
+ }
+ return ""
+}
+
+func (x *Event) GetResourceName() string {
+ if x != nil {
+ return x.ResourceName
+ }
+ return ""
+}
+
+func (x *Event) GetMedium() Event_Medium {
+ if x != nil {
+ return x.Medium
+ }
+ return Event_Web
+}
+
+var File_audit_proto protoreflect.FileDescriptor
+
+var file_audit_proto_rawDesc = []byte{
+ 0x0a, 0x0b, 0x61, 0x75, 0x64, 0x69, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x11, 0x49,
+ 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41, 0x75, 0x64, 0x69, 0x74,
+ 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75,
+ 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74,
+ 0x6f, 0x1a, 0x1b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62,
+ 0x75, 0x66, 0x2f, 0x65, 0x6d, 0x70, 0x74, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x5b,
+ 0x0a, 0x0b, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x15, 0x0a,
+ 0x06, 0x6f, 0x72, 0x67, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6f,
+ 0x72, 0x67, 0x49, 0x64, 0x12, 0x35, 0x0a, 0x17, 0x61, 0x6c, 0x6c, 0x5f, 0x65, 0x76, 0x65, 0x6e,
+ 0x74, 0x73, 0x5f, 0x69, 0x6e, 0x5f, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18,
+ 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x14, 0x61, 0x6c, 0x6c, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x73,
+ 0x49, 0x6e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x40, 0x0a, 0x0c, 0x4c,
+ 0x69, 0x73, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x30, 0x0a, 0x06, 0x65,
+ 0x76, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x49, 0x6e,
+ 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41, 0x75, 0x64, 0x69, 0x74, 0x2e,
+ 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x06, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x22, 0xdf, 0x01,
+ 0x0a, 0x14, 0x50, 0x61, 0x67, 0x69, 0x6e, 0x61, 0x74, 0x65, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x52,
+ 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x15, 0x0a, 0x06, 0x6f, 0x72, 0x67, 0x5f, 0x69, 0x64,
+ 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6f, 0x72, 0x67, 0x49, 0x64, 0x12, 0x1b, 0x0a,
+ 0x09, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05,
+ 0x52, 0x08, 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x70, 0x61,
+ 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09,
+ 0x70, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x4f, 0x0a, 0x09, 0x64, 0x69, 0x72,
+ 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x31, 0x2e, 0x49,
+ 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41, 0x75, 0x64, 0x69, 0x74,
+ 0x2e, 0x50, 0x61, 0x67, 0x69, 0x6e, 0x61, 0x74, 0x65, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65,
+ 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52,
+ 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x23, 0x0a, 0x09, 0x44, 0x69,
+ 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x08, 0x0a, 0x04, 0x4e, 0x45, 0x58, 0x54, 0x10,
+ 0x00, 0x12, 0x0c, 0x0a, 0x08, 0x50, 0x52, 0x45, 0x56, 0x49, 0x4f, 0x55, 0x53, 0x10, 0x01, 0x22,
+ 0xa1, 0x01, 0x0a, 0x15, 0x50, 0x61, 0x67, 0x69, 0x6e, 0x61, 0x74, 0x65, 0x64, 0x4c, 0x69, 0x73,
+ 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x30, 0x0a, 0x06, 0x65, 0x76, 0x65,
+ 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x49, 0x6e, 0x74, 0x65,
+ 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41, 0x75, 0x64, 0x69, 0x74, 0x2e, 0x45, 0x76,
+ 0x65, 0x6e, 0x74, 0x52, 0x06, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x26, 0x0a, 0x0f, 0x6e,
+ 0x65, 0x78, 0x74, 0x5f, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x02,
+ 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x6e, 0x65, 0x78, 0x74, 0x50, 0x61, 0x67, 0x65, 0x54, 0x6f,
+ 0x6b, 0x65, 0x6e, 0x12, 0x2e, 0x0a, 0x13, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f,
+ 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09,
+ 0x52, 0x11, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x50, 0x61, 0x67, 0x65, 0x54, 0x6f,
+ 0x6b, 0x65, 0x6e, 0x22, 0xe1, 0x01, 0x0a, 0x15, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x74, 0x72, 0x65,
+ 0x61, 0x6d, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x15, 0x0a,
+ 0x06, 0x6f, 0x72, 0x67, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6f,
+ 0x72, 0x67, 0x49, 0x64, 0x12, 0x1b, 0x0a, 0x09, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x69, 0x7a,
+ 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a,
+ 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18,
+ 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x70, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e,
+ 0x12, 0x50, 0x0a, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20,
+ 0x01, 0x28, 0x0e, 0x32, 0x32, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70,
+ 0x69, 0x2e, 0x41, 0x75, 0x64, 0x69, 0x74, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x74, 0x72, 0x65,
+ 0x61, 0x6d, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x44, 0x69,
+ 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69,
+ 0x6f, 0x6e, 0x22, 0x23, 0x0a, 0x09, 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12,
+ 0x08, 0x0a, 0x04, 0x4e, 0x45, 0x58, 0x54, 0x10, 0x00, 0x12, 0x0c, 0x0a, 0x08, 0x50, 0x52, 0x45,
+ 0x56, 0x49, 0x4f, 0x55, 0x53, 0x10, 0x01, 0x22, 0xaf, 0x01, 0x0a, 0x16, 0x4c, 0x69, 0x73, 0x74,
+ 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
+ 0x73, 0x65, 0x12, 0x3d, 0x0a, 0x0b, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x5f, 0x6c, 0x6f, 0x67,
+ 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e,
+ 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41, 0x75, 0x64, 0x69, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65,
+ 0x61, 0x6d, 0x4c, 0x6f, 0x67, 0x52, 0x0a, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x4c, 0x6f, 0x67,
+ 0x73, 0x12, 0x26, 0x0a, 0x0f, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74,
+ 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x6e, 0x65, 0x78, 0x74,
+ 0x50, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x2e, 0x0a, 0x13, 0x70, 0x72, 0x65,
+ 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e,
+ 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73,
+ 0x50, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x22, 0xc2, 0x02, 0x0a, 0x09, 0x53, 0x74,
+ 0x72, 0x65, 0x61, 0x6d, 0x4c, 0x6f, 0x67, 0x12, 0x38, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73,
+ 0x74, 0x61, 0x6d, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f,
+ 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d,
+ 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d,
+ 0x70, 0x12, 0x23, 0x0a, 0x0d, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61,
+ 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x4d,
+ 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x73,
+ 0x69, 0x7a, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x53,
+ 0x69, 0x7a, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65,
+ 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65,
+ 0x12, 0x4e, 0x0a, 0x15, 0x66, 0x69, 0x72, 0x73, 0x74, 0x5f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f,
+ 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32,
+ 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75,
+ 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x13, 0x66, 0x69, 0x72,
+ 0x73, 0x74, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70,
+ 0x12, 0x4c, 0x0a, 0x14, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x74,
+ 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a,
+ 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66,
+ 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x12, 0x6c, 0x61, 0x73, 0x74,
+ 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x22, 0xd7,
+ 0x01, 0x0a, 0x06, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x12, 0x15, 0x0a, 0x06, 0x6f, 0x72, 0x67,
+ 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6f, 0x72, 0x67, 0x49, 0x64,
+ 0x12, 0x3d, 0x0a, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01,
+ 0x28, 0x0e, 0x32, 0x21, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69,
+ 0x2e, 0x41, 0x75, 0x64, 0x69, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x50, 0x72, 0x6f,
+ 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x12,
+ 0x37, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32,
+ 0x1f, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41, 0x75,
+ 0x64, 0x69, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73,
+ 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x3e, 0x0a, 0x09, 0x73, 0x33, 0x5f, 0x63,
+ 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x49, 0x6e,
+ 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41, 0x75, 0x64, 0x69, 0x74, 0x2e,
+ 0x53, 0x33, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x08,
+ 0x73, 0x33, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0x9b, 0x02, 0x0a, 0x08, 0x45, 0x64, 0x69,
+ 0x74, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64,
+ 0x5f, 0x61, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67,
+ 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65,
+ 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74,
+ 0x12, 0x39, 0x0a, 0x0a, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x02,
+ 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72,
+ 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70,
+ 0x52, 0x09, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x4a, 0x0a, 0x13, 0x61,
+ 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x5f, 0x74, 0x6f, 0x67, 0x67, 0x6c, 0x65, 0x64, 0x5f,
+ 0x61, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
+ 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73,
+ 0x74, 0x61, 0x6d, 0x70, 0x52, 0x11, 0x61, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x54, 0x6f,
+ 0x67, 0x67, 0x6c, 0x65, 0x64, 0x41, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x75, 0x70, 0x64, 0x61, 0x74,
+ 0x65, 0x64, 0x5f, 0x62, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x75, 0x70, 0x64,
+ 0x61, 0x74, 0x65, 0x64, 0x42, 0x79, 0x12, 0x2e, 0x0a, 0x13, 0x61, 0x63, 0x74, 0x69, 0x76, 0x69,
+ 0x74, 0x79, 0x5f, 0x74, 0x6f, 0x67, 0x67, 0x6c, 0x65, 0x64, 0x5f, 0x62, 0x79, 0x18, 0x05, 0x20,
+ 0x01, 0x28, 0x09, 0x52, 0x11, 0x61, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x54, 0x6f, 0x67,
+ 0x67, 0x6c, 0x65, 0x64, 0x42, 0x79, 0x22, 0xeb, 0x01, 0x0a, 0x0e, 0x53, 0x33, 0x53, 0x74, 0x72,
+ 0x65, 0x61, 0x6d, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x16, 0x0a, 0x06, 0x62, 0x75, 0x63,
+ 0x6b, 0x65, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x62, 0x75, 0x63, 0x6b, 0x65,
+ 0x74, 0x12, 0x15, 0x0a, 0x06, 0x6b, 0x65, 0x79, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28,
+ 0x09, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x6b, 0x65, 0x79, 0x5f,
+ 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6b, 0x65,
+ 0x79, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x6f, 0x73, 0x74, 0x18,
+ 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x68, 0x6f, 0x73, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x72,
+ 0x65, 0x67, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x72, 0x65, 0x67,
+ 0x69, 0x6f, 0x6e, 0x12, 0x3a, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28,
+ 0x0e, 0x32, 0x26, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e,
+ 0x41, 0x75, 0x64, 0x69, 0x74, 0x2e, 0x53, 0x33, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x43, 0x6f,
+ 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22,
+ 0x23, 0x0a, 0x04, 0x54, 0x79, 0x70, 0x65, 0x12, 0x08, 0x0a, 0x04, 0x55, 0x53, 0x45, 0x52, 0x10,
+ 0x00, 0x12, 0x11, 0x0a, 0x0d, 0x49, 0x4e, 0x53, 0x54, 0x41, 0x4e, 0x43, 0x45, 0x5f, 0x52, 0x4f,
+ 0x4c, 0x45, 0x10, 0x01, 0x22, 0x46, 0x0a, 0x11, 0x54, 0x65, 0x73, 0x74, 0x53, 0x74, 0x72, 0x65,
+ 0x61, 0x6d, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x31, 0x0a, 0x06, 0x73, 0x74, 0x72,
+ 0x65, 0x61, 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x49, 0x6e, 0x74, 0x65,
+ 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41, 0x75, 0x64, 0x69, 0x74, 0x2e, 0x53, 0x74,
+ 0x72, 0x65, 0x61, 0x6d, 0x52, 0x06, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x22, 0x48, 0x0a, 0x12,
+ 0x54, 0x65, 0x73, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
+ 0x73, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x18, 0x01, 0x20,
+ 0x01, 0x28, 0x08, 0x52, 0x07, 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x12, 0x18, 0x0a, 0x07,
+ 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d,
+ 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x61, 0x0a, 0x13, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65,
+ 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x31, 0x0a,
+ 0x06, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e,
+ 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41, 0x75, 0x64, 0x69,
+ 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52, 0x06, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d,
+ 0x12, 0x17, 0x0a, 0x07, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28,
+ 0x09, 0x52, 0x06, 0x75, 0x73, 0x65, 0x72, 0x49, 0x64, 0x22, 0x7a, 0x0a, 0x14, 0x43, 0x72, 0x65,
+ 0x61, 0x74, 0x65, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
+ 0x65, 0x12, 0x31, 0x0a, 0x06, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28,
+ 0x0b, 0x32, 0x19, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e,
+ 0x41, 0x75, 0x64, 0x69, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52, 0x06, 0x73, 0x74,
+ 0x72, 0x65, 0x61, 0x6d, 0x12, 0x2f, 0x0a, 0x04, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01,
+ 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69,
+ 0x2e, 0x41, 0x75, 0x64, 0x69, 0x74, 0x2e, 0x45, 0x64, 0x69, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x52,
+ 0x04, 0x6d, 0x65, 0x74, 0x61, 0x22, 0x2e, 0x0a, 0x15, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x62,
+ 0x65, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x15,
+ 0x0a, 0x06, 0x6f, 0x72, 0x67, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05,
+ 0x6f, 0x72, 0x67, 0x49, 0x64, 0x22, 0x7c, 0x0a, 0x16, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x62,
+ 0x65, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12,
+ 0x31, 0x0a, 0x06, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32,
+ 0x19, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41, 0x75,
+ 0x64, 0x69, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52, 0x06, 0x73, 0x74, 0x72, 0x65,
+ 0x61, 0x6d, 0x12, 0x2f, 0x0a, 0x04, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b,
+ 0x32, 0x1b, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41,
+ 0x75, 0x64, 0x69, 0x74, 0x2e, 0x45, 0x64, 0x69, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x52, 0x04, 0x6d,
+ 0x65, 0x74, 0x61, 0x22, 0x61, 0x0a, 0x13, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x72,
+ 0x65, 0x61, 0x6d, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x31, 0x0a, 0x06, 0x73, 0x74,
+ 0x72, 0x65, 0x61, 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x49, 0x6e, 0x74,
+ 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41, 0x75, 0x64, 0x69, 0x74, 0x2e, 0x53,
+ 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52, 0x06, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x12, 0x17, 0x0a,
+ 0x07, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06,
+ 0x75, 0x73, 0x65, 0x72, 0x49, 0x64, 0x22, 0x7a, 0x0a, 0x14, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65,
+ 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x31,
+ 0x0a, 0x06, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19,
+ 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41, 0x75, 0x64,
+ 0x69, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52, 0x06, 0x73, 0x74, 0x72, 0x65, 0x61,
+ 0x6d, 0x12, 0x2f, 0x0a, 0x04, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32,
+ 0x1b, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41, 0x75,
+ 0x64, 0x69, 0x74, 0x2e, 0x45, 0x64, 0x69, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x52, 0x04, 0x6d, 0x65,
+ 0x74, 0x61, 0x22, 0x2d, 0x0a, 0x14, 0x44, 0x65, 0x73, 0x74, 0x72, 0x6f, 0x79, 0x53, 0x74, 0x72,
+ 0x65, 0x61, 0x6d, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x15, 0x0a, 0x06, 0x6f, 0x72,
+ 0x67, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6f, 0x72, 0x67, 0x49,
+ 0x64, 0x22, 0x80, 0x01, 0x0a, 0x15, 0x53, 0x65, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x53,
+ 0x74, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x15, 0x0a, 0x06, 0x6f,
+ 0x72, 0x67, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6f, 0x72, 0x67,
+ 0x49, 0x64, 0x12, 0x37, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x02, 0x20, 0x01,
+ 0x28, 0x0e, 0x32, 0x1f, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69,
+ 0x2e, 0x41, 0x75, 0x64, 0x69, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x53, 0x74, 0x61,
+ 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x17, 0x0a, 0x07, 0x75,
+ 0x73, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x75, 0x73,
+ 0x65, 0x72, 0x49, 0x64, 0x22, 0xa4, 0x08, 0x0a, 0x05, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x3d,
+ 0x0a, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e,
+ 0x32, 0x21, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41,
+ 0x75, 0x64, 0x69, 0x74, 0x2e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75,
+ 0x72, 0x63, 0x65, 0x52, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x40, 0x0a,
+ 0x09, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e,
+ 0x32, 0x22, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41,
+ 0x75, 0x64, 0x69, 0x74, 0x2e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x4f, 0x70, 0x65, 0x72, 0x61,
+ 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12,
+ 0x17, 0x0a, 0x07, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09,
+ 0x52, 0x06, 0x75, 0x73, 0x65, 0x72, 0x49, 0x64, 0x12, 0x15, 0x0a, 0x06, 0x6f, 0x72, 0x67, 0x5f,
+ 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6f, 0x72, 0x67, 0x49, 0x64, 0x12,
+ 0x1d, 0x0a, 0x0a, 0x69, 0x70, 0x5f, 0x61, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x18, 0x05, 0x20,
+ 0x01, 0x28, 0x09, 0x52, 0x09, 0x69, 0x70, 0x41, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x12, 0x1a,
+ 0x0a, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09,
+ 0x52, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65,
+ 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52,
+ 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1a, 0x0a, 0x08,
+ 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08,
+ 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x38, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65,
+ 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f,
+ 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69,
+ 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61,
+ 0x6d, 0x70, 0x12, 0x21, 0x0a, 0x0c, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f,
+ 0x69, 0x64, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74,
+ 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x1f, 0x0a, 0x0b, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63,
+ 0x65, 0x5f, 0x69, 0x64, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x72, 0x65, 0x73, 0x6f,
+ 0x75, 0x72, 0x63, 0x65, 0x49, 0x64, 0x12, 0x23, 0x0a, 0x0d, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72,
+ 0x63, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x72,
+ 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x37, 0x0a, 0x06, 0x6d,
+ 0x65, 0x64, 0x69, 0x75, 0x6d, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1f, 0x2e, 0x49, 0x6e,
+ 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41, 0x75, 0x64, 0x69, 0x74, 0x2e,
+ 0x45, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x4d, 0x65, 0x64, 0x69, 0x75, 0x6d, 0x52, 0x06, 0x6d, 0x65,
+ 0x64, 0x69, 0x75, 0x6d, 0x22, 0xdd, 0x02, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63,
+ 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x10, 0x00, 0x12, 0x08,
+ 0x0a, 0x04, 0x55, 0x73, 0x65, 0x72, 0x10, 0x01, 0x12, 0x0c, 0x0a, 0x08, 0x57, 0x6f, 0x72, 0x6b,
+ 0x66, 0x6c, 0x6f, 0x77, 0x10, 0x02, 0x12, 0x0c, 0x0a, 0x08, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69,
+ 0x6e, 0x65, 0x10, 0x03, 0x12, 0x10, 0x0a, 0x0c, 0x44, 0x65, 0x62, 0x75, 0x67, 0x53, 0x65, 0x73,
+ 0x73, 0x69, 0x6f, 0x6e, 0x10, 0x04, 0x12, 0x15, 0x0a, 0x11, 0x50, 0x65, 0x72, 0x69, 0x6f, 0x64,
+ 0x69, 0x63, 0x53, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x65, 0x72, 0x10, 0x05, 0x12, 0x0a, 0x0a,
+ 0x06, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x10, 0x06, 0x12, 0x10, 0x0a, 0x0c, 0x4e, 0x6f, 0x74,
+ 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x10, 0x07, 0x12, 0x0d, 0x0a, 0x09, 0x44,
+ 0x61, 0x73, 0x68, 0x62, 0x6f, 0x61, 0x72, 0x64, 0x10, 0x08, 0x12, 0x07, 0x0a, 0x03, 0x4a, 0x6f,
+ 0x62, 0x10, 0x09, 0x12, 0x0c, 0x0a, 0x08, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x10,
+ 0x0a, 0x12, 0x10, 0x0a, 0x0c, 0x4f, 0x72, 0x67, 0x61, 0x6e, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f,
+ 0x6e, 0x10, 0x0b, 0x12, 0x17, 0x0a, 0x13, 0x53, 0x65, 0x6c, 0x66, 0x48, 0x6f, 0x73, 0x74, 0x65,
+ 0x64, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x10, 0x0c, 0x12, 0x13, 0x0a, 0x0f,
+ 0x53, 0x65, 0x6c, 0x66, 0x48, 0x6f, 0x73, 0x74, 0x65, 0x64, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x10,
+ 0x0d, 0x12, 0x13, 0x0a, 0x0f, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x44, 0x61, 0x73, 0x68, 0x62,
+ 0x6f, 0x61, 0x72, 0x64, 0x10, 0x0e, 0x12, 0x17, 0x0a, 0x13, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d,
+ 0x44, 0x61, 0x73, 0x68, 0x62, 0x6f, 0x61, 0x72, 0x64, 0x49, 0x74, 0x65, 0x6d, 0x10, 0x0f, 0x12,
+ 0x1b, 0x0a, 0x17, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x49, 0x6e, 0x73, 0x69, 0x67, 0x68,
+ 0x74, 0x73, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x10, 0x10, 0x12, 0x08, 0x0a, 0x04,
+ 0x4f, 0x6b, 0x74, 0x61, 0x10, 0x11, 0x12, 0x0e, 0x0a, 0x0a, 0x46, 0x6c, 0x61, 0x6b, 0x79, 0x54,
+ 0x65, 0x73, 0x74, 0x73, 0x10, 0x12, 0x12, 0x0c, 0x0a, 0x08, 0x52, 0x42, 0x41, 0x43, 0x52, 0x6f,
+ 0x6c, 0x65, 0x10, 0x13, 0x22, 0x8f, 0x01, 0x0a, 0x09, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69,
+ 0x6f, 0x6e, 0x12, 0x09, 0x0a, 0x05, 0x41, 0x64, 0x64, 0x65, 0x64, 0x10, 0x00, 0x12, 0x0b, 0x0a,
+ 0x07, 0x52, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x64, 0x10, 0x01, 0x12, 0x0c, 0x0a, 0x08, 0x4d, 0x6f,
+ 0x64, 0x69, 0x66, 0x69, 0x65, 0x64, 0x10, 0x02, 0x12, 0x0b, 0x0a, 0x07, 0x53, 0x74, 0x61, 0x72,
+ 0x74, 0x65, 0x64, 0x10, 0x03, 0x12, 0x0b, 0x0a, 0x07, 0x53, 0x74, 0x6f, 0x70, 0x70, 0x65, 0x64,
+ 0x10, 0x04, 0x12, 0x0c, 0x0a, 0x08, 0x50, 0x72, 0x6f, 0x6d, 0x6f, 0x74, 0x65, 0x64, 0x10, 0x05,
+ 0x12, 0x0b, 0x0a, 0x07, 0x44, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x64, 0x10, 0x06, 0x12, 0x0b, 0x0a,
+ 0x07, 0x52, 0x65, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x10, 0x07, 0x12, 0x0c, 0x0a, 0x08, 0x44, 0x6f,
+ 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x10, 0x08, 0x12, 0x0c, 0x0a, 0x08, 0x44, 0x69, 0x73, 0x61,
+ 0x62, 0x6c, 0x65, 0x64, 0x10, 0x09, 0x22, 0x23, 0x0a, 0x06, 0x4d, 0x65, 0x64, 0x69, 0x75, 0x6d,
+ 0x12, 0x07, 0x0a, 0x03, 0x57, 0x65, 0x62, 0x10, 0x00, 0x12, 0x07, 0x0a, 0x03, 0x41, 0x50, 0x49,
+ 0x10, 0x01, 0x12, 0x07, 0x0a, 0x03, 0x43, 0x4c, 0x49, 0x10, 0x02, 0x2a, 0x18, 0x0a, 0x0e, 0x53,
+ 0x74, 0x72, 0x65, 0x61, 0x6d, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x12, 0x06, 0x0a,
+ 0x02, 0x53, 0x33, 0x10, 0x00, 0x2a, 0x26, 0x0a, 0x0c, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x53,
+ 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0a, 0x0a, 0x06, 0x41, 0x43, 0x54, 0x49, 0x56, 0x45, 0x10,
+ 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x50, 0x41, 0x55, 0x53, 0x45, 0x44, 0x10, 0x01, 0x32, 0xcc, 0x06,
+ 0x0a, 0x0c, 0x41, 0x75, 0x64, 0x69, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x47,
+ 0x0a, 0x04, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x1e, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61,
+ 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41, 0x75, 0x64, 0x69, 0x74, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x52,
+ 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61,
+ 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41, 0x75, 0x64, 0x69, 0x74, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x52,
+ 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x62, 0x0a, 0x0d, 0x50, 0x61, 0x67, 0x69, 0x6e,
+ 0x61, 0x74, 0x65, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x27, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72,
+ 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41, 0x75, 0x64, 0x69, 0x74, 0x2e, 0x50, 0x61, 0x67,
+ 0x69, 0x6e, 0x61, 0x74, 0x65, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
+ 0x74, 0x1a, 0x28, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e,
+ 0x41, 0x75, 0x64, 0x69, 0x74, 0x2e, 0x50, 0x61, 0x67, 0x69, 0x6e, 0x61, 0x74, 0x65, 0x64, 0x4c,
+ 0x69, 0x73, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x59, 0x0a, 0x0a, 0x54,
+ 0x65, 0x73, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x12, 0x24, 0x2e, 0x49, 0x6e, 0x74, 0x65,
+ 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41, 0x75, 0x64, 0x69, 0x74, 0x2e, 0x54, 0x65,
+ 0x73, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a,
+ 0x25, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41, 0x75,
+ 0x64, 0x69, 0x74, 0x2e, 0x54, 0x65, 0x73, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52, 0x65,
+ 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x5f, 0x0a, 0x0c, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65,
+ 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x12, 0x26, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61,
+ 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41, 0x75, 0x64, 0x69, 0x74, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74,
+ 0x65, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x27,
+ 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41, 0x75, 0x64,
+ 0x69, 0x74, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52,
+ 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x65, 0x0a, 0x0e, 0x44, 0x65, 0x73, 0x63, 0x72,
+ 0x69, 0x62, 0x65, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x12, 0x28, 0x2e, 0x49, 0x6e, 0x74, 0x65,
+ 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41, 0x75, 0x64, 0x69, 0x74, 0x2e, 0x44, 0x65,
+ 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52, 0x65, 0x71, 0x75,
+ 0x65, 0x73, 0x74, 0x1a, 0x29, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70,
+ 0x69, 0x2e, 0x41, 0x75, 0x64, 0x69, 0x74, 0x2e, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65,
+ 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x5f,
+ 0x0a, 0x0c, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x12, 0x26,
+ 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41, 0x75, 0x64,
+ 0x69, 0x74, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52,
+ 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x27, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61,
+ 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41, 0x75, 0x64, 0x69, 0x74, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74,
+ 0x65, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12,
+ 0x50, 0x0a, 0x0d, 0x44, 0x65, 0x73, 0x74, 0x72, 0x6f, 0x79, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d,
+ 0x12, 0x27, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41,
+ 0x75, 0x64, 0x69, 0x74, 0x2e, 0x44, 0x65, 0x73, 0x74, 0x72, 0x6f, 0x79, 0x53, 0x74, 0x72, 0x65,
+ 0x61, 0x6d, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67,
+ 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74,
+ 0x79, 0x12, 0x52, 0x0a, 0x0e, 0x53, 0x65, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x53, 0x74,
+ 0x61, 0x74, 0x65, 0x12, 0x28, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70,
+ 0x69, 0x2e, 0x41, 0x75, 0x64, 0x69, 0x74, 0x2e, 0x53, 0x65, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61,
+ 0x6d, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e,
+ 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e,
+ 0x45, 0x6d, 0x70, 0x74, 0x79, 0x12, 0x65, 0x0a, 0x0e, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x74, 0x72,
+ 0x65, 0x61, 0x6d, 0x4c, 0x6f, 0x67, 0x73, 0x12, 0x28, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e,
+ 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e, 0x41, 0x75, 0x64, 0x69, 0x74, 0x2e, 0x4c, 0x69, 0x73, 0x74,
+ 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
+ 0x74, 0x1a, 0x29, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x70, 0x69, 0x2e,
+ 0x41, 0x75, 0x64, 0x69, 0x74, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d,
+ 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x40, 0x5a, 0x3e,
+ 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x72, 0x65, 0x6e, 0x64, 0x65,
+ 0x72, 0x65, 0x64, 0x74, 0x65, 0x78, 0x74, 0x2f, 0x61, 0x6c, 0x6c, 0x65, 0x73, 0x2f, 0x73, 0x65,
+ 0x6c, 0x66, 0x5f, 0x68, 0x6f, 0x73, 0x74, 0x65, 0x64, 0x5f, 0x68, 0x75, 0x62, 0x2f, 0x70, 0x6b,
+ 0x67, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x61, 0x75, 0x64, 0x69, 0x74, 0x62, 0x06,
+ 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
+}
+
+var (
+ file_audit_proto_rawDescOnce sync.Once
+ file_audit_proto_rawDescData = file_audit_proto_rawDesc
+)
+
+func file_audit_proto_rawDescGZIP() []byte {
+ file_audit_proto_rawDescOnce.Do(func() {
+ file_audit_proto_rawDescData = protoimpl.X.CompressGZIP(file_audit_proto_rawDescData)
+ })
+ return file_audit_proto_rawDescData
+}
+
+var file_audit_proto_enumTypes = make([]protoimpl.EnumInfo, 8)
+var file_audit_proto_msgTypes = make([]protoimpl.MessageInfo, 21)
+var file_audit_proto_goTypes = []any{
+ (StreamProvider)(0), // 0: InternalApi.Audit.StreamProvider
+ (StreamStatus)(0), // 1: InternalApi.Audit.StreamStatus
+ (PaginatedListRequest_Direction)(0), // 2: InternalApi.Audit.PaginatedListRequest.Direction
+ (ListStreamLogsRequest_Direction)(0), // 3: InternalApi.Audit.ListStreamLogsRequest.Direction
+ (S3StreamConfig_Type)(0), // 4: InternalApi.Audit.S3StreamConfig.Type
+ (Event_Resource)(0), // 5: InternalApi.Audit.Event.Resource
+ (Event_Operation)(0), // 6: InternalApi.Audit.Event.Operation
+ (Event_Medium)(0), // 7: InternalApi.Audit.Event.Medium
+ (*ListRequest)(nil), // 8: InternalApi.Audit.ListRequest
+ (*ListResponse)(nil), // 9: InternalApi.Audit.ListResponse
+ (*PaginatedListRequest)(nil), // 10: InternalApi.Audit.PaginatedListRequest
+ (*PaginatedListResponse)(nil), // 11: InternalApi.Audit.PaginatedListResponse
+ (*ListStreamLogsRequest)(nil), // 12: InternalApi.Audit.ListStreamLogsRequest
+ (*ListStreamLogsResponse)(nil), // 13: InternalApi.Audit.ListStreamLogsResponse
+ (*StreamLog)(nil), // 14: InternalApi.Audit.StreamLog
+ (*Stream)(nil), // 15: InternalApi.Audit.Stream
+ (*EditMeta)(nil), // 16: InternalApi.Audit.EditMeta
+ (*S3StreamConfig)(nil), // 17: InternalApi.Audit.S3StreamConfig
+ (*TestStreamRequest)(nil), // 18: InternalApi.Audit.TestStreamRequest
+ (*TestStreamResponse)(nil), // 19: InternalApi.Audit.TestStreamResponse
+ (*CreateStreamRequest)(nil), // 20: InternalApi.Audit.CreateStreamRequest
+ (*CreateStreamResponse)(nil), // 21: InternalApi.Audit.CreateStreamResponse
+ (*DescribeStreamRequest)(nil), // 22: InternalApi.Audit.DescribeStreamRequest
+ (*DescribeStreamResponse)(nil), // 23: InternalApi.Audit.DescribeStreamResponse
+ (*UpdateStreamRequest)(nil), // 24: InternalApi.Audit.UpdateStreamRequest
+ (*UpdateStreamResponse)(nil), // 25: InternalApi.Audit.UpdateStreamResponse
+ (*DestroyStreamRequest)(nil), // 26: InternalApi.Audit.DestroyStreamRequest
+ (*SetStreamStateRequest)(nil), // 27: InternalApi.Audit.SetStreamStateRequest
+ (*Event)(nil), // 28: InternalApi.Audit.Event
+ (*timestamp.Timestamp)(nil), // 29: google.protobuf.Timestamp
+ (*empty.Empty)(nil), // 30: google.protobuf.Empty
+}
+var file_audit_proto_depIdxs = []int32{
+ 28, // 0: InternalApi.Audit.ListResponse.events:type_name -> InternalApi.Audit.Event
+ 2, // 1: InternalApi.Audit.PaginatedListRequest.direction:type_name -> InternalApi.Audit.PaginatedListRequest.Direction
+ 28, // 2: InternalApi.Audit.PaginatedListResponse.events:type_name -> InternalApi.Audit.Event
+ 3, // 3: InternalApi.Audit.ListStreamLogsRequest.direction:type_name -> InternalApi.Audit.ListStreamLogsRequest.Direction
+ 14, // 4: InternalApi.Audit.ListStreamLogsResponse.stream_logs:type_name -> InternalApi.Audit.StreamLog
+ 29, // 5: InternalApi.Audit.StreamLog.timestamp:type_name -> google.protobuf.Timestamp
+ 29, // 6: InternalApi.Audit.StreamLog.first_event_timestamp:type_name -> google.protobuf.Timestamp
+ 29, // 7: InternalApi.Audit.StreamLog.last_event_timestamp:type_name -> google.protobuf.Timestamp
+ 0, // 8: InternalApi.Audit.Stream.provider:type_name -> InternalApi.Audit.StreamProvider
+ 1, // 9: InternalApi.Audit.Stream.status:type_name -> InternalApi.Audit.StreamStatus
+ 17, // 10: InternalApi.Audit.Stream.s3_config:type_name -> InternalApi.Audit.S3StreamConfig
+ 29, // 11: InternalApi.Audit.EditMeta.created_at:type_name -> google.protobuf.Timestamp
+ 29, // 12: InternalApi.Audit.EditMeta.updated_at:type_name -> google.protobuf.Timestamp
+ 29, // 13: InternalApi.Audit.EditMeta.activity_toggled_at:type_name -> google.protobuf.Timestamp
+ 4, // 14: InternalApi.Audit.S3StreamConfig.type:type_name -> InternalApi.Audit.S3StreamConfig.Type
+ 15, // 15: InternalApi.Audit.TestStreamRequest.stream:type_name -> InternalApi.Audit.Stream
+ 15, // 16: InternalApi.Audit.CreateStreamRequest.stream:type_name -> InternalApi.Audit.Stream
+ 15, // 17: InternalApi.Audit.CreateStreamResponse.stream:type_name -> InternalApi.Audit.Stream
+ 16, // 18: InternalApi.Audit.CreateStreamResponse.meta:type_name -> InternalApi.Audit.EditMeta
+ 15, // 19: InternalApi.Audit.DescribeStreamResponse.stream:type_name -> InternalApi.Audit.Stream
+ 16, // 20: InternalApi.Audit.DescribeStreamResponse.meta:type_name -> InternalApi.Audit.EditMeta
+ 15, // 21: InternalApi.Audit.UpdateStreamRequest.stream:type_name -> InternalApi.Audit.Stream
+ 15, // 22: InternalApi.Audit.UpdateStreamResponse.stream:type_name -> InternalApi.Audit.Stream
+ 16, // 23: InternalApi.Audit.UpdateStreamResponse.meta:type_name -> InternalApi.Audit.EditMeta
+ 1, // 24: InternalApi.Audit.SetStreamStateRequest.status:type_name -> InternalApi.Audit.StreamStatus
+ 5, // 25: InternalApi.Audit.Event.resource:type_name -> InternalApi.Audit.Event.Resource
+ 6, // 26: InternalApi.Audit.Event.operation:type_name -> InternalApi.Audit.Event.Operation
+ 29, // 27: InternalApi.Audit.Event.timestamp:type_name -> google.protobuf.Timestamp
+ 7, // 28: InternalApi.Audit.Event.medium:type_name -> InternalApi.Audit.Event.Medium
+ 8, // 29: InternalApi.Audit.AuditService.List:input_type -> InternalApi.Audit.ListRequest
+ 10, // 30: InternalApi.Audit.AuditService.PaginatedList:input_type -> InternalApi.Audit.PaginatedListRequest
+ 18, // 31: InternalApi.Audit.AuditService.TestStream:input_type -> InternalApi.Audit.TestStreamRequest
+ 20, // 32: InternalApi.Audit.AuditService.CreateStream:input_type -> InternalApi.Audit.CreateStreamRequest
+ 22, // 33: InternalApi.Audit.AuditService.DescribeStream:input_type -> InternalApi.Audit.DescribeStreamRequest
+ 24, // 34: InternalApi.Audit.AuditService.UpdateStream:input_type -> InternalApi.Audit.UpdateStreamRequest
+ 26, // 35: InternalApi.Audit.AuditService.DestroyStream:input_type -> InternalApi.Audit.DestroyStreamRequest
+ 27, // 36: InternalApi.Audit.AuditService.SetStreamState:input_type -> InternalApi.Audit.SetStreamStateRequest
+ 12, // 37: InternalApi.Audit.AuditService.ListStreamLogs:input_type -> InternalApi.Audit.ListStreamLogsRequest
+ 9, // 38: InternalApi.Audit.AuditService.List:output_type -> InternalApi.Audit.ListResponse
+ 11, // 39: InternalApi.Audit.AuditService.PaginatedList:output_type -> InternalApi.Audit.PaginatedListResponse
+ 19, // 40: InternalApi.Audit.AuditService.TestStream:output_type -> InternalApi.Audit.TestStreamResponse
+ 21, // 41: InternalApi.Audit.AuditService.CreateStream:output_type -> InternalApi.Audit.CreateStreamResponse
+ 23, // 42: InternalApi.Audit.AuditService.DescribeStream:output_type -> InternalApi.Audit.DescribeStreamResponse
+ 25, // 43: InternalApi.Audit.AuditService.UpdateStream:output_type -> InternalApi.Audit.UpdateStreamResponse
+ 30, // 44: InternalApi.Audit.AuditService.DestroyStream:output_type -> google.protobuf.Empty
+ 30, // 45: InternalApi.Audit.AuditService.SetStreamState:output_type -> google.protobuf.Empty
+ 13, // 46: InternalApi.Audit.AuditService.ListStreamLogs:output_type -> InternalApi.Audit.ListStreamLogsResponse
+ 38, // [38:47] is the sub-list for method output_type
+ 29, // [29:38] is the sub-list for method input_type
+ 29, // [29:29] is the sub-list for extension type_name
+ 29, // [29:29] is the sub-list for extension extendee
+ 0, // [0:29] is the sub-list for field type_name
+}
+
+func init() { file_audit_proto_init() }
+func file_audit_proto_init() {
+ if File_audit_proto != nil {
+ return
+ }
+ type x struct{}
+ out := protoimpl.TypeBuilder{
+ File: protoimpl.DescBuilder{
+ GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
+ RawDescriptor: file_audit_proto_rawDesc,
+ NumEnums: 8,
+ NumMessages: 21,
+ NumExtensions: 0,
+ NumServices: 1,
+ },
+ GoTypes: file_audit_proto_goTypes,
+ DependencyIndexes: file_audit_proto_depIdxs,
+ EnumInfos: file_audit_proto_enumTypes,
+ MessageInfos: file_audit_proto_msgTypes,
+ }.Build()
+ File_audit_proto = out.File
+ file_audit_proto_rawDesc = nil
+ file_audit_proto_goTypes = nil
+ file_audit_proto_depIdxs = nil
+}
diff --git a/public-api-gateway/protos/audit/audit_grpc.pb.go b/public-api-gateway/protos/audit/audit_grpc.pb.go
new file mode 100644
index 000000000..2a7ae4e4d
--- /dev/null
+++ b/public-api-gateway/protos/audit/audit_grpc.pb.go
@@ -0,0 +1,428 @@
+// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
+// versions:
+// - protoc-gen-go-grpc v1.5.1
+// - protoc v3.3.0
+// source: audit.proto
+
+package audit
+
+import (
+ context "context"
+ empty "github.com/golang/protobuf/ptypes/empty"
+ grpc "google.golang.org/grpc"
+ codes "google.golang.org/grpc/codes"
+ status "google.golang.org/grpc/status"
+)
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the grpc package it is being compiled against.
+// Requires gRPC-Go v1.64.0 or later.
+const _ = grpc.SupportPackageIsVersion9
+
+const (
+ AuditService_List_FullMethodName = "/InternalApi.Audit.AuditService/List"
+ AuditService_PaginatedList_FullMethodName = "/InternalApi.Audit.AuditService/PaginatedList"
+ AuditService_TestStream_FullMethodName = "/InternalApi.Audit.AuditService/TestStream"
+ AuditService_CreateStream_FullMethodName = "/InternalApi.Audit.AuditService/CreateStream"
+ AuditService_DescribeStream_FullMethodName = "/InternalApi.Audit.AuditService/DescribeStream"
+ AuditService_UpdateStream_FullMethodName = "/InternalApi.Audit.AuditService/UpdateStream"
+ AuditService_DestroyStream_FullMethodName = "/InternalApi.Audit.AuditService/DestroyStream"
+ AuditService_SetStreamState_FullMethodName = "/InternalApi.Audit.AuditService/SetStreamState"
+ AuditService_ListStreamLogs_FullMethodName = "/InternalApi.Audit.AuditService/ListStreamLogs"
+)
+
+// AuditServiceClient is the client API for AuditService service.
+//
+// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
+type AuditServiceClient interface {
+ List(ctx context.Context, in *ListRequest, opts ...grpc.CallOption) (*ListResponse, error)
+ PaginatedList(ctx context.Context, in *PaginatedListRequest, opts ...grpc.CallOption) (*PaginatedListResponse, error)
+ // CRUD for config of Audit log exporting
+ TestStream(ctx context.Context, in *TestStreamRequest, opts ...grpc.CallOption) (*TestStreamResponse, error)
+ CreateStream(ctx context.Context, in *CreateStreamRequest, opts ...grpc.CallOption) (*CreateStreamResponse, error)
+ DescribeStream(ctx context.Context, in *DescribeStreamRequest, opts ...grpc.CallOption) (*DescribeStreamResponse, error)
+ UpdateStream(ctx context.Context, in *UpdateStreamRequest, opts ...grpc.CallOption) (*UpdateStreamResponse, error)
+ DestroyStream(ctx context.Context, in *DestroyStreamRequest, opts ...grpc.CallOption) (*empty.Empty, error)
+ // PAUSE/RESUME for any type of Audit log exporting
+ SetStreamState(ctx context.Context, in *SetStreamStateRequest, opts ...grpc.CallOption) (*empty.Empty, error)
+ ListStreamLogs(ctx context.Context, in *ListStreamLogsRequest, opts ...grpc.CallOption) (*ListStreamLogsResponse, error)
+}
+
+type auditServiceClient struct {
+ cc grpc.ClientConnInterface
+}
+
+func NewAuditServiceClient(cc grpc.ClientConnInterface) AuditServiceClient {
+ return &auditServiceClient{cc}
+}
+
+func (c *auditServiceClient) List(ctx context.Context, in *ListRequest, opts ...grpc.CallOption) (*ListResponse, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
+ out := new(ListResponse)
+ err := c.cc.Invoke(ctx, AuditService_List_FullMethodName, in, out, cOpts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *auditServiceClient) PaginatedList(ctx context.Context, in *PaginatedListRequest, opts ...grpc.CallOption) (*PaginatedListResponse, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
+ out := new(PaginatedListResponse)
+ err := c.cc.Invoke(ctx, AuditService_PaginatedList_FullMethodName, in, out, cOpts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *auditServiceClient) TestStream(ctx context.Context, in *TestStreamRequest, opts ...grpc.CallOption) (*TestStreamResponse, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
+ out := new(TestStreamResponse)
+ err := c.cc.Invoke(ctx, AuditService_TestStream_FullMethodName, in, out, cOpts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *auditServiceClient) CreateStream(ctx context.Context, in *CreateStreamRequest, opts ...grpc.CallOption) (*CreateStreamResponse, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
+ out := new(CreateStreamResponse)
+ err := c.cc.Invoke(ctx, AuditService_CreateStream_FullMethodName, in, out, cOpts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *auditServiceClient) DescribeStream(ctx context.Context, in *DescribeStreamRequest, opts ...grpc.CallOption) (*DescribeStreamResponse, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
+ out := new(DescribeStreamResponse)
+ err := c.cc.Invoke(ctx, AuditService_DescribeStream_FullMethodName, in, out, cOpts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *auditServiceClient) UpdateStream(ctx context.Context, in *UpdateStreamRequest, opts ...grpc.CallOption) (*UpdateStreamResponse, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
+ out := new(UpdateStreamResponse)
+ err := c.cc.Invoke(ctx, AuditService_UpdateStream_FullMethodName, in, out, cOpts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *auditServiceClient) DestroyStream(ctx context.Context, in *DestroyStreamRequest, opts ...grpc.CallOption) (*empty.Empty, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
+ out := new(empty.Empty)
+ err := c.cc.Invoke(ctx, AuditService_DestroyStream_FullMethodName, in, out, cOpts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *auditServiceClient) SetStreamState(ctx context.Context, in *SetStreamStateRequest, opts ...grpc.CallOption) (*empty.Empty, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
+ out := new(empty.Empty)
+ err := c.cc.Invoke(ctx, AuditService_SetStreamState_FullMethodName, in, out, cOpts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *auditServiceClient) ListStreamLogs(ctx context.Context, in *ListStreamLogsRequest, opts ...grpc.CallOption) (*ListStreamLogsResponse, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
+ out := new(ListStreamLogsResponse)
+ err := c.cc.Invoke(ctx, AuditService_ListStreamLogs_FullMethodName, in, out, cOpts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+// AuditServiceServer is the server API for AuditService service.
+// All implementations should embed UnimplementedAuditServiceServer
+// for forward compatibility.
+type AuditServiceServer interface {
+ List(context.Context, *ListRequest) (*ListResponse, error)
+ PaginatedList(context.Context, *PaginatedListRequest) (*PaginatedListResponse, error)
+ // CRUD for config of Audit log exporting
+ TestStream(context.Context, *TestStreamRequest) (*TestStreamResponse, error)
+ CreateStream(context.Context, *CreateStreamRequest) (*CreateStreamResponse, error)
+ DescribeStream(context.Context, *DescribeStreamRequest) (*DescribeStreamResponse, error)
+ UpdateStream(context.Context, *UpdateStreamRequest) (*UpdateStreamResponse, error)
+ DestroyStream(context.Context, *DestroyStreamRequest) (*empty.Empty, error)
+ // PAUSE/RESUME for any type of Audit log exporting
+ SetStreamState(context.Context, *SetStreamStateRequest) (*empty.Empty, error)
+ ListStreamLogs(context.Context, *ListStreamLogsRequest) (*ListStreamLogsResponse, error)
+}
+
+// UnimplementedAuditServiceServer should be embedded to have
+// forward compatible implementations.
+//
+// NOTE: this should be embedded by value instead of pointer to avoid a nil
+// pointer dereference when methods are called.
+type UnimplementedAuditServiceServer struct{}
+
+func (UnimplementedAuditServiceServer) List(context.Context, *ListRequest) (*ListResponse, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method List not implemented")
+}
+func (UnimplementedAuditServiceServer) PaginatedList(context.Context, *PaginatedListRequest) (*PaginatedListResponse, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method PaginatedList not implemented")
+}
+func (UnimplementedAuditServiceServer) TestStream(context.Context, *TestStreamRequest) (*TestStreamResponse, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method TestStream not implemented")
+}
+func (UnimplementedAuditServiceServer) CreateStream(context.Context, *CreateStreamRequest) (*CreateStreamResponse, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method CreateStream not implemented")
+}
+func (UnimplementedAuditServiceServer) DescribeStream(context.Context, *DescribeStreamRequest) (*DescribeStreamResponse, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method DescribeStream not implemented")
+}
+func (UnimplementedAuditServiceServer) UpdateStream(context.Context, *UpdateStreamRequest) (*UpdateStreamResponse, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method UpdateStream not implemented")
+}
+func (UnimplementedAuditServiceServer) DestroyStream(context.Context, *DestroyStreamRequest) (*empty.Empty, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method DestroyStream not implemented")
+}
+func (UnimplementedAuditServiceServer) SetStreamState(context.Context, *SetStreamStateRequest) (*empty.Empty, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method SetStreamState not implemented")
+}
+func (UnimplementedAuditServiceServer) ListStreamLogs(context.Context, *ListStreamLogsRequest) (*ListStreamLogsResponse, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method ListStreamLogs not implemented")
+}
+func (UnimplementedAuditServiceServer) testEmbeddedByValue() {}
+
+// UnsafeAuditServiceServer may be embedded to opt out of forward compatibility for this service.
+// Use of this interface is not recommended, as added methods to AuditServiceServer will
+// result in compilation errors.
+type UnsafeAuditServiceServer interface {
+ mustEmbedUnimplementedAuditServiceServer()
+}
+
+func RegisterAuditServiceServer(s grpc.ServiceRegistrar, srv AuditServiceServer) {
+ // If the following call pancis, it indicates UnimplementedAuditServiceServer was
+ // embedded by pointer and is nil. This will cause panics if an
+ // unimplemented method is ever invoked, so we test this at initialization
+ // time to prevent it from happening at runtime later due to I/O.
+ if t, ok := srv.(interface{ testEmbeddedByValue() }); ok {
+ t.testEmbeddedByValue()
+ }
+ s.RegisterService(&AuditService_ServiceDesc, srv)
+}
+
+func _AuditService_List_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(ListRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(AuditServiceServer).List(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: AuditService_List_FullMethodName,
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(AuditServiceServer).List(ctx, req.(*ListRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _AuditService_PaginatedList_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(PaginatedListRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(AuditServiceServer).PaginatedList(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: AuditService_PaginatedList_FullMethodName,
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(AuditServiceServer).PaginatedList(ctx, req.(*PaginatedListRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _AuditService_TestStream_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(TestStreamRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(AuditServiceServer).TestStream(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: AuditService_TestStream_FullMethodName,
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(AuditServiceServer).TestStream(ctx, req.(*TestStreamRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _AuditService_CreateStream_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(CreateStreamRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(AuditServiceServer).CreateStream(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: AuditService_CreateStream_FullMethodName,
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(AuditServiceServer).CreateStream(ctx, req.(*CreateStreamRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _AuditService_DescribeStream_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(DescribeStreamRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(AuditServiceServer).DescribeStream(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: AuditService_DescribeStream_FullMethodName,
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(AuditServiceServer).DescribeStream(ctx, req.(*DescribeStreamRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _AuditService_UpdateStream_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(UpdateStreamRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(AuditServiceServer).UpdateStream(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: AuditService_UpdateStream_FullMethodName,
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(AuditServiceServer).UpdateStream(ctx, req.(*UpdateStreamRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _AuditService_DestroyStream_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(DestroyStreamRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(AuditServiceServer).DestroyStream(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: AuditService_DestroyStream_FullMethodName,
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(AuditServiceServer).DestroyStream(ctx, req.(*DestroyStreamRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _AuditService_SetStreamState_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(SetStreamStateRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(AuditServiceServer).SetStreamState(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: AuditService_SetStreamState_FullMethodName,
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(AuditServiceServer).SetStreamState(ctx, req.(*SetStreamStateRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _AuditService_ListStreamLogs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(ListStreamLogsRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(AuditServiceServer).ListStreamLogs(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: AuditService_ListStreamLogs_FullMethodName,
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(AuditServiceServer).ListStreamLogs(ctx, req.(*ListStreamLogsRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+// AuditService_ServiceDesc is the grpc.ServiceDesc for AuditService service.
+// It's only intended for direct use with grpc.RegisterService,
+// and not to be introspected or modified (even as a copy)
+var AuditService_ServiceDesc = grpc.ServiceDesc{
+ ServiceName: "InternalApi.Audit.AuditService",
+ HandlerType: (*AuditServiceServer)(nil),
+ Methods: []grpc.MethodDesc{
+ {
+ MethodName: "List",
+ Handler: _AuditService_List_Handler,
+ },
+ {
+ MethodName: "PaginatedList",
+ Handler: _AuditService_PaginatedList_Handler,
+ },
+ {
+ MethodName: "TestStream",
+ Handler: _AuditService_TestStream_Handler,
+ },
+ {
+ MethodName: "CreateStream",
+ Handler: _AuditService_CreateStream_Handler,
+ },
+ {
+ MethodName: "DescribeStream",
+ Handler: _AuditService_DescribeStream_Handler,
+ },
+ {
+ MethodName: "UpdateStream",
+ Handler: _AuditService_UpdateStream_Handler,
+ },
+ {
+ MethodName: "DestroyStream",
+ Handler: _AuditService_DestroyStream_Handler,
+ },
+ {
+ MethodName: "SetStreamState",
+ Handler: _AuditService_SetStreamState_Handler,
+ },
+ {
+ MethodName: "ListStreamLogs",
+ Handler: _AuditService_ListStreamLogs_Handler,
+ },
+ },
+ Streams: []grpc.StreamDesc{},
+ Metadata: "audit.proto",
+}
diff --git a/public-api-gateway/test/jobs_v1alpha_server.go b/public-api-gateway/test/jobs_v1alpha_server.go
new file mode 100644
index 000000000..f36dcaf3a
--- /dev/null
+++ b/public-api-gateway/test/jobs_v1alpha_server.go
@@ -0,0 +1,144 @@
+package main
+
+import (
+ "log"
+
+ "golang.org/x/net/context"
+ "google.golang.org/grpc/metadata"
+
+ pb "github.com/semaphoreio/semaphore/public-api-gateway/api/jobs.v1alpha"
+)
+
+// jobsServer implements the JobsApiServer interface
+var _ pb.JobsApiServer = &jobsServer{}
+
+type jobsServer struct{}
+
+// GetJob returns a job by ID
+func (s *jobsServer) GetJob(ctx context.Context, req *pb.GetJobRequest) (*pb.Job, error) {
+ log.Printf("Incoming GetJob Request")
+ logRequestMetadata(ctx)
+
+ jobID := req.GetJobId()
+ log.Printf("Job ID: %s", jobID)
+
+ return &pb.Job{
+ Metadata: &pb.Job_Metadata{
+ Id: jobID,
+ },
+ Status: &pb.Job_Status{
+ State: pb.Job_Status_RUNNING,
+ },
+ }, nil
+}
+
+// ListJobs returns a list of jobs
+func (s *jobsServer) ListJobs(ctx context.Context, req *pb.ListJobsRequest) (*pb.ListJobsResponse, error) {
+ log.Printf("Incoming ListJobs Request")
+ logRequestMetadata(ctx)
+
+ return &pb.ListJobsResponse{
+ Jobs: []*pb.Job{
+ {
+ Metadata: &pb.Job_Metadata{
+ Id: "job-1",
+ },
+ Status: &pb.Job_Status{
+ State: pb.Job_Status_RUNNING,
+ },
+ },
+ {
+ Metadata: &pb.Job_Metadata{
+ Id: "job-2",
+ },
+ Status: &pb.Job_Status{
+ State: pb.Job_Status_FINISHED,
+ Result: pb.Job_Status_PASSED,
+ },
+ },
+ },
+ }, nil
+}
+
+// StopJob stops a job by ID
+func (s *jobsServer) StopJob(ctx context.Context, req *pb.StopJobRequest) (*pb.Empty, error) {
+ log.Printf("Incoming StopJob Request")
+ logRequestMetadata(ctx)
+
+ jobID := req.GetJobId()
+ log.Printf("Stopping Job ID: %s", jobID)
+
+ return &pb.Empty{}, nil
+}
+
+// GetJobDebugSSHKey returns debug SSH key for a job
+func (s *jobsServer) GetJobDebugSSHKey(ctx context.Context, req *pb.GetJobDebugSSHKeyRequest) (*pb.JobDebugSSHKey, error) {
+ log.Printf("Incoming GetJobDebugSSHKey Request")
+ logRequestMetadata(ctx)
+
+ jobID := req.GetJobId()
+ log.Printf("Getting Debug SSH Key for Job ID: %s", jobID)
+
+ return &pb.JobDebugSSHKey{
+ Key: "mock-ssh-key",
+ }, nil
+}
+
+// CreateJob creates a new job
+func (s *jobsServer) CreateJob(ctx context.Context, req *pb.Job) (*pb.Job, error) {
+ log.Printf("Incoming CreateJob Request")
+ logRequestMetadata(ctx)
+
+ return &pb.Job{
+ Metadata: &pb.Job_Metadata{
+ Id: "new-job-id",
+ },
+ Status: &pb.Job_Status{
+ State: pb.Job_Status_PENDING,
+ },
+ }, nil
+}
+
+// CreateDebugJob creates a debug job
+func (s *jobsServer) CreateDebugJob(ctx context.Context, req *pb.CreateDebugJobRequest) (*pb.Job, error) {
+ log.Printf("Incoming CreateDebugJob Request")
+ logRequestMetadata(ctx)
+
+ return &pb.Job{
+ Metadata: &pb.Job_Metadata{
+ Id: "debug-job-id",
+ },
+ Status: &pb.Job_Status{
+ State: pb.Job_Status_PENDING,
+ },
+ }, nil
+}
+
+// CreateDebugProject creates a debug project
+func (s *jobsServer) CreateDebugProject(ctx context.Context, req *pb.CreateDebugProjectRequest) (*pb.Job, error) {
+ log.Printf("Incoming CreateDebugProject Request")
+ logRequestMetadata(ctx)
+
+ return &pb.Job{
+ Metadata: &pb.Job_Metadata{
+ Id: "debug-project-job-id",
+ },
+ Status: &pb.Job_Status{
+ State: pb.Job_Status_PENDING,
+ },
+ }, nil
+}
+
+// Helper function to log request metadata
+func logRequestMetadata(ctx context.Context) {
+ log.Printf("---------------")
+ headers, _ := metadata.FromIncomingContext(ctx)
+
+ log.Printf("Headers:")
+ for key, values := range headers {
+ for _, value := range values {
+ log.Printf(" %s: %s", key, value)
+ }
+ }
+ log.Printf("---------------")
+}
diff --git a/public-api-gateway/test/secrets_v1beta_server.go b/public-api-gateway/test/secrets_v1beta_server.go
index b965a9eaf..b72884c17 100644
--- a/public-api-gateway/test/secrets_v1beta_server.go
+++ b/public-api-gateway/test/secrets_v1beta_server.go
@@ -9,6 +9,7 @@ import (
"google.golang.org/grpc"
"google.golang.org/grpc/metadata"
+ pbjobs "github.com/semaphoreio/semaphore/public-api-gateway/api/jobs.v1alpha"
pb "github.com/semaphoreio/semaphore/public-api-gateway/api/secrets.v1beta"
)
@@ -69,6 +70,7 @@ func main() {
grpcServer := grpc.NewServer()
pb.RegisterSecretsApiServer(grpcServer, &server{})
+ pbjobs.RegisterJobsApiServer(grpcServer, &jobsServer{})
err = grpcServer.Serve(lis)
if err != nil {
log.Fatalf("failed to serve: %v", err)
diff --git a/public-api-gateway/test/test.sh b/public-api-gateway/test/test.sh
index 8f02da76b..f9431536c 100755
--- a/public-api-gateway/test/test.sh
+++ b/public-api-gateway/test/test.sh
@@ -5,10 +5,13 @@ rm -f /tmp/server_response.txt
rm -f /tmp/server_output.txt
# Start a fake Secrets server
-nohup go run test/secrets_v1beta_server.go > /tmp/server_output.txt 2>&1 &
+nohup go run test/jobs_v1alpha_server.go test/secrets_v1beta_server.go > /tmp/server_output.txt 2>&1 &
# Start the GRPC gateway
-nohup env SECRETS_V1BETA_PUBLIC_GRPC_API_ENDPOINT=localhost:50051 /app/build/server >/tmp/gateway_output 2>&1 &
+SECRETS_V1BETA_PUBLIC_GRPC_API_ENDPOINT=localhost:50051
+AMQP_URL=amqp://guest:guest@rabbitmq:5672
+JOBS_V1ALPHA_PUBLIC_GRPC_API_ENDPOINT=localhost:50051
+nohup /app/build/server >/tmp/gateway_output 2>&1 &
# sleep a bit, make sure that every server is running
sleep 18
@@ -53,3 +56,53 @@ fi
if [[ "$server_output" != *"Incomming Create Request"* ]] || [[ "$server_output" != *"Token xxx"* ]] || [[ "$server_output" != *"x-some-other-header-aaaa"* ]]; then
exit 1
fi
+
+
+# Test 2: Job Stop Request (should trigger audit)
+
+# send request to gateway
+job_id=$(cat /proc/sys/kernel/random/uuid)
+curl -X POST -s -H "Authorization: Token yyy" -H "x-some-other-jobs-header: x-some-other-jobs-header-aaaa" "http://localhost:8080/api/v1alpha/jobs/$job_id/stop" > /tmp/server_response.txt
+
+server_output=$(cat /tmp/server_output.txt)
+server_response=$(cat /tmp/server_response.txt)
+
+echo "=== Output"
+echo "$server_output"
+
+echo "=== Response"
+echo "$server_response"
+
+echo "=== Gateway output"
+gateway_output=$(cat /tmp/gateway_output)
+echo "$gateway_output"
+
+echo "=== Tests"
+
+if [[ "$server_output" == *"Incoming StopJob Request"* ]]; then
+ echo "Test passed: passes requests to the server"
+else
+ echo "Test failed: does not pass requests to the server"
+fi
+
+if [[ "$server_output" == *"Token yyy"* ]]; then
+ echo "Test passed: passes the authorization header"
+else
+ echo "Test failed: does not pass the authorization header"
+fi
+
+if [[ "$server_output" == *"x-some-other-jobs-header-aaaa"* ]]; then
+ echo "Test passed: passes random headers without modifications"
+else
+ echo "Test failed: does not pass random headers without modifications"
+fi
+
+if [[ "$gateway_output" != *"Audit event published via AMQP"* ]]; then
+ echo "Test failed: does not publish audit events"
+else
+ echo "Test passed: publishes audit events"
+fi
+
+if [[ "$server_output" != *"Incoming StopJob Request"* ]] || [[ "$server_output" != *"Token yyy"* ]] || [[ "$server_output" != *"x-some-other-jobs-header-aaaa"* ]] || [[ "$gateway_output" != *"Audit event published via AMQP"* ]]; then
+ exit 1
+fi
From 80775ff0381f37d575e98bd5b9c248bfcaed4b1f Mon Sep 17 00:00:00 2001
From: Amir Hasanbasic <43892661+hamir-suspect@users.noreply.github.com>
Date: Mon, 23 Jun 2025 13:38:47 +0200
Subject: [PATCH 10/87] fix(hooks_processor): Lower the log message level
(#407)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
Use json logging formatter when running in production env
Lower the priority of some hot path logs and introduce metrics in their
place
## ✅ Checklist
- [x] I have tested this change
- [ ] This change requires documentation update
---
hooks_processor/config/config.exs | 2 ++
hooks_processor/config/runtime.exs | 4 +++
.../hooks/processing/bitbucket_worker.ex | 31 ++++++++++++++++---
hooks_processor/mix.exs | 1 +
hooks_processor/mix.lock | 1 +
5 files changed, 35 insertions(+), 4 deletions(-)
diff --git a/hooks_processor/config/config.exs b/hooks_processor/config/config.exs
index 27da17056..5b35ffa45 100644
--- a/hooks_processor/config/config.exs
+++ b/hooks_processor/config/config.exs
@@ -2,6 +2,8 @@ import Config
config :hooks_processor, environment: config_env()
+config :logger, :console, format: "$time $metadata[$level] $message\n"
+
config :hooks_processor,
ecto_repos: [HooksProcessor.EctoRepo]
diff --git a/hooks_processor/config/runtime.exs b/hooks_processor/config/runtime.exs
index c5fb63f16..f5d1f3b56 100644
--- a/hooks_processor/config/runtime.exs
+++ b/hooks_processor/config/runtime.exs
@@ -24,6 +24,10 @@ config :hooks_processor, HooksProcessor.EctoRepo,
config :logger, level: (System.get_env("LOG_LEVEL") || "info") |> String.to_atom()
+if config_env() == :prod do
+ config :logger, :default_handler, formatter: LoggerJSON.Formatters.Basic.new()
+end
+
# We need valid dsn or nil
sentry_dsn = System.get_env("SENTRY_DSN", "") |> String.trim() |> (&if(&1 != "", do: &1, else: nil)).()
diff --git a/hooks_processor/lib/hooks_processor/hooks/processing/bitbucket_worker.ex b/hooks_processor/lib/hooks_processor/hooks/processing/bitbucket_worker.ex
index 44a277876..98fb54323 100644
--- a/hooks_processor/lib/hooks_processor/hooks/processing/bitbucket_worker.ex
+++ b/hooks_processor/lib/hooks_processor/hooks/processing/bitbucket_worker.ex
@@ -40,7 +40,7 @@ defmodule HooksProcessor.Hooks.Processing.BitbucketWorker do
requester_id <- get_requester_id(webhook, actor_id, "bitbucket"),
{:ok, _webhook} <-
process_webhook(hook_type, webhook, project.repository, requester_id) do
- "Processing finished successfully." |> graceful_exit(state)
+ :ok |> graceful_exit(state)
else
error -> graceful_exit(error, state)
end
@@ -94,6 +94,9 @@ defmodule HooksProcessor.Hooks.Processing.BitbucketWorker do
end
defp process_webhook(hook_type, _webhook, _project, _requester_id) do
+ # Increment unsupported hook type metric
+ Watchman.increment({"hooks.processing.bitbucket", ["unsupported_hook"]})
+
"Unsuported type of the hook: '#{hook_type}'"
end
@@ -109,9 +112,15 @@ defmodule HooksProcessor.Hooks.Processing.BitbucketWorker do
defp should_build?(repository, hook_data, hook_type) do
cond do
hook_type not in repository.run_on ->
+ # Increment skip configuration metric
+ Watchman.increment({"hooks.processing.bitbucket", ["skip", "configuration"]})
+
{:build, {false, hook_state(hook_type, :skip)}, hook_data}
not whitelisted?(repository.whitelist, hook_data, hook_type) ->
+ # Increment skip configuration metric
+ Watchman.increment({"hooks.processing.bitbucket", ["skip", "whitelist"]})
+
{:build, {false, hook_state(hook_type, :whitelist)}, hook_data}
true ->
@@ -124,14 +133,28 @@ defmodule HooksProcessor.Hooks.Processing.BitbucketWorker do
defp hook_state(:TAGS, :skip), do: "skip_tag"
defp hook_state(:TAGS, :whitelist), do: "whitelist_tag"
- defp graceful_exit(message, state) do
- message
- |> LT.info("Hook #{state.id} - bitbucket worker process exits: ")
+ defp graceful_exit(result, state) do
+ case result do
+ :ok ->
+ Watchman.increment({"hooks.processing.bitbucket", ["success"]})
+
+ "Processing finished successfully."
+ |> LT.debug("Hook #{state.id} - bitbucket worker process exits: ")
+
+ error ->
+ Watchman.increment({"hooks.processing.bitbucket", ["error"]})
+
+ error
+ |> LT.error("Hook #{state.id} - bitbucket worker process exits: ")
+ end
{:stop, :normal, state}
end
defp restart(error, state) do
+ # Increment failure metric
+ Watchman.increment({"hooks.processing.bitbucket", ["restart"]})
+
error
|> LT.warn("Hook #{state.id} - bitbucket worker process failiure: ")
diff --git a/hooks_processor/mix.exs b/hooks_processor/mix.exs
index 278bac1b3..99ef14ecd 100644
--- a/hooks_processor/mix.exs
+++ b/hooks_processor/mix.exs
@@ -38,6 +38,7 @@ defmodule HooksProcessor.MixProject do
{:ecto_sql, "~> 3.0"},
{:postgrex, ">= 0.0.0"},
{:uuid, "~> 1.1"},
+ {:logger_json, "~> 7.0"},
{:junit_formatter, "~> 3.1", only: [:test]},
# head because support for JSON is not yet released
{:sentry, github: "getsentry/sentry-elixir", ref: "f375551f32f35674f9baab470d0e571466b07055"},
diff --git a/hooks_processor/mix.lock b/hooks_processor/mix.lock
index b97c02c2c..711f7c4cd 100644
--- a/hooks_processor/mix.lock
+++ b/hooks_processor/mix.lock
@@ -23,6 +23,7 @@
"jason": {:hex, :jason, "1.4.4", "b9226785a9aa77b6857ca22832cffa5d5011a667207eb2a0ad56adb5db443b8a", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "c5eb0cab91f094599f94d55bc63409236a8ec69a21a67814529e8d5f6cc90b3b"},
"junit_formatter": {:hex, :junit_formatter, "3.4.0", "d0e8db6c34dab6d3c4154c3b46b21540db1109ae709d6cf99ba7e7a2ce4b1ac2", [:mix], [], "hexpm", "bb36e2ae83f1ced6ab931c4ce51dd3dbef1ef61bb4932412e173b0cfa259dacd"},
"log_tee": {:git, "https://github.com/renderedtext/log-tee.git", "30c69704d583bda8cffb324b8936ffd3680e6ae4", []},
+ "logger_json": {:hex, :logger_json, "7.0.3", "0f202788d743154796bd208e1078d878bb4fccf0adc4ed9c83cba821732d326c", [:mix], [{:decimal, ">= 0.0.0", [hex: :decimal, repo: "hexpm", optional: true]}, {:ecto, "~> 3.11", [hex: :ecto, repo: "hexpm", optional: true]}, {:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: true]}, {:plug, "~> 1.15", [hex: :plug, repo: "hexpm", optional: true]}, {:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "9d1ca7dfe77eb7eac4664edfd6f874168d4707aedbedea09fba8eaa6ed2e2f97"},
"metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm", "69b09adddc4f74a40716ae54d140f93beb0fb8978d8636eaded0c31b6f099f16"},
"mime": {:hex, :mime, "2.0.6", "8f18486773d9b15f95f4f4f1e39b710045fa1de891fada4516559967276e4dc2", [:mix], [], "hexpm", "c9945363a6b26d747389aac3643f8e0e09d30499a138ad64fe8fd1d13d9b153e"},
"mimerl": {:hex, :mimerl, "1.3.0", "d0cd9fc04b9061f82490f6581e0128379830e78535e017f7780f37fea7545726", [:rebar3], [], "hexpm", "a1e15a50d1887217de95f0b9b0793e32853f7c258a5cd227650889b38839fe9d"},
From 56cff6ebfde424d0079ca183386426b7c9aea8c3 Mon Sep 17 00:00:00 2001
From: Amir Hasanbasic <43892661+hamir-suspect@users.noreply.github.com>
Date: Mon, 23 Jun 2025 15:31:30 +0200
Subject: [PATCH 11/87] fix(hooks_processor): explicitly add jason as
production dependency (#410)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
`jason` was not packaged into the release, so the JSON logger crashes in the
prod environment and only a plain-text log message is printed instead.
## ✅ Checklist
- [x] I have tested this change
- [ ] This change requires documentation update
---
.../hooks_processor/hooks/processing/workers_supervisor.ex | 4 ++--
hooks_processor/mix.exs | 1 +
2 files changed, 3 insertions(+), 2 deletions(-)
diff --git a/hooks_processor/lib/hooks_processor/hooks/processing/workers_supervisor.ex b/hooks_processor/lib/hooks_processor/hooks/processing/workers_supervisor.ex
index 7c9a9b8a3..08b11a0e7 100644
--- a/hooks_processor/lib/hooks_processor/hooks/processing/workers_supervisor.ex
+++ b/hooks_processor/lib/hooks_processor/hooks/processing/workers_supervisor.ex
@@ -44,12 +44,12 @@ defmodule HooksProcessor.Hooks.Processing.WorkersSupervisor do
end
defp process_response(resp = {:ok, pid}, id, provider) do
- LT.info(pid, "Hook #{id} - #{provider} worker started")
+ LT.debug(pid, "Hook #{id} - #{provider} worker started")
resp
end
defp process_response({:error, {:already_started, pid}}, id, provider) do
- LT.info(pid, "Hook #{id} - #{provider} worker already started")
+ LT.debug(pid, "Hook #{id} - #{provider} worker already started")
{:ok, pid}
end
diff --git a/hooks_processor/mix.exs b/hooks_processor/mix.exs
index 99ef14ecd..b8aac231d 100644
--- a/hooks_processor/mix.exs
+++ b/hooks_processor/mix.exs
@@ -39,6 +39,7 @@ defmodule HooksProcessor.MixProject do
{:postgrex, ">= 0.0.0"},
{:uuid, "~> 1.1"},
{:logger_json, "~> 7.0"},
+ {:jason, "~> 1.4"},
{:junit_formatter, "~> 3.1", only: [:test]},
# head because support for JSON is not yet released
{:sentry, github: "getsentry/sentry-elixir", ref: "f375551f32f35674f9baab470d0e571466b07055"},
From 4153030bc0dab495fb7cbb70c8686b3fdfcae9c9 Mon Sep 17 00:00:00 2001
From: Amir Hasanbasic <43892661+hamir-suspect@users.noreply.github.com>
Date: Wed, 25 Jun 2025 11:25:56 +0200
Subject: [PATCH 12/87] chore(front): Refresh protos (#412)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
Refresh protobuf definitions
## ✅ Checklist
- [x] I have tested this change
- [ ] This change requires documentation update
---
front/lib/internal_api/usage.pb.ex | 1 +
1 file changed, 1 insertion(+)
diff --git a/front/lib/internal_api/usage.pb.ex b/front/lib/internal_api/usage.pb.ex
index c75c955ff..031458039 100644
--- a/front/lib/internal_api/usage.pb.ex
+++ b/front/lib/internal_api/usage.pb.ex
@@ -292,6 +292,7 @@ defmodule InternalApi.Usage.SeatOrigin do
field(:SEAT_ORIGIN_SEMAPHORE, 1)
field(:SEAT_ORIGIN_GITHUB, 2)
field(:SEAT_ORIGIN_BITBUCKET, 3)
+ field(:SEAT_ORIGIN_GITLAB, 4)
end
defmodule InternalApi.Usage.SeatStatus do
From c0095f5ce5ddf4a731edae643cabdb2d0b68e511 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Damjan=20Be=C4=87irovi=C4=87?=
Date: Wed, 25 Jun 2025 14:00:25 +0200
Subject: [PATCH 13/87] fix(hooks-processor): Don't retry processing of hooks
of unsupported type (#413)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
If the webhook received from the git provider is of an unsupported type
(e.g. merge requests on GitLab), we should not retry
processing since that is not a transient issue.
## ✅ Checklist
- [x] I have tested this change
- [x] ~This change requires documentation update~ -N/A
Co-authored-by: Amir Hasanbasic <43892661+hamir-suspect@users.noreply.github.com>
---
.../hooks/processing/bitbucket_worker.ex | 7 ++-
.../hooks/processing/git_worker.ex | 8 +--
.../hooks/processing/gitlab_worker.ex | 7 ++-
.../processing/bitbucket_worker_test.exs | 56 ++++++++++++++++++
.../test/hooks/processing/git_worker_test.exs | 57 +++++++++++++++++++
.../hooks/processing/gitlab_worker_test.exs | 55 ++++++++++++++++++
hooks_processor/test/support/git_hooks.ex | 14 +++++
7 files changed, 196 insertions(+), 8 deletions(-)
diff --git a/hooks_processor/lib/hooks_processor/hooks/processing/bitbucket_worker.ex b/hooks_processor/lib/hooks_processor/hooks/processing/bitbucket_worker.ex
index 98fb54323..29afc0ec8 100644
--- a/hooks_processor/lib/hooks_processor/hooks/processing/bitbucket_worker.ex
+++ b/hooks_processor/lib/hooks_processor/hooks/processing/bitbucket_worker.ex
@@ -93,11 +93,14 @@ defmodule HooksProcessor.Hooks.Processing.BitbucketWorker do
e -> e
end
- defp process_webhook(hook_type, _webhook, _project, _requester_id) do
+ defp process_webhook(hook_type, webhook, _project, requester_id) do
+ params = %{provider: "bitbucket", requester_id: requester_id}
+ HooksQueries.update_webhook(webhook, params, "failed", "BAD REQUEST")
+
# Increment unsupported hook type metric
Watchman.increment({"hooks.processing.bitbucket", ["unsupported_hook"]})
- "Unsuported type of the hook: '#{hook_type}'"
+ {:error, "Unsuported type of the hook: '#{hook_type}'"}
end
defp perform_actions(webhook, parsed_data, hook_type, action_type)
diff --git a/hooks_processor/lib/hooks_processor/hooks/processing/git_worker.ex b/hooks_processor/lib/hooks_processor/hooks/processing/git_worker.ex
index e84c76445..983734547 100644
--- a/hooks_processor/lib/hooks_processor/hooks/processing/git_worker.ex
+++ b/hooks_processor/lib/hooks_processor/hooks/processing/git_worker.ex
@@ -136,11 +136,11 @@ defmodule HooksProcessor.Hooks.Processing.GitWorker do
e -> e
end
- defp process_webhook(hook_type, webhook, _project, _requester_id) do
- webhook
- |> LT.warn("Unsupported type of the hook: '#{hook_type}'")
+ defp process_webhook(hook_type, webhook, _project, requester_id) do
+ params = %{provider: "git", requester_id: requester_id}
+ HooksQueries.update_webhook(webhook, params, "failed", "BAD REQUEST")
- HooksQueries.update_webhook(webhook, %{}, "failed")
+ {:error, "Unsupported type of the hook: '#{hook_type}' for webhook: #{inspect(webhook)}"}
end
defp perform_actions(webhook, parsed_data) do
diff --git a/hooks_processor/lib/hooks_processor/hooks/processing/gitlab_worker.ex b/hooks_processor/lib/hooks_processor/hooks/processing/gitlab_worker.ex
index 4366f0cc0..422e328a8 100644
--- a/hooks_processor/lib/hooks_processor/hooks/processing/gitlab_worker.ex
+++ b/hooks_processor/lib/hooks_processor/hooks/processing/gitlab_worker.ex
@@ -95,8 +95,11 @@ defmodule HooksProcessor.Hooks.Processing.GitlabWorker do
e -> e
end
- defp process_webhook(hook_type, _webhook, _project, _requester_id) do
- "Unsuported type of the hook: '#{hook_type}'"
+ defp process_webhook(hook_type, webhook, _project, requester_id) do
+ params = %{provider: "gitlab", requester_id: requester_id}
+ HooksQueries.update_webhook(webhook, params, "failed", "BAD REQUEST")
+
+ {:error, "Unsuported type of the hook: '#{hook_type}'"}
end
defp should_build?(repository, hook_data, hook_type) do
diff --git a/hooks_processor/test/hooks/processing/bitbucket_worker_test.exs b/hooks_processor/test/hooks/processing/bitbucket_worker_test.exs
index 8b1cc8276..fdd561ad8 100644
--- a/hooks_processor/test/hooks/processing/bitbucket_worker_test.exs
+++ b/hooks_processor/test/hooks/processing/bitbucket_worker_test.exs
@@ -701,4 +701,60 @@ defmodule HooksProcessor.Hooks.Processing.BitbucketWorkerTest do
GrpcMock.verify!(ProjectHubServiceMock)
end
+
+ test "unsupported hook type => hook is recorded as failed" do
+ params = %{
+ received_at: DateTime.utc_now(),
+ webhook: BitbucketHooks.pull_request_open(),
+ repository_id: UUID.uuid4(),
+ project_id: UUID.uuid4(),
+ organization_id: UUID.uuid4(),
+ provider: "bitbucket"
+ }
+
+ assert {:ok, webhook} = HooksQueries.insert(params)
+
+ # setup mocks
+
+ ProjectHubServiceMock
+ |> GrpcMock.expect(:describe, fn req, _ ->
+ assert req.id == webhook.project_id
+
+ %Projecthub.DescribeResponse{
+ project: %{
+ metadata: %{
+ id: req.id,
+ org_id: UUID.uuid4()
+ },
+ spec: %{
+ repository: %{
+ pipeline_file: ".semaphore/semaphore.yml",
+ run_on: [:BRANCHES, :TAGS],
+ whitelist: %{tags: ["/v1.*/", "/release-.*/"]}
+ }
+ }
+ },
+ metadata: %{status: %{code: :OK}}
+ }
+ end)
+
+ # wait for worker to finish and check results
+
+ assert {:ok, pid} = WorkersSupervisor.start_worker_for_webhook(webhook.id)
+
+ Test.Helpers.wait_for_worker_to_finish(pid, 15_000)
+
+ assert {:ok, webhook} = HooksQueries.get_by_id(webhook.id)
+ assert webhook.provider == "bitbucket"
+ assert webhook.state == "failed"
+ assert webhook.result == "BAD REQUEST"
+ assert webhook.wf_id == nil
+ assert webhook.ppl_id == nil
+ assert webhook.branch_id == nil
+ assert webhook.commit_sha == nil
+ assert webhook.commit_author == nil
+ assert webhook.git_ref == nil
+
+ GrpcMock.verify!(ProjectHubServiceMock)
+ end
end
diff --git a/hooks_processor/test/hooks/processing/git_worker_test.exs b/hooks_processor/test/hooks/processing/git_worker_test.exs
index 07f762356..dc956a8af 100644
--- a/hooks_processor/test/hooks/processing/git_worker_test.exs
+++ b/hooks_processor/test/hooks/processing/git_worker_test.exs
@@ -503,4 +503,61 @@ defmodule HooksProcessor.Hooks.Processing.GitWorkerTest do
GrpcMock.verify!(ProjectHubServiceMock)
end
+
+ test "unsupported hook type => hook is recorded as failed" do
+ params = %{
+ received_at: DateTime.utc_now(),
+ webhook: GitHooks.unsupported_hook_type(),
+ repository_id: UUID.uuid4(),
+ project_id: UUID.uuid4(),
+ organization_id: UUID.uuid4(),
+ provider: "git"
+ }
+
+ assert {:ok, webhook} = HooksQueries.insert(params)
+
+ # setup mocks
+
+ ProjectHubServiceMock
+ |> GrpcMock.expect(:describe, fn req, _ ->
+ assert req.id == webhook.project_id
+
+ %Projecthub.DescribeResponse{
+ project: %{
+ metadata: %{
+ id: req.id,
+ org_id: UUID.uuid4()
+ },
+ spec: %{
+ repository: %{
+ owner: "semaphore",
+ name: "elixir-project",
+ pipeline_file: ".semaphore/semaphore.yml",
+ run_on: [:BRANCHES, :TAGS],
+ whitelist: %{tags: ["/release-.*/"]}
+ }
+ }
+ },
+ metadata: %{status: %{code: :OK}}
+ }
+ end)
+
+ # wait for worker to finish and check results
+
+ assert {:ok, pid} = WorkersSupervisor.start_worker_for_webhook(webhook.id)
+
+ Test.Helpers.wait_for_worker_to_finish(pid, 15_000)
+
+ assert {:ok, webhook} = HooksQueries.get_by_id(webhook.id)
+ assert webhook.state == "failed"
+ assert webhook.result == "BAD REQUEST"
+ assert webhook.wf_id == nil
+ assert webhook.ppl_id == nil
+ assert webhook.branch_id == nil
+ assert webhook.commit_sha == nil
+ assert webhook.commit_author == nil
+ assert webhook.git_ref == nil
+
+ GrpcMock.verify!(ProjectHubServiceMock)
+ end
end
diff --git a/hooks_processor/test/hooks/processing/gitlab_worker_test.exs b/hooks_processor/test/hooks/processing/gitlab_worker_test.exs
index cfe23ae5f..41dea7367 100644
--- a/hooks_processor/test/hooks/processing/gitlab_worker_test.exs
+++ b/hooks_processor/test/hooks/processing/gitlab_worker_test.exs
@@ -701,4 +701,59 @@ defmodule HooksProcessor.Hooks.Processing.GitlabWorkerTest do
GrpcMock.verify!(ProjectHubServiceMock)
end
+
+ test "unsupported hook type => hook is recorded as failed" do
+ params = %{
+ received_at: DateTime.utc_now(),
+ webhook: GitlabHooks.merge_request_open(),
+ repository_id: UUID.uuid4(),
+ project_id: UUID.uuid4(),
+ organization_id: UUID.uuid4(),
+ provider: "gitlab"
+ }
+
+ assert {:ok, webhook} = HooksQueries.insert(params)
+
+ # setup mocks
+
+ ProjectHubServiceMock
+ |> GrpcMock.expect(:describe, fn req, _ ->
+ assert req.id == webhook.project_id
+
+ %Projecthub.DescribeResponse{
+ project: %{
+ metadata: %{
+ id: req.id,
+ org_id: UUID.uuid4()
+ },
+ spec: %{
+ repository: %{
+ pipeline_file: ".semaphore/semaphore.yml",
+ run_on: [:BRANCHES, :TAGS],
+ whitelist: %{tags: ["/release-.*/"]}
+ }
+ }
+ },
+ metadata: %{status: %{code: :OK}}
+ }
+ end)
+
+ # wait for worker to finish and check results
+
+ assert {:ok, pid} = WorkersSupervisor.start_worker_for_webhook(webhook.id)
+
+ Test.Helpers.wait_for_worker_to_finish(pid, 15_000)
+
+ assert {:ok, webhook} = HooksQueries.get_by_id(webhook.id)
+ assert webhook.state == "failed"
+ assert webhook.result == "BAD REQUEST"
+ assert webhook.wf_id == nil
+ assert webhook.ppl_id == nil
+ assert webhook.branch_id == nil
+ assert webhook.commit_sha == nil
+ assert webhook.commit_author == nil
+ assert webhook.git_ref == nil
+
+ GrpcMock.verify!(ProjectHubServiceMock)
+ end
end
diff --git a/hooks_processor/test/support/git_hooks.ex b/hooks_processor/test/support/git_hooks.ex
index 44f7efc2a..98f6458b3 100644
--- a/hooks_processor/test/support/git_hooks.ex
+++ b/hooks_processor/test/support/git_hooks.ex
@@ -49,4 +49,18 @@ defmodule Support.GitHooks do
}
}
end
+
+ def unsupported_hook_type do
+ %{
+ "reference" => "refs/puls/123",
+ "commit" => %{
+ "sha" => "023becf74ae8a5d93911db4bad7967f94343b44b",
+ "message" => "Initial commit"
+ },
+ "author" => %{
+ "name" => "Radek",
+ "email" => "radek@example.com"
+ }
+ }
+ end
end
From b8801c2d0301d3a632b9f9ac9c814e0dc9ab4396 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Miko=C5=82aj=20Kutryj?=
Date: Thu, 26 Jun 2025 11:33:32 +0200
Subject: [PATCH 14/87] feat(front): partial pipeline rebuild in UI (#414)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
Related task: https://github.com/semaphoreio/semaphore/issues/29
## ✅ Checklist
- [x] I have tested this change
- [ ] This change requires documentation update
---
.../interactive_pipeline_tree.js | 30 ++++++++++++
front/lib/front/clients/pipeline.ex | 24 ++++++++++
front/lib/front/models/pipeline.ex | 18 ++++++++
.../controllers/pipeline_controller.ex | 43 ++++++++++++++++-
front/lib/front_web/router.ex | 4 ++
.../status/_interactive_pipeline.html.eex | 4 ++
front/lib/front_web/views/pipeline_view.ex | 4 ++
front/test/front/clients/pipeline_test.exs | 12 +++++
.../controllers/pipeline_controller_test.exs | 46 +++++++++++++++++++
.../front_web/views/pipeline_view_test.exs | 22 +++++++++
front/test/support/factories/pipeline.ex | 8 ++++
front/test/support/stubs/pipeline.ex | 11 +++++
12 files changed, 225 insertions(+), 1 deletion(-)
diff --git a/front/assets/js/workflow_view/interactive_pipeline_tree.js b/front/assets/js/workflow_view/interactive_pipeline_tree.js
index d58ef2574..e060d9c0e 100644
--- a/front/assets/js/workflow_view/interactive_pipeline_tree.js
+++ b/front/assets/js/workflow_view/interactive_pipeline_tree.js
@@ -11,6 +11,7 @@ export var InteractivePipelineTree = {
init: function(opts = {}) {
InteractivePipelineTree.handleWorkflowTreeItemClicks(opts);
InteractivePipelineTree.handlePipelineStopClicks();
+ InteractivePipelineTree.handlePipelineRebuildClicks();
InteractivePipelineTree.handleToggleSkippedBlocksClicks();
},
@@ -41,6 +42,35 @@ export var InteractivePipelineTree = {
});
},
+ handlePipelineRebuildClicks: function() {
+ $("body").on("click", "[pipeline-rebuild-button]", function(event) {
+ event.preventDefault();
+ let button = $(event.currentTarget);
+ let href = button.attr("href");
+ button.text("Rebuilding...")
+ button.attr("disabled", true);
+
+ let req = $.ajax({
+ url: href,
+ type: "POST",
+ beforeSend: function(xhr) {
+ xhr.setRequestHeader("X-CSRF-Token", $("meta[name='csrf-token']").attr("content"));
+ }
+ });
+
+ req.done(function(data) {
+ if(data.error != undefined) {
+ Notice.error(data.error)
+ button.text("Rebuild Pipeline")
+ button.attr("disabled", false);
+ } else {
+ Notice.notice(data.message)
+ button.remove();
+ }
+ })
+ });
+ },
+
onWorkflowTreeItemClick: function(event) {
let pipelineId = $(event.currentTarget).data("pipeline-id");
diff --git a/front/lib/front/clients/pipeline.ex b/front/lib/front/clients/pipeline.ex
index 745eea6ff..6acf5798a 100644
--- a/front/lib/front/clients/pipeline.ex
+++ b/front/lib/front/clients/pipeline.ex
@@ -184,4 +184,28 @@ defmodule Front.Clients.Pipeline do
response
end)
end
+
+ def partial_rebuild(request) do
+ Watchman.benchmark("pipeline.partial_rebuild.duration", fn ->
+ response =
+ channel()
+ |> Stub.partial_rebuild(request, metadata: metadata(), timeout: timeout())
+
+ case response do
+ {:ok, _} -> Watchman.increment("pipeline.partial_rebuild.success")
+ {:error, _} -> Watchman.increment("pipeline.partial_rebuild.failure")
+ end
+
+ Logger.debug(fn ->
+ """
+ Pipeline API partial_rebuild returned response
+ #{inspect(response)}
+ for request
+ #{inspect(request)}
+ """
+ end)
+
+ response
+ end)
+ end
end
diff --git a/front/lib/front/models/pipeline.ex b/front/lib/front/models/pipeline.ex
index 9913396b7..303a585e5 100644
--- a/front/lib/front/models/pipeline.ex
+++ b/front/lib/front/models/pipeline.ex
@@ -14,6 +14,7 @@ defmodule Front.Models.Pipeline do
DescribeTopologyRequest,
ListKeysetRequest,
ListRequest,
+ PartialRebuildRequest,
Pipeline,
TerminateRequest
}
@@ -349,6 +350,23 @@ defmodule Front.Models.Pipeline do
end
end
+ def rebuild(id, requester_id, _tracing_headers \\ nil) do
+ request =
+ PartialRebuildRequest.new(
+ ppl_id: id,
+ user_id: requester_id,
+ request_token: UUID.uuid4()
+ )
+
+ {:ok, response} = Clients.Pipeline.partial_rebuild(request)
+
+ case ResponseCode.key(response.response_status.code) do
+ :OK -> {:ok, response.ppl_id}
+ :BAD_PARAM -> {:error, response.response_status.message}
+ _ -> {:error, "Failed to rebuild pipeline"}
+ end
+ end
+
defp request_stream(req, tracing_headers, override \\ nil) do
request(req, tracing_headers) |> stream_if_needed(override)
end
diff --git a/front/lib/front_web/controllers/pipeline_controller.ex b/front/lib/front_web/controllers/pipeline_controller.ex
index 055130c7e..6bd144971 100644
--- a/front/lib/front_web/controllers/pipeline_controller.ex
+++ b/front/lib/front_web/controllers/pipeline_controller.ex
@@ -18,9 +18,10 @@ defmodule FrontWeb.PipelineController do
plug(PublicPageAccess when action in @public_endpoints)
plug(PageAccess, [permissions: "project.view"] when action not in @public_endpoints)
plug(PageAccess, [permissions: "project.job.stop"] when action == :stop)
+ plug(PageAccess, [permissions: "project.job.rerun"] when action == :rebuild)
plug(:assign_pipeline_with_blocks when action in [:show, :poll])
- plug(:assign_pipeline_without_blocks when action in [:status, :switch, :stop])
+ plug(:assign_pipeline_without_blocks when action in [:status, :switch, :stop, :rebuild])
plug(:preload_switch when action in [:show, :poll, :switch])
def path(conn, params) do
@@ -130,6 +131,32 @@ defmodule FrontWeb.PipelineController do
end
end
+ def rebuild(conn, _params) do
+ Watchman.benchmark("rebuild.duration", fn ->
+ project = conn.assigns.project
+ workflow = conn.assigns.workflow
+ pipeline = conn.assigns.pipeline
+
+ log_rebuild(conn, project, workflow, pipeline)
+ rebuild_pipeline(conn, pipeline.id, conn.assigns.user_id, conn.assigns.tracing_headers)
+ end)
+ end
+
+ defp rebuild_pipeline(conn, ppl_id, user_id, tracing_headers) do
+ case Pipeline.rebuild(ppl_id, user_id, tracing_headers) do
+ {:ok, new_pipeline_id} ->
+ conn
+ |> json(%{
+ message: "Pipeline rebuild initiated successfully.",
+ pipeline_id: new_pipeline_id
+ })
+
+ {:error, message} ->
+ conn
+ |> json(%{error: message})
+ end
+ end
+
defp organization_matches?(organization_id, pipeline_organization_id) do
organization_id == pipeline_organization_id
end
@@ -152,6 +179,20 @@ defmodule FrontWeb.PipelineController do
|> Audit.log()
end
+ defp log_rebuild(conn, project, workflow, pipeline) do
+ conn
+ |> Audit.new(:Pipeline, :Rebuild)
+ |> Audit.add(:resource_name, pipeline.name)
+ |> Audit.add(:description, "Rebuilt the pipeline")
+ |> Audit.metadata(project_id: project.id)
+ |> Audit.metadata(project_name: project.name)
+ |> Audit.metadata(branch_name: workflow.branch_name)
+ |> Audit.metadata(workflow_id: workflow.id)
+ |> Audit.metadata(commit_sha: workflow.commit_sha)
+ |> Audit.metadata(pipeline_id: pipeline.id)
+ |> Audit.log()
+ end
+
defp pipeline_data(conn, params) do
diagram =
if FeatureProvider.feature_enabled?(:toggle_skipped_blocks,
diff --git a/front/lib/front_web/router.ex b/front/lib/front_web/router.ex
index 2e240fbe9..5e414f985 100644
--- a/front/lib/front_web/router.ex
+++ b/front/lib/front_web/router.ex
@@ -638,6 +638,10 @@ defmodule FrontWeb.Router do
as: :pipeline_stop
)
+ post("/workflows/:workflow_id/pipelines/:pipeline_id/rebuild", PipelineController, :rebuild,
+ as: :pipeline_rebuild
+ )
+
post(
"/workflows/:workflow_id/pipelines/:pipeline_id/swithes/:switch_id/targets/:name",
TargetController,
diff --git a/front/lib/front_web/templates/workflow/status/_interactive_pipeline.html.eex b/front/lib/front_web/templates/workflow/status/_interactive_pipeline.html.eex
index 642127996..36c7b051a 100644
--- a/front/lib/front_web/templates/workflow/status/_interactive_pipeline.html.eex
+++ b/front/lib/front_web/templates/workflow/status/_interactive_pipeline.html.eex
@@ -21,6 +21,10 @@
·
<%= link "Stop Pipeline", to: pipeline_stop_path(@conn, :stop, @workflow.id, @pipeline.id), class: "btn btn-secondary btn-tiny", pipeline_stop_button: "true" %>
<% end %>
+ <%= if @conn.assigns.permissions["project.job.rerun"] && FrontWeb.PipelineView.pipeline_rebuildable?(@pipeline) && !FrontWeb.PipelineView.anonymous?(@conn) do %>
+ ·
+ <%= link "Rebuild Pipeline", to: pipeline_rebuild_path(@conn, :rebuild, @workflow.id, @pipeline.id), class: "btn btn-secondary btn-tiny", pipeline_rebuild_button: "true", title: "Rerun only failed jobs in this pipeline" %>
+ <% end %>
←
diff --git a/front/lib/front_web/views/pipeline_view.ex b/front/lib/front_web/views/pipeline_view.ex
index da6dfda24..787a66e0f 100644
--- a/front/lib/front_web/views/pipeline_view.ex
+++ b/front/lib/front_web/views/pipeline_view.ex
@@ -299,6 +299,10 @@ defmodule FrontWeb.PipelineView do
pipeline.state != :DONE && pipeline.state != :STOPPING
end
+ def pipeline_rebuildable?(pipeline) do
+ pipeline.state == :DONE && pipeline.result != :PASSED
+ end
+
def anonymous?(conn) do
conn.assigns.anonymous
end
diff --git a/front/test/front/clients/pipeline_test.exs b/front/test/front/clients/pipeline_test.exs
index 052f49305..c15817061 100644
--- a/front/test/front/clients/pipeline_test.exs
+++ b/front/test/front/clients/pipeline_test.exs
@@ -7,6 +7,7 @@ defmodule Front.Clients.PipelineTest do
DescribeManyRequest,
DescribeRequest,
DescribeTopologyRequest,
+ PartialRebuildRequest,
TerminateRequest
}
@@ -53,4 +54,15 @@ defmodule Front.Clients.PipelineTest do
assert {:ok, response} == Pipeline.terminate(request)
end
end
+
+ describe "partial_rebuild" do
+ test "returns PartialRebuildResponse for PartialRebuildRequest" do
+ request = PartialRebuildRequest.new()
+
+ response = Factories.Pipeline.partial_rebuild_response()
+ GrpcMock.stub(PipelineMock, :partial_rebuild, response)
+
+ assert {:ok, response} == Pipeline.partial_rebuild(request)
+ end
+ end
end
diff --git a/front/test/front_web/controllers/pipeline_controller_test.exs b/front/test/front_web/controllers/pipeline_controller_test.exs
index 2266fc73a..74b27b0ac 100644
--- a/front/test/front_web/controllers/pipeline_controller_test.exs
+++ b/front/test/front_web/controllers/pipeline_controller_test.exs
@@ -262,4 +262,50 @@ defmodule FrontWeb.PipelineControllerTest do
assert conn.status == 404
end
end
+
+ describe "rebuild" do
+ test "sends partial rebuild request", %{
+ conn: conn,
+ workflow_id: workflow_id,
+ pipeline_id: pipeline_id
+ } do
+ conn =
+ conn
+ |> post("/workflows/#{workflow_id}/pipelines/#{pipeline_id}/rebuild")
+
+ assert conn.status == 200
+ assert json_response(conn, 200)["message"] == "Pipeline rebuild initiated successfully."
+ assert json_response(conn, 200)["pipeline_id"] != nil
+ end
+
+ test "returns 404 when organization_id mismatches", %{
+ conn: conn,
+ workflow_id: workflow_id,
+ pipeline_id: pipeline_id
+ } do
+ conn =
+ conn
+ |> Plug.Conn.put_req_header("x-semaphore-org-id", Ecto.UUID.generate())
+ |> post("/workflows/#{workflow_id}/pipelines/#{pipeline_id}/rebuild")
+
+ assert conn.status == 404
+ end
+ end
+
+ describe "rebuild => when user does not have permission to rerun jobs" do
+ test "returns 404", %{conn: conn, workflow_id: workflow_id, pipeline_id: pipeline_id} do
+ Support.Stubs.PermissionPatrol.remove_all_permissions()
+
+ org = Support.Stubs.DB.first(:organizations)
+ user = Support.Stubs.DB.first(:users)
+
+ Support.Stubs.PermissionPatrol.allow_everything_except(org.id, user.id, "project.job.rerun")
+
+ conn =
+ conn
+ |> post("/workflows/#{workflow_id}/pipelines/#{pipeline_id}/rebuild")
+
+ assert conn.status == 404
+ end
+ end
end
diff --git a/front/test/front_web/views/pipeline_view_test.exs b/front/test/front_web/views/pipeline_view_test.exs
index 07346d57f..0901476c4 100644
--- a/front/test/front_web/views/pipeline_view_test.exs
+++ b/front/test/front_web/views/pipeline_view_test.exs
@@ -244,4 +244,26 @@ defmodule FrontWeb.PipelineViewTest do
assert action =~ "Triggered"
end
end
+
+ describe ".pipeline_rebuildable?" do
+ test "returns true when pipeline is in DONE state" do
+ pipeline = %Models.Pipeline{state: :DONE}
+ assert PipelineView.pipeline_rebuildable?(pipeline) == true
+ end
+
+ test "returns false when pipeline is in PENDING state" do
+ pipeline = %Models.Pipeline{state: :PENDING}
+ assert PipelineView.pipeline_rebuildable?(pipeline) == false
+ end
+
+ test "returns false when pipeline is in RUNNING state" do
+ pipeline = %Models.Pipeline{state: :RUNNING}
+ assert PipelineView.pipeline_rebuildable?(pipeline) == false
+ end
+
+ test "returns false when pipeline is in STOPPING state" do
+ pipeline = %Models.Pipeline{state: :STOPPING}
+ assert PipelineView.pipeline_rebuildable?(pipeline) == false
+ end
+ end
end
diff --git a/front/test/support/factories/pipeline.ex b/front/test/support/factories/pipeline.ex
index 95acfe8f6..6a2e4a04c 100644
--- a/front/test/support/factories/pipeline.ex
+++ b/front/test/support/factories/pipeline.ex
@@ -5,6 +5,7 @@ defmodule Support.Factories.Pipeline do
Block,
DescribeResponse,
DescribeTopologyResponse,
+ PartialRebuildResponse,
Pipeline,
ResponseStatus,
TerminateResponse
@@ -23,6 +24,13 @@ defmodule Support.Factories.Pipeline do
}
end
+ def partial_rebuild_response do
+ %PartialRebuildResponse{
+ response_status: ResponseStatus.new(code: ResponseCode.value(:OK)),
+ ppl_id: "new-pipeline-id-#{:rand.uniform(10000)}"
+ }
+ end
+
def list_response do
InternalApi.Plumber.ListResponse.new(
response_status: InternalApi.Plumber.ResponseStatus.new(),
diff --git a/front/test/support/stubs/pipeline.ex b/front/test/support/stubs/pipeline.ex
index 95545447e..021abcf1d 100644
--- a/front/test/support/stubs/pipeline.ex
+++ b/front/test/support/stubs/pipeline.ex
@@ -299,12 +299,23 @@ defmodule Support.Stubs.Pipeline do
GrpcMock.stub(PipelineMock, :list_keyset, &__MODULE__.list_keyset/2)
GrpcMock.stub(PipelineMock, :describe_topology, &__MODULE__.describe_topology/2)
GrpcMock.stub(PipelineMock, :terminate, &__MODULE__.terminate/2)
+ GrpcMock.stub(PipelineMock, :partial_rebuild, &__MODULE__.partial_rebuild/2)
end
def terminate(_req, _) do
InternalApi.Plumber.TerminateResponse.new(response_status: ok())
end
+ def partial_rebuild(_req, _) do
+ # Create a new pipeline for the partial rebuild
+ new_pipeline_id = Support.Stubs.UUID.gen()
+
+ InternalApi.Plumber.PartialRebuildResponse.new(
+ response_status: ok(),
+ ppl_id: new_pipeline_id
+ )
+ end
+
def describe(req, _) do
case DB.find(:pipelines, req.ppl_id) do
nil ->
From d4999d8b3b27534f776f35755ee2996d7f7d8066 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Damjan=20Be=C4=87irovi=C4=87?=
Date: Thu, 26 Jun 2025 12:07:28 +0200
Subject: [PATCH 15/87] feat(v1alpha api): Allow passing parameters in Run
Workflow API call (#323)
Changes the Run workflow API implementation to allow passing parameters
and improve performance.
Previously, the request was synchronously waiting for repo-proxy to
create a hook by retrieving data from the git provider and then
schedule the workflow on the plumber.
This had performance issues, and it did not support passing parameters to
be used in the pipeline.
The new implementation goes directly to the plumber, which only saves the
request in the database before responding, and all hook processing is
done asynchronously.
This should improve performance; additionally, the plumber supports passing
parameters out of the box.
---
docs/docs/reference/api.md | 19 +-
.../repo_proxy/repo_proxy_server.rb | 60 ++++++-
.../repo_proxy/repo_proxy_server_spec.rb | 164 ++++++++++++++++++
plumber/ppl/lib/ppl/actions/schedule_impl.ex | 32 ++--
.../ppl/ppl_requests/model/ppl_requests.ex | 13 +-
.../model/ppl_requests_queries.ex | 8 +-
.../model/ppl_sub_init_queries.ex | 4 +-
.../ppl/lib/ppl/ppls/model/ppls_queries.ex | 4 +-
plumber/ppl/lib/ppl/repo_proxy_client.ex | 19 +-
.../lib/ppl/workflow/workflow_api_server.ex | 9 +-
.../ppl_requests/model/ppl_requests_test.exs | 21 ---
.../stm_handler/event_publishing_test.exs | 1 -
plumber/ppl/test/repo_proxy_client_test.exs | 4 +-
.../workflow/workflow_api_server_test.exs | 1 -
.../internal_api/plumber_w_f.workflow.pb.ex | 10 +-
.../internal_api/plumber_w_f.workflow.pb.ex | 36 +++-
.../lib/pipelines_api/pipelines_client.ex | 9 -
.../pipelines_client/grpc_client.ex | 28 ---
.../pipelines_client/request_formatter.ex | 33 ----
.../pipelines_client/response_formatter.ex | 22 ---
.../lib/pipelines_api/repo_proxy_client.ex | 73 --------
.../lib/pipelines_api/workflow_client.ex | 2 +-
.../workflow_client/wf_request_formatter.ex | 55 +++---
.../workflow_client/wf_response_formatter.ex | 29 +++-
.../lib/pipelines_api/workflows/schedule.ex | 35 +++-
.../request_formatter_test.exs | 65 -------
.../response_formatter_test.exs | 39 -----
.../v1alpha/test/repo_proxy_client_test.exs | 77 --------
.../v1alpha/test/router/describe_test.exs | 1 -
public-api/v1alpha/test/router/list_test.exs | 2 -
.../test/router/promotions/list_test.exs | 2 -
.../test/router/schedules/delete_test.exs | 2 +-
.../test/router/schedules/describe_test.exs | 2 +-
.../test/router/schedules/list_test.exs | 2 -
.../v1alpha/test/router/wf_list_test.exs | 2 -
.../v1alpha/test/router/wf_schedule_test.exs | 42 ++---
.../v1alpha/test/support/stubs/workflow.ex | 65 ++++---
public-api/v1alpha/test/test_helper.exs | 19 --
.../v1alpha/test/workflow_client_test.exs | 51 ++----
public-api/v2/test/test_helper.exs | 17 --
40 files changed, 481 insertions(+), 598 deletions(-)
delete mode 100644 public-api/v1alpha/lib/pipelines_api/repo_proxy_client.ex
delete mode 100644 public-api/v1alpha/test/repo_proxy_client_test.exs
diff --git a/docs/docs/reference/api.md b/docs/docs/reference/api.md
index f9befb5ba..2e2827632 100644
--- a/docs/docs/reference/api.md
+++ b/docs/docs/reference/api.md
@@ -100,6 +100,7 @@ Parameters:
- `reference` (**required**) - git reference for the desired branch, tag, or pull request--e.g. *refs/heads/master*, *refs/tags/v1.0*, or *refs/pull/123*.
- `commit_sha` (*optional*) - Commit sha of the desired commit.
- `pipeline_file` (*optional*) - The path within the repository to the YAML file that contains the pipeline definition. The default value is *.semaphore/semaphore.yml*.
+- `parameters` (*optional*) - Key-value pairs of parameter values that will be available in all jobs of the initial pipeline and can be used in the same way as parameters from the [parameterized promotions](../using-semaphore/promotions#parameters).
Response:
@@ -108,17 +109,25 @@ HTTP status: 200
{
"workflow_id": "32a689e0-9082-4c5b-a648-bb3dc645452d",
- "pipeline_id": "2abeb1a9-eb4a-4834-84b8-cb7806aec063",
- "hook_id": "ff7d57ef-92c5-4fcd-9c0c-6ae9e24bfcec"
+ "pipeline_id": "2abeb1a9-eb4a-4834-84b8-cb7806aec063"
}
```
Example:
```shell
-curl -i -H "Authorization: Token {api_token}" \
- -d "project_id={project_id}&reference={reference}" \
- -X POST "https://.semaphoreci.com/api/v1alpha/plumber-workflows"
+curl -X POST --location "https://.semaphoreci.com/api/v1alpha/plumber-workflows" \
+ -H "Authorization: Token {api_token}" \
+ -H "Content-Type: application/json" \
+ -d $'{
+ "project_id": "my_project_id",
+ "reference": "refs/heads/master",
+ "pipeline_file": "/.semaphore/deploy.yml",
+ "parameters": {
+ "PARAM_NAME": "PARAM_VALUE",
+ "PARAM_NAME_2": "PARAM_VALUE_2"
+ }
+}'
```
### Describe a workflow
diff --git a/github_hooks/lib/internal_api/repo_proxy/repo_proxy_server.rb b/github_hooks/lib/internal_api/repo_proxy/repo_proxy_server.rb
index be4783c2f..f12c25795 100644
--- a/github_hooks/lib/internal_api/repo_proxy/repo_proxy_server.rb
+++ b/github_hooks/lib/internal_api/repo_proxy/repo_proxy_server.rb
@@ -51,6 +51,64 @@ class RepoProxyServer < RepoProxyService::Service
end
end
+ define_rpc :create_blank do |req, logger|
+ project = ::Project.find(req.project_id)
+ user = ::User.find(req.requester_id)
+
+ payload_builder = InternalApi::RepoProxy::PayloadFactory.create(req.git.reference, req.git.commit_sha)
+ payload = payload_builder.call(project, user)
+
+ params = ActionController::Parameters.new
+ params["hash_id"] = project.id
+ params["payload"] = payload.to_json
+
+ workflow = ::Semaphore::RepoHost::Hooks::Recorder.record_hook(params, project)
+ workflow.update(:result => ::Workflow::RESULT_OK)
+
+ branch = ::Branch.find_or_create_for_workflow(workflow)
+ branch.unarchive
+ workflow.update(:branch_id => branch.id)
+
+ if workflow.payload.pull_request?
+ branch.update(:pull_request_mergeable => true)
+ workflow.update(
+ :commit_author => payload["commit_author"],
+ :commit_sha => payload["merge_commit_sha"],
+ :git_ref => payload["semaphore_ref"]
+ )
+ end
+
+ workflow.update(:ppl_id => req.pipeline_id)
+ workflow.update(:wf_id => req.wf_id)
+ workflow.update(:state => Workflow::STATE_LAUNCHING)
+
+ InternalApi::RepoProxy::CreateBlankResponse.new(
+ :hook_id => workflow.id,
+ :wf_id => req.wf_id,
+ :pipeline_id => req.pipeline_id,
+ :branch_id => branch.id,
+ :repo => InternalApi::RepoProxy::CreateBlankResponse::Repo.new(
+ :owner => branch.project.repository.owner,
+ :repo_name => branch.project.repository.name,
+ :branch_name => branch.name,
+ :commit_sha => workflow.commit_sha,
+ :repository_id => branch.project.repository.id
+ )
+ )
+
+ rescue ::InternalApi::RepoProxy::PrPayload::PrNotMergeableError => e
+ raise GRPC::Aborted, e.message
+ rescue ::InternalApi::RepoProxy::PayloadFactory::InvalidReferenceError => e
+ raise GRPC::InvalidArgument, e.message
+ rescue ::RepoHost::RemoteException::NotFound
+ raise GRPC::NotFound, "Reference not found on GitHub #{req.git.reference} #{req.git.commit_sha}"
+ rescue ::RepoHost::RemoteException::Unknown => e
+ logger.error("Unknown error", error: e.message)
+ raise GRPC::Internal, "Unknown error"
+ rescue ::ActiveRecord::RecordNotFound => e
+ raise GRPC::NotFound, e.message
+ end
+
define_rpc :create do |req, logger|
project = ::Project.find(req.project_id)
@@ -102,8 +160,6 @@ def create_for_github_project(req, logger)
workflow.branch_name
end
- integration_token, = ::Semaphore::ProjectIntegrationToken.new.project_token(branch.project)
-
client = InternalApi::PlumberWF::WorkflowService::Stub.new(App.plumber_internal_url, :this_channel_is_insecure)
request = InternalApi::PlumberWF::ScheduleRequest.new(
:service => InternalApi::PlumberWF::ScheduleRequest::ServiceType::GIT_HUB,
diff --git a/github_hooks/spec/lib/internal_api/repo_proxy/repo_proxy_server_spec.rb b/github_hooks/spec/lib/internal_api/repo_proxy/repo_proxy_server_spec.rb
index b68b64efa..6c1a72d04 100644
--- a/github_hooks/spec/lib/internal_api/repo_proxy/repo_proxy_server_spec.rb
+++ b/github_hooks/spec/lib/internal_api/repo_proxy/repo_proxy_server_spec.rb
@@ -217,6 +217,170 @@
end
end
+ describe "#create_blank" do
+ let(:user) { FactoryBot.create(:user, :github_connection) }
+ let(:repository) do
+ FactoryBot.create(
+ :repository,
+ name: "sandbox",
+ owner: "renderedtext",
+ integration_type: "github_app"
+ )
+ end
+ let(:project) { FactoryBot.create(:project, repository: repository) }
+ let(:workflow) { FactoryBot.create(:workflow_with_branch, project: project) }
+ let(:branch) { workflow.branch }
+
+ let(:git) do
+ InternalApi::RepoProxy::CreateBlankRequest::Git.new(
+ reference: "refs/heads/main",
+ commit_sha: "abc123"
+ )
+ end
+
+ let(:req) do
+ InternalApi::RepoProxy::CreateBlankRequest.new(
+ project_id: project.id,
+ requester_id: user.id,
+ pipeline_id: "pipeline-id",
+ wf_id: "workflow-id",
+ git: git
+ )
+ end
+
+ let(:payload_hash) do
+ {
+ "commit_author" => "dev@example.com",
+ "merge_commit_sha" => "abc123",
+ "semaphore_ref" => "refs/merge"
+ }
+ end
+
+ before do
+ payload = instance_double(InternalApi::RepoProxy::PrPayload, call: payload_hash)
+ allow(InternalApi::RepoProxy::PayloadFactory).to receive(:create)
+ .with(req.git.reference, req.git.commit_sha)
+ .and_return(payload)
+
+ allow(Semaphore::RepoHost::Hooks::Recorder).to receive(:record_hook)
+ .and_return(workflow)
+
+ allow(Branch).to receive(:find_or_create_for_workflow).with(workflow).and_return(branch)
+ allow(branch).to receive(:unarchive)
+ allow(branch).to receive(:update)
+ allow(workflow).to receive(:update)
+ end
+
+ it "creates a blank hook and returns the expected response" do
+ allow(payload_hash).to receive(:pull_request?).and_return(false)
+ allow(workflow).to receive(:payload).and_return(payload_hash)
+ expect(workflow).to receive(:update).with(state: Workflow::STATE_LAUNCHING)
+ result = server.create_blank(req, call)
+
+ expect(result).to be_a(InternalApi::RepoProxy::CreateBlankResponse)
+ expect(result.hook_id).to eq(workflow.id)
+ expect(result.wf_id).to eq(req.wf_id)
+ expect(result.pipeline_id).to eq(req.pipeline_id)
+ expect(result.branch_id).to eq(branch.id)
+
+ repo = result.repo
+ expect(repo.owner).to eq(repository.owner)
+ expect(repo.repo_name).to eq(repository.name)
+ expect(repo.branch_name).to eq(branch.name)
+ expect(repo.commit_sha).to eq(workflow.commit_sha)
+ expect(repo.repository_id).to eq(repository.id)
+ end
+
+ context "when pull request is not mergeable" do
+ before do
+ allow(InternalApi::RepoProxy::PayloadFactory).to receive(:create).and_raise(
+ InternalApi::RepoProxy::PrPayload::PrNotMergeableError.new("PR not mergeable")
+ )
+ end
+
+ it "raises GRPC::Aborted" do
+ expect do
+ server.create_blank(req, call)
+ end.to raise_error(GRPC::Aborted, /PR not mergeable/)
+ end
+ end
+
+ context "when reference is invalid" do
+ before do
+ allow(InternalApi::RepoProxy::PayloadFactory).to receive(:create)
+ .and_raise(InternalApi::RepoProxy::PayloadFactory::InvalidReferenceError.new("Invalid ref"))
+ end
+
+ it "raises GRPC::InvalidArgument" do
+ expect do
+ server.create_blank(req, call)
+ end.to raise_error(GRPC::InvalidArgument, /Invalid ref/)
+ end
+ end
+
+ context "when reference is not found on GitHub" do
+ before do
+ allow(InternalApi::RepoProxy::PayloadFactory).to receive(:create)
+ .and_raise(RepoHost::RemoteException::NotFound)
+ end
+
+ it "raises GRPC::NotFound" do
+ expect do
+ server.create_blank(req, call)
+ end.to raise_error(GRPC::NotFound, /Reference not found/)
+ end
+ end
+
+ context "when unknown error occurs" do
+ before do
+ allow(Semaphore::RepoHost::Hooks::Recorder).to receive(:record_hook)
+ .and_raise(RepoHost::RemoteException::Unknown.new("Boom"))
+ end
+
+ it "raises GRPC::Internal" do
+ expect do
+ server.create_blank(req, call)
+ end.to raise_error(GRPC::Internal, /Unknown error/)
+ end
+ end
+
+ context "when the user is not found" do
+ before do
+ @invalid_req = InternalApi::RepoProxy::CreateBlankRequest.new(
+ project_id: project.id,
+ requester_id: "invalid-user-id",
+ pipeline_id: "pipeline-id",
+ wf_id: "workflow-id",
+ git: git
+ )
+ end
+
+ it "raises GRPC::NotFound for missing user" do
+ expect do
+ server.create_blank(@invalid_req, call)
+ end.to raise_error(GRPC::NotFound, /Couldn't find User/)
+ end
+ end
+
+ context "when the project is not found" do
+ before do
+ @invalid_req = InternalApi::RepoProxy::CreateBlankRequest.new(
+ project_id: "invalid-project-id",
+ requester_id: user.id,
+ pipeline_id: "pipeline-id",
+ wf_id: "workflow-id",
+ git: git
+ )
+ end
+
+ it "raises GRPC::NotFound for missing project" do
+ expect do
+ server.create_blank(@invalid_req, call)
+ end.to raise_error(GRPC::NotFound, /Couldn't find Project/)
+ end
+ end
+ end
+
describe "#create" do
before "when unknown remote error is raised" do
allow(InternalApi::RepoProxy::PayloadFactory).to receive(
diff --git a/plumber/ppl/lib/ppl/actions/schedule_impl.ex b/plumber/ppl/lib/ppl/actions/schedule_impl.ex
index d7bc84fe7..cd2bf3221 100644
--- a/plumber/ppl/lib/ppl/actions/schedule_impl.ex
+++ b/plumber/ppl/lib/ppl/actions/schedule_impl.ex
@@ -79,11 +79,11 @@ defmodule Ppl.Actions.ScheduleImpl do
# Schedule
- def schedule(ctx, top_level?, initial_request?, task_workflow?) do
+ def schedule(ctx, top_level?, initial_request?, start_in_conceived?) do
log_run_request(ctx)
ctx
- |> prepare_request_multi(top_level?, initial_request?, task_workflow?)
+ |> prepare_request_multi(top_level?, initial_request?, start_in_conceived?)
|> persist_request
|> case do
{:ok, %{ppl_req: ppl_req}} ->
@@ -92,7 +92,7 @@ defmodule Ppl.Actions.ScheduleImpl do
retry_count: publish_retry_count(), timeout_ms: publish_timeout()),
predicate <- fn query -> query |> where(ppl_id: ^ppl_req.id) end,
- :ok <- execute_first_state_with_predicate(predicate, task_workflow?),
+ :ok <- execute_first_state_with_predicate(predicate, start_in_conceived?),
do: response(ppl_req)
# Idempotency -> return {:ok, ...}
{:error, :ppl_req, {:request_token_exists, request_token}, _} ->
@@ -136,32 +136,32 @@ defmodule Ppl.Actions.ScheduleImpl do
|> Map.put("suppressed_attributes", attribute_list)
end
- def prepare_request_multi(ctx, top_level?, initial_request?, task_workflow?) do
+ def prepare_request_multi(ctx, top_level?, initial_request?, start_in_conceived?) do
ctx = RequestReviser.revise(ctx)
Multi.new()
# insert pipeline request
|> Multi.run(:ppl_req, fn _, _ ->
Metrics.benchmark("Ppl.schedule_break_down", ["insert_request"], fn ->
- PplRequestsQueries.insert_request(ctx, top_level?, initial_request?, task_workflow?)
+ PplRequestsQueries.insert_request(ctx, top_level?, initial_request?, start_in_conceived?)
end)
end)
# insert pipeline based on that request
|> Multi.run(:ppl, fn _, %{ppl_req: ppl_req} ->
Metrics.benchmark("Ppl.schedule_break_down", ["insert_pipeline"], fn ->
- PplsQueries.insert(ppl_req, "", task_workflow?)
+ PplsQueries.insert(ppl_req, "", start_in_conceived?)
end)
end)
# update pipeline to include wf_number
|> Multi.run(:wf_num, fn _, %{ppl_req: ppl_req, ppl: ppl} ->
Metrics.benchmark("Ppl.schedule_break_down", ["set_wf_num"], fn ->
- set_workflow_number(ppl, ppl_req, task_workflow?)
+ set_workflow_number(ppl, ppl_req, start_in_conceived?)
end)
end)
# insert pipeline sub init for this pipeline
|> Multi.run(:ppl_sub_init, fn _, %{ppl_req: ppl_req} ->
Metrics.benchmark("Ppl.schedule_break_down", ["insert_subinit"], fn ->
- PplSubInitsQueries.insert(ppl_req, "regular", task_workflow?)
+ PplSubInitsQueries.insert(ppl_req, "regular", start_in_conceived?)
end)
end)
# save inital_request separately for easier debug
@@ -179,25 +179,25 @@ defmodule Ppl.Actions.ScheduleImpl do
end
# promotions
- def set_workflow_number(ppl, req = %{request_args: %{"wf_number" => num}}, task_workflow?)
+ def set_workflow_number(ppl, req = %{request_args: %{"wf_number" => num}}, start_in_conceived?)
when is_integer(num) and num > 0 do
with service <- Map.get(req.request_args, "service"),
- {:ok, _ppl} <- update_ppl(ppl, service, num, task_workflow?),
+ {:ok, _ppl} <- update_ppl(ppl, service, num, start_in_conceived?),
do: {:ok, num}
end
# partial rebuilds
- def set_workflow_number(ppl = %{partial_rebuild_of: val}, ppl_req, task_workflow?)
+ def set_workflow_number(ppl = %{partial_rebuild_of: val}, ppl_req, start_in_conceived?)
when is_binary(val) and val != "" do
with {:ok, l_wf} <- calculate_wf_num(ppl, ppl_req),
service <- Map.get(ppl_req.request_args, "service"),
- {:ok, _ppl} <- update_ppl(ppl, service, l_wf.wf_number + 1, task_workflow?),
+ {:ok, _ppl} <- update_ppl(ppl, service, l_wf.wf_number + 1, start_in_conceived?),
do: {:ok, l_wf.wf_number + 1}
end
# regular schedule and wf_rebuild
- def set_workflow_number(ppl, ppl_req, task_workflow?) do
+ def set_workflow_number(ppl, ppl_req, start_in_conceived?) do
with {:ok, l_wf} <- read_from_latest_wf_table(ppl, ppl_req),
service <- Map.get(ppl_req.request_args, "service"),
- {:ok, _ppl} <- update_ppl(ppl, service, l_wf.wf_number + 1, task_workflow?),
+ {:ok, _ppl} <- update_ppl(ppl, service, l_wf.wf_number + 1, start_in_conceived?),
{:ok, _} <- LatestWfsQueries.insert_or_update(l_wf, ppl_req, l_wf.wf_number + 1),
do: {:ok, l_wf.wf_number + 1}
end
@@ -224,8 +224,8 @@ defmodule Ppl.Actions.ScheduleImpl do
defp get_initial_wf_ppl(%{wf_id: wf_id}, _ppl),
do: PplsQueries.get_initial_wf_ppl(wf_id)
- defp update_ppl(ppl, service, wf_num, task_workflow?) do
- with_repo_data? = !task_workflow?
+ defp update_ppl(ppl, service, wf_num, start_in_conceived?) do
+ with_repo_data? = !start_in_conceived?
ppl
|> Ppls.changeset(%{wf_number: wf_num}, service == "listener_proxy", with_repo_data?)
diff --git a/plumber/ppl/lib/ppl/ppl_requests/model/ppl_requests.ex b/plumber/ppl/lib/ppl/ppl_requests/model/ppl_requests.ex
index 8dba1f464..406f4a06c 100644
--- a/plumber/ppl/lib/ppl/ppl_requests/model/ppl_requests.ex
+++ b/plumber/ppl/lib/ppl/ppl_requests/model/ppl_requests.ex
@@ -241,11 +241,11 @@ defmodule Ppl.PplRequests.Model.PplRequests do
iex> PplRequests.changeset_request(%PplRequests{}, params) |> Map.get(:valid?)
true
"""
- def changeset_request(ppl_req, params \\ %{}, task_workflow? \\ false) do
+ def changeset_request(ppl_req, params \\ %{}, start_in_conceived? \\ false) do
ppl_req
|> cast(params, @required_fields_request)
|> validate_required(@required_fields_request)
- |> validate_non_scheduler_task_fields(task_workflow?)
+ |> validate_hook_related_fields(!start_in_conceived?)
|> validate_change(:request_args, &request_args_field_validator__branch_name/2)
# this unique_constraint references unique_index in migration
|> unique_constraint(:unique_request_token_for_ppl_requests,
@@ -253,14 +253,9 @@ defmodule Ppl.PplRequests.Model.PplRequests do
)
end
- defp validate_non_scheduler_task_fields(changeset, true) do
- changeset
- |> validate_change(:request_args, fn _, value ->
- value |> Map.get("scheduler_task_id") |> field_required("scheduler_task_id")
- end)
- end
+ defp validate_hook_related_fields(changeset, false), do: changeset
- defp validate_non_scheduler_task_fields(changeset, false) do
+ defp validate_hook_related_fields(changeset, true) do
changeset
|> validate_change(:request_args, &request_args_field_validator__hook_id/2)
|> validate_change(:request_args, &request_args_field_validator__branch_id/2)
diff --git a/plumber/ppl/lib/ppl/ppl_requests/model/ppl_requests_queries.ex b/plumber/ppl/lib/ppl/ppl_requests/model/ppl_requests_queries.ex
index a403f8bfc..47c344ce9 100644
--- a/plumber/ppl/lib/ppl/ppl_requests/model/ppl_requests_queries.ex
+++ b/plumber/ppl/lib/ppl/ppl_requests/model/ppl_requests_queries.ex
@@ -18,7 +18,7 @@ defmodule Ppl.PplRequests.Model.PplRequestsQueries do
@doc """
Inserts new PplRequest with given params in DB
"""
- def insert_request(ctx, top_level \\ true, initial_request \\ true, task_workflow \\ false) do
+ def insert_request(ctx, top_level \\ true, initial_request \\ true, start_in_conceived? \\ false) do
ppl_id = UUID.uuid4()
wf_id = Map.get(ctx, "wf_id")
request_token = Map.get(ctx, "request_token")
@@ -32,11 +32,11 @@ defmodule Ppl.PplRequests.Model.PplRequestsQueries do
top_level: top_level, initial_request: initial_request, id: ppl_id,
ppl_artefact_id: ppl_id, wf_id: wf_id}
- insert_request_(params, task_workflow)
+ insert_request_(params, start_in_conceived?)
end
- defp insert_request_(params, task_workflow \\ false) do
- %PplRequests{} |> PplRequests.changeset_request(params, task_workflow) |> Repo.insert()
+ defp insert_request_(params, start_in_conceived? \\ false) do
+ %PplRequests{} |> PplRequests.changeset_request(params, start_in_conceived?) |> Repo.insert()
|> process_response(params[:request_token])
rescue
e -> {:error, e}
diff --git a/plumber/ppl/lib/ppl/ppl_sub_inits/model/ppl_sub_init_queries.ex b/plumber/ppl/lib/ppl/ppl_sub_inits/model/ppl_sub_init_queries.ex
index 205864219..b56002332 100644
--- a/plumber/ppl/lib/ppl/ppl_sub_inits/model/ppl_sub_init_queries.ex
+++ b/plumber/ppl/lib/ppl/ppl_sub_inits/model/ppl_sub_init_queries.ex
@@ -13,10 +13,10 @@ defmodule Ppl.PplSubInits.Model.PplSubInitsQueries do
@doc """
Inserts new PplSubInit record into DB with given parameters
"""
- def insert(ppl_req, init_type, task_workflow? \\ false) do
+ def insert(ppl_req, init_type, start_in_conceived? \\ false) do
params =
%{ppl_id: ppl_req.id, init_type: init_type}
- |> Map.put(:state, if(task_workflow?, do: "conceived", else: "created"))
+ |> Map.put(:state, if(start_in_conceived?, do: "conceived", else: "created"))
|> Map.put(:in_scheduling, "false")
try do
%PplSubInits{} |> PplSubInits.changeset(params) |> Repo.insert()
diff --git a/plumber/ppl/lib/ppl/ppls/model/ppls_queries.ex b/plumber/ppl/lib/ppl/ppls/model/ppls_queries.ex
index 21c48811e..e54e41a70 100644
--- a/plumber/ppl/lib/ppl/ppls/model/ppls_queries.ex
+++ b/plumber/ppl/lib/ppl/ppls/model/ppls_queries.ex
@@ -49,13 +49,13 @@ defmodule Ppl.Ppls.Model.PplsQueries do
@repo_fields ~w(owner repo_name branch_name commit_sha project_id label)
- def insert(ctx, partial_rebuild_of \\ "", task_workflow? \\ false) do
+ def insert(ctx, partial_rebuild_of \\ "", start_in_conceived? \\ false) do
req_args = ctx.request_args
yml_file_path = Expand.full_name(req_args["working_dir"], req_args["file_name"])
service = Map.get(ctx.request_args, "service")
extension_of = Map.get(req_args, "extension_of", "")
scheduler_task_id = Map.get(req_args, "scheduler_task_id", "")
- with_repo_data? = not task_workflow?
+ with_repo_data? = not start_in_conceived?
%{ppl_id: ctx.id, yml_file_path: yml_file_path}
|> Map.put(:state, "initializing")
diff --git a/plumber/ppl/lib/ppl/repo_proxy_client.ex b/plumber/ppl/lib/ppl/repo_proxy_client.ex
index 3f55674ad..db050e3f5 100644
--- a/plumber/ppl/lib/ppl/repo_proxy_client.ex
+++ b/plumber/ppl/lib/ppl/repo_proxy_client.ex
@@ -73,7 +73,12 @@ defmodule Ppl.RepoProxyClient do
)
)
- {:ok, channel} = GRPC.Stub.connect(new_url())
+ {:ok, channel} =
+ if ppl_req.request_args |> Map.get("service", "") == "git_hub" do
+ GRPC.Stub.connect(old_url())
+ else
+ GRPC.Stub.connect(new_url())
+ end
channel
|> RepoProxyService.Stub.create_blank(request, @opts)
@@ -95,14 +100,16 @@ defmodule Ppl.RepoProxyClient do
end
defp git_reference_from_ppl_req(ppl_req) do
- branch_name = ppl_req.request_args |> Map.get("branch_name", "")
+ git_ref = ppl_req.request_args |> Map.get("git_reference", "")
- if branch_name == "" do
- raise "Provided an empty branch_name"
+ if git_ref == "" do
+ branch_name = ppl_req.request_args |> Map.get("branch_name", "")
+ "refs/heads/#{branch_name}"
+ else
+ git_ref
end
-
- "refs/heads/#{branch_name}"
end
+
defp process_status({:ok, map}) do
case map |> Map.get(:status, %{}) |> Map.get(:code) do
:OK ->
diff --git a/plumber/ppl/lib/ppl/workflow/workflow_api_server.ex b/plumber/ppl/lib/ppl/workflow/workflow_api_server.ex
index 7c1be4097..7e444481f 100644
--- a/plumber/ppl/lib/ppl/workflow/workflow_api_server.ex
+++ b/plumber/ppl/lib/ppl/workflow/workflow_api_server.ex
@@ -39,8 +39,8 @@ defmodule Plumber.WorkflowAPI.Server do
{:ok, _org_id} <- id_present?(schedule_request, "organization_id"),
{:ok, request_map} <- Proto.to_map(schedule_request, string_keys: true),
{:ok, schedule_params} <- Actions.form_schedule_params(request_map),
- task_workflow? <- params_contain_scheduler_task_id?(schedule_params),
- {:ok, result} <- Actions.schedule(schedule_params, true, true, task_workflow?)
+ start_in_conceived? <- start_in_conceived_state?(schedule_params),
+ {:ok, result} <- Actions.schedule(schedule_params, true, true, start_in_conceived?)
do
schedule_response(result.wf_id, result.ppl_id)
else
@@ -318,9 +318,8 @@ defmodule Plumber.WorkflowAPI.Server do
end)
end
- defp params_contain_scheduler_task_id?(params) do
- params |> Map.get("scheduler_task_id", "") |> String.length() > 0
- end
+ defp start_in_conceived_state?(%{"scheduler_task_id" => val}) when is_binary(val) and val != "", do: true
+ defp start_in_conceived_state?(%{"start_in_conceived_state" => val}), do: val
defp one_of_required_present(:skip, :skip, :skip),
do: {:error, "One of 'project_ids', 'project_id' or 'organization_id' parameters is required."}
diff --git a/plumber/ppl/test/ppl_requests/model/ppl_requests_test.exs b/plumber/ppl/test/ppl_requests/model/ppl_requests_test.exs
index da1fcb89c..a94cc75f3 100644
--- a/plumber/ppl/test/ppl_requests/model/ppl_requests_test.exs
+++ b/plumber/ppl/test/ppl_requests/model/ppl_requests_test.exs
@@ -415,27 +415,6 @@ defmodule Ppl.PplRequests.Model.PplRequests.Test do
true
)
end
-
- test "scheduler_task_id is required when workflow originates from task" do
- assert %Ecto.Changeset{
- valid?: false,
- errors: [request_args: {"Missing field 'scheduler_task_id'", _}]
- } =
- PplRequests.changeset_request(
- %PplRequests{},
- %{
- request_args: %{"service" => "local"},
- request_token: "asdfgh2345678xcvb",
- prev_ppl_artefact_ids: [],
- top_level: false,
- initial_request: false,
- id: UUID.uuid4(),
- ppl_artefact_id: UUID.uuid4(),
- wf_id: UUID.uuid4()
- },
- true
- )
- end
end
test "changeset_conception updates request_args with missing information" do
diff --git a/plumber/ppl/test/ppls/stm_handler/event_publishing_test.exs b/plumber/ppl/test/ppls/stm_handler/event_publishing_test.exs
index 3ec61a8ba..fa0645470 100644
--- a/plumber/ppl/test/ppls/stm_handler/event_publishing_test.exs
+++ b/plumber/ppl/test/ppls/stm_handler/event_publishing_test.exs
@@ -3,7 +3,6 @@ defmodule Ppl.Ppls.STMHandler.EventPublishing.Test do
alias Test.Helpers
alias Ppl.Actions
- alias Test.GitHub.Credentials
alias Ppl.Ppls.Model.PplsQueries
alias InternalApi.Plumber.{ScheduleRequest, PipelineService}
alias Ppl.Ppls.STMHandler.EventPublishing.Test.{
diff --git a/plumber/ppl/test/repo_proxy_client_test.exs b/plumber/ppl/test/repo_proxy_client_test.exs
index 7dd32e3eb..3855431d7 100644
--- a/plumber/ppl/test/repo_proxy_client_test.exs
+++ b/plumber/ppl/test/repo_proxy_client_test.exs
@@ -85,12 +85,14 @@ defmodule Ppl.RepoProxyClient.Test do
wf_id: UUID.uuid4(),
request_token: UUID.uuid4(),
request_args: %{
+ "service" => "git_hub",
"project_id" => UUID.uuid4(),
"requester_id" => UUID.uuid4(),
"file_name" => "semaphore.yml",
"triggered_by" => "schedule",
"branch_name" => "master",
- "commit_sha" => ""
+ "commit_sha" => "",
+ "git_reference" => "refs/heads/master"
}
}
end
diff --git a/plumber/ppl/test/workflow/workflow_api_server_test.exs b/plumber/ppl/test/workflow/workflow_api_server_test.exs
index 130791a02..c07a252f5 100644
--- a/plumber/ppl/test/workflow/workflow_api_server_test.exs
+++ b/plumber/ppl/test/workflow/workflow_api_server_test.exs
@@ -3,7 +3,6 @@ defmodule Plumber.WorkflowAPI.Server.Test do
@test_commit_sha_2 "#{:crypto.strong_rand_bytes(20) |> Base.encode16(case: :lower)}"
use Ppl.IntegrationCase
- alias Test.GitHub.Credentials
alias Util.{Proto, ToTuple}
alias Ppl.Grpc.InFlightCounter
alias Ppl.PplRequests.Model.PplRequestsQueries
diff --git a/plumber/proto/lib/internal_api/plumber_w_f.workflow.pb.ex b/plumber/proto/lib/internal_api/plumber_w_f.workflow.pb.ex
index 5c1f748d5..86a7d4dd5 100644
--- a/plumber/proto/lib/internal_api/plumber_w_f.workflow.pb.ex
+++ b/plumber/proto/lib/internal_api/plumber_w_f.workflow.pb.ex
@@ -16,7 +16,9 @@ defmodule InternalApi.PlumberWF.ScheduleRequest do
label: String.t(),
triggered_by: integer,
scheduler_task_id: String.t(),
- env_vars: [InternalApi.PlumberWF.ScheduleRequest.EnvVar.t()]
+ env_vars: [InternalApi.PlumberWF.ScheduleRequest.EnvVar.t()],
+ start_in_conceived_state: boolean,
+ git_reference: String.t()
}
defstruct [
:service,
@@ -32,7 +34,9 @@ defmodule InternalApi.PlumberWF.ScheduleRequest do
:label,
:triggered_by,
:scheduler_task_id,
- :env_vars
+ :env_vars,
+ :start_in_conceived_state,
+ :git_reference
]
field :service, 2, type: InternalApi.PlumberWF.ScheduleRequest.ServiceType, enum: true
@@ -49,6 +53,8 @@ defmodule InternalApi.PlumberWF.ScheduleRequest do
field :triggered_by, 15, type: InternalApi.PlumberWF.TriggeredBy, enum: true
field :scheduler_task_id, 16, type: :string
field :env_vars, 17, repeated: true, type: InternalApi.PlumberWF.ScheduleRequest.EnvVar
+ field :start_in_conceived_state, 18, type: :bool
+ field :git_reference, 19, type: :string
end
defmodule InternalApi.PlumberWF.ScheduleRequest.Repo do
diff --git a/public-api/v1alpha/lib/internal_api/plumber_w_f.workflow.pb.ex b/public-api/v1alpha/lib/internal_api/plumber_w_f.workflow.pb.ex
index 97d0606a8..3124b5830 100644
--- a/public-api/v1alpha/lib/internal_api/plumber_w_f.workflow.pb.ex
+++ b/public-api/v1alpha/lib/internal_api/plumber_w_f.workflow.pb.ex
@@ -16,7 +16,9 @@ defmodule InternalApi.PlumberWF.ScheduleRequest do
label: String.t(),
triggered_by: integer,
scheduler_task_id: String.t(),
- env_vars: [InternalApi.PlumberWF.ScheduleRequest.EnvVar.t()]
+ env_vars: [InternalApi.PlumberWF.ScheduleRequest.EnvVar.t()],
+ start_in_conceived_state: boolean,
+ git_reference: String.t()
}
defstruct [
:service,
@@ -32,7 +34,9 @@ defmodule InternalApi.PlumberWF.ScheduleRequest do
:label,
:triggered_by,
:scheduler_task_id,
- :env_vars
+ :env_vars,
+ :start_in_conceived_state,
+ :git_reference
]
field(:service, 2, type: InternalApi.PlumberWF.ScheduleRequest.ServiceType, enum: true)
@@ -49,6 +53,8 @@ defmodule InternalApi.PlumberWF.ScheduleRequest do
field(:triggered_by, 15, type: InternalApi.PlumberWF.TriggeredBy, enum: true)
field(:scheduler_task_id, 16, type: :string)
field(:env_vars, 17, repeated: true, type: InternalApi.PlumberWF.ScheduleRequest.EnvVar)
+ field(:start_in_conceived_state, 18, type: :bool)
+ field(:git_reference, 19, type: :string)
end
defmodule InternalApi.PlumberWF.ScheduleRequest.Repo do
@@ -602,6 +608,32 @@ defmodule InternalApi.PlumberWF.DescribeManyResponse do
field(:workflows, 2, repeated: true, type: InternalApi.PlumberWF.WorkflowDetails)
end
+defmodule InternalApi.PlumberWF.DescribeManyRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ wf_ids: [String.t()]
+ }
+ defstruct [:wf_ids]
+
+ field(:wf_ids, 1, repeated: true, type: :string)
+end
+
+defmodule InternalApi.PlumberWF.DescribeManyResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ status: InternalApi.Status.t(),
+ workflows: [InternalApi.PlumberWF.WorkflowDetails.t()]
+ }
+ defstruct [:status, :workflows]
+
+ field(:status, 1, type: InternalApi.Status)
+ field(:workflows, 2, repeated: true, type: InternalApi.PlumberWF.WorkflowDetails)
+end
+
defmodule InternalApi.PlumberWF.TerminateRequest do
@moduledoc false
use Protobuf, syntax: :proto3
diff --git a/public-api/v1alpha/lib/pipelines_api/pipelines_client.ex b/public-api/v1alpha/lib/pipelines_api/pipelines_client.ex
index 9e061111f..15e80a50b 100644
--- a/public-api/v1alpha/lib/pipelines_api/pipelines_client.ex
+++ b/public-api/v1alpha/lib/pipelines_api/pipelines_client.ex
@@ -20,15 +20,6 @@ defmodule PipelinesAPI.PipelinesClient do
alias PipelinesAPI.Util.Metrics
alias PipelinesAPI.PipelinesClient.{RequestFormatter, GrpcClient, ResponseFormatter}
- def schedule(pipeline_request) do
- Metrics.benchmark("PipelinesAPI.ppl_client", ["schedule"], fn ->
- pipeline_request
- |> RequestFormatter.form_schedule_request()
- |> GrpcClient.schedule()
- |> ResponseFormatter.process_schedule_response()
- end)
- end
-
def describe(pipeline_id, params) do
Metrics.benchmark("PipelinesAPI.ppl_client", ["describe"], fn ->
pipeline_id
diff --git a/public-api/v1alpha/lib/pipelines_api/pipelines_client/grpc_client.ex b/public-api/v1alpha/lib/pipelines_api/pipelines_client/grpc_client.ex
index 2095102ef..5d7661368 100644
--- a/public-api/v1alpha/lib/pipelines_api/pipelines_client/grpc_client.ex
+++ b/public-api/v1alpha/lib/pipelines_api/pipelines_client/grpc_client.ex
@@ -13,34 +13,6 @@ defmodule PipelinesAPI.PipelinesClient.GrpcClient do
defp timeout(), do: Application.get_env(:pipelines_api, :grpc_timeout)
- # Schedule
-
- def schedule({:ok, schedule_request}) do
- result =
- Wormhole.capture(__MODULE__, :schedule_, [schedule_request],
- timeout: timeout(),
- stacktrace: true,
- skip_log: true
- )
-
- case result do
- {:ok, result} -> result
- {:error, reason} -> Log.internal_error(reason, "schedule")
- end
- end
-
- def schedule(error), do: error
-
- def schedule_(schedule_request) do
- {:ok, channel} = GRPC.Stub.connect(url())
-
- Metrics.benchmark("PipelinesAPI.ppl_client.grpc_client", ["schedule"], fn ->
- channel
- |> PipelineService.Stub.schedule(schedule_request, opts())
- |> Resp.ok?("schedule")
- end)
- end
-
# Describe
def describe({:ok, describe_request}) do
diff --git a/public-api/v1alpha/lib/pipelines_api/pipelines_client/request_formatter.ex b/public-api/v1alpha/lib/pipelines_api/pipelines_client/request_formatter.ex
index 421127797..0083cb966 100644
--- a/public-api/v1alpha/lib/pipelines_api/pipelines_client/request_formatter.ex
+++ b/public-api/v1alpha/lib/pipelines_api/pipelines_client/request_formatter.ex
@@ -7,7 +7,6 @@ defmodule PipelinesAPI.PipelinesClient.RequestFormatter do
alias PipelinesAPI.Util.ToTuple
alias InternalApi.Plumber.{
- ScheduleRequest,
DescribeRequest,
TerminateRequest,
ListRequest,
@@ -17,40 +16,8 @@ defmodule PipelinesAPI.PipelinesClient.RequestFormatter do
PartialRebuildRequest
}
- alias InternalApi.Plumber.ScheduleRequest.{Repo, Auth}
alias LogTee, as: LT
- # Schedule
-
- def form_schedule_request(params) when is_map(params) do
- %{
- service: params.service,
- hook_id: params.hook_id,
- branch_id: params.branch_id,
- request_token: params.ppl_request_token,
- repo:
- Repo.new(
- owner: params.owner,
- repo_name: params.repo_name,
- branch_name: params.branch_name,
- commit_sha: params.commit_sha
- ),
- auth:
- Auth.new(
- client_id: params.client_id,
- client_secret: params.client_secret,
- access_token: params.access_token
- ),
- project_id: params.project_id,
- snapshot_id: Map.get(params, :snapshot_id, ""),
- definition_file: Map.get(params, :definition_file, "")
- }
- |> ScheduleRequest.new()
- |> ToTuple.ok()
- end
-
- def form_schedule_request(_), do: ToTuple.internal_error("Internal error")
-
# Describe
def form_describe_request(pipeline_id, params) when is_binary(pipeline_id) do
diff --git a/public-api/v1alpha/lib/pipelines_api/pipelines_client/response_formatter.ex b/public-api/v1alpha/lib/pipelines_api/pipelines_client/response_formatter.ex
index 87aff6fce..fc34a4a65 100644
--- a/public-api/v1alpha/lib/pipelines_api/pipelines_client/response_formatter.ex
+++ b/public-api/v1alpha/lib/pipelines_api/pipelines_client/response_formatter.ex
@@ -11,28 +11,6 @@ defmodule PipelinesAPI.PipelinesClient.ResponseFormatter do
alias InternalApi.Plumber.{Pipeline, Block}
alias Util.Proto
- # Schedule
-
- def process_schedule_response({:ok, schedule_response}) do
- with true <- is_map(schedule_response),
- {:ok, response_status} <- Map.fetch(schedule_response, :response_status),
- :OK <- response_code_value(response_status),
- {:ok, ppl_id} <- Map.fetch(schedule_response, :ppl_id) do
- {:ok, ppl_id}
- else
- :LIMIT_EXCEEDED ->
- schedule_response.response_status |> Map.get(:message) |> ToTuple.user_error()
-
- :BAD_PARAM ->
- schedule_response.response_status |> Map.get(:message) |> ToTuple.user_error()
-
- _ ->
- log_invalid_response(schedule_response, "schedule")
- end
- end
-
- def process_schedule_response(error), do: error
-
# Describe
def process_describe_response({:ok, describe_response}) do
diff --git a/public-api/v1alpha/lib/pipelines_api/repo_proxy_client.ex b/public-api/v1alpha/lib/pipelines_api/repo_proxy_client.ex
deleted file mode 100644
index d52700d2d..000000000
--- a/public-api/v1alpha/lib/pipelines_api/repo_proxy_client.ex
+++ /dev/null
@@ -1,73 +0,0 @@
-defmodule PipelinesAPI.RepoProxyClient do
- @moduledoc """
- Module is used for communication with RepoProxy service over gRPC.
- """
-
- alias PipelinesAPI.Util.{Metrics, ToTuple}
- alias InternalApi.RepoProxy.{CreateRequest, RepoProxyService}
- alias Util.Proto
- alias LogTee, as: LT
-
- defp url(), do: System.get_env("REPO_PROXY_URL")
-
- @wormhole_timeout Application.compile_env(:pipelines_api, :grpc_timeout, [])
-
- def create(params) do
- Metrics.benchmark(__MODULE__, ["create"], fn ->
- params
- |> form_request()
- |> grpc_call()
- end)
- end
-
- defp form_request(params) do
- %{
- request_token: UUID.uuid4(),
- project_id: params |> Map.get("project_id", ""),
- requester_id: params |> Map.get("requester_id", ""),
- definition_file: params |> Map.get("pipeline_file", ""),
- git: %{
- reference: params |> Map.get("reference", "") |> ref(),
- commit_sha: params |> Map.get("commit_sha", "")
- },
- triggered_by: :API
- }
- |> Proto.deep_new(CreateRequest)
- catch
- error -> error
- end
-
- defp ref(""), do: ""
- defp ref(value = "refs/" <> _rest), do: value
- defp ref(branch_name), do: "refs/heads/" <> branch_name
-
- defp grpc_call({:ok, request}) do
- result =
- Wormhole.capture(__MODULE__, :call_repo_proxy, [request],
- stacktrace: true,
- skip_log: true,
- timeout_ms: @wormhole_timeout,
- ok_tuple: true
- )
-
- case result do
- {:ok, result} ->
- Proto.to_map(result)
-
- # Not_found, Invalid_argument and Aborted errors
- {:error, {:error, %GRPC.RPCError{message: msg, status: status}}}
- when status in [3, 5, 10] ->
- ToTuple.user_error(msg)
-
- {:error, reason} ->
- reason |> LT.error("RepoProxy service responded to 'create' with:")
- ToTuple.internal_error("Internal error")
- end
- end
-
- def call_repo_proxy(request) do
- {:ok, channel} = url() |> GRPC.Stub.connect()
-
- RepoProxyService.Stub.create(channel, request, timeout: @wormhole_timeout)
- end
-end
diff --git a/public-api/v1alpha/lib/pipelines_api/workflow_client.ex b/public-api/v1alpha/lib/pipelines_api/workflow_client.ex
index c843135ab..5dc129a3b 100644
--- a/public-api/v1alpha/lib/pipelines_api/workflow_client.ex
+++ b/public-api/v1alpha/lib/pipelines_api/workflow_client.ex
@@ -29,7 +29,7 @@ defmodule PipelinesAPI.WorkflowClient do
Metrics.benchmark("PipelinesAPI.router", ["reschedule"], fn ->
WFRequestFormatter.form_reschedule_request(wf_id, requester_id, request_token)
|> WFGrpcClient.reschedule()
- |> WFResponseFormatter.process_schedule_response()
+ |> WFResponseFormatter.process_reschedule_response()
end)
end
diff --git a/public-api/v1alpha/lib/pipelines_api/workflow_client/wf_request_formatter.ex b/public-api/v1alpha/lib/pipelines_api/workflow_client/wf_request_formatter.ex
index 240d9ea19..c00a097da 100644
--- a/public-api/v1alpha/lib/pipelines_api/workflow_client/wf_request_formatter.ex
+++ b/public-api/v1alpha/lib/pipelines_api/workflow_client/wf_request_formatter.ex
@@ -13,42 +13,51 @@ defmodule PipelinesAPI.WorkflowClient.WFRequestFormatter do
def form_schedule_request(params) when is_map(params) do
%{
- service: service(params["service"]),
+ service: service_type(params["repository"].integration_type),
repo: %{
- owner: params["owner"],
- repo_name: params["repo_name"],
- branch_name: params["branch_name"],
- commit_sha: params["commit_sha"]
- },
- auth: %{
- client_id: params["client_id"],
- client_secret: params["client_secret"],
- access_token: params["access_token"]
+ branch_name: params |> Map.get("reference", "") |> branch_name(),
+ commit_sha: params |> Map.get("commit_sha", "")
},
+ request_token: UUID.uuid4(),
project_id: params["project_id"],
- branch_id: params["branch_id"],
- hook_id: params["hook_id"],
- request_token: params["ppl_request_token"],
- snapshot_id: Map.get(params, "snapshot_id", ""),
- definition_file: Map.get(params, "definition_file", ""),
requester_id: Map.get(params, "requester_id", ""),
- organization_id: Map.get(params, "organization_id", "")
+ definition_file: Map.get(params, "definition_file", ".semaphore/semaphore.yml"),
+ organization_id: Map.get(params, "organization_id", ""),
+ git_reference: params |> Map.get("reference", "") |> ref(),
+ start_in_conceived_state: true,
+ triggered_by: :API,
+ env_vars: parameter_values_to_env_vars(params["parameters"])
}
|> Proto.deep_new(ScheduleRequest)
end
def form_schedule_request(_), do: ToTuple.internal_error("Internal error")
- defp service(service_val) when is_integer(service_val), do: service_val
+ defp service_type(:GITHUB_OAUTH_TOKEN), do: :GIT_HUB
+ defp service_type(:GITHUB_APP), do: :GIT_HUB
+ defp service_type(:BITBUCKET), do: :BITBUCKET
+ defp service_type(:GITLAB), do: :GITLAB
+ defp service_type(:GIT), do: :GIT
+
+ defp parameter_values_to_env_vars(nil), do: []
- defp service(service_val) when is_binary(service_val) do
- service_val
- |> String.upcase()
- |> String.to_atom()
- |> InternalApi.PlumberWF.ScheduleRequest.ServiceType.value()
+ defp parameter_values_to_env_vars(parameter_values) do
+ Enum.into(parameter_values, [], ¶meter_value_to_env_var/1)
end
- defp service(_service_val), do: 0
+ defp parameter_value_to_env_var({name, value}) do
+ %{name: name, value: if(is_nil(value), do: "", else: value)}
+ end
+
+ defp ref(""), do: ""
+ defp ref(value = "refs/" <> _rest), do: value
+ defp ref(branch_name), do: "refs/heads/" <> branch_name
+
+ defp branch_name(""), do: ""
+ defp branch_name(tag = "refs/tags/" <> _rest), do: tag
+ defp branch_name("refs/pull/" <> number), do: "pull-request-" <> number
+ defp branch_name("refs/heads/" <> branch_name), do: branch_name
+ defp branch_name(name), do: name
# Terminate
diff --git a/public-api/v1alpha/lib/pipelines_api/workflow_client/wf_response_formatter.ex b/public-api/v1alpha/lib/pipelines_api/workflow_client/wf_response_formatter.ex
index 6dd4ef86c..a2232f695 100644
--- a/public-api/v1alpha/lib/pipelines_api/workflow_client/wf_response_formatter.ex
+++ b/public-api/v1alpha/lib/pipelines_api/workflow_client/wf_response_formatter.ex
@@ -21,7 +21,7 @@ defmodule PipelinesAPI.WorkflowClient.WFResponseFormatter do
{:code, :OK} <- {:code, Map.get(status, :code)},
{:ok, wf_id} <- Map.fetch(schedule_response, :wf_id),
{:ok, ppl_id} <- Map.fetch(schedule_response, :ppl_id) do
- {:ok, %{wf_id: wf_id, ppl_id: ppl_id}}
+ {:ok, %{workflow_id: wf_id, pipeline_id: ppl_id}}
else
{:code, _} -> when_status_code_not_ok(schedule_response)
_ -> log_invalid_response(schedule_response, "schedule")
@@ -34,6 +34,7 @@ defmodule PipelinesAPI.WorkflowClient.WFResponseFormatter do
schedule_response
|> Proto.to_map!()
|> Map.get(:status)
+ |> Map.get(:message)
|> ToTuple.user_error()
end
@@ -44,6 +45,30 @@ defmodule PipelinesAPI.WorkflowClient.WFResponseFormatter do
ToTuple.internal_error("Internal error")
end
+ # Reschedule
+
+ def process_reschedule_response({:ok, reschedule_response}) do
+ with true <- is_map(reschedule_response),
+ response_map <- Proto.to_map!(reschedule_response),
+ {:ok, status} <- Map.fetch(response_map, :status),
+ {:code, :OK} <- {:code, Map.get(status, :code)},
+ {:ok, wf_id} <- Map.fetch(reschedule_response, :wf_id),
+ {:ok, ppl_id} <- Map.fetch(reschedule_response, :ppl_id) do
+ {:ok, %{wf_id: wf_id, ppl_id: ppl_id}}
+ else
+ {:code, _} ->
+ reschedule_response
+ |> Proto.to_map!()
+ |> Map.get(:status)
+ |> ToTuple.user_error()
+
+ _ ->
+ log_invalid_response(reschedule_response, "reschedule")
+ end
+ end
+
+ def process_reschedule_response(error), do: error
+
# Terminate
def process_terminate_response({:ok, terminate_response}) do
@@ -115,7 +140,7 @@ defmodule PipelinesAPI.WorkflowClient.WFResponseFormatter do
value |> Atom.to_string() |> String.downcase()
end
- def enum_to_string(name, value) when is_integer(value) do
+ def enum_to_string(_name, value) when is_integer(value) do
value |> TriggeredBy.key() |> Atom.to_string() |> String.downcase()
end
end
diff --git a/public-api/v1alpha/lib/pipelines_api/workflows/schedule.ex b/public-api/v1alpha/lib/pipelines_api/workflows/schedule.ex
index 4255bfd31..1c1b5cf7f 100644
--- a/public-api/v1alpha/lib/pipelines_api/workflows/schedule.ex
+++ b/public-api/v1alpha/lib/pipelines_api/workflows/schedule.ex
@@ -6,7 +6,8 @@ defmodule PipelinesAPI.Workflows.Schedule do
alias PipelinesAPI.Util.Metrics
alias PipelinesAPI.Pipelines.Common
- alias PipelinesAPI.RepoProxyClient
+ alias PipelinesAPI.WorkflowClient
+ alias PipelinesAPI.ProjectClient
alias Plug.Conn
use Plug.Builder
@@ -18,15 +19,35 @@ defmodule PipelinesAPI.Workflows.Schedule do
def schedule(conn, _opts) do
Metrics.benchmark("PipelinesAPI.router", ["wf_schedule"], fn ->
- conn
- |> add_requester_id()
- |> RepoProxyClient.create()
- |> Common.respond(conn)
+ case find_repository(conn) do
+ {:ok, params} ->
+ params
+ |> add_requester_id(conn)
+ |> add_organization_id(conn)
+ |> WorkflowClient.schedule()
+ |> Common.respond(conn)
+
+ error ->
+ Common.respond(error, conn)
+ end
end)
end
- defp add_requester_id(conn) do
+ defp add_requester_id(params, conn) do
requester_id = Conn.get_req_header(conn, "x-semaphore-user-id") |> Enum.at(0, "")
- Map.put(conn.params, "requester_id", requester_id)
+ Map.put(params, "requester_id", requester_id)
+ end
+
+ defp add_organization_id(params, conn) do
+ organization_id = Conn.get_req_header(conn, "x-semaphore-org-id") |> Enum.at(0, "")
+ Map.put(params, "organization_id", organization_id)
+ end
+
+ defp find_repository(conn = %{params: %{"project_id" => project_id}})
+ when is_binary(project_id) and project_id != "" do
+ case ProjectClient.describe(project_id) do
+ {:ok, project} -> {:ok, Map.put(conn.params, "repository", project.spec.repository)}
+ {:error, _reason} -> {:error, {:user, "Invalid request - missing parameter 'project_id'."}}
+ end
end
end
diff --git a/public-api/v1alpha/test/pipelines_client/request_formatter_test.exs b/public-api/v1alpha/test/pipelines_client/request_formatter_test.exs
index c817e21f7..fe4d334be 100644
--- a/public-api/v1alpha/test/pipelines_client/request_formatter_test.exs
+++ b/public-api/v1alpha/test/pipelines_client/request_formatter_test.exs
@@ -1,11 +1,9 @@
defmodule PipelinesAPI.PipelinesClient.RequestFormatter.Test do
use ExUnit.Case
- alias Test.GitHub.Credentials
alias PipelinesAPI.PipelinesClient.RequestFormatter
alias InternalApi.Plumber.{
- ScheduleRequest,
DescribeRequest,
TerminateRequest,
ListRequest,
@@ -14,69 +12,6 @@ defmodule PipelinesAPI.PipelinesClient.RequestFormatter.Test do
ValidateYamlRequest
}
- alias PipelinesAPI.Validator
- alias Util.ToTuple
-
- # Schedule
-
- @schedule_request_required_fields ~w(service ppl_request_token owner repo_name hook_id
- branch_name commit_sha client_id client_secret access_token project_id branch_id)
-
- test "form_schedule_request() returns {:ok, request} when called with map with all params" do
- {:ok, params} = schedule_params() |> Validator.validate_post_pipelines() |> atom_keys()
-
- assert {:ok, schedule_request} = RequestFormatter.form_schedule_request(params)
- assert %ScheduleRequest{} = schedule_request
- end
-
- defp atom_keys({:ok, list}) do
- list |> Enum.map(fn {k, v} -> {String.to_atom(k), v} end) |> Enum.into(%{}) |> ToTuple.ok()
- end
-
- test "form_schedule_request() returns error when called with map with misssing params" do
- params = schedule_params()
-
- @schedule_request_required_fields
- |> Enum.map(fn field_name -> test_field_is_required(params, field_name) end)
- end
-
- defp test_field_is_required(params, field_name) do
- params = Map.delete(params, field_name)
-
- assert {:error, {:user, message}} = Validator.validate_post_pipelines(params)
- assert message == "Missing field #{field_name} in pipeline schedule request"
- end
-
- test "form_schedule_request() returns error when called with map with wrong service field value" do
- params = schedule_params() |> Map.put("service", "non-existing")
-
- assert {:error, {:user, message}} = Validator.validate_post_pipelines(params)
- assert message == "Invalid value for service field: non-existing"
- end
-
- test "form_schedule_request() returns internal error when it is not called with map as a param" do
- params = "123"
-
- assert {:error, {:internal, message}} = RequestFormatter.form_schedule_request(params)
- assert message == "Internal error"
- end
-
- defp schedule_params() do
- %{
- "owner" => "renderedtext",
- "repo_name" => "pipelines-test-repo-auto-call",
- "service" => "git_hub",
- "commit_sha" => "6a87726284a6109fc5ce27e02722abd4c6265de0",
- "branch_name" => "non-default-branch",
- "ppl_request_token" => UUID.uuid4(),
- "project_id" => "test",
- "hook_id" => UUID.uuid4(),
- "branch_id" => UUID.uuid4(),
- "organization_id" => UUID.uuid4()
- }
- |> Map.merge(Credentials.string_keys())
- end
-
# Describe
test "form_describe_request() returns {:ok, request} when called with string param" do
diff --git a/public-api/v1alpha/test/pipelines_client/response_formatter_test.exs b/public-api/v1alpha/test/pipelines_client/response_formatter_test.exs
index 4a2df6750..b42d218fb 100644
--- a/public-api/v1alpha/test/pipelines_client/response_formatter_test.exs
+++ b/public-api/v1alpha/test/pipelines_client/response_formatter_test.exs
@@ -4,7 +4,6 @@ defmodule PipelinesAPI.PipelinesClient.ResponseFormatter.Test do
alias PipelinesAPI.PipelinesClient.ResponseFormatter
alias InternalApi.Plumber.{
- ScheduleResponse,
DescribeResponse,
TerminateResponse,
VersionResponse,
@@ -19,44 +18,6 @@ defmodule PipelinesAPI.PipelinesClient.ResponseFormatter.Test do
alias PipelinesAPI.Util.ToTuple
alias InternalApi.Plumber.Pipeline.State
- # Schedule
-
- test "process_schedule_response() returns {:ok, ppl_id} when given valid params" do
- response = schedule_response(:OK, "") |> ToTuple.ok()
-
- assert {:ok, ppl_id} = ResponseFormatter.process_schedule_response(response)
- assert {:ok, _} = UUID.info(ppl_id)
- end
-
- test "process_schedule_response() returns error and server message when server returns BAD_PARAM code" do
- response = schedule_response(:BAD_PARAM, "Error message from server") |> ToTuple.ok()
-
- assert {:error, {:user, message}} = ResponseFormatter.process_schedule_response(response)
- assert message == "Error message from server"
- end
-
- test "process_schedule_response() returns internal error when it receives {:ok, invalid_data}" do
- response = {:ok, "123"}
-
- assert {:error, {:internal, message}} = ResponseFormatter.process_schedule_response(response)
- assert message == "Internal error"
- end
-
- test "process_schedule_response() returns what it gets if it's not an :ok tuple" do
- response = {:error, {:user, "Error message"}}
-
- assert {:error, {:user, message}} = ResponseFormatter.process_schedule_response(response)
- assert message == "Error message"
- end
-
- defp schedule_response(code, message) do
- %{
- ppl_id: UUID.uuid4(),
- response_status: response_status(code, message)
- }
- |> ScheduleResponse.new()
- end
-
# Describe
test "process_describe_response() returns {:ok, description} when given valid params and state is done" do
diff --git a/public-api/v1alpha/test/repo_proxy_client_test.exs b/public-api/v1alpha/test/repo_proxy_client_test.exs
deleted file mode 100644
index 0cd07b93b..000000000
--- a/public-api/v1alpha/test/repo_proxy_client_test.exs
+++ /dev/null
@@ -1,77 +0,0 @@
-defmodule PipelinesAPI.RepoProxyClient.Test do
- use ExUnit.Case
-
- alias PipelinesAPI.RepoProxyClient
-
- test "call Create API and get :ok response" do
- params = %{
- "project_id" => "project_1",
- "reference" => "master",
- "commit_sha" => "1234",
- "requester_id" => "user_1",
- "pipeline_file" => ".semaphore/semaphore.yml"
- }
-
- assert {:ok, response} = RepoProxyClient.create(params)
-
- assert {:ok, _} = UUID.info(response.workflow_id)
- assert {:ok, _} = UUID.info(response.pipeline_id)
- assert {:ok, _} = UUID.info(response.hook_id)
- end
-
- test "call Create API and get :invalid_argument response" do
- params = %{
- "project_id" => "invalid_arg",
- "reference" => "master",
- "commit_sha" => "1234",
- "requester_id" => "user_1",
- "pipeline_file" => ".semaphore/semaphore.yml"
- }
-
- assert {:error, {:user, message}} = RepoProxyClient.create(params)
- assert message == "Invalid argument"
- end
-
- test "call Create API and get :not_found response" do
- params = %{
- "project_id" => "not_found",
- "reference" => "master",
- "commit_sha" => "1234",
- "requester_id" => "user_1",
- "pipeline_file" => ".semaphore/semaphore.yml"
- }
-
- assert {:error, {:user, message}} = RepoProxyClient.create(params)
- assert message == "Not found"
- end
-
- test "call Create API and get :aborted response" do
- params = %{
- "project_id" => "aborted",
- "reference" => "master",
- "commit_sha" => "1234",
- "requester_id" => "user_1",
- "pipeline_file" => ".semaphore/semaphore.yml"
- }
-
- assert {:error, {:user, message}} = RepoProxyClient.create(params)
- assert message == "Aborted"
- end
-
- test "create rpc call returns internal error when it can't connect to RepoProxy service" do
- System.put_env("REPO_PROXY_URL", "something:12345")
-
- params = %{
- "project_id" => "project_1",
- "reference" => "master",
- "commit_sha" => "1234",
- "requester_id" => "user_1",
- "pipeline_file" => ".semaphore/semaphore.yml"
- }
-
- assert {:error, {:internal, message}} = RepoProxyClient.create(params)
- assert message == "Internal error"
-
- System.put_env("REPO_PROXY_URL", "127.0.0.1:50052")
- end
-end
diff --git a/public-api/v1alpha/test/router/describe_test.exs b/public-api/v1alpha/test/router/describe_test.exs
index 44dd4856f..b97202480 100644
--- a/public-api/v1alpha/test/router/describe_test.exs
+++ b/public-api/v1alpha/test/router/describe_test.exs
@@ -108,7 +108,6 @@ defmodule Router.DescribeTest do
assert %{"pipeline" => _ppl, "blocks" => blocks} = body
blocks
- |> IO.inspect(label: "block")
|> Enum.map(fn block -> assert is_list(block["jobs"]) end)
end
diff --git a/public-api/v1alpha/test/router/list_test.exs b/public-api/v1alpha/test/router/list_test.exs
index a21ae0829..fef1dee81 100644
--- a/public-api/v1alpha/test/router/list_test.exs
+++ b/public-api/v1alpha/test/router/list_test.exs
@@ -112,7 +112,6 @@ defmodule Router.ListTest do
params = %{wf_id: wf_id}
{:ok, response} = get_list_ppls(params)
%{body: body, status_code: status_code, headers: headers} = response
- if(status_code != 200, do: IO.puts("Response body: #{inspect(body)}"))
body =
case decode? do
@@ -127,7 +126,6 @@ defmodule Router.ListTest do
params = %{project_id: project_id, branch_name: branch_name}
{:ok, response} = get_list_ppls(params)
%{body: body, status_code: status_code, headers: headers} = response
- if(status_code != 200, do: IO.puts("Response body: #{inspect(body)}"))
{status_code, headers, Poison.decode!(body)}
end
diff --git a/public-api/v1alpha/test/router/promotions/list_test.exs b/public-api/v1alpha/test/router/promotions/list_test.exs
index b12ef364d..8ec359526 100644
--- a/public-api/v1alpha/test/router/promotions/list_test.exs
+++ b/public-api/v1alpha/test/router/promotions/list_test.exs
@@ -72,8 +72,6 @@ defmodule Router.Promotions.ListTest do
params = %{pipeline_id: ppl_id, name: target_name}
{:ok, response} = get_promotions_request(params, headers())
%{body: body, status_code: status_code, headers: headers} = response
- IO.puts("Response body: #{inspect(body)}")
- IO.puts("Headers: #{inspect(headers)}")
if decode do
{status_code, headers, Poison.decode!(body)}
diff --git a/public-api/v1alpha/test/router/schedules/delete_test.exs b/public-api/v1alpha/test/router/schedules/delete_test.exs
index 23a1d79ea..fe9d315fb 100644
--- a/public-api/v1alpha/test/router/schedules/delete_test.exs
+++ b/public-api/v1alpha/test/router/schedules/delete_test.exs
@@ -45,7 +45,7 @@ defmodule PipelinesAPI.Schedules.Delete.Test do
def delete_schedule(identifier, expected_status_code, decode \\ true) do
{:ok, response} = delete(identifier)
%{:body => body, :status_code => status_code} = response
- if(status_code != 200, do: IO.puts("Response body: #{inspect(body)}"))
+ if(status_code != expected_status_code, do: IO.puts("Response body: #{inspect(body)}"))
assert status_code == expected_status_code
if decode do
diff --git a/public-api/v1alpha/test/router/schedules/describe_test.exs b/public-api/v1alpha/test/router/schedules/describe_test.exs
index a549af068..996333cea 100644
--- a/public-api/v1alpha/test/router/schedules/describe_test.exs
+++ b/public-api/v1alpha/test/router/schedules/describe_test.exs
@@ -79,7 +79,7 @@ defmodule PipelinesAPI.Schedules.Describe.Test do
def describe_schedule(identifier, expected_status_code, decode \\ true) do
{:ok, response} = get_description(identifier)
%{:body => body, :status_code => status_code} = response
- IO.puts("Response body: #{inspect(body)}")
+ if(status_code != expected_status_code, do: IO.puts("Response body: #{inspect(body)}"))
assert status_code == expected_status_code
if decode do
diff --git a/public-api/v1alpha/test/router/schedules/list_test.exs b/public-api/v1alpha/test/router/schedules/list_test.exs
index ef0c190f7..36849e7e9 100644
--- a/public-api/v1alpha/test/router/schedules/list_test.exs
+++ b/public-api/v1alpha/test/router/schedules/list_test.exs
@@ -69,8 +69,6 @@ defmodule PipelinesAPI.Schedules.List.Test do
def list_schedules(parms, expected_status_code, decode \\ true) do
{:ok, response} = list(parms)
%{:body => body, :status_code => status_code, headers: headers} = response
- IO.puts("Response body: #{inspect(body)}")
- IO.puts("Headers: #{inspect(headers)}")
assert status_code == expected_status_code
if decode do
diff --git a/public-api/v1alpha/test/router/wf_list_test.exs b/public-api/v1alpha/test/router/wf_list_test.exs
index 81b74adad..a333d0ba4 100644
--- a/public-api/v1alpha/test/router/wf_list_test.exs
+++ b/public-api/v1alpha/test/router/wf_list_test.exs
@@ -73,8 +73,6 @@ defmodule Router.WfListTest do
def list_wfs(params, decode? \\ true) do
{:ok, response} = get_list_wfs(params)
%{:body => body, :status_code => status_code, headers: headers} = response
- IO.puts("Response body: #{inspect(body)}")
- IO.puts("Headers: #{inspect(headers)}")
body =
case decode? do
diff --git a/public-api/v1alpha/test/router/wf_schedule_test.exs b/public-api/v1alpha/test/router/wf_schedule_test.exs
index d3256a6fa..27f8db0d2 100644
--- a/public-api/v1alpha/test/router/wf_schedule_test.exs
+++ b/public-api/v1alpha/test/router/wf_schedule_test.exs
@@ -6,21 +6,6 @@ defmodule PipelinesAPI.Workflows.Schedule.Test do
:ok
end
- test "POST /workflows/ - project ID mismatch" do
- org = Support.Stubs.Organization.create(name: "RT2", org_username: "rt2")
- user = Support.Stubs.User.create_default()
- project = Support.Stubs.Project.create(org, user)
-
- params = %{
- "project_id" => project.id,
- "reference" => "master",
- "commit_sha" => "1234",
- "pipeline_file" => ".semaphore/semaphore.yml"
- }
-
- assert "Not Found" = create_workflow(params, 404)
- end
-
test "POST /workflows/ - 403 when user does not have permission" do
GrpcMock.stub(RBACMock, :list_user_permissions, fn _, _ ->
InternalApi.RBAC.ListUserPermissionsResponse.new(
@@ -58,7 +43,6 @@ defmodule PipelinesAPI.Workflows.Schedule.Test do
assert {:ok, response} = Poison.decode(body)
assert {:ok, _} = UUID.info(response["workflow_id"])
assert {:ok, _} = UUID.info(response["pipeline_id"])
- assert {:ok, _} = UUID.info(response["hook_id"])
end
test "POST /workflows/ - returns 400 when server returns :invalid_argument response" do
@@ -76,59 +60,55 @@ defmodule PipelinesAPI.Workflows.Schedule.Test do
assert "\"Invalid argument\"" = create_workflow(params, 400)
end
- test "POST /workflows/ - returns 400 when server returns :not_found response" do
+ test "POST /workflows/ - returns 400 when server returns :failed_precondition response" do
org = Support.Stubs.Organization.create_default()
user = Support.Stubs.User.create_default()
- Support.Stubs.Project.create(org, user, id: "not_found")
+ Support.Stubs.Project.create(org, user, id: "project_deleted")
params = %{
- "project_id" => "not_found",
+ "project_id" => "project_deleted",
"reference" => "master",
"commit_sha" => "1234",
"pipeline_file" => ".semaphore/semaphore.yml"
}
- assert "\"Not found\"" = create_workflow(params, 400)
+ assert "\"Failed precondition\"" = create_workflow(params, 400)
end
- test "POST /workflows/ - returns 400 when server returns :aborted response" do
+ test "POST /workflows/ - returns 400 when server returns :resource_exhausted response" do
org = Support.Stubs.Organization.create_default()
user = Support.Stubs.User.create_default()
- Support.Stubs.Project.create(org, user, id: "aborted")
+ Support.Stubs.Project.create(org, user, id: "resource_exhausted")
params = %{
- "project_id" => "aborted",
+ "project_id" => "resource_exhausted",
"reference" => "master",
"commit_sha" => "1234",
"pipeline_file" => ".semaphore/semaphore.yml"
}
- assert "\"Aborted\"" = create_workflow(params, 400)
+ assert "\"Resource exhausted\"" = create_workflow(params, 400)
end
test "POST /workflows/ - returns 500 when there is an internal error on server" do
- System.put_env("REPO_PROXY_URL", "something:12345")
-
org = Support.Stubs.Organization.create_default()
user = Support.Stubs.User.create_default()
- Support.Stubs.Project.create(org, user, id: "project_1")
+ Support.Stubs.Project.create(org, user, id: "internal_error")
params = %{
- "project_id" => "project_1",
+ "project_id" => "internal_error",
"reference" => "master",
"commit_sha" => "1234",
"pipeline_file" => ".semaphore/semaphore.yml"
}
assert "\"Internal error\"" = create_workflow(params, 500)
-
- System.put_env("REPO_PROXY_URL", "127.0.0.1:50052")
end
def create_workflow(params, expected_status_code) do
{:ok, response} = params |> Poison.encode!() |> create()
%{:body => body, :status_code => status_code} = response
- if(status_code != 200, do: IO.puts("Response body: #{inspect(body)}"))
+ if(status_code != expected_status_code, do: IO.puts("Response body: #{inspect(body)}"))
assert status_code == expected_status_code
body
end
diff --git a/public-api/v1alpha/test/support/stubs/workflow.ex b/public-api/v1alpha/test/support/stubs/workflow.ex
index dfc495961..48516f021 100644
--- a/public-api/v1alpha/test/support/stubs/workflow.ex
+++ b/public-api/v1alpha/test/support/stubs/workflow.ex
@@ -121,28 +121,49 @@ defmodule Support.Stubs.Workflow do
end
def schedule(req, _) do
- if req.request_token != "" do
- user_id = UUID.uuid4()
-
- hook = %{
- id: req.hook_id,
- project_id: req.project_id,
- branch_id: req.branch_id
- }
-
- new_workflow = Support.Stubs.Workflow.create(hook, user_id)
- new_pipeline = Support.Stubs.Pipeline.create(new_workflow)
-
- InternalApi.PlumberWF.ScheduleResponse.new(
- wf_id: new_workflow.id,
- ppl_id: new_pipeline.id,
- status: ok()
- )
- else
- InternalApi.PlumberWF.ScheduleResponse.new(
- status:
- InternalApi.Status.new(code: Google.Rpc.Code.value(:INVALID_ARGUMENT), message: "")
- )
+ case req.project_id do
+ "invalid_arg" ->
+ InternalApi.PlumberWF.ScheduleResponse.new(
+ status:
+ InternalApi.Status.new(
+ code: Google.Rpc.Code.value(:INVALID_ARGUMENT),
+ message: "Invalid argument"
+ )
+ )
+
+ "project_deleted" ->
+ InternalApi.PlumberWF.ScheduleResponse.new(
+ status:
+ InternalApi.Status.new(
+ code: Google.Rpc.Code.value(:FAILED_PRECONDITION),
+ message: "Failed precondition"
+ )
+ )
+
+ "resource_exhausted" ->
+ InternalApi.PlumberWF.ScheduleResponse.new(
+ status:
+ InternalApi.Status.new(
+ code: Google.Rpc.Code.value(:RESOURCE_EXHAUSTED),
+ message: "Resource exhausted"
+ )
+ )
+
+ "internal_error" ->
+ raise GRPC.RPCError, status: GRPC.Status.internal(), message: "Internal error"
+
+ _ ->
+ user_id = UUID.uuid4()
+ branch = Support.Stubs.Branch.create(%{id: req.project_id})
+ hook = Support.Stubs.Hook.create(branch)
+ new_workflow = Support.Stubs.Workflow.create(hook, user_id)
+ new_pipeline = Support.Stubs.Pipeline.create(new_workflow)
+
+ InternalApi.PlumberWF.ScheduleResponse.new(
+ wf_id: new_workflow.id,
+ ppl_id: new_pipeline.id,
+ status: ok()
+ )
end
end
diff --git a/public-api/v1alpha/test/test_helper.exs b/public-api/v1alpha/test/test_helper.exs
index 2c8204415..5c0c30f62 100644
--- a/public-api/v1alpha/test/test_helper.exs
+++ b/public-api/v1alpha/test/test_helper.exs
@@ -19,23 +19,6 @@ ExUnit.configure(
ExUnit.start(trace: true, capture_log: true)
-defmodule Test.GitHub.Credentials do
- @moduledoc """
- User credentials for GH repo access
- (only for public repos)
- """
-
- def string_keys, do: atom_keys() |> Poison.encode!() |> Poison.decode!()
-
- def atom_keys do
- %{
- client_id: "328c742132e5407abd7d",
- client_secret: "1d5559c02a2a20d8c3343967c331d93401959d9e",
- access_token: "7e12be7748ab2e7ac9c4b1ecc3fcb19741fc28ec"
- }
- end
-end
-
defmodule Test.PipelinesClient do
use ExUnit.Case
@@ -95,7 +78,6 @@ defmodule Test.PipelinesClient do
def describe_ppl_with_id(id, decode? \\ true, detailed \\ false, headers \\ headers()) do
{:ok, response} = get_ppl_description(id, Atom.to_string(detailed), headers)
%{:body => body, :status_code => status_code} = response
- if(status_code != 200, do: IO.puts("Response body: #{inspect(body)}"))
body =
case decode? do
@@ -109,7 +91,6 @@ defmodule Test.PipelinesClient do
def describe_wf(wf_id, decode? \\ true, headers \\ headers()) do
{:ok, response} = get_wf_description(wf_id, headers)
%{:body => body, :status_code => status_code} = response
- if(status_code != 200, do: IO.puts("Response body: #{inspect(body)}"))
body =
case decode? do
diff --git a/public-api/v1alpha/test/workflow_client_test.exs b/public-api/v1alpha/test/workflow_client_test.exs
index 60906d5c8..ec38e119a 100644
--- a/public-api/v1alpha/test/workflow_client_test.exs
+++ b/public-api/v1alpha/test/workflow_client_test.exs
@@ -2,7 +2,6 @@ defmodule PipelinesAPI.WorkflowClient.Test do
use ExUnit.Case
alias PipelinesAPI.WorkflowClient
- alias Test.GitHub.Credentials
setup do
Support.Stubs.reset()
@@ -11,17 +10,11 @@ defmodule PipelinesAPI.WorkflowClient.Test do
test "workflow client schedule and get valid response" do
response = WorkflowClient.schedule(schedule_params())
assert {:ok, schedule_response} = response
- assert %{wf_id: wf_id, ppl_id: ppl_id} = schedule_response
+ assert %{workflow_id: wf_id, pipeline_id: ppl_id} = schedule_response
assert {:ok, _} = UUID.info(ppl_id)
assert {:ok, _} = UUID.info(wf_id)
end
- test "workflow client schedule - empty request_token" do
- params = schedule_params() |> Map.replace!("ppl_request_token", "")
- assert {:error, {:user, message}} = WorkflowClient.schedule(params)
- assert message.code == :INVALID_ARGUMENT
- end
-
test "workflow client schedule - limit exceeded" do
GrpcMock.stub(WorkflowMock, :schedule, fn _, _stream ->
InternalApi.PlumberWF.ScheduleResponse.new(
@@ -33,10 +26,9 @@ defmodule PipelinesAPI.WorkflowClient.Test do
)
end)
- response = WorkflowClient.schedule(schedule_params_same_branch())
- assert {:error, {:user, status}} = response
- assert status.code == :RESOURCE_EXHAUSTED
- assert status.message == "No more workflows for you."
+ response = WorkflowClient.schedule(schedule_params())
+ assert {:error, {:user, message}} = response
+ assert message == "No more workflows for you."
end
test "workflow client schedule - refused if project deletion was requested" do
@@ -50,40 +42,19 @@ defmodule PipelinesAPI.WorkflowClient.Test do
)
end)
- assert {:error, {:user, status}} = WorkflowClient.schedule(schedule_params())
- assert status.code == :FAILED_PRECONDITION
- assert status.message == "Project was deleted."
- end
-
- defp schedule_params_same_branch() do
- same_branch_params = %{
- "branch_id" => "123",
- "project_id" => "123",
- "service" => "local",
- "repo_name" => "8_sleeping"
- }
-
- schedule_params()
- |> Map.merge(same_branch_params)
+ assert {:error, {:user, message}} = WorkflowClient.schedule(schedule_params())
+ assert message == "Project was deleted."
end
defp schedule_params() do
%{
- "owner" => "renderedtext",
- "repo_name" => "pipelines-test-repo-auto-call",
- "service" => "git_hub",
- "ppl_request_token" => UUID.uuid4(),
- "branch_id" => UUID.uuid4(),
- "hook_id" => UUID.uuid4(),
- "requester_id" => UUID.uuid4(),
- "branch_name" => "10s-pipeline-run",
+ "reference" => "refs/heads/main",
"commit_sha" => "773d5c953bd68cc97efa81d2e014449336265fb4",
- "file_name" => "semaphore.yml",
- "working_dir" => ".semaphore",
- "snapshot_archive" => "123",
+ "definition_file" => "semaphore.yml",
"project_id" => UUID.uuid4(),
- "organization_id" => UUID.uuid4()
+ "organization_id" => UUID.uuid4(),
+ "requester_id" => UUID.uuid4(),
+ "repository" => %{integration_type: :GITHUB_APP}
}
- |> Map.merge(Credentials.string_keys())
end
end
diff --git a/public-api/v2/test/test_helper.exs b/public-api/v2/test/test_helper.exs
index cf11f41f3..af157ed5e 100644
--- a/public-api/v2/test/test_helper.exs
+++ b/public-api/v2/test/test_helper.exs
@@ -21,23 +21,6 @@ ExUnit.configure(
ExUnit.start()
-defmodule Test.GitHub.Credentials do
- @moduledoc """
- User credentials for GH repo access
- (only for public repos)
- """
-
- def string_keys, do: atom_keys() |> Jason.encode!() |> Jason.decode!()
-
- def atom_keys do
- %{
- client_id: "328c742132e5407abd7d",
- client_secret: "1d5559c02a2a20d8c3343967c331d93401959d9e",
- access_token: "7e12be7748ab2e7ac9c4b1ecc3fcb19741fc28ec"
- }
- end
-end
-
defmodule Test.PipelinesClient do
use ExUnit.Case
From b522501e0693510f3a66903d96415b1da10022c8 Mon Sep 17 00:00:00 2001
From: Veljko Maksimovic <45179708+VeljkoMaksimovic@users.noreply.github.com>
Date: Fri, 27 Jun 2025 16:07:49 +0200
Subject: [PATCH 16/87] feat(rbac): enable okta login when creating integration
(#416)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
https://github.com/renderedtext/tasks/issues/8169
## ✅ Checklist
- [x] I have tested this change
- [ ] This change requires documentation update
---
ee/rbac/lib/rbac/api/organization.ex | 25 ++
ee/rbac/lib/rbac/okta/integrations.ex | 92 ++++++--
.../rbac/grpc_servers/okta_server_test.exs | 217 ++++++++++++++++--
.../rbac/okta/saml/payload_parser_test.exs | 24 +-
ee/rbac/test/rbac/okta/scim/api_test.exs | 10 +-
.../test/rbac/okta/scim/provisioner_test.exs | 10 +-
6 files changed, 339 insertions(+), 39 deletions(-)
diff --git a/ee/rbac/lib/rbac/api/organization.ex b/ee/rbac/lib/rbac/api/organization.ex
index cb7318a16..d04e570c5 100644
--- a/ee/rbac/lib/rbac/api/organization.ex
+++ b/ee/rbac/lib/rbac/api/organization.ex
@@ -4,6 +4,15 @@ defmodule Rbac.Api.Organization do
def find_by_username(username) do
req = %Organization.DescribeRequest{org_username: username}
+ describe_organization(req)
+ end
+
+ def find_by_id(org_id) do
+ req = %Organization.DescribeRequest{org_id: org_id}
+ describe_organization(req)
+ end
+
+ defp describe_organization(req) do
{:ok, channel} = GRPC.Stub.connect(Application.fetch_env!(:rbac, :organization_grpc_endpoint))
Logger.info("Sending Organization describe request: #{inspect(req)}")
@@ -20,4 +29,20 @@ defmodule Rbac.Api.Organization do
{:error, :not_found}
end
end
+
+ def update(organization) do
+ req = %Organization.UpdateRequest{organization: organization}
+ {:ok, channel} = GRPC.Stub.connect(Application.fetch_env!(:rbac, :organization_grpc_endpoint))
+
+ Logger.info("Sending Organization update request: #{inspect(req)}")
+
+ grpc_result = Organization.OrganizationService.Stub.update(channel, req, timeout: 30_000)
+
+ Logger.info("Received Organization update response: #{inspect(grpc_result)}")
+
+ case grpc_result do
+ {:ok, res} -> {:ok, res.organization}
+ {:error, error} -> {:error, error}
+ end
+ end
end
diff --git a/ee/rbac/lib/rbac/okta/integrations.ex b/ee/rbac/lib/rbac/okta/integrations.ex
index aa87e5953..b25d33c5c 100644
--- a/ee/rbac/lib/rbac/okta/integrations.ex
+++ b/ee/rbac/lib/rbac/okta/integrations.ex
@@ -4,6 +4,8 @@ defmodule Rbac.Okta.Integration do
"""
require Ecto.Query
+ require Logger
+
alias Ecto.Query
alias Rbac.Repo
alias Rbac.Okta.Saml.Certificate
@@ -21,23 +23,82 @@ defmodule Rbac.Okta.Integration do
jit_provisioning_enabled,
idempotency_token \\ Ecto.UUID.generate()
) do
- with {:ok, fingerprint} <- Certificate.fingerprint(certificate),
- {:ok, integration} <-
- Rbac.Repo.OktaIntegration.insert_or_update(
- org_id: org_id,
- creator_id: creator_id,
- sso_url: sso_url,
- saml_issuer: saml_issuer,
- saml_certificate_fingerprint: Base.encode64(fingerprint),
- jit_provisioning_enabled: jit_provisioning_enabled,
- idempotency_token: idempotency_token
- ) do
- {:ok, integration}
+ Ecto.Multi.new()
+ |> Ecto.Multi.run(:fingerprint, fn _repo, _changes ->
+ Certificate.fingerprint(certificate)
+ end)
+ |> Ecto.Multi.run(:integration, fn _repo, %{fingerprint: fingerprint} ->
+ Rbac.Repo.OktaIntegration.insert_or_update(
+ org_id: org_id,
+ creator_id: creator_id,
+ sso_url: sso_url,
+ saml_issuer: saml_issuer,
+ saml_certificate_fingerprint: Base.encode64(fingerprint),
+ jit_provisioning_enabled: jit_provisioning_enabled,
+ idempotency_token: idempotency_token
+ )
+ end)
+ |> Ecto.Multi.run(:allowed_id_providers, fn _repo, _changes ->
+ add_okta_to_allowed_id_providers(org_id)
+ end)
+ |> Rbac.Repo.transaction()
+ |> case do
+ {:ok, %{integration: integration}} ->
+ {:ok, integration}
+
+ {:error, :fingerprint, reason, _changes} ->
+ Logger.error("Failed to decode certificate for org #{org_id}: #{inspect(reason)}.")
+ {:error, :cert_decode_error}
+
+ {:error, :integration, reason, _changes} ->
+ Logger.error(
+ "Failed to create/update Okta integration for org #{org_id}: #{inspect(reason)}"
+ )
+
+ {:error, {:integration_failed, reason}}
+
+ {:error, :allowed_id_providers, reason, _changes} ->
+ Logger.error(
+ "Failed to add Okta to allowed ID providers for org #{org_id}: #{inspect(reason)}"
+ )
+
+ {:error, {:allowed_id_providers_failed, reason}}
+
+ {:error, operation, reason, _changes} ->
+ Logger.error(
+ "Unknown operation #{inspect(operation)} failed for org #{org_id}: #{inspect(reason)}"
+ )
+
+ {:error, reason}
+ end
+ end
+
+ defp update_id_providers(org_id, operation, action) do
+ with {:ok, org} <- Rbac.Api.Organization.find_by_id(org_id),
+ updated_providers <- operation.(org.allowed_id_providers || []),
+ updated_org <- Map.put(org, :allowed_id_providers, updated_providers),
+ {:ok, updated} <- Rbac.Api.Organization.update(updated_org) do
+ {:ok, updated}
else
- e -> e
+ {:error, :not_found} ->
+ Logger.error("Failed to #{action} okta provider: Org #{org_id} not found")
+ {:error, :organization_not_found}
+
+ {:error, reason} ->
+ Logger.error("Failed to #{action} okta provider for org #{org_id}: #{inspect(reason)}")
+
+ {:error, :update_failed}
end
end
+ defp add_okta_to_allowed_id_providers(org_id) do
+ update_id_providers(org_id, &Enum.uniq(&1 ++ ["okta"]), "add")
+ end
+
+ defp remove_okta_from_allowed_id_providers(org_id) do
+ update_id_providers(org_id, &Enum.reject(&1, fn provider -> provider == "okta" end), "remove")
+ end
+
def generate_scim_token(integration) do
token = Rbac.Okta.Scim.Token.generate()
token_hash = Rbac.Okta.Scim.Token.hash(token)
@@ -96,10 +157,13 @@ defmodule Rbac.Okta.Integration do
Rbac.RoleManagement.retract_roles(rbi, :okta)
{:ok, :retracted_roles}
end)
- |> Ecto.Multi.run(:delete_okta_users, fn _repo, _cahnges ->
+ |> Ecto.Multi.run(:delete_okta_users, fn _repo, _changes ->
OktaUser.delete_all(id)
{:ok, :okta_users_deleted}
end)
+ |> Ecto.Multi.run(:remove_okta_from_allowed_id_providers, fn _repo, _changes ->
+ remove_okta_from_allowed_id_providers(integration.org_id)
+ end)
|> Ecto.Multi.delete(:delete_okta_integration, integration)
|> Rbac.Repo.transaction(timeout: 60_000)
diff --git a/ee/rbac/test/rbac/grpc_servers/okta_server_test.exs b/ee/rbac/test/rbac/grpc_servers/okta_server_test.exs
index caefd925a..f064435b0 100644
--- a/ee/rbac/test/rbac/grpc_servers/okta_server_test.exs
+++ b/ee/rbac/test/rbac/grpc_servers/okta_server_test.exs
@@ -34,8 +34,28 @@ defmodule Rbac.GrpcServers.OktaServer.Test do
saml_certificate: cert
}
- with_mock Rbac.Store.UserPermissions, [:passthrough],
- read_user_permissions: fn _ -> "organization.okta.manage" end do
+ org_without_okta = %{
+ org_id: request.org_id,
+ allowed_id_providers: ["github"]
+ }
+
+ org_with_okta = %{
+ org_id: request.org_id,
+ allowed_id_providers: ["github", "okta"]
+ }
+
+ with_mocks([
+ {Rbac.Store.UserPermissions, [],
+ [read_user_permissions: fn _ -> "organization.okta.manage" end]},
+ {Rbac.Api.Organization, [],
+ [
+ find_by_id: fn _ -> {:ok, org_without_okta} end,
+ update: fn org ->
+ assert "okta" in org.allowed_id_providers
+ {:ok, org_with_okta}
+ end
+ ]}
+ ]) do
assert {:ok, channel} = GRPC.Stub.connect("localhost:50051")
assert {:ok, res} = InternalApi.Okta.Okta.Stub.set_up(channel, request)
@@ -64,6 +84,10 @@ defmodule Rbac.GrpcServers.OktaServer.Test do
{:ok, fingerprint} = Rbac.Okta.Saml.Certificate.fingerprint(cert)
assert integration.saml_certificate_fingerprint == Base.encode64(fingerprint)
+
+ # Verify that organization API was called to update allowed_id_providers
+ assert_called(Rbac.Api.Organization.find_by_id(request.org_id))
+ assert_called(Rbac.Api.Organization.update(:_))
end
end
@@ -79,8 +103,23 @@ defmodule Rbac.GrpcServers.OktaServer.Test do
saml_certificate: cert
}
- with_mock Rbac.Store.UserPermissions, [:passthrough],
- read_user_permissions: fn _ -> "organization.okta.manage" end do
+ org = %{
+ org_id: request.org_id,
+ allowed_id_providers: ["github", "okta"]
+ }
+
+ with_mocks([
+ {Rbac.Store.UserPermissions, [],
+ [read_user_permissions: fn _ -> "organization.okta.manage" end]},
+ {Rbac.Api.Organization, [],
+ [
+ find_by_id: fn _ -> {:ok, org} end,
+ update: fn org ->
+ assert "okta" in org.allowed_id_providers
+ {:ok, org}
+ end
+ ]}
+ ]) do
assert {:ok, channel} = GRPC.Stub.connect("localhost:50051")
assert {:ok, res1} = InternalApi.Okta.Okta.Stub.set_up(channel, request)
assert {:ok, res2} = InternalApi.Okta.Okta.Stub.set_up(channel, request)
@@ -112,8 +151,23 @@ defmodule Rbac.GrpcServers.OktaServer.Test do
saml_certificate: cert
}
- with_mock Rbac.Store.UserPermissions, [:passthrough],
- read_user_permissions: fn _ -> "organization.okta.manage" end do
+ org_without_okta = %{
+ org_id: org_id,
+ allowed_id_providers: ["github"]
+ }
+
+ with_mocks([
+ {Rbac.Store.UserPermissions, [],
+ [read_user_permissions: fn _ -> "organization.okta.manage" end]},
+ {Rbac.Api.Organization, [],
+ [
+ find_by_id: fn ^org_id -> {:ok, org_without_okta} end,
+ update: fn org ->
+ assert "okta" in org.allowed_id_providers
+ {:ok, org}
+ end
+ ]}
+ ]) do
assert {:ok, channel} = GRPC.Stub.connect("localhost:50051")
assert {:ok, res} = InternalApi.Okta.Okta.Stub.set_up(channel, request)
:timer.sleep(2_000)
@@ -122,6 +176,58 @@ defmodule Rbac.GrpcServers.OktaServer.Test do
assert update_res.integration.created_at == res.integration.created_at
assert update_res.integration.updated_at != res.integration.updated_at
assert update_res.integration.idempotency_token != res.integration.idempotency_token
+
+ assert_called(Rbac.Api.Organization.find_by_id(org_id))
+ assert_called_exactly(Rbac.Api.Organization.update(:_), 2)
+ end
+ end
+
+ test "Integration is not created if updating allowed_id_providers fails" do
+ import ExUnit.CaptureLog
+
+ {:ok, cert} = Support.Okta.Saml.PayloadBuilder.test_cert()
+
+ org_id = Ecto.UUID.generate()
+
+ request = %InternalApi.Okta.SetUpRequest{
+ org_id: org_id,
+ creator_id: Ecto.UUID.generate(),
+ idempotency_token: Ecto.UUID.generate(),
+ saml_issuer: "https://otkta.something/very/secure",
+ jit_provisioning_enabled: false,
+ saml_certificate: cert
+ }
+
+ org_without_okta = %{
+ org_id: org_id,
+ allowed_id_providers: ["github"]
+ }
+
+ with_mocks([
+ {Rbac.Store.UserPermissions, [],
+ [read_user_permissions: fn _ -> "organization.okta.manage" end]},
+ {Rbac.Api.Organization, [],
+ [
+ find_by_id: fn ^org_id -> {:ok, org_without_okta} end,
+ update: fn org ->
+ assert "okta" in org.allowed_id_providers
+ {:error, nil}
+ end
+ ]}
+ ]) do
+ assert {:ok, channel} = GRPC.Stub.connect("localhost:50051")
+
+ log =
+ capture_log(fn ->
+ assert match?({:error, _}, InternalApi.Okta.Okta.Stub.set_up(channel, request))
+ assert {:error, :not_found} = Rbac.Okta.Integration.find_by_org_id(org_id)
+ end)
+
+ # Verify API calls and logging
+ assert_called(Rbac.Api.Organization.find_by_id(org_id))
+ assert_called(Rbac.Api.Organization.update(:_))
+
+ assert log =~ "Failed to add okta provider for org"
end
end
@@ -337,11 +443,34 @@ defmodule Rbac.GrpcServers.OktaServer.Test do
integration_id: integration.id
}
+ # Define the organization with okta in allowed_id_providers
+ org_with_okta = %{
+ org_id: integration.org_id,
+ allowed_id_providers: ["github", "okta"]
+ }
+
assert {:ok, channel} = GRPC.Stub.connect("localhost:50051")
- with_mock Rbac.Store.UserPermissions, [:passthrough],
- read_user_permissions: fn _ -> "organization.okta.manage" end do
+ with_mocks([
+ {Rbac.Store.UserPermissions, [:passthrough],
+ [read_user_permissions: fn _ -> "organization.okta.manage" end]},
+ {Rbac.Api.Organization, [],
+ [
+ find_by_id: fn _ ->
+ {:ok, org_with_okta}
+ end,
+ update: fn org ->
+ # Assert that okta is removed from allowed_id_providers
+ refute "okta" in org.allowed_id_providers
+ {:ok, nil}
+ end
+ ]}
+ ]) do
assert {:ok, _res} = InternalApi.Okta.Okta.Stub.destroy(channel, request)
+ # The mocked function is executed asynchronously, hence the wait
+ :timer.sleep(2_000)
+ assert_called_exactly(Rbac.Api.Organization.find_by_id(:_), 1)
+ assert_called_exactly(Rbac.Api.Organization.update(:_), 1)
end
assert user_has_one_role_assigned?(non_okta_user.id)
@@ -351,6 +480,54 @@ defmodule Rbac.GrpcServers.OktaServer.Test do
assert {:error, :not_found} == Rbac.Okta.Integration.find(integration.id)
end
+ test "If okta is not removed as provider, restore everything", %{integration: integration} do
+ {:ok, okta_user} = Support.Factories.RbacUser.insert()
+
+ {:ok, _} =
+ Support.Factories.OktaUser.insert(
+ integration_id: integration.id,
+ org_id: integration.org_id,
+ user_id: okta_user.id
+ )
+
+ # Assigning org role to the okta user
+ {:ok, _} =
+ Support.Factories.SubjectRoleBinding.insert(
+ org_id: integration.org_id,
+ subject_id: okta_user.id,
+ project_id: nil,
+ binding_source: :okta
+ )
+
+ request = %InternalApi.Okta.DestroyRequest{
+ user_id: okta_user.id,
+ integration_id: integration.id
+ }
+
+ assert {:ok, channel} = GRPC.Stub.connect("localhost:50051")
+
+ with_mocks([
+ {Rbac.Store.UserPermissions, [:passthrough],
+ [read_user_permissions: fn _ -> "organization.okta.manage" end]},
+ {Rbac.Api.Organization, [],
+ [
+ find_by_id: fn _ -> {:error, :not_found} end,
+ update: fn org ->
+ # Assert that okta is removed from allowed_id_providers
+ refute "okta" in org.allowed_id_providers
+ {:ok, nil}
+ end
+ ]}
+ ]) do
+ assert {:ok, _res} = InternalApi.Okta.Okta.Stub.destroy(channel, request)
+ # The mocked function is executed asynchronously, hence the wait
+ :timer.sleep(2_000)
+ end
+
+ assert user_has_one_role_assigned?(okta_user.id)
+ assert match?({:ok, _}, Rbac.Okta.Integration.find(integration.id))
+ end
+
test "Dont allow if user doesn't have permission", %{integration: integration} do
request = %InternalApi.Okta.DestroyRequest{
user_id: Ecto.UUID.generate(),
@@ -740,14 +917,22 @@ defmodule Rbac.GrpcServers.OktaServer.Test do
def create_integration do
{:ok, cert} = Support.Okta.Saml.PayloadBuilder.test_cert()
- Rbac.Okta.Integration.create_or_update(
- Ecto.UUID.generate(),
- Ecto.UUID.generate(),
- "https://sso-url.com",
- "https://saml-issuer.com",
- cert,
- false
- )
+ with_mocks([
+ {Rbac.Api.Organization, [],
+ [
+ find_by_id: fn _ -> {:ok, %{allowed_id_providers: []}} end,
+ update: fn _ -> {:ok, %{}} end
+ ]}
+ ]) do
+ Rbac.Okta.Integration.create_or_update(
+ Ecto.UUID.generate(),
+ Ecto.UUID.generate(),
+ "https://sso-url.com",
+ "https://saml-issuer.com",
+ cert,
+ false
+ )
+ end
end
defp user_has_one_role_assigned?(user_id) do
diff --git a/ee/rbac/test/rbac/okta/saml/payload_parser_test.exs b/ee/rbac/test/rbac/okta/saml/payload_parser_test.exs
index 7edd1e506..66fe4ac56 100644
--- a/ee/rbac/test/rbac/okta/saml/payload_parser_test.exs
+++ b/ee/rbac/test/rbac/okta/saml/payload_parser_test.exs
@@ -1,6 +1,7 @@
defmodule Rbac.Okta.Saml.PayloadParser.Test do
use Rbac.RepoCase, async: true
+ import Mock
alias Rbac.Okta.Saml.PayloadParser, as: Parser
@org_id Ecto.UUID.generate()
@@ -86,9 +87,24 @@ defmodule Rbac.Okta.Saml.PayloadParser.Test do
def integration(issuer) do
{:ok, cert} = Support.Okta.Saml.PayloadBuilder.test_cert()
- {:ok, integration} =
- Rbac.Okta.Integration.create_or_update(@org_id, @creator_id, @sso_url, issuer, cert, false)
-
- integration
+ with_mocks([
+ {Rbac.Api.Organization, [],
+ [
+ find_by_id: fn _ -> {:ok, %{allowed_id_providers: []}} end,
+ update: fn _ -> {:ok, %{}} end
+ ]}
+ ]) do
+ {:ok, integration} =
+ Rbac.Okta.Integration.create_or_update(
+ @org_id,
+ @creator_id,
+ @sso_url,
+ issuer,
+ cert,
+ false
+ )
+
+ integration
+ end
end
end
diff --git a/ee/rbac/test/rbac/okta/scim/api_test.exs b/ee/rbac/test/rbac/okta/scim/api_test.exs
index eea661882..b862b4c63 100644
--- a/ee/rbac/test/rbac/okta/scim/api_test.exs
+++ b/ee/rbac/test/rbac/okta/scim/api_test.exs
@@ -15,14 +15,19 @@ defmodule Rbac.Okta.Scim.Api.Test do
"x-semaphore-org-id": @org_id
]
- setup do
+ setup_with_mocks([
+ {Rbac.Api.Organization, [],
+ [
+ find_by_id: fn _ -> {:ok, %{allowed_id_providers: []}} end,
+ update: fn _ -> {:ok, %{}} end
+ ]}
+ ]) do
Rbac.FrontRepo.delete_all(Rbac.FrontRepo.User)
Support.Rbac.create_org_roles(@org_id)
Support.Rbac.create_project_roles(@org_id)
{:ok, provisioner} = Rbac.Okta.Scim.Provisioner.start_link()
-
on_exit(fn -> Process.exit(provisioner, :kill) end)
end
@@ -196,7 +201,6 @@ defmodule Rbac.Okta.Scim.Api.Test do
)
{:ok, token} = Rbac.Okta.Integration.generate_scim_token(integration)
-
{:ok, %{integration: integration, token: token}}
end
diff --git a/ee/rbac/test/rbac/okta/scim/provisioner_test.exs b/ee/rbac/test/rbac/okta/scim/provisioner_test.exs
index 011802c0d..b5d07ad57 100644
--- a/ee/rbac/test/rbac/okta/scim/provisioner_test.exs
+++ b/ee/rbac/test/rbac/okta/scim/provisioner_test.exs
@@ -9,12 +9,18 @@ defmodule Rbac.Okta.Scim.ProvisionerTest do
alias Rbac.Repo.OktaUser
import Mock
- setup do
+ # Setup global mocks that will be available for all tests
+ setup_with_mocks([
+ {Rbac.Api.Organization, [],
+ [
+ find_by_id: fn _ -> {:ok, %{allowed_id_providers: []}} end,
+ update: fn _ -> {:ok, %{}} end
+ ]}
+ ]) do
Support.Rbac.Store.clear!()
Support.Rbac.create_org_roles(@org_id)
{:ok, provisioner} = Rbac.Okta.Scim.Provisioner.start_link()
-
on_exit(fn -> Process.exit(provisioner, :kill) end)
{:ok, cert} = Support.Okta.Saml.PayloadBuilder.test_cert()
From a8e58aa1638356d57b11c597e2d11b17a59c6f77 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Miko=C5=82aj=20Kutryj?=
Date: Tue, 1 Jul 2025 12:24:35 +0200
Subject: [PATCH 17/87] toil(front): add feature flag for pipeline rebuilds in
the UI (#419)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
Add a feature flag to control whether we want to display the rebuild
pipeline button in the UI.
## ✅ Checklist
- [x] I have tested this change
- [ ] This change requires documentation update
---
.../controllers/pipeline_controller.ex | 14 +++++++-
.../status/_interactive_pipeline.html.eex | 2 +-
.../controllers/pipeline_controller_test.exs | 34 +++++++++++++++++++
front/test/support/stubs/feature.ex | 3 +-
4 files changed, 50 insertions(+), 3 deletions(-)
diff --git a/front/lib/front_web/controllers/pipeline_controller.ex b/front/lib/front_web/controllers/pipeline_controller.ex
index 6bd144971..63b42dc64 100644
--- a/front/lib/front_web/controllers/pipeline_controller.ex
+++ b/front/lib/front_web/controllers/pipeline_controller.ex
@@ -5,7 +5,14 @@ defmodule FrontWeb.PipelineController do
alias Front.Models.Pipeline
alias Front.Models.Switch
alias Front.WorkflowPage.PipelineStatus
- alias FrontWeb.Plugs.{FetchPermissions, PageAccess, PublicPageAccess, PutProjectAssigns}
+
+ alias FrontWeb.Plugs.{
+ FeatureEnabled,
+ FetchPermissions,
+ PageAccess,
+ PublicPageAccess,
+ PutProjectAssigns
+ }
require Logger
@@ -24,6 +31,11 @@ defmodule FrontWeb.PipelineController do
plug(:assign_pipeline_without_blocks when action in [:status, :switch, :stop, :rebuild])
plug(:preload_switch when action in [:show, :poll, :switch])
+ plug(
+ FeatureEnabled,
+ [:ui_partial_ppl_rebuild] when action in [:rebuild]
+ )
+
def path(conn, params) do
organization_id = conn.assigns.organization_id
workflow_id = conn.assigns.workflow.id
diff --git a/front/lib/front_web/templates/workflow/status/_interactive_pipeline.html.eex b/front/lib/front_web/templates/workflow/status/_interactive_pipeline.html.eex
index 36c7b051a..8d0450366 100644
--- a/front/lib/front_web/templates/workflow/status/_interactive_pipeline.html.eex
+++ b/front/lib/front_web/templates/workflow/status/_interactive_pipeline.html.eex
@@ -21,7 +21,7 @@
·
<%= link "Stop Pipeline", to: pipeline_stop_path(@conn, :stop, @workflow.id, @pipeline.id), class: "btn btn-secondary btn-tiny", pipeline_stop_button: "true" %>
<% end %>
- <%= if @conn.assigns.permissions["project.job.rerun"] && FrontWeb.PipelineView.pipeline_rebuildable?(@pipeline) && !FrontWeb.PipelineView.anonymous?(@conn) do %>
+ <%= if FeatureProvider.feature_enabled?(:ui_partial_ppl_rebuild, param: @conn.assigns[:organization_id]) && @conn.assigns.permissions["project.job.rerun"] && FrontWeb.PipelineView.pipeline_rebuildable?(@pipeline) && !FrontWeb.PipelineView.anonymous?(@conn) do %>
·
<%= link "Rebuild Pipeline", to: pipeline_rebuild_path(@conn, :rebuild, @workflow.id, @pipeline.id), class: "btn btn-secondary btn-tiny", pipeline_rebuild_button: "true", title: "Rerun only failed jobs in this pipeline" %>
<% end %>
diff --git a/front/test/front_web/controllers/pipeline_controller_test.exs b/front/test/front_web/controllers/pipeline_controller_test.exs
index 74b27b0ac..188a678e2 100644
--- a/front/test/front_web/controllers/pipeline_controller_test.exs
+++ b/front/test/front_web/controllers/pipeline_controller_test.exs
@@ -308,4 +308,38 @@ defmodule FrontWeb.PipelineControllerTest do
assert conn.status == 404
end
end
+
+ describe "rebuild => with ui_partial_ppl_rebuild feature flag" do
+ test "returns 404 when feature flag is disabled", %{
+ conn: conn,
+ workflow_id: workflow_id,
+ pipeline_id: pipeline_id
+ } do
+ org = Support.Stubs.DB.first(:organizations)
+ Support.Stubs.Feature.disable_feature(org.id, :ui_partial_ppl_rebuild)
+
+ conn =
+ conn
+ |> post("/workflows/#{workflow_id}/pipelines/#{pipeline_id}/rebuild")
+
+ assert conn.status == 404
+ end
+
+ test "returns 200 when feature flag is enabled", %{
+ conn: conn,
+ workflow_id: workflow_id,
+ pipeline_id: pipeline_id
+ } do
+ org = Support.Stubs.DB.first(:organizations)
+ Support.Stubs.Feature.enable_feature(org.id, :ui_partial_ppl_rebuild)
+
+ conn =
+ conn
+ |> post("/workflows/#{workflow_id}/pipelines/#{pipeline_id}/rebuild")
+
+ assert conn.status == 200
+ assert json_response(conn, 200)["message"] == "Pipeline rebuild initiated successfully."
+ assert json_response(conn, 200)["pipeline_id"] != nil
+ end
+ end
end
diff --git a/front/test/support/stubs/feature.ex b/front/test/support/stubs/feature.ex
index 55be62453..60fb23987 100644
--- a/front/test/support/stubs/feature.ex
+++ b/front/test/support/stubs/feature.ex
@@ -137,7 +137,8 @@ defmodule Support.Stubs.Feature do
{"new_project_onboarding", state: :ENABLED, quantity: 1},
{"open_id_connect_filter", state: :ENABLED, quantity: 1},
{"wf_editor_via_jobs", state: :HIDDEN, quantity: 0},
- {"ui_reports", state: :ENABLED, quantity: 1}
+ {"ui_reports", state: :ENABLED, quantity: 1},
+ {"ui_partial_ppl_rebuild", state: :ENABLED, quantity: 1}
]
end
From 47af2ae5c4be45a54660c756296de84a55367cd4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Damjan=20Be=C4=87irovi=C4=87?=
Date: Wed, 2 Jul 2025 10:31:52 +0200
Subject: [PATCH 18/87] fix(api v1alpha): Use pipeline_file as param name in
Run wf API (#420)
In the recent changes to how the Run workflow API works in the backend,
we mistakenly changed the expected name of the parameter that holds the
name of the pipeline file. This PR fixes that issue and adds a test to
verify that the request is properly formed based on the input
parameters.
---
.../workflow_client/wf_request_formatter.ex | 2 +-
.../v1alpha/test/workflow_client_test.exs | 26 +++++++++++++++++--
2 files changed, 25 insertions(+), 3 deletions(-)
diff --git a/public-api/v1alpha/lib/pipelines_api/workflow_client/wf_request_formatter.ex b/public-api/v1alpha/lib/pipelines_api/workflow_client/wf_request_formatter.ex
index c00a097da..68fbedfa2 100644
--- a/public-api/v1alpha/lib/pipelines_api/workflow_client/wf_request_formatter.ex
+++ b/public-api/v1alpha/lib/pipelines_api/workflow_client/wf_request_formatter.ex
@@ -21,7 +21,7 @@ defmodule PipelinesAPI.WorkflowClient.WFRequestFormatter do
request_token: UUID.uuid4(),
project_id: params["project_id"],
requester_id: Map.get(params, "requester_id", ""),
- definition_file: Map.get(params, "definition_file", ".semaphore/semaphore.yml"),
+ definition_file: Map.get(params, "pipeline_file", ".semaphore/semaphore.yml"),
organization_id: Map.get(params, "organization_id", ""),
git_reference: params |> Map.get("reference", "") |> ref(),
start_in_conceived_state: true,
diff --git a/public-api/v1alpha/test/workflow_client_test.exs b/public-api/v1alpha/test/workflow_client_test.exs
index ec38e119a..d5dff39db 100644
--- a/public-api/v1alpha/test/workflow_client_test.exs
+++ b/public-api/v1alpha/test/workflow_client_test.exs
@@ -46,15 +46,37 @@ defmodule PipelinesAPI.WorkflowClient.Test do
assert message == "Project was deleted."
end
+ test "workflow client request formatter schedule - creates valid gRPC request when given valid params" do
+ alias InternalApi.PlumberWF.TriggeredBy
+ alias PipelinesAPI.WorkflowClient.WFRequestFormatter
+ alias InternalApi.PlumberWF.ScheduleRequest.{ServiceType, EnvVar}
+
+ params = schedule_params()
+
+ assert {:ok, request} = WFRequestFormatter.form_schedule_request(params)
+ assert request.service == ServiceType.value(:GIT_HUB)
+ assert request.repo.branch_name == "main"
+ assert request.repo.commit_sha == "773d5c953bd68cc97efa81d2e014449336265fb4"
+ assert {:ok, _} = UUID.info(request.request_token)
+ assert request.requester_id == params["requester_id"]
+ assert request.definition_file == "semaphore.yml"
+ assert request.organization_id == params["organization_id"]
+ assert request.git_reference == "refs/heads/main"
+ assert request.start_in_conceived_state == true
+ assert request.triggered_by == TriggeredBy.value(:API)
+ assert request.env_vars == [%EnvVar{name: "MY_PARAM", value: "my_value"}]
+ end
+
defp schedule_params() do
%{
"reference" => "refs/heads/main",
"commit_sha" => "773d5c953bd68cc97efa81d2e014449336265fb4",
- "definition_file" => "semaphore.yml",
+ "pipeline_file" => "semaphore.yml",
"project_id" => UUID.uuid4(),
"organization_id" => UUID.uuid4(),
"requester_id" => UUID.uuid4(),
- "repository" => %{integration_type: :GITHUB_APP}
+ "repository" => %{integration_type: :GITHUB_APP},
+ "parameters" => %{"MY_PARAM" => "my_value"}
}
end
end
From 6f4d195307d671a086d6d7e428e7532813898a2a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Miko=C5=82aj=20Kutryj?=
Date: Wed, 2 Jul 2025 11:42:27 +0200
Subject: [PATCH 19/87] feat(security-toolbox): allow scanners to be configured
(#421)
This PR enables passing a list of scanners to be used for policy checks.
---
security-toolbox/docker | 8 +++++++-
security-toolbox/policies/docker/trivy_image.rb | 1 +
security-toolbox/policies/docker/trivy_table_output.rb | 4 +++-
3 files changed, 11 insertions(+), 2 deletions(-)
diff --git a/security-toolbox/docker b/security-toolbox/docker
index 633b71608..35c216f35 100755
--- a/security-toolbox/docker
+++ b/security-toolbox/docker
@@ -6,7 +6,9 @@ require_relative "policies/policy"
Dir["#{__dir__}/policies/docker/*.rb"].each { |f| require_relative f }
-args = {}
+args = {
+ scanners: "vuln,misconfig,secret,license"
+}
OptionParser.new do |parser|
parser.on("-i", "--image IMAGE", "Docker image to scan") do |image|
args[:image] = image
@@ -35,6 +37,10 @@ OptionParser.new do |parser|
parser.on("-d", "--dependencies", "Install dependencies") do
args[:install_dependencies] = true
end
+
+ parser.on("", "--scanners SCANNERS", "Which scanners to use") do |scanners|
+ args[:scanners] = scanners
+ end
end.parse!
policy_file = "docker-ignore-policy.rego"
diff --git a/security-toolbox/policies/docker/trivy_image.rb b/security-toolbox/policies/docker/trivy_image.rb
index c75423361..25c5c1660 100644
--- a/security-toolbox/policies/docker/trivy_image.rb
+++ b/security-toolbox/policies/docker/trivy_image.rb
@@ -15,6 +15,7 @@ def initialize(args)
@skip_files = args[:skip_files].to_s.split(",") || []
@skip_dirs = args[:skip_dirs].to_s.split(",") || []
+ @scanners = args[:scanners]
end
def test
diff --git a/security-toolbox/policies/docker/trivy_table_output.rb b/security-toolbox/policies/docker/trivy_table_output.rb
index c4b8b0ac1..de1b872bf 100644
--- a/security-toolbox/policies/docker/trivy_table_output.rb
+++ b/security-toolbox/policies/docker/trivy_table_output.rb
@@ -3,6 +3,8 @@
class Policy::TrivyTableOutput < Policy
def initialize(args)
super(args)
+
+ @scanners = args[:scanners]
end
def test
@@ -11,7 +13,7 @@ def test
"convert",
"--format table",
"--output table.txt",
- "--scanners vuln,secret,misconfig,license",
+ "--scanners #{@scanners}",
"out/docker-scan-trivy.json"
]
From ec07d8a52a0209fd3e265952c243bbf0d10df900 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Miko=C5=82aj=20Kutryj?=
Date: Fri, 4 Jul 2025 10:59:37 +0200
Subject: [PATCH 20/87] fix(velocity): properly handle reports exceeding size
limit (#395)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
This version of Sonic properly passes the `io.Reader` errors to the
decoder.
## ✅ Checklist
- [x] I have tested this change
- [ ] This change requires documentation update
---
ee/velocity/go.mod | 10 +++++-----
ee/velocity/go.sum | 10 ++++++++++
2 files changed, 15 insertions(+), 5 deletions(-)
diff --git a/ee/velocity/go.mod b/ee/velocity/go.mod
index 4fd30ccfc..b101aaa6d 100644
--- a/ee/velocity/go.mod
+++ b/ee/velocity/go.mod
@@ -6,7 +6,7 @@ toolchain go1.23.8
require (
github.com/allegro/bigcache/v3 v3.1.0
- github.com/bytedance/sonic v1.12.10
+ github.com/bytedance/sonic v1.13.3
github.com/eko/gocache/lib/v4 v4.1.5
github.com/eko/gocache/store/bigcache/v4 v4.2.1
github.com/go-co-op/gocron v1.23.0
@@ -30,7 +30,7 @@ require (
require (
github.com/beorn7/perks v1.0.1 // indirect
- github.com/bytedance/sonic/loader v0.2.2 // indirect
+ github.com/bytedance/sonic/loader v0.2.4 // indirect
github.com/cespare/xxhash/v2 v2.2.0 // indirect
github.com/cloudwego/base64x v0.1.5 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
@@ -41,7 +41,7 @@ require (
github.com/jackc/puddle/v2 v2.2.1 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect
- github.com/klauspost/cpuid/v2 v2.0.9 // indirect
+ github.com/klauspost/cpuid/v2 v2.2.10 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/prometheus/client_golang v1.14.0 // indirect
@@ -51,12 +51,12 @@ require (
github.com/rabbitmq/amqp091-go v1.9.0 // indirect
github.com/robfig/cron/v3 v3.0.1 // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
- golang.org/x/arch v0.0.0-20210923205945-b76863e36670 // indirect
+ golang.org/x/arch v0.18.0 // indirect
golang.org/x/crypto v0.36.0 // indirect
golang.org/x/exp v0.0.0-20230315142452-642cacee5cc0 // indirect
golang.org/x/net v0.36.0 // indirect
golang.org/x/sync v0.12.0 // indirect
- golang.org/x/sys v0.31.0 // indirect
+ golang.org/x/sys v0.33.0 // indirect
golang.org/x/text v0.23.0 // indirect
gopkg.in/alexcesaro/statsd.v2 v2.0.0 // indirect
)
diff --git a/ee/velocity/go.sum b/ee/velocity/go.sum
index ae569063f..cd64034ce 100644
--- a/ee/velocity/go.sum
+++ b/ee/velocity/go.sum
@@ -47,9 +47,13 @@ github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/bytedance/sonic v1.12.10 h1:uVCQr6oS5669E9ZVW0HyksTLfNS7Q/9hV6IVS4nEMsI=
github.com/bytedance/sonic v1.12.10/go.mod h1:uVvFidNmlt9+wa31S1urfwwthTWteBgG0hWuoKAXTx8=
+github.com/bytedance/sonic v1.13.3 h1:MS8gmaH16Gtirygw7jV91pDCN33NyMrPbN7qiYhEsF0=
+github.com/bytedance/sonic v1.13.3/go.mod h1:o68xyaF9u2gvVBuGHPlUVCy+ZfmNNO5ETf1+KgkJhz4=
github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
github.com/bytedance/sonic/loader v0.2.2 h1:jxAJuN9fOot/cyz5Q6dUuMJF5OqQ6+5GfA8FjjQ0R4o=
github.com/bytedance/sonic/loader v0.2.2/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI=
+github.com/bytedance/sonic/loader v0.2.4 h1:ZWCw4stuXUsn1/+zQDqeE7JKP+QO47tz7QCNan80NzY=
+github.com/bytedance/sonic/loader v0.2.4/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
@@ -182,6 +186,8 @@ github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/klauspost/cpuid/v2 v2.0.9 h1:lgaqFMSdTdQYdZ04uHyN2d/eKdOMyi2YLSvlQIBFYa4=
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
+github.com/klauspost/cpuid/v2 v2.2.10 h1:tBs3QSyvjDyFTq3uoc/9xFpCuOsJQFNPiAhYdw2skhE=
+github.com/klauspost/cpuid/v2 v2.2.10/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
@@ -289,6 +295,8 @@ go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9i
go.uber.org/zap v1.18.1/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI=
golang.org/x/arch v0.0.0-20210923205945-b76863e36670 h1:18EFjUmQOcUvxNYSkA6jO9VAiXCnxFY6NyDX0bHDmkU=
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
+golang.org/x/arch v0.18.0 h1:WN9poc33zL4AzGxqf8VtpKUnGvMi8O9lhNyBMF/85qc=
+golang.org/x/arch v0.18.0/go.mod h1:bdwinDaKcfZUGpH09BB7ZmOfhalA8lQdzl62l8gGWsk=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
@@ -441,6 +449,8 @@ golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik=
golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
+golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
+golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
From b1ee30e6ae9a73b1115283a9e6369549140b129e Mon Sep 17 00:00:00 2001
From: Veljko Maksimovic <45179708+VeljkoMaksimovic@users.noreply.github.com>
Date: Fri, 4 Jul 2025 16:35:58 +0200
Subject: [PATCH 21/87] toil(guard): Error handling in update emails script
(#423)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
While I was working on [this
task](https://github.com/renderedtext/tasks/issues/8114#issuecomment-3034726378),
I encountered an error with the update script when a user's GH API token
is no longer valid. This fixes it. Also, the script was duplicated across
two services, which is not needed.
## ✅ Checklist
- [x] I have tested this change
- [ ] This change requires documentation update
---
ee/rbac/lib/rbac/user/update_mails.ex | 107 --------------------------
guard/lib/guard/user/update_mails.ex | 53 +++++++------
2 files changed, 26 insertions(+), 134 deletions(-)
delete mode 100644 ee/rbac/lib/rbac/user/update_mails.ex
diff --git a/ee/rbac/lib/rbac/user/update_mails.ex b/ee/rbac/lib/rbac/user/update_mails.ex
deleted file mode 100644
index 33463a615..000000000
--- a/ee/rbac/lib/rbac/user/update_mails.ex
+++ /dev/null
@@ -1,107 +0,0 @@
-defmodule Rbac.User.UpdateMails do
- @moduledoc """
- This module contains a script that goes through all the users within a given org,
- checks if they have corporate emails, and if not, goes through secondary GitHub emails
- to see if one of those is a corporate mail. If so, the email is updated, if not, nothing happens.
-
- This script is not used from anywhere within the code base, and is ment to be ran manualy. For now,
- the only use-case for this script was when a organization wants to use SCIM/SAML for SSO, and emails
- in their SAML provider need to match emails on Semaphoere.
- """
- import Ecto.Query
- require Logger
-
- @doc """
- If an organization has corporate email address that ends with important-org.org, that would be given
- as a parameter togeather with the org's semaphore id.
-
- The function returns a list of all updated emails. If `nil` values appear in this list, that
- means some users dont have corporate email, but the scrip wasn't able to find one. Either their GitHub API
- token is not valid, or more likely they did not connect ther GitHub account with their corporate mail.
- """
- def migrate(org_id, corporate_email_domain) do
- user_ids = get_wrong_email_users(org_id, corporate_email_domain)
-
- all_emails =
- user_ids
- |> Enum.each(fn id ->
- {:ok, token} = get_api_token(id)
-
- {:ok, resp} =
- HTTPoison.get("https://api.github.com/user/emails", [
- {"Authorization", "Token #{token}"}
- ])
-
- {:ok, body} = resp |> Map.get(:body) |> Jason.decode()
-
- if is_list(body) do
- body
- |> Enum.map(fn email ->
- email["email"]
- end)
- |> update_email(id, corporate_email_domain)
- else
- Logger.error("Bad request for user #{id}: #{inspect(resp)}")
- nil
- end
- end)
-
- all_emails
- end
-
- def update_email(emails, user_id, corporate_email_domain) do
- new_mail = emails |> Enum.find(&(&1 =~ "@#{corporate_email_domain}"))
-
- if new_mail == nil do
- Logger.info("Could not find corporate email for #{user_id}")
- else
- Logger.info("Updating email for user #{user_id} to #{new_mail}")
-
- Rbac.Repo.RbacUser
- |> where([u], u.id == ^user_id)
- |> Rbac.Repo.update_all(set: [email: new_mail])
-
- Rbac.FrontRepo.User
- |> where([u], u.id == ^user_id)
- |> Rbac.FrontRepo.update_all(set: [email: new_mail])
-
- if Rbac.OIDC.enabled?() do
- handle_oidc_sync(user_id)
- end
- end
- end
-
- def handle_oidc_sync(user_id) do
- user = Rbac.Store.RbacUser.fetch(user_id)
-
- case Rbac.Store.OIDCUser.fetch_by_user_id(user_id) do
- {:ok, oidc_user} ->
- case Rbac.OIDC.User.update_oidc_user(oidc_user.oidc_user_id, user) do
- {:ok, oidc_user_id} ->
- Logger.info("OIDC user #{oidc_user_id} updated")
-
- e ->
- Logger.error("Error syncing new user with OIDC #{inspect(e)}")
- end
-
- {:error, :not_found} ->
- Logger.error("While updating an existing user, the same OIDC user was not found!")
- end
- end
-
- def get_wrong_email_users(org_id, corporate_email_domain) do
- Rbac.Repo.SubjectRoleBinding
- |> join(:inner, [srb], u in Rbac.Repo.RbacUser, on: srb.subject_id == u.id)
- |> where(
- [srb, u],
- srb.org_id == ^org_id and is_nil(srb.project_id) and
- not like(u.email, ^"%#{corporate_email_domain}%")
- )
- |> select([srb], srb.subject_id)
- |> Rbac.Repo.all()
- end
-
- defp get_api_token(user_id) do
- Rbac.FrontRepo.RepoHostAccount.get_github_token(user_id)
- end
-end
diff --git a/guard/lib/guard/user/update_mails.ex b/guard/lib/guard/user/update_mails.ex
index a2cdf220b..5ef3e7515 100644
--- a/guard/lib/guard/user/update_mails.ex
+++ b/guard/lib/guard/user/update_mails.ex
@@ -8,9 +8,12 @@ defmodule Guard.User.UpdateMails do
the only use-case for this script was when a organization wants to use SCIM/SAML for SSO, and emails
in their SAML provider need to match emails on Semaphoere.
"""
+
import Ecto.Query
require Logger
+ @github_api_domain "https://api.github.com/user/emails"
+
@doc """
If an organization has corporate email address that ends with important-org.org, that would be given
as a parameter togeather with the org's semaphore id.
@@ -20,33 +23,21 @@ defmodule Guard.User.UpdateMails do
token is not valid, or more likely they did not connect ther GitHub account with their corporate mail.
"""
def migrate(org_id, corporate_email_domain) do
- user_ids = get_wrong_email_users(org_id, corporate_email_domain)
-
- all_emails =
- user_ids
- |> Enum.each(fn id ->
- {:ok, token} = get_api_token(id)
-
- {:ok, resp} =
- HTTPoison.get("https://api.github.com/user/emails", [
- {"Authorization", "Token #{token}"}
- ])
-
- {:ok, body} = resp |> Map.get(:body) |> Jason.decode()
-
- if is_list(body) do
- body
- |> Enum.map(fn email ->
- email["email"]
- end)
- |> update_email(id, corporate_email_domain)
- else
- Logger.error("Bad request for user #{id}: #{inspect(resp)}")
- nil
- end
- end)
-
- all_emails
+ get_wrong_email_users(org_id, corporate_email_domain)
+ |> Enum.map(fn id -> {id, get_api_token(id)} end)
+ |> Enum.filter(fn {_, token} -> token != nil end)
+ |> Enum.each(fn {id, token} ->
+ {:ok, resp} = HTTPoison.get(@github_api_domain, [{"Authorization", "Token #{token}"}])
+ {:ok, body} = resp |> Map.get(:body) |> Jason.decode()
+
+ if is_list(body) do
+ body
+ |> Enum.map(fn email -> email["email"] end)
+ |> update_email(id, corporate_email_domain)
+ else
+ Logger.error("Bad request for user #{id}: #{inspect(resp)}")
+ end
+ end)
end
def update_email(emails, user_id, corporate_email_domain) do
@@ -103,5 +94,13 @@ defmodule Guard.User.UpdateMails do
defp get_api_token(user_id) do
Guard.FrontRepo.RepoHostAccount.get_github_token(user_id)
+ |> case do
+ {:error, reason} ->
+ Logger.info("Failed to get GitHub token for user #{user_id}: #{inspect(reason)}")
+ nil
+
+ {:ok, token} ->
+ token
+ end
end
end
From aea409e3befc31afbdcc9dd455a5a77d8d9de8d8 Mon Sep 17 00:00:00 2001
From: csidyel <46521788+csidyel@users.noreply.github.com>
Date: Tue, 8 Jul 2025 11:31:36 +0200
Subject: [PATCH 22/87] Starter templates Rspec and Elixir overhauls (#400)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
Changes to the following starter templates:
Rspec cloud and docker
Elixir cloud and docker
Go cloud and docker
Their properties files as well
## ✅ Checklist
- [x] I have tested this change
- [ ] This change requires documentation update
---
.../properties/elixir_docker.properties.json | 7 +-
.../properties/elixir_saas.properties.json | 7 +-
.../properties/go_docker.properties.json | 9 +-
.../properties/go_saas.properties.json | 9 +-
.../properties/rspec_docker.properties.json | 7 +-
.../properties/rspec_saas.properties.json | 7 +-
.../saas_new/templates/elixir_docker.yml | 162 ++++++++++++-----
.../saas_new/templates/elixir_saas.yml | 170 ++++++++++++------
.../saas_new/templates/go_docker.yml | 153 +++++++++-------
.../saas_new/templates/go_saas.yml | 138 ++++++++------
.../saas_new/templates/rspec_docker.yml | 130 ++++++++------
.../saas_new/templates/rspec_saas.yml | 129 +++++++------
12 files changed, 581 insertions(+), 347 deletions(-)
diff --git a/front/workflow_templates/saas_new/properties/elixir_docker.properties.json b/front/workflow_templates/saas_new/properties/elixir_docker.properties.json
index 668cd42d8..34a3d00cb 100644
--- a/front/workflow_templates/saas_new/properties/elixir_docker.properties.json
+++ b/front/workflow_templates/saas_new/properties/elixir_docker.properties.json
@@ -1,11 +1,12 @@
{
- "title": "Elixir",
- "description": "Run quality checks and tests for your Elixir app in Docker with security scanning and type checking. ElixirDockerDialyzerCredo
",
- "short_description": "Run your Elixir suite",
+ "title": "Elixir CI/CD (Docker)",
+ "description": "Comprehensive Docker-based pipeline for non-Phoenix Elixir projects: dependency caching, code formatting, static analysis (Credo), security scanning (Sobelow & deps.audit), type checking with Dialyzer, parallelised tests on PostgreSQL, and an optional Docker image build + push on main. ElixirDockerCredoDialyzerSobelowDeps-AuditPostgres
",
+ "short_description": "Docker-native CI/CD for Elixir apps",
"group": "ci",
"language": "Elixir",
"environment": "docker",
"use_case": "ci_pipeline",
+ "tags": ["ci_pipeline", "security_checks"],
"icon": "lang-elixir.svg",
"template_path": "templates/elixir_docker.yml",
"workflow_tip": "other"
diff --git a/front/workflow_templates/saas_new/properties/elixir_saas.properties.json b/front/workflow_templates/saas_new/properties/elixir_saas.properties.json
index 693ba5e62..1a8218b62 100644
--- a/front/workflow_templates/saas_new/properties/elixir_saas.properties.json
+++ b/front/workflow_templates/saas_new/properties/elixir_saas.properties.json
@@ -1,11 +1,12 @@
{
- "title": "Elixir",
- "description": "Run quality checks and tests for your Elixir app using Mix tools for formatting, security, and type checking. Tests are parallelized for faster execution. ElixirMixDialyzerCredo
",
- "short_description": "Run your Elixir suite",
+ "title": "Elixir CI Pipeline",
+ "description": "End-to-end pipeline for non-Phoenix Elixir projects: dependency caching, code formatting, static analysis, security scanning, type checking, parallelised tests with PostgreSQL, and an optional Docker image build on main. ElixirMixCredoSobelowDeps-AuditDialyzerPostgresDocker
",
+ "short_description": "Comprehensive CI for Elixir apps",
"group": "ci",
"language": "Elixir",
"environment": "linux",
"use_case": "ci_pipeline",
+ "tags": ["ci_pipeline", "security_checks"],
"icon": "lang-elixir.svg",
"template_path": "templates/elixir_saas.yml",
"workflow_tip": "other"
diff --git a/front/workflow_templates/saas_new/properties/go_docker.properties.json b/front/workflow_templates/saas_new/properties/go_docker.properties.json
index 5e0280322..9ad77fb2a 100644
--- a/front/workflow_templates/saas_new/properties/go_docker.properties.json
+++ b/front/workflow_templates/saas_new/properties/go_docker.properties.json
@@ -1,11 +1,12 @@
{
- "title": "Go",
- "description": "Run quality checks and tests for your Go app in Docker using gotestsum for enhanced test reporting. GoDockergotestsum
",
- "short_description": "Run your Go suite",
+ "title": "Go – Docker CI Pipeline",
+ "description": "Run a full featured Go CI pipeline inside Docker with dependency caching, code formatting and static analysis (gofmt, golangci-lint, staticcheck), security scanning via govulncheck, fast parallel tests and JUnit reports with gotestsum, binary compilation, and Docker image packaging. GoDockergotestsumgolangci-lintstaticcheckgovulncheckJUnit
",
+ "short_description": "Full Go CI pipeline in Docker with parallel tests",
"group": "ci",
- "language": "Go",
+ "language": "golang",
"environment": "docker",
"use_case": "ci_pipeline",
+ "tags": ["ci_pipeline", "security_checks"],
"icon": "lang-go.svg",
"template_path": "templates/go_docker.yml",
"workflow_tip": "other"
diff --git a/front/workflow_templates/saas_new/properties/go_saas.properties.json b/front/workflow_templates/saas_new/properties/go_saas.properties.json
index dabcf2d30..e220396c7 100644
--- a/front/workflow_templates/saas_new/properties/go_saas.properties.json
+++ b/front/workflow_templates/saas_new/properties/go_saas.properties.json
@@ -1,11 +1,12 @@
{
- "title": "Go",
- "description": "Run quality checks and parallel tests for your Go app using gotestsum for enhanced test reporting. GogotestsumJUnit
",
- "short_description": "Run your Go suite",
+ "title": "Go CI Pipeline",
+ "description": "Run a full featured Go CI pipeline with dependency caching, code formatting and static analysis (gofmt, golangci-lint, staticcheck), security scanning with govulncheck, parallel test execution and JUnit reports via gotestsum, binary artifact compilation, and Docker image packaging. Gogotestsumgolangci-lintstaticcheckgovulncheckDockerJUnit
",
+ "short_description": "Full Go CI pipeline with fast parallel tests",
"group": "ci",
- "language": "Go",
+ "language": "golang",
"environment": "linux",
"use_case": "ci_pipeline",
+ "tags": ["ci_pipeline", "security_checks"],
"icon": "lang-go.svg",
"template_path": "templates/go_saas.yml",
"workflow_tip": "other"
diff --git a/front/workflow_templates/saas_new/properties/rspec_docker.properties.json b/front/workflow_templates/saas_new/properties/rspec_docker.properties.json
index 89cbb3a13..63f168524 100644
--- a/front/workflow_templates/saas_new/properties/rspec_docker.properties.json
+++ b/front/workflow_templates/saas_new/properties/rspec_docker.properties.json
@@ -1,11 +1,12 @@
{
- "title": "RSpec",
- "description": "Run parallel RSpec tests in Docker with PostgreSQL and Redis support. Uses Knapsack for test distribution. RubyDockerRSpecKnapsack
",
- "short_description": "Run your Ruby RSpec suite",
+ "title": "Ruby – RSpec (Docker)",
+ "description": "Run a Rails-ready CI pipeline inside Docker with PostgreSQL 17 & Redis 7 service containers, dependency caching, asset compilation, JS/CSS linting (ESLint + Stylelint), Ruby quality checks (RuboCop), security scanning (Brakeman + Bundler-Audit), and fast parallel RSpec execution via Semaphore Boosters. RubyDockerRSpecRuboCopBrakemanBundlerESLintStylelintBoosters
",
+ "short_description": "Full RSpec pipeline in Docker",
"group": "ci",
"language": "Ruby",
"environment": "docker",
"use_case": "ci_pipeline",
+ "tags": ["ci_pipeline", "security_checks"],
"icon": "lang-ruby.svg",
"template_path": "templates/rspec_docker.yml",
"workflow_tip": "other"
diff --git a/front/workflow_templates/saas_new/properties/rspec_saas.properties.json b/front/workflow_templates/saas_new/properties/rspec_saas.properties.json
index b8a9c41e1..a63c5da30 100644
--- a/front/workflow_templates/saas_new/properties/rspec_saas.properties.json
+++ b/front/workflow_templates/saas_new/properties/rspec_saas.properties.json
@@ -1,11 +1,12 @@
{
- "title": "RSpec",
- "description": "Use Bundler, Bundler-audit, RuboCop, and Brakeman to test your Rails app. This CI pipeline uses Knapsack Pro to paralelize your tests. RailsRuboCopKnapsack
",
- "short_description": "Run your Ruby RSpec suite",
+ "title": "Ruby – RSpec CI Pipeline",
+ "description": "Set up a full-featured Rails pipeline with dependency caching, asset compilation, JS/CSS linting (ESLint + Stylelint), Ruby quality checks (RuboCop), security scanning (Brakeman + Bundler-Audit), and lightning-fast parallel test execution powered by Semaphore Boosters. RailsRuboCopBoostersESLintStylelintBrakemanBundler-Audit
",
+ "short_description": "Parallel RSpec pipeline for Rails apps",
"group": "ci",
"language": "Ruby",
"environment": "linux",
"use_case": "ci_pipeline",
+ "tags": ["ci_pipeline", "security_checks"],
"icon": "lang-ruby.svg",
"template_path": "templates/rspec_saas.yml",
"workflow_tip": "other"
diff --git a/front/workflow_templates/saas_new/templates/elixir_docker.yml b/front/workflow_templates/saas_new/templates/elixir_docker.yml
index 1a5d441e3..354386f34 100644
--- a/front/workflow_templates/saas_new/templates/elixir_docker.yml
+++ b/front/workflow_templates/saas_new/templates/elixir_docker.yml
@@ -1,85 +1,163 @@
+# Semaphore CI pipeline for a non-Phoenix Elixir project
+# Spec: https://docs.semaphoreci.com/reference/pipeline-yaml
+
version: v1.0
-name: "💫 Elixir CI Pipeline"
+name: "💧 Elixir CI Pipeline"
+# -------------------------------------------------------------------
+# Define the machine type and OS image to run the pipeline on
+# -------------------------------------------------------------------
agent:
machine:
- type: {{ machine_type }} # Define the machine type (e.g., e1-standard-2 for cost-efficiency)
- os_image: {{ os_image }} # Specify the OS image (e.g., ubuntu2004 for compatibility)
+ type: {{ machine_type }}
+ os_image: {{ os_image }}
containers:
- name: main
- image: 'registry.semaphoreci.com/elixir:1.15.7' # Elixir 1.15.7 with Erlang/OTP 26
+ image: 'registry.semaphoreci.com/elixir:1.16' # Elixir 1.16 with Erlang/OTP 26
- name: postgres
image: 'registry.semaphoreci.com/postgres:17' # PostgreSQL 17 for database operations
+# Configure when to stop the pipeline early
fail_fast:
stop:
- when: branch != 'main' # Stop execution early for non-main branches to save resources
+ when: branch != 'main' # Stop all blocks if a job fails on non-main branches
auto_cancel:
running:
when: branch != 'main' # Cancel running pipelines on non-main branches
queued:
- when: branch = 'main' # Cancel queued pipelines on the main branch
+ when: branch = 'main' # Cancel queued pipelines on main branch
+# -------------------------------------------------------------------
+# GLOBAL JOB CONFIG – runs before every job
+# Docs: https://docs.semaphoreci.com/reference/pipeline-yaml#global-job-config
+# Keep only commands truly needed everywhere
+# -------------------------------------------------------------------
global_job_config:
prologue:
commands:
- - checkout # Fetch the latest code from the repository
- - cache restore # Restore cached dependencies for faster builds
- - mix local.hex --force # Install Hex, the Elixir package manager
- - mix local.rebar --force # Install rebar3, a build tool for Erlang/Elixir
- - mix deps.get # Fetch project dependencies
+ - checkout # Clone repo → https://docs.semaphoreci.com/reference/toolbox#checkout
+ - mix local.hex --force
+ - mix local.rebar --force
+ - cache restore # Reuse deps/ or _build cache
+# -------------------------------------------------------------------
+# BLOCK: 📦 Install & Build – compile & cache artifacts
+# -------------------------------------------------------------------
blocks:
- - name: "🛠️ Setup and Cache"
+ - name: "📦 Install & Build"
dependencies: []
task:
jobs:
- - name: "Install Dependencies"
+ - name: "⚙️ Compile"
+ commands:
+ - mix deps.get
+ - mix compile
+ - cache store # Save deps/ or _build for later steps
+
+# -------------------------------------------------------------------
+# CODE-QUALITY BLOCKS – one block per check
+# -------------------------------------------------------------------
+ - name: "🖋 Format"
+ dependencies: ["📦 Install & Build"]
+ task:
+ jobs:
+ - name: "🎨 mix format"
+ commands:
+ - mix format --check-formatted
+
+ - name: "🔎 Credo"
+ dependencies: ["📦 Install & Build"]
+ task:
+ jobs:
+ - name: "🔍 mix credo"
commands:
- - mix deps.compile # Compile dependencies to ensure compatibility
- - mix compile # Compile the project
- - cache store # Store compiled dependencies for future runs
+ - mix credo --strict
- - name: "🔍 Code Quality"
- dependencies: ["🛠️ Setup and Cache"]
+ - name: "🔐 Sobelow"
+ dependencies: ["📦 Install & Build"]
task:
jobs:
- - name: "Run Formatter and Credo"
+ - name: "🛡️ mix sobelow"
commands:
- - mix format --check-formatted # Ensure all code is properly formatted
- - mix credo --strict # Perform static analysis to enforce best practices
+ - mix sobelow --exit
- - name: "🔐 Security Checks"
- dependencies: ["🛠️ Setup and Cache"]
+ - name: "🛡 Deps Audit"
+ dependencies: ["📦 Install & Build"]
task:
jobs:
- - name: "Run Security Scans"
+ - name: "🛡️ mix deps.audit"
commands:
- - mix deps.audit # Audit dependencies for known vulnerabilities
- - mix sobelow # Run security analysis on the codebase
+ - mix deps.audit
- name: "📊 Dialyzer"
- dependencies: ["🛠️ Setup and Cache"]
+ dependencies: ["📦 Install & Build"]
+ task:
+ jobs:
+ - name: "🧠 mix dialyzer"
+ env_vars:
+ - name: MIX_ENV
+ value: test
+ commands:
+ - mix dialyzer --halt-exit-status
+
+# -------------------------------------------------------------------
+# BLOCK: 🧪 Tests – split suite for faster feedback
+# -------------------------------------------------------------------
+ - name: "🧪 Tests"
+ dependencies: ["📦 Install & Build"]
task:
+ epilogue: # Publish JUnit only for test jobs
+ always:
+ commands:
+ - '[ -f report.xml ] && test-results publish report.xml' # https://docs.semaphoreci.com/using-semaphore/tests/test-reports
jobs:
- - name: "Type Checking"
+ - name: "🧪 Tests-1"
+ env_vars:
+ - name: MIX_TEST_PARTITION
+ value: "1"
commands:
- - mix dialyzer # Perform static type checking for code correctness
+ - mix test --color
+ - name: "🧪 Tests-2"
+ env_vars:
+ - name: MIX_TEST_PARTITION
+ value: "2"
+ commands:
+ - mix test --color
- - name: "🧪 Test Suite"
- dependencies: ["🛠️ Setup and Cache"]
+# -------------------------------------------------------------------
+# BLOCK: 🚀 Docker Deploy – runs only on main branch
+# Secrets docs: https://docs.semaphoreci.com/reference/pipeline-yaml#secrets-in-task
+# -------------------------------------------------------------------
+ - name: "🚀 Docker Deploy"
+ dependencies:
+ - "🖋 Format"
+ - "🔎 Credo"
+ - "🔐 Sobelow"
+ - "🛡 Deps Audit"
+ - "📊 Dialyzer"
+ - "🧪 Tests"
+ run:
+ when: "branch = 'main'"
task:
- env_vars:
- - name: MIX_ENV
- value: test # Set the environment to test
- - name: PGHOST
- value: postgres # PostgreSQL container name
- - name: PGUSER
- value: postgres # PostgreSQL user
+ secrets:
+ - name: dockerhub # Registry creds
jobs:
- - name: "🟢 Run Tests with Coverage"
- parallelism: 4 # Run tests in parallel for faster execution
+ - name: "🐳 Build & Push"
commands:
- - mix test.setup # Set up the test database
- - mix test --partitions $SEMAPHORE_JOB_COUNT --partition $SEMAPHORE_JOB_INDEX --cover # Run tests with partitioning and coverage reporting
+ - echo "$DOCKER_PASSWORD" | docker login --username "$DOCKER_USERNAME" --password-stdin
+ - docker build -t myapp:${SEMAPHORE_GIT_SHA} .
+ - docker tag myapp:${SEMAPHORE_GIT_SHA} myapp:latest
+ - docker push myapp:${SEMAPHORE_GIT_SHA}
+ - docker push myapp:latest
+
+# -------------------------------------------------------------------
+# AFTER PIPELINE – merge JUnit files into a single report
+# Docs: https://docs.semaphoreci.com/using-semaphore/tests/test-reports#step3
+# -------------------------------------------------------------------
+after_pipeline:
+ task:
+ jobs:
+ - name: "📊 Merge Reports"
+ commands:
+ - test-results gen-pipeline-report # Combine job reports
diff --git a/front/workflow_templates/saas_new/templates/elixir_saas.yml b/front/workflow_templates/saas_new/templates/elixir_saas.yml
index 8fd1dc47b..55111c308 100644
--- a/front/workflow_templates/saas_new/templates/elixir_saas.yml
+++ b/front/workflow_templates/saas_new/templates/elixir_saas.yml
@@ -1,11 +1,12 @@
-# This is a Semaphore configuration file for Elixir projects
-# For more information about Semaphore configuration visit:
-# https://docs.semaphoreci.com/reference/pipeline-yaml-reference/
+# Semaphore CI pipeline for a non-Phoenix Elixir project
+# Spec: https://docs.semaphoreci.com/reference/pipeline-yaml
-version: v1.0 # Semaphore configuration version
-name: "💫 Elixir CI Pipeline" # Pipeline display name
+version: v1.0
+name: "💧 Elixir CI Pipeline"
+# -------------------------------------------------------------------
# Define the machine type and OS image to run the pipeline on
+# -------------------------------------------------------------------
agent:
machine:
type: {{ machine_type }}
@@ -21,79 +22,140 @@ auto_cancel:
queued:
when: branch = 'main' # Cancel queued pipelines on main branch
-# Commands to run before each job
+# -------------------------------------------------------------------
+# GLOBAL JOB CONFIG – runs before every job
+# Docs: https://docs.semaphoreci.com/reference/pipeline-yaml#global-job-config
+# Keep only commands truly needed everywhere
+# -------------------------------------------------------------------
global_job_config:
prologue:
commands:
- - checkout # Get the code from repository
- - sem-service start postgres 17 # Start PostgreSQL service
- - sem-version erlang 26.2 # Set Erlang version
- - sem-version elixir 1.15.7 # Set Elixir version
- - cache restore # Restore cached dependencies
- - mix local.hex --force # Install Hex package manager
- - mix local.rebar --force # Install rebar3 build tool
- - mix deps.get # Install dependencies
+ - checkout # Clone repo → https://docs.semaphoreci.com/reference/toolbox#checkout
+ - sem-version elixir 1.16 # Select Elixir version → https://docs.semaphoreci.com/reference/toolbox#sem-version
+ - sem-version erlang 26.2 # Select Erlang/OTP version → https://docs.semaphoreci.com/using-semaphore/languages/elixir-erlang
+ - mix local.hex --force
+ - mix local.rebar --force
+ - cache restore # Reuse deps/ or _build cache
-# Pipeline blocks represent groups of jobs that can run in parallel
+# -------------------------------------------------------------------
+# BLOCK: 📦 Install & Build – compile & cache artifacts
+# -------------------------------------------------------------------
blocks:
- # Block for setting up dependencies and caching
- - name: "🛠️ Setup and Cache"
+ - name: "📦 Install & Build"
dependencies: []
task:
jobs:
- - name: Install Dependencies
+ - name: "⚙️ Compile"
commands:
- - mix deps.compile # Compile dependencies
- - mix compile # Compile project
- - cache store # Cache dependencies for future runs
+ - mix deps.get
+ - mix compile
+ - cache store # Save deps/ or _build for later steps
- # Block for code quality checks
- - name: "🔍 Code Quality"
- dependencies:
- - "🛠️ Setup and Cache"
+# -------------------------------------------------------------------
+# CODE-QUALITY BLOCKS – one block per check
+# -------------------------------------------------------------------
+ - name: "🖋 Format"
+ dependencies: ["📦 Install & Build"]
task:
jobs:
- - name: Run Formatter and Credo
+ - name: "🎨 mix format"
commands:
- - mix format --check-formatted # Check code formatting
- - mix credo --strict # Run static code analysis
+ - mix format --check-formatted
- # Block for security checks
- - name: "🔐 Security Checks"
- dependencies:
- - "🛠️ Setup and Cache"
+ - name: "🔎 Credo"
+ dependencies: ["📦 Install & Build"]
+ task:
+ jobs:
+ - name: "🔍 mix credo"
+ commands:
+ - mix credo --strict
+
+ - name: "🔐 Sobelow"
+ dependencies: ["📦 Install & Build"]
+ task:
+ jobs:
+ - name: "🛡️ mix sobelow"
+ commands:
+ - mix sobelow --exit
+
+ - name: "🛡 Deps Audit"
+ dependencies: ["📦 Install & Build"]
task:
jobs:
- - name: Run Security Scans
+ - name: "🛡️ mix deps.audit"
commands:
- - mix deps.audit # Check for known vulnerabilities
- - mix sobelow # Run security-focused static analysis
+ - mix deps.audit
- # Block for type checking
- name: "📊 Dialyzer"
- dependencies:
- - "🛠️ Setup and Cache"
+ dependencies: ["📦 Install & Build"]
task:
jobs:
- - name: Type Checking
+ - name: "🧠 mix dialyzer"
+ env_vars:
+ - name: MIX_ENV
+ value: test
commands:
- - mix dialyzer # Run static type checking
+ - mix dialyzer --halt-exit-status
- # Block for running tests
- - name: "🧪 Test Suite"
+# -------------------------------------------------------------------
+# BLOCK: 🧪 Tests – split suite for faster feedback
+# -------------------------------------------------------------------
+ - name: "🧪 Tests"
+ dependencies: ["📦 Install & Build"]
+ task:
+ epilogue: # Publish JUnit only for test jobs
+ always:
+ commands:
+ - '[ -f report.xml ] && test-results publish report.xml' # https://docs.semaphoreci.com/using-semaphore/tests/test-reports
+ jobs:
+ - name: "🧪 Tests-1"
+ env_vars:
+ - name: MIX_TEST_PARTITION
+ value: "1"
+ commands:
+ - sem-service start postgres 17
+ - mix test --color
+ - name: "🧪 Tests-2"
+ env_vars:
+ - name: MIX_TEST_PARTITION
+ value: "2"
+ commands:
+ - sem-service start postgres 17
+ - mix test --color
+
+# -------------------------------------------------------------------
+# BLOCK: 🚀 Docker Deploy – runs only on main branch
+# Secrets docs: https://docs.semaphoreci.com/reference/pipeline-yaml#secrets-in-task
+# -------------------------------------------------------------------
+ - name: "🚀 Docker Deploy"
dependencies:
- - "🛠️ Setup and Cache"
+ - "🖋 Format"
+ - "🔎 Credo"
+ - "🔐 Sobelow"
+ - "🛡 Deps Audit"
+ - "📊 Dialyzer"
+ - "🧪 Tests"
+ run:
+ when: "branch = 'main'"
task:
- env_vars:
- - name: MIX_ENV
- value: test
- - name: PGHOST
- value: 127.0.0.1
- - name: PGUSER
- value: postgres
+ secrets:
+ - name: dockerhub # Registry creds
jobs:
- - name: "🟢 ExUnit Tests"
- parallelism: 4 # Run tests in parallel
+ - name: "🐳 Build & Push"
commands:
- - mix test.setup # Setup test database
- - mix test --partitions $SEMAPHORE_JOB_COUNT --partition $SEMAPHORE_JOB_INDEX --cover # Run tests with coverage
+ - echo "$DOCKER_PASSWORD" | docker login --username "$DOCKER_USERNAME" --password-stdin
+ - docker build -t myapp:${SEMAPHORE_GIT_SHA} .
+ - docker tag myapp:${SEMAPHORE_GIT_SHA} myapp:latest
+ - docker push myapp:${SEMAPHORE_GIT_SHA}
+ - docker push myapp:latest
+
+# -------------------------------------------------------------------
+# AFTER PIPELINE – merge JUnit files into a single report
+# Docs: https://docs.semaphoreci.com/using-semaphore/tests/test-reports#step3
+# -------------------------------------------------------------------
+after_pipeline:
+ task:
+ jobs:
+ - name: "📊 Merge Reports"
+ commands:
+ - test-results gen-pipeline-report # Combine job reports
diff --git a/front/workflow_templates/saas_new/templates/go_docker.yml b/front/workflow_templates/saas_new/templates/go_docker.yml
index 31df7062e..c52b5e293 100644
--- a/front/workflow_templates/saas_new/templates/go_docker.yml
+++ b/front/workflow_templates/saas_new/templates/go_docker.yml
@@ -1,117 +1,132 @@
-# This is a Semaphore configuration file for Go projects using Docker
-# For more information about Semaphore configuration visit:
-# https://docs.semaphoreci.com/reference/pipeline-yaml-reference/
+# =============================================================================
+# Semaphore CI/CD pipeline for Go projects
+# Starter pipeline showcasing key Semaphore features for newcomers
+#
+# Additional references:
+# Pipeline YAML reference https://docs.semaphoreci.com/reference/pipeline-yaml/
+# Agents & VM images https://docs.semaphoreci.com/reference/agent/
+# =============================================================================
+version: v1.0
+name: "🚀 Go Project CI/CD Pipeline"
-version: v1.0 # Semaphore configuration version
-name: "🐹 Go CI Pipeline" # Pipeline display name
-
-# Define the machine type, OS image, and containers
+# --------------------------------------------------------------------------- #
+# AGENT #
+# --------------------------------------------------------------------------- #
agent:
machine:
- type: {{ machine_type }}
- os_image: {{ os_image }}
+ type: {{ machine_type }} # Define the machine type (e.g., f1-standard-2)
+ os_image: {{ os_image }} # Specify the OS image (e.g., ubuntu2204)
containers:
- name: main
- image: 'registry.semaphoreci.com/golang:1.21' # Go 1.21 container
+ image: 'registry.semaphoreci.com/golang:1.22' # Go 1.22 container
-# Configure when to stop the pipeline early
-fail_fast:
+# --------------------------------------------------------------------------- #
+# WORKFLOW CONTROL #
+# --------------------------------------------------------------------------- #
+fail_fast: # Stop pipeline on first failure https://docs.semaphoreci.com/essentials/fail-fast/
stop:
- when: branch != 'main' # Stop all blocks if a job fails on non-main branches
-auto_cancel:
+ when: "branch != 'main'"
+
+auto_cancel: # Cancel outdated pipelines https://docs.semaphoreci.com/essentials/auto-cancel/
running:
- when: branch != 'main' # Cancel running pipelines on non-main branches
+ when: "branch != 'main'"
queued:
- when: branch = 'main' # Cancel queued pipelines on main branch
+ when: "branch != 'main'"
-# Commands to run before each job
+# --------------------------------------------------------------------------- #
+# GLOBAL JOB CONFIG (runs before every job) #
+# --------------------------------------------------------------------------- #
global_job_config:
prologue:
commands:
- - checkout # Get the code from repository
- - cache restore # Restore cached dependencies
- - go mod download # Download dependencies
- - go install gotest.tools/gotestsum@latest # Install gotestsum for enhanced test output
+ - checkout # Clone repository source code https://docs.semaphoreci.com/reference/toolbox#checkout
+ - cache restore # Restore Go modules cache https://docs.semaphoreci.com/using-semaphore/optimization/cache
-# Pipeline blocks represent groups of jobs that can run in parallel
+# --------------------------------------------------------------------------- #
+# BLOCKS #
+# --------------------------------------------------------------------------- #
blocks:
- # Block for setting up dependencies and caching
- - name: "🛠 Setup and Cache"
+ # ---------------------------------- #
+ - name: "🛠️ Setup & Dependency Caching"
dependencies: []
task:
jobs:
- - name: Install Dependencies
+ - name: "📦 Install dependencies"
commands:
- - go install golang.org/x/lint/golint@latest
- - go install honnef.co/go/tools/cmd/staticcheck@latest
- - cache store # Cache dependencies for future runs
+ - go mod download
+ - cache store # Save Go modules cache https://docs.semaphoreci.com/using-semaphore/languages/go#cache
- # Block for code quality checks
- - name: "🔍 Code Quality"
- dependencies: ["🛠 Setup and Cache"]
+ # ---------------------------------- #
+ - name: "🔍 Code Quality Checks"
+ dependencies: ["🛠️ Setup & Dependency Caching"]
task:
jobs:
- - name: Lint and Vet
+ - name: "🎨 go fmt check"
+ commands:
+ - go fmt ./... # Verify gofmt compliance
+ - name: "📝 golangci-lint"
commands:
- - golint ./... # Run linter
- - go vet ./... # Run static analysis
- - staticcheck ./... # Run advanced static analysis
- - go fmt ./... # Format code
+ - go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest
+ - golangci-lint run ./...
+ - name: "🧐 go vet"
+ commands:
+ - go vet ./... # Static code vetting https://pkg.go.dev/cmd/vet
+ - name: "🔎 staticcheck"
+ commands:
+ - go install honnef.co/go/tools/cmd/staticcheck@latest
+ - staticcheck ./...
- # Block for security checks
- - name: "🔐 Security Checks"
- dependencies: ["🛠 Setup and Cache"]
+ # ---------------------------------- #
+ - name: "🔒 Security Scanning"
+ dependencies: ["🛠️ Setup & Dependency Caching"]
task:
jobs:
- - name: Security Scan
+ - name: "🛡️ govulncheck"
commands:
- - go install golang.org/x/vuln/cmd/govulncheck@latest # Install security scanner
- - govulncheck ./... # Check for vulnerabilities
+ - go install golang.org/x/vuln/cmd/govulncheck@latest
+ - govulncheck ./... # Scan for known vulnerabilities https://pkg.go.dev/golang.org/x/vuln/cmd/govulncheck
- # Block for running tests
- - name: "🧪 Tests"
- dependencies: ["🔍 Code Quality", "🔐 Security Checks"]
+ # ---------------------------------- #
+ - name: "✅ Testing"
+ dependencies: ["🔒 Security Scanning"]
task:
- env_vars:
- - name: GO_TEST_REPORT
- value: "test-results.xml" # Define the test report output path
jobs:
- - name: "🟢 Go Test"
- parallelism: 4 # Run tests across 4 parallel jobs for efficiency
+ - name: "🧪 Run Go Tests"
+ parallelism: 4 # Split tests across 4 jobs https://docs.semaphoreci.com/using-semaphore/jobs#job-parallelism
commands:
- - gotestsum --junitfile=$GO_TEST_REPORT -- -coverprofile=coverage.txt -covermode=atomic ./... # Run tests with coverage and generate JUnit report
- - go tool cover -func=coverage.txt # Display coverage report
+ - go install gotest.tools/gotestsum@latest
+ - packages=$(go list ./... | awk "NR % $SEMAPHORE_JOB_TOTAL == $SEMAPHORE_JOB_INDEX") gotestsum --junitfile reports.xml $packages
epilogue:
always:
commands:
- - test-results publish $GO_TEST_REPORT # Publish test results to Semaphore
+ - test-results publish reports.xml # Upload JUnit report https://docs.semaphoreci.com/using-semaphore/tests/test-reports
- - name: "📦 Build"
- dependencies: ["🧪 Tests"]
+ # ---------------------------------- #
+ - name: "🏗️ Build Artifact"
+ dependencies: ["✅ Testing"]
task:
jobs:
- - name: "Build Binary"
+ - name: "🔨 Compile Go Binary"
commands:
- - GOOS=linux GOARCH=amd64 go build -o app
- - artifact push workflow app # Store binary as workflow artifact
+ - go build -o app # Produce binary artifact https://go.dev/cmd/go/#hdr-Compile_packages_and_dependencies
- - name: "🐳 Docker"
- dependencies: ["📦 Build"]
+ # ---------------------------------- #
+ - name: "🐳 Docker Image Build"
+ dependencies: ["🏗️ Build Artifact"]
task:
- secrets:
- - name: dockerhub
jobs:
- - name: "Build and Push"
+ - name: "🐋 Build & Tag Docker Image"
commands:
- - artifact pull workflow app # Get the binary from previous step
- - echo "${DOCKER_PASSWORD}" | docker login -u "${DOCKER_USERNAME}" --password-stdin
- - docker build -t your-image:$SEMAPHORE_WORKFLOW_ID .
- - docker push your-image:$SEMAPHORE_WORKFLOW_ID
+ - docker build -t my-app:$SEMAPHORE_GIT_SHA . # Build container image https://docs.docker.com/engine/reference/commandline/build/
+ - docker tag my-app:$SEMAPHORE_GIT_SHA my-app:latest
+# --------------------------------------------------------------------------- #
+# AFTER PIPELINE #
+# --------------------------------------------------------------------------- #
after_pipeline:
task:
jobs:
- - name: "Merge Reports 📊"
+ - name: "📊 Publish Test Results"
commands:
- - test-results gen-pipeline-report # Generate a summary report of test results
+ - test-results gen-pipeline-report # Generate summary report https://docs.semaphoreci.com/reference/test-results/
diff --git a/front/workflow_templates/saas_new/templates/go_saas.yml b/front/workflow_templates/saas_new/templates/go_saas.yml
index ba5bfdaec..efe32b8e5 100644
--- a/front/workflow_templates/saas_new/templates/go_saas.yml
+++ b/front/workflow_templates/saas_new/templates/go_saas.yml
@@ -1,91 +1,129 @@
-version: v1.0 # Semaphore configuration version
-name: "🐹 Go CI Pipeline" # Pipeline display name
+# =============================================================================
+# Semaphore CI/CD pipeline for Go projects
+# Starter pipeline showcasing key Semaphore features for newcomers
+#
+# Additional references:
+# Pipeline YAML reference https://docs.semaphoreci.com/reference/pipeline-yaml/
+# Agents & VM images https://docs.semaphoreci.com/reference/agent/
+# =============================================================================
+version: v1.0
+name: "🚀 Go Project CI/CD Pipeline"
+# --------------------------------------------------------------------------- #
+# AGENT #
+# --------------------------------------------------------------------------- #
agent:
machine:
- type: {{ machine_type }} # Define the machine type (e.g., e1-standard-2)
- os_image: {{ os_image }} # Specify the OS image (e.g., ubuntu2004)
+ type: {{ machine_type }} # Define the machine type (e.g., f1-standard-2)
+ os_image: {{ os_image }} # Specify the OS image (e.g., ubuntu2204)
-fail_fast:
+# --------------------------------------------------------------------------- #
+# WORKFLOW CONTROL #
+# --------------------------------------------------------------------------- #
+fail_fast: # Stop pipeline on first failure https://docs.semaphoreci.com/essentials/fail-fast/
stop:
- when: branch != 'main' # Stop execution early for non-main branches
-auto_cancel:
+ when: "branch != 'main'"
+
+auto_cancel: # Cancel outdated pipelines https://docs.semaphoreci.com/essentials/auto-cancel/
running:
- when: branch != 'main' # Cancel running pipelines on non-main branches
+ when: "branch != 'main'"
queued:
- when: branch = 'main' # Cancel queued pipelines on main branch
+ when: "branch != 'main'"
+# --------------------------------------------------------------------------- #
+# GLOBAL JOB CONFIG (runs before every job) #
+# --------------------------------------------------------------------------- #
global_job_config:
prologue:
commands:
- - checkout # Fetch the latest code from the repository
- - sem-version go 1.21 # Use Go version 1.21
- - export GOPATH=~/go # Set Go path
- - export PATH=/home/semaphore/go/bin:$PATH # Update PATH with Go bin directory
- - cache restore # Restore cached dependencies to save time
- - go mod download # Download project dependencies
- - go install gotest.tools/gotestsum@latest # Install gotestsum for enhanced test output
+ - checkout # Clone repository source code https://docs.semaphoreci.com/reference/toolbox#checkout
+ - sem-version go 1.22 # Set Go toolchain version https://docs.semaphoreci.com/using-semaphore/languages/go#versions
+ - cache restore # Restore Go modules cache https://docs.semaphoreci.com/using-semaphore/optimization/cache
+# --------------------------------------------------------------------------- #
+# BLOCKS #
+# --------------------------------------------------------------------------- #
blocks:
- - name: "🛠️ Setup and Cache"
+ # ---------------------------------- #
+ - name: "🛠️ Setup & Dependency Caching"
dependencies: []
task:
jobs:
- - name: "📦 Install Dependencies"
+ - name: "📦 Install dependencies"
commands:
- - go install golang.org/x/lint/golint@latest # Install linter
- - go install honnef.co/go/tools/cmd/staticcheck@latest # Install static checker
- - cache store # Save the current state of dependencies for future runs
+ - go mod download
+ - cache store # Save Go modules cache https://docs.semaphoreci.com/using-semaphore/languages/go#cache
- - name: "🔍 Code Quality"
- dependencies: ["🛠️ Setup and Cache"]
+ # ---------------------------------- #
+ - name: "🔍 Code Quality Checks"
+ dependencies: ["🛠️ Setup & Dependency Caching"]
task:
jobs:
- - name: "✨ Lint and Vet"
+ - name: "🎨 go fmt check"
+ commands:
+ - go fmt ./... # Verify gofmt compliance
+ - name: "📝 golangci-lint"
+ commands:
+ - go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest
+ - golangci-lint run ./...
+ - name: "🧐 go vet"
+ commands:
+ - go vet ./... # Static code vetting https://pkg.go.dev/cmd/vet
+ - name: "🔎 staticcheck"
commands:
- - golint ./... # Run linter to enforce coding standards
- - go vet ./... # Run vet tool to detect issues
- - staticcheck ./... # Perform static analysis
- - go fmt ./... # Ensure code is formatted correctly
+ - go install honnef.co/go/tools/cmd/staticcheck@latest
+ - staticcheck ./...
- - name: "🔐 Security Checks"
- dependencies: ["🛠️ Setup and Cache"]
+ # ---------------------------------- #
+ - name: "🔒 Security Scanning"
+ dependencies: ["🛠️ Setup & Dependency Caching"]
task:
jobs:
- - name: "🛡️ Security Scan"
+ - name: "🛡️ govulncheck"
commands:
- - go install golang.org/x/vuln/cmd/govulncheck@latest # Install vulnerability scanner
- - govulncheck ./... # Check for known vulnerabilities in dependencies
+ - go install golang.org/x/vuln/cmd/govulncheck@latest
+ - govulncheck ./... # Scan for known vulnerabilities https://pkg.go.dev/golang.org/x/vuln/cmd/govulncheck
- - name: "🧪 Test Suite"
- dependencies: ["🛠️ Setup and Cache"]
+ # ---------------------------------- #
+ - name: "✅ Testing"
+ dependencies: ["🔒 Security Scanning"]
task:
- env_vars:
- - name: GO_TEST_REPORT
- value: "test-results.xml" # Define the test report output path
jobs:
- - name: "🟢 Go Test"
- parallelism: 4 # Run tests across 4 parallel jobs for efficiency
+ - name: "🧪 Run Go Tests"
+ parallelism: 4 # Split tests across 4 jobs https://docs.semaphoreci.com/using-semaphore/jobs#job-parallelism
commands:
- - gotestsum --junitfile=$GO_TEST_REPORT -- -coverprofile=coverage.txt -covermode=atomic ./... # Run tests with coverage and generate JUnit report
- - go tool cover -func=coverage.txt # Display coverage report
+ - go install gotest.tools/gotestsum@latest
+ - packages=$(go list ./... | awk "NR % $SEMAPHORE_JOB_TOTAL == $SEMAPHORE_JOB_INDEX") gotestsum --junitfile reports.xml $packages
epilogue:
always:
commands:
- - test-results publish $GO_TEST_REPORT # Publish test results to Semaphore
+ - test-results publish reports.xml # Upload JUnit report https://docs.semaphoreci.com/using-semaphore/tests/test-reports
+
+ # ---------------------------------- #
+ - name: "🏗️ Build Artifact"
+ dependencies: ["✅ Testing"]
+ task:
+ jobs:
+ - name: "🔨 Compile Go Binary"
+ commands:
+ - go build -o app # Produce binary artifact https://go.dev/cmd/go/#hdr-Compile_packages_and_dependencies
- - name: "📦 Build"
- dependencies: ["🧪 Test Suite"]
+ # ---------------------------------- #
+ - name: "🐳 Docker Image Build"
+ dependencies: ["🏗️ Build Artifact"]
task:
jobs:
- - name: "🔨 Build Binary"
+ - name: "🐋 Build & Tag Docker Image"
commands:
- - GOOS=linux GOARCH=amd64 go build -o bin/app # Build for Linux
- - GOOS=darwin GOARCH=amd64 go build -o bin/app-darwin # Build for macOS
+ - docker build -t my-app:$SEMAPHORE_GIT_SHA . # Build container image https://docs.docker.com/engine/reference/commandline/build/
+ - docker tag my-app:$SEMAPHORE_GIT_SHA my-app:latest
+# --------------------------------------------------------------------------- #
+# AFTER PIPELINE #
+# --------------------------------------------------------------------------- #
after_pipeline:
task:
jobs:
- - name: "Merge Reports 📊"
+ - name: "📊 Publish Test Results"
commands:
- - test-results gen-pipeline-report # Generate a summary report of test results
+ - test-results gen-pipeline-report # Generate summary report https://docs.semaphoreci.com/reference/test-results/
diff --git a/front/workflow_templates/saas_new/templates/rspec_docker.yml b/front/workflow_templates/saas_new/templates/rspec_docker.yml
index aee4a02be..f704c9dbe 100644
--- a/front/workflow_templates/saas_new/templates/rspec_docker.yml
+++ b/front/workflow_templates/saas_new/templates/rspec_docker.yml
@@ -1,125 +1,145 @@
version: v1.0
name: "💎 Ruby CI Pipeline"
+# ---------------------------------------------------------------------
+# GLOBAL SETTINGS
+# ---------------------------------------------------------------------
agent:
machine:
- type: {{ machine_type }}
+ type: {{ machine_type }} # Filled by template variables
os_image: {{ os_image }}
+ # Containers run side-by-side on the VM
containers:
- name: main
- image: 'registry.semaphoreci.com/ruby:3.2.2-node-browsers' # Ruby 3.2.2 with Node.js and browser support for front-end tests
+ image: 'registry.semaphoreci.com/ruby:3.2.2-node-browsers' # Ruby 3.2.2 + Node 20 + browsers
- name: postgres
- image: registry.semaphoreci.com/postgres:17 # PostgreSQL 17 for database operations
+ image: 'registry.semaphoreci.com/postgres:17' # PostgreSQL 17
- name: redis
- image: registry.semaphoreci.com/redis:7.0 # Redis 7.0 for caching and session management
+ image: 'registry.semaphoreci.com/redis:7.0' # Redis 7.0
+# Fast feedback & cost control
fail_fast:
stop:
- when: branch != 'master' # Stop execution early for non-master branches
+ when: branch != 'main' # Abort early on non-main branches
auto_cancel:
running:
- when: branch != 'master' # Auto-cancel running pipelines on non-master branches
+ when: branch != 'main' # Cancel older running pipelines on feature branches
queued:
- when: branch = 'master' # Auto-cancel queued pipelines for master branch
+ when: branch = 'main' # Keep main’s queue clean
+# Common pre-steps for every job
global_job_config:
prologue:
commands:
- - checkout # Fetch the source code
- - cache restore # Restore cached dependencies
- - yarn install --frozen-lockfile # Install JavaScript dependencies for Webpacker
- - bundle install --deployment --path vendor/bundle # Install Ruby gems
+ - checkout # Grab the code
+ - cache restore # Pull dependencies & packs from cache
+# ---------------------------------------------------------------------
+# WORKFLOW
+# ---------------------------------------------------------------------
blocks:
- - name: "🛠 Setup and Cache"
+ # ---------------- SETUP ----------------
+ - name: "🛠 Setup & Cache"
task:
jobs:
- - name: Install Dependencies
+ - name: Install Gems & JS deps
commands:
- - cache store # Store dependencies in cache for later reuse
+ - 'curl -o- -L https://yarnpkg.com/install.sh | bash -s -- --version 1.22.19'
+ - 'export PATH="$HOME/.yarn/bin:$PATH"'
+ - yarn install --frozen-lockfile # Install JS deps
+ - bundle install --deployment --path vendor/bundle # Install Ruby gems
+ - gem install --no-document semaphore_test_boosters # ⏩ Boosters gem
+ - cache store # Save vendor/bundle & node_modules
dependencies: []
- - name: "📦 Webpacker Setup"
+ # ------------- FRONT-END BUILD -------------
+ - name: "🖼️ Webpacker Build"
task:
jobs:
- - name: Precompile Webpacker Assets
+ - name: Compile Assets
commands:
- - bundle exec rake webpacker:compile # Compile Webpacker assets for tests
+ - cache restore webpacker-assets # Reuse previous packs if present
+ - bundle exec rake webpacker:compile # Produce packs for tests
+ - cache store webpacker-assets public/packs # Save packs for later blocks
dependencies:
- - "🛠 Setup and Cache"
+ - "🛠 Setup & Cache"
- - name: "🔍 Lint Code"
+ # ------------- CODE QUALITY -------------
+ - name: "🔍 ESLint & Stylelint"
task:
jobs:
- - name: Lint JavaScript and CSS
+ - name: JS / CSS Lint
commands:
- - yarn run eslint . # Run ESLint for JavaScript code
- - yarn run stylelint "**/*.scss" # Run Stylelint for SCSS files
+ - 'curl -o- -L https://yarnpkg.com/install.sh | bash -s -- --version 1.22.19'
+ - 'export PATH="$HOME/.yarn/bin:$PATH"'
+ - yarn run eslint . # Lint JavaScript
+ - yarn run stylelint '**/*.scss' # Lint SCSS
dependencies:
- - "📦 Webpacker Setup"
+ - "🖼️ Webpacker Build"
- - name: "🧹 Run RuboCop"
+ - name: "🧹 RuboCop"
task:
jobs:
- - name: bundle exec rubocop
+ - name: Ruby Style Check
commands:
- - bundle exec rubocop # Lint and enforce Ruby coding standards
+ - bundle exec rubocop # Enforce Ruby style
dependencies:
- - "🛠 Setup and Cache"
+ - "🛠 Setup & Cache"
- - name: "🛡️ Run Brakeman"
+ # ------------- SECURITY -------------
+ - name: "🛡️ Brakeman"
task:
jobs:
- - name: bundle exec brakeman --force
+ - name: Static Analysis
commands:
- - bundle exec brakeman --force # Perform security checks
+ - bundle exec brakeman --force # Rails security scan
dependencies:
- - "🛠 Setup and Cache"
+ - "🛠 Setup & Cache"
- - name: "🔍 Run Bundler Audit"
+ - name: "🛡️ Bundler Audit"
task:
jobs:
- - name: bundle exec bundle-audit check --update
+ - name: Gem CVE Check
commands:
- - bundle exec bundle-audit check --update # Check for known vulnerabilities in dependencies
+ - bundle exec bundle-audit check --update
dependencies:
- - "🛠 Setup and Cache"
+ - "🛠 Setup & Cache"
- - name: "🚦 Test Suite"
+ # ------------- TEST SUITE (Boosters) -------------
+ - name: "🚦 RSpec Suite"
task:
env_vars:
- name: RAILS_ENV
- value: test # Set environment to test
+ value: test # Use the test environment
- name: PGHOST
- value: 127.0.0.1 # PostgreSQL host
+ value: 127.0.0.1 # PostgreSQL host (service container)
- name: PGUSER
- value: postgres # PostgreSQL user
- - name: KNAPSACK_TEST_FILE_PATTERN
- value: 'spec/**{,/*/**}/*_spec.rb' # Define test file pattern for RSpec
- - name: KNAPSACK_REPORT_PATH
- value: knapsack_rspec_rails_report.json # Set path for Knapsack report
+ value: postgres # PostgreSQL user
jobs:
- name: "🟢 RSpec Tests"
- parallelism: 5 # Run tests in parallel across 5 jobs
+ parallelism: 5 # Split across 5 nodes
commands:
- - 'bundle exec rake db:setup' # Prepare the test database
- - 'CI_NODE_TOTAL=$SEMAPHORE_JOB_COUNT CI_NODE_INDEX=$((SEMAPHORE_JOB_INDEX-1)) bundle exec rake "knapsack:rspec[-t ~manual -t ~v4 --format RspecJunitFormatter --out report.xml --format documentation]"' # Run RSpec tests with Knapsack
- secrets:
- - name: KNAPSACK_PRO_TEST_SUITE_TOKEN_RSPEC # Secure token for Knapsack Pro
+ - cache restore webpacker-assets # Pull precompiled packs
+ - bundle exec rake db:setup # Build fresh test DB
+ # Automatic test splitting via Semaphore Boosters
+ - rspec_booster --job "$SEMAPHORE_JOB_INDEX/$SEMAPHORE_JOB_COUNT" --format RspecJunitFormatter --out report.xml --format documentation
epilogue:
always:
commands:
- - '[[ -f report.xml ]] && test-results publish report.xml' # Publish test results if available
+ - '[[ -f report.xml ]] && test-results publish report.xml'
dependencies:
- - "🔍 Run Bundler Audit"
- - "🛡️ Run Brakeman"
- - "🧹 Run RuboCop"
- - "📦 Webpacker Setup"
+ - "🧹 RuboCop"
+ - "🛡️ Brakeman"
+ - "🛡️ Bundler Audit"
+ - "🖼️ Webpacker Build"
+# ---------------------------------------------------------------------
+# PIPELINE SUMMARY
+# ---------------------------------------------------------------------
after_pipeline:
task:
jobs:
- - name: "Merge Reports 📊"
+ - name: "📊 Merge Results"
commands:
- - test-results gen-pipeline-report # Generate a summary of the test results
+ - test-results gen-pipeline-report # Collate JUnit reports
diff --git a/front/workflow_templates/saas_new/templates/rspec_saas.yml b/front/workflow_templates/saas_new/templates/rspec_saas.yml
index 9e2b76046..d3abf508c 100644
--- a/front/workflow_templates/saas_new/templates/rspec_saas.yml
+++ b/front/workflow_templates/saas_new/templates/rspec_saas.yml
@@ -1,121 +1,136 @@
version: v1.0
-name: "💎 Ruby CI Pipeline"
+name: "💎 Ruby CI Pipeline"
+# ---------------------------------------------------------------------
+# GLOBAL SETTINGS
+# ---------------------------------------------------------------------
agent:
machine:
- type: {{ machine_type }}
+ type: {{ machine_type }} # Filled by template variables
os_image: {{ os_image }}
+# Fast feedback & cost control
fail_fast:
stop:
- when: branch != 'master' # Stop execution early for non-master branches
+ when: branch != 'main' # Abort early on non-main branches
auto_cancel:
running:
- when: branch != 'master' # Auto-cancel running pipelines on non-master branches
+ when: branch != 'main' # Cancel older runs on feature branches
queued:
- when: branch = 'master' # Auto-cancel queued pipelines for master branch
+ when: branch = 'main' # Keep the queue clean for main
+# Common pre-steps for every job
global_job_config:
prologue:
commands:
- - checkout # Fetch the source code
- - sem-service start postgres 17 # Start PostgreSQL 17 service
- - sem-service start redis 7 # Start Redis 7 service
- - sem-version ruby 3.2.2 # Use Ruby version 3.2.2
- - sem-version node 20.11.0 # Use Node.js version 20.11.0
- - cache restore # Restore cached dependencies
- - yarn install --frozen-lockfile # Install JavaScript dependencies for Webpacker
- - bundle install --deployment --path vendor/bundle # Install Ruby gems
+ - checkout # Grab the code
+ - sem-service start postgres 17 # Start Postgres 17
+ - sem-service start redis 7 # Start Redis 7
+ - sem-version ruby 3.2.2 # Select Ruby 3.2.2
+ - sem-version node 20.11.0 # Select Node 20.11
+ - cache restore # Pull dependencies & packs from cache
+# ---------------------------------------------------------------------
+# WORKFLOW
+# ---------------------------------------------------------------------
blocks:
- - name: "🛠 Setup and Cache"
+ # ---------------- SETUP ----------------
+ - name: "🛠 Setup & Cache"
task:
jobs:
- - name: Install Dependencies
+ - name: Install Gems & JS deps
commands:
- - cache store # Store dependencies in cache for later reuse
+ - yarn install --frozen-lockfile # Install JS deps
+ - bundle install --deployment --path vendor/bundle # Install Ruby gems
+ - gem install --no-document semaphore_test_boosters # Install Boosters gem
+ - cache store # Save vendor/bundle & node_modules
dependencies: []
- - name: "📦 Webpacker Setup"
+ # ------------- FRONT-END BUILD -------------
+ - name: "🖼️ Webpacker Build"
task:
jobs:
- - name: Precompile Webpacker Assets
+ - name: Compile Assets
commands:
- - bundle exec rake webpacker:compile # Compile Webpacker assets for tests
+ - cache restore webpacker-assets # Reuse previous packs if present
+ - bundle exec rake webpacker:compile # Produce packs for tests
+ - cache store webpacker-assets public/packs # Save packs for later blocks
dependencies:
- - "🛠 Setup and Cache"
+ - "🛠 Setup & Cache"
- - name: "🔍 Lint Code"
+ # ------------- CODE QUALITY -------------
+ - name: "🔍 ESLint & Stylelint"
task:
jobs:
- - name: Lint JavaScript and CSS
+ - name: JS / CSS Lint
commands:
- - yarn run eslint . # Run ESLint for JavaScript code
- - yarn run stylelint "**/*.scss" # Run Stylelint for SCSS files
+ - yarn run eslint . # Lint JavaScript
+ - yarn run stylelint '**/*.scss' # Lint SCSS
dependencies:
- - "📦 Webpacker Setup"
+ - "🖼️ Webpacker Build"
- - name: "🧹 Run RuboCop"
+ - name: "🧹 RuboCop"
task:
jobs:
- - name: bundle exec rubocop
+ - name: Ruby Style Check
commands:
- - bundle exec rubocop # Lint and enforce Ruby coding standards
+ - bundle exec rubocop # Enforce Ruby style
dependencies:
- - "🛠 Setup and Cache"
+ - "🛠 Setup & Cache"
- - name: "🛡️ Run Brakeman"
+ # ------------- SECURITY -------------
+ - name: "🛡️ Brakeman"
task:
jobs:
- - name: bundle exec brakeman --force
+ - name: Static Analysis
commands:
- - bundle exec brakeman --force # Perform security checks
+ - bundle exec brakeman --force # Rails security scan
dependencies:
- - "🛠 Setup and Cache"
+ - "🛠 Setup & Cache"
- - name: "🔍 Run Bundler Audit"
+ - name: "🛡️ Bundler Audit"
task:
jobs:
- - name: bundle exec bundle-audit check --update
+ - name: Gem CVE Check
commands:
- - bundle exec bundle-audit check --update # Check for known vulnerabilities in dependencies
+ - bundle exec bundle-audit check --update
dependencies:
- - "🛠 Setup and Cache"
+ - "🛠 Setup & Cache"
- - name: "🚦 Test Suite"
+ # ------------- TEST SUITE (Boosters) -------------
+ - name: "🚦 RSpec Suite"
task:
env_vars:
- name: RAILS_ENV
- value: test # Set environment to test
+ value: test # Use the test environment
- name: PGHOST
- value: 127.0.0.1 # PostgreSQL host
+ value: 127.0.0.1 # PostgreSQL host
- name: PGUSER
- value: postgres # PostgreSQL user
- - name: KNAPSACK_TEST_FILE_PATTERN
- value: 'spec/**{,/*/**}/*_spec.rb' # Define test file pattern for RSpec
- - name: KNAPSACK_REPORT_PATH
- value: knapsack_rspec_rails_report.json # Set path for Knapsack report
+ value: postgres # PostgreSQL user
jobs:
- name: "🟢 RSpec Tests"
- parallelism: 5 # Run tests in parallel across 5 jobs
+ parallelism: 5 # Split across 5 nodes
commands:
- - 'bundle exec rake db:setup' # Prepare the test database
- - 'CI_NODE_TOTAL=$SEMAPHORE_JOB_COUNT CI_NODE_INDEX=$((SEMAPHORE_JOB_INDEX-1)) bundle exec rake "knapsack:rspec[-t ~manual -t ~v4 --format RspecJunitFormatter --out report.xml --format documentation]"' # Run RSpec tests with Knapsack
- secrets:
- - name: KNAPSACK_PRO_TEST_SUITE_TOKEN_RSPEC # Secure token for Knapsack Pro
+ - cache restore webpacker-assets # Pull precompiled packs
+ - bundle exec rake db:setup # Build fresh test DB
+ # Automatic test splitting via Semaphore Boosters
+ - rspec_booster --job "$SEMAPHORE_JOB_INDEX/$SEMAPHORE_JOB_COUNT" --format RspecJunitFormatter --out report.xml --format documentation
epilogue:
always:
commands:
- - '[[ -f report.xml ]] && test-results publish report.xml' # Publish test results if available
+ - '[[ -f report.xml ]] && test-results publish report.xml'
dependencies:
- - "🔍 Run Bundler Audit"
- - "🛡️ Run Brakeman"
- - "🧹 Run RuboCop"
- - "📦 Webpacker Setup"
+ - "🧹 RuboCop"
+ - "🛡️ Brakeman"
+ - "🛡️ Bundler Audit"
+ - "🖼️ Webpacker Build"
+# ---------------------------------------------------------------------
+# PIPELINE SUMMARY
+# ---------------------------------------------------------------------
after_pipeline:
task:
jobs:
- - name: "Merge Reports 📊"
+ - name: "📊 Merge Results"
commands:
- - test-results gen-pipeline-report # Generate a summary of the test results
+ - test-results gen-pipeline-report # Collate JUnit reports
From 3dcb4e8cda7185f196f8ec6703d0a1adca2143ee Mon Sep 17 00:00:00 2001
From: d-stefanovic <83216231+d-stefanovic@users.noreply.github.com>
Date: Wed, 9 Jul 2025 10:23:34 +0200
Subject: [PATCH 23/87] docs: update container-registry.md (added Redis 8.0 and
Valkey 8.1) (#424)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
- Update container-registry.md
- added Redis 8.0
- added Valkey 8.1
## ✅ Checklist
- [ ] I have tested this change
- [ ] This change requires documentation update
---
.../optimization/container-registry.md | 14 ++++++++++++++
1 file changed, 14 insertions(+)
diff --git a/docs/docs/using-semaphore/optimization/container-registry.md b/docs/docs/using-semaphore/optimization/container-registry.md
index 115d5124a..62ac0e9ff 100644
--- a/docs/docs/using-semaphore/optimization/container-registry.md
+++ b/docs/docs/using-semaphore/optimization/container-registry.md
@@ -538,6 +538,20 @@ This is a PostgreSQL container extended with [PostGIS](https://postgis.net/).
| redis:5.0 | `registry.semaphoreci.com/redis:5.0` |
| redis:6.2 | `registry.semaphoreci.com/redis:6.2` |
| redis:7.0 | `registry.semaphoreci.com/redis:7.0` |
+| redis:8.0 | `registry.semaphoreci.com/redis:8.0` |
+
+
+
+
+### Valkey
+
+
+Valkey images
+
+
+| Image | Link |
+|--------|--------|
+| valkey:8.1 | `registry.semaphoreci.com/valkey:8.1` |
From 6db0d9e20ef5a3839791ed27edde816006e5ef76 Mon Sep 17 00:00:00 2001
From: Amir Hasanbasic <43892661+hamir-suspect@users.noreply.github.com>
Date: Thu, 10 Jul 2025 13:56:38 +0200
Subject: [PATCH 24/87] fix(plumber): check if DT permits user to partially
rebuild pipeline (#427)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
Refreshed protos; the partial_rebuild action now checks whether the deployment
target policy allows the user to run the pipeline.
## ✅ Checklist
- [x] I have tested this change
- [ ] This change requires documentation update
---
plumber/gofer_client/lib/gofer_client.ex | 7 +
.../lib/gofer_client/grpc_client.ex | 21 +
.../lib/gofer_client/request_formatter.ex | 15 +
.../lib/gofer_client/response_parser.ex | 30 +
plumber/ppl/lib/ppl/grpc/server.ex | 20 +
plumber/ppl/test/grpc/server_test.exs | 80 ++
plumber/proto/Makefile | 1 +
.../proto/lib/internal_api/artifacthub.pb.ex | 227 +++---
plumber/proto/lib/internal_api/build.pb.ex | 206 +++---
plumber/proto/lib/internal_api/gofer.dt.pb.ex | 588 +++++++++++++++
.../proto/lib/internal_api/gofer.switch.pb.ex | 276 +++----
plumber/proto/lib/internal_api/health.pb.ex | 16 +-
.../include/google/protobuf/timestamp.pb.ex | 4 +-
.../include/google/rpc/code.pb.ex | 34 +-
.../include/google/rpc/status.pb.ex | 6 +-
.../internal_api/response_status.pb.ex | 8 +-
.../include/internal_api/status.pb.ex | 4 +-
.../proto/lib/internal_api/organization.pb.ex | 660 ++++++++---------
.../lib/internal_api/paparazzo.snapshot.pb.ex | 36 +-
.../lib/internal_api/plumber.admin.pb.ex | 30 +-
.../internal_api/pre_flight_checks_hub.pb.ex | 100 +--
.../proto/lib/internal_api/projecthub.pb.ex | 683 +++++++++++-------
.../proto/lib/internal_api/repo_proxy.pb.ex | 204 +++---
.../proto/lib/internal_api/repository.pb.ex | 514 +++++++------
.../internal_api/repository_integrator.pb.ex | 144 ++--
.../stethoscope.listener_proxy.pb.ex | 23 +-
plumber/proto/lib/internal_api/task.pb.ex | 183 ++---
plumber/proto/lib/internal_api/user.pb.ex | 406 +++++------
plumber/proto/mix.exs | 19 +-
29 files changed, 2800 insertions(+), 1745 deletions(-)
create mode 100644 plumber/proto/lib/internal_api/gofer.dt.pb.ex
diff --git a/plumber/gofer_client/lib/gofer_client.ex b/plumber/gofer_client/lib/gofer_client.ex
index bb35e2b5a..09bfe66ff 100644
--- a/plumber/gofer_client/lib/gofer_client.ex
+++ b/plumber/gofer_client/lib/gofer_client.ex
@@ -36,4 +36,11 @@ defmodule GoferClient do
|> GrpcClient.pipeline_done()
|> ResponseParser.process_pipeline_done_response()
end
+
+ def verify_deployment_target_access(target_id, triggerer, git_ref_type, git_ref_label) do
+ target_id
+ |> RequestFormatter.form_verify_request(triggerer, git_ref_type, git_ref_label)
+ |> GrpcClient.verify_deployment_target_access()
+ |> ResponseParser.process_verify_response()
+ end
end
diff --git a/plumber/gofer_client/lib/gofer_client/grpc_client.ex b/plumber/gofer_client/lib/gofer_client/grpc_client.ex
index b0de50cad..15e8adab0 100644
--- a/plumber/gofer_client/lib/gofer_client/grpc_client.ex
+++ b/plumber/gofer_client/lib/gofer_client/grpc_client.ex
@@ -6,6 +6,7 @@ defmodule GoferClient.GrpcClient do
"""
alias InternalApi.Gofer.Switch
+ alias InternalApi.Gofer.DeploymentTargets
alias Util.Metrics
alias LogTee, as: LT
@@ -54,6 +55,26 @@ defmodule GoferClient.GrpcClient do
end)
end
+ # Verify Deployment Target Access
+
+ def verify_deployment_target_access({:ok, verify_request}) do
+ result = Wormhole.capture(__MODULE__, :verify_deployment_target_access_, [verify_request], stacktrace: true, timeout: 2_345)
+ case result do
+ {:ok, result} -> result
+ error -> error
+ end
+ end
+ def verify_deployment_target_access(error), do: error
+
+ def verify_deployment_target_access_(verify_request) do
+ {:ok, channel} = GRPC.Stub.connect(url())
+ Metrics.benchmark("Ppl.gofer_client.grpc_client", "verify_deployment_target", fn ->
+ channel
+ |> DeploymentTargets.DeploymentTargets.Stub.verify(verify_request, opts())
+ |> is_ok?("verify_deployment_target")
+ end)
+ end
+
# Utility
defp is_ok?(response = {:ok, _rsp}, _method), do: response
diff --git a/plumber/gofer_client/lib/gofer_client/request_formatter.ex b/plumber/gofer_client/lib/gofer_client/request_formatter.ex
index c580aa81e..93977e7ba 100644
--- a/plumber/gofer_client/lib/gofer_client/request_formatter.ex
+++ b/plumber/gofer_client/lib/gofer_client/request_formatter.ex
@@ -5,6 +5,7 @@ defmodule GoferClient.RequestFormatter do
"""
alias InternalApi.Gofer.{CreateRequest, PipelineDoneRequest, GitRefType}
+ alias InternalApi.Gofer.DeploymentTargets.VerifyRequest
alias Util.{ToTuple, Proto}
# Create
@@ -79,4 +80,18 @@ defmodule GoferClient.RequestFormatter do
"One or more of these params: #{inspect switch_id}, #{inspect result} and #{inspect result_reason} is not string."
|> ToTuple.error()
end
+
+ # Verify
+
+ def form_verify_request(target_id, triggerer, git_ref_type, git_ref_label)
+ when is_binary(target_id) and is_binary(triggerer) and is_binary(git_ref_type) and is_binary(git_ref_label) do
+ verify_params = %{target_id: target_id, triggerer: triggerer, git_ref_type: git_ref_type, git_ref_label: git_ref_label}
+
+ Proto.deep_new(VerifyRequest, verify_params,
+ transformations: %{VerifyRequest.GitRefType => {__MODULE__, :string_to_enum_atom}})
+ end
+ def form_verify_request(target_id, triggerer, git_ref_type, git_ref_label) do
+ "One or more of these params: #{inspect target_id}, #{inspect triggerer}, #{inspect git_ref_type} and #{inspect git_ref_label} is not in the expected format."
+ |> ToTuple.error()
+ end
end
diff --git a/plumber/gofer_client/lib/gofer_client/response_parser.ex b/plumber/gofer_client/lib/gofer_client/response_parser.ex
index 4c8a456f9..c0fe924f9 100644
--- a/plumber/gofer_client/lib/gofer_client/response_parser.ex
+++ b/plumber/gofer_client/lib/gofer_client/response_parser.ex
@@ -5,6 +5,7 @@ defmodule GoferClient.ResponseParser do
"""
alias InternalApi.Gofer.ResponseStatus.ResponseCode
+ alias InternalApi.Gofer.DeploymentTargets.VerifyResponse.Status
alias LogTee, as: LT
alias Util.ToTuple
@@ -47,8 +48,31 @@ defmodule GoferClient.ResponseParser do
def process_pipeline_done_response(error), do: error
+ # Verify
+
+ def process_verify_response({:ok, response}) do
+ with true <- is_map(response),
+ {:ok, status} <- Map.fetch(response, :status),
+ status_atom <- verify_status_value(status)
+ do
+ handle_verify_status(status_atom, response)
+ else
+ _ -> log_invalid_response(response, "verify")
+ end
+ end
+
+ def process_verify_response(error), do: error
+
# Util
+ defp handle_verify_status(:ACCESS_GRANTED, _response), do: {:ok, :access_granted}
+ defp handle_verify_status(:SYNCING_TARGET, _response), do: {:error, :syncing_target}
+ defp handle_verify_status(:BANNED_SUBJECT, _response), do: {:error, :banned_subject}
+ defp handle_verify_status(:BANNED_OBJECT, _response), do: {:error, :banned_object}
+ defp handle_verify_status(:CORDONED_TARGET, _response), do: {:error, :cordoned_target}
+ defp handle_verify_status(:CORRUPTED_TARGET, _response), do: {:error, :corrupted_target}
+ defp handle_verify_status(_status_atom, response), do: log_invalid_response(response, "verify")
+
defp response_code_value(%{code: code}) do
ResponseCode.key(code)
rescue _ ->
@@ -56,6 +80,12 @@ defmodule GoferClient.ResponseParser do
end
defp response_code_value(_), do: nil
+ defp verify_status_value(status) do
+ Status.key(status)
+ rescue _ ->
+ nil
+ end
+
defp log_invalid_response(response, rpc_method) do
response
|> LT.error("Gofer service responded to #{rpc_method} with :ok and invalid data:")
diff --git a/plumber/ppl/lib/ppl/grpc/server.ex b/plumber/ppl/lib/ppl/grpc/server.ex
index c94d23688..c47b81682 100644
--- a/plumber/ppl/lib/ppl/grpc/server.ex
+++ b/plumber/ppl/lib/ppl/grpc/server.ex
@@ -418,6 +418,8 @@ defmodule Ppl.Grpc.Server do
{:ok, false} <- project_deleted?(ppl.project_id),
{"done", result} when result != "passed"
<- {ppl.state, ppl.result},
+ {:ok, ppl_req} <- PplRequestsQueries.get_by_id(request.ppl_id),
+ {:ok} <- verify_deployment_target_permission(ppl_req, request.user_id),
{:ok, ppl_id} <- Actions.partial_rebuild(request)
do
Proto.deep_new!(PartialRebuildResponse,
@@ -425,6 +427,8 @@ defmodule Ppl.Grpc.Server do
else
{:error, {:project_deleted, project_id}} ->
responed_refused(PartialRebuildResponse, "Project with id #{project_id} was deleted.")
+ {:error, {:deployment_target_permission_denied, reason}} ->
+ rebuild_error_resp("Access to deployment target denied: #{inspect reason}")
{:error, message} ->
rebuild_error_resp("#{inspect message}")
{"done", "passed"} ->
@@ -491,6 +495,22 @@ defmodule Ppl.Grpc.Server do
defp limit_status(message),
do: ResponseStatus.new(code: ResponseCode.value(:LIMIT_EXCEEDED), message: to_str(message))
+ defp verify_deployment_target_permission(%{request_args: %{"deployment_target_id" => ""}}, _user_id), do: {:ok}
+ defp verify_deployment_target_permission(%{request_args: %{"deployment_target_id" => nil}}, _user_id), do: {:ok}
+ defp verify_deployment_target_permission(%{
+ request_args: %{"deployment_target_id" => deployment_target_id, "label" => label},
+ source_args: %{"git_ref_type" => git_ref_type}
+ }, user_id) when is_binary(git_ref_type) and is_binary(label) and label != "" do
+ case GoferClient.verify_deployment_target_access(deployment_target_id, user_id, git_ref_type, label) do
+ {:ok, :access_granted} -> {:ok}
+ {:error, reason} -> {:error, {:deployment_target_permission_denied, reason}}
+ error -> {:error, {:deployment_target_permission_denied, error}}
+ end
+ end
+ defp verify_deployment_target_permission(%{request_args: %{"deployment_target_id" => deployment_target_id}}, _user_id) when is_binary(deployment_target_id) and deployment_target_id != "",
+ do: {:error, {:deployment_target_permission_denied, "Missing label or git_ref_type"}}
+ defp verify_deployment_target_permission(_, _), do: {:ok}
+
defp string_keys(map), do: map |> Poison.encode!() |> Poison.decode!()
defp to_str(term) when is_binary(term), do: term
diff --git a/plumber/ppl/test/grpc/server_test.exs b/plumber/ppl/test/grpc/server_test.exs
index 74b42bc72..c9cf2179a 100644
--- a/plumber/ppl/test/grpc/server_test.exs
+++ b/plumber/ppl/test/grpc/server_test.exs
@@ -1,6 +1,7 @@
defmodule Ppl.Grpc.Server.Test do
use Ppl.IntegrationCase
@moduletag capture_log: true
+ import Mock
alias Test.Helpers
alias Util.{ToTuple, Proto}
alias Ppl.PplRequests.Model.PplRequestsQueries
@@ -2208,6 +2209,85 @@ defmodule Ppl.Grpc.Server.Test do
assert new_ppl_id_1 == new_ppl_id_2
end
+ @tag :integration
+ test "gRPC partial_rebuild() - succeeds when no deployment target is specified" do
+ {:ok, %{ppl_id: ppl_id}} =
+ %{"repo_name" => "14_free_topology_failing_block"}
+ |> Test.Helpers.schedule_request_factory(:local)
+ |> Actions.schedule()
+
+ loopers = Test.Helpers.start_all_loopers()
+ {:ok, _ppl} = Test.Helpers.wait_for_ppl_state(ppl_id, "done", 20_000)
+ Test.Helpers.stop_all_loopers(loopers)
+
+ request_token = UUID.uuid4()
+ new_ppl_id = assert_partial_rebuild(ppl_id, request_token, :ok)
+ assert is_binary(new_ppl_id)
+ end
+
+ @tag :integration
+ test "gRPC partial_rebuild() - fails when deployment target permission is denied" do
+ deployment_target_id = UUID.uuid4()
+ {:ok, %{ppl_id: ppl_id}} = create_pipeline_with_deployment_target(deployment_target_id)
+
+ # Mock GoferClient to return access denied
+ with_mock GoferClient, [
+ verify_deployment_target_access: fn(_, _, _, _) -> {:error, :banned_subject} end
+ ] do
+ expected_message = "Access to deployment target denied: :banned_subject"
+ assert_partial_rebuild(ppl_id, UUID.uuid4(), :error, expected_message)
+ end
+ end
+
+ @tag :integration
+ test "gRPC partial_rebuild() - succeeds when deployment target permission is granted" do
+ deployment_target_id = UUID.uuid4()
+ {:ok, %{ppl_id: ppl_id}} = create_pipeline_with_deployment_target(deployment_target_id)
+
+ # Mock GoferClient to return access granted
+ with_mock GoferClient, [
+ verify_deployment_target_access: fn(_, _, _, _) -> {:ok, :access_granted} end
+ ] do
+ request_token = UUID.uuid4()
+ new_ppl_id = assert_partial_rebuild(ppl_id, request_token, :ok)
+ assert is_binary(new_ppl_id)
+ end
+ end
+
+ @tag :integration
+ test "gRPC partial_rebuild() - fails when deployment target verification returns error" do
+ deployment_target_id = UUID.uuid4()
+ {:ok, %{ppl_id: ppl_id}} = create_pipeline_with_deployment_target(deployment_target_id)
+
+ # Mock GoferClient to return syncing target error
+ with_mock GoferClient, [
+ verify_deployment_target_access: fn(_, _, _, _) -> {:error, :syncing_target} end
+ ] do
+ expected_message = "Access to deployment target denied: :syncing_target"
+ assert_partial_rebuild(ppl_id, UUID.uuid4(), :error, expected_message)
+ end
+ end
+
+ defp create_pipeline_with_deployment_target(deployment_target_id) do
+ source_args = Test.Support.RequestFactory.source_args(%{})
+
+ %{
+ "repo_name" => "14_free_topology_failing_block",
+ "deployment_target_id" => deployment_target_id
+ }
+ |> Test.Helpers.schedule_request_factory(:local)
+ |> Map.put("source_args", source_args)
+ |> Actions.schedule()
+ |> case do
+ {:ok, %{ppl_id: ppl_id}} = result ->
+ loopers = Test.Helpers.start_all_loopers()
+ {:ok, _ppl} = Test.Helpers.wait_for_ppl_state(ppl_id, "done", 20_000)
+ Test.Helpers.stop_all_loopers(loopers)
+ result
+ error -> error
+ end
+ end
+
defp assert_partial_rebuild(ppl_id, request_token, expected_status, expected_message \\ "") do
request =
%{ppl_id: ppl_id, request_token: request_token, user_id: "rebuild_user"}
diff --git a/plumber/proto/Makefile b/plumber/proto/Makefile
index ab1781a85..c0495c8a7 100644
--- a/plumber/proto/Makefile
+++ b/plumber/proto/Makefile
@@ -31,6 +31,7 @@ endif
docker run --rm -v $(PWD):/home/protoc/code -v $(TMP_INTERNAL_REPO_DIR):/home/protoc/source renderedtext/protoc:$(RT_PROTOC_IMG_VSN) protoc -I /home/protoc/source -I /home/protoc/source/include --elixir_out=plugins=grpc:$(RELATIVE_INTERNAL_PB_OUTPUT_DIR) --plugin=/root/.mix/escripts/protoc-gen-elixir /home/protoc/source/include/google/rpc/status.proto
docker run --rm -v $(PWD):/home/protoc/code -v $(TMP_INTERNAL_REPO_DIR):/home/protoc/source renderedtext/protoc:$(RT_PROTOC_IMG_VSN) protoc -I /home/protoc/source -I /home/protoc/source/include --elixir_out=plugins=grpc:$(RELATIVE_INTERNAL_PB_OUTPUT_DIR) --plugin=/root/.mix/escripts/protoc-gen-elixir /home/protoc/source/include/google/rpc/code.proto
docker run --rm -v $(PWD):/home/protoc/code -v $(TMP_INTERNAL_REPO_DIR):/home/protoc/source renderedtext/protoc:$(RT_PROTOC_IMG_VSN) protoc -I /home/protoc/source -I /home/protoc/source/include --elixir_out=plugins=grpc:$(RELATIVE_INTERNAL_PB_OUTPUT_DIR) --plugin=/root/.mix/escripts/protoc-gen-elixir /home/protoc/source/gofer.switch.proto
+ docker run --rm -v $(PWD):/home/protoc/code -v $(TMP_INTERNAL_REPO_DIR):/home/protoc/source renderedtext/protoc:$(RT_PROTOC_IMG_VSN) protoc -I /home/protoc/source -I /home/protoc/source/include --elixir_out=plugins=grpc:$(RELATIVE_INTERNAL_PB_OUTPUT_DIR) --plugin=/root/.mix/escripts/protoc-gen-elixir /home/protoc/source/gofer.dt.proto
docker run --rm -v $(PWD):/home/protoc/code -v $(TMP_INTERNAL_REPO_DIR):/home/protoc/source renderedtext/protoc:$(RT_PROTOC_IMG_VSN) protoc -I /home/protoc/source -I /home/protoc/source/include --elixir_out=plugins=grpc:$(RELATIVE_INTERNAL_PB_OUTPUT_DIR) --plugin=/root/.mix/escripts/protoc-gen-elixir /home/protoc/source/organization.proto
docker run --rm -v $(PWD):/home/protoc/code -v $(TMP_INTERNAL_REPO_DIR):/home/protoc/source renderedtext/protoc:$(RT_PROTOC_IMG_VSN) protoc -I /home/protoc/source -I /home/protoc/source/include --elixir_out=plugins=grpc:$(RELATIVE_INTERNAL_PB_OUTPUT_DIR) --plugin=/root/.mix/escripts/protoc-gen-elixir /home/protoc/source/paparazzo.snapshot.proto
docker run --rm -v $(PWD):/home/protoc/code -v $(TMP_INTERNAL_REPO_DIR):/home/protoc/source renderedtext/protoc:$(RT_PROTOC_IMG_VSN) protoc -I /home/protoc/source -I /home/protoc/source/include --elixir_out=plugins=grpc:$(RELATIVE_INTERNAL_PB_OUTPUT_DIR) --plugin=/root/.mix/escripts/protoc-gen-elixir /home/protoc/source/plumber.pipeline.proto
diff --git a/plumber/proto/lib/internal_api/artifacthub.pb.ex b/plumber/proto/lib/internal_api/artifacthub.pb.ex
index 207643f39..d952ea7c3 100644
--- a/plumber/proto/lib/internal_api/artifacthub.pb.ex
+++ b/plumber/proto/lib/internal_api/artifacthub.pb.ex
@@ -37,20 +37,23 @@ defmodule InternalApi.Artifacthub.RetentionPolicy do
:last_cleaned_at
]
- field :project_level_retention_policies, 1,
+ field(:project_level_retention_policies, 1,
repeated: true,
type: InternalApi.Artifacthub.RetentionPolicy.RetentionPolicyRule
+ )
- field :workflow_level_retention_policies, 2,
+ field(:workflow_level_retention_policies, 2,
repeated: true,
type: InternalApi.Artifacthub.RetentionPolicy.RetentionPolicyRule
+ )
- field :job_level_retention_policies, 3,
+ field(:job_level_retention_policies, 3,
repeated: true,
type: InternalApi.Artifacthub.RetentionPolicy.RetentionPolicyRule
+ )
- field :scheduled_for_cleaning_at, 4, type: Google.Protobuf.Timestamp
- field :last_cleaned_at, 5, type: Google.Protobuf.Timestamp
+ field(:scheduled_for_cleaning_at, 4, type: Google.Protobuf.Timestamp)
+ field(:last_cleaned_at, 5, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.Artifacthub.RetentionPolicy.RetentionPolicyRule do
@@ -63,8 +66,8 @@ defmodule InternalApi.Artifacthub.RetentionPolicy.RetentionPolicyRule do
}
defstruct [:selector, :age]
- field :selector, 1, type: :string
- field :age, 2, type: :int64
+ field(:selector, 1, type: :string)
+ field(:age, 2, type: :int64)
end
defmodule InternalApi.Artifacthub.UpdateRetentionPolicyRequest do
@@ -77,8 +80,8 @@ defmodule InternalApi.Artifacthub.UpdateRetentionPolicyRequest do
}
defstruct [:artifact_id, :retention_policy]
- field :artifact_id, 1, type: :string
- field :retention_policy, 2, type: InternalApi.Artifacthub.RetentionPolicy
+ field(:artifact_id, 1, type: :string)
+ field(:retention_policy, 2, type: InternalApi.Artifacthub.RetentionPolicy)
end
defmodule InternalApi.Artifacthub.UpdateRetentionPolicyResponse do
@@ -90,7 +93,7 @@ defmodule InternalApi.Artifacthub.UpdateRetentionPolicyResponse do
}
defstruct [:retention_policy]
- field :retention_policy, 1, type: InternalApi.Artifacthub.RetentionPolicy
+ field(:retention_policy, 1, type: InternalApi.Artifacthub.RetentionPolicy)
end
defmodule InternalApi.Artifacthub.CreateRequest do
@@ -103,8 +106,8 @@ defmodule InternalApi.Artifacthub.CreateRequest do
}
defstruct [:request_token, :retention_policy]
- field :request_token, 1, type: :string
- field :retention_policy, 2, type: InternalApi.Artifacthub.RetentionPolicy
+ field(:request_token, 1, type: :string)
+ field(:retention_policy, 2, type: InternalApi.Artifacthub.RetentionPolicy)
end
defmodule InternalApi.Artifacthub.CreateResponse do
@@ -116,7 +119,7 @@ defmodule InternalApi.Artifacthub.CreateResponse do
}
defstruct [:artifact]
- field :artifact, 1, type: InternalApi.Artifacthub.Artifact
+ field(:artifact, 1, type: InternalApi.Artifacthub.Artifact)
end
defmodule InternalApi.Artifacthub.DescribeRequest do
@@ -129,8 +132,8 @@ defmodule InternalApi.Artifacthub.DescribeRequest do
}
defstruct [:artifact_id, :include_retention_policy]
- field :artifact_id, 1, type: :string
- field :include_retention_policy, 2, type: :bool
+ field(:artifact_id, 1, type: :string)
+ field(:include_retention_policy, 2, type: :bool)
end
defmodule InternalApi.Artifacthub.DescribeResponse do
@@ -143,8 +146,8 @@ defmodule InternalApi.Artifacthub.DescribeResponse do
}
defstruct [:artifact, :retention_policy]
- field :artifact, 1, type: InternalApi.Artifacthub.Artifact
- field :retention_policy, 2, type: InternalApi.Artifacthub.RetentionPolicy
+ field(:artifact, 1, type: InternalApi.Artifacthub.Artifact)
+ field(:retention_policy, 2, type: InternalApi.Artifacthub.RetentionPolicy)
end
defmodule InternalApi.Artifacthub.DestroyRequest do
@@ -156,7 +159,7 @@ defmodule InternalApi.Artifacthub.DestroyRequest do
}
defstruct [:artifact_id]
- field :artifact_id, 1, type: :string
+ field(:artifact_id, 1, type: :string)
end
defmodule InternalApi.Artifacthub.DestroyResponse do
@@ -172,12 +175,14 @@ defmodule InternalApi.Artifacthub.ListPathRequest do
@type t :: %__MODULE__{
artifact_id: String.t(),
- path: String.t()
+ path: String.t(),
+ unwrap_directories: boolean
}
- defstruct [:artifact_id, :path]
+ defstruct [:artifact_id, :path, :unwrap_directories]
- field :artifact_id, 1, type: :string
- field :path, 2, type: :string
+ field(:artifact_id, 1, type: :string)
+ field(:path, 2, type: :string)
+ field(:unwrap_directories, 3, type: :bool)
end
defmodule InternalApi.Artifacthub.ListPathResponse do
@@ -189,7 +194,7 @@ defmodule InternalApi.Artifacthub.ListPathResponse do
}
defstruct [:items]
- field :items, 1, repeated: true, type: InternalApi.Artifacthub.ListItem
+ field(:items, 1, repeated: true, type: InternalApi.Artifacthub.ListItem)
end
defmodule InternalApi.Artifacthub.DeletePathRequest do
@@ -202,8 +207,8 @@ defmodule InternalApi.Artifacthub.DeletePathRequest do
}
defstruct [:artifact_id, :path]
- field :artifact_id, 1, type: :string
- field :path, 2, type: :string
+ field(:artifact_id, 1, type: :string)
+ field(:path, 2, type: :string)
end
defmodule InternalApi.Artifacthub.DeletePathResponse do
@@ -238,9 +243,9 @@ defmodule InternalApi.Artifacthub.GetSignedURLRequest do
}
defstruct [:artifact_id, :path, :method]
- field :artifact_id, 1, type: :string
- field :path, 2, type: :string
- field :method, 3, type: :string
+ field(:artifact_id, 1, type: :string)
+ field(:path, 2, type: :string)
+ field(:method, 3, type: :string)
end
defmodule InternalApi.Artifacthub.GetSignedURLResponse do
@@ -252,7 +257,7 @@ defmodule InternalApi.Artifacthub.GetSignedURLResponse do
}
defstruct [:url]
- field :url, 1, type: :string
+ field(:url, 1, type: :string)
end
defmodule InternalApi.Artifacthub.ListBucketsRequest do
@@ -264,7 +269,7 @@ defmodule InternalApi.Artifacthub.ListBucketsRequest do
}
defstruct [:ids]
- field :ids, 1, repeated: true, type: :string
+ field(:ids, 1, repeated: true, type: :string)
end
defmodule InternalApi.Artifacthub.ListBucketsResponse do
@@ -276,10 +281,11 @@ defmodule InternalApi.Artifacthub.ListBucketsResponse do
}
defstruct [:bucket_names_for_ids]
- field :bucket_names_for_ids, 1,
+ field(:bucket_names_for_ids, 1,
repeated: true,
type: InternalApi.Artifacthub.ListBucketsResponse.BucketNamesForIdsEntry,
map: true
+ )
end
defmodule InternalApi.Artifacthub.ListBucketsResponse.BucketNamesForIdsEntry do
@@ -292,8 +298,8 @@ defmodule InternalApi.Artifacthub.ListBucketsResponse.BucketNamesForIdsEntry do
}
defstruct [:key, :value]
- field :key, 1, type: :string
- field :value, 2, type: :string
+ field(:key, 1, type: :string)
+ field(:value, 2, type: :string)
end
defmodule InternalApi.Artifacthub.CountArtifactsRequest do
@@ -307,18 +313,18 @@ defmodule InternalApi.Artifacthub.CountArtifactsRequest do
}
defstruct [:category, :category_id, :artifact_id]
- field :category, 1, type: InternalApi.Artifacthub.CountArtifactsRequest.Category, enum: true
- field :category_id, 2, type: :string
- field :artifact_id, 3, type: :string
+ field(:category, 1, type: InternalApi.Artifacthub.CountArtifactsRequest.Category, enum: true)
+ field(:category_id, 2, type: :string)
+ field(:artifact_id, 3, type: :string)
end
defmodule InternalApi.Artifacthub.CountArtifactsRequest.Category do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :PROJECT, 0
- field :WORKFLOW, 1
- field :JOB, 2
+ field(:PROJECT, 0)
+ field(:WORKFLOW, 1)
+ field(:JOB, 2)
end
defmodule InternalApi.Artifacthub.CountArtifactsResponse do
@@ -330,7 +336,7 @@ defmodule InternalApi.Artifacthub.CountArtifactsResponse do
}
defstruct [:artifact_count]
- field :artifact_count, 5, type: :int32
+ field(:artifact_count, 5, type: :int32)
end
defmodule InternalApi.Artifacthub.CountBucketsRequest do
@@ -349,7 +355,7 @@ defmodule InternalApi.Artifacthub.CountBucketsResponse do
}
defstruct [:bucket_count]
- field :bucket_count, 1, type: :int32
+ field(:bucket_count, 1, type: :int32)
end
defmodule InternalApi.Artifacthub.UpdateCORSRequest do
@@ -361,7 +367,7 @@ defmodule InternalApi.Artifacthub.UpdateCORSRequest do
}
defstruct [:bucket_name]
- field :bucket_name, 1, type: :string
+ field(:bucket_name, 1, type: :string)
end
defmodule InternalApi.Artifacthub.UpdateCORSResponse do
@@ -373,7 +379,7 @@ defmodule InternalApi.Artifacthub.UpdateCORSResponse do
}
defstruct [:next_bucket_name]
- field :next_bucket_name, 1, type: :string
+ field(:next_bucket_name, 1, type: :string)
end
defmodule InternalApi.Artifacthub.ListItem do
@@ -386,8 +392,8 @@ defmodule InternalApi.Artifacthub.ListItem do
}
defstruct [:name, :is_directory]
- field :name, 1, type: :string
- field :is_directory, 2, type: :bool
+ field(:name, 1, type: :string)
+ field(:is_directory, 2, type: :bool)
end
defmodule InternalApi.Artifacthub.Artifact do
@@ -401,9 +407,9 @@ defmodule InternalApi.Artifacthub.Artifact do
}
defstruct [:id, :bucket_name, :artifact_token]
- field :id, 1, type: :string
- field :bucket_name, 2, type: :string
- field :artifact_token, 4, type: :string
+ field(:id, 1, type: :string)
+ field(:bucket_name, 2, type: :string)
+ field(:artifact_token, 4, type: :string)
end
defmodule InternalApi.Artifacthub.GenerateTokenRequest do
@@ -419,11 +425,11 @@ defmodule InternalApi.Artifacthub.GenerateTokenRequest do
}
defstruct [:artifact_id, :job_id, :workflow_id, :project_id, :duration]
- field :artifact_id, 1, type: :string
- field :job_id, 2, type: :string
- field :workflow_id, 3, type: :string
- field :project_id, 4, type: :string
- field :duration, 5, type: :uint32
+ field(:artifact_id, 1, type: :string)
+ field(:job_id, 2, type: :string)
+ field(:workflow_id, 3, type: :string)
+ field(:project_id, 4, type: :string)
+ field(:duration, 5, type: :uint32)
end
defmodule InternalApi.Artifacthub.GenerateTokenResponse do
@@ -435,55 +441,84 @@ defmodule InternalApi.Artifacthub.GenerateTokenResponse do
}
defstruct [:token]
- field :token, 1, type: :string
+ field(:token, 1, type: :string)
end
defmodule InternalApi.Artifacthub.ArtifactService.Service do
@moduledoc false
use GRPC.Service, name: "InternalApi.Artifacthub.ArtifactService"
- rpc :HealthCheck,
- InternalApi.Artifacthub.HealthCheckRequest,
- InternalApi.Artifacthub.HealthCheckResponse
-
- rpc :Create, InternalApi.Artifacthub.CreateRequest, InternalApi.Artifacthub.CreateResponse
- rpc :Describe, InternalApi.Artifacthub.DescribeRequest, InternalApi.Artifacthub.DescribeResponse
- rpc :Destroy, InternalApi.Artifacthub.DestroyRequest, InternalApi.Artifacthub.DestroyResponse
- rpc :ListPath, InternalApi.Artifacthub.ListPathRequest, InternalApi.Artifacthub.ListPathResponse
-
- rpc :DeletePath,
- InternalApi.Artifacthub.DeletePathRequest,
- InternalApi.Artifacthub.DeletePathResponse
-
- rpc :UpdateRetentionPolicy,
- InternalApi.Artifacthub.UpdateRetentionPolicyRequest,
- InternalApi.Artifacthub.UpdateRetentionPolicyResponse
-
- rpc :GenerateToken,
- InternalApi.Artifacthub.GenerateTokenRequest,
- InternalApi.Artifacthub.GenerateTokenResponse
-
- rpc :Cleanup, InternalApi.Artifacthub.CleanupRequest, InternalApi.Artifacthub.CleanupResponse
-
- rpc :GetSignedURL,
- InternalApi.Artifacthub.GetSignedURLRequest,
- InternalApi.Artifacthub.GetSignedURLResponse
-
- rpc :ListBuckets,
- InternalApi.Artifacthub.ListBucketsRequest,
- InternalApi.Artifacthub.ListBucketsResponse
-
- rpc :CountArtifacts,
- InternalApi.Artifacthub.CountArtifactsRequest,
- InternalApi.Artifacthub.CountArtifactsResponse
-
- rpc :CountBuckets,
- InternalApi.Artifacthub.CountBucketsRequest,
- InternalApi.Artifacthub.CountBucketsResponse
-
- rpc :UpdateCORS,
- InternalApi.Artifacthub.UpdateCORSRequest,
- InternalApi.Artifacthub.UpdateCORSResponse
+ rpc(
+ :HealthCheck,
+ InternalApi.Artifacthub.HealthCheckRequest,
+ InternalApi.Artifacthub.HealthCheckResponse
+ )
+
+ rpc(:Create, InternalApi.Artifacthub.CreateRequest, InternalApi.Artifacthub.CreateResponse)
+
+ rpc(
+ :Describe,
+ InternalApi.Artifacthub.DescribeRequest,
+ InternalApi.Artifacthub.DescribeResponse
+ )
+
+ rpc(:Destroy, InternalApi.Artifacthub.DestroyRequest, InternalApi.Artifacthub.DestroyResponse)
+
+ rpc(
+ :ListPath,
+ InternalApi.Artifacthub.ListPathRequest,
+ InternalApi.Artifacthub.ListPathResponse
+ )
+
+ rpc(
+ :DeletePath,
+ InternalApi.Artifacthub.DeletePathRequest,
+ InternalApi.Artifacthub.DeletePathResponse
+ )
+
+ rpc(
+ :UpdateRetentionPolicy,
+ InternalApi.Artifacthub.UpdateRetentionPolicyRequest,
+ InternalApi.Artifacthub.UpdateRetentionPolicyResponse
+ )
+
+ rpc(
+ :GenerateToken,
+ InternalApi.Artifacthub.GenerateTokenRequest,
+ InternalApi.Artifacthub.GenerateTokenResponse
+ )
+
+ rpc(:Cleanup, InternalApi.Artifacthub.CleanupRequest, InternalApi.Artifacthub.CleanupResponse)
+
+ rpc(
+ :GetSignedURL,
+ InternalApi.Artifacthub.GetSignedURLRequest,
+ InternalApi.Artifacthub.GetSignedURLResponse
+ )
+
+ rpc(
+ :ListBuckets,
+ InternalApi.Artifacthub.ListBucketsRequest,
+ InternalApi.Artifacthub.ListBucketsResponse
+ )
+
+ rpc(
+ :CountArtifacts,
+ InternalApi.Artifacthub.CountArtifactsRequest,
+ InternalApi.Artifacthub.CountArtifactsResponse
+ )
+
+ rpc(
+ :CountBuckets,
+ InternalApi.Artifacthub.CountBucketsRequest,
+ InternalApi.Artifacthub.CountBucketsResponse
+ )
+
+ rpc(
+ :UpdateCORS,
+ InternalApi.Artifacthub.UpdateCORSRequest,
+ InternalApi.Artifacthub.UpdateCORSResponse
+ )
end
defmodule InternalApi.Artifacthub.ArtifactService.Stub do
diff --git a/plumber/proto/lib/internal_api/build.pb.ex b/plumber/proto/lib/internal_api/build.pb.ex
index d2c17819e..928b6e6f0 100644
--- a/plumber/proto/lib/internal_api/build.pb.ex
+++ b/plumber/proto/lib/internal_api/build.pb.ex
@@ -11,11 +11,11 @@ defmodule InternalApi.Build.ScheduleRequest do
}
defstruct [:build, :build_request_id, :ppl_id, :hook_id, :wf_id]
- field :build, 1, type: InternalApi.Build.Build
- field :build_request_id, 5, type: :string
- field :ppl_id, 6, type: :string
- field :hook_id, 7, type: :string
- field :wf_id, 8, type: :string
+ field(:build, 1, type: InternalApi.Build.Build)
+ field(:build_request_id, 5, type: :string)
+ field(:ppl_id, 6, type: :string)
+ field(:hook_id, 7, type: :string)
+ field(:wf_id, 8, type: :string)
end
defmodule InternalApi.Build.Build do
@@ -28,8 +28,8 @@ defmodule InternalApi.Build.Build do
}
defstruct [:jobs, :boosters]
- field :jobs, 1, repeated: true, type: InternalApi.Build.Job
- field :boosters, 2, repeated: true, type: InternalApi.Build.Booster
+ field(:jobs, 1, repeated: true, type: InternalApi.Build.Job)
+ field(:boosters, 2, repeated: true, type: InternalApi.Build.Booster)
end
defmodule InternalApi.Build.Job do
@@ -61,16 +61,16 @@ defmodule InternalApi.Build.Job do
:epilogue_commands
]
- field :name, 2, type: :string
- field :semaphore_image, 8, type: :string
- field :agent, 12, type: InternalApi.Build.Agent
- field :ppl_env_variables, 10, repeated: true, type: InternalApi.Build.EnvVariable
- field :env_variables, 3, repeated: true, type: InternalApi.Build.EnvVariable
- field :secrets, 11, repeated: true, type: InternalApi.Build.Secret
- field :ppl_commands, 9, repeated: true, type: :string
- field :prologue_commands, 5, repeated: true, type: :string
- field :commands, 1, repeated: true, type: :string
- field :epilogue_commands, 6, repeated: true, type: :string
+ field(:name, 2, type: :string)
+ field(:semaphore_image, 8, type: :string)
+ field(:agent, 12, type: InternalApi.Build.Agent)
+ field(:ppl_env_variables, 10, repeated: true, type: InternalApi.Build.EnvVariable)
+ field(:env_variables, 3, repeated: true, type: InternalApi.Build.EnvVariable)
+ field(:secrets, 11, repeated: true, type: InternalApi.Build.Secret)
+ field(:ppl_commands, 9, repeated: true, type: :string)
+ field(:prologue_commands, 5, repeated: true, type: :string)
+ field(:commands, 1, repeated: true, type: :string)
+ field(:epilogue_commands, 6, repeated: true, type: :string)
end
defmodule InternalApi.Build.Agent do
@@ -82,7 +82,7 @@ defmodule InternalApi.Build.Agent do
}
defstruct [:machine]
- field :machine, 1, type: InternalApi.Build.Agent.Machine
+ field(:machine, 1, type: InternalApi.Build.Agent.Machine)
end
defmodule InternalApi.Build.Agent.Machine do
@@ -95,8 +95,8 @@ defmodule InternalApi.Build.Agent.Machine do
}
defstruct [:type, :os_image]
- field :type, 1, type: :string
- field :os_image, 2, type: :string
+ field(:type, 1, type: :string)
+ field(:os_image, 2, type: :string)
end
defmodule InternalApi.Build.EnvVariable do
@@ -109,8 +109,8 @@ defmodule InternalApi.Build.EnvVariable do
}
defstruct [:key, :value]
- field :key, 1, type: :string
- field :value, 2, type: :string
+ field(:key, 1, type: :string)
+ field(:value, 2, type: :string)
end
defmodule InternalApi.Build.Booster do
@@ -142,24 +142,24 @@ defmodule InternalApi.Build.Booster do
:ppl_env_variables
]
- field :name, 1, type: :string
- field :job_count, 2, type: :int32
- field :type, 3, type: InternalApi.Build.Booster.Type, enum: true
- field :env_variables, 4, repeated: true, type: InternalApi.Build.EnvVariable
- field :prologue_commands, 6, repeated: true, type: :string
- field :epilogue_commands, 7, repeated: true, type: :string
- field :secrets, 12, repeated: true, type: InternalApi.Build.Secret
- field :semaphore_image, 9, type: :string
- field :ppl_commands, 10, repeated: true, type: :string
- field :ppl_env_variables, 11, repeated: true, type: InternalApi.Build.EnvVariable
+ field(:name, 1, type: :string)
+ field(:job_count, 2, type: :int32)
+ field(:type, 3, type: InternalApi.Build.Booster.Type, enum: true)
+ field(:env_variables, 4, repeated: true, type: InternalApi.Build.EnvVariable)
+ field(:prologue_commands, 6, repeated: true, type: :string)
+ field(:epilogue_commands, 7, repeated: true, type: :string)
+ field(:secrets, 12, repeated: true, type: InternalApi.Build.Secret)
+ field(:semaphore_image, 9, type: :string)
+ field(:ppl_commands, 10, repeated: true, type: :string)
+ field(:ppl_env_variables, 11, repeated: true, type: InternalApi.Build.EnvVariable)
end
defmodule InternalApi.Build.Booster.Type do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :RSPEC, 0
- field :CUCUMBER, 1
+ field(:RSPEC, 0)
+ field(:CUCUMBER, 1)
end
defmodule InternalApi.Build.Secret do
@@ -173,9 +173,9 @@ defmodule InternalApi.Build.Secret do
}
defstruct [:name, :env_var_names, :config_file_paths]
- field :name, 1, type: :string
- field :env_var_names, 2, repeated: true, type: :string
- field :config_file_paths, 3, repeated: true, type: :string
+ field(:name, 1, type: :string)
+ field(:env_var_names, 2, repeated: true, type: :string)
+ field(:config_file_paths, 3, repeated: true, type: :string)
end
defmodule InternalApi.Build.ScheduleResponse do
@@ -188,8 +188,8 @@ defmodule InternalApi.Build.ScheduleResponse do
}
defstruct [:response_status, :status]
- field :response_status, 2, type: InternalApi.Build.ResponseStatus
- field :status, 3, type: InternalApi.ResponseStatus
+ field(:response_status, 2, type: InternalApi.Build.ResponseStatus)
+ field(:status, 3, type: InternalApi.ResponseStatus)
end
defmodule InternalApi.Build.ResponseStatus do
@@ -202,16 +202,16 @@ defmodule InternalApi.Build.ResponseStatus do
}
defstruct [:code, :message]
- field :code, 1, type: InternalApi.Build.ResponseStatus.ResponseCode, enum: true
- field :message, 2, type: :string
+ field(:code, 1, type: InternalApi.Build.ResponseStatus.ResponseCode, enum: true)
+ field(:message, 2, type: :string)
end
defmodule InternalApi.Build.ResponseStatus.ResponseCode do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :OK, 0
- field :BAD_PARAM, 2
+ field(:OK, 0)
+ field(:BAD_PARAM, 2)
end
defmodule InternalApi.Build.DescribeRequest do
@@ -223,7 +223,7 @@ defmodule InternalApi.Build.DescribeRequest do
}
defstruct [:build_request_id]
- field :build_request_id, 2, type: :string
+ field(:build_request_id, 2, type: :string)
end
defmodule InternalApi.Build.DescribeManyRequest do
@@ -235,7 +235,7 @@ defmodule InternalApi.Build.DescribeManyRequest do
}
defstruct [:build_request_ids]
- field :build_request_ids, 1, repeated: true, type: :string
+ field(:build_request_ids, 1, repeated: true, type: :string)
end
defmodule InternalApi.Build.DescribeResponse do
@@ -250,10 +250,10 @@ defmodule InternalApi.Build.DescribeResponse do
}
defstruct [:build_status, :response_status, :build, :status]
- field :build_status, 1, type: InternalApi.Build.ExecutionStatus
- field :response_status, 3, type: InternalApi.Build.ResponseStatus
- field :build, 4, type: InternalApi.Build.BuildDescription
- field :status, 5, type: InternalApi.ResponseStatus
+ field(:build_status, 1, type: InternalApi.Build.ExecutionStatus)
+ field(:response_status, 3, type: InternalApi.Build.ResponseStatus)
+ field(:build, 4, type: InternalApi.Build.BuildDescription)
+ field(:status, 5, type: InternalApi.ResponseStatus)
end
defmodule InternalApi.Build.DescribeManyResponse do
@@ -267,9 +267,9 @@ defmodule InternalApi.Build.DescribeManyResponse do
}
defstruct [:response_status, :builds, :status]
- field :response_status, 1, type: InternalApi.Build.ResponseStatus
- field :builds, 2, repeated: true, type: InternalApi.Build.BuildDescription
- field :status, 3, type: InternalApi.ResponseStatus
+ field(:response_status, 1, type: InternalApi.Build.ResponseStatus)
+ field(:builds, 2, repeated: true, type: InternalApi.Build.BuildDescription)
+ field(:status, 3, type: InternalApi.ResponseStatus)
end
defmodule InternalApi.Build.BuildDescription do
@@ -284,10 +284,10 @@ defmodule InternalApi.Build.BuildDescription do
}
defstruct [:build_request_id, :status, :result, :jobs]
- field :build_request_id, 1, type: :string
- field :status, 2, type: InternalApi.Build.BuildDescription.Status, enum: true
- field :result, 3, type: InternalApi.Build.BuildDescription.Result, enum: true
- field :jobs, 4, repeated: true, type: InternalApi.Build.BuildDescription.Job
+ field(:build_request_id, 1, type: :string)
+ field(:status, 2, type: InternalApi.Build.BuildDescription.Status, enum: true)
+ field(:result, 3, type: InternalApi.Build.BuildDescription.Result, enum: true)
+ field(:jobs, 4, repeated: true, type: InternalApi.Build.BuildDescription.Job)
end
defmodule InternalApi.Build.BuildDescription.Job do
@@ -303,50 +303,50 @@ defmodule InternalApi.Build.BuildDescription.Job do
}
defstruct [:job_id, :status, :result, :name, :index]
- field :job_id, 1, type: :string
- field :status, 2, type: InternalApi.Build.BuildDescription.Job.Status, enum: true
- field :result, 3, type: InternalApi.Build.BuildDescription.Job.Result, enum: true
- field :name, 4, type: :string
- field :index, 5, type: :int32
+ field(:job_id, 1, type: :string)
+ field(:status, 2, type: InternalApi.Build.BuildDescription.Job.Status, enum: true)
+ field(:result, 3, type: InternalApi.Build.BuildDescription.Job.Result, enum: true)
+ field(:name, 4, type: :string)
+ field(:index, 5, type: :int32)
end
defmodule InternalApi.Build.BuildDescription.Job.Status do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :ENQUEUED, 0
- field :RUNNING, 1
- field :STOPPING, 2
- field :FINISHED, 3
+ field(:ENQUEUED, 0)
+ field(:RUNNING, 1)
+ field(:STOPPING, 2)
+ field(:FINISHED, 3)
end
defmodule InternalApi.Build.BuildDescription.Job.Result do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :PASSED, 0
- field :FAILED, 1
- field :STOPPED, 2
+ field(:PASSED, 0)
+ field(:FAILED, 1)
+ field(:STOPPED, 2)
end
defmodule InternalApi.Build.BuildDescription.Status do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :ENQUEUED, 0
- field :RUNNING, 1
- field :STOPPING, 2
- field :FINISHED, 3
- field :DELETED, 4
+ field(:ENQUEUED, 0)
+ field(:RUNNING, 1)
+ field(:STOPPING, 2)
+ field(:FINISHED, 3)
+ field(:DELETED, 4)
end
defmodule InternalApi.Build.BuildDescription.Result do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :PASSED, 0
- field :FAILED, 1
- field :STOPPED, 2
+ field(:PASSED, 0)
+ field(:FAILED, 1)
+ field(:STOPPED, 2)
end
defmodule InternalApi.Build.ExecutionStatus do
@@ -360,29 +360,29 @@ defmodule InternalApi.Build.ExecutionStatus do
}
defstruct [:status, :result, :name]
- field :status, 1, type: InternalApi.Build.ExecutionStatus.Status, enum: true
- field :result, 2, type: InternalApi.Build.ExecutionStatus.Result, enum: true
- field :name, 3, type: :string
+ field(:status, 1, type: InternalApi.Build.ExecutionStatus.Status, enum: true)
+ field(:result, 2, type: InternalApi.Build.ExecutionStatus.Result, enum: true)
+ field(:name, 3, type: :string)
end
defmodule InternalApi.Build.ExecutionStatus.Status do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :ENQUEUED, 0
- field :RUNNING, 1
- field :STOPPING, 2
- field :FINISHED, 3
- field :DELETED, 4
+ field(:ENQUEUED, 0)
+ field(:RUNNING, 1)
+ field(:STOPPING, 2)
+ field(:FINISHED, 3)
+ field(:DELETED, 4)
end
defmodule InternalApi.Build.ExecutionStatus.Result do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :PASSED, 0
- field :FAILED, 1
- field :STOPPED, 2
+ field(:PASSED, 0)
+ field(:FAILED, 1)
+ field(:STOPPED, 2)
end
defmodule InternalApi.Build.VersionRequest do
@@ -401,7 +401,7 @@ defmodule InternalApi.Build.VersionResponse do
}
defstruct [:version]
- field :version, 1, type: :string
+ field(:version, 1, type: :string)
end
defmodule InternalApi.Build.TerminateRequest do
@@ -413,7 +413,7 @@ defmodule InternalApi.Build.TerminateRequest do
}
defstruct [:build_request_id]
- field :build_request_id, 1, type: :string
+ field(:build_request_id, 1, type: :string)
end
defmodule InternalApi.Build.TerminateResponse do
@@ -426,8 +426,8 @@ defmodule InternalApi.Build.TerminateResponse do
}
defstruct [:response_status, :status]
- field :response_status, 1, type: InternalApi.Build.ResponseStatus
- field :status, 2, type: InternalApi.ResponseStatus
+ field(:response_status, 1, type: InternalApi.Build.ResponseStatus)
+ field(:status, 2, type: InternalApi.ResponseStatus)
end
defmodule InternalApi.Build.BuildStarted do
@@ -440,8 +440,8 @@ defmodule InternalApi.Build.BuildStarted do
}
defstruct [:build_request_id, :timestamp]
- field :build_request_id, 1, type: :string
- field :timestamp, 2, type: Google.Protobuf.Timestamp
+ field(:build_request_id, 1, type: :string)
+ field(:timestamp, 2, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.Build.BuildFinished do
@@ -454,19 +454,25 @@ defmodule InternalApi.Build.BuildFinished do
}
defstruct [:build_request_id, :timestamp]
- field :build_request_id, 1, type: :string
- field :timestamp, 2, type: Google.Protobuf.Timestamp
+ field(:build_request_id, 1, type: :string)
+ field(:timestamp, 2, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.Build.BuildService.Service do
@moduledoc false
use GRPC.Service, name: "InternalApi.Build.BuildService"
- rpc :Schedule, InternalApi.Build.ScheduleRequest, InternalApi.Build.ScheduleResponse
- rpc :Describe, InternalApi.Build.DescribeRequest, InternalApi.Build.DescribeResponse
- rpc :DescribeMany, InternalApi.Build.DescribeManyRequest, InternalApi.Build.DescribeManyResponse
- rpc :Version, InternalApi.Build.VersionRequest, InternalApi.Build.VersionResponse
- rpc :Terminate, InternalApi.Build.TerminateRequest, InternalApi.Build.TerminateResponse
+ rpc(:Schedule, InternalApi.Build.ScheduleRequest, InternalApi.Build.ScheduleResponse)
+ rpc(:Describe, InternalApi.Build.DescribeRequest, InternalApi.Build.DescribeResponse)
+
+ rpc(
+ :DescribeMany,
+ InternalApi.Build.DescribeManyRequest,
+ InternalApi.Build.DescribeManyResponse
+ )
+
+ rpc(:Version, InternalApi.Build.VersionRequest, InternalApi.Build.VersionResponse)
+ rpc(:Terminate, InternalApi.Build.TerminateRequest, InternalApi.Build.TerminateResponse)
end
defmodule InternalApi.Build.BuildService.Stub do
diff --git a/plumber/proto/lib/internal_api/gofer.dt.pb.ex b/plumber/proto/lib/internal_api/gofer.dt.pb.ex
new file mode 100644
index 000000000..03f671496
--- /dev/null
+++ b/plumber/proto/lib/internal_api/gofer.dt.pb.ex
@@ -0,0 +1,588 @@
+defmodule InternalApi.Gofer.DeploymentTargets.ListRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ project_id: String.t(),
+ requester_id: String.t()
+ }
+ defstruct [:project_id, :requester_id]
+
+ field(:project_id, 1, type: :string)
+ field(:requester_id, 2, type: :string)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.ListResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ targets: [InternalApi.Gofer.DeploymentTargets.DeploymentTarget.t()]
+ }
+ defstruct [:targets]
+
+ field(:targets, 1, repeated: true, type: InternalApi.Gofer.DeploymentTargets.DeploymentTarget)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.DescribeRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ project_id: String.t(),
+ target_name: String.t(),
+ target_id: String.t()
+ }
+ defstruct [:project_id, :target_name, :target_id]
+
+ field(:project_id, 1, type: :string)
+ field(:target_name, 2, type: :string)
+ field(:target_id, 3, type: :string)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.DescribeResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ target: InternalApi.Gofer.DeploymentTargets.DeploymentTarget.t()
+ }
+ defstruct [:target]
+
+ field(:target, 1, type: InternalApi.Gofer.DeploymentTargets.DeploymentTarget)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.VerifyRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ target_id: String.t(),
+ triggerer: String.t(),
+ git_ref_type: integer,
+ git_ref_label: String.t()
+ }
+ defstruct [:target_id, :triggerer, :git_ref_type, :git_ref_label]
+
+ field(:target_id, 1, type: :string)
+ field(:triggerer, 2, type: :string)
+
+ field(:git_ref_type, 3,
+ type: InternalApi.Gofer.DeploymentTargets.VerifyRequest.GitRefType,
+ enum: true
+ )
+
+ field(:git_ref_label, 4, type: :string)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.VerifyRequest.GitRefType do
+ @moduledoc false
+ use Protobuf, enum: true, syntax: :proto3
+
+ field(:BRANCH, 0)
+ field(:TAG, 1)
+ field(:PR, 2)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.VerifyResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ status: integer
+ }
+ defstruct [:status]
+
+ field(:status, 1, type: InternalApi.Gofer.DeploymentTargets.VerifyResponse.Status, enum: true)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.VerifyResponse.Status do
+ @moduledoc false
+ use Protobuf, enum: true, syntax: :proto3
+
+ field(:SYNCING_TARGET, 0)
+ field(:ACCESS_GRANTED, 1)
+ field(:BANNED_SUBJECT, 2)
+ field(:BANNED_OBJECT, 3)
+ field(:CORDONED_TARGET, 4)
+ field(:CORRUPTED_TARGET, 5)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.HistoryRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ target_id: String.t(),
+ cursor_type: integer,
+ cursor_value: non_neg_integer,
+ filters: InternalApi.Gofer.DeploymentTargets.HistoryRequest.Filters.t(),
+ requester_id: String.t()
+ }
+ defstruct [:target_id, :cursor_type, :cursor_value, :filters, :requester_id]
+
+ field(:target_id, 1, type: :string)
+
+ field(:cursor_type, 2,
+ type: InternalApi.Gofer.DeploymentTargets.HistoryRequest.CursorType,
+ enum: true
+ )
+
+ field(:cursor_value, 3, type: :uint64)
+ field(:filters, 4, type: InternalApi.Gofer.DeploymentTargets.HistoryRequest.Filters)
+ field(:requester_id, 5, type: :string)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.HistoryRequest.Filters do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ git_ref_type: String.t(),
+ git_ref_label: String.t(),
+ triggered_by: String.t(),
+ parameter1: String.t(),
+ parameter2: String.t(),
+ parameter3: String.t()
+ }
+ defstruct [:git_ref_type, :git_ref_label, :triggered_by, :parameter1, :parameter2, :parameter3]
+
+ field(:git_ref_type, 1, type: :string)
+ field(:git_ref_label, 2, type: :string)
+ field(:triggered_by, 3, type: :string)
+ field(:parameter1, 4, type: :string)
+ field(:parameter2, 5, type: :string)
+ field(:parameter3, 6, type: :string)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.HistoryRequest.CursorType do
+ @moduledoc false
+ use Protobuf, enum: true, syntax: :proto3
+
+ field(:FIRST, 0)
+ field(:AFTER, 1)
+ field(:BEFORE, 2)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.HistoryResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ deployments: [InternalApi.Gofer.DeploymentTargets.Deployment.t()],
+ cursor_before: non_neg_integer,
+ cursor_after: non_neg_integer
+ }
+ defstruct [:deployments, :cursor_before, :cursor_after]
+
+ field(:deployments, 1, repeated: true, type: InternalApi.Gofer.DeploymentTargets.Deployment)
+ field(:cursor_before, 2, type: :uint64)
+ field(:cursor_after, 3, type: :uint64)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.CordonRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ target_id: String.t(),
+ cordoned: boolean
+ }
+ defstruct [:target_id, :cordoned]
+
+ field(:target_id, 1, type: :string)
+ field(:cordoned, 2, type: :bool)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.CordonResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ target_id: String.t(),
+ cordoned: boolean
+ }
+ defstruct [:target_id, :cordoned]
+
+ field(:target_id, 1, type: :string)
+ field(:cordoned, 2, type: :bool)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.CreateRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ target: InternalApi.Gofer.DeploymentTargets.DeploymentTarget.t(),
+ secret: InternalApi.Gofer.DeploymentTargets.EncryptedSecretData.t(),
+ unique_token: String.t(),
+ requester_id: String.t()
+ }
+ defstruct [:target, :secret, :unique_token, :requester_id]
+
+ field(:target, 1, type: InternalApi.Gofer.DeploymentTargets.DeploymentTarget)
+ field(:secret, 2, type: InternalApi.Gofer.DeploymentTargets.EncryptedSecretData)
+ field(:unique_token, 3, type: :string)
+ field(:requester_id, 4, type: :string)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.CreateResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ target: InternalApi.Gofer.DeploymentTargets.DeploymentTarget.t()
+ }
+ defstruct [:target]
+
+ field(:target, 1, type: InternalApi.Gofer.DeploymentTargets.DeploymentTarget)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.UpdateRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ target: InternalApi.Gofer.DeploymentTargets.DeploymentTarget.t(),
+ secret: InternalApi.Gofer.DeploymentTargets.EncryptedSecretData.t(),
+ unique_token: String.t(),
+ requester_id: String.t()
+ }
+ defstruct [:target, :secret, :unique_token, :requester_id]
+
+ field(:target, 1, type: InternalApi.Gofer.DeploymentTargets.DeploymentTarget)
+ field(:secret, 2, type: InternalApi.Gofer.DeploymentTargets.EncryptedSecretData)
+ field(:unique_token, 3, type: :string)
+ field(:requester_id, 4, type: :string)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.UpdateResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ target: InternalApi.Gofer.DeploymentTargets.DeploymentTarget.t()
+ }
+ defstruct [:target]
+
+ field(:target, 1, type: InternalApi.Gofer.DeploymentTargets.DeploymentTarget)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.DeleteRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ target_id: String.t(),
+ requester_id: String.t(),
+ unique_token: String.t()
+ }
+ defstruct [:target_id, :requester_id, :unique_token]
+
+ field(:target_id, 1, type: :string)
+ field(:requester_id, 2, type: :string)
+ field(:unique_token, 3, type: :string)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.DeleteResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ target_id: String.t()
+ }
+ defstruct [:target_id]
+
+ field(:target_id, 1, type: :string)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.DeploymentTarget do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ id: String.t(),
+ name: String.t(),
+ description: String.t(),
+ url: String.t(),
+ organization_id: String.t(),
+ project_id: String.t(),
+ created_by: String.t(),
+ updated_by: String.t(),
+ created_at: Google.Protobuf.Timestamp.t(),
+ updated_at: Google.Protobuf.Timestamp.t(),
+ state: integer,
+ state_message: String.t(),
+ subject_rules: [InternalApi.Gofer.DeploymentTargets.SubjectRule.t()],
+ object_rules: [InternalApi.Gofer.DeploymentTargets.ObjectRule.t()],
+ last_deployment: InternalApi.Gofer.DeploymentTargets.Deployment.t(),
+ cordoned: boolean,
+ bookmark_parameter1: String.t(),
+ bookmark_parameter2: String.t(),
+ bookmark_parameter3: String.t(),
+ secret_name: String.t()
+ }
+ defstruct [
+ :id,
+ :name,
+ :description,
+ :url,
+ :organization_id,
+ :project_id,
+ :created_by,
+ :updated_by,
+ :created_at,
+ :updated_at,
+ :state,
+ :state_message,
+ :subject_rules,
+ :object_rules,
+ :last_deployment,
+ :cordoned,
+ :bookmark_parameter1,
+ :bookmark_parameter2,
+ :bookmark_parameter3,
+ :secret_name
+ ]
+
+ field(:id, 1, type: :string)
+ field(:name, 2, type: :string)
+ field(:description, 3, type: :string)
+ field(:url, 4, type: :string)
+ field(:organization_id, 5, type: :string)
+ field(:project_id, 6, type: :string)
+ field(:created_by, 7, type: :string)
+ field(:updated_by, 8, type: :string)
+ field(:created_at, 9, type: Google.Protobuf.Timestamp)
+ field(:updated_at, 10, type: Google.Protobuf.Timestamp)
+ field(:state, 11, type: InternalApi.Gofer.DeploymentTargets.DeploymentTarget.State, enum: true)
+ field(:state_message, 12, type: :string)
+ field(:subject_rules, 13, repeated: true, type: InternalApi.Gofer.DeploymentTargets.SubjectRule)
+ field(:object_rules, 14, repeated: true, type: InternalApi.Gofer.DeploymentTargets.ObjectRule)
+ field(:last_deployment, 15, type: InternalApi.Gofer.DeploymentTargets.Deployment)
+ field(:cordoned, 16, type: :bool)
+ field(:bookmark_parameter1, 17, type: :string)
+ field(:bookmark_parameter2, 18, type: :string)
+ field(:bookmark_parameter3, 19, type: :string)
+ field(:secret_name, 20, type: :string)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.DeploymentTarget.State do
+ @moduledoc false
+ use Protobuf, enum: true, syntax: :proto3
+
+ field(:SYNCING, 0)
+ field(:USABLE, 1)
+ field(:UNUSABLE, 2)
+ field(:CORDONED, 3)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.Deployment do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ id: String.t(),
+ target_id: String.t(),
+ prev_pipeline_id: String.t(),
+ pipeline_id: String.t(),
+ triggered_by: String.t(),
+ triggered_at: Google.Protobuf.Timestamp.t(),
+ state: integer,
+ state_message: String.t(),
+ switch_id: String.t(),
+ target_name: String.t(),
+ env_vars: [InternalApi.Gofer.DeploymentTargets.Deployment.EnvVar.t()],
+ can_requester_rerun: boolean
+ }
+ defstruct [
+ :id,
+ :target_id,
+ :prev_pipeline_id,
+ :pipeline_id,
+ :triggered_by,
+ :triggered_at,
+ :state,
+ :state_message,
+ :switch_id,
+ :target_name,
+ :env_vars,
+ :can_requester_rerun
+ ]
+
+ field(:id, 1, type: :string)
+ field(:target_id, 2, type: :string)
+ field(:prev_pipeline_id, 3, type: :string)
+ field(:pipeline_id, 4, type: :string)
+ field(:triggered_by, 5, type: :string)
+ field(:triggered_at, 6, type: Google.Protobuf.Timestamp)
+ field(:state, 7, type: InternalApi.Gofer.DeploymentTargets.Deployment.State, enum: true)
+ field(:state_message, 8, type: :string)
+ field(:switch_id, 9, type: :string)
+ field(:target_name, 10, type: :string)
+
+ field(:env_vars, 11,
+ repeated: true,
+ type: InternalApi.Gofer.DeploymentTargets.Deployment.EnvVar
+ )
+
+ field(:can_requester_rerun, 12, type: :bool)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.Deployment.EnvVar do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ name: String.t(),
+ value: String.t()
+ }
+ defstruct [:name, :value]
+
+ field(:name, 1, type: :string)
+ field(:value, 2, type: :string)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.Deployment.State do
+ @moduledoc false
+ use Protobuf, enum: true, syntax: :proto3
+
+ field(:PENDING, 0)
+ field(:STARTED, 1)
+ field(:FAILED, 2)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.SubjectRule do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ type: integer,
+ subject_id: String.t()
+ }
+ defstruct [:type, :subject_id]
+
+ field(:type, 1, type: InternalApi.Gofer.DeploymentTargets.SubjectRule.Type, enum: true)
+ field(:subject_id, 2, type: :string)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.SubjectRule.Type do
+ @moduledoc false
+ use Protobuf, enum: true, syntax: :proto3
+
+ field(:USER, 0)
+ field(:ROLE, 1)
+ field(:GROUP, 2)
+ field(:AUTO, 3)
+ field(:ANY, 4)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.ObjectRule do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ type: integer,
+ match_mode: integer,
+ pattern: String.t()
+ }
+ defstruct [:type, :match_mode, :pattern]
+
+ field(:type, 1, type: InternalApi.Gofer.DeploymentTargets.ObjectRule.Type, enum: true)
+ field(:match_mode, 2, type: InternalApi.Gofer.DeploymentTargets.ObjectRule.Mode, enum: true)
+ field(:pattern, 3, type: :string)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.ObjectRule.Type do
+ @moduledoc false
+ use Protobuf, enum: true, syntax: :proto3
+
+ field(:BRANCH, 0)
+ field(:TAG, 1)
+ field(:PR, 2)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.ObjectRule.Mode do
+ @moduledoc false
+ use Protobuf, enum: true, syntax: :proto3
+
+ field(:ALL, 0)
+ field(:EXACT, 1)
+ field(:REGEX, 2)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.EncryptedSecretData do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ key_id: String.t(),
+ aes256_key: String.t(),
+ init_vector: String.t(),
+ payload: String.t()
+ }
+ defstruct [:key_id, :aes256_key, :init_vector, :payload]
+
+ field(:key_id, 2, type: :string)
+ field(:aes256_key, 3, type: :string)
+ field(:init_vector, 4, type: :string)
+ field(:payload, 5, type: :string)
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.DeploymentTargets.Service do
+ @moduledoc false
+ use GRPC.Service, name: "InternalApi.Gofer.DeploymentTargets.DeploymentTargets"
+
+ rpc(
+ :List,
+ InternalApi.Gofer.DeploymentTargets.ListRequest,
+ InternalApi.Gofer.DeploymentTargets.ListResponse
+ )
+
+ rpc(
+ :Describe,
+ InternalApi.Gofer.DeploymentTargets.DescribeRequest,
+ InternalApi.Gofer.DeploymentTargets.DescribeResponse
+ )
+
+ rpc(
+ :Verify,
+ InternalApi.Gofer.DeploymentTargets.VerifyRequest,
+ InternalApi.Gofer.DeploymentTargets.VerifyResponse
+ )
+
+ rpc(
+ :History,
+ InternalApi.Gofer.DeploymentTargets.HistoryRequest,
+ InternalApi.Gofer.DeploymentTargets.HistoryResponse
+ )
+
+ rpc(
+ :Cordon,
+ InternalApi.Gofer.DeploymentTargets.CordonRequest,
+ InternalApi.Gofer.DeploymentTargets.CordonResponse
+ )
+
+ rpc(
+ :Create,
+ InternalApi.Gofer.DeploymentTargets.CreateRequest,
+ InternalApi.Gofer.DeploymentTargets.CreateResponse
+ )
+
+ rpc(
+ :Update,
+ InternalApi.Gofer.DeploymentTargets.UpdateRequest,
+ InternalApi.Gofer.DeploymentTargets.UpdateResponse
+ )
+
+ rpc(
+ :Delete,
+ InternalApi.Gofer.DeploymentTargets.DeleteRequest,
+ InternalApi.Gofer.DeploymentTargets.DeleteResponse
+ )
+end
+
+defmodule InternalApi.Gofer.DeploymentTargets.DeploymentTargets.Stub do
+ @moduledoc false
+ use GRPC.Stub, service: InternalApi.Gofer.DeploymentTargets.DeploymentTargets.Service
+end
diff --git a/plumber/proto/lib/internal_api/gofer.switch.pb.ex b/plumber/proto/lib/internal_api/gofer.switch.pb.ex
index 6f778647c..649fe0c13 100644
--- a/plumber/proto/lib/internal_api/gofer.switch.pb.ex
+++ b/plumber/proto/lib/internal_api/gofer.switch.pb.ex
@@ -33,19 +33,19 @@ defmodule InternalApi.Gofer.CreateRequest do
:pr_sha
]
- field :pipeline_id, 1, type: :string
- field :targets, 2, repeated: true, type: InternalApi.Gofer.Target
- field :branch_name, 4, type: :string
- field :prev_ppl_artefact_ids, 5, repeated: true, type: :string
- field :label, 6, type: :string
- field :git_ref_type, 7, type: InternalApi.Gofer.GitRefType, enum: true
- field :project_id, 8, type: :string
- field :commit_sha, 9, type: :string
- field :working_dir, 10, type: :string
- field :commit_range, 11, type: :string
- field :yml_file_name, 12, type: :string
- field :pr_base, 13, type: :string
- field :pr_sha, 14, type: :string
+ field(:pipeline_id, 1, type: :string)
+ field(:targets, 2, repeated: true, type: InternalApi.Gofer.Target)
+ field(:branch_name, 4, type: :string)
+ field(:prev_ppl_artefact_ids, 5, repeated: true, type: :string)
+ field(:label, 6, type: :string)
+ field(:git_ref_type, 7, type: InternalApi.Gofer.GitRefType, enum: true)
+ field(:project_id, 8, type: :string)
+ field(:commit_sha, 9, type: :string)
+ field(:working_dir, 10, type: :string)
+ field(:commit_range, 11, type: :string)
+ field(:yml_file_name, 12, type: :string)
+ field(:pr_base, 13, type: :string)
+ field(:pr_sha, 14, type: :string)
end
defmodule InternalApi.Gofer.Target do
@@ -69,12 +69,12 @@ defmodule InternalApi.Gofer.Target do
:deployment_target
]
- field :name, 1, type: :string
- field :pipeline_path, 2, type: :string
- field :auto_trigger_on, 5, repeated: true, type: InternalApi.Gofer.AutoTriggerCond
- field :parameter_env_vars, 6, repeated: true, type: InternalApi.Gofer.ParamEnvVar
- field :auto_promote_when, 7, type: :string
- field :deployment_target, 8, type: :string
+ field(:name, 1, type: :string)
+ field(:pipeline_path, 2, type: :string)
+ field(:auto_trigger_on, 5, repeated: true, type: InternalApi.Gofer.AutoTriggerCond)
+ field(:parameter_env_vars, 6, repeated: true, type: InternalApi.Gofer.ParamEnvVar)
+ field(:auto_promote_when, 7, type: :string)
+ field(:deployment_target, 8, type: :string)
end
defmodule InternalApi.Gofer.ParamEnvVar do
@@ -90,11 +90,11 @@ defmodule InternalApi.Gofer.ParamEnvVar do
}
defstruct [:name, :options, :required, :default_value, :description]
- field :name, 1, type: :string
- field :options, 2, repeated: true, type: :string
- field :required, 3, type: :bool
- field :default_value, 4, type: :string
- field :description, 5, type: :string
+ field(:name, 1, type: :string)
+ field(:options, 2, repeated: true, type: :string)
+ field(:required, 3, type: :bool)
+ field(:default_value, 4, type: :string)
+ field(:description, 5, type: :string)
end
defmodule InternalApi.Gofer.AutoTriggerCond do
@@ -110,11 +110,11 @@ defmodule InternalApi.Gofer.AutoTriggerCond do
}
defstruct [:result, :branch, :result_reason, :labels, :label_patterns]
- field :result, 1, type: :string
- field :branch, 2, repeated: true, type: :string
- field :result_reason, 3, type: :string
- field :labels, 4, repeated: true, type: :string
- field :label_patterns, 5, repeated: true, type: :string
+ field(:result, 1, type: :string)
+ field(:branch, 2, repeated: true, type: :string)
+ field(:result_reason, 3, type: :string)
+ field(:labels, 4, repeated: true, type: :string)
+ field(:label_patterns, 5, repeated: true, type: :string)
end
defmodule InternalApi.Gofer.CreateResponse do
@@ -127,8 +127,8 @@ defmodule InternalApi.Gofer.CreateResponse do
}
defstruct [:response_status, :switch_id]
- field :response_status, 1, type: InternalApi.Gofer.ResponseStatus
- field :switch_id, 2, type: :string
+ field(:response_status, 1, type: InternalApi.Gofer.ResponseStatus)
+ field(:switch_id, 2, type: :string)
end
defmodule InternalApi.Gofer.DescribeRequest do
@@ -142,9 +142,9 @@ defmodule InternalApi.Gofer.DescribeRequest do
}
defstruct [:switch_id, :events_per_target, :requester_id]
- field :switch_id, 1, type: :string
- field :events_per_target, 2, type: :int32
- field :requester_id, 3, type: :string
+ field(:switch_id, 1, type: :string)
+ field(:events_per_target, 2, type: :int32)
+ field(:requester_id, 3, type: :string)
end
defmodule InternalApi.Gofer.DescribeResponse do
@@ -170,13 +170,13 @@ defmodule InternalApi.Gofer.DescribeResponse do
:pipeline_result_reason
]
- field :response_status, 1, type: InternalApi.Gofer.ResponseStatus
- field :switch_id, 2, type: :string
- field :ppl_id, 3, type: :string
- field :pipeline_done, 4, type: :bool
- field :pipeline_result, 5, type: :string
- field :targets, 6, repeated: true, type: InternalApi.Gofer.TargetDescription
- field :pipeline_result_reason, 7, type: :string
+ field(:response_status, 1, type: InternalApi.Gofer.ResponseStatus)
+ field(:switch_id, 2, type: :string)
+ field(:ppl_id, 3, type: :string)
+ field(:pipeline_done, 4, type: :bool)
+ field(:pipeline_result, 5, type: :string)
+ field(:targets, 6, repeated: true, type: InternalApi.Gofer.TargetDescription)
+ field(:pipeline_result_reason, 7, type: :string)
end
defmodule InternalApi.Gofer.TargetDescription do
@@ -200,12 +200,12 @@ defmodule InternalApi.Gofer.TargetDescription do
:dt_description
]
- field :name, 1, type: :string
- field :pipeline_path, 2, type: :string
- field :trigger_events, 4, repeated: true, type: InternalApi.Gofer.TriggerEvent
- field :auto_trigger_on, 6, repeated: true, type: InternalApi.Gofer.AutoTriggerCond
- field :parameter_env_vars, 7, repeated: true, type: InternalApi.Gofer.ParamEnvVar
- field :dt_description, 8, type: InternalApi.Gofer.DeploymentTargetDescription
+ field(:name, 1, type: :string)
+ field(:pipeline_path, 2, type: :string)
+ field(:trigger_events, 4, repeated: true, type: InternalApi.Gofer.TriggerEvent)
+ field(:auto_trigger_on, 6, repeated: true, type: InternalApi.Gofer.AutoTriggerCond)
+ field(:parameter_env_vars, 7, repeated: true, type: InternalApi.Gofer.ParamEnvVar)
+ field(:dt_description, 8, type: InternalApi.Gofer.DeploymentTargetDescription)
end
defmodule InternalApi.Gofer.DeploymentTargetDescription do
@@ -219,9 +219,9 @@ defmodule InternalApi.Gofer.DeploymentTargetDescription do
}
defstruct [:target_id, :target_name, :access]
- field :target_id, 1, type: :string
- field :target_name, 2, type: :string
- field :access, 3, type: InternalApi.Gofer.DeploymentTargetDescription.Access
+ field(:target_id, 1, type: :string)
+ field(:target_name, 2, type: :string)
+ field(:access, 3, type: InternalApi.Gofer.DeploymentTargetDescription.Access)
end
defmodule InternalApi.Gofer.DeploymentTargetDescription.Access do
@@ -235,22 +235,22 @@ defmodule InternalApi.Gofer.DeploymentTargetDescription.Access do
}
defstruct [:allowed, :reason, :message]
- field :allowed, 1, type: :bool
- field :reason, 2, type: InternalApi.Gofer.DeploymentTargetDescription.Access.Reason, enum: true
- field :message, 3, type: :string
+ field(:allowed, 1, type: :bool)
+ field(:reason, 2, type: InternalApi.Gofer.DeploymentTargetDescription.Access.Reason, enum: true)
+ field(:message, 3, type: :string)
end
defmodule InternalApi.Gofer.DeploymentTargetDescription.Access.Reason do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :INTERNAL_ERROR, 0
- field :NO_REASON, 1
- field :SYNCING_TARGET, 2
- field :CORRUPTED_TARGET, 3
- field :BANNED_SUBJECT, 4
- field :BANNED_OBJECT, 5
- field :CORDONED_TARGET, 6
+ field(:INTERNAL_ERROR, 0)
+ field(:NO_REASON, 1)
+ field(:SYNCING_TARGET, 2)
+ field(:CORRUPTED_TARGET, 3)
+ field(:BANNED_SUBJECT, 4)
+ field(:BANNED_OBJECT, 5)
+ field(:CORDONED_TARGET, 6)
end
defmodule InternalApi.Gofer.TriggerEvent do
@@ -284,25 +284,25 @@ defmodule InternalApi.Gofer.TriggerEvent do
:env_variables
]
- field :target_name, 1, type: :string
- field :triggered_at, 2, type: Google.Protobuf.Timestamp
- field :auto_triggered, 3, type: :bool
- field :triggered_by, 4, type: :string
- field :override, 5, type: :bool
- field :processed, 6, type: :bool
- field :processing_result, 7, type: InternalApi.Gofer.TriggerEvent.ProcessingResult, enum: true
- field :scheduled_at, 8, type: Google.Protobuf.Timestamp
- field :scheduled_pipeline_id, 9, type: :string
- field :error_response, 10, type: :string
- field :env_variables, 11, repeated: true, type: InternalApi.Gofer.EnvVariable
+ field(:target_name, 1, type: :string)
+ field(:triggered_at, 2, type: Google.Protobuf.Timestamp)
+ field(:auto_triggered, 3, type: :bool)
+ field(:triggered_by, 4, type: :string)
+ field(:override, 5, type: :bool)
+ field(:processed, 6, type: :bool)
+ field(:processing_result, 7, type: InternalApi.Gofer.TriggerEvent.ProcessingResult, enum: true)
+ field(:scheduled_at, 8, type: Google.Protobuf.Timestamp)
+ field(:scheduled_pipeline_id, 9, type: :string)
+ field(:error_response, 10, type: :string)
+ field(:env_variables, 11, repeated: true, type: InternalApi.Gofer.EnvVariable)
end
defmodule InternalApi.Gofer.TriggerEvent.ProcessingResult do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :PASSED, 0
- field :FAILED, 1
+ field(:PASSED, 0)
+ field(:FAILED, 1)
end
defmodule InternalApi.Gofer.DescribeManyRequest do
@@ -316,9 +316,9 @@ defmodule InternalApi.Gofer.DescribeManyRequest do
}
defstruct [:switch_ids, :events_per_target, :requester_id]
- field :switch_ids, 1, repeated: true, type: :string
- field :events_per_target, 2, type: :int32
- field :requester_id, 3, type: :string
+ field(:switch_ids, 1, repeated: true, type: :string)
+ field(:events_per_target, 2, type: :int32)
+ field(:requester_id, 3, type: :string)
end
defmodule InternalApi.Gofer.DescribeManyResponse do
@@ -331,8 +331,8 @@ defmodule InternalApi.Gofer.DescribeManyResponse do
}
defstruct [:response_status, :switches]
- field :response_status, 1, type: InternalApi.Gofer.ResponseStatus
- field :switches, 2, repeated: true, type: InternalApi.Gofer.SwitchDetails
+ field(:response_status, 1, type: InternalApi.Gofer.ResponseStatus)
+ field(:switches, 2, repeated: true, type: InternalApi.Gofer.SwitchDetails)
end
defmodule InternalApi.Gofer.SwitchDetails do
@@ -356,12 +356,12 @@ defmodule InternalApi.Gofer.SwitchDetails do
:targets
]
- field :switch_id, 1, type: :string
- field :ppl_id, 2, type: :string
- field :pipeline_done, 3, type: :bool
- field :pipeline_result, 4, type: :string
- field :pipeline_result_reason, 5, type: :string
- field :targets, 6, repeated: true, type: InternalApi.Gofer.TargetDescription
+ field(:switch_id, 1, type: :string)
+ field(:ppl_id, 2, type: :string)
+ field(:pipeline_done, 3, type: :bool)
+ field(:pipeline_result, 4, type: :string)
+ field(:pipeline_result_reason, 5, type: :string)
+ field(:targets, 6, repeated: true, type: InternalApi.Gofer.TargetDescription)
end
defmodule InternalApi.Gofer.ListTriggerEventsRequest do
@@ -376,10 +376,10 @@ defmodule InternalApi.Gofer.ListTriggerEventsRequest do
}
defstruct [:switch_id, :target_name, :page, :page_size]
- field :switch_id, 1, type: :string
- field :target_name, 2, type: :string
- field :page, 3, type: :int32
- field :page_size, 4, type: :int32
+ field(:switch_id, 1, type: :string)
+ field(:target_name, 2, type: :string)
+ field(:page, 3, type: :int32)
+ field(:page_size, 4, type: :int32)
end
defmodule InternalApi.Gofer.ListTriggerEventsResponse do
@@ -403,12 +403,12 @@ defmodule InternalApi.Gofer.ListTriggerEventsResponse do
:total_pages
]
- field :response_status, 1, type: InternalApi.Gofer.ResponseStatus
- field :trigger_events, 2, repeated: true, type: InternalApi.Gofer.TriggerEvent
- field :page_number, 3, type: :int32
- field :page_size, 4, type: :int32
- field :total_entries, 5, type: :int32
- field :total_pages, 6, type: :int32
+ field(:response_status, 1, type: InternalApi.Gofer.ResponseStatus)
+ field(:trigger_events, 2, repeated: true, type: InternalApi.Gofer.TriggerEvent)
+ field(:page_number, 3, type: :int32)
+ field(:page_size, 4, type: :int32)
+ field(:total_entries, 5, type: :int32)
+ field(:total_pages, 6, type: :int32)
end
defmodule InternalApi.Gofer.PipelineDoneRequest do
@@ -422,9 +422,9 @@ defmodule InternalApi.Gofer.PipelineDoneRequest do
}
defstruct [:switch_id, :result, :result_reason]
- field :switch_id, 1, type: :string
- field :result, 2, type: :string
- field :result_reason, 3, type: :string
+ field(:switch_id, 1, type: :string)
+ field(:result, 2, type: :string)
+ field(:result_reason, 3, type: :string)
end
defmodule InternalApi.Gofer.PipelineDoneResponse do
@@ -436,7 +436,7 @@ defmodule InternalApi.Gofer.PipelineDoneResponse do
}
defstruct [:response_status]
- field :response_status, 1, type: InternalApi.Gofer.ResponseStatus
+ field(:response_status, 1, type: InternalApi.Gofer.ResponseStatus)
end
defmodule InternalApi.Gofer.TriggerRequest do
@@ -453,12 +453,12 @@ defmodule InternalApi.Gofer.TriggerRequest do
}
defstruct [:switch_id, :target_name, :triggered_by, :override, :request_token, :env_variables]
- field :switch_id, 1, type: :string
- field :target_name, 2, type: :string
- field :triggered_by, 3, type: :string
- field :override, 4, type: :bool
- field :request_token, 5, type: :string
- field :env_variables, 6, repeated: true, type: InternalApi.Gofer.EnvVariable
+ field(:switch_id, 1, type: :string)
+ field(:target_name, 2, type: :string)
+ field(:triggered_by, 3, type: :string)
+ field(:override, 4, type: :bool)
+ field(:request_token, 5, type: :string)
+ field(:env_variables, 6, repeated: true, type: InternalApi.Gofer.EnvVariable)
end
defmodule InternalApi.Gofer.EnvVariable do
@@ -471,8 +471,8 @@ defmodule InternalApi.Gofer.EnvVariable do
}
defstruct [:name, :value]
- field :name, 1, type: :string
- field :value, 2, type: :string
+ field(:name, 1, type: :string)
+ field(:value, 2, type: :string)
end
defmodule InternalApi.Gofer.TriggerResponse do
@@ -484,7 +484,7 @@ defmodule InternalApi.Gofer.TriggerResponse do
}
defstruct [:response_status]
- field :response_status, 1, type: InternalApi.Gofer.ResponseStatus
+ field(:response_status, 1, type: InternalApi.Gofer.ResponseStatus)
end
defmodule InternalApi.Gofer.VersionRequest do
@@ -503,7 +503,7 @@ defmodule InternalApi.Gofer.VersionResponse do
}
defstruct [:version]
- field :version, 1, type: :string
+ field(:version, 1, type: :string)
end
defmodule InternalApi.Gofer.ResponseStatus do
@@ -516,48 +516,60 @@ defmodule InternalApi.Gofer.ResponseStatus do
}
defstruct [:code, :message]
- field :code, 1, type: InternalApi.Gofer.ResponseStatus.ResponseCode, enum: true
- field :message, 2, type: :string
+ field(:code, 1, type: InternalApi.Gofer.ResponseStatus.ResponseCode, enum: true)
+ field(:message, 2, type: :string)
end
defmodule InternalApi.Gofer.ResponseStatus.ResponseCode do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :OK, 0
- field :BAD_PARAM, 1
- field :NOT_FOUND, 2
- field :RESULT_CHANGED, 3
- field :FAILED, 4
- field :REFUSED, 5
- field :RESULT_REASON_CHANGED, 6
- field :MALFORMED, 7
+ field(:OK, 0)
+ field(:BAD_PARAM, 1)
+ field(:NOT_FOUND, 2)
+ field(:RESULT_CHANGED, 3)
+ field(:FAILED, 4)
+ field(:REFUSED, 5)
+ field(:RESULT_REASON_CHANGED, 6)
+ field(:MALFORMED, 7)
end
defmodule InternalApi.Gofer.GitRefType do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :BRANCH, 0
- field :TAG, 1
- field :PR, 2
+ field(:BRANCH, 0)
+ field(:TAG, 1)
+ field(:PR, 2)
end
defmodule InternalApi.Gofer.Switch.Service do
@moduledoc false
use GRPC.Service, name: "InternalApi.Gofer.Switch"
- rpc :Create, InternalApi.Gofer.CreateRequest, InternalApi.Gofer.CreateResponse
- rpc :Describe, InternalApi.Gofer.DescribeRequest, InternalApi.Gofer.DescribeResponse
- rpc :DescribeMany, InternalApi.Gofer.DescribeManyRequest, InternalApi.Gofer.DescribeManyResponse
-
- rpc :ListTriggerEvents,
- InternalApi.Gofer.ListTriggerEventsRequest,
- InternalApi.Gofer.ListTriggerEventsResponse
-
- rpc :PipelineDone, InternalApi.Gofer.PipelineDoneRequest, InternalApi.Gofer.PipelineDoneResponse
- rpc :Trigger, InternalApi.Gofer.TriggerRequest, InternalApi.Gofer.TriggerResponse
- rpc :Version, InternalApi.Gofer.VersionRequest, InternalApi.Gofer.VersionResponse
+ rpc(:Create, InternalApi.Gofer.CreateRequest, InternalApi.Gofer.CreateResponse)
+ rpc(:Describe, InternalApi.Gofer.DescribeRequest, InternalApi.Gofer.DescribeResponse)
+
+ rpc(
+ :DescribeMany,
+ InternalApi.Gofer.DescribeManyRequest,
+ InternalApi.Gofer.DescribeManyResponse
+ )
+
+ rpc(
+ :ListTriggerEvents,
+ InternalApi.Gofer.ListTriggerEventsRequest,
+ InternalApi.Gofer.ListTriggerEventsResponse
+ )
+
+ rpc(
+ :PipelineDone,
+ InternalApi.Gofer.PipelineDoneRequest,
+ InternalApi.Gofer.PipelineDoneResponse
+ )
+
+ rpc(:Trigger, InternalApi.Gofer.TriggerRequest, InternalApi.Gofer.TriggerResponse)
+ rpc(:Version, InternalApi.Gofer.VersionRequest, InternalApi.Gofer.VersionResponse)
end
defmodule InternalApi.Gofer.Switch.Stub do
diff --git a/plumber/proto/lib/internal_api/health.pb.ex b/plumber/proto/lib/internal_api/health.pb.ex
index 4d6202fd9..3395b3ef1 100644
--- a/plumber/proto/lib/internal_api/health.pb.ex
+++ b/plumber/proto/lib/internal_api/health.pb.ex
@@ -7,7 +7,7 @@ defmodule Grpc.Health.V1.HealthCheckRequest do
}
defstruct [:service]
- field :service, 1, type: :string
+ field(:service, 1, type: :string)
end
defmodule Grpc.Health.V1.HealthCheckResponse do
@@ -19,25 +19,25 @@ defmodule Grpc.Health.V1.HealthCheckResponse do
}
defstruct [:status]
- field :status, 1, type: Grpc.Health.V1.HealthCheckResponse.ServingStatus, enum: true
+ field(:status, 1, type: Grpc.Health.V1.HealthCheckResponse.ServingStatus, enum: true)
end
defmodule Grpc.Health.V1.HealthCheckResponse.ServingStatus do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :UNKNOWN, 0
- field :SERVING, 1
- field :NOT_SERVING, 2
- field :SERVICE_UNKNOWN, 3
+ field(:UNKNOWN, 0)
+ field(:SERVING, 1)
+ field(:NOT_SERVING, 2)
+ field(:SERVICE_UNKNOWN, 3)
end
defmodule Grpc.Health.V1.Health.Service do
@moduledoc false
use GRPC.Service, name: "grpc.health.v1.Health"
- rpc :Check, Grpc.Health.V1.HealthCheckRequest, Grpc.Health.V1.HealthCheckResponse
- rpc :Watch, Grpc.Health.V1.HealthCheckRequest, stream(Grpc.Health.V1.HealthCheckResponse)
+ rpc(:Check, Grpc.Health.V1.HealthCheckRequest, Grpc.Health.V1.HealthCheckResponse)
+ rpc(:Watch, Grpc.Health.V1.HealthCheckRequest, stream(Grpc.Health.V1.HealthCheckResponse))
end
defmodule Grpc.Health.V1.Health.Stub do
diff --git a/plumber/proto/lib/internal_api/include/google/protobuf/timestamp.pb.ex b/plumber/proto/lib/internal_api/include/google/protobuf/timestamp.pb.ex
index e4bfa3138..ee7e4c0a5 100644
--- a/plumber/proto/lib/internal_api/include/google/protobuf/timestamp.pb.ex
+++ b/plumber/proto/lib/internal_api/include/google/protobuf/timestamp.pb.ex
@@ -8,6 +8,6 @@ defmodule Google.Protobuf.Timestamp do
}
defstruct [:seconds, :nanos]
- field :seconds, 1, type: :int64
- field :nanos, 2, type: :int32
+ field(:seconds, 1, type: :int64)
+ field(:nanos, 2, type: :int32)
end
diff --git a/plumber/proto/lib/internal_api/include/google/rpc/code.pb.ex b/plumber/proto/lib/internal_api/include/google/rpc/code.pb.ex
index 7a22be44b..227b27a8e 100644
--- a/plumber/proto/lib/internal_api/include/google/rpc/code.pb.ex
+++ b/plumber/proto/lib/internal_api/include/google/rpc/code.pb.ex
@@ -2,21 +2,21 @@ defmodule Google.Rpc.Code do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :OK, 0
- field :CANCELLED, 1
- field :UNKNOWN, 2
- field :INVALID_ARGUMENT, 3
- field :DEADLINE_EXCEEDED, 4
- field :NOT_FOUND, 5
- field :ALREADY_EXISTS, 6
- field :PERMISSION_DENIED, 7
- field :UNAUTHENTICATED, 16
- field :RESOURCE_EXHAUSTED, 8
- field :FAILED_PRECONDITION, 9
- field :ABORTED, 10
- field :OUT_OF_RANGE, 11
- field :UNIMPLEMENTED, 12
- field :INTERNAL, 13
- field :UNAVAILABLE, 14
- field :DATA_LOSS, 15
+ field(:OK, 0)
+ field(:CANCELLED, 1)
+ field(:UNKNOWN, 2)
+ field(:INVALID_ARGUMENT, 3)
+ field(:DEADLINE_EXCEEDED, 4)
+ field(:NOT_FOUND, 5)
+ field(:ALREADY_EXISTS, 6)
+ field(:PERMISSION_DENIED, 7)
+ field(:UNAUTHENTICATED, 16)
+ field(:RESOURCE_EXHAUSTED, 8)
+ field(:FAILED_PRECONDITION, 9)
+ field(:ABORTED, 10)
+ field(:OUT_OF_RANGE, 11)
+ field(:UNIMPLEMENTED, 12)
+ field(:INTERNAL, 13)
+ field(:UNAVAILABLE, 14)
+ field(:DATA_LOSS, 15)
end
diff --git a/plumber/proto/lib/internal_api/include/google/rpc/status.pb.ex b/plumber/proto/lib/internal_api/include/google/rpc/status.pb.ex
index 1378cc93e..24523dc63 100644
--- a/plumber/proto/lib/internal_api/include/google/rpc/status.pb.ex
+++ b/plumber/proto/lib/internal_api/include/google/rpc/status.pb.ex
@@ -9,7 +9,7 @@ defmodule Google.Rpc.Status do
}
defstruct [:code, :message, :details]
- field :code, 1, type: :int32
- field :message, 2, type: :string
- field :details, 3, repeated: true, type: Google.Protobuf.Any
+ field(:code, 1, type: :int32)
+ field(:message, 2, type: :string)
+ field(:details, 3, repeated: true, type: Google.Protobuf.Any)
end
diff --git a/plumber/proto/lib/internal_api/include/internal_api/response_status.pb.ex b/plumber/proto/lib/internal_api/include/internal_api/response_status.pb.ex
index 24fb7f21a..ac10ea64e 100644
--- a/plumber/proto/lib/internal_api/include/internal_api/response_status.pb.ex
+++ b/plumber/proto/lib/internal_api/include/internal_api/response_status.pb.ex
@@ -8,14 +8,14 @@ defmodule InternalApi.ResponseStatus do
}
defstruct [:code, :message]
- field :code, 1, type: InternalApi.ResponseStatus.Code, enum: true
- field :message, 2, type: :string
+ field(:code, 1, type: InternalApi.ResponseStatus.Code, enum: true)
+ field(:message, 2, type: :string)
end
defmodule InternalApi.ResponseStatus.Code do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :OK, 0
- field :BAD_PARAM, 1
+ field(:OK, 0)
+ field(:BAD_PARAM, 1)
end
diff --git a/plumber/proto/lib/internal_api/include/internal_api/status.pb.ex b/plumber/proto/lib/internal_api/include/internal_api/status.pb.ex
index 1884872da..2897c7cd1 100644
--- a/plumber/proto/lib/internal_api/include/internal_api/status.pb.ex
+++ b/plumber/proto/lib/internal_api/include/internal_api/status.pb.ex
@@ -8,6 +8,6 @@ defmodule InternalApi.Status do
}
defstruct [:code, :message]
- field :code, 1, type: Google.Rpc.Code, enum: true
- field :message, 2, type: :string
+ field(:code, 1, type: Google.Rpc.Code, enum: true)
+ field(:message, 2, type: :string)
end
diff --git a/plumber/proto/lib/internal_api/organization.pb.ex b/plumber/proto/lib/internal_api/organization.pb.ex
index cecce5430..741f05b53 100644
--- a/plumber/proto/lib/internal_api/organization.pb.ex
+++ b/plumber/proto/lib/internal_api/organization.pb.ex
@@ -5,13 +5,15 @@ defmodule InternalApi.Organization.DescribeRequest do
@type t :: %__MODULE__{
org_id: String.t(),
org_username: String.t(),
- include_quotas: boolean
+ include_quotas: boolean,
+ soft_deleted: boolean
}
- defstruct [:org_id, :org_username, :include_quotas]
+ defstruct [:org_id, :org_username, :include_quotas, :soft_deleted]
- field :org_id, 1, type: :string
- field :org_username, 2, type: :string
- field :include_quotas, 3, type: :bool
+ field(:org_id, 1, type: :string)
+ field(:org_username, 2, type: :string)
+ field(:include_quotas, 3, type: :bool)
+ field(:soft_deleted, 4, type: :bool)
end
defmodule InternalApi.Organization.DescribeResponse do
@@ -24,8 +26,8 @@ defmodule InternalApi.Organization.DescribeResponse do
}
defstruct [:status, :organization]
- field :status, 1, type: InternalApi.ResponseStatus
- field :organization, 2, type: InternalApi.Organization.Organization
+ field(:status, 1, type: InternalApi.ResponseStatus)
+ field(:organization, 2, type: InternalApi.Organization.Organization)
end
defmodule InternalApi.Organization.DescribeManyRequest do
@@ -33,11 +35,13 @@ defmodule InternalApi.Organization.DescribeManyRequest do
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
- org_ids: [String.t()]
+ org_ids: [String.t()],
+ soft_deleted: boolean
}
- defstruct [:org_ids]
+ defstruct [:org_ids, :soft_deleted]
- field :org_ids, 1, repeated: true, type: :string
+ field(:org_ids, 1, repeated: true, type: :string)
+ field(:soft_deleted, 2, type: :bool)
end
defmodule InternalApi.Organization.DescribeManyResponse do
@@ -49,7 +53,7 @@ defmodule InternalApi.Organization.DescribeManyResponse do
}
defstruct [:organizations]
- field :organizations, 1, repeated: true, type: InternalApi.Organization.Organization
+ field(:organizations, 1, repeated: true, type: InternalApi.Organization.Organization)
end
defmodule InternalApi.Organization.ListRequest do
@@ -61,23 +65,25 @@ defmodule InternalApi.Organization.ListRequest do
created_at_gt: Google.Protobuf.Timestamp.t(),
order: integer,
page_size: integer,
- page_token: String.t()
+ page_token: String.t(),
+ soft_deleted: boolean
}
- defstruct [:user_id, :created_at_gt, :order, :page_size, :page_token]
+ defstruct [:user_id, :created_at_gt, :order, :page_size, :page_token, :soft_deleted]
- field :user_id, 2, type: :string
- field :created_at_gt, 3, type: Google.Protobuf.Timestamp
- field :order, 4, type: InternalApi.Organization.ListRequest.Order, enum: true
- field :page_size, 5, type: :int32
- field :page_token, 6, type: :string
+ field(:user_id, 2, type: :string)
+ field(:created_at_gt, 3, type: Google.Protobuf.Timestamp)
+ field(:order, 4, type: InternalApi.Organization.ListRequest.Order, enum: true)
+ field(:page_size, 5, type: :int32)
+ field(:page_token, 6, type: :string)
+ field(:soft_deleted, 7, type: :bool)
end
defmodule InternalApi.Organization.ListRequest.Order do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :BY_NAME_ASC, 0
- field :BY_CREATION_TIME_ASC, 1
+ field(:BY_NAME_ASC, 0)
+ field(:BY_CREATION_TIME_ASC, 1)
end
defmodule InternalApi.Organization.ListResponse do
@@ -91,9 +97,9 @@ defmodule InternalApi.Organization.ListResponse do
}
defstruct [:status, :organizations, :next_page_token]
- field :status, 1, type: InternalApi.ResponseStatus
- field :organizations, 2, repeated: true, type: InternalApi.Organization.Organization
- field :next_page_token, 3, type: :string
+ field(:status, 1, type: InternalApi.ResponseStatus)
+ field(:organizations, 2, repeated: true, type: InternalApi.Organization.Organization)
+ field(:next_page_token, 3, type: :string)
end
defmodule InternalApi.Organization.CreateRequest do
@@ -107,9 +113,9 @@ defmodule InternalApi.Organization.CreateRequest do
}
defstruct [:creator_id, :organization_name, :organization_username]
- field :creator_id, 1, type: :string
- field :organization_name, 2, type: :string
- field :organization_username, 3, type: :string
+ field(:creator_id, 1, type: :string)
+ field(:organization_name, 2, type: :string)
+ field(:organization_username, 3, type: :string)
end
defmodule InternalApi.Organization.CreateResponse do
@@ -122,34 +128,8 @@ defmodule InternalApi.Organization.CreateResponse do
}
defstruct [:status, :organization]
- field :status, 1, type: InternalApi.ResponseStatus
- field :organization, 2, type: InternalApi.Organization.Organization
-end
-
-defmodule InternalApi.Organization.CreateWithQuotasRequest do
- @moduledoc false
- use Protobuf, syntax: :proto3
-
- @type t :: %__MODULE__{
- organization: InternalApi.Organization.Organization.t(),
- quotas: [InternalApi.Organization.Quota.t()]
- }
- defstruct [:organization, :quotas]
-
- field :organization, 1, type: InternalApi.Organization.Organization
- field :quotas, 2, repeated: true, type: InternalApi.Organization.Quota
-end
-
-defmodule InternalApi.Organization.CreateWithQuotasResponse do
- @moduledoc false
- use Protobuf, syntax: :proto3
-
- @type t :: %__MODULE__{
- organization: InternalApi.Organization.Organization.t()
- }
- defstruct [:organization]
-
- field :organization, 1, type: InternalApi.Organization.Organization
+ field(:status, 1, type: InternalApi.ResponseStatus)
+ field(:organization, 2, type: InternalApi.Organization.Organization)
end
defmodule InternalApi.Organization.UpdateRequest do
@@ -161,7 +141,7 @@ defmodule InternalApi.Organization.UpdateRequest do
}
defstruct [:organization]
- field :organization, 1, type: InternalApi.Organization.Organization
+ field(:organization, 1, type: InternalApi.Organization.Organization)
end
defmodule InternalApi.Organization.UpdateResponse do
@@ -174,8 +154,8 @@ defmodule InternalApi.Organization.UpdateResponse do
}
defstruct [:status, :organization]
- field :status, 1, type: Google.Rpc.Status
- field :organization, 2, type: InternalApi.Organization.Organization
+ field(:status, 1, type: Google.Rpc.Status)
+ field(:organization, 2, type: InternalApi.Organization.Organization)
end
defmodule InternalApi.Organization.IsValidResponse do
@@ -188,8 +168,8 @@ defmodule InternalApi.Organization.IsValidResponse do
}
defstruct [:is_valid, :errors]
- field :is_valid, 1, type: :bool
- field :errors, 2, type: :string
+ field(:is_valid, 1, type: :bool)
+ field(:errors, 2, type: :string)
end
defmodule InternalApi.Organization.IsMemberRequest do
@@ -203,9 +183,9 @@ defmodule InternalApi.Organization.IsMemberRequest do
}
defstruct [:user_id, :org_id, :org_username]
- field :user_id, 1, type: :string
- field :org_id, 3, type: :string
- field :org_username, 4, type: :string
+ field(:user_id, 1, type: :string)
+ field(:org_id, 3, type: :string)
+ field(:org_username, 4, type: :string)
end
defmodule InternalApi.Organization.IsMemberResponse do
@@ -218,8 +198,8 @@ defmodule InternalApi.Organization.IsMemberResponse do
}
defstruct [:status, :is_member]
- field :status, 1, type: InternalApi.ResponseStatus
- field :is_member, 2, type: :bool
+ field(:status, 1, type: InternalApi.ResponseStatus)
+ field(:is_member, 2, type: :bool)
end
defmodule InternalApi.Organization.IsOwnerRequest do
@@ -232,8 +212,8 @@ defmodule InternalApi.Organization.IsOwnerRequest do
}
defstruct [:user_id, :org_id]
- field :user_id, 1, type: :string
- field :org_id, 2, type: :string
+ field(:user_id, 1, type: :string)
+ field(:org_id, 2, type: :string)
end
defmodule InternalApi.Organization.IsOwnerResponse do
@@ -246,8 +226,8 @@ defmodule InternalApi.Organization.IsOwnerResponse do
}
defstruct [:status, :is_owner]
- field :status, 1, type: InternalApi.ResponseStatus
- field :is_owner, 2, type: :bool
+ field(:status, 1, type: InternalApi.ResponseStatus)
+ field(:is_owner, 2, type: :bool)
end
defmodule InternalApi.Organization.MakeOwnerRequest do
@@ -260,8 +240,8 @@ defmodule InternalApi.Organization.MakeOwnerRequest do
}
defstruct [:org_id, :owner_id]
- field :org_id, 1, type: :string
- field :owner_id, 2, type: :string
+ field(:org_id, 1, type: :string)
+ field(:owner_id, 2, type: :string)
end
defmodule InternalApi.Organization.MembersRequest do
@@ -276,10 +256,10 @@ defmodule InternalApi.Organization.MembersRequest do
}
defstruct [:org_id, :org_username, :only_members, :name_contains]
- field :org_id, 1, type: :string
- field :org_username, 2, type: :string
- field :only_members, 3, type: :bool
- field :name_contains, 4, type: :string
+ field(:org_id, 1, type: :string)
+ field(:org_username, 2, type: :string)
+ field(:only_members, 3, type: :bool)
+ field(:name_contains, 4, type: :string)
end
defmodule InternalApi.Organization.MembersResponse do
@@ -293,9 +273,9 @@ defmodule InternalApi.Organization.MembersResponse do
}
defstruct [:status, :members, :not_logged_in_members]
- field :status, 1, type: InternalApi.ResponseStatus
- field :members, 2, repeated: true, type: InternalApi.Organization.Member
- field :not_logged_in_members, 3, repeated: true, type: InternalApi.Organization.Member
+ field(:status, 1, type: InternalApi.ResponseStatus)
+ field(:members, 2, repeated: true, type: InternalApi.Organization.Member)
+ field(:not_logged_in_members, 3, repeated: true, type: InternalApi.Organization.Member)
end
defmodule InternalApi.Organization.AddMemberRequest do
@@ -309,9 +289,9 @@ defmodule InternalApi.Organization.AddMemberRequest do
}
defstruct [:org_id, :creator_id, :username]
- field :org_id, 1, type: :string
- field :creator_id, 2, type: :string
- field :username, 3, type: :string
+ field(:org_id, 1, type: :string)
+ field(:creator_id, 2, type: :string)
+ field(:username, 3, type: :string)
end
defmodule InternalApi.Organization.AddMemberResponse do
@@ -324,8 +304,8 @@ defmodule InternalApi.Organization.AddMemberResponse do
}
defstruct [:status, :member]
- field :status, 1, type: Google.Rpc.Status
- field :member, 2, type: InternalApi.Organization.Member
+ field(:status, 1, type: Google.Rpc.Status)
+ field(:member, 2, type: InternalApi.Organization.Member)
end
defmodule InternalApi.Organization.AddMembersRequest do
@@ -339,12 +319,13 @@ defmodule InternalApi.Organization.AddMembersRequest do
}
defstruct [:org_id, :creator_id, :members_data]
- field :org_id, 1, type: :string
- field :creator_id, 2, type: :string
+ field(:org_id, 1, type: :string)
+ field(:creator_id, 2, type: :string)
- field :members_data, 3,
+ field(:members_data, 3,
repeated: true,
type: InternalApi.Organization.AddMembersRequest.MemberData
+ )
end
defmodule InternalApi.Organization.AddMembersRequest.MemberData do
@@ -358,9 +339,9 @@ defmodule InternalApi.Organization.AddMembersRequest.MemberData do
}
defstruct [:github_username, :github_uid, :invite_email]
- field :github_username, 1, type: :string
- field :github_uid, 2, type: :string
- field :invite_email, 3, type: :string
+ field(:github_username, 1, type: :string)
+ field(:github_uid, 2, type: :string)
+ field(:invite_email, 3, type: :string)
end
defmodule InternalApi.Organization.AddMembersResponse do
@@ -372,7 +353,7 @@ defmodule InternalApi.Organization.AddMembersResponse do
}
defstruct [:members]
- field :members, 1, repeated: true, type: InternalApi.Organization.Member
+ field(:members, 1, repeated: true, type: InternalApi.Organization.Member)
end
defmodule InternalApi.Organization.DeleteMemberRequest do
@@ -386,9 +367,9 @@ defmodule InternalApi.Organization.DeleteMemberRequest do
}
defstruct [:org_id, :membership_id, :user_id]
- field :org_id, 1, type: :string
- field :membership_id, 3, type: :string
- field :user_id, 4, type: :string
+ field(:org_id, 1, type: :string)
+ field(:membership_id, 3, type: :string)
+ field(:user_id, 4, type: :string)
end
defmodule InternalApi.Organization.DeleteMemberResponse do
@@ -400,7 +381,7 @@ defmodule InternalApi.Organization.DeleteMemberResponse do
}
defstruct [:status]
- field :status, 1, type: Google.Rpc.Status
+ field(:status, 1, type: Google.Rpc.Status)
end
defmodule InternalApi.Organization.SuspendRequest do
@@ -415,10 +396,10 @@ defmodule InternalApi.Organization.SuspendRequest do
}
defstruct [:org_id, :origin, :description, :reason]
- field :org_id, 1, type: :string
- field :origin, 2, type: :string
- field :description, 3, type: :string
- field :reason, 4, type: InternalApi.Organization.Suspension.Reason, enum: true
+ field(:org_id, 1, type: :string)
+ field(:origin, 2, type: :string)
+ field(:description, 3, type: :string)
+ field(:reason, 4, type: InternalApi.Organization.Suspension.Reason, enum: true)
end
defmodule InternalApi.Organization.SuspendResponse do
@@ -430,7 +411,7 @@ defmodule InternalApi.Organization.SuspendResponse do
}
defstruct [:status]
- field :status, 1, type: Google.Rpc.Status
+ field(:status, 1, type: Google.Rpc.Status)
end
defmodule InternalApi.Organization.SetOpenSourceRequest do
@@ -442,7 +423,7 @@ defmodule InternalApi.Organization.SetOpenSourceRequest do
}
defstruct [:org_id]
- field :org_id, 1, type: :string
+ field(:org_id, 1, type: :string)
end
defmodule InternalApi.Organization.SetOpenSourceResponse do
@@ -454,7 +435,7 @@ defmodule InternalApi.Organization.SetOpenSourceResponse do
}
defstruct [:organization]
- field :organization, 1, type: InternalApi.Organization.Organization
+ field(:organization, 1, type: InternalApi.Organization.Organization)
end
defmodule InternalApi.Organization.UnsuspendRequest do
@@ -469,10 +450,10 @@ defmodule InternalApi.Organization.UnsuspendRequest do
}
defstruct [:org_id, :origin, :description, :reason]
- field :org_id, 1, type: :string
- field :origin, 3, type: :string
- field :description, 2, type: :string
- field :reason, 4, type: InternalApi.Organization.Suspension.Reason, enum: true
+ field(:org_id, 1, type: :string)
+ field(:origin, 3, type: :string)
+ field(:description, 2, type: :string)
+ field(:reason, 4, type: InternalApi.Organization.Suspension.Reason, enum: true)
end
defmodule InternalApi.Organization.UnsuspendResponse do
@@ -484,7 +465,7 @@ defmodule InternalApi.Organization.UnsuspendResponse do
}
defstruct [:status]
- field :status, 1, type: Google.Rpc.Status
+ field(:status, 1, type: Google.Rpc.Status)
end
defmodule InternalApi.Organization.VerifyRequest do
@@ -496,7 +477,7 @@ defmodule InternalApi.Organization.VerifyRequest do
}
defstruct [:org_id]
- field :org_id, 1, type: :string
+ field(:org_id, 1, type: :string)
end
defmodule InternalApi.Organization.ListSuspensionsRequest do
@@ -508,7 +489,7 @@ defmodule InternalApi.Organization.ListSuspensionsRequest do
}
defstruct [:org_id]
- field :org_id, 1, type: :string
+ field(:org_id, 1, type: :string)
end
defmodule InternalApi.Organization.ListSuspensionsResponse do
@@ -521,8 +502,8 @@ defmodule InternalApi.Organization.ListSuspensionsResponse do
}
defstruct [:status, :suspensions]
- field :status, 1, type: Google.Rpc.Status
- field :suspensions, 2, repeated: true, type: InternalApi.Organization.Suspension
+ field(:status, 1, type: Google.Rpc.Status)
+ field(:suspensions, 2, repeated: true, type: InternalApi.Organization.Suspension)
end
defmodule InternalApi.Organization.DestroyRequest do
@@ -534,7 +515,19 @@ defmodule InternalApi.Organization.DestroyRequest do
}
defstruct [:org_id]
- field :org_id, 1, type: :string
+ field(:org_id, 1, type: :string)
+end
+
+defmodule InternalApi.Organization.RestoreRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ org_id: String.t()
+ }
+ defstruct [:org_id]
+
+ field(:org_id, 1, type: :string)
end
defmodule InternalApi.Organization.Organization do
@@ -556,7 +549,6 @@ defmodule InternalApi.Organization.Organization do
allowed_id_providers: [String.t()],
deny_member_workflows: boolean,
deny_non_member_workflows: boolean,
- quotas: [InternalApi.Organization.Quota.t()],
settings: [InternalApi.Organization.OrganizationSetting.t()]
}
defstruct [
@@ -574,26 +566,24 @@ defmodule InternalApi.Organization.Organization do
:allowed_id_providers,
:deny_member_workflows,
:deny_non_member_workflows,
- :quotas,
:settings
]
- field :org_username, 1, type: :string
- field :created_at, 2, type: Google.Protobuf.Timestamp
- field :avatar_url, 3, type: :string
- field :org_id, 4, type: :string
- field :name, 5, type: :string
- field :owner_id, 6, type: :string
- field :suspended, 7, type: :bool
- field :open_source, 9, type: :bool
- field :verified, 10, type: :bool
- field :restricted, 11, type: :bool
- field :ip_allow_list, 12, repeated: true, type: :string
- field :allowed_id_providers, 13, repeated: true, type: :string
- field :deny_member_workflows, 14, type: :bool
- field :deny_non_member_workflows, 15, type: :bool
- field :quotas, 8, repeated: true, type: InternalApi.Organization.Quota
- field :settings, 16, repeated: true, type: InternalApi.Organization.OrganizationSetting
+ field(:org_username, 1, type: :string)
+ field(:created_at, 2, type: Google.Protobuf.Timestamp)
+ field(:avatar_url, 3, type: :string)
+ field(:org_id, 4, type: :string)
+ field(:name, 5, type: :string)
+ field(:owner_id, 6, type: :string)
+ field(:suspended, 7, type: :bool)
+ field(:open_source, 9, type: :bool)
+ field(:verified, 10, type: :bool)
+ field(:restricted, 11, type: :bool)
+ field(:ip_allow_list, 12, repeated: true, type: :string)
+ field(:allowed_id_providers, 13, repeated: true, type: :string)
+ field(:deny_member_workflows, 14, type: :bool)
+ field(:deny_non_member_workflows, 15, type: :bool)
+ field(:settings, 16, repeated: true, type: InternalApi.Organization.OrganizationSetting)
end
defmodule InternalApi.Organization.Suspension do
@@ -608,19 +598,19 @@ defmodule InternalApi.Organization.Suspension do
}
defstruct [:origin, :description, :reason, :created_at]
- field :origin, 1, type: :string
- field :description, 2, type: :string
- field :reason, 3, type: InternalApi.Organization.Suspension.Reason, enum: true
- field :created_at, 4, type: Google.Protobuf.Timestamp
+ field(:origin, 1, type: :string)
+ field(:description, 2, type: :string)
+ field(:reason, 3, type: InternalApi.Organization.Suspension.Reason, enum: true)
+ field(:created_at, 4, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.Organization.Suspension.Reason do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :INSUFFICIENT_FUNDS, 0
- field :ACCOUNT_AT_RISK, 1
- field :VIOLATION_OF_TOS, 2
+ field(:INSUFFICIENT_FUNDS, 0)
+ field(:ACCOUNT_AT_RISK, 1)
+ field(:VIOLATION_OF_TOS, 2)
end
defmodule InternalApi.Organization.Member do
@@ -648,51 +638,23 @@ defmodule InternalApi.Organization.Member do
:github_uid
]
- field :screen_name, 1, type: :string
- field :avatar_url, 2, type: :string
- field :user_id, 3, type: :string
- field :role, 4, type: InternalApi.Organization.Member.Role, enum: true
- field :invited_at, 5, type: Google.Protobuf.Timestamp
- field :membership_id, 6, type: :string
- field :github_username, 7, type: :string
- field :github_uid, 8, type: :string
+ field(:screen_name, 1, type: :string)
+ field(:avatar_url, 2, type: :string)
+ field(:user_id, 3, type: :string)
+ field(:role, 4, type: InternalApi.Organization.Member.Role, enum: true)
+ field(:invited_at, 5, type: Google.Protobuf.Timestamp)
+ field(:membership_id, 6, type: :string)
+ field(:github_username, 7, type: :string)
+ field(:github_uid, 8, type: :string)
end
defmodule InternalApi.Organization.Member.Role do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :MEMBER, 0
- field :OWNER, 1
- field :ADMIN, 2
-end
-
-defmodule InternalApi.Organization.Quota do
- @moduledoc false
- use Protobuf, syntax: :proto3
-
- @type t :: %__MODULE__{
- type: integer,
- value: non_neg_integer
- }
- defstruct [:type, :value]
-
- field :type, 1, type: InternalApi.Organization.Quota.Type, enum: true
- field :value, 2, type: :uint32
-end
-
-defmodule InternalApi.Organization.Quota.Type do
- @moduledoc false
- use Protobuf, enum: true, syntax: :proto3
-
- field :MAX_PEOPLE_IN_ORG, 0
- field :MAX_PARALELLISM_IN_ORG, 1
- field :MAX_PROJECTS_IN_ORG, 7
- field :MAX_PARALLEL_E1_STANDARD_2, 2
- field :MAX_PARALLEL_E1_STANDARD_4, 3
- field :MAX_PARALLEL_E1_STANDARD_8, 4
- field :MAX_PARALLEL_A1_STANDARD_4, 5
- field :MAX_PARALLEL_A1_STANDARD_8, 6
+ field(:MEMBER, 0)
+ field(:OWNER, 1)
+ field(:ADMIN, 2)
end
defmodule InternalApi.Organization.OrganizationSetting do
@@ -705,60 +667,8 @@ defmodule InternalApi.Organization.OrganizationSetting do
}
defstruct [:key, :value]
- field :key, 1, type: :string
- field :value, 2, type: :string
-end
-
-defmodule InternalApi.Organization.GetQuotasRequest do
- @moduledoc false
- use Protobuf, syntax: :proto3
-
- @type t :: %__MODULE__{
- org_id: String.t(),
- types: [integer]
- }
- defstruct [:org_id, :types]
-
- field :org_id, 1, type: :string
- field :types, 2, repeated: true, type: InternalApi.Organization.Quota.Type, enum: true
-end
-
-defmodule InternalApi.Organization.GetQuotaResponse do
- @moduledoc false
- use Protobuf, syntax: :proto3
-
- @type t :: %__MODULE__{
- quotas: [InternalApi.Organization.Quota.t()]
- }
- defstruct [:quotas]
-
- field :quotas, 1, repeated: true, type: InternalApi.Organization.Quota
-end
-
-defmodule InternalApi.Organization.UpdateQuotasRequest do
- @moduledoc false
- use Protobuf, syntax: :proto3
-
- @type t :: %__MODULE__{
- org_id: String.t(),
- quotas: [InternalApi.Organization.Quota.t()]
- }
- defstruct [:org_id, :quotas]
-
- field :org_id, 1, type: :string
- field :quotas, 2, repeated: true, type: InternalApi.Organization.Quota
-end
-
-defmodule InternalApi.Organization.UpdateQuotasResponse do
- @moduledoc false
- use Protobuf, syntax: :proto3
-
- @type t :: %__MODULE__{
- quotas: [InternalApi.Organization.Quota.t()]
- }
- defstruct [:quotas]
-
- field :quotas, 1, repeated: true, type: InternalApi.Organization.Quota
+ field(:key, 1, type: :string)
+ field(:value, 2, type: :string)
end
defmodule InternalApi.Organization.RepositoryIntegratorsRequest do
@@ -770,7 +680,7 @@ defmodule InternalApi.Organization.RepositoryIntegratorsRequest do
}
defstruct [:org_id]
- field :org_id, 1, type: :string
+ field(:org_id, 1, type: :string)
end
defmodule InternalApi.Organization.RepositoryIntegratorsResponse do
@@ -784,17 +694,19 @@ defmodule InternalApi.Organization.RepositoryIntegratorsResponse do
}
defstruct [:primary, :enabled, :available]
- field :primary, 1, type: InternalApi.RepositoryIntegrator.IntegrationType, enum: true
+ field(:primary, 1, type: InternalApi.RepositoryIntegrator.IntegrationType, enum: true)
- field :enabled, 2,
+ field(:enabled, 2,
repeated: true,
type: InternalApi.RepositoryIntegrator.IntegrationType,
enum: true
+ )
- field :available, 3,
+ field(:available, 3,
repeated: true,
type: InternalApi.RepositoryIntegrator.IntegrationType,
enum: true
+ )
end
defmodule InternalApi.Organization.FetchOrganizationContactsRequest do
@@ -806,7 +718,7 @@ defmodule InternalApi.Organization.FetchOrganizationContactsRequest do
}
defstruct [:org_id]
- field :org_id, 1, type: :string
+ field(:org_id, 1, type: :string)
end
defmodule InternalApi.Organization.FetchOrganizationContactsResponse do
@@ -818,7 +730,7 @@ defmodule InternalApi.Organization.FetchOrganizationContactsResponse do
}
defstruct [:org_contacts]
- field :org_contacts, 1, repeated: true, type: InternalApi.Organization.OrganizationContact
+ field(:org_contacts, 1, repeated: true, type: InternalApi.Organization.OrganizationContact)
end
defmodule InternalApi.Organization.ModifyOrganizationContactRequest do
@@ -830,7 +742,7 @@ defmodule InternalApi.Organization.ModifyOrganizationContactRequest do
}
defstruct [:org_contact]
- field :org_contact, 1, type: InternalApi.Organization.OrganizationContact
+ field(:org_contact, 1, type: InternalApi.Organization.OrganizationContact)
end
defmodule InternalApi.Organization.ModifyOrganizationContactResponse do
@@ -853,21 +765,21 @@ defmodule InternalApi.Organization.OrganizationContact do
}
defstruct [:org_id, :type, :name, :email, :phone]
- field :org_id, 1, type: :string
- field :type, 2, type: InternalApi.Organization.OrganizationContact.ContactType, enum: true
- field :name, 3, type: :string
- field :email, 4, type: :string
- field :phone, 5, type: :string
+ field(:org_id, 1, type: :string)
+ field(:type, 2, type: InternalApi.Organization.OrganizationContact.ContactType, enum: true)
+ field(:name, 3, type: :string)
+ field(:email, 4, type: :string)
+ field(:phone, 5, type: :string)
end
defmodule InternalApi.Organization.OrganizationContact.ContactType do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :CONTACT_TYPE_UNSPECIFIED, 0
- field :CONTACT_TYPE_MAIN, 1
- field :CONTACT_TYPE_FINANCES, 2
- field :CONTACT_TYPE_SECURITY, 3
+ field(:CONTACT_TYPE_UNSPECIFIED, 0)
+ field(:CONTACT_TYPE_MAIN, 1)
+ field(:CONTACT_TYPE_FINANCES, 2)
+ field(:CONTACT_TYPE_SECURITY, 3)
end
defmodule InternalApi.Organization.FetchOrganizationSettingsRequest do
@@ -879,7 +791,7 @@ defmodule InternalApi.Organization.FetchOrganizationSettingsRequest do
}
defstruct [:org_id]
- field :org_id, 1, type: :string
+ field(:org_id, 1, type: :string)
end
defmodule InternalApi.Organization.FetchOrganizationSettingsResponse do
@@ -891,7 +803,7 @@ defmodule InternalApi.Organization.FetchOrganizationSettingsResponse do
}
defstruct [:settings]
- field :settings, 1, repeated: true, type: InternalApi.Organization.OrganizationSetting
+ field(:settings, 1, repeated: true, type: InternalApi.Organization.OrganizationSetting)
end
defmodule InternalApi.Organization.ModifyOrganizationSettingsRequest do
@@ -904,8 +816,8 @@ defmodule InternalApi.Organization.ModifyOrganizationSettingsRequest do
}
defstruct [:org_id, :settings]
- field :org_id, 1, type: :string
- field :settings, 2, repeated: true, type: InternalApi.Organization.OrganizationSetting
+ field(:org_id, 1, type: :string)
+ field(:settings, 2, repeated: true, type: InternalApi.Organization.OrganizationSetting)
end
defmodule InternalApi.Organization.ModifyOrganizationSettingsResponse do
@@ -917,7 +829,7 @@ defmodule InternalApi.Organization.ModifyOrganizationSettingsResponse do
}
defstruct [:settings]
- field :settings, 1, repeated: true, type: InternalApi.Organization.OrganizationSetting
+ field(:settings, 1, repeated: true, type: InternalApi.Organization.OrganizationSetting)
end
defmodule InternalApi.Organization.OrganizationCreated do
@@ -930,8 +842,8 @@ defmodule InternalApi.Organization.OrganizationCreated do
}
defstruct [:org_id, :timestamp]
- field :org_id, 1, type: :string
- field :timestamp, 2, type: Google.Protobuf.Timestamp
+ field(:org_id, 1, type: :string)
+ field(:timestamp, 2, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.Organization.OrganizationDeleted do
@@ -944,8 +856,8 @@ defmodule InternalApi.Organization.OrganizationDeleted do
}
defstruct [:org_id, :timestamp]
- field :org_id, 1, type: :string
- field :timestamp, 2, type: Google.Protobuf.Timestamp
+ field(:org_id, 1, type: :string)
+ field(:timestamp, 2, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.Organization.OrganizationUpdated do
@@ -958,8 +870,8 @@ defmodule InternalApi.Organization.OrganizationUpdated do
}
defstruct [:org_id, :timestamp]
- field :org_id, 1, type: :string
- field :timestamp, 2, type: Google.Protobuf.Timestamp
+ field(:org_id, 1, type: :string)
+ field(:timestamp, 2, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.Organization.OrganizationBlocked do
@@ -973,9 +885,9 @@ defmodule InternalApi.Organization.OrganizationBlocked do
}
defstruct [:org_id, :timestamp, :reason]
- field :org_id, 1, type: :string
- field :timestamp, 2, type: Google.Protobuf.Timestamp
- field :reason, 3, type: InternalApi.Organization.Suspension.Reason, enum: true
+ field(:org_id, 1, type: :string)
+ field(:timestamp, 2, type: Google.Protobuf.Timestamp)
+ field(:reason, 3, type: InternalApi.Organization.Suspension.Reason, enum: true)
end
defmodule InternalApi.Organization.OrganizationSuspensionCreated do
@@ -989,9 +901,9 @@ defmodule InternalApi.Organization.OrganizationSuspensionCreated do
}
defstruct [:org_id, :timestamp, :reason]
- field :org_id, 1, type: :string
- field :timestamp, 2, type: Google.Protobuf.Timestamp
- field :reason, 3, type: InternalApi.Organization.Suspension.Reason, enum: true
+ field(:org_id, 1, type: :string)
+ field(:timestamp, 2, type: Google.Protobuf.Timestamp)
+ field(:reason, 3, type: InternalApi.Organization.Suspension.Reason, enum: true)
end
defmodule InternalApi.Organization.OrganizationSuspensionRemoved do
@@ -1005,9 +917,9 @@ defmodule InternalApi.Organization.OrganizationSuspensionRemoved do
}
defstruct [:org_id, :timestamp, :reason]
- field :org_id, 1, type: :string
- field :timestamp, 2, type: Google.Protobuf.Timestamp
- field :reason, 3, type: InternalApi.Organization.Suspension.Reason, enum: true
+ field(:org_id, 1, type: :string)
+ field(:timestamp, 2, type: Google.Protobuf.Timestamp)
+ field(:reason, 3, type: InternalApi.Organization.Suspension.Reason, enum: true)
end
defmodule InternalApi.Organization.OrganizationUnblocked do
@@ -1020,8 +932,8 @@ defmodule InternalApi.Organization.OrganizationUnblocked do
}
defstruct [:org_id, :timestamp]
- field :org_id, 1, type: :string
- field :timestamp, 2, type: Google.Protobuf.Timestamp
+ field(:org_id, 1, type: :string)
+ field(:timestamp, 2, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.Organization.OrganizationDailyUpdate do
@@ -1055,106 +967,136 @@ defmodule InternalApi.Organization.OrganizationDailyUpdate do
:timestamp
]
- field :org_id, 1, type: :string
- field :org_username, 2, type: :string
- field :org_name, 3, type: :string
- field :created_at, 4, type: Google.Protobuf.Timestamp
- field :projects_count, 5, type: :int32
- field :member_count, 6, type: :int32
- field :invited_count, 7, type: :int32
- field :owner_id, 8, type: :string
- field :owner_email, 9, type: :string
- field :owner_owned_orgs_count, 10, type: :int32
- field :timestamp, 11, type: Google.Protobuf.Timestamp
+ field(:org_id, 1, type: :string)
+ field(:org_username, 2, type: :string)
+ field(:org_name, 3, type: :string)
+ field(:created_at, 4, type: Google.Protobuf.Timestamp)
+ field(:projects_count, 5, type: :int32)
+ field(:member_count, 6, type: :int32)
+ field(:invited_count, 7, type: :int32)
+ field(:owner_id, 8, type: :string)
+ field(:owner_email, 9, type: :string)
+ field(:owner_owned_orgs_count, 10, type: :int32)
+ field(:timestamp, 11, type: Google.Protobuf.Timestamp)
end
-defmodule InternalApi.Organization.OrganizationService.Service do
+defmodule InternalApi.Organization.OrganizationRestored do
@moduledoc false
- use GRPC.Service, name: "InternalApi.Organization.OrganizationService"
-
- rpc :Describe,
- InternalApi.Organization.DescribeRequest,
- InternalApi.Organization.DescribeResponse
-
- rpc :DescribeMany,
- InternalApi.Organization.DescribeManyRequest,
- InternalApi.Organization.DescribeManyResponse
-
- rpc :List, InternalApi.Organization.ListRequest, InternalApi.Organization.ListResponse
- rpc :Create, InternalApi.Organization.CreateRequest, InternalApi.Organization.CreateResponse
-
- rpc :CreateWithQuotas,
- InternalApi.Organization.CreateWithQuotasRequest,
- InternalApi.Organization.CreateWithQuotasResponse
-
- rpc :Update, InternalApi.Organization.UpdateRequest, InternalApi.Organization.UpdateResponse
- rpc :IsValid, InternalApi.Organization.Organization, InternalApi.Organization.IsValidResponse
-
- rpc :IsMember,
- InternalApi.Organization.IsMemberRequest,
- InternalApi.Organization.IsMemberResponse
-
- rpc :IsOwner, InternalApi.Organization.IsOwnerRequest, InternalApi.Organization.IsOwnerResponse
- rpc :MakeOwner, InternalApi.Organization.MakeOwnerRequest, Google.Protobuf.Empty
- rpc :Members, InternalApi.Organization.MembersRequest, InternalApi.Organization.MembersResponse
-
- rpc :AddMember,
- InternalApi.Organization.AddMemberRequest,
- InternalApi.Organization.AddMemberResponse
-
- rpc :AddMembers,
- InternalApi.Organization.AddMembersRequest,
- InternalApi.Organization.AddMembersResponse
-
- rpc :DeleteMember,
- InternalApi.Organization.DeleteMemberRequest,
- InternalApi.Organization.DeleteMemberResponse
-
- rpc :Suspend, InternalApi.Organization.SuspendRequest, InternalApi.Organization.SuspendResponse
-
- rpc :Unsuspend,
- InternalApi.Organization.UnsuspendRequest,
- InternalApi.Organization.UnsuspendResponse
-
- rpc :Verify, InternalApi.Organization.VerifyRequest, InternalApi.Organization.Organization
-
- rpc :SetOpenSource,
- InternalApi.Organization.SetOpenSourceRequest,
- InternalApi.Organization.SetOpenSourceResponse
-
- rpc :ListSuspensions,
- InternalApi.Organization.ListSuspensionsRequest,
- InternalApi.Organization.ListSuspensionsResponse
-
- rpc :UpdateQuotas,
- InternalApi.Organization.UpdateQuotasRequest,
- InternalApi.Organization.UpdateQuotasResponse
-
- rpc :GetQuotas,
- InternalApi.Organization.GetQuotasRequest,
- InternalApi.Organization.GetQuotaResponse
-
- rpc :Destroy, InternalApi.Organization.DestroyRequest, Google.Protobuf.Empty
-
- rpc :RepositoryIntegrators,
- InternalApi.Organization.RepositoryIntegratorsRequest,
- InternalApi.Organization.RepositoryIntegratorsResponse
+ use Protobuf, syntax: :proto3
- rpc :FetchOrganizationContacts,
- InternalApi.Organization.FetchOrganizationContactsRequest,
- InternalApi.Organization.FetchOrganizationContactsResponse
+ @type t :: %__MODULE__{
+ org_id: String.t(),
+ timestamp: Google.Protobuf.Timestamp.t()
+ }
+ defstruct [:org_id, :timestamp]
- rpc :ModifyOrganizationContact,
- InternalApi.Organization.ModifyOrganizationContactRequest,
- InternalApi.Organization.ModifyOrganizationContactResponse
+ field(:org_id, 1, type: :string)
+ field(:timestamp, 2, type: Google.Protobuf.Timestamp)
+end
- rpc :FetchOrganizationSettings,
- InternalApi.Organization.FetchOrganizationSettingsRequest,
- InternalApi.Organization.FetchOrganizationSettingsResponse
+defmodule InternalApi.Organization.OrganizationService.Service do
+ @moduledoc false
+ use GRPC.Service, name: "InternalApi.Organization.OrganizationService"
- rpc :ModifyOrganizationSettings,
- InternalApi.Organization.ModifyOrganizationSettingsRequest,
- InternalApi.Organization.ModifyOrganizationSettingsResponse
+ rpc(
+ :Describe,
+ InternalApi.Organization.DescribeRequest,
+ InternalApi.Organization.DescribeResponse
+ )
+
+ rpc(
+ :DescribeMany,
+ InternalApi.Organization.DescribeManyRequest,
+ InternalApi.Organization.DescribeManyResponse
+ )
+
+ rpc(:List, InternalApi.Organization.ListRequest, InternalApi.Organization.ListResponse)
+ rpc(:Create, InternalApi.Organization.CreateRequest, InternalApi.Organization.CreateResponse)
+ rpc(:Update, InternalApi.Organization.UpdateRequest, InternalApi.Organization.UpdateResponse)
+ rpc(:IsValid, InternalApi.Organization.Organization, InternalApi.Organization.IsValidResponse)
+
+ rpc(
+ :IsMember,
+ InternalApi.Organization.IsMemberRequest,
+ InternalApi.Organization.IsMemberResponse
+ )
+
+ rpc(:IsOwner, InternalApi.Organization.IsOwnerRequest, InternalApi.Organization.IsOwnerResponse)
+ rpc(:MakeOwner, InternalApi.Organization.MakeOwnerRequest, Google.Protobuf.Empty)
+ rpc(:Members, InternalApi.Organization.MembersRequest, InternalApi.Organization.MembersResponse)
+
+ rpc(
+ :AddMember,
+ InternalApi.Organization.AddMemberRequest,
+ InternalApi.Organization.AddMemberResponse
+ )
+
+ rpc(
+ :AddMembers,
+ InternalApi.Organization.AddMembersRequest,
+ InternalApi.Organization.AddMembersResponse
+ )
+
+ rpc(
+ :DeleteMember,
+ InternalApi.Organization.DeleteMemberRequest,
+ InternalApi.Organization.DeleteMemberResponse
+ )
+
+ rpc(:Suspend, InternalApi.Organization.SuspendRequest, InternalApi.Organization.SuspendResponse)
+
+ rpc(
+ :Unsuspend,
+ InternalApi.Organization.UnsuspendRequest,
+ InternalApi.Organization.UnsuspendResponse
+ )
+
+ rpc(:Verify, InternalApi.Organization.VerifyRequest, InternalApi.Organization.Organization)
+
+ rpc(
+ :SetOpenSource,
+ InternalApi.Organization.SetOpenSourceRequest,
+ InternalApi.Organization.SetOpenSourceResponse
+ )
+
+ rpc(
+ :ListSuspensions,
+ InternalApi.Organization.ListSuspensionsRequest,
+ InternalApi.Organization.ListSuspensionsResponse
+ )
+
+ rpc(:Destroy, InternalApi.Organization.DestroyRequest, Google.Protobuf.Empty)
+ rpc(:Restore, InternalApi.Organization.RestoreRequest, Google.Protobuf.Empty)
+
+ rpc(
+ :RepositoryIntegrators,
+ InternalApi.Organization.RepositoryIntegratorsRequest,
+ InternalApi.Organization.RepositoryIntegratorsResponse
+ )
+
+ rpc(
+ :FetchOrganizationContacts,
+ InternalApi.Organization.FetchOrganizationContactsRequest,
+ InternalApi.Organization.FetchOrganizationContactsResponse
+ )
+
+ rpc(
+ :ModifyOrganizationContact,
+ InternalApi.Organization.ModifyOrganizationContactRequest,
+ InternalApi.Organization.ModifyOrganizationContactResponse
+ )
+
+ rpc(
+ :FetchOrganizationSettings,
+ InternalApi.Organization.FetchOrganizationSettingsRequest,
+ InternalApi.Organization.FetchOrganizationSettingsResponse
+ )
+
+ rpc(
+ :ModifyOrganizationSettings,
+ InternalApi.Organization.ModifyOrganizationSettingsRequest,
+ InternalApi.Organization.ModifyOrganizationSettingsResponse
+ )
end
defmodule InternalApi.Organization.OrganizationService.Stub do
diff --git a/plumber/proto/lib/internal_api/paparazzo.snapshot.pb.ex b/plumber/proto/lib/internal_api/paparazzo.snapshot.pb.ex
index 669d35960..716aed907 100644
--- a/plumber/proto/lib/internal_api/paparazzo.snapshot.pb.ex
+++ b/plumber/proto/lib/internal_api/paparazzo.snapshot.pb.ex
@@ -8,8 +8,8 @@ defmodule InternalApi.Paparazzo.PutRequest do
}
defstruct [:content, :ttl_sec]
- field :content, 2, type: :string
- field :ttl_sec, 3, type: :uint32
+ field(:content, 2, type: :string)
+ field(:ttl_sec, 3, type: :uint32)
end
defmodule InternalApi.Paparazzo.PutResponse do
@@ -22,8 +22,8 @@ defmodule InternalApi.Paparazzo.PutResponse do
}
defstruct [:status, :id]
- field :status, 1, type: Google.Rpc.Status
- field :id, 2, type: :string
+ field(:status, 1, type: Google.Rpc.Status)
+ field(:id, 2, type: :string)
end
defmodule InternalApi.Paparazzo.GetRequest do
@@ -35,7 +35,7 @@ defmodule InternalApi.Paparazzo.GetRequest do
}
defstruct [:id]
- field :id, 1, type: :string
+ field(:id, 1, type: :string)
end
defmodule InternalApi.Paparazzo.GetResponse do
@@ -48,8 +48,8 @@ defmodule InternalApi.Paparazzo.GetResponse do
}
defstruct [:status, :content]
- field :status, 1, type: Google.Rpc.Status
- field :content, 2, type: :string
+ field(:status, 1, type: Google.Rpc.Status)
+ field(:content, 2, type: :string)
end
defmodule InternalApi.Paparazzo.DeleteRequest do
@@ -61,7 +61,7 @@ defmodule InternalApi.Paparazzo.DeleteRequest do
}
defstruct [:id]
- field :id, 1, type: :string
+ field(:id, 1, type: :string)
end
defmodule InternalApi.Paparazzo.DeleteResponse do
@@ -74,8 +74,8 @@ defmodule InternalApi.Paparazzo.DeleteResponse do
}
defstruct [:status, :content]
- field :status, 1, type: Google.Rpc.Status
- field :content, 2, type: :string
+ field(:status, 1, type: Google.Rpc.Status)
+ field(:content, 2, type: :string)
end
defmodule InternalApi.Paparazzo.GetFileRequest do
@@ -88,8 +88,8 @@ defmodule InternalApi.Paparazzo.GetFileRequest do
}
defstruct [:id, :path]
- field :id, 1, type: :string
- field :path, 2, type: :string
+ field(:id, 1, type: :string)
+ field(:path, 2, type: :string)
end
defmodule InternalApi.Paparazzo.GetFileResponse do
@@ -102,18 +102,18 @@ defmodule InternalApi.Paparazzo.GetFileResponse do
}
defstruct [:status, :content]
- field :status, 1, type: Google.Rpc.Status
- field :content, 2, type: :string
+ field(:status, 1, type: Google.Rpc.Status)
+ field(:content, 2, type: :string)
end
defmodule InternalApi.Paparazzo.SnapshotService.Service do
@moduledoc false
use GRPC.Service, name: "InternalApi.Paparazzo.SnapshotService"
- rpc :Put, InternalApi.Paparazzo.PutRequest, InternalApi.Paparazzo.PutResponse
- rpc :Get, InternalApi.Paparazzo.GetRequest, InternalApi.Paparazzo.GetResponse
- rpc :Delete, InternalApi.Paparazzo.DeleteRequest, InternalApi.Paparazzo.DeleteResponse
- rpc :GetFile, InternalApi.Paparazzo.GetFileRequest, InternalApi.Paparazzo.GetFileResponse
+ rpc(:Put, InternalApi.Paparazzo.PutRequest, InternalApi.Paparazzo.PutResponse)
+ rpc(:Get, InternalApi.Paparazzo.GetRequest, InternalApi.Paparazzo.GetResponse)
+ rpc(:Delete, InternalApi.Paparazzo.DeleteRequest, InternalApi.Paparazzo.DeleteResponse)
+ rpc(:GetFile, InternalApi.Paparazzo.GetFileRequest, InternalApi.Paparazzo.GetFileResponse)
end
defmodule InternalApi.Paparazzo.SnapshotService.Stub do
diff --git a/plumber/proto/lib/internal_api/plumber.admin.pb.ex b/plumber/proto/lib/internal_api/plumber.admin.pb.ex
index dd15a5f64..f445c1c8c 100644
--- a/plumber/proto/lib/internal_api/plumber.admin.pb.ex
+++ b/plumber/proto/lib/internal_api/plumber.admin.pb.ex
@@ -7,7 +7,7 @@ defmodule InternalApi.Plumber.GetYamlRequest do
}
defstruct [:ppl_id]
- field :ppl_id, 1, type: :string
+ field(:ppl_id, 1, type: :string)
end
defmodule InternalApi.Plumber.GetYamlResponse do
@@ -20,8 +20,8 @@ defmodule InternalApi.Plumber.GetYamlResponse do
}
defstruct [:response_status, :yaml]
- field :response_status, 1, type: InternalApi.Plumber.ResponseStatus
- field :yaml, 2, type: :string
+ field(:response_status, 1, type: InternalApi.Plumber.ResponseStatus)
+ field(:yaml, 2, type: :string)
end
defmodule InternalApi.Plumber.TerminateAllRequest do
@@ -36,18 +36,18 @@ defmodule InternalApi.Plumber.TerminateAllRequest do
}
defstruct [:requester_token, :project_id, :branch_name, :reason]
- field :requester_token, 1, type: :string
- field :project_id, 2, type: :string
- field :branch_name, 3, type: :string
- field :reason, 4, type: InternalApi.Plumber.TerminateAllRequest.Reason, enum: true
+ field(:requester_token, 1, type: :string)
+ field(:project_id, 2, type: :string)
+ field(:branch_name, 3, type: :string)
+ field(:reason, 4, type: InternalApi.Plumber.TerminateAllRequest.Reason, enum: true)
end
defmodule InternalApi.Plumber.TerminateAllRequest.Reason do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :ADMIN_ACTION, 0
- field :BRANCH_DELETION, 1
+ field(:ADMIN_ACTION, 0)
+ field(:BRANCH_DELETION, 1)
end
defmodule InternalApi.Plumber.TerminateAllResponse do
@@ -59,18 +59,20 @@ defmodule InternalApi.Plumber.TerminateAllResponse do
}
defstruct [:response_status]
- field :response_status, 1, type: InternalApi.Plumber.ResponseStatus
+ field(:response_status, 1, type: InternalApi.Plumber.ResponseStatus)
end
defmodule InternalApi.Plumber.Admin.Service do
@moduledoc false
use GRPC.Service, name: "InternalApi.Plumber.Admin"
- rpc :TerminateAll,
- InternalApi.Plumber.TerminateAllRequest,
- InternalApi.Plumber.TerminateAllResponse
+ rpc(
+ :TerminateAll,
+ InternalApi.Plumber.TerminateAllRequest,
+ InternalApi.Plumber.TerminateAllResponse
+ )
- rpc :GetYaml, InternalApi.Plumber.GetYamlRequest, InternalApi.Plumber.GetYamlResponse
+ rpc(:GetYaml, InternalApi.Plumber.GetYamlRequest, InternalApi.Plumber.GetYamlResponse)
end
defmodule InternalApi.Plumber.Admin.Stub do
diff --git a/plumber/proto/lib/internal_api/pre_flight_checks_hub.pb.ex b/plumber/proto/lib/internal_api/pre_flight_checks_hub.pb.ex
index a6e58e2b0..cfa2724cf 100644
--- a/plumber/proto/lib/internal_api/pre_flight_checks_hub.pb.ex
+++ b/plumber/proto/lib/internal_api/pre_flight_checks_hub.pb.ex
@@ -8,8 +8,8 @@ defmodule InternalApi.PreFlightChecksHub.PreFlightChecks do
}
defstruct [:organization_pfc, :project_pfc]
- field :organization_pfc, 1, type: InternalApi.PreFlightChecksHub.OrganizationPFC
- field :project_pfc, 2, type: InternalApi.PreFlightChecksHub.ProjectPFC
+ field(:organization_pfc, 1, type: InternalApi.PreFlightChecksHub.OrganizationPFC)
+ field(:project_pfc, 2, type: InternalApi.PreFlightChecksHub.ProjectPFC)
end
defmodule InternalApi.PreFlightChecksHub.OrganizationPFC do
@@ -26,12 +26,12 @@ defmodule InternalApi.PreFlightChecksHub.OrganizationPFC do
}
defstruct [:commands, :secrets, :agent, :requester_id, :created_at, :updated_at]
- field :commands, 1, repeated: true, type: :string
- field :secrets, 2, repeated: true, type: :string
- field :agent, 3, type: InternalApi.PreFlightChecksHub.Agent
- field :requester_id, 4, type: :string
- field :created_at, 5, type: Google.Protobuf.Timestamp
- field :updated_at, 6, type: Google.Protobuf.Timestamp
+ field(:commands, 1, repeated: true, type: :string)
+ field(:secrets, 2, repeated: true, type: :string)
+ field(:agent, 3, type: InternalApi.PreFlightChecksHub.Agent)
+ field(:requester_id, 4, type: :string)
+ field(:created_at, 5, type: Google.Protobuf.Timestamp)
+ field(:updated_at, 6, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.PreFlightChecksHub.ProjectPFC do
@@ -48,12 +48,12 @@ defmodule InternalApi.PreFlightChecksHub.ProjectPFC do
}
defstruct [:commands, :secrets, :requester_id, :created_at, :updated_at, :agent]
- field :commands, 1, repeated: true, type: :string
- field :secrets, 2, repeated: true, type: :string
- field :requester_id, 3, type: :string
- field :created_at, 4, type: Google.Protobuf.Timestamp
- field :updated_at, 5, type: Google.Protobuf.Timestamp
- field :agent, 6, type: InternalApi.PreFlightChecksHub.Agent
+ field(:commands, 1, repeated: true, type: :string)
+ field(:secrets, 2, repeated: true, type: :string)
+ field(:requester_id, 3, type: :string)
+ field(:created_at, 4, type: Google.Protobuf.Timestamp)
+ field(:updated_at, 5, type: Google.Protobuf.Timestamp)
+ field(:agent, 6, type: InternalApi.PreFlightChecksHub.Agent)
end
defmodule InternalApi.PreFlightChecksHub.Agent do
@@ -66,8 +66,8 @@ defmodule InternalApi.PreFlightChecksHub.Agent do
}
defstruct [:machine_type, :os_image]
- field :machine_type, 1, type: :string
- field :os_image, 2, type: :string
+ field(:machine_type, 1, type: :string)
+ field(:os_image, 2, type: :string)
end
defmodule InternalApi.PreFlightChecksHub.DescribeRequest do
@@ -81,9 +81,9 @@ defmodule InternalApi.PreFlightChecksHub.DescribeRequest do
}
defstruct [:level, :organization_id, :project_id]
- field :level, 1, type: InternalApi.PreFlightChecksHub.PFCLevel, enum: true
- field :organization_id, 2, type: :string
- field :project_id, 3, type: :string
+ field(:level, 1, type: InternalApi.PreFlightChecksHub.PFCLevel, enum: true)
+ field(:organization_id, 2, type: :string)
+ field(:project_id, 3, type: :string)
end
defmodule InternalApi.PreFlightChecksHub.DescribeResponse do
@@ -96,8 +96,8 @@ defmodule InternalApi.PreFlightChecksHub.DescribeResponse do
}
defstruct [:status, :pre_flight_checks]
- field :status, 1, type: InternalApi.Status
- field :pre_flight_checks, 2, type: InternalApi.PreFlightChecksHub.PreFlightChecks
+ field(:status, 1, type: InternalApi.Status)
+ field(:pre_flight_checks, 2, type: InternalApi.PreFlightChecksHub.PreFlightChecks)
end
defmodule InternalApi.PreFlightChecksHub.ApplyRequest do
@@ -113,11 +113,11 @@ defmodule InternalApi.PreFlightChecksHub.ApplyRequest do
}
defstruct [:level, :organization_id, :project_id, :requester_id, :pre_flight_checks]
- field :level, 1, type: InternalApi.PreFlightChecksHub.PFCLevel, enum: true
- field :organization_id, 2, type: :string
- field :project_id, 3, type: :string
- field :requester_id, 4, type: :string
- field :pre_flight_checks, 5, type: InternalApi.PreFlightChecksHub.PreFlightChecks
+ field(:level, 1, type: InternalApi.PreFlightChecksHub.PFCLevel, enum: true)
+ field(:organization_id, 2, type: :string)
+ field(:project_id, 3, type: :string)
+ field(:requester_id, 4, type: :string)
+ field(:pre_flight_checks, 5, type: InternalApi.PreFlightChecksHub.PreFlightChecks)
end
defmodule InternalApi.PreFlightChecksHub.ApplyResponse do
@@ -130,8 +130,8 @@ defmodule InternalApi.PreFlightChecksHub.ApplyResponse do
}
defstruct [:status, :pre_flight_checks]
- field :status, 1, type: InternalApi.Status
- field :pre_flight_checks, 2, type: InternalApi.PreFlightChecksHub.PreFlightChecks
+ field(:status, 1, type: InternalApi.Status)
+ field(:pre_flight_checks, 2, type: InternalApi.PreFlightChecksHub.PreFlightChecks)
end
defmodule InternalApi.PreFlightChecksHub.DestroyRequest do
@@ -146,10 +146,10 @@ defmodule InternalApi.PreFlightChecksHub.DestroyRequest do
}
defstruct [:level, :organization_id, :project_id, :requester_id]
- field :level, 1, type: InternalApi.PreFlightChecksHub.PFCLevel, enum: true
- field :organization_id, 2, type: :string
- field :project_id, 3, type: :string
- field :requester_id, 4, type: :string
+ field(:level, 1, type: InternalApi.PreFlightChecksHub.PFCLevel, enum: true)
+ field(:organization_id, 2, type: :string)
+ field(:project_id, 3, type: :string)
+ field(:requester_id, 4, type: :string)
end
defmodule InternalApi.PreFlightChecksHub.DestroyResponse do
@@ -161,33 +161,39 @@ defmodule InternalApi.PreFlightChecksHub.DestroyResponse do
}
defstruct [:status]
- field :status, 1, type: InternalApi.Status
+ field(:status, 1, type: InternalApi.Status)
end
defmodule InternalApi.PreFlightChecksHub.PFCLevel do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :ORGANIZATION, 0
- field :PROJECT, 1
- field :EVERYTHING, 2
+ field(:ORGANIZATION, 0)
+ field(:PROJECT, 1)
+ field(:EVERYTHING, 2)
end
defmodule InternalApi.PreFlightChecksHub.PreFlightChecksService.Service do
@moduledoc false
use GRPC.Service, name: "InternalApi.PreFlightChecksHub.PreFlightChecksService"
- rpc :Describe,
- InternalApi.PreFlightChecksHub.DescribeRequest,
- InternalApi.PreFlightChecksHub.DescribeResponse
-
- rpc :Apply,
- InternalApi.PreFlightChecksHub.ApplyRequest,
- InternalApi.PreFlightChecksHub.ApplyResponse
-
- rpc :Destroy,
- InternalApi.PreFlightChecksHub.DestroyRequest,
- InternalApi.PreFlightChecksHub.DestroyResponse
+ rpc(
+ :Describe,
+ InternalApi.PreFlightChecksHub.DescribeRequest,
+ InternalApi.PreFlightChecksHub.DescribeResponse
+ )
+
+ rpc(
+ :Apply,
+ InternalApi.PreFlightChecksHub.ApplyRequest,
+ InternalApi.PreFlightChecksHub.ApplyResponse
+ )
+
+ rpc(
+ :Destroy,
+ InternalApi.PreFlightChecksHub.DestroyRequest,
+ InternalApi.PreFlightChecksHub.DestroyResponse
+ )
end
defmodule InternalApi.PreFlightChecksHub.PreFlightChecksService.Stub do
diff --git a/plumber/proto/lib/internal_api/projecthub.pb.ex b/plumber/proto/lib/internal_api/projecthub.pb.ex
index 12b6fc84a..8cb1ba837 100644
--- a/plumber/proto/lib/internal_api/projecthub.pb.ex
+++ b/plumber/proto/lib/internal_api/projecthub.pb.ex
@@ -11,11 +11,11 @@ defmodule InternalApi.Projecthub.RequestMeta do
}
defstruct [:api_version, :kind, :req_id, :org_id, :user_id]
- field :api_version, 1, type: :string
- field :kind, 2, type: :string
- field :req_id, 3, type: :string
- field :org_id, 4, type: :string
- field :user_id, 5, type: :string
+ field(:api_version, 1, type: :string)
+ field(:kind, 2, type: :string)
+ field(:req_id, 3, type: :string)
+ field(:org_id, 4, type: :string)
+ field(:user_id, 5, type: :string)
end
defmodule InternalApi.Projecthub.ResponseMeta do
@@ -32,12 +32,12 @@ defmodule InternalApi.Projecthub.ResponseMeta do
}
defstruct [:api_version, :kind, :req_id, :org_id, :user_id, :status]
- field :api_version, 1, type: :string
- field :kind, 2, type: :string
- field :req_id, 3, type: :string
- field :org_id, 4, type: :string
- field :user_id, 5, type: :string
- field :status, 6, type: InternalApi.Projecthub.ResponseMeta.Status
+ field(:api_version, 1, type: :string)
+ field(:kind, 2, type: :string)
+ field(:req_id, 3, type: :string)
+ field(:org_id, 4, type: :string)
+ field(:user_id, 5, type: :string)
+ field(:status, 6, type: InternalApi.Projecthub.ResponseMeta.Status)
end
defmodule InternalApi.Projecthub.ResponseMeta.Status do
@@ -50,17 +50,17 @@ defmodule InternalApi.Projecthub.ResponseMeta.Status do
}
defstruct [:code, :message]
- field :code, 1, type: InternalApi.Projecthub.ResponseMeta.Code, enum: true
- field :message, 2, type: :string
+ field(:code, 1, type: InternalApi.Projecthub.ResponseMeta.Code, enum: true)
+ field(:message, 2, type: :string)
end
defmodule InternalApi.Projecthub.ResponseMeta.Code do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :OK, 0
- field :NOT_FOUND, 2
- field :FAILED_PRECONDITION, 3
+ field(:OK, 0)
+ field(:NOT_FOUND, 2)
+ field(:FAILED_PRECONDITION, 3)
end
defmodule InternalApi.Projecthub.PaginationRequest do
@@ -73,8 +73,8 @@ defmodule InternalApi.Projecthub.PaginationRequest do
}
defstruct [:page, :page_size]
- field :page, 1, type: :int32
- field :page_size, 2, type: :int32
+ field(:page, 1, type: :int32)
+ field(:page_size, 2, type: :int32)
end
defmodule InternalApi.Projecthub.PaginationResponse do
@@ -89,10 +89,10 @@ defmodule InternalApi.Projecthub.PaginationResponse do
}
defstruct [:page_number, :page_size, :total_entries, :total_pages]
- field :page_number, 1, type: :int32
- field :page_size, 2, type: :int32
- field :total_entries, 3, type: :int32
- field :total_pages, 4, type: :int32
+ field(:page_number, 1, type: :int32)
+ field(:page_size, 2, type: :int32)
+ field(:total_entries, 3, type: :int32)
+ field(:total_pages, 4, type: :int32)
end
defmodule InternalApi.Projecthub.Project do
@@ -106,9 +106,9 @@ defmodule InternalApi.Projecthub.Project do
}
defstruct [:metadata, :spec, :status]
- field :metadata, 1, type: InternalApi.Projecthub.Project.Metadata
- field :spec, 2, type: InternalApi.Projecthub.Project.Spec
- field :status, 3, type: InternalApi.Projecthub.Project.Status
+ field(:metadata, 1, type: InternalApi.Projecthub.Project.Metadata)
+ field(:spec, 2, type: InternalApi.Projecthub.Project.Spec)
+ field(:status, 3, type: InternalApi.Projecthub.Project.Status)
end
defmodule InternalApi.Projecthub.Project.Metadata do
@@ -125,12 +125,12 @@ defmodule InternalApi.Projecthub.Project.Metadata do
}
defstruct [:name, :id, :owner_id, :org_id, :description, :created_at]
- field :name, 1, type: :string
- field :id, 2, type: :string
- field :owner_id, 3, type: :string
- field :org_id, 4, type: :string
- field :description, 5, type: :string
- field :created_at, 6, type: Google.Protobuf.Timestamp
+ field(:name, 1, type: :string)
+ field(:id, 2, type: :string)
+ field(:owner_id, 3, type: :string)
+ field(:org_id, 4, type: :string)
+ field(:description, 5, type: :string)
+ field(:created_at, 6, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.Projecthub.Project.Spec do
@@ -166,27 +166,29 @@ defmodule InternalApi.Projecthub.Project.Spec do
:tasks
]
- field :repository, 1, type: InternalApi.Projecthub.Project.Spec.Repository
- field :schedulers, 2, repeated: true, type: InternalApi.Projecthub.Project.Spec.Scheduler
- field :private, 3, type: :bool
- field :public, 4, type: :bool
- field :visibility, 5, type: InternalApi.Projecthub.Project.Spec.Visibility, enum: true
+ field(:repository, 1, type: InternalApi.Projecthub.Project.Spec.Repository)
+ field(:schedulers, 2, repeated: true, type: InternalApi.Projecthub.Project.Spec.Scheduler)
+ field(:private, 3, type: :bool)
+ field(:public, 4, type: :bool)
+ field(:visibility, 5, type: InternalApi.Projecthub.Project.Spec.Visibility, enum: true)
- field :debug_permissions, 6,
+ field(:debug_permissions, 6,
repeated: true,
type: InternalApi.Projecthub.Project.Spec.PermissionType,
enum: true
+ )
- field :attach_permissions, 7,
+ field(:attach_permissions, 7,
repeated: true,
type: InternalApi.Projecthub.Project.Spec.PermissionType,
enum: true
+ )
- field :custom_permissions, 8, type: :bool
- field :artifact_store_id, 9, type: :string
- field :cache_id, 10, type: :string
- field :docker_registry_id, 11, type: :string
- field :tasks, 12, repeated: true, type: InternalApi.Projecthub.Project.Spec.Task
+ field(:custom_permissions, 8, type: :bool)
+ field(:artifact_store_id, 9, type: :string)
+ field(:cache_id, 10, type: :string)
+ field(:docker_registry_id, 11, type: :string)
+ field(:tasks, 12, repeated: true, type: InternalApi.Projecthub.Project.Spec.Task)
end
defmodule InternalApi.Projecthub.Project.Spec.Repository do
@@ -227,28 +229,30 @@ defmodule InternalApi.Projecthub.Project.Spec.Repository do
:default_branch
]
- oneof :run_present, 0
- field :url, 1, type: :string
- field :name, 2, type: :string
- field :owner, 3, type: :string
+ oneof(:run_present, 0)
+ field(:url, 1, type: :string)
+ field(:name, 2, type: :string)
+ field(:owner, 3, type: :string)
- field :run_on, 4,
+ field(:run_on, 4,
repeated: true,
type: InternalApi.Projecthub.Project.Spec.Repository.RunType,
enum: true
+ )
- field :forked_pull_requests, 5,
+ field(:forked_pull_requests, 5,
type: InternalApi.Projecthub.Project.Spec.Repository.ForkedPullRequests
+ )
- field :run, 6, type: :bool, oneof: 0
- field :pipeline_file, 7, type: :string
- field :status, 8, type: InternalApi.Projecthub.Project.Spec.Repository.Status
- field :whitelist, 9, type: InternalApi.Projecthub.Project.Spec.Repository.Whitelist
- field :public, 10, type: :bool
- field :integration_type, 11, type: InternalApi.RepositoryIntegrator.IntegrationType, enum: true
- field :connected, 12, type: :bool
- field :id, 13, type: :string
- field :default_branch, 14, type: :string
+ field(:run, 6, type: :bool, oneof: 0)
+ field(:pipeline_file, 7, type: :string)
+ field(:status, 8, type: InternalApi.Projecthub.Project.Spec.Repository.Status)
+ field(:whitelist, 9, type: InternalApi.Projecthub.Project.Spec.Repository.Whitelist)
+ field(:public, 10, type: :bool)
+ field(:integration_type, 11, type: InternalApi.RepositoryIntegrator.IntegrationType, enum: true)
+ field(:connected, 12, type: :bool)
+ field(:id, 13, type: :string)
+ field(:default_branch, 14, type: :string)
end
defmodule InternalApi.Projecthub.Project.Spec.Repository.ForkedPullRequests do
@@ -261,8 +265,8 @@ defmodule InternalApi.Projecthub.Project.Spec.Repository.ForkedPullRequests do
}
defstruct [:allowed_secrets, :allowed_contributors]
- field :allowed_secrets, 1, repeated: true, type: :string
- field :allowed_contributors, 2, repeated: true, type: :string
+ field(:allowed_secrets, 1, repeated: true, type: :string)
+ field(:allowed_contributors, 2, repeated: true, type: :string)
end
defmodule InternalApi.Projecthub.Project.Spec.Repository.Status do
@@ -274,9 +278,10 @@ defmodule InternalApi.Projecthub.Project.Spec.Repository.Status do
}
defstruct [:pipeline_files]
- field :pipeline_files, 1,
+ field(:pipeline_files, 1,
repeated: true,
type: InternalApi.Projecthub.Project.Spec.Repository.Status.PipelineFile
+ )
end
defmodule InternalApi.Projecthub.Project.Spec.Repository.Status.PipelineFile do
@@ -289,19 +294,20 @@ defmodule InternalApi.Projecthub.Project.Spec.Repository.Status.PipelineFile do
}
defstruct [:path, :level]
- field :path, 1, type: :string
+ field(:path, 1, type: :string)
- field :level, 2,
+ field(:level, 2,
type: InternalApi.Projecthub.Project.Spec.Repository.Status.PipelineFile.Level,
enum: true
+ )
end
defmodule InternalApi.Projecthub.Project.Spec.Repository.Status.PipelineFile.Level do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :BLOCK, 0
- field :PIPELINE, 1
+ field(:BLOCK, 0)
+ field(:PIPELINE, 1)
end
defmodule InternalApi.Projecthub.Project.Spec.Repository.Whitelist do
@@ -314,18 +320,19 @@ defmodule InternalApi.Projecthub.Project.Spec.Repository.Whitelist do
}
defstruct [:branches, :tags]
- field :branches, 1, repeated: true, type: :string
- field :tags, 2, repeated: true, type: :string
+ field(:branches, 1, repeated: true, type: :string)
+ field(:tags, 2, repeated: true, type: :string)
end
defmodule InternalApi.Projecthub.Project.Spec.Repository.RunType do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :BRANCHES, 0
- field :TAGS, 1
- field :PULL_REQUESTS, 2
- field :FORKED_PULL_REQUESTS, 3
+ field(:BRANCHES, 0)
+ field(:TAGS, 1)
+ field(:PULL_REQUESTS, 2)
+ field(:FORKED_PULL_REQUESTS, 3)
+ field(:DRAFT_PULL_REQUESTS, 4)
end
defmodule InternalApi.Projecthub.Project.Spec.Scheduler do
@@ -342,21 +349,21 @@ defmodule InternalApi.Projecthub.Project.Spec.Scheduler do
}
defstruct [:id, :name, :branch, :at, :pipeline_file, :status]
- field :id, 1, type: :string
- field :name, 2, type: :string
- field :branch, 3, type: :string
- field :at, 4, type: :string
- field :pipeline_file, 5, type: :string
- field :status, 6, type: InternalApi.Projecthub.Project.Spec.Scheduler.Status, enum: true
+ field(:id, 1, type: :string)
+ field(:name, 2, type: :string)
+ field(:branch, 3, type: :string)
+ field(:at, 4, type: :string)
+ field(:pipeline_file, 5, type: :string)
+ field(:status, 6, type: InternalApi.Projecthub.Project.Spec.Scheduler.Status, enum: true)
end
defmodule InternalApi.Projecthub.Project.Spec.Scheduler.Status do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :STATUS_UNSPECIFIED, 0
- field :STATUS_INACTIVE, 1
- field :STATUS_ACTIVE, 2
+ field(:STATUS_UNSPECIFIED, 0)
+ field(:STATUS_INACTIVE, 1)
+ field(:STATUS_ACTIVE, 2)
end
defmodule InternalApi.Projecthub.Project.Spec.Task do
@@ -386,15 +393,15 @@ defmodule InternalApi.Projecthub.Project.Spec.Task do
:description
]
- field :id, 1, type: :string
- field :name, 2, type: :string
- field :branch, 3, type: :string
- field :at, 4, type: :string
- field :pipeline_file, 5, type: :string
- field :status, 6, type: InternalApi.Projecthub.Project.Spec.Task.Status, enum: true
- field :recurring, 7, type: :bool
- field :parameters, 8, repeated: true, type: InternalApi.Projecthub.Project.Spec.Task.Parameter
- field :description, 9, type: :string
+ field(:id, 1, type: :string)
+ field(:name, 2, type: :string)
+ field(:branch, 3, type: :string)
+ field(:at, 4, type: :string)
+ field(:pipeline_file, 5, type: :string)
+ field(:status, 6, type: InternalApi.Projecthub.Project.Spec.Task.Status, enum: true)
+ field(:recurring, 7, type: :bool)
+ field(:parameters, 8, repeated: true, type: InternalApi.Projecthub.Project.Spec.Task.Parameter)
+ field(:description, 9, type: :string)
end
defmodule InternalApi.Projecthub.Project.Spec.Task.Parameter do
@@ -410,40 +417,40 @@ defmodule InternalApi.Projecthub.Project.Spec.Task.Parameter do
}
defstruct [:name, :required, :description, :default_value, :options]
- field :name, 1, type: :string
- field :required, 2, type: :bool
- field :description, 3, type: :string
- field :default_value, 4, type: :string
- field :options, 5, repeated: true, type: :string
+ field(:name, 1, type: :string)
+ field(:required, 2, type: :bool)
+ field(:description, 3, type: :string)
+ field(:default_value, 4, type: :string)
+ field(:options, 5, repeated: true, type: :string)
end
defmodule InternalApi.Projecthub.Project.Spec.Task.Status do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :STATUS_UNSPECIFIED, 0
- field :STATUS_INACTIVE, 1
- field :STATUS_ACTIVE, 2
+ field(:STATUS_UNSPECIFIED, 0)
+ field(:STATUS_INACTIVE, 1)
+ field(:STATUS_ACTIVE, 2)
end
defmodule InternalApi.Projecthub.Project.Spec.Visibility do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :PRIVATE, 0
- field :PUBLIC, 1
+ field(:PRIVATE, 0)
+ field(:PUBLIC, 1)
end
defmodule InternalApi.Projecthub.Project.Spec.PermissionType do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :EMPTY, 0
- field :DEFAULT_BRANCH, 1
- field :NON_DEFAULT_BRANCH, 2
- field :PULL_REQUEST, 3
- field :FORKED_PULL_REQUEST, 4
- field :TAG, 5
+ field(:EMPTY, 0)
+ field(:DEFAULT_BRANCH, 1)
+ field(:NON_DEFAULT_BRANCH, 2)
+ field(:PULL_REQUEST, 3)
+ field(:FORKED_PULL_REQUEST, 4)
+ field(:TAG, 5)
end
defmodule InternalApi.Projecthub.Project.Status do
@@ -461,13 +468,13 @@ defmodule InternalApi.Projecthub.Project.Status do
}
defstruct [:state, :state_reason, :cache, :artifact_store, :repository, :analysis, :permissions]
- field :state, 1, type: InternalApi.Projecthub.Project.Status.State, enum: true
- field :state_reason, 2, type: :string
- field :cache, 3, type: InternalApi.Projecthub.Project.Status.Cache
- field :artifact_store, 4, type: InternalApi.Projecthub.Project.Status.ArtifactStore
- field :repository, 5, type: InternalApi.Projecthub.Project.Status.Repository
- field :analysis, 6, type: InternalApi.Projecthub.Project.Status.Analysis
- field :permissions, 7, type: InternalApi.Projecthub.Project.Status.Permissions
+ field(:state, 1, type: InternalApi.Projecthub.Project.Status.State, enum: true)
+ field(:state_reason, 2, type: :string)
+ field(:cache, 3, type: InternalApi.Projecthub.Project.Status.Cache)
+ field(:artifact_store, 4, type: InternalApi.Projecthub.Project.Status.ArtifactStore)
+ field(:repository, 5, type: InternalApi.Projecthub.Project.Status.Repository)
+ field(:analysis, 6, type: InternalApi.Projecthub.Project.Status.Analysis)
+ field(:permissions, 7, type: InternalApi.Projecthub.Project.Status.Permissions)
end
defmodule InternalApi.Projecthub.Project.Status.Cache do
@@ -479,7 +486,7 @@ defmodule InternalApi.Projecthub.Project.Status.Cache do
}
defstruct [:state]
- field :state, 1, type: InternalApi.Projecthub.Project.Status.State, enum: true
+ field(:state, 1, type: InternalApi.Projecthub.Project.Status.State, enum: true)
end
defmodule InternalApi.Projecthub.Project.Status.ArtifactStore do
@@ -491,7 +498,7 @@ defmodule InternalApi.Projecthub.Project.Status.ArtifactStore do
}
defstruct [:state]
- field :state, 1, type: InternalApi.Projecthub.Project.Status.State, enum: true
+ field(:state, 1, type: InternalApi.Projecthub.Project.Status.State, enum: true)
end
defmodule InternalApi.Projecthub.Project.Status.Repository do
@@ -503,7 +510,7 @@ defmodule InternalApi.Projecthub.Project.Status.Repository do
}
defstruct [:state]
- field :state, 1, type: InternalApi.Projecthub.Project.Status.State, enum: true
+ field(:state, 1, type: InternalApi.Projecthub.Project.Status.State, enum: true)
end
defmodule InternalApi.Projecthub.Project.Status.Analysis do
@@ -515,7 +522,7 @@ defmodule InternalApi.Projecthub.Project.Status.Analysis do
}
defstruct [:state]
- field :state, 1, type: InternalApi.Projecthub.Project.Status.State, enum: true
+ field(:state, 1, type: InternalApi.Projecthub.Project.Status.State, enum: true)
end
defmodule InternalApi.Projecthub.Project.Status.Permissions do
@@ -527,16 +534,17 @@ defmodule InternalApi.Projecthub.Project.Status.Permissions do
}
defstruct [:state]
- field :state, 1, type: InternalApi.Projecthub.Project.Status.State, enum: true
+ field(:state, 1, type: InternalApi.Projecthub.Project.Status.State, enum: true)
end
defmodule InternalApi.Projecthub.Project.Status.State do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :INITIALIZING, 0
- field :READY, 1
- field :ERROR, 2
+ field(:INITIALIZING, 0)
+ field(:READY, 1)
+ field(:ERROR, 2)
+ field(:ONBOARDING, 3)
end
defmodule InternalApi.Projecthub.ListRequest do
@@ -547,14 +555,16 @@ defmodule InternalApi.Projecthub.ListRequest do
metadata: InternalApi.Projecthub.RequestMeta.t(),
pagination: InternalApi.Projecthub.PaginationRequest.t(),
owner_id: String.t(),
- repo_url: String.t()
+ repo_url: String.t(),
+ soft_deleted: boolean
}
- defstruct [:metadata, :pagination, :owner_id, :repo_url]
+ defstruct [:metadata, :pagination, :owner_id, :repo_url, :soft_deleted]
- field :metadata, 1, type: InternalApi.Projecthub.RequestMeta
- field :pagination, 2, type: InternalApi.Projecthub.PaginationRequest
- field :owner_id, 3, type: :string
- field :repo_url, 4, type: :string
+ field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
+ field(:pagination, 2, type: InternalApi.Projecthub.PaginationRequest)
+ field(:owner_id, 3, type: :string)
+ field(:repo_url, 4, type: :string)
+ field(:soft_deleted, 5, type: :bool)
end
defmodule InternalApi.Projecthub.ListResponse do
@@ -568,9 +578,9 @@ defmodule InternalApi.Projecthub.ListResponse do
}
defstruct [:metadata, :pagination, :projects]
- field :metadata, 1, type: InternalApi.Projecthub.ResponseMeta
- field :pagination, 2, type: InternalApi.Projecthub.PaginationResponse
- field :projects, 3, repeated: true, type: InternalApi.Projecthub.Project
+ field(:metadata, 1, type: InternalApi.Projecthub.ResponseMeta)
+ field(:pagination, 2, type: InternalApi.Projecthub.PaginationResponse)
+ field(:projects, 3, repeated: true, type: InternalApi.Projecthub.Project)
end
defmodule InternalApi.Projecthub.ListKeysetRequest do
@@ -588,21 +598,21 @@ defmodule InternalApi.Projecthub.ListKeysetRequest do
}
defstruct [:metadata, :page_size, :page_token, :direction, :owner_id, :repo_url, :created_after]
- field :metadata, 1, type: InternalApi.Projecthub.RequestMeta
- field :page_size, 2, type: :int32
- field :page_token, 3, type: :string
- field :direction, 4, type: InternalApi.Projecthub.ListKeysetRequest.Direction, enum: true
- field :owner_id, 5, type: :string
- field :repo_url, 6, type: :string
- field :created_after, 7, type: Google.Protobuf.Timestamp
+ field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
+ field(:page_size, 2, type: :int32)
+ field(:page_token, 3, type: :string)
+ field(:direction, 4, type: InternalApi.Projecthub.ListKeysetRequest.Direction, enum: true)
+ field(:owner_id, 5, type: :string)
+ field(:repo_url, 6, type: :string)
+ field(:created_after, 7, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.Projecthub.ListKeysetRequest.Direction do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :NEXT, 0
- field :PREVIOUS, 1
+ field(:NEXT, 0)
+ field(:PREVIOUS, 1)
end
defmodule InternalApi.Projecthub.ListKeysetResponse do
@@ -617,10 +627,10 @@ defmodule InternalApi.Projecthub.ListKeysetResponse do
}
defstruct [:metadata, :projects, :next_page_token, :previous_page_token]
- field :metadata, 1, type: InternalApi.Projecthub.ResponseMeta
- field :projects, 2, repeated: true, type: InternalApi.Projecthub.Project
- field :next_page_token, 3, type: :string
- field :previous_page_token, 4, type: :string
+ field(:metadata, 1, type: InternalApi.Projecthub.ResponseMeta)
+ field(:projects, 2, repeated: true, type: InternalApi.Projecthub.Project)
+ field(:next_page_token, 3, type: :string)
+ field(:previous_page_token, 4, type: :string)
end
defmodule InternalApi.Projecthub.DescribeRequest do
@@ -631,14 +641,16 @@ defmodule InternalApi.Projecthub.DescribeRequest do
metadata: InternalApi.Projecthub.RequestMeta.t(),
id: String.t(),
name: String.t(),
- detailed: boolean
+ detailed: boolean,
+ soft_deleted: boolean
}
- defstruct [:metadata, :id, :name, :detailed]
+ defstruct [:metadata, :id, :name, :detailed, :soft_deleted]
- field :metadata, 1, type: InternalApi.Projecthub.RequestMeta
- field :id, 2, type: :string
- field :name, 3, type: :string
- field :detailed, 4, type: :bool
+ field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
+ field(:id, 2, type: :string)
+ field(:name, 3, type: :string)
+ field(:detailed, 4, type: :bool)
+ field(:soft_deleted, 5, type: :bool)
end
defmodule InternalApi.Projecthub.DescribeResponse do
@@ -651,8 +663,8 @@ defmodule InternalApi.Projecthub.DescribeResponse do
}
defstruct [:metadata, :project]
- field :metadata, 1, type: InternalApi.Projecthub.ResponseMeta
- field :project, 2, type: InternalApi.Projecthub.Project
+ field(:metadata, 1, type: InternalApi.Projecthub.ResponseMeta)
+ field(:project, 2, type: InternalApi.Projecthub.Project)
end
defmodule InternalApi.Projecthub.DescribeManyRequest do
@@ -661,12 +673,14 @@ defmodule InternalApi.Projecthub.DescribeManyRequest do
@type t :: %__MODULE__{
metadata: InternalApi.Projecthub.RequestMeta.t(),
- ids: [String.t()]
+ ids: [String.t()],
+ soft_deleted: boolean
}
- defstruct [:metadata, :ids]
+ defstruct [:metadata, :ids, :soft_deleted]
- field :metadata, 1, type: InternalApi.Projecthub.RequestMeta
- field :ids, 2, repeated: true, type: :string
+ field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
+ field(:ids, 2, repeated: true, type: :string)
+ field(:soft_deleted, 3, type: :bool)
end
defmodule InternalApi.Projecthub.DescribeManyResponse do
@@ -679,8 +693,8 @@ defmodule InternalApi.Projecthub.DescribeManyResponse do
}
defstruct [:metadata, :projects]
- field :metadata, 1, type: InternalApi.Projecthub.ResponseMeta
- field :projects, 2, repeated: true, type: InternalApi.Projecthub.Project
+ field(:metadata, 1, type: InternalApi.Projecthub.ResponseMeta)
+ field(:projects, 2, repeated: true, type: InternalApi.Projecthub.Project)
end
defmodule InternalApi.Projecthub.CreateRequest do
@@ -689,12 +703,14 @@ defmodule InternalApi.Projecthub.CreateRequest do
@type t :: %__MODULE__{
metadata: InternalApi.Projecthub.RequestMeta.t(),
- project: InternalApi.Projecthub.Project.t()
+ project: InternalApi.Projecthub.Project.t(),
+ skip_onboarding: boolean
}
- defstruct [:metadata, :project]
+ defstruct [:metadata, :project, :skip_onboarding]
- field :metadata, 1, type: InternalApi.Projecthub.RequestMeta
- field :project, 2, type: InternalApi.Projecthub.Project
+ field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
+ field(:project, 2, type: InternalApi.Projecthub.Project)
+ field(:skip_onboarding, 3, type: :bool)
end
defmodule InternalApi.Projecthub.CreateResponse do
@@ -707,8 +723,8 @@ defmodule InternalApi.Projecthub.CreateResponse do
}
defstruct [:metadata, :project]
- field :metadata, 1, type: InternalApi.Projecthub.ResponseMeta
- field :project, 2, type: InternalApi.Projecthub.Project
+ field(:metadata, 1, type: InternalApi.Projecthub.ResponseMeta)
+ field(:project, 2, type: InternalApi.Projecthub.Project)
end
defmodule InternalApi.Projecthub.UpdateRequest do
@@ -722,9 +738,9 @@ defmodule InternalApi.Projecthub.UpdateRequest do
}
defstruct [:metadata, :project, :omit_schedulers_and_tasks]
- field :metadata, 1, type: InternalApi.Projecthub.RequestMeta
- field :project, 2, type: InternalApi.Projecthub.Project
- field :omit_schedulers_and_tasks, 3, type: :bool
+ field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
+ field(:project, 2, type: InternalApi.Projecthub.Project)
+ field(:omit_schedulers_and_tasks, 3, type: :bool)
end
defmodule InternalApi.Projecthub.UpdateResponse do
@@ -737,8 +753,8 @@ defmodule InternalApi.Projecthub.UpdateResponse do
}
defstruct [:metadata, :project]
- field :metadata, 1, type: InternalApi.Projecthub.ResponseMeta
- field :project, 2, type: InternalApi.Projecthub.Project
+ field(:metadata, 1, type: InternalApi.Projecthub.ResponseMeta)
+ field(:project, 2, type: InternalApi.Projecthub.Project)
end
defmodule InternalApi.Projecthub.DestroyRequest do
@@ -752,9 +768,9 @@ defmodule InternalApi.Projecthub.DestroyRequest do
}
defstruct [:metadata, :id, :name]
- field :metadata, 1, type: InternalApi.Projecthub.RequestMeta
- field :id, 2, type: :string
- field :name, 3, type: :string
+ field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
+ field(:id, 2, type: :string)
+ field(:name, 3, type: :string)
end
defmodule InternalApi.Projecthub.DestroyResponse do
@@ -766,7 +782,33 @@ defmodule InternalApi.Projecthub.DestroyResponse do
}
defstruct [:metadata]
- field :metadata, 1, type: InternalApi.Projecthub.ResponseMeta
+ field(:metadata, 1, type: InternalApi.Projecthub.ResponseMeta)
+end
+
+defmodule InternalApi.Projecthub.RestoreRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ metadata: InternalApi.Projecthub.RequestMeta.t(),
+ id: String.t()
+ }
+ defstruct [:metadata, :id]
+
+ field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
+ field(:id, 2, type: :string)
+end
+
+defmodule InternalApi.Projecthub.RestoreResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ metadata: InternalApi.Projecthub.ResponseMeta.t()
+ }
+ defstruct [:metadata]
+
+ field(:metadata, 1, type: InternalApi.Projecthub.ResponseMeta)
end
defmodule InternalApi.Projecthub.UsersRequest do
@@ -779,8 +821,8 @@ defmodule InternalApi.Projecthub.UsersRequest do
}
defstruct [:metadata, :id]
- field :metadata, 1, type: InternalApi.Projecthub.RequestMeta
- field :id, 2, type: :string
+ field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
+ field(:id, 2, type: :string)
end
defmodule InternalApi.Projecthub.UsersResponse do
@@ -793,8 +835,8 @@ defmodule InternalApi.Projecthub.UsersResponse do
}
defstruct [:metadata, :users]
- field :metadata, 1, type: InternalApi.Projecthub.ResponseMeta
- field :users, 2, repeated: true, type: InternalApi.User.User
+ field(:metadata, 1, type: InternalApi.Projecthub.ResponseMeta)
+ field(:users, 2, repeated: true, type: InternalApi.User.User)
end
defmodule InternalApi.Projecthub.CheckDeployKeyRequest do
@@ -807,8 +849,8 @@ defmodule InternalApi.Projecthub.CheckDeployKeyRequest do
}
defstruct [:metadata, :id]
- field :metadata, 1, type: InternalApi.Projecthub.RequestMeta
- field :id, 2, type: :string
+ field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
+ field(:id, 2, type: :string)
end
defmodule InternalApi.Projecthub.CheckDeployKeyResponse do
@@ -821,8 +863,8 @@ defmodule InternalApi.Projecthub.CheckDeployKeyResponse do
}
defstruct [:metadata, :deploy_key]
- field :metadata, 1, type: InternalApi.Projecthub.ResponseMeta
- field :deploy_key, 2, type: InternalApi.Projecthub.CheckDeployKeyResponse.DeployKey
+ field(:metadata, 1, type: InternalApi.Projecthub.ResponseMeta)
+ field(:deploy_key, 2, type: InternalApi.Projecthub.CheckDeployKeyResponse.DeployKey)
end
defmodule InternalApi.Projecthub.CheckDeployKeyResponse.DeployKey do
@@ -832,13 +874,15 @@ defmodule InternalApi.Projecthub.CheckDeployKeyResponse.DeployKey do
@type t :: %__MODULE__{
title: String.t(),
fingerprint: String.t(),
- created_at: Google.Protobuf.Timestamp.t()
+ created_at: Google.Protobuf.Timestamp.t(),
+ public_key: String.t()
}
- defstruct [:title, :fingerprint, :created_at]
+ defstruct [:title, :fingerprint, :created_at, :public_key]
- field :title, 1, type: :string
- field :fingerprint, 2, type: :string
- field :created_at, 3, type: Google.Protobuf.Timestamp
+ field(:title, 1, type: :string)
+ field(:fingerprint, 2, type: :string)
+ field(:created_at, 3, type: Google.Protobuf.Timestamp)
+ field(:public_key, 4, type: :string)
end
defmodule InternalApi.Projecthub.RegenerateDeployKeyRequest do
@@ -851,8 +895,8 @@ defmodule InternalApi.Projecthub.RegenerateDeployKeyRequest do
}
defstruct [:metadata, :id]
- field :metadata, 1, type: InternalApi.Projecthub.RequestMeta
- field :id, 2, type: :string
+ field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
+ field(:id, 2, type: :string)
end
defmodule InternalApi.Projecthub.RegenerateDeployKeyResponse do
@@ -865,8 +909,8 @@ defmodule InternalApi.Projecthub.RegenerateDeployKeyResponse do
}
defstruct [:metadata, :deploy_key]
- field :metadata, 1, type: InternalApi.Projecthub.ResponseMeta
- field :deploy_key, 2, type: InternalApi.Projecthub.RegenerateDeployKeyResponse.DeployKey
+ field(:metadata, 1, type: InternalApi.Projecthub.ResponseMeta)
+ field(:deploy_key, 2, type: InternalApi.Projecthub.RegenerateDeployKeyResponse.DeployKey)
end
defmodule InternalApi.Projecthub.RegenerateDeployKeyResponse.DeployKey do
@@ -876,13 +920,15 @@ defmodule InternalApi.Projecthub.RegenerateDeployKeyResponse.DeployKey do
@type t :: %__MODULE__{
title: String.t(),
fingerprint: String.t(),
- created_at: Google.Protobuf.Timestamp.t()
+ created_at: Google.Protobuf.Timestamp.t(),
+ public_key: String.t()
}
- defstruct [:title, :fingerprint, :created_at]
+ defstruct [:title, :fingerprint, :created_at, :public_key]
- field :title, 1, type: :string
- field :fingerprint, 2, type: :string
- field :created_at, 3, type: Google.Protobuf.Timestamp
+ field(:title, 1, type: :string)
+ field(:fingerprint, 2, type: :string)
+ field(:created_at, 3, type: Google.Protobuf.Timestamp)
+ field(:public_key, 4, type: :string)
end
defmodule InternalApi.Projecthub.CheckWebhookRequest do
@@ -895,8 +941,8 @@ defmodule InternalApi.Projecthub.CheckWebhookRequest do
}
defstruct [:metadata, :id]
- field :metadata, 1, type: InternalApi.Projecthub.RequestMeta
- field :id, 2, type: :string
+ field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
+ field(:id, 2, type: :string)
end
defmodule InternalApi.Projecthub.CheckWebhookResponse do
@@ -909,8 +955,8 @@ defmodule InternalApi.Projecthub.CheckWebhookResponse do
}
defstruct [:metadata, :webhook]
- field :metadata, 1, type: InternalApi.Projecthub.ResponseMeta
- field :webhook, 2, type: InternalApi.Projecthub.Webhook
+ field(:metadata, 1, type: InternalApi.Projecthub.ResponseMeta)
+ field(:webhook, 2, type: InternalApi.Projecthub.Webhook)
end
defmodule InternalApi.Projecthub.RegenerateWebhookRequest do
@@ -923,8 +969,8 @@ defmodule InternalApi.Projecthub.RegenerateWebhookRequest do
}
defstruct [:metadata, :id]
- field :metadata, 1, type: InternalApi.Projecthub.RequestMeta
- field :id, 2, type: :string
+ field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
+ field(:id, 2, type: :string)
end
defmodule InternalApi.Projecthub.RegenerateWebhookResponse do
@@ -937,8 +983,8 @@ defmodule InternalApi.Projecthub.RegenerateWebhookResponse do
}
defstruct [:metadata, :webhook]
- field :metadata, 1, type: InternalApi.Projecthub.ResponseMeta
- field :webhook, 2, type: InternalApi.Projecthub.Webhook
+ field(:metadata, 1, type: InternalApi.Projecthub.ResponseMeta)
+ field(:webhook, 2, type: InternalApi.Projecthub.Webhook)
end
defmodule InternalApi.Projecthub.Webhook do
@@ -950,7 +996,7 @@ defmodule InternalApi.Projecthub.Webhook do
}
defstruct [:url]
- field :url, 1, type: :string
+ field(:url, 1, type: :string)
end
defmodule InternalApi.Projecthub.ChangeProjectOwnerRequest do
@@ -964,9 +1010,9 @@ defmodule InternalApi.Projecthub.ChangeProjectOwnerRequest do
}
defstruct [:metadata, :id, :user_id]
- field :metadata, 1, type: InternalApi.Projecthub.RequestMeta
- field :id, 2, type: :string
- field :user_id, 3, type: :string
+ field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
+ field(:id, 2, type: :string)
+ field(:user_id, 3, type: :string)
end
defmodule InternalApi.Projecthub.ChangeProjectOwnerResponse do
@@ -978,7 +1024,7 @@ defmodule InternalApi.Projecthub.ChangeProjectOwnerResponse do
}
defstruct [:metadata]
- field :metadata, 1, type: InternalApi.Projecthub.ResponseMeta
+ field(:metadata, 1, type: InternalApi.Projecthub.ResponseMeta)
end
defmodule InternalApi.Projecthub.ForkAndCreateRequest do
@@ -991,8 +1037,8 @@ defmodule InternalApi.Projecthub.ForkAndCreateRequest do
}
defstruct [:metadata, :project]
- field :metadata, 1, type: InternalApi.Projecthub.RequestMeta
- field :project, 2, type: InternalApi.Projecthub.Project
+ field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
+ field(:project, 2, type: InternalApi.Projecthub.Project)
end
defmodule InternalApi.Projecthub.ForkAndCreateResponse do
@@ -1005,8 +1051,8 @@ defmodule InternalApi.Projecthub.ForkAndCreateResponse do
}
defstruct [:metadata, :project]
- field :metadata, 1, type: InternalApi.Projecthub.ResponseMeta
- field :project, 2, type: InternalApi.Projecthub.Project
+ field(:metadata, 1, type: InternalApi.Projecthub.ResponseMeta)
+ field(:project, 2, type: InternalApi.Projecthub.Project)
end
defmodule InternalApi.Projecthub.GithubAppSwitchRequest do
@@ -1019,8 +1065,8 @@ defmodule InternalApi.Projecthub.GithubAppSwitchRequest do
}
defstruct [:metadata, :id]
- field :metadata, 1, type: InternalApi.Projecthub.RequestMeta
- field :id, 2, type: :string
+ field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
+ field(:id, 2, type: :string)
end
defmodule InternalApi.Projecthub.GithubAppSwitchResponse do
@@ -1032,7 +1078,61 @@ defmodule InternalApi.Projecthub.GithubAppSwitchResponse do
}
defstruct [:metadata]
- field :metadata, 1, type: InternalApi.Projecthub.ResponseMeta
+ field(:metadata, 1, type: InternalApi.Projecthub.ResponseMeta)
+end
+
+defmodule InternalApi.Projecthub.FinishOnboardingRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ metadata: InternalApi.Projecthub.RequestMeta.t(),
+ id: String.t()
+ }
+ defstruct [:metadata, :id]
+
+ field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
+ field(:id, 2, type: :string)
+end
+
+defmodule InternalApi.Projecthub.FinishOnboardingResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ metadata: InternalApi.Projecthub.ResponseMeta.t()
+ }
+ defstruct [:metadata]
+
+ field(:metadata, 1, type: InternalApi.Projecthub.ResponseMeta)
+end
+
+defmodule InternalApi.Projecthub.RegenerateWebhookSecretRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ metadata: InternalApi.Projecthub.RequestMeta.t(),
+ id: String.t()
+ }
+ defstruct [:metadata, :id]
+
+ field(:metadata, 1, type: InternalApi.Projecthub.RequestMeta)
+ field(:id, 2, type: :string)
+end
+
+defmodule InternalApi.Projecthub.RegenerateWebhookSecretResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ metadata: InternalApi.Projecthub.ResponseMeta.t(),
+ secret: String.t()
+ }
+ defstruct [:metadata, :secret]
+
+ field(:metadata, 1, type: InternalApi.Projecthub.ResponseMeta)
+ field(:secret, 2, type: :string)
end
defmodule InternalApi.Projecthub.ProjectCreated do
@@ -1046,9 +1146,9 @@ defmodule InternalApi.Projecthub.ProjectCreated do
}
defstruct [:project_id, :timestamp, :org_id]
- field :project_id, 1, type: :string
- field :timestamp, 2, type: Google.Protobuf.Timestamp
- field :org_id, 3, type: :string
+ field(:project_id, 1, type: :string)
+ field(:timestamp, 2, type: Google.Protobuf.Timestamp)
+ field(:org_id, 3, type: :string)
end
defmodule InternalApi.Projecthub.ProjectDeleted do
@@ -1062,9 +1162,25 @@ defmodule InternalApi.Projecthub.ProjectDeleted do
}
defstruct [:project_id, :timestamp, :org_id]
- field :project_id, 1, type: :string
- field :timestamp, 2, type: Google.Protobuf.Timestamp
- field :org_id, 3, type: :string
+ field(:project_id, 1, type: :string)
+ field(:timestamp, 2, type: Google.Protobuf.Timestamp)
+ field(:org_id, 3, type: :string)
+end
+
+defmodule InternalApi.Projecthub.ProjectRestored do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ project_id: String.t(),
+ timestamp: Google.Protobuf.Timestamp.t(),
+ org_id: String.t()
+ }
+ defstruct [:project_id, :timestamp, :org_id]
+
+ field(:project_id, 1, type: :string)
+ field(:timestamp, 2, type: Google.Protobuf.Timestamp)
+ field(:org_id, 3, type: :string)
end
defmodule InternalApi.Projecthub.ProjectUpdated do
@@ -1078,9 +1194,9 @@ defmodule InternalApi.Projecthub.ProjectUpdated do
}
defstruct [:project_id, :org_id, :timestamp]
- field :project_id, 1, type: :string
- field :org_id, 2, type: :string
- field :timestamp, 3, type: Google.Protobuf.Timestamp
+ field(:project_id, 1, type: :string)
+ field(:org_id, 2, type: :string)
+ field(:timestamp, 3, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.Projecthub.CollaboratorsChanged do
@@ -1093,58 +1209,89 @@ defmodule InternalApi.Projecthub.CollaboratorsChanged do
}
defstruct [:project_id, :timestamp]
- field :project_id, 1, type: :string
- field :timestamp, 2, type: Google.Protobuf.Timestamp
+ field(:project_id, 1, type: :string)
+ field(:timestamp, 2, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.Projecthub.ProjectService.Service do
@moduledoc false
use GRPC.Service, name: "InternalApi.Projecthub.ProjectService"
- rpc :List, InternalApi.Projecthub.ListRequest, InternalApi.Projecthub.ListResponse
-
- rpc :ListKeyset,
- InternalApi.Projecthub.ListKeysetRequest,
- InternalApi.Projecthub.ListKeysetResponse
-
- rpc :Describe, InternalApi.Projecthub.DescribeRequest, InternalApi.Projecthub.DescribeResponse
-
- rpc :DescribeMany,
- InternalApi.Projecthub.DescribeManyRequest,
- InternalApi.Projecthub.DescribeManyResponse
-
- rpc :Create, InternalApi.Projecthub.CreateRequest, InternalApi.Projecthub.CreateResponse
- rpc :Update, InternalApi.Projecthub.UpdateRequest, InternalApi.Projecthub.UpdateResponse
- rpc :Destroy, InternalApi.Projecthub.DestroyRequest, InternalApi.Projecthub.DestroyResponse
- rpc :Users, InternalApi.Projecthub.UsersRequest, InternalApi.Projecthub.UsersResponse
-
- rpc :CheckDeployKey,
- InternalApi.Projecthub.CheckDeployKeyRequest,
- InternalApi.Projecthub.CheckDeployKeyResponse
-
- rpc :RegenerateDeployKey,
- InternalApi.Projecthub.RegenerateDeployKeyRequest,
- InternalApi.Projecthub.RegenerateDeployKeyResponse
-
- rpc :CheckWebhook,
- InternalApi.Projecthub.CheckWebhookRequest,
- InternalApi.Projecthub.CheckWebhookResponse
-
- rpc :RegenerateWebhook,
- InternalApi.Projecthub.RegenerateWebhookRequest,
- InternalApi.Projecthub.RegenerateWebhookResponse
-
- rpc :ChangeProjectOwner,
- InternalApi.Projecthub.ChangeProjectOwnerRequest,
- InternalApi.Projecthub.ChangeProjectOwnerResponse
-
- rpc :ForkAndCreate,
- InternalApi.Projecthub.ForkAndCreateRequest,
- InternalApi.Projecthub.ForkAndCreateResponse
-
- rpc :GithubAppSwitch,
- InternalApi.Projecthub.GithubAppSwitchRequest,
- InternalApi.Projecthub.GithubAppSwitchResponse
+ rpc(:List, InternalApi.Projecthub.ListRequest, InternalApi.Projecthub.ListResponse)
+
+ rpc(
+ :ListKeyset,
+ InternalApi.Projecthub.ListKeysetRequest,
+ InternalApi.Projecthub.ListKeysetResponse
+ )
+
+ rpc(:Describe, InternalApi.Projecthub.DescribeRequest, InternalApi.Projecthub.DescribeResponse)
+
+ rpc(
+ :DescribeMany,
+ InternalApi.Projecthub.DescribeManyRequest,
+ InternalApi.Projecthub.DescribeManyResponse
+ )
+
+ rpc(:Create, InternalApi.Projecthub.CreateRequest, InternalApi.Projecthub.CreateResponse)
+ rpc(:Update, InternalApi.Projecthub.UpdateRequest, InternalApi.Projecthub.UpdateResponse)
+ rpc(:Destroy, InternalApi.Projecthub.DestroyRequest, InternalApi.Projecthub.DestroyResponse)
+ rpc(:Restore, InternalApi.Projecthub.RestoreRequest, InternalApi.Projecthub.RestoreResponse)
+ rpc(:Users, InternalApi.Projecthub.UsersRequest, InternalApi.Projecthub.UsersResponse)
+
+ rpc(
+ :CheckDeployKey,
+ InternalApi.Projecthub.CheckDeployKeyRequest,
+ InternalApi.Projecthub.CheckDeployKeyResponse
+ )
+
+ rpc(
+ :RegenerateDeployKey,
+ InternalApi.Projecthub.RegenerateDeployKeyRequest,
+ InternalApi.Projecthub.RegenerateDeployKeyResponse
+ )
+
+ rpc(
+ :CheckWebhook,
+ InternalApi.Projecthub.CheckWebhookRequest,
+ InternalApi.Projecthub.CheckWebhookResponse
+ )
+
+ rpc(
+ :RegenerateWebhook,
+ InternalApi.Projecthub.RegenerateWebhookRequest,
+ InternalApi.Projecthub.RegenerateWebhookResponse
+ )
+
+ rpc(
+ :RegenerateWebhookSecret,
+ InternalApi.Projecthub.RegenerateWebhookSecretRequest,
+ InternalApi.Projecthub.RegenerateWebhookSecretResponse
+ )
+
+ rpc(
+ :ChangeProjectOwner,
+ InternalApi.Projecthub.ChangeProjectOwnerRequest,
+ InternalApi.Projecthub.ChangeProjectOwnerResponse
+ )
+
+ rpc(
+ :ForkAndCreate,
+ InternalApi.Projecthub.ForkAndCreateRequest,
+ InternalApi.Projecthub.ForkAndCreateResponse
+ )
+
+ rpc(
+ :GithubAppSwitch,
+ InternalApi.Projecthub.GithubAppSwitchRequest,
+ InternalApi.Projecthub.GithubAppSwitchResponse
+ )
+
+ rpc(
+ :FinishOnboarding,
+ InternalApi.Projecthub.FinishOnboardingRequest,
+ InternalApi.Projecthub.FinishOnboardingResponse
+ )
end
defmodule InternalApi.Projecthub.ProjectService.Stub do
diff --git a/plumber/proto/lib/internal_api/repo_proxy.pb.ex b/plumber/proto/lib/internal_api/repo_proxy.pb.ex
index a73d868d7..0f0f6c774 100644
--- a/plumber/proto/lib/internal_api/repo_proxy.pb.ex
+++ b/plumber/proto/lib/internal_api/repo_proxy.pb.ex
@@ -7,7 +7,7 @@ defmodule InternalApi.RepoProxy.DescribeRequest do
}
defstruct [:hook_id]
- field :hook_id, 1, type: :string
+ field(:hook_id, 1, type: :string)
end
defmodule InternalApi.RepoProxy.DescribeResponse do
@@ -20,8 +20,8 @@ defmodule InternalApi.RepoProxy.DescribeResponse do
}
defstruct [:status, :hook]
- field :status, 1, type: InternalApi.ResponseStatus
- field :hook, 2, type: InternalApi.RepoProxy.Hook
+ field(:status, 1, type: InternalApi.ResponseStatus)
+ field(:hook, 2, type: InternalApi.RepoProxy.Hook)
end
defmodule InternalApi.RepoProxy.Hook do
@@ -79,38 +79,38 @@ defmodule InternalApi.RepoProxy.Hook do
:branch_name
]
- field :hook_id, 1, type: :string
- field :head_commit_sha, 2, type: :string
- field :commit_message, 3, type: :string
- field :commit_range, 21, type: :string
- field :commit_author, 24, type: :string
- field :repo_host_url, 4, type: :string
- field :repo_host_username, 7, type: :string
- field :repo_host_email, 8, type: :string
- field :repo_host_avatar_url, 10, type: :string
- field :repo_host_uid, 25, type: :string
- field :user_id, 9, type: :string
- field :semaphore_email, 6, type: :string
- field :repo_slug, 17, type: :string
- field :git_ref, 20, type: :string
- field :git_ref_type, 15, type: InternalApi.RepoProxy.Hook.Type, enum: true
- field :pr_slug, 18, type: :string
- field :pr_name, 12, type: :string
- field :pr_number, 13, type: :string
- field :pr_sha, 19, type: :string
- field :pr_mergeable, 22, type: :bool
- field :pr_branch_name, 23, type: :string
- field :tag_name, 14, type: :string
- field :branch_name, 16, type: :string
+ field(:hook_id, 1, type: :string)
+ field(:head_commit_sha, 2, type: :string)
+ field(:commit_message, 3, type: :string)
+ field(:commit_range, 21, type: :string)
+ field(:commit_author, 24, type: :string)
+ field(:repo_host_url, 4, type: :string)
+ field(:repo_host_username, 7, type: :string)
+ field(:repo_host_email, 8, type: :string)
+ field(:repo_host_avatar_url, 10, type: :string)
+ field(:repo_host_uid, 25, type: :string)
+ field(:user_id, 9, type: :string)
+ field(:semaphore_email, 6, type: :string)
+ field(:repo_slug, 17, type: :string)
+ field(:git_ref, 20, type: :string)
+ field(:git_ref_type, 15, type: InternalApi.RepoProxy.Hook.Type, enum: true)
+ field(:pr_slug, 18, type: :string)
+ field(:pr_name, 12, type: :string)
+ field(:pr_number, 13, type: :string)
+ field(:pr_sha, 19, type: :string)
+ field(:pr_mergeable, 22, type: :bool)
+ field(:pr_branch_name, 23, type: :string)
+ field(:tag_name, 14, type: :string)
+ field(:branch_name, 16, type: :string)
end
defmodule InternalApi.RepoProxy.Hook.Type do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :BRANCH, 0
- field :TAG, 1
- field :PR, 2
+ field(:BRANCH, 0)
+ field(:TAG, 1)
+ field(:PR, 2)
end
defmodule InternalApi.RepoProxy.DescribeManyRequest do
@@ -122,7 +122,7 @@ defmodule InternalApi.RepoProxy.DescribeManyRequest do
}
defstruct [:hook_ids]
- field :hook_ids, 1, repeated: true, type: :string
+ field(:hook_ids, 1, repeated: true, type: :string)
end
defmodule InternalApi.RepoProxy.DescribeManyResponse do
@@ -135,8 +135,8 @@ defmodule InternalApi.RepoProxy.DescribeManyResponse do
}
defstruct [:status, :hooks]
- field :status, 1, type: InternalApi.ResponseStatus
- field :hooks, 2, repeated: true, type: InternalApi.RepoProxy.Hook
+ field(:status, 1, type: InternalApi.ResponseStatus)
+ field(:hooks, 2, repeated: true, type: InternalApi.RepoProxy.Hook)
end
defmodule InternalApi.RepoProxy.ListBlockedHooksRequest do
@@ -149,8 +149,8 @@ defmodule InternalApi.RepoProxy.ListBlockedHooksRequest do
}
defstruct [:project_id, :git_ref]
- field :project_id, 1, type: :string
- field :git_ref, 2, type: :string
+ field(:project_id, 1, type: :string)
+ field(:git_ref, 2, type: :string)
end
defmodule InternalApi.RepoProxy.ListBlockedHooksResponse do
@@ -163,8 +163,8 @@ defmodule InternalApi.RepoProxy.ListBlockedHooksResponse do
}
defstruct [:status, :hooks]
- field :status, 1, type: InternalApi.ResponseStatus
- field :hooks, 2, repeated: true, type: InternalApi.RepoProxy.Hook
+ field(:status, 1, type: InternalApi.ResponseStatus)
+ field(:hooks, 2, repeated: true, type: InternalApi.RepoProxy.Hook)
end
defmodule InternalApi.RepoProxy.ScheduleBlockedHookRequest do
@@ -177,8 +177,8 @@ defmodule InternalApi.RepoProxy.ScheduleBlockedHookRequest do
}
defstruct [:hook_id, :project_id]
- field :hook_id, 1, type: :string
- field :project_id, 2, type: :string
+ field(:hook_id, 1, type: :string)
+ field(:project_id, 2, type: :string)
end
defmodule InternalApi.RepoProxy.ScheduleBlockedHookResponse do
@@ -192,9 +192,9 @@ defmodule InternalApi.RepoProxy.ScheduleBlockedHookResponse do
}
defstruct [:status, :wf_id, :ppl_id]
- field :status, 1, type: InternalApi.ResponseStatus
- field :wf_id, 2, type: :string
- field :ppl_id, 3, type: :string
+ field(:status, 1, type: InternalApi.ResponseStatus)
+ field(:wf_id, 2, type: :string)
+ field(:ppl_id, 3, type: :string)
end
defmodule InternalApi.RepoProxy.CreateRequest do
@@ -211,12 +211,12 @@ defmodule InternalApi.RepoProxy.CreateRequest do
}
defstruct [:request_token, :project_id, :requester_id, :definition_file, :triggered_by, :git]
- field :request_token, 1, type: :string
- field :project_id, 2, type: :string
- field :requester_id, 3, type: :string
- field :definition_file, 4, type: :string
- field :triggered_by, 5, type: InternalApi.PlumberWF.TriggeredBy, enum: true
- field :git, 6, type: InternalApi.RepoProxy.CreateRequest.Git
+ field(:request_token, 1, type: :string)
+ field(:project_id, 2, type: :string)
+ field(:requester_id, 3, type: :string)
+ field(:definition_file, 4, type: :string)
+ field(:triggered_by, 5, type: InternalApi.PlumberWF.TriggeredBy, enum: true)
+ field(:git, 6, type: InternalApi.RepoProxy.CreateRequest.Git)
end
defmodule InternalApi.RepoProxy.CreateRequest.Git do
@@ -229,8 +229,8 @@ defmodule InternalApi.RepoProxy.CreateRequest.Git do
}
defstruct [:reference, :commit_sha]
- field :reference, 1, type: :string
- field :commit_sha, 2, type: :string
+ field(:reference, 1, type: :string)
+ field(:commit_sha, 2, type: :string)
end
defmodule InternalApi.RepoProxy.CreateResponse do
@@ -244,9 +244,9 @@ defmodule InternalApi.RepoProxy.CreateResponse do
}
defstruct [:hook_id, :workflow_id, :pipeline_id]
- field :hook_id, 1, type: :string
- field :workflow_id, 2, type: :string
- field :pipeline_id, 3, type: :string
+ field(:hook_id, 1, type: :string)
+ field(:workflow_id, 2, type: :string)
+ field(:pipeline_id, 3, type: :string)
end
defmodule InternalApi.RepoProxy.CreateBlankRequest do
@@ -274,14 +274,14 @@ defmodule InternalApi.RepoProxy.CreateBlankRequest do
:git
]
- field :request_token, 1, type: :string
- field :project_id, 2, type: :string
- field :requester_id, 3, type: :string
- field :definition_file, 4, type: :string
- field :pipeline_id, 5, type: :string
- field :wf_id, 6, type: :string
- field :triggered_by, 7, type: InternalApi.PlumberWF.TriggeredBy, enum: true
- field :git, 8, type: InternalApi.RepoProxy.CreateBlankRequest.Git
+ field(:request_token, 1, type: :string)
+ field(:project_id, 2, type: :string)
+ field(:requester_id, 3, type: :string)
+ field(:definition_file, 4, type: :string)
+ field(:pipeline_id, 5, type: :string)
+ field(:wf_id, 6, type: :string)
+ field(:triggered_by, 7, type: InternalApi.PlumberWF.TriggeredBy, enum: true)
+ field(:git, 8, type: InternalApi.RepoProxy.CreateBlankRequest.Git)
end
defmodule InternalApi.RepoProxy.CreateBlankRequest.Git do
@@ -294,8 +294,8 @@ defmodule InternalApi.RepoProxy.CreateBlankRequest.Git do
}
defstruct [:reference, :commit_sha]
- field :reference, 1, type: :string
- field :commit_sha, 2, type: :string
+ field(:reference, 1, type: :string)
+ field(:commit_sha, 2, type: :string)
end
defmodule InternalApi.RepoProxy.CreateBlankResponse do
@@ -311,11 +311,11 @@ defmodule InternalApi.RepoProxy.CreateBlankResponse do
}
defstruct [:hook_id, :wf_id, :pipeline_id, :branch_id, :repo]
- field :hook_id, 1, type: :string
- field :wf_id, 2, type: :string
- field :pipeline_id, 3, type: :string
- field :branch_id, 4, type: :string
- field :repo, 5, type: InternalApi.RepoProxy.CreateBlankResponse.Repo
+ field(:hook_id, 1, type: :string)
+ field(:wf_id, 2, type: :string)
+ field(:pipeline_id, 3, type: :string)
+ field(:branch_id, 4, type: :string)
+ field(:repo, 5, type: InternalApi.RepoProxy.CreateBlankResponse.Repo)
end
defmodule InternalApi.RepoProxy.CreateBlankResponse.Repo do
@@ -331,36 +331,60 @@ defmodule InternalApi.RepoProxy.CreateBlankResponse.Repo do
}
defstruct [:owner, :repo_name, :branch_name, :commit_sha, :repository_id]
- field :owner, 1, type: :string
- field :repo_name, 2, type: :string
- field :branch_name, 3, type: :string
- field :commit_sha, 4, type: :string
- field :repository_id, 5, type: :string
+ field(:owner, 1, type: :string)
+ field(:repo_name, 2, type: :string)
+ field(:branch_name, 3, type: :string)
+ field(:commit_sha, 4, type: :string)
+ field(:repository_id, 5, type: :string)
end
-defmodule InternalApi.RepoProxy.RepoProxyService.Service do
+defmodule InternalApi.RepoProxy.PullRequestUnmergeable do
@moduledoc false
- use GRPC.Service, name: "InternalApi.RepoProxy.RepoProxyService"
-
- rpc :Describe, InternalApi.RepoProxy.DescribeRequest, InternalApi.RepoProxy.DescribeResponse
-
- rpc :DescribeMany,
- InternalApi.RepoProxy.DescribeManyRequest,
- InternalApi.RepoProxy.DescribeManyResponse
+ use Protobuf, syntax: :proto3
- rpc :ListBlockedHooks,
- InternalApi.RepoProxy.ListBlockedHooksRequest,
- InternalApi.RepoProxy.ListBlockedHooksResponse
+ @type t :: %__MODULE__{
+ project_id: String.t(),
+ branch_name: String.t(),
+ timestamp: Google.Protobuf.Timestamp.t()
+ }
+ defstruct [:project_id, :branch_name, :timestamp]
- rpc :ScheduleBlockedHook,
- InternalApi.RepoProxy.ScheduleBlockedHookRequest,
- InternalApi.RepoProxy.ScheduleBlockedHookResponse
+ field(:project_id, 1, type: :string)
+ field(:branch_name, 2, type: :string)
+ field(:timestamp, 3, type: Google.Protobuf.Timestamp)
+end
- rpc :Create, InternalApi.RepoProxy.CreateRequest, InternalApi.RepoProxy.CreateResponse
+defmodule InternalApi.RepoProxy.RepoProxyService.Service do
+ @moduledoc false
+ use GRPC.Service, name: "InternalApi.RepoProxy.RepoProxyService"
- rpc :CreateBlank,
- InternalApi.RepoProxy.CreateBlankRequest,
- InternalApi.RepoProxy.CreateBlankResponse
+ rpc(:Describe, InternalApi.RepoProxy.DescribeRequest, InternalApi.RepoProxy.DescribeResponse)
+
+ rpc(
+ :DescribeMany,
+ InternalApi.RepoProxy.DescribeManyRequest,
+ InternalApi.RepoProxy.DescribeManyResponse
+ )
+
+ rpc(
+ :ListBlockedHooks,
+ InternalApi.RepoProxy.ListBlockedHooksRequest,
+ InternalApi.RepoProxy.ListBlockedHooksResponse
+ )
+
+ rpc(
+ :ScheduleBlockedHook,
+ InternalApi.RepoProxy.ScheduleBlockedHookRequest,
+ InternalApi.RepoProxy.ScheduleBlockedHookResponse
+ )
+
+ rpc(:Create, InternalApi.RepoProxy.CreateRequest, InternalApi.RepoProxy.CreateResponse)
+
+ rpc(
+ :CreateBlank,
+ InternalApi.RepoProxy.CreateBlankRequest,
+ InternalApi.RepoProxy.CreateBlankResponse
+ )
end
defmodule InternalApi.RepoProxy.RepoProxyService.Stub do
diff --git a/plumber/proto/lib/internal_api/repository.pb.ex b/plumber/proto/lib/internal_api/repository.pb.ex
index ca400b59d..06aa6b023 100644
--- a/plumber/proto/lib/internal_api/repository.pb.ex
+++ b/plumber/proto/lib/internal_api/repository.pb.ex
@@ -8,8 +8,8 @@ defmodule InternalApi.Repository.DescribeRevisionRequest do
}
defstruct [:repository_id, :revision]
- field :repository_id, 1, type: :string
- field :revision, 2, type: InternalApi.Repository.Revision
+ field(:repository_id, 1, type: :string)
+ field(:revision, 2, type: InternalApi.Repository.Revision)
end
defmodule InternalApi.Repository.DescribeRevisionResponse do
@@ -21,7 +21,7 @@ defmodule InternalApi.Repository.DescribeRevisionResponse do
}
defstruct [:commit]
- field :commit, 1, type: InternalApi.Repository.Commit
+ field(:commit, 1, type: InternalApi.Repository.Commit)
end
defmodule InternalApi.Repository.Commit do
@@ -37,11 +37,11 @@ defmodule InternalApi.Repository.Commit do
}
defstruct [:sha, :msg, :author_name, :author_uuid, :author_avatar_url]
- field :sha, 1, type: :string
- field :msg, 2, type: :string
- field :author_name, 3, type: :string
- field :author_uuid, 4, type: :string
- field :author_avatar_url, 5, type: :string
+ field(:sha, 1, type: :string)
+ field(:msg, 2, type: :string)
+ field(:author_name, 3, type: :string)
+ field(:author_uuid, 4, type: :string)
+ field(:author_avatar_url, 5, type: :string)
end
defmodule InternalApi.Repository.DeployKey do
@@ -51,13 +51,15 @@ defmodule InternalApi.Repository.DeployKey do
@type t :: %__MODULE__{
title: String.t(),
fingerprint: String.t(),
- created_at: Google.Protobuf.Timestamp.t()
+ created_at: Google.Protobuf.Timestamp.t(),
+ public_key: String.t()
}
- defstruct [:title, :fingerprint, :created_at]
+ defstruct [:title, :fingerprint, :created_at, :public_key]
- field :title, 1, type: :string
- field :fingerprint, 2, type: :string
- field :created_at, 3, type: Google.Protobuf.Timestamp
+ field(:title, 1, type: :string)
+ field(:fingerprint, 2, type: :string)
+ field(:created_at, 3, type: Google.Protobuf.Timestamp)
+ field(:public_key, 4, type: :string)
end
defmodule InternalApi.Repository.DescribeRemoteRepositoryRequest do
@@ -71,9 +73,9 @@ defmodule InternalApi.Repository.DescribeRemoteRepositoryRequest do
}
defstruct [:user_id, :integration_type, :url]
- field :user_id, 1, type: :string
- field :integration_type, 2, type: InternalApi.RepositoryIntegrator.IntegrationType, enum: true
- field :url, 3, type: :string
+ field(:user_id, 1, type: :string)
+ field(:integration_type, 2, type: InternalApi.RepositoryIntegrator.IntegrationType, enum: true)
+ field(:url, 3, type: :string)
end
defmodule InternalApi.Repository.DescribeRemoteRepositoryResponse do
@@ -85,7 +87,7 @@ defmodule InternalApi.Repository.DescribeRemoteRepositoryResponse do
}
defstruct [:remote_repository]
- field :remote_repository, 1, type: InternalApi.Repository.RemoteRepository
+ field(:remote_repository, 1, type: InternalApi.Repository.RemoteRepository)
end
defmodule InternalApi.Repository.CheckDeployKeyRequest do
@@ -97,7 +99,7 @@ defmodule InternalApi.Repository.CheckDeployKeyRequest do
}
defstruct [:repository_id]
- field :repository_id, 1, type: :string
+ field(:repository_id, 1, type: :string)
end
defmodule InternalApi.Repository.CheckDeployKeyResponse do
@@ -109,7 +111,7 @@ defmodule InternalApi.Repository.CheckDeployKeyResponse do
}
defstruct [:deploy_key]
- field :deploy_key, 1, type: InternalApi.Repository.DeployKey
+ field(:deploy_key, 1, type: InternalApi.Repository.DeployKey)
end
defmodule InternalApi.Repository.RegenerateDeployKeyRequest do
@@ -121,7 +123,7 @@ defmodule InternalApi.Repository.RegenerateDeployKeyRequest do
}
defstruct [:repository_id]
- field :repository_id, 1, type: :string
+ field(:repository_id, 1, type: :string)
end
defmodule InternalApi.Repository.RegenerateDeployKeyResponse do
@@ -133,7 +135,7 @@ defmodule InternalApi.Repository.RegenerateDeployKeyResponse do
}
defstruct [:deploy_key]
- field :deploy_key, 1, type: InternalApi.Repository.DeployKey
+ field(:deploy_key, 1, type: InternalApi.Repository.DeployKey)
end
defmodule InternalApi.Repository.Webhook do
@@ -145,7 +147,7 @@ defmodule InternalApi.Repository.Webhook do
}
defstruct [:url]
- field :url, 1, type: :string
+ field(:url, 1, type: :string)
end
defmodule InternalApi.Repository.CheckWebhookRequest do
@@ -157,7 +159,7 @@ defmodule InternalApi.Repository.CheckWebhookRequest do
}
defstruct [:repository_id]
- field :repository_id, 1, type: :string
+ field(:repository_id, 1, type: :string)
end
defmodule InternalApi.Repository.CheckWebhookResponse do
@@ -169,7 +171,7 @@ defmodule InternalApi.Repository.CheckWebhookResponse do
}
defstruct [:webhook]
- field :webhook, 1, type: InternalApi.Repository.Webhook
+ field(:webhook, 1, type: InternalApi.Repository.Webhook)
end
defmodule InternalApi.Repository.RegenerateWebhookRequest do
@@ -181,7 +183,7 @@ defmodule InternalApi.Repository.RegenerateWebhookRequest do
}
defstruct [:repository_id]
- field :repository_id, 1, type: :string
+ field(:repository_id, 1, type: :string)
end
defmodule InternalApi.Repository.RegenerateWebhookResponse do
@@ -193,7 +195,7 @@ defmodule InternalApi.Repository.RegenerateWebhookResponse do
}
defstruct [:webhook]
- field :webhook, 1, type: InternalApi.Repository.Webhook
+ field(:webhook, 1, type: InternalApi.Repository.Webhook)
end
defmodule InternalApi.Repository.ForkRequest do
@@ -207,9 +209,9 @@ defmodule InternalApi.Repository.ForkRequest do
}
defstruct [:user_id, :integration_type, :url]
- field :user_id, 1, type: :string
- field :integration_type, 2, type: InternalApi.RepositoryIntegrator.IntegrationType, enum: true
- field :url, 3, type: :string
+ field(:user_id, 1, type: :string)
+ field(:integration_type, 2, type: InternalApi.RepositoryIntegrator.IntegrationType, enum: true)
+ field(:url, 3, type: :string)
end
defmodule InternalApi.Repository.ForkResponse do
@@ -221,7 +223,7 @@ defmodule InternalApi.Repository.ForkResponse do
}
defstruct [:remote_repository]
- field :remote_repository, 1, type: InternalApi.Repository.RemoteRepository
+ field(:remote_repository, 1, type: InternalApi.Repository.RemoteRepository)
end
defmodule InternalApi.Repository.ListAccessibleRepositoriesRequest do
@@ -236,10 +238,10 @@ defmodule InternalApi.Repository.ListAccessibleRepositoriesRequest do
}
defstruct [:user_id, :integration_type, :page_token, :only_public]
- field :user_id, 1, type: :string
- field :integration_type, 2, type: InternalApi.RepositoryIntegrator.IntegrationType, enum: true
- field :page_token, 3, type: :string
- field :only_public, 4, type: :bool
+ field(:user_id, 1, type: :string)
+ field(:integration_type, 2, type: InternalApi.RepositoryIntegrator.IntegrationType, enum: true)
+ field(:page_token, 3, type: :string)
+ field(:only_public, 4, type: :bool)
end
defmodule InternalApi.Repository.ListAccessibleRepositoriesResponse do
@@ -252,8 +254,8 @@ defmodule InternalApi.Repository.ListAccessibleRepositoriesResponse do
}
defstruct [:repositories, :next_page_token]
- field :repositories, 1, repeated: true, type: InternalApi.Repository.RemoteRepository
- field :next_page_token, 2, type: :string
+ field(:repositories, 1, repeated: true, type: InternalApi.Repository.RemoteRepository)
+ field(:next_page_token, 2, type: :string)
end
defmodule InternalApi.Repository.ListCollaboratorsRequest do
@@ -266,8 +268,8 @@ defmodule InternalApi.Repository.ListCollaboratorsRequest do
}
defstruct [:repository_id, :page_token]
- field :repository_id, 1, type: :string
- field :page_token, 2, type: :string
+ field(:repository_id, 1, type: :string)
+ field(:page_token, 2, type: :string)
end
defmodule InternalApi.Repository.ListCollaboratorsResponse do
@@ -280,8 +282,8 @@ defmodule InternalApi.Repository.ListCollaboratorsResponse do
}
defstruct [:collaborators, :next_page_token]
- field :collaborators, 1, repeated: true, type: InternalApi.Repository.Collaborator
- field :next_page_token, 2, type: :string
+ field(:collaborators, 1, repeated: true, type: InternalApi.Repository.Collaborator)
+ field(:next_page_token, 2, type: :string)
end
defmodule InternalApi.Repository.Collaborator do
@@ -295,18 +297,18 @@ defmodule InternalApi.Repository.Collaborator do
}
defstruct [:id, :login, :permission]
- field :id, 1, type: :string
- field :login, 2, type: :string
- field :permission, 3, type: InternalApi.Repository.Collaborator.Permission, enum: true
+ field(:id, 1, type: :string)
+ field(:login, 2, type: :string)
+ field(:permission, 3, type: InternalApi.Repository.Collaborator.Permission, enum: true)
end
defmodule InternalApi.Repository.Collaborator.Permission do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :ADMIN, 0
- field :WRITE, 1
- field :READ, 2
+ field(:ADMIN, 0)
+ field(:WRITE, 1)
+ field(:READ, 2)
end
defmodule InternalApi.Repository.CreateBuildStatusRequest do
@@ -323,22 +325,22 @@ defmodule InternalApi.Repository.CreateBuildStatusRequest do
}
defstruct [:repository_id, :commit_sha, :status, :url, :description, :context]
- field :repository_id, 1, type: :string
- field :commit_sha, 2, type: :string
- field :status, 3, type: InternalApi.Repository.CreateBuildStatusRequest.Status, enum: true
- field :url, 4, type: :string
- field :description, 5, type: :string
- field :context, 6, type: :string
+ field(:repository_id, 1, type: :string)
+ field(:commit_sha, 2, type: :string)
+ field(:status, 3, type: InternalApi.Repository.CreateBuildStatusRequest.Status, enum: true)
+ field(:url, 4, type: :string)
+ field(:description, 5, type: :string)
+ field(:context, 6, type: :string)
end
defmodule InternalApi.Repository.CreateBuildStatusRequest.Status do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :SUCCESS, 0
- field :PENDING, 1
- field :FAILURE, 2
- field :STOPPED, 3
+ field(:SUCCESS, 0)
+ field(:PENDING, 1)
+ field(:FAILURE, 2)
+ field(:STOPPED, 3)
end
defmodule InternalApi.Repository.CreateBuildStatusResponse do
@@ -350,19 +352,19 @@ defmodule InternalApi.Repository.CreateBuildStatusResponse do
}
defstruct [:code]
- field :code, 1, type: InternalApi.Repository.CreateBuildStatusResponse.Code, enum: true
+ field(:code, 1, type: InternalApi.Repository.CreateBuildStatusResponse.Code, enum: true)
end
defmodule InternalApi.Repository.CreateBuildStatusResponse.Code do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :CUSTOM, 0
- field :OK, 1
- field :VALIDATION_FAILED, 2
- field :SERVICE_ERROR, 3
- field :UNAUTHORIZED, 4
- field :ACCOUNT_SUSPENDED, 5
+ field(:CUSTOM, 0)
+ field(:OK, 1)
+ field(:VALIDATION_FAILED, 2)
+ field(:SERVICE_ERROR, 3)
+ field(:UNAUTHORIZED, 4)
+ field(:ACCOUNT_SUSPENDED, 5)
end
defmodule InternalApi.Repository.DescribeRequest do
@@ -375,8 +377,8 @@ defmodule InternalApi.Repository.DescribeRequest do
}
defstruct [:repository_id, :include_private_ssh_key]
- field :repository_id, 1, type: :string
- field :include_private_ssh_key, 2, type: :bool
+ field(:repository_id, 1, type: :string)
+ field(:include_private_ssh_key, 2, type: :bool)
end
defmodule InternalApi.Repository.DescribeResponse do
@@ -389,8 +391,8 @@ defmodule InternalApi.Repository.DescribeResponse do
}
defstruct [:repository, :private_ssh_key]
- field :repository, 1, type: InternalApi.Repository.Repository
- field :private_ssh_key, 2, type: :string
+ field(:repository, 1, type: InternalApi.Repository.Repository)
+ field(:private_ssh_key, 2, type: :string)
end
defmodule InternalApi.Repository.DescribeManyRequest do
@@ -403,8 +405,8 @@ defmodule InternalApi.Repository.DescribeManyRequest do
}
defstruct [:repository_ids, :project_ids]
- field :repository_ids, 1, repeated: true, type: :string
- field :project_ids, 2, repeated: true, type: :string
+ field(:repository_ids, 1, repeated: true, type: :string)
+ field(:project_ids, 2, repeated: true, type: :string)
end
defmodule InternalApi.Repository.DescribeManyResponse do
@@ -416,7 +418,7 @@ defmodule InternalApi.Repository.DescribeManyResponse do
}
defstruct [:repositories]
- field :repositories, 1, repeated: true, type: InternalApi.Repository.Repository
+ field(:repositories, 1, repeated: true, type: InternalApi.Repository.Repository)
end
defmodule InternalApi.Repository.ListRequest do
@@ -428,7 +430,7 @@ defmodule InternalApi.Repository.ListRequest do
}
defstruct [:project_id]
- field :project_id, 1, type: :string
+ field(:project_id, 1, type: :string)
end
defmodule InternalApi.Repository.ListResponse do
@@ -440,7 +442,7 @@ defmodule InternalApi.Repository.ListResponse do
}
defstruct [:repositories]
- field :repositories, 1, repeated: true, type: InternalApi.Repository.Repository
+ field(:repositories, 1, repeated: true, type: InternalApi.Repository.Repository)
end
defmodule InternalApi.Repository.Repository do
@@ -460,7 +462,8 @@ defmodule InternalApi.Repository.Repository do
commit_status: InternalApi.Projecthub.Project.Spec.Repository.Status.t(),
whitelist: InternalApi.Projecthub.Project.Spec.Repository.Whitelist.t(),
hook_id: String.t(),
- default_branch: String.t()
+ default_branch: String.t(),
+ connected: boolean
}
defstruct [
:id,
@@ -475,22 +478,24 @@ defmodule InternalApi.Repository.Repository do
:commit_status,
:whitelist,
:hook_id,
- :default_branch
+ :default_branch,
+ :connected
]
- field :id, 1, type: :string
- field :name, 2, type: :string
- field :owner, 3, type: :string
- field :private, 4, type: :bool
- field :provider, 5, type: :string
- field :url, 6, type: :string
- field :project_id, 7, type: :string
- field :pipeline_file, 8, type: :string
- field :integration_type, 9, type: InternalApi.RepositoryIntegrator.IntegrationType, enum: true
- field :commit_status, 10, type: InternalApi.Projecthub.Project.Spec.Repository.Status
- field :whitelist, 11, type: InternalApi.Projecthub.Project.Spec.Repository.Whitelist
- field :hook_id, 12, type: :string
- field :default_branch, 13, type: :string
+ field(:id, 1, type: :string)
+ field(:name, 2, type: :string)
+ field(:owner, 3, type: :string)
+ field(:private, 4, type: :bool)
+ field(:provider, 5, type: :string)
+ field(:url, 6, type: :string)
+ field(:project_id, 7, type: :string)
+ field(:pipeline_file, 8, type: :string)
+ field(:integration_type, 9, type: InternalApi.RepositoryIntegrator.IntegrationType, enum: true)
+ field(:commit_status, 10, type: InternalApi.Projecthub.Project.Spec.Repository.Status)
+ field(:whitelist, 11, type: InternalApi.Projecthub.Project.Spec.Repository.Whitelist)
+ field(:hook_id, 12, type: :string)
+ field(:default_branch, 13, type: :string)
+ field(:connected, 14, type: :bool)
end
defmodule InternalApi.Repository.RemoteRepository do
@@ -508,13 +513,13 @@ defmodule InternalApi.Repository.RemoteRepository do
}
defstruct [:id, :name, :description, :url, :full_name, :addable, :reason]
- field :id, 1, type: :string
- field :name, 2, type: :string
- field :description, 3, type: :string
- field :url, 4, type: :string
- field :full_name, 5, type: :string
- field :addable, 6, type: :bool
- field :reason, 7, type: :string
+ field(:id, 1, type: :string)
+ field(:name, 2, type: :string)
+ field(:description, 3, type: :string)
+ field(:url, 4, type: :string)
+ field(:full_name, 5, type: :string)
+ field(:addable, 6, type: :bool)
+ field(:reason, 7, type: :string)
end
defmodule InternalApi.Repository.Revision do
@@ -527,8 +532,8 @@ defmodule InternalApi.Repository.Revision do
}
defstruct [:commit_sha, :reference]
- field :commit_sha, 1, type: :string
- field :reference, 2, type: :string
+ field(:commit_sha, 1, type: :string)
+ field(:reference, 2, type: :string)
end
defmodule InternalApi.Repository.GetFileRequest do
@@ -542,9 +547,9 @@ defmodule InternalApi.Repository.GetFileRequest do
}
defstruct [:repository_id, :commit_sha, :path]
- field :repository_id, 1, type: :string
- field :commit_sha, 2, type: :string
- field :path, 3, type: :string
+ field(:repository_id, 1, type: :string)
+ field(:commit_sha, 2, type: :string)
+ field(:path, 3, type: :string)
end
defmodule InternalApi.Repository.GetFileResponse do
@@ -556,7 +561,7 @@ defmodule InternalApi.Repository.GetFileResponse do
}
defstruct [:file]
- field :file, 1, type: InternalApi.Repository.File
+ field(:file, 1, type: InternalApi.Repository.File)
end
defmodule InternalApi.Repository.GetFilesRequest do
@@ -571,10 +576,10 @@ defmodule InternalApi.Repository.GetFilesRequest do
}
defstruct [:repository_id, :revision, :selectors, :include_content]
- field :repository_id, 1, type: :string
- field :revision, 2, type: InternalApi.Repository.Revision
- field :selectors, 3, repeated: true, type: InternalApi.Repository.GetFilesRequest.Selector
- field :include_content, 4, type: :bool
+ field(:repository_id, 1, type: :string)
+ field(:revision, 2, type: InternalApi.Repository.Revision)
+ field(:selectors, 3, repeated: true, type: InternalApi.Repository.GetFilesRequest.Selector)
+ field(:include_content, 4, type: :bool)
end
defmodule InternalApi.Repository.GetFilesRequest.Selector do
@@ -587,8 +592,8 @@ defmodule InternalApi.Repository.GetFilesRequest.Selector do
}
defstruct [:glob, :content_regex]
- field :glob, 1, type: :string
- field :content_regex, 2, type: :string
+ field(:glob, 1, type: :string)
+ field(:content_regex, 2, type: :string)
end
defmodule InternalApi.Repository.GetFilesResponse do
@@ -600,7 +605,7 @@ defmodule InternalApi.Repository.GetFilesResponse do
}
defstruct [:files]
- field :files, 1, repeated: true, type: InternalApi.Repository.File
+ field(:files, 1, repeated: true, type: InternalApi.Repository.File)
end
defmodule InternalApi.Repository.File do
@@ -613,8 +618,8 @@ defmodule InternalApi.Repository.File do
}
defstruct [:path, :content]
- field :path, 1, type: :string
- field :content, 2, type: :string
+ field(:path, 1, type: :string)
+ field(:content, 2, type: :string)
end
defmodule InternalApi.Repository.GetChangedFilePathsRequest do
@@ -629,21 +634,22 @@ defmodule InternalApi.Repository.GetChangedFilePathsRequest do
}
defstruct [:head_rev, :base_rev, :repository_id, :comparison_type]
- field :head_rev, 1, type: InternalApi.Repository.Revision
- field :base_rev, 2, type: InternalApi.Repository.Revision
- field :repository_id, 3, type: :string
+ field(:head_rev, 1, type: InternalApi.Repository.Revision)
+ field(:base_rev, 2, type: InternalApi.Repository.Revision)
+ field(:repository_id, 3, type: :string)
- field :comparison_type, 4,
+ field(:comparison_type, 4,
type: InternalApi.Repository.GetChangedFilePathsRequest.ComparisonType,
enum: true
+ )
end
defmodule InternalApi.Repository.GetChangedFilePathsRequest.ComparisonType do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :HEAD_TO_MERGE_BASE, 0
- field :HEAD_TO_HEAD, 1
+ field(:HEAD_TO_MERGE_BASE, 0)
+ field(:HEAD_TO_HEAD, 1)
end
defmodule InternalApi.Repository.GetChangedFilePathsResponse do
@@ -655,7 +661,7 @@ defmodule InternalApi.Repository.GetChangedFilePathsResponse do
}
defstruct [:changed_file_paths]
- field :changed_file_paths, 1, repeated: true, type: :string
+ field(:changed_file_paths, 1, repeated: true, type: :string)
end
defmodule InternalApi.Repository.GetSshKeyRequest do
@@ -667,7 +673,7 @@ defmodule InternalApi.Repository.GetSshKeyRequest do
}
defstruct [:repository_id]
- field :repository_id, 1, type: :string
+ field(:repository_id, 1, type: :string)
end
defmodule InternalApi.Repository.GetSshKeyResponse do
@@ -679,7 +685,7 @@ defmodule InternalApi.Repository.GetSshKeyResponse do
}
defstruct [:private_ssh_key]
- field :private_ssh_key, 1, type: :string
+ field(:private_ssh_key, 1, type: :string)
end
defmodule InternalApi.Repository.CommitRequest do
@@ -695,11 +701,11 @@ defmodule InternalApi.Repository.CommitRequest do
}
defstruct [:repository_id, :user_id, :branch_name, :commit_message, :changes]
- field :repository_id, 1, type: :string
- field :user_id, 2, type: :string
- field :branch_name, 3, type: :string
- field :commit_message, 4, type: :string
- field :changes, 5, repeated: true, type: InternalApi.Repository.CommitRequest.Change
+ field(:repository_id, 1, type: :string)
+ field(:user_id, 2, type: :string)
+ field(:branch_name, 3, type: :string)
+ field(:commit_message, 4, type: :string)
+ field(:changes, 5, repeated: true, type: InternalApi.Repository.CommitRequest.Change)
end
defmodule InternalApi.Repository.CommitRequest.Change do
@@ -712,17 +718,17 @@ defmodule InternalApi.Repository.CommitRequest.Change do
}
defstruct [:action, :file]
- field :action, 1, type: InternalApi.Repository.CommitRequest.Change.Action, enum: true
- field :file, 2, type: InternalApi.Repository.File
+ field(:action, 1, type: InternalApi.Repository.CommitRequest.Change.Action, enum: true)
+ field(:file, 2, type: InternalApi.Repository.File)
end
defmodule InternalApi.Repository.CommitRequest.Change.Action do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :ADD_FILE, 0
- field :MODIFY_FILE, 1
- field :DELETE_FILE, 2
+ field(:ADD_FILE, 0)
+ field(:MODIFY_FILE, 1)
+ field(:DELETE_FILE, 2)
end
defmodule InternalApi.Repository.CommitResponse do
@@ -734,7 +740,7 @@ defmodule InternalApi.Repository.CommitResponse do
}
defstruct [:revision]
- field :revision, 1, type: InternalApi.Repository.Revision
+ field(:revision, 1, type: InternalApi.Repository.Revision)
end
defmodule InternalApi.Repository.CreateRequest do
@@ -766,16 +772,16 @@ defmodule InternalApi.Repository.CreateRequest do
:default_branch
]
- field :project_id, 1, type: :string
- field :user_id, 2, type: :string
- field :pipeline_file, 3, type: :string
- field :repository_url, 4, type: :string
- field :request_id, 5, type: :string
- field :only_public, 6, type: :bool
- field :integration_type, 7, type: InternalApi.RepositoryIntegrator.IntegrationType, enum: true
- field :commit_status, 8, type: InternalApi.Projecthub.Project.Spec.Repository.Status
- field :whitelist, 9, type: InternalApi.Projecthub.Project.Spec.Repository.Whitelist
- field :default_branch, 10, type: :string
+ field(:project_id, 1, type: :string)
+ field(:user_id, 2, type: :string)
+ field(:pipeline_file, 3, type: :string)
+ field(:repository_url, 4, type: :string)
+ field(:request_id, 5, type: :string)
+ field(:only_public, 6, type: :bool)
+ field(:integration_type, 7, type: InternalApi.RepositoryIntegrator.IntegrationType, enum: true)
+ field(:commit_status, 8, type: InternalApi.Projecthub.Project.Spec.Repository.Status)
+ field(:whitelist, 9, type: InternalApi.Projecthub.Project.Spec.Repository.Whitelist)
+ field(:default_branch, 10, type: :string)
end
defmodule InternalApi.Repository.CreateResponse do
@@ -787,7 +793,7 @@ defmodule InternalApi.Repository.CreateResponse do
}
defstruct [:repository]
- field :repository, 1, type: InternalApi.Repository.Repository
+ field(:repository, 1, type: InternalApi.Repository.Repository)
end
defmodule InternalApi.Repository.DeleteRequest do
@@ -799,7 +805,7 @@ defmodule InternalApi.Repository.DeleteRequest do
}
defstruct [:repository_id]
- field :repository_id, 1, type: :string
+ field(:repository_id, 1, type: :string)
end
defmodule InternalApi.Repository.DeleteResponse do
@@ -811,7 +817,7 @@ defmodule InternalApi.Repository.DeleteResponse do
}
defstruct [:repository]
- field :repository, 1, type: InternalApi.Repository.Repository
+ field(:repository, 1, type: InternalApi.Repository.Repository)
end
defmodule InternalApi.Repository.UpdateRequest do
@@ -837,13 +843,13 @@ defmodule InternalApi.Repository.UpdateRequest do
:default_branch
]
- field :repository_id, 1, type: :string
- field :url, 2, type: :string
- field :pipeline_file, 3, type: :string
- field :integration_type, 4, type: InternalApi.RepositoryIntegrator.IntegrationType, enum: true
- field :commit_status, 5, type: InternalApi.Projecthub.Project.Spec.Repository.Status
- field :whitelist, 6, type: InternalApi.Projecthub.Project.Spec.Repository.Whitelist
- field :default_branch, 7, type: :string
+ field(:repository_id, 1, type: :string)
+ field(:url, 2, type: :string)
+ field(:pipeline_file, 3, type: :string)
+ field(:integration_type, 4, type: InternalApi.RepositoryIntegrator.IntegrationType, enum: true)
+ field(:commit_status, 5, type: InternalApi.Projecthub.Project.Spec.Repository.Status)
+ field(:whitelist, 6, type: InternalApi.Projecthub.Project.Spec.Repository.Whitelist)
+ field(:default_branch, 7, type: :string)
end
defmodule InternalApi.Repository.UpdateResponse do
@@ -855,7 +861,7 @@ defmodule InternalApi.Repository.UpdateResponse do
}
defstruct [:repository]
- field :repository, 1, type: InternalApi.Repository.Repository
+ field(:repository, 1, type: InternalApi.Repository.Repository)
end
defmodule InternalApi.Repository.RemoteRepositoryChanged do
@@ -868,8 +874,8 @@ defmodule InternalApi.Repository.RemoteRepositoryChanged do
}
defstruct [:remote_id, :timestamp]
- field :remote_id, 1, type: :string
- field :timestamp, 3, type: Google.Protobuf.Timestamp
+ field(:remote_id, 1, type: :string)
+ field(:timestamp, 3, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.Repository.VerifyWebhookSignatureRequest do
@@ -884,10 +890,10 @@ defmodule InternalApi.Repository.VerifyWebhookSignatureRequest do
}
defstruct [:organization_id, :repository_id, :payload, :signature]
- field :organization_id, 1, type: :string
- field :repository_id, 2, type: :string
- field :payload, 3, type: :string
- field :signature, 4, type: :string
+ field(:organization_id, 1, type: :string)
+ field(:repository_id, 2, type: :string)
+ field(:payload, 3, type: :string)
+ field(:signature, 4, type: :string)
end
defmodule InternalApi.Repository.VerifyWebhookSignatureResponse do
@@ -899,77 +905,163 @@ defmodule InternalApi.Repository.VerifyWebhookSignatureResponse do
}
defstruct [:valid]
- field :valid, 1, type: :bool
+ field(:valid, 1, type: :bool)
end
-defmodule InternalApi.Repository.RepositoryService.Service do
+defmodule InternalApi.Repository.ClearExternalDataRequest do
@moduledoc false
- use GRPC.Service, name: "InternalApi.Repository.RepositoryService"
-
- rpc :Describe, InternalApi.Repository.DescribeRequest, InternalApi.Repository.DescribeResponse
-
- rpc :DescribeMany,
- InternalApi.Repository.DescribeManyRequest,
- InternalApi.Repository.DescribeManyResponse
-
- rpc :List, InternalApi.Repository.ListRequest, InternalApi.Repository.ListResponse
- rpc :Create, InternalApi.Repository.CreateRequest, InternalApi.Repository.CreateResponse
- rpc :Update, InternalApi.Repository.UpdateRequest, InternalApi.Repository.UpdateResponse
- rpc :Delete, InternalApi.Repository.DeleteRequest, InternalApi.Repository.DeleteResponse
- rpc :GetFile, InternalApi.Repository.GetFileRequest, InternalApi.Repository.GetFileResponse
- rpc :GetFiles, InternalApi.Repository.GetFilesRequest, InternalApi.Repository.GetFilesResponse
-
- rpc :GetChangedFilePaths,
- InternalApi.Repository.GetChangedFilePathsRequest,
- InternalApi.Repository.GetChangedFilePathsResponse
+ use Protobuf, syntax: :proto3
- rpc :Commit, InternalApi.Repository.CommitRequest, InternalApi.Repository.CommitResponse
+ @type t :: %__MODULE__{
+ repository_id: String.t()
+ }
+ defstruct [:repository_id]
- rpc :GetSshKey,
- InternalApi.Repository.GetSshKeyRequest,
- InternalApi.Repository.GetSshKeyResponse
+ field(:repository_id, 1, type: :string)
+end
- rpc :ListAccessibleRepositories,
- InternalApi.Repository.ListAccessibleRepositoriesRequest,
- InternalApi.Repository.ListAccessibleRepositoriesResponse
+defmodule InternalApi.Repository.ClearExternalDataResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
- rpc :ListCollaborators,
- InternalApi.Repository.ListCollaboratorsRequest,
- InternalApi.Repository.ListCollaboratorsResponse
+ @type t :: %__MODULE__{
+ repository: InternalApi.Repository.Repository.t()
+ }
+ defstruct [:repository]
- rpc :CreateBuildStatus,
- InternalApi.Repository.CreateBuildStatusRequest,
- InternalApi.Repository.CreateBuildStatusResponse
+ field(:repository, 1, type: InternalApi.Repository.Repository)
+end
- rpc :CheckDeployKey,
- InternalApi.Repository.CheckDeployKeyRequest,
- InternalApi.Repository.CheckDeployKeyResponse
+defmodule InternalApi.Repository.RegenerateWebhookSecretRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
- rpc :RegenerateDeployKey,
- InternalApi.Repository.RegenerateDeployKeyRequest,
- InternalApi.Repository.RegenerateDeployKeyResponse
+ @type t :: %__MODULE__{
+ repository_id: String.t()
+ }
+ defstruct [:repository_id]
- rpc :CheckWebhook,
- InternalApi.Repository.CheckWebhookRequest,
- InternalApi.Repository.CheckWebhookResponse
+ field(:repository_id, 1, type: :string)
+end
- rpc :RegenerateWebhook,
- InternalApi.Repository.RegenerateWebhookRequest,
- InternalApi.Repository.RegenerateWebhookResponse
+defmodule InternalApi.Repository.RegenerateWebhookSecretResponse do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
- rpc :Fork, InternalApi.Repository.ForkRequest, InternalApi.Repository.ForkResponse
+ @type t :: %__MODULE__{
+ secret: String.t()
+ }
+ defstruct [:secret]
- rpc :DescribeRemoteRepository,
- InternalApi.Repository.DescribeRemoteRepositoryRequest,
- InternalApi.Repository.DescribeRemoteRepositoryResponse
+ field(:secret, 1, type: :string)
+end
- rpc :DescribeRevision,
- InternalApi.Repository.DescribeRevisionRequest,
- InternalApi.Repository.DescribeRevisionResponse
+defmodule InternalApi.Repository.RepositoryService.Service do
+ @moduledoc false
+ use GRPC.Service, name: "InternalApi.Repository.RepositoryService"
- rpc :VerifyWebhookSignature,
- InternalApi.Repository.VerifyWebhookSignatureRequest,
- InternalApi.Repository.VerifyWebhookSignatureResponse
+ rpc(:Describe, InternalApi.Repository.DescribeRequest, InternalApi.Repository.DescribeResponse)
+
+ rpc(
+ :DescribeMany,
+ InternalApi.Repository.DescribeManyRequest,
+ InternalApi.Repository.DescribeManyResponse
+ )
+
+ rpc(:List, InternalApi.Repository.ListRequest, InternalApi.Repository.ListResponse)
+ rpc(:Create, InternalApi.Repository.CreateRequest, InternalApi.Repository.CreateResponse)
+ rpc(:Update, InternalApi.Repository.UpdateRequest, InternalApi.Repository.UpdateResponse)
+ rpc(:Delete, InternalApi.Repository.DeleteRequest, InternalApi.Repository.DeleteResponse)
+ rpc(:GetFile, InternalApi.Repository.GetFileRequest, InternalApi.Repository.GetFileResponse)
+ rpc(:GetFiles, InternalApi.Repository.GetFilesRequest, InternalApi.Repository.GetFilesResponse)
+
+ rpc(
+ :GetChangedFilePaths,
+ InternalApi.Repository.GetChangedFilePathsRequest,
+ InternalApi.Repository.GetChangedFilePathsResponse
+ )
+
+ rpc(:Commit, InternalApi.Repository.CommitRequest, InternalApi.Repository.CommitResponse)
+
+ rpc(
+ :GetSshKey,
+ InternalApi.Repository.GetSshKeyRequest,
+ InternalApi.Repository.GetSshKeyResponse
+ )
+
+ rpc(
+ :ListAccessibleRepositories,
+ InternalApi.Repository.ListAccessibleRepositoriesRequest,
+ InternalApi.Repository.ListAccessibleRepositoriesResponse
+ )
+
+ rpc(
+ :ListCollaborators,
+ InternalApi.Repository.ListCollaboratorsRequest,
+ InternalApi.Repository.ListCollaboratorsResponse
+ )
+
+ rpc(
+ :CreateBuildStatus,
+ InternalApi.Repository.CreateBuildStatusRequest,
+ InternalApi.Repository.CreateBuildStatusResponse
+ )
+
+ rpc(
+ :CheckDeployKey,
+ InternalApi.Repository.CheckDeployKeyRequest,
+ InternalApi.Repository.CheckDeployKeyResponse
+ )
+
+ rpc(
+ :RegenerateDeployKey,
+ InternalApi.Repository.RegenerateDeployKeyRequest,
+ InternalApi.Repository.RegenerateDeployKeyResponse
+ )
+
+ rpc(
+ :CheckWebhook,
+ InternalApi.Repository.CheckWebhookRequest,
+ InternalApi.Repository.CheckWebhookResponse
+ )
+
+ rpc(
+ :RegenerateWebhook,
+ InternalApi.Repository.RegenerateWebhookRequest,
+ InternalApi.Repository.RegenerateWebhookResponse
+ )
+
+ rpc(:Fork, InternalApi.Repository.ForkRequest, InternalApi.Repository.ForkResponse)
+
+ rpc(
+ :DescribeRemoteRepository,
+ InternalApi.Repository.DescribeRemoteRepositoryRequest,
+ InternalApi.Repository.DescribeRemoteRepositoryResponse
+ )
+
+ rpc(
+ :DescribeRevision,
+ InternalApi.Repository.DescribeRevisionRequest,
+ InternalApi.Repository.DescribeRevisionResponse
+ )
+
+ rpc(
+ :VerifyWebhookSignature,
+ InternalApi.Repository.VerifyWebhookSignatureRequest,
+ InternalApi.Repository.VerifyWebhookSignatureResponse
+ )
+
+ rpc(
+ :ClearExternalData,
+ InternalApi.Repository.ClearExternalDataRequest,
+ InternalApi.Repository.ClearExternalDataResponse
+ )
+
+ rpc(
+ :RegenerateWebhookSecret,
+ InternalApi.Repository.RegenerateWebhookSecretRequest,
+ InternalApi.Repository.RegenerateWebhookSecretResponse
+ )
end
defmodule InternalApi.Repository.RepositoryService.Stub do
diff --git a/plumber/proto/lib/internal_api/repository_integrator.pb.ex b/plumber/proto/lib/internal_api/repository_integrator.pb.ex
index 6ecdada36..ac05f1b5a 100644
--- a/plumber/proto/lib/internal_api/repository_integrator.pb.ex
+++ b/plumber/proto/lib/internal_api/repository_integrator.pb.ex
@@ -10,10 +10,10 @@ defmodule InternalApi.RepositoryIntegrator.GetTokenRequest do
}
defstruct [:user_id, :repository_slug, :integration_type, :project_id]
- field :user_id, 1, type: :string
- field :repository_slug, 2, type: :string
- field :integration_type, 3, type: InternalApi.RepositoryIntegrator.IntegrationType, enum: true
- field :project_id, 4, type: :string
+ field(:user_id, 1, type: :string)
+ field(:repository_slug, 2, type: :string)
+ field(:integration_type, 3, type: InternalApi.RepositoryIntegrator.IntegrationType, enum: true)
+ field(:project_id, 4, type: :string)
end
defmodule InternalApi.RepositoryIntegrator.GetTokenResponse do
@@ -26,8 +26,8 @@ defmodule InternalApi.RepositoryIntegrator.GetTokenResponse do
}
defstruct [:token, :expires_at]
- field :token, 1, type: :string
- field :expires_at, 2, type: Google.Protobuf.Timestamp
+ field(:token, 1, type: :string)
+ field(:expires_at, 2, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.RepositoryIntegrator.CheckTokenRequest do
@@ -39,7 +39,7 @@ defmodule InternalApi.RepositoryIntegrator.CheckTokenRequest do
}
defstruct [:project_id]
- field :project_id, 1, type: :string
+ field(:project_id, 1, type: :string)
end
defmodule InternalApi.RepositoryIntegrator.CheckTokenResponse do
@@ -52,8 +52,12 @@ defmodule InternalApi.RepositoryIntegrator.CheckTokenResponse do
}
defstruct [:valid, :integration_scope]
- field :valid, 1, type: :bool
- field :integration_scope, 2, type: InternalApi.RepositoryIntegrator.IntegrationScope, enum: true
+ field(:valid, 1, type: :bool)
+
+ field(:integration_scope, 2,
+ type: InternalApi.RepositoryIntegrator.IntegrationScope,
+ enum: true
+ )
end
defmodule InternalApi.RepositoryIntegrator.PreheatFileCacheRequest do
@@ -67,9 +71,9 @@ defmodule InternalApi.RepositoryIntegrator.PreheatFileCacheRequest do
}
defstruct [:project_id, :path, :ref]
- field :project_id, 1, type: :string
- field :path, 2, type: :string
- field :ref, 3, type: :string
+ field(:project_id, 1, type: :string)
+ field(:path, 2, type: :string)
+ field(:ref, 3, type: :string)
end
defmodule InternalApi.RepositoryIntegrator.GetFileRequest do
@@ -83,9 +87,9 @@ defmodule InternalApi.RepositoryIntegrator.GetFileRequest do
}
defstruct [:project_id, :path, :ref]
- field :project_id, 1, type: :string
- field :path, 2, type: :string
- field :ref, 3, type: :string
+ field(:project_id, 1, type: :string)
+ field(:path, 2, type: :string)
+ field(:ref, 3, type: :string)
end
defmodule InternalApi.RepositoryIntegrator.GetFileResponse do
@@ -97,7 +101,7 @@ defmodule InternalApi.RepositoryIntegrator.GetFileResponse do
}
defstruct [:content]
- field :content, 1, type: :string
+ field(:content, 1, type: :string)
end
defmodule InternalApi.RepositoryIntegrator.GithubInstallationInfoRequest do
@@ -109,7 +113,7 @@ defmodule InternalApi.RepositoryIntegrator.GithubInstallationInfoRequest do
}
defstruct [:project_id]
- field :project_id, 1, type: :string
+ field(:project_id, 1, type: :string)
end
defmodule InternalApi.RepositoryIntegrator.GithubInstallationInfoResponse do
@@ -123,9 +127,9 @@ defmodule InternalApi.RepositoryIntegrator.GithubInstallationInfoResponse do
}
defstruct [:installation_id, :application_url, :installation_url]
- field :installation_id, 1, type: :int64
- field :application_url, 2, type: :string
- field :installation_url, 3, type: :string
+ field(:installation_id, 1, type: :int64)
+ field(:application_url, 2, type: :string)
+ field(:installation_url, 3, type: :string)
end
defmodule InternalApi.RepositoryIntegrator.InitGithubInstallationRequest do
@@ -152,8 +156,8 @@ defmodule InternalApi.RepositoryIntegrator.GetRepositoriesRequest do
}
defstruct [:user_id, :integration_type]
- field :user_id, 1, type: :string
- field :integration_type, 2, type: InternalApi.RepositoryIntegrator.IntegrationType, enum: true
+ field(:user_id, 1, type: :string)
+ field(:integration_type, 2, type: InternalApi.RepositoryIntegrator.IntegrationType, enum: true)
end
defmodule InternalApi.RepositoryIntegrator.GetRepositoriesResponse do
@@ -165,7 +169,7 @@ defmodule InternalApi.RepositoryIntegrator.GetRepositoriesResponse do
}
defstruct [:repositories]
- field :repositories, 1, repeated: true, type: InternalApi.RepositoryIntegrator.Repository
+ field(:repositories, 1, repeated: true, type: InternalApi.RepositoryIntegrator.Repository)
end
defmodule InternalApi.RepositoryIntegrator.Repository do
@@ -181,64 +185,78 @@ defmodule InternalApi.RepositoryIntegrator.Repository do
}
defstruct [:addable, :name, :full_name, :url, :description]
- field :addable, 1, type: :bool
- field :name, 2, type: :string
- field :full_name, 4, type: :string
- field :url, 3, type: :string
- field :description, 5, type: :string
+ field(:addable, 1, type: :bool)
+ field(:name, 2, type: :string)
+ field(:full_name, 4, type: :string)
+ field(:url, 3, type: :string)
+ field(:description, 5, type: :string)
end
defmodule InternalApi.RepositoryIntegrator.IntegrationType do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :GITHUB_OAUTH_TOKEN, 0
- field :GITHUB_APP, 1
- field :BITBUCKET, 2
- field :GITLAB, 3
- field :GIT, 4
+ field(:GITHUB_OAUTH_TOKEN, 0)
+ field(:GITHUB_APP, 1)
+ field(:BITBUCKET, 2)
+ field(:GITLAB, 3)
+ field(:GIT, 4)
end
defmodule InternalApi.RepositoryIntegrator.IntegrationScope do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :FULL_CONNECTION, 0
- field :ONLY_PUBLIC, 1
- field :NO_CONNECTION, 2
+ field(:FULL_CONNECTION, 0)
+ field(:ONLY_PUBLIC, 1)
+ field(:NO_CONNECTION, 2)
end
defmodule InternalApi.RepositoryIntegrator.RepositoryIntegratorService.Service do
@moduledoc false
use GRPC.Service, name: "InternalApi.RepositoryIntegrator.RepositoryIntegratorService"
- rpc :GetToken,
- InternalApi.RepositoryIntegrator.GetTokenRequest,
- InternalApi.RepositoryIntegrator.GetTokenResponse
-
- rpc :CheckToken,
- InternalApi.RepositoryIntegrator.CheckTokenRequest,
- InternalApi.RepositoryIntegrator.CheckTokenResponse
-
- rpc :PreheatFileCache,
- InternalApi.RepositoryIntegrator.PreheatFileCacheRequest,
- Google.Protobuf.Empty
-
- rpc :GetFile,
- InternalApi.RepositoryIntegrator.GetFileRequest,
- InternalApi.RepositoryIntegrator.GetFileResponse
-
- rpc :GithubInstallationInfo,
- InternalApi.RepositoryIntegrator.GithubInstallationInfoRequest,
- InternalApi.RepositoryIntegrator.GithubInstallationInfoResponse
-
- rpc :InitGithubInstallation,
- InternalApi.RepositoryIntegrator.InitGithubInstallationRequest,
- InternalApi.RepositoryIntegrator.InitGithubInstallationResponse
-
- rpc :GetRepositories,
- InternalApi.RepositoryIntegrator.GetRepositoriesRequest,
- InternalApi.RepositoryIntegrator.GetRepositoriesResponse
+ rpc(
+ :GetToken,
+ InternalApi.RepositoryIntegrator.GetTokenRequest,
+ InternalApi.RepositoryIntegrator.GetTokenResponse
+ )
+
+ rpc(
+ :CheckToken,
+ InternalApi.RepositoryIntegrator.CheckTokenRequest,
+ InternalApi.RepositoryIntegrator.CheckTokenResponse
+ )
+
+ rpc(
+ :PreheatFileCache,
+ InternalApi.RepositoryIntegrator.PreheatFileCacheRequest,
+ Google.Protobuf.Empty
+ )
+
+ rpc(
+ :GetFile,
+ InternalApi.RepositoryIntegrator.GetFileRequest,
+ InternalApi.RepositoryIntegrator.GetFileResponse
+ )
+
+ rpc(
+ :GithubInstallationInfo,
+ InternalApi.RepositoryIntegrator.GithubInstallationInfoRequest,
+ InternalApi.RepositoryIntegrator.GithubInstallationInfoResponse
+ )
+
+ rpc(
+ :InitGithubInstallation,
+ InternalApi.RepositoryIntegrator.InitGithubInstallationRequest,
+ InternalApi.RepositoryIntegrator.InitGithubInstallationResponse
+ )
+
+ rpc(
+ :GetRepositories,
+ InternalApi.RepositoryIntegrator.GetRepositoriesRequest,
+ InternalApi.RepositoryIntegrator.GetRepositoriesResponse
+ )
end
defmodule InternalApi.RepositoryIntegrator.RepositoryIntegratorService.Stub do
diff --git a/plumber/proto/lib/internal_api/stethoscope.listener_proxy.pb.ex b/plumber/proto/lib/internal_api/stethoscope.listener_proxy.pb.ex
index 02637b60b..aa9866af3 100644
--- a/plumber/proto/lib/internal_api/stethoscope.listener_proxy.pb.ex
+++ b/plumber/proto/lib/internal_api/stethoscope.listener_proxy.pb.ex
@@ -9,13 +9,14 @@ defmodule InternalApi.Stethoscope.EventRequest do
}
defstruct [:request_token, :listener, :attributes]
- field :request_token, 1, type: :string
- field :listener, 2, type: :string
+ field(:request_token, 1, type: :string)
+ field(:listener, 2, type: :string)
- field :attributes, 3,
+ field(:attributes, 3,
repeated: true,
type: InternalApi.Stethoscope.EventRequest.AttributesEntry,
map: true
+ )
end
defmodule InternalApi.Stethoscope.EventRequest.AttributesEntry do
@@ -28,8 +29,8 @@ defmodule InternalApi.Stethoscope.EventRequest.AttributesEntry do
}
defstruct [:key, :value]
- field :key, 1, type: :string
- field :value, 2, type: :string
+ field(:key, 1, type: :string)
+ field(:value, 2, type: :string)
end
defmodule InternalApi.Stethoscope.EventResponse do
@@ -41,7 +42,7 @@ defmodule InternalApi.Stethoscope.EventResponse do
}
defstruct [:event_id]
- field :event_id, 1, type: :string
+ field(:event_id, 1, type: :string)
end
defmodule InternalApi.Stethoscope.GetBlobRequest do
@@ -54,8 +55,8 @@ defmodule InternalApi.Stethoscope.GetBlobRequest do
}
defstruct [:wf_id, :uri]
- field :wf_id, 1, type: :string
- field :uri, 2, type: :string
+ field(:wf_id, 1, type: :string)
+ field(:uri, 2, type: :string)
end
defmodule InternalApi.Stethoscope.GetBlobResponse do
@@ -67,15 +68,15 @@ defmodule InternalApi.Stethoscope.GetBlobResponse do
}
defstruct [:content]
- field :content, 1, type: :string
+ field(:content, 1, type: :string)
end
defmodule InternalApi.Stethoscope.StethoscopeService.Service do
@moduledoc false
use GRPC.Service, name: "InternalApi.Stethoscope.StethoscopeService"
- rpc :Event, InternalApi.Stethoscope.EventRequest, InternalApi.Stethoscope.EventResponse
- rpc :GetBlob, InternalApi.Stethoscope.GetBlobRequest, InternalApi.Stethoscope.GetBlobResponse
+ rpc(:Event, InternalApi.Stethoscope.EventRequest, InternalApi.Stethoscope.EventResponse)
+ rpc(:GetBlob, InternalApi.Stethoscope.GetBlobRequest, InternalApi.Stethoscope.GetBlobResponse)
end
defmodule InternalApi.Stethoscope.StethoscopeService.Stub do
diff --git a/plumber/proto/lib/internal_api/task.pb.ex b/plumber/proto/lib/internal_api/task.pb.ex
index b68c5b8e1..ebb395e1a 100644
--- a/plumber/proto/lib/internal_api/task.pb.ex
+++ b/plumber/proto/lib/internal_api/task.pb.ex
@@ -27,16 +27,16 @@ defmodule InternalApi.Task.Task do
:finished_at
]
- field :id, 1, type: :string
- field :state, 2, type: InternalApi.Task.Task.State, enum: true
- field :result, 3, type: InternalApi.Task.Task.Result, enum: true
- field :jobs, 4, repeated: true, type: InternalApi.Task.Task.Job
- field :ppl_id, 5, type: :string
- field :wf_id, 6, type: :string
- field :hook_id, 10, type: :string
- field :request_token, 7, type: :string
- field :created_at, 8, type: Google.Protobuf.Timestamp
- field :finished_at, 9, type: Google.Protobuf.Timestamp
+ field(:id, 1, type: :string)
+ field(:state, 2, type: InternalApi.Task.Task.State, enum: true)
+ field(:result, 3, type: InternalApi.Task.Task.Result, enum: true)
+ field(:jobs, 4, repeated: true, type: InternalApi.Task.Task.Job)
+ field(:ppl_id, 5, type: :string)
+ field(:wf_id, 6, type: :string)
+ field(:hook_id, 10, type: :string)
+ field(:request_token, 7, type: :string)
+ field(:created_at, 8, type: Google.Protobuf.Timestamp)
+ field(:finished_at, 9, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.Task.Task.Job do
@@ -70,54 +70,54 @@ defmodule InternalApi.Task.Task.Job do
:priority
]
- field :id, 1, type: :string
- field :state, 2, type: InternalApi.Task.Task.Job.State, enum: true
- field :result, 3, type: InternalApi.Task.Task.Job.Result, enum: true
- field :name, 4, type: :string
- field :index, 5, type: :int32
- field :created_at, 7, type: Google.Protobuf.Timestamp
- field :enqueued_at, 8, type: Google.Protobuf.Timestamp
- field :scheduled_at, 9, type: Google.Protobuf.Timestamp
- field :started_at, 10, type: Google.Protobuf.Timestamp
- field :finished_at, 11, type: Google.Protobuf.Timestamp
- field :priority, 12, type: :int32
+ field(:id, 1, type: :string)
+ field(:state, 2, type: InternalApi.Task.Task.Job.State, enum: true)
+ field(:result, 3, type: InternalApi.Task.Task.Job.Result, enum: true)
+ field(:name, 4, type: :string)
+ field(:index, 5, type: :int32)
+ field(:created_at, 7, type: Google.Protobuf.Timestamp)
+ field(:enqueued_at, 8, type: Google.Protobuf.Timestamp)
+ field(:scheduled_at, 9, type: Google.Protobuf.Timestamp)
+ field(:started_at, 10, type: Google.Protobuf.Timestamp)
+ field(:finished_at, 11, type: Google.Protobuf.Timestamp)
+ field(:priority, 12, type: :int32)
end
defmodule InternalApi.Task.Task.Job.State do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :ENQUEUED, 0
- field :RUNNING, 1
- field :STOPPING, 2
- field :FINISHED, 3
+ field(:ENQUEUED, 0)
+ field(:RUNNING, 1)
+ field(:STOPPING, 2)
+ field(:FINISHED, 3)
end
defmodule InternalApi.Task.Task.Job.Result do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :PASSED, 0
- field :FAILED, 1
- field :STOPPED, 2
+ field(:PASSED, 0)
+ field(:FAILED, 1)
+ field(:STOPPED, 2)
end
defmodule InternalApi.Task.Task.State do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :RUNNING, 0
- field :STOPPING, 1
- field :FINISHED, 2
+ field(:RUNNING, 0)
+ field(:STOPPING, 1)
+ field(:FINISHED, 2)
end
defmodule InternalApi.Task.Task.Result do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :PASSED, 0
- field :FAILED, 1
- field :STOPPED, 2
+ field(:PASSED, 0)
+ field(:FAILED, 1)
+ field(:STOPPED, 2)
end
defmodule InternalApi.Task.ScheduleRequest do
@@ -149,16 +149,16 @@ defmodule InternalApi.Task.ScheduleRequest do
:fail_fast
]
- field :jobs, 1, repeated: true, type: InternalApi.Task.ScheduleRequest.Job
- field :request_token, 2, type: :string
- field :ppl_id, 3, type: :string
- field :wf_id, 4, type: :string
- field :hook_id, 8, type: :string
- field :project_id, 5, type: :string
- field :repository_id, 9, type: :string
- field :deployment_target_id, 10, type: :string
- field :org_id, 6, type: :string
- field :fail_fast, 7, type: InternalApi.Task.ScheduleRequest.FailFast, enum: true
+ field(:jobs, 1, repeated: true, type: InternalApi.Task.ScheduleRequest.Job)
+ field(:request_token, 2, type: :string)
+ field(:ppl_id, 3, type: :string)
+ field(:wf_id, 4, type: :string)
+ field(:hook_id, 8, type: :string)
+ field(:project_id, 5, type: :string)
+ field(:repository_id, 9, type: :string)
+ field(:deployment_target_id, 10, type: :string)
+ field(:org_id, 6, type: :string)
+ field(:fail_fast, 7, type: InternalApi.Task.ScheduleRequest.FailFast, enum: true)
end
defmodule InternalApi.Task.ScheduleRequest.Job do
@@ -192,17 +192,17 @@ defmodule InternalApi.Task.ScheduleRequest.Job do
:priority
]
- field :name, 1, type: :string
- field :agent, 2, type: InternalApi.Task.ScheduleRequest.Job.Agent
- field :env_vars, 3, repeated: true, type: InternalApi.Task.ScheduleRequest.Job.EnvVar
- field :secrets, 4, repeated: true, type: InternalApi.Task.ScheduleRequest.Job.Secret
- field :prologue_commands, 5, repeated: true, type: :string
- field :commands, 6, repeated: true, type: :string
- field :epilogue_always_cmds, 8, repeated: true, type: :string
- field :epilogue_on_pass_cmds, 9, repeated: true, type: :string
- field :epilogue_on_fail_cmds, 10, repeated: true, type: :string
- field :execution_time_limit, 11, type: :int32
- field :priority, 12, type: :int32
+ field(:name, 1, type: :string)
+ field(:agent, 2, type: InternalApi.Task.ScheduleRequest.Job.Agent)
+ field(:env_vars, 3, repeated: true, type: InternalApi.Task.ScheduleRequest.Job.EnvVar)
+ field(:secrets, 4, repeated: true, type: InternalApi.Task.ScheduleRequest.Job.Secret)
+ field(:prologue_commands, 5, repeated: true, type: :string)
+ field(:commands, 6, repeated: true, type: :string)
+ field(:epilogue_always_cmds, 8, repeated: true, type: :string)
+ field(:epilogue_on_pass_cmds, 9, repeated: true, type: :string)
+ field(:epilogue_on_fail_cmds, 10, repeated: true, type: :string)
+ field(:execution_time_limit, 11, type: :int32)
+ field(:priority, 12, type: :int32)
end
defmodule InternalApi.Task.ScheduleRequest.Job.Agent do
@@ -216,12 +216,17 @@ defmodule InternalApi.Task.ScheduleRequest.Job.Agent do
}
defstruct [:machine, :containers, :image_pull_secrets]
- field :machine, 1, type: InternalApi.Task.ScheduleRequest.Job.Agent.Machine
- field :containers, 2, repeated: true, type: InternalApi.Task.ScheduleRequest.Job.Agent.Container
+ field(:machine, 1, type: InternalApi.Task.ScheduleRequest.Job.Agent.Machine)
- field :image_pull_secrets, 3,
+ field(:containers, 2,
+ repeated: true,
+ type: InternalApi.Task.ScheduleRequest.Job.Agent.Container
+ )
+
+ field(:image_pull_secrets, 3,
repeated: true,
type: InternalApi.Task.ScheduleRequest.Job.Agent.ImagePullSecret
+ )
end
defmodule InternalApi.Task.ScheduleRequest.Job.Agent.Machine do
@@ -234,8 +239,8 @@ defmodule InternalApi.Task.ScheduleRequest.Job.Agent.Machine do
}
defstruct [:type, :os_image]
- field :type, 1, type: :string
- field :os_image, 2, type: :string
+ field(:type, 1, type: :string)
+ field(:os_image, 2, type: :string)
end
defmodule InternalApi.Task.ScheduleRequest.Job.Agent.Container do
@@ -253,13 +258,13 @@ defmodule InternalApi.Task.ScheduleRequest.Job.Agent.Container do
}
defstruct [:name, :image, :command, :env_vars, :secrets, :entrypoint, :user]
- field :name, 1, type: :string
- field :image, 2, type: :string
- field :command, 3, type: :string
- field :env_vars, 4, repeated: true, type: InternalApi.Task.ScheduleRequest.Job.EnvVar
- field :secrets, 5, repeated: true, type: InternalApi.Task.ScheduleRequest.Job.Secret
- field :entrypoint, 6, type: :string
- field :user, 7, type: :string
+ field(:name, 1, type: :string)
+ field(:image, 2, type: :string)
+ field(:command, 3, type: :string)
+ field(:env_vars, 4, repeated: true, type: InternalApi.Task.ScheduleRequest.Job.EnvVar)
+ field(:secrets, 5, repeated: true, type: InternalApi.Task.ScheduleRequest.Job.Secret)
+ field(:entrypoint, 6, type: :string)
+ field(:user, 7, type: :string)
end
defmodule InternalApi.Task.ScheduleRequest.Job.Agent.ImagePullSecret do
@@ -271,7 +276,7 @@ defmodule InternalApi.Task.ScheduleRequest.Job.Agent.ImagePullSecret do
}
defstruct [:name]
- field :name, 1, type: :string
+ field(:name, 1, type: :string)
end
defmodule InternalApi.Task.ScheduleRequest.Job.EnvVar do
@@ -284,8 +289,8 @@ defmodule InternalApi.Task.ScheduleRequest.Job.EnvVar do
}
defstruct [:name, :value]
- field :name, 1, type: :string
- field :value, 2, type: :string
+ field(:name, 1, type: :string)
+ field(:value, 2, type: :string)
end
defmodule InternalApi.Task.ScheduleRequest.Job.Secret do
@@ -297,16 +302,16 @@ defmodule InternalApi.Task.ScheduleRequest.Job.Secret do
}
defstruct [:name]
- field :name, 1, type: :string
+ field(:name, 1, type: :string)
end
defmodule InternalApi.Task.ScheduleRequest.FailFast do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :NONE, 0
- field :STOP, 1
- field :CANCEL, 2
+ field(:NONE, 0)
+ field(:STOP, 1)
+ field(:CANCEL, 2)
end
defmodule InternalApi.Task.ScheduleResponse do
@@ -318,7 +323,7 @@ defmodule InternalApi.Task.ScheduleResponse do
}
defstruct [:task]
- field :task, 1, type: InternalApi.Task.Task
+ field(:task, 1, type: InternalApi.Task.Task)
end
defmodule InternalApi.Task.DescribeRequest do
@@ -330,7 +335,7 @@ defmodule InternalApi.Task.DescribeRequest do
}
defstruct [:task_id]
- field :task_id, 1, type: :string
+ field(:task_id, 1, type: :string)
end
defmodule InternalApi.Task.DescribeResponse do
@@ -342,7 +347,7 @@ defmodule InternalApi.Task.DescribeResponse do
}
defstruct [:task]
- field :task, 1, type: InternalApi.Task.Task
+ field(:task, 1, type: InternalApi.Task.Task)
end
defmodule InternalApi.Task.DescribeManyRequest do
@@ -354,7 +359,7 @@ defmodule InternalApi.Task.DescribeManyRequest do
}
defstruct [:task_ids]
- field :task_ids, 1, repeated: true, type: :string
+ field(:task_ids, 1, repeated: true, type: :string)
end
defmodule InternalApi.Task.DescribeManyResponse do
@@ -366,7 +371,7 @@ defmodule InternalApi.Task.DescribeManyResponse do
}
defstruct [:tasks]
- field :tasks, 1, repeated: true, type: InternalApi.Task.Task
+ field(:tasks, 1, repeated: true, type: InternalApi.Task.Task)
end
defmodule InternalApi.Task.TerminateRequest do
@@ -378,7 +383,7 @@ defmodule InternalApi.Task.TerminateRequest do
}
defstruct [:task_id]
- field :task_id, 1, type: :string
+ field(:task_id, 1, type: :string)
end
defmodule InternalApi.Task.TerminateResponse do
@@ -390,7 +395,7 @@ defmodule InternalApi.Task.TerminateResponse do
}
defstruct [:message]
- field :message, 1, type: :string
+ field(:message, 1, type: :string)
end
defmodule InternalApi.Task.TaskStarted do
@@ -403,8 +408,8 @@ defmodule InternalApi.Task.TaskStarted do
}
defstruct [:task_id, :timestamp]
- field :task_id, 1, type: :string
- field :timestamp, 2, type: Google.Protobuf.Timestamp
+ field(:task_id, 1, type: :string)
+ field(:timestamp, 2, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.Task.TaskFinished do
@@ -417,18 +422,18 @@ defmodule InternalApi.Task.TaskFinished do
}
defstruct [:task_id, :timestamp]
- field :task_id, 1, type: :string
- field :timestamp, 2, type: Google.Protobuf.Timestamp
+ field(:task_id, 1, type: :string)
+ field(:timestamp, 2, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.Task.TaskService.Service do
@moduledoc false
use GRPC.Service, name: "InternalApi.Task.TaskService"
- rpc :Schedule, InternalApi.Task.ScheduleRequest, InternalApi.Task.ScheduleResponse
- rpc :Describe, InternalApi.Task.DescribeRequest, InternalApi.Task.DescribeResponse
- rpc :DescribeMany, InternalApi.Task.DescribeManyRequest, InternalApi.Task.DescribeManyResponse
- rpc :Terminate, InternalApi.Task.TerminateRequest, InternalApi.Task.TerminateResponse
+ rpc(:Schedule, InternalApi.Task.ScheduleRequest, InternalApi.Task.ScheduleResponse)
+ rpc(:Describe, InternalApi.Task.DescribeRequest, InternalApi.Task.DescribeResponse)
+ rpc(:DescribeMany, InternalApi.Task.DescribeManyRequest, InternalApi.Task.DescribeManyResponse)
+ rpc(:Terminate, InternalApi.Task.TerminateRequest, InternalApi.Task.TerminateResponse)
end
defmodule InternalApi.Task.TaskService.Stub do
diff --git a/plumber/proto/lib/internal_api/user.pb.ex b/plumber/proto/lib/internal_api/user.pb.ex
index 4a6d31c78..5fc2ba9a3 100644
--- a/plumber/proto/lib/internal_api/user.pb.ex
+++ b/plumber/proto/lib/internal_api/user.pb.ex
@@ -8,8 +8,8 @@ defmodule InternalApi.User.ListFavoritesRequest do
}
defstruct [:user_id, :organization_id]
- field :user_id, 1, type: :string
- field :organization_id, 2, type: :string
+ field(:user_id, 1, type: :string)
+ field(:organization_id, 2, type: :string)
end
defmodule InternalApi.User.ListFavoritesResponse do
@@ -21,7 +21,7 @@ defmodule InternalApi.User.ListFavoritesResponse do
}
defstruct [:favorites]
- field :favorites, 1, repeated: true, type: InternalApi.User.Favorite
+ field(:favorites, 1, repeated: true, type: InternalApi.User.Favorite)
end
defmodule InternalApi.User.Favorite do
@@ -36,18 +36,18 @@ defmodule InternalApi.User.Favorite do
}
defstruct [:user_id, :organization_id, :favorite_id, :kind]
- field :user_id, 1, type: :string
- field :organization_id, 2, type: :string
- field :favorite_id, 3, type: :string
- field :kind, 4, type: InternalApi.User.Favorite.Kind, enum: true
+ field(:user_id, 1, type: :string)
+ field(:organization_id, 2, type: :string)
+ field(:favorite_id, 3, type: :string)
+ field(:kind, 4, type: InternalApi.User.Favorite.Kind, enum: true)
end
defmodule InternalApi.User.Favorite.Kind do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :PROJECT, 0
- field :DASHBOARD, 1
+ field(:PROJECT, 0)
+ field(:DASHBOARD, 1)
end
defmodule InternalApi.User.DescribeManyRequest do
@@ -59,7 +59,7 @@ defmodule InternalApi.User.DescribeManyRequest do
}
defstruct [:user_ids]
- field :user_ids, 1, repeated: true, type: :string
+ field(:user_ids, 1, repeated: true, type: :string)
end
defmodule InternalApi.User.DescribeManyResponse do
@@ -72,8 +72,8 @@ defmodule InternalApi.User.DescribeManyResponse do
}
defstruct [:users, :status]
- field :users, 1, repeated: true, type: InternalApi.User.User
- field :status, 2, type: InternalApi.ResponseStatus
+ field(:users, 1, repeated: true, type: InternalApi.User.User)
+ field(:status, 2, type: InternalApi.ResponseStatus)
end
defmodule InternalApi.User.DescribeRequest do
@@ -85,7 +85,7 @@ defmodule InternalApi.User.DescribeRequest do
}
defstruct [:user_id]
- field :user_id, 2, type: :string
+ field(:user_id, 2, type: :string)
end
defmodule InternalApi.User.DescribeResponse do
@@ -127,30 +127,30 @@ defmodule InternalApi.User.DescribeResponse do
:user
]
- field :status, 1, type: InternalApi.ResponseStatus
- field :email, 3, type: :string
- field :created_at, 4, type: Google.Protobuf.Timestamp
- field :avatar_url, 5, type: :string
- field :user_id, 6, type: :string
- field :github_token, 7, type: :string
- field :github_scope, 12, type: InternalApi.User.DescribeResponse.RepoScope, enum: true
- field :github_uid, 8, type: :string
- field :name, 10, type: :string
- field :github_login, 11, type: :string
- field :company, 13, type: :string
- field :blocked_at, 14, type: Google.Protobuf.Timestamp
- field :repository_scopes, 15, type: InternalApi.User.RepositoryScopes
- field :repository_providers, 16, repeated: true, type: InternalApi.User.RepositoryProvider
- field :user, 17, type: InternalApi.User.User
+ field(:status, 1, type: InternalApi.ResponseStatus)
+ field(:email, 3, type: :string)
+ field(:created_at, 4, type: Google.Protobuf.Timestamp)
+ field(:avatar_url, 5, type: :string)
+ field(:user_id, 6, type: :string)
+ field(:github_token, 7, type: :string)
+ field(:github_scope, 12, type: InternalApi.User.DescribeResponse.RepoScope, enum: true)
+ field(:github_uid, 8, type: :string)
+ field(:name, 10, type: :string)
+ field(:github_login, 11, type: :string)
+ field(:company, 13, type: :string)
+ field(:blocked_at, 14, type: Google.Protobuf.Timestamp)
+ field(:repository_scopes, 15, type: InternalApi.User.RepositoryScopes)
+ field(:repository_providers, 16, repeated: true, type: InternalApi.User.RepositoryProvider)
+ field(:user, 17, type: InternalApi.User.User)
end
defmodule InternalApi.User.DescribeResponse.RepoScope do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :NONE, 0
- field :PUBLIC, 1
- field :PRIVATE, 2
+ field(:NONE, 0)
+ field(:PUBLIC, 1)
+ field(:PRIVATE, 2)
end
defmodule InternalApi.User.RepositoryProvider do
@@ -165,29 +165,29 @@ defmodule InternalApi.User.RepositoryProvider do
}
defstruct [:type, :scope, :login, :uid]
- field :type, 1, type: InternalApi.User.RepositoryProvider.Type, enum: true
- field :scope, 2, type: InternalApi.User.RepositoryProvider.Scope, enum: true
- field :login, 3, type: :string
- field :uid, 4, type: :string
+ field(:type, 1, type: InternalApi.User.RepositoryProvider.Type, enum: true)
+ field(:scope, 2, type: InternalApi.User.RepositoryProvider.Scope, enum: true)
+ field(:login, 3, type: :string)
+ field(:uid, 4, type: :string)
end
defmodule InternalApi.User.RepositoryProvider.Type do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :GITHUB, 0
- field :BITBUCKET, 1
- field :GITLAB, 2
+ field(:GITHUB, 0)
+ field(:BITBUCKET, 1)
+ field(:GITLAB, 2)
end
defmodule InternalApi.User.RepositoryProvider.Scope do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :NONE, 0
- field :EMAIL, 1
- field :PUBLIC, 2
- field :PRIVATE, 3
+ field(:NONE, 0)
+ field(:EMAIL, 1)
+ field(:PUBLIC, 2)
+ field(:PRIVATE, 3)
end
defmodule InternalApi.User.RepositoryScopes do
@@ -200,8 +200,8 @@ defmodule InternalApi.User.RepositoryScopes do
}
defstruct [:github, :bitbucket]
- field :github, 1, type: InternalApi.User.RepositoryScopes.RepositoryScope
- field :bitbucket, 2, type: InternalApi.User.RepositoryScopes.RepositoryScope
+ field(:github, 1, type: InternalApi.User.RepositoryScopes.RepositoryScope)
+ field(:bitbucket, 2, type: InternalApi.User.RepositoryScopes.RepositoryScope)
end
defmodule InternalApi.User.RepositoryScopes.RepositoryScope do
@@ -215,19 +215,19 @@ defmodule InternalApi.User.RepositoryScopes.RepositoryScope do
}
defstruct [:scope, :login, :uid]
- field :scope, 2, type: InternalApi.User.RepositoryScopes.RepositoryScope.Scope, enum: true
- field :login, 3, type: :string
- field :uid, 4, type: :string
+ field(:scope, 2, type: InternalApi.User.RepositoryScopes.RepositoryScope.Scope, enum: true)
+ field(:login, 3, type: :string)
+ field(:uid, 4, type: :string)
end
defmodule InternalApi.User.RepositoryScopes.RepositoryScope.Scope do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :NONE, 0
- field :EMAIL, 1
- field :PUBLIC, 2
- field :PRIVATE, 3
+ field(:NONE, 0)
+ field(:EMAIL, 1)
+ field(:PUBLIC, 2)
+ field(:PRIVATE, 3)
end
defmodule InternalApi.User.UpdateRequest do
@@ -239,7 +239,7 @@ defmodule InternalApi.User.UpdateRequest do
}
defstruct [:user]
- field :user, 1, type: InternalApi.User.User
+ field(:user, 1, type: InternalApi.User.User)
end
defmodule InternalApi.User.UpdateResponse do
@@ -252,8 +252,8 @@ defmodule InternalApi.User.UpdateResponse do
}
defstruct [:status, :user]
- field :status, 1, type: Google.Rpc.Status
- field :user, 2, type: InternalApi.User.User
+ field(:status, 1, type: Google.Rpc.Status)
+ field(:user, 2, type: InternalApi.User.User)
end
defmodule InternalApi.User.SearchUsersRequest do
@@ -266,8 +266,8 @@ defmodule InternalApi.User.SearchUsersRequest do
}
defstruct [:query, :limit]
- field :query, 1, type: :string
- field :limit, 2, type: :int32
+ field(:query, 1, type: :string)
+ field(:limit, 2, type: :int32)
end
defmodule InternalApi.User.SearchUsersResponse do
@@ -279,7 +279,7 @@ defmodule InternalApi.User.SearchUsersResponse do
}
defstruct [:users]
- field :users, 1, repeated: true, type: InternalApi.User.User
+ field(:users, 1, repeated: true, type: InternalApi.User.User)
end
defmodule InternalApi.User.DeleteWithOwnedOrgsRequest do
@@ -291,7 +291,7 @@ defmodule InternalApi.User.DeleteWithOwnedOrgsRequest do
}
defstruct [:user_id]
- field :user_id, 1, type: :string
+ field(:user_id, 1, type: :string)
end
defmodule InternalApi.User.RegenerateTokenRequest do
@@ -303,7 +303,7 @@ defmodule InternalApi.User.RegenerateTokenRequest do
}
defstruct [:user_id]
- field :user_id, 1, type: :string
+ field(:user_id, 1, type: :string)
end
defmodule InternalApi.User.RegenerateTokenResponse do
@@ -316,36 +316,8 @@ defmodule InternalApi.User.RegenerateTokenResponse do
}
defstruct [:status, :api_token]
- field :status, 1, type: Google.Rpc.Status
- field :api_token, 3, type: :string
-end
-
-defmodule InternalApi.User.RefererRequest do
- @moduledoc false
- use Protobuf, syntax: :proto3
-
- @type t :: %__MODULE__{
- user_id: String.t()
- }
- defstruct [:user_id]
-
- field :user_id, 1, type: :string
-end
-
-defmodule InternalApi.User.RefererResponse do
- @moduledoc false
- use Protobuf, syntax: :proto3
-
- @type t :: %__MODULE__{
- user_id: String.t(),
- entry_url: String.t(),
- http_referer: String.t()
- }
- defstruct [:user_id, :entry_url, :http_referer]
-
- field :user_id, 1, type: :string
- field :entry_url, 2, type: :string
- field :http_referer, 3, type: :string
+ field(:status, 1, type: Google.Rpc.Status)
+ field(:api_token, 3, type: :string)
end
defmodule InternalApi.User.CheckGithubTokenRequest do
@@ -357,7 +329,7 @@ defmodule InternalApi.User.CheckGithubTokenRequest do
}
defstruct [:user_id]
- field :user_id, 1, type: :string
+ field(:user_id, 1, type: :string)
end
defmodule InternalApi.User.CheckGithubTokenResponse do
@@ -371,9 +343,9 @@ defmodule InternalApi.User.CheckGithubTokenResponse do
}
defstruct [:revoked, :repo, :public_repo]
- field :revoked, 1, type: :bool
- field :repo, 2, type: :bool
- field :public_repo, 3, type: :bool
+ field(:revoked, 1, type: :bool)
+ field(:repo, 2, type: :bool)
+ field(:public_repo, 3, type: :bool)
end
defmodule InternalApi.User.BlockAccountRequest do
@@ -385,7 +357,7 @@ defmodule InternalApi.User.BlockAccountRequest do
}
defstruct [:user_id]
- field :user_id, 1, type: :string
+ field(:user_id, 1, type: :string)
end
defmodule InternalApi.User.UnblockAccountRequest do
@@ -397,7 +369,7 @@ defmodule InternalApi.User.UnblockAccountRequest do
}
defstruct [:user_id]
- field :user_id, 1, type: :string
+ field(:user_id, 1, type: :string)
end
defmodule InternalApi.User.GetRepositoryTokenRequest do
@@ -410,8 +382,8 @@ defmodule InternalApi.User.GetRepositoryTokenRequest do
}
defstruct [:user_id, :integration_type]
- field :user_id, 1, type: :string
- field :integration_type, 2, type: InternalApi.RepositoryIntegrator.IntegrationType, enum: true
+ field(:user_id, 1, type: :string)
+ field(:integration_type, 2, type: InternalApi.RepositoryIntegrator.IntegrationType, enum: true)
end
defmodule InternalApi.User.GetRepositoryTokenResponse do
@@ -424,8 +396,8 @@ defmodule InternalApi.User.GetRepositoryTokenResponse do
}
defstruct [:token, :expires_at]
- field :token, 1, type: :string
- field :expires_at, 2, type: Google.Protobuf.Timestamp
+ field(:token, 1, type: :string)
+ field(:expires_at, 2, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.User.DescribeByRepositoryProviderRequest do
@@ -437,7 +409,19 @@ defmodule InternalApi.User.DescribeByRepositoryProviderRequest do
}
defstruct [:provider]
- field :provider, 1, type: InternalApi.User.RepositoryProvider
+ field(:provider, 1, type: InternalApi.User.RepositoryProvider)
+end
+
+defmodule InternalApi.User.DescribeByEmailRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ email: String.t()
+ }
+ defstruct [:email]
+
+ field(:email, 1, type: :string)
end
defmodule InternalApi.User.RefreshRepositoryProviderRequest do
@@ -450,8 +434,8 @@ defmodule InternalApi.User.RefreshRepositoryProviderRequest do
}
defstruct [:user_id, :type]
- field :user_id, 1, type: :string
- field :type, 2, type: InternalApi.User.RepositoryProvider.Type, enum: true
+ field(:user_id, 1, type: :string)
+ field(:type, 2, type: InternalApi.User.RepositoryProvider.Type, enum: true)
end
defmodule InternalApi.User.RefreshRepositoryProviderResponse do
@@ -464,8 +448,28 @@ defmodule InternalApi.User.RefreshRepositoryProviderResponse do
}
defstruct [:user_id, :repository_provider]
- field :user_id, 1, type: :string
- field :repository_provider, 2, type: InternalApi.User.RepositoryProvider
+ field(:user_id, 1, type: :string)
+ field(:repository_provider, 2, type: InternalApi.User.RepositoryProvider)
+end
+
+defmodule InternalApi.User.CreateRequest do
+ @moduledoc false
+ use Protobuf, syntax: :proto3
+
+ @type t :: %__MODULE__{
+ email: String.t(),
+ name: String.t(),
+ password: String.t(),
+ repository_providers: [InternalApi.User.RepositoryProvider.t()],
+ skip_password_change: boolean
+ }
+ defstruct [:email, :name, :password, :repository_providers, :skip_password_change]
+
+ field(:email, 1, type: :string)
+ field(:name, 2, type: :string)
+ field(:password, 3, type: :string)
+ field(:repository_providers, 4, repeated: true, type: InternalApi.User.RepositoryProvider)
+ field(:skip_password_change, 5, type: :bool)
end
defmodule InternalApi.User.User do
@@ -507,29 +511,29 @@ defmodule InternalApi.User.User do
:deactivated
]
- field :id, 1, type: :string
- field :avatar_url, 3, type: :string
- field :github_uid, 4, type: :string
- field :name, 5, type: :string
- field :github_login, 7, type: :string
- field :company, 8, type: :string
- field :email, 9, type: :string
- field :blocked_at, 10, type: Google.Protobuf.Timestamp
- field :created_at, 11, type: Google.Protobuf.Timestamp
- field :repository_providers, 12, repeated: true, type: InternalApi.User.RepositoryProvider
- field :visited_at, 13, type: Google.Protobuf.Timestamp
- field :single_org_user, 14, type: :bool
- field :org_id, 15, type: :string
- field :creation_source, 16, type: InternalApi.User.User.CreationSource, enum: true
- field :deactivated, 17, type: :bool
+ field(:id, 1, type: :string)
+ field(:avatar_url, 3, type: :string)
+ field(:github_uid, 4, type: :string)
+ field(:name, 5, type: :string)
+ field(:github_login, 7, type: :string)
+ field(:company, 8, type: :string)
+ field(:email, 9, type: :string)
+ field(:blocked_at, 10, type: Google.Protobuf.Timestamp)
+ field(:created_at, 11, type: Google.Protobuf.Timestamp)
+ field(:repository_providers, 12, repeated: true, type: InternalApi.User.RepositoryProvider)
+ field(:visited_at, 13, type: Google.Protobuf.Timestamp)
+ field(:single_org_user, 14, type: :bool)
+ field(:org_id, 15, type: :string)
+ field(:creation_source, 16, type: InternalApi.User.User.CreationSource, enum: true)
+ field(:deactivated, 17, type: :bool)
end
defmodule InternalApi.User.User.CreationSource do
@moduledoc false
use Protobuf, enum: true, syntax: :proto3
- field :NOT_SET, 0
- field :OKTA, 1
+ field(:NOT_SET, 0)
+ field(:OKTA, 1)
end
defmodule InternalApi.User.UserCreated do
@@ -543,9 +547,9 @@ defmodule InternalApi.User.UserCreated do
}
defstruct [:user_id, :timestamp, :invited]
- field :user_id, 1, type: :string
- field :timestamp, 2, type: Google.Protobuf.Timestamp
- field :invited, 3, type: :bool
+ field(:user_id, 1, type: :string)
+ field(:timestamp, 2, type: Google.Protobuf.Timestamp)
+ field(:invited, 3, type: :bool)
end
defmodule InternalApi.User.UserDeleted do
@@ -558,8 +562,8 @@ defmodule InternalApi.User.UserDeleted do
}
defstruct [:user_id, :timestamp]
- field :user_id, 1, type: :string
- field :timestamp, 2, type: Google.Protobuf.Timestamp
+ field(:user_id, 1, type: :string)
+ field(:timestamp, 2, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.User.UserUpdated do
@@ -572,24 +576,8 @@ defmodule InternalApi.User.UserUpdated do
}
defstruct [:user_id, :timestamp]
- field :user_id, 1, type: :string
- field :timestamp, 2, type: Google.Protobuf.Timestamp
-end
-
-defmodule InternalApi.User.UserRefererCreated do
- @moduledoc false
- use Protobuf, syntax: :proto3
-
- @type t :: %__MODULE__{
- user_id: String.t(),
- entry_url: String.t(),
- http_referer: String.t()
- }
- defstruct [:user_id, :entry_url, :http_referer]
-
- field :user_id, 1, type: :string
- field :entry_url, 2, type: :string
- field :http_referer, 3, type: :string
+ field(:user_id, 1, type: :string)
+ field(:timestamp, 2, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.User.UserJoinedOrganization do
@@ -603,9 +591,9 @@ defmodule InternalApi.User.UserJoinedOrganization do
}
defstruct [:user_id, :org_id, :timestamp]
- field :user_id, 1, type: :string
- field :org_id, 2, type: :string
- field :timestamp, 3, type: Google.Protobuf.Timestamp
+ field(:user_id, 1, type: :string)
+ field(:org_id, 2, type: :string)
+ field(:timestamp, 3, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.User.UserLeftOrganization do
@@ -619,9 +607,9 @@ defmodule InternalApi.User.UserLeftOrganization do
}
defstruct [:user_id, :org_id, :timestamp]
- field :user_id, 1, type: :string
- field :org_id, 2, type: :string
- field :timestamp, 3, type: Google.Protobuf.Timestamp
+ field(:user_id, 1, type: :string)
+ field(:org_id, 2, type: :string)
+ field(:timestamp, 3, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.User.MemberInvited do
@@ -635,9 +623,9 @@ defmodule InternalApi.User.MemberInvited do
}
defstruct [:github_username, :org_id, :timestamp]
- field :github_username, 1, type: :string
- field :org_id, 2, type: :string
- field :timestamp, 3, type: Google.Protobuf.Timestamp
+ field(:github_username, 1, type: :string)
+ field(:org_id, 2, type: :string)
+ field(:timestamp, 3, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.User.ActiveOwner do
@@ -650,8 +638,8 @@ defmodule InternalApi.User.ActiveOwner do
}
defstruct [:user_id, :timestamp]
- field :user_id, 1, type: :string
- field :timestamp, 3, type: Google.Protobuf.Timestamp
+ field(:user_id, 1, type: :string)
+ field(:timestamp, 3, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.User.InactiveOwner do
@@ -664,8 +652,8 @@ defmodule InternalApi.User.InactiveOwner do
}
defstruct [:user_id, :timestamp]
- field :user_id, 1, type: :string
- field :timestamp, 3, type: Google.Protobuf.Timestamp
+ field(:user_id, 1, type: :string)
+ field(:timestamp, 3, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.User.WorkEmailAdded do
@@ -680,10 +668,10 @@ defmodule InternalApi.User.WorkEmailAdded do
}
defstruct [:user_id, :timestamp, :old_email, :new_email]
- field :user_id, 1, type: :string
- field :timestamp, 2, type: Google.Protobuf.Timestamp
- field :old_email, 3, type: :string
- field :new_email, 4, type: :string
+ field(:user_id, 1, type: :string)
+ field(:timestamp, 2, type: Google.Protobuf.Timestamp)
+ field(:old_email, 3, type: :string)
+ field(:new_email, 4, type: :string)
end
defmodule InternalApi.User.FavoriteCreated do
@@ -696,8 +684,8 @@ defmodule InternalApi.User.FavoriteCreated do
}
defstruct [:favorite, :timestamp]
- field :favorite, 1, type: InternalApi.User.Favorite
- field :timestamp, 2, type: Google.Protobuf.Timestamp
+ field(:favorite, 1, type: InternalApi.User.Favorite)
+ field(:timestamp, 2, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.User.FavoriteDeleted do
@@ -710,51 +698,65 @@ defmodule InternalApi.User.FavoriteDeleted do
}
defstruct [:favorite, :timestamp]
- field :favorite, 1, type: InternalApi.User.Favorite
- field :timestamp, 2, type: Google.Protobuf.Timestamp
+ field(:favorite, 1, type: InternalApi.User.Favorite)
+ field(:timestamp, 2, type: Google.Protobuf.Timestamp)
end
defmodule InternalApi.User.UserService.Service do
@moduledoc false
use GRPC.Service, name: "InternalApi.User.UserService"
- rpc :Describe, InternalApi.User.DescribeRequest, InternalApi.User.DescribeResponse
-
- rpc :DescribeByRepositoryProvider,
- InternalApi.User.DescribeByRepositoryProviderRequest,
- InternalApi.User.User
-
- rpc :SearchUsers, InternalApi.User.SearchUsersRequest, InternalApi.User.SearchUsersResponse
- rpc :DescribeMany, InternalApi.User.DescribeManyRequest, InternalApi.User.DescribeManyResponse
- rpc :Update, InternalApi.User.UpdateRequest, InternalApi.User.UpdateResponse
- rpc :DeleteWithOwnedOrgs, InternalApi.User.DeleteWithOwnedOrgsRequest, InternalApi.User.User
-
- rpc :RegenerateToken,
- InternalApi.User.RegenerateTokenRequest,
- InternalApi.User.RegenerateTokenResponse
-
- rpc :ListFavorites,
- InternalApi.User.ListFavoritesRequest,
- InternalApi.User.ListFavoritesResponse
-
- rpc :CreateFavorite, InternalApi.User.Favorite, InternalApi.User.Favorite
- rpc :DeleteFavorite, InternalApi.User.Favorite, InternalApi.User.Favorite
- rpc :Referer, InternalApi.User.RefererRequest, InternalApi.User.RefererResponse
-
- rpc :CheckGithubToken,
- InternalApi.User.CheckGithubTokenRequest,
- InternalApi.User.CheckGithubTokenResponse
-
- rpc :BlockAccount, InternalApi.User.BlockAccountRequest, InternalApi.User.User
- rpc :UnblockAccount, InternalApi.User.UnblockAccountRequest, InternalApi.User.User
-
- rpc :GetRepositoryToken,
- InternalApi.User.GetRepositoryTokenRequest,
- InternalApi.User.GetRepositoryTokenResponse
-
- rpc :RefreshRepositoryProvider,
- InternalApi.User.RefreshRepositoryProviderRequest,
- InternalApi.User.RefreshRepositoryProviderResponse
+ rpc(:Describe, InternalApi.User.DescribeRequest, InternalApi.User.DescribeResponse)
+
+ rpc(
+ :DescribeByRepositoryProvider,
+ InternalApi.User.DescribeByRepositoryProviderRequest,
+ InternalApi.User.User
+ )
+
+ rpc(:DescribeByEmail, InternalApi.User.DescribeByEmailRequest, InternalApi.User.User)
+ rpc(:SearchUsers, InternalApi.User.SearchUsersRequest, InternalApi.User.SearchUsersResponse)
+ rpc(:DescribeMany, InternalApi.User.DescribeManyRequest, InternalApi.User.DescribeManyResponse)
+ rpc(:Update, InternalApi.User.UpdateRequest, InternalApi.User.UpdateResponse)
+ rpc(:DeleteWithOwnedOrgs, InternalApi.User.DeleteWithOwnedOrgsRequest, InternalApi.User.User)
+
+ rpc(
+ :RegenerateToken,
+ InternalApi.User.RegenerateTokenRequest,
+ InternalApi.User.RegenerateTokenResponse
+ )
+
+ rpc(
+ :ListFavorites,
+ InternalApi.User.ListFavoritesRequest,
+ InternalApi.User.ListFavoritesResponse
+ )
+
+ rpc(:CreateFavorite, InternalApi.User.Favorite, InternalApi.User.Favorite)
+ rpc(:DeleteFavorite, InternalApi.User.Favorite, InternalApi.User.Favorite)
+
+ rpc(
+ :CheckGithubToken,
+ InternalApi.User.CheckGithubTokenRequest,
+ InternalApi.User.CheckGithubTokenResponse
+ )
+
+ rpc(:BlockAccount, InternalApi.User.BlockAccountRequest, InternalApi.User.User)
+ rpc(:UnblockAccount, InternalApi.User.UnblockAccountRequest, InternalApi.User.User)
+
+ rpc(
+ :GetRepositoryToken,
+ InternalApi.User.GetRepositoryTokenRequest,
+ InternalApi.User.GetRepositoryTokenResponse
+ )
+
+ rpc(
+ :RefreshRepositoryProvider,
+ InternalApi.User.RefreshRepositoryProviderRequest,
+ InternalApi.User.RefreshRepositoryProviderResponse
+ )
+
+ rpc(:Create, InternalApi.User.CreateRequest, InternalApi.User.User)
end
defmodule InternalApi.User.UserService.Stub do
diff --git a/plumber/proto/mix.exs b/plumber/proto/mix.exs
index d368819ba..0359cadae 100644
--- a/plumber/proto/mix.exs
+++ b/plumber/proto/mix.exs
@@ -5,12 +5,14 @@ defmodule Proto.Mixfile do
use Mix.Project
def project do
- [app: :proto,
- version: "0.2.0",
- elixir: "~> 1.11",
- build_embedded: Mix.env == :prod,
- start_permanent: Mix.env == :prod,
- deps: deps()]
+ [
+ app: :proto,
+ version: "0.2.0",
+ elixir: "~> 1.11",
+ build_embedded: Mix.env() == :prod,
+ start_permanent: Mix.env() == :prod,
+ deps: deps()
+ ]
end
# Configuration for the OTP application
@@ -18,8 +20,7 @@ defmodule Proto.Mixfile do
# Type "mix help compile.app" for more information
def application do
# Specify extra applications you'll use from Erlang/Elixir
- [extra_applications: [:logger],
- mod: {Proto.Application, []}]
+ [extra_applications: [:logger], mod: {Proto.Application, []}]
end
# Dependencies can be Hex packages:
@@ -35,7 +36,7 @@ defmodule Proto.Mixfile do
[
{:grpc, "~> 0.3"},
{:log_tee, git: "https://github.com/renderedtext/log-tee.git"},
- {:uuid, "~> 1.1"},
+ {:uuid, "~> 1.1"}
]
end
end
From 952680813b1e6b013e0bb5fcb5e7354f0e6a0c1e Mon Sep 17 00:00:00 2001
From: Amir Hasanbasic <43892661+hamir-suspect@users.noreply.github.com>
Date: Mon, 14 Jul 2025 11:09:11 +0200
Subject: [PATCH 25/87] fix(plumber): handle skipped blocks in partial pipeline
rebuilds (#429)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
When performing a partial rebuild, skipped blocks from the original
pipeline caused the system to get stuck.
## Result
- ✅ Skipped blocks in partial rebuilds now transition directly to
`done/passed/skipped`
- ✅ No more failed `Block.duplicate` calls with `nil` `block_id`
- ✅ Rebuilt pipelines preserve the skipped state semantically
- ✅ System no longer gets stuck in `initializing` state for skipped
blocks
## ✅ Checklist
- [x] I have tested this change
- [ ] This change requires documentation update
---
.../.semaphore/skip_block_with_failed_job.yml | 27 +++++++++++++++++
.../stm_handler/initializing_state.ex | 4 +++
plumber/ppl/test/grpc/server_test.exs | 29 +++++++++++++++++++
3 files changed, 60 insertions(+)
create mode 100644 plumber/block/priv/repos/22_skip_block/.semaphore/skip_block_with_failed_job.yml
diff --git a/plumber/block/priv/repos/22_skip_block/.semaphore/skip_block_with_failed_job.yml b/plumber/block/priv/repos/22_skip_block/.semaphore/skip_block_with_failed_job.yml
new file mode 100644
index 000000000..01da187ce
--- /dev/null
+++ b/plumber/block/priv/repos/22_skip_block/.semaphore/skip_block_with_failed_job.yml
@@ -0,0 +1,27 @@
+version: "v1.0"
+name: Pipeline
+agent:
+ machine:
+ type: e1-standard-2
+ os_image: ubuntu1804
+blocks:
+ - name: Failing
+ dependencies: []
+ task:
+ jobs:
+ - commands:
+ - exit 127
+ - name: B
+ dependencies: []
+ task:
+ jobs:
+ - commands:
+ - echo ok
+ - name: Deployment - skip on dev branches
+ dependencies: [B]
+ skip:
+ when: "(branch =~ '^dev' and branch != 'dev-execute-dpl') or tag =~ '^v0\.' or pull_request =~ '1.*'"
+ task:
+ jobs:
+ - commands:
+ - exit 127
diff --git a/plumber/ppl/lib/ppl/ppl_blocks/stm_handler/initializing_state.ex b/plumber/ppl/lib/ppl/ppl_blocks/stm_handler/initializing_state.ex
index 27599dd72..8ecf30666 100644
--- a/plumber/ppl/lib/ppl/ppl_blocks/stm_handler/initializing_state.ex
+++ b/plumber/ppl/lib/ppl/ppl_blocks/stm_handler/initializing_state.ex
@@ -60,6 +60,10 @@ defmodule Ppl.PplBlocks.STMHandler.InitializingState do
end
end
+ defp rebuild_or_duplicate_block(orig_ppl_blk = %{block_id: nil, state: "done", result: "passed", result_reason: "skipped"}, new_ppl_id) do
+ {:ok, fn _, _ -> {:ok, %{state: "done", result: "passed", result_reason: "skipped"}} end}
+ end
+
defp rebuild_or_duplicate_block(orig_ppl_blk = %{state: "done", result: "passed"}, new_ppl_id) do
case Block.duplicate(orig_ppl_blk.block_id, new_ppl_id) do
{:ok, new_block_id} ->
diff --git a/plumber/ppl/test/grpc/server_test.exs b/plumber/ppl/test/grpc/server_test.exs
index c9cf2179a..f32acb7fc 100644
--- a/plumber/ppl/test/grpc/server_test.exs
+++ b/plumber/ppl/test/grpc/server_test.exs
@@ -2268,6 +2268,35 @@ defmodule Ppl.Grpc.Server.Test do
end
end
+ @tag :integration
+ test "gRPC partial_rebuild() - handles skipped blocks correctly" do
+ {:ok, %{ppl_id: ppl_id}} =
+ %{"repo_name" => "22_skip_block", "file_name" => "skip_block_with_failed_job.yml", "label" => "dev-test"}
+ |> Test.Helpers.schedule_request_factory(:local)
+ |> Actions.schedule()
+
+ loopers = Test.Helpers.start_all_loopers()
+ {:ok, _ppl} = Test.Helpers.wait_for_ppl_state(ppl_id, "done", 18_000)
+
+
+ {:ok, orig_blk} = PplBlocksQueries.get_by_id_and_index(ppl_id, 2)
+ assert orig_blk.result == "passed"
+ assert orig_blk.result_reason == "skipped"
+ assert orig_blk.block_id == nil
+
+ request_token = UUID.uuid4()
+ new_ppl_id = assert_partial_rebuild(ppl_id, request_token, :ok)
+
+ {:ok, _new_ppl} = Test.Helpers.wait_for_ppl_state(new_ppl_id, "done", 18_000)
+ Test.Helpers.stop_all_loopers(loopers)
+
+ {:ok, new_blk} = PplBlocksQueries.get_by_id_and_index(new_ppl_id, 2)
+ assert new_blk.result == "passed"
+ assert new_blk.result_reason == "skipped"
+ assert new_blk.block_id == nil
+ assert new_blk.duplicate == true
+ end
+
defp create_pipeline_with_deployment_target(deployment_target_id) do
source_args = Test.Support.RequestFactory.source_args(%{})
From b485c3acab8ec7eef957083be809c91e58fa212f Mon Sep 17 00:00:00 2001
From: Amir Hasanbasic <43892661+hamir-suspect@users.noreply.github.com>
Date: Mon, 14 Jul 2025 12:45:39 +0200
Subject: [PATCH 26/87] fix(front): default simple template for new project
onboarding (#430)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
`simple.yml` template was removed by mistake here:
https://github.com/semaphoreio/semaphore/pull/31/files
add template validation
## ✅ Checklist
- [ ] I have tested this change
- [ ] This change requires documentation update
---
front/Makefile | 12 +-
front/scripts/check-templates.sh | 150 ++++++++++++++++++
.../ce_new/templates/elixir_docker_etl.yml | 4 +
.../saas_new/templates/java_saas_spring.yml | 4 +
.../saas_new/templates/phoenix_docker.yml | 1 +
.../saas_new/templates/simple.yml | 47 ++++++
.../saas_new/templates/simple_saas.yml | 1 +
7 files changed, 212 insertions(+), 7 deletions(-)
create mode 100755 front/scripts/check-templates.sh
create mode 100644 front/workflow_templates/saas_new/templates/simple.yml
diff --git a/front/Makefile b/front/Makefile
index 4c43954d8..a3cd56b22 100644
--- a/front/Makefile
+++ b/front/Makefile
@@ -83,13 +83,6 @@ pb.gen.public:
scripts/vagrant_sudo chown -R $$(id -u $${USER}):$$(id -g $${USER}) lib/public_api
rm -rf $(TMP_REPO_DIR)
-workflow.templates.gen:
- @echo "Generating workflow templates"
- rm -rf $(TMP_REPO_DIR)
- git clone git@github.com:renderedtext/app-design.git $(TMP_REPO_DIR) && (cd $(TMP_REPO_DIR) && git checkout $(APP_DESIGN_BRANCH) && make check.templates && cd -)
- ./scripts/generate-workflow-templates.sh $(TMP_REPO_DIR)
- rm -rf $(TMP_REPO_DIR)
-
deps.check: build
ifeq ($(CI),)
docker compose $(DOCKER_COMPOSE_OPTS) run --no-deps -e MIX_ENV=dev app ash -c 'mix deps.unlock --check-unused'
@@ -158,3 +151,8 @@ dev.ce.server: build
console.ce.bash:
DOCKER_COMPOSE_RUN_OPTS="--service-ports $(CONTAINER_CE_ENV_VARS)" $(MAKE) console.bash
+
+workflow.templates.check:
+ @echo "📦 Checking templates..."
+ ./scripts/check-templates.sh workflow_templates/saas_new
+ ./scripts/check-templates.sh workflow_templates/ce_new
diff --git a/front/scripts/check-templates.sh b/front/scripts/check-templates.sh
new file mode 100755
index 000000000..a2552968a
--- /dev/null
+++ b/front/scripts/check-templates.sh
@@ -0,0 +1,150 @@
+#!/usr/bin/env bash
+
+set -euo pipefail
+IFS=$'\n\t'
+
+if [ $# -lt 1 ]; then
+ echo "Usage: $0 <root_dir>"
+ exit 1
+fi
+
+root_dir="$1"
+properties_dir="${root_dir}/properties"
+setup_json="${root_dir}/setup.json"
+
+# First check if setup.json exists and is valid
+if [ ! -f "$setup_json" ]; then
+ echo "❗️ Error: setup.json not found in root directory"
+ exit 1
+fi
+
+# Validate setup.json
+if ! jq empty "$setup_json" 2>/dev/null; then
+ echo "❗️ Error: setup.json is not a valid JSON file"
+ exit 1
+else
+ echo "✅ setup.json is valid"
+fi
+
+if [ ! -d "$properties_dir" ]; then
+ echo "❗️ Error: Properties directory not found: $properties_dir"
+ exit 1
+fi
+
+required_fields=(
+ "title"
+ "description"
+ "short_description"
+ "environment"
+ "icon"
+ "template_path"
+)
+
+exit_code=0
+templates_checked=0
+
+# Function to validate YAML block dependencies
+validate_yaml_blocks() {
+ local yaml_file="$1"
+ local title="$2"
+
+ # First check if the YAML is valid
+ if ! yq eval '.' "$yaml_file" > /dev/null 2>&1; then
+ echo "❌ Error: Invalid YAML in template for '${title}': ${yaml_file}"
+ return 1
+ fi
+
+ # Get all block names
+ local block_names
+ block_names=$(yq eval '.blocks[].name' "$yaml_file")
+
+ if [ -z "$block_names" ]; then
+ echo "⚠️ Warning: No blocks found in template for '${title}': ${yaml_file}"
+ return 0
+ fi
+
+ # Check blocks for valid dependencies (if present)
+ local invalid_deps=0
+ while IFS= read -r block; do
+ local block_name
+ block_name=$(yq eval '.blocks[] | select(.name == "'$block'") | .name' "$yaml_file")
+
+ # Check if dependencies field exists (optional)
+ local deps_field
+ deps_field=$(yq eval '.blocks[] | select(.name == "'$block'") | has("dependencies")' "$yaml_file")
+
+ # Only validate dependencies if the field exists
+ if [ "$deps_field" = "true" ]; then
+ # Check if dependencies field is an array
+ local deps_type
+ deps_type=$(yq eval '.blocks[] | select(.name == "'$block'") | .dependencies | type' "$yaml_file")
+
+ if [ "$deps_type" != "!!seq" ]; then
+ echo "❌ Error: Block '${block_name}' in '${title}' has dependencies field that is not an array"
+ ((invalid_deps++))
+ else
+ # Validate each dependency in this block
+ while IFS= read -r dep; do
+ if [ ! -z "$dep" ] && ! echo "$block_names" | grep -Fxq "$dep"; then
+ echo "❌ Error: Block '${block_name}' in '${title}' has undefined dependency: ${dep}"
+ ((invalid_deps++))
+ fi
+ done < <(yq eval '.blocks[] | select(.name == "'$block'") | .dependencies[]' "$yaml_file")
+ fi
+ fi
+ done < <(yq eval '.blocks[].name' "$yaml_file")
+
+ if [ $invalid_deps -eq 0 ]; then
+ echo "✅ Block dependencies valid in template for '${title}': ${yaml_file}"
+ fi
+
+ return $invalid_deps
+}
+
+for json_file in "$properties_dir"/*.properties.json; do
+ if [ ! -f "$json_file" ]; then
+ continue
+ fi
+
+ # Check if the JSON file is valid
+ if ! jq empty "$json_file" 2>/dev/null; then
+ echo "❌ Error: $(basename "$json_file") is not a valid JSON file"
+ exit_code=1
+ continue
+ fi
+
+ # Check required fields
+ for field in "${required_fields[@]}"; do
+ value=$(jq -r ".$field" "$json_file")
+ if [ "$value" = "null" ] || [ -z "$value" ]; then
+ echo "❗️ Error: Required field '$field' is missing in $(basename "$json_file")"
+ exit_code=1
+ fi
+ done
+
+ template_path=$(jq -r '.template_path' "$json_file")
+ title=$(jq -r '.title' "$json_file")
+
+ if [ "$template_path" = "null" ]; then
+ echo "❗️ Warning: No template_path found in $json_file"
+ continue
+ fi
+
+ full_template_path="${root_dir}/${template_path}"
+ if [ ! -f "$full_template_path" ]; then
+ echo "❌ Error: Template not found for '${title}', properties define template as: ${template_path}"
+ exit_code=1
+ else
+ # Validate YAML block dependencies
+ if ! validate_yaml_blocks "$full_template_path" "$title"; then
+ exit_code=1
+ fi
+ fi
+ templates_checked=$((templates_checked + 1))
+done
+
+if [ $exit_code -eq 0 ] && [ $templates_checked -gt 0 ]; then
+ echo "✅ All templates in ${root_dir} are valid"
+fi
+
+exit $exit_code
\ No newline at end of file
diff --git a/front/workflow_templates/ce_new/templates/elixir_docker_etl.yml b/front/workflow_templates/ce_new/templates/elixir_docker_etl.yml
index dfd3b9c74..a95ad8801 100644
--- a/front/workflow_templates/ce_new/templates/elixir_docker_etl.yml
+++ b/front/workflow_templates/ce_new/templates/elixir_docker_etl.yml
@@ -41,6 +41,7 @@ global_job_config:
blocks:
- name: "🛠️ Setup"
+ dependencies: []
task:
jobs:
- name: "Compile Code"
@@ -48,6 +49,7 @@ blocks:
- mix compile # Compile the project
- name: "🔍 Analyze Code"
+ dependencies: ["🛠️ Setup"]
task:
jobs:
- name: "Run Credo"
@@ -58,6 +60,7 @@ blocks:
- mix format --check-formatted # Ensure all code is properly formatted
- name: "🧪 Run Tests"
+ dependencies: ["🔍 Analyze Code"]
task:
jobs:
- name: "Run Tests and Generate Coverage"
@@ -71,6 +74,7 @@ blocks:
- cache store coverage-$SEMAPHORE_WORKFLOW_ID cover # Store coverage results
- name: "📦 Build Docker Image"
+ dependencies: ["🧪 Run Tests"]
task:
prologue:
commands:
diff --git a/front/workflow_templates/saas_new/templates/java_saas_spring.yml b/front/workflow_templates/saas_new/templates/java_saas_spring.yml
index 97c07cb96..2b7287c8a 100644
--- a/front/workflow_templates/saas_new/templates/java_saas_spring.yml
+++ b/front/workflow_templates/saas_new/templates/java_saas_spring.yml
@@ -24,6 +24,7 @@ global_job_config:
blocks:
- name: "⚙️ Build"
+ dependencies: []
task:
env_vars:
- name: MAVEN_OPTS
@@ -35,6 +36,7 @@ blocks:
- cache store # Cache the build output for later use
- name: "🧪 Test"
+ dependencies: ["⚙️ Build"]
task:
env_vars:
- name: MAVEN_OPTS
@@ -54,6 +56,7 @@ blocks:
- test-results publish target/surefire-reports/*.xml # Publish test reports to Semaphore
- name: "🚀 Performance Tests"
+ dependencies: ["⚙️ Build"]
task:
env_vars:
- name: MAVEN_OPTS
@@ -68,6 +71,7 @@ blocks:
- 'mvn jmeter:results' # Process JMeter test results
- name: "🐳 Dockerize"
+ dependencies: ["🚀 Performance Tests"]
task:
env_vars:
- name: MAVEN_OPTS
diff --git a/front/workflow_templates/saas_new/templates/phoenix_docker.yml b/front/workflow_templates/saas_new/templates/phoenix_docker.yml
index 7b3a68c7e..a2325f065 100644
--- a/front/workflow_templates/saas_new/templates/phoenix_docker.yml
+++ b/front/workflow_templates/saas_new/templates/phoenix_docker.yml
@@ -44,6 +44,7 @@ global_job_config:
blocks:
# ⚙️ Block 1: Install dependencies & compile – caches Hex/Rebar & JS assets
- name: "⚙️ Install & Compile"
+ dependencies: []
task:
jobs:
- name: "⚙️ Install & Compile"
diff --git a/front/workflow_templates/saas_new/templates/simple.yml b/front/workflow_templates/saas_new/templates/simple.yml
new file mode 100644
index 000000000..4a27989ec
--- /dev/null
+++ b/front/workflow_templates/saas_new/templates/simple.yml
@@ -0,0 +1,47 @@
+# This is a simple Semaphore configuration file
+# For more information about Semaphore configuration visit:
+# https://docs.semaphoreci.com/reference/pipeline-yaml-reference/
+
+version: v1.0 # Semaphore configuration version
+name: "🎯 Simple CI Pipeline" # Pipeline display name
+
+# Define the machine type and OS image
+agent:
+ machine:
+ type: {{ machine_type }}
+ os_image: {{ os_image }}
+
+# Configure when to stop the pipeline early
+fail_fast:
+ stop:
+ when: branch != 'main' # Stop all blocks if a job fails on non-main branches
+auto_cancel:
+ running:
+ when: branch != 'main' # Cancel running pipelines on non-main branches
+ queued:
+ when: branch = 'main' # Cancel queued pipelines on main branch
+
+# Pipeline blocks represent groups of jobs that can run in parallel
+blocks:
+ # Block for testing
+ - name: "🧪 Test"
+ task:
+ jobs:
+ - name: "🟢 Run Tests"
+ commands:
+ - echo "Checking out code..."
+ - echo "Installing dependencies..."
+ - echo "Running test suite..."
+ - echo "Tests completed successfully!"
+
+ # Block for deployment
+ - name: "🚀 Deploy"
+ dependencies: ["🧪 Test"] # Only deploy if tests pass
+ task:
+ jobs:
+ - name: "📦 Deploy to Production"
+ commands:
+ - echo "Building application..."
+ - echo "Running security checks..."
+ - echo "Deploying to production..."
+ - echo "Deployment completed successfully!"
\ No newline at end of file
diff --git a/front/workflow_templates/saas_new/templates/simple_saas.yml b/front/workflow_templates/saas_new/templates/simple_saas.yml
index 133628847..88698ddf5 100644
--- a/front/workflow_templates/saas_new/templates/simple_saas.yml
+++ b/front/workflow_templates/saas_new/templates/simple_saas.yml
@@ -25,6 +25,7 @@ auto_cancel:
blocks:
# Block for testing
- name: "🧪 Test"
+ dependencies: []
task:
jobs:
- name: "🟢 Run Tests"
From 19813e806ea5c024d11221d23cbb7a9ee122ea57 Mon Sep 17 00:00:00 2001
From: Dejan K
Date: Mon, 14 Jul 2025 16:38:41 +0200
Subject: [PATCH 27/87] fix(public-api-gateway): add AMQP_URL env. var (#432)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
Fixes `public-api-gateway` deployment issues:
* Adds missing `AMQP_URL` environment variable
* Corrects `JOBS_V1ALPHA_PUBLIC_GRPC_API_ENDPOINT` gRPC endpoint
Also updates E2E test configuration to align with changes in secret
creation during bootstrapping.
Related
[task](https://github.com/renderedtext/project-tasks/issues/2640).
## ✅ Checklist
- [x] I have tested this change
- [ ] ~This change requires documentation update~ N/A
---
e2e/scripts/test-setup.sh | 8 ++++----
e2e/test/e2e/ui/git_integrations_test.exs | 2 +-
public-api-gateway/helm/templates/dpl.yaml | 7 ++++++-
3 files changed, 11 insertions(+), 6 deletions(-)
diff --git a/e2e/scripts/test-setup.sh b/e2e/scripts/test-setup.sh
index 258eb61ef..7d8f82b07 100755
--- a/e2e/scripts/test-setup.sh
+++ b/e2e/scripts/test-setup.sh
@@ -7,14 +7,14 @@ if [[ "$CLOUD_TEST_ENVIRONMENT_TYPE" == "gke" ]]; then
artifact pull project "environments/${CLOUD_TEST_ENV_PREFIX}/terraform.tfstate" -d terraform.tfstate
CLUSTER_NAME=$(terraform output -raw cluster_name)
gcloud container clusters get-credentials ${CLUSTER_NAME} --region us-east4 --project ${GOOGLE_PROJECT_NAME}
- export SEMAPHORE_API_TOKEN=$(kubectl get secret root-user -o jsonpath='{.data.token}' | base64 -d)
- export SEMAPHORE_USER_PASSWORD=$(kubectl get secret root-user -o jsonpath='{.data.password}' | base64 -d)
+ export SEMAPHORE_API_TOKEN=$(kubectl get secret semaphore-authentication -o jsonpath='{.data.ROOT_USER_TOKEN}' | base64 -d)
+ export SEMAPHORE_USER_PASSWORD=$(kubectl get secret semaphore-authentication -o jsonpath='{.data.ROOT_USER_PASSWORD}' | base64 -d)
elif [[ "$CLOUD_TEST_ENVIRONMENT_TYPE" == "single-vm" ]]; then
artifact pull project "environments/${CLOUD_TEST_ENV_PREFIX}/private-ssh-key" -d private-ssh-key
artifact pull project "environments/${CLOUD_TEST_ENV_PREFIX}/private-ssh-key.pub" -d private-ssh-key.pub
chmod 400 private-ssh-key
- export SEMAPHORE_API_TOKEN=$(gcloud compute ssh --ssh-key-file private-ssh-key test-${CLOUD_TEST_ENV_PREFIX} --command "kubectl get secret root-user -o jsonpath='{.data.token}' | base64 -d")
- export SEMAPHORE_USER_PASSWORD=$(gcloud compute ssh --ssh-key-file private-ssh-key test-${CLOUD_TEST_ENV_PREFIX} --command "kubectl get secret root-user -o jsonpath='{.data.password}' | base64 -d")
+ export SEMAPHORE_API_TOKEN=$(gcloud compute ssh --ssh-key-file private-ssh-key test-${CLOUD_TEST_ENV_PREFIX} --command "kubectl get secret semaphore-authentication -o jsonpath='{.data.ROOT_USER_TOKEN}' | base64 -d")
+ export SEMAPHORE_USER_PASSWORD=$(gcloud compute ssh --ssh-key-file private-ssh-key test-${CLOUD_TEST_ENV_PREFIX} --command "kubectl get secret semaphore-authentication -o jsonpath='{.data.ROOT_USER_PASSWORD}' | base64 -d")
else
echo "Unknown environment type: ${CLOUD_TEST_ENVIRONMENT_TYPE}"
exit 1
diff --git a/e2e/test/e2e/ui/git_integrations_test.exs b/e2e/test/e2e/ui/git_integrations_test.exs
index 0e4a2ec6c..aeb3e4a08 100644
--- a/e2e/test/e2e/ui/git_integrations_test.exs
+++ b/e2e/test/e2e/ui/git_integrations_test.exs
@@ -98,7 +98,7 @@ defmodule E2E.UI.GitIntegrationsTest do
session
|> assert_has(Wallaby.Query.text("GitHub App Connection"))
# Green circle indicator
- |> assert_has(Wallaby.Query.css("[data-testid='connection-status']"))
+ |> assert_has(Wallaby.Query.css("[data-testid='connection-status']", minimum: 1))
end
test "has required permissions section", %{session: session} do
diff --git a/public-api-gateway/helm/templates/dpl.yaml b/public-api-gateway/helm/templates/dpl.yaml
index 52a99f179..566f27579 100644
--- a/public-api-gateway/helm/templates/dpl.yaml
+++ b/public-api-gateway/helm/templates/dpl.yaml
@@ -60,11 +60,16 @@ spec:
- name: DASHBOARDS_V1ALPHA_PUBLIC_GRPC_API_ENDPOINT
value: "dashboards-v1alpha-public-grpc-api:50051"
- name: JOBS_V1ALPHA_PUBLIC_GRPC_API_ENDPOINT
- value: "jobs-v1alpha-public-grpc-api:50051"
+ value: "jobs-v1alpha-public-grpc-api-headless:50051"
- name: NOTIFICATIONS_V1ALPHA_PUBLIC_GRPC_API_ENDPOINT
value: "notifications-v1alpha-public-grpc-api:50051"
- name: PROJECT_SECRETS_V1_PUBLIC_GRPC_API_ENDPOINT
value: "secrethub-grpc:50051"
+ - name: AMQP_URL
+ valueFrom:
+ secretKeyRef:
+ name: {{ .Values.global.rabbitmq.secretName }}
+ key: amqp-url
{{- if .Values.resources }}
resources:
{{ toYaml .Values.resources | indent 13 }}
From cfab6f1f7757454ae067ee413e2e97127281194a Mon Sep 17 00:00:00 2001
From: Veljko Maksimovic <45179708+VeljkoMaksimovic@users.noreply.github.com>
Date: Mon, 14 Jul 2025 17:10:13 +0200
Subject: [PATCH 28/87] fix(front): Fix templates that don't have valid
semaphore yamls (#433)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
While working on [Pen test
env](https://github.com/renderedtext/project-tasks/issues/2580) I've
noticed some starter templates are not working properly. There were no
explicit dependencies added for all of the blocks.
## ✅ Checklist
- [ ] I have tested this change
- [ ] This change requires documentation update
---
front/workflow_templates/saas_new/templates/simple.yml | 1 +
front/workflow_templates/saas_new/templates/simple_docker.yml | 1 +
front/workflow_templates/saas_new/templates/xcode_saas.yml | 1 +
3 files changed, 3 insertions(+)
diff --git a/front/workflow_templates/saas_new/templates/simple.yml b/front/workflow_templates/saas_new/templates/simple.yml
index 4a27989ec..44863fe6c 100644
--- a/front/workflow_templates/saas_new/templates/simple.yml
+++ b/front/workflow_templates/saas_new/templates/simple.yml
@@ -25,6 +25,7 @@ auto_cancel:
blocks:
# Block for testing
- name: "🧪 Test"
+ dependencies: []
task:
jobs:
- name: "🟢 Run Tests"
diff --git a/front/workflow_templates/saas_new/templates/simple_docker.yml b/front/workflow_templates/saas_new/templates/simple_docker.yml
index b0624fdf3..f571d2a7c 100644
--- a/front/workflow_templates/saas_new/templates/simple_docker.yml
+++ b/front/workflow_templates/saas_new/templates/simple_docker.yml
@@ -29,6 +29,7 @@ auto_cancel:
blocks:
# Block for testing
- name: "🧪 Test"
+ dependencies: []
task:
jobs:
- name: "🟢 Run Tests"
diff --git a/front/workflow_templates/saas_new/templates/xcode_saas.yml b/front/workflow_templates/saas_new/templates/xcode_saas.yml
index 6fdbf65ec..73c69544c 100644
--- a/front/workflow_templates/saas_new/templates/xcode_saas.yml
+++ b/front/workflow_templates/saas_new/templates/xcode_saas.yml
@@ -26,6 +26,7 @@ global_job_config:
blocks:
- name: "🛠️ Setup and Cache"
+ dependencies: []
task:
jobs:
- name: "Install Dependencies"
From 1e934807096852389662d15f428f6d0e3b7c65d6 Mon Sep 17 00:00:00 2001
From: Veljko Maksimovic <45179708+VeljkoMaksimovic@users.noreply.github.com>
Date: Wed, 16 Jul 2025 13:47:24 +0200
Subject: [PATCH 29/87] fix(ppl): Add label to schedule wf via api request
(#436)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
branch "run" conditions rely on the pipeline label to determine what the
current branch is, and we were not setting up labels at all when triggering
workflows via the API.
https://github.com/renderedtext/tasks/issues/8281
## ✅ Checklist
- [x] I have tested this change
- [ ] This change requires documentation update
---
.../pipelines_api/workflow_client/wf_request_formatter.ex | 7 +++++++
public-api/v1alpha/test/workflow_client_test.exs | 1 +
2 files changed, 8 insertions(+)
diff --git a/public-api/v1alpha/lib/pipelines_api/workflow_client/wf_request_formatter.ex b/public-api/v1alpha/lib/pipelines_api/workflow_client/wf_request_formatter.ex
index 68fbedfa2..df1f57fe6 100644
--- a/public-api/v1alpha/lib/pipelines_api/workflow_client/wf_request_formatter.ex
+++ b/public-api/v1alpha/lib/pipelines_api/workflow_client/wf_request_formatter.ex
@@ -14,6 +14,7 @@ defmodule PipelinesAPI.WorkflowClient.WFRequestFormatter do
def form_schedule_request(params) when is_map(params) do
%{
service: service_type(params["repository"].integration_type),
+ label: params |> Map.get("reference", "") |> label(),
repo: %{
branch_name: params |> Map.get("reference", "") |> branch_name(),
commit_sha: params |> Map.get("commit_sha", "")
@@ -59,6 +60,12 @@ defmodule PipelinesAPI.WorkflowClient.WFRequestFormatter do
defp branch_name("refs/heads/" <> branch_name), do: branch_name
defp branch_name(name), do: name
+ defp label(""), do: ""
+ defp label("refs/tags/" <> tag), do: tag
+ defp label("refs/pull/" <> number), do: number
+ defp label("refs/heads/" <> branch_name), do: branch_name
+ defp label(name), do: name
+
# Terminate
def form_terminate_request(wf_id, requester_id) when is_binary(wf_id) do
diff --git a/public-api/v1alpha/test/workflow_client_test.exs b/public-api/v1alpha/test/workflow_client_test.exs
index d5dff39db..bec57ec89 100644
--- a/public-api/v1alpha/test/workflow_client_test.exs
+++ b/public-api/v1alpha/test/workflow_client_test.exs
@@ -55,6 +55,7 @@ defmodule PipelinesAPI.WorkflowClient.Test do
assert {:ok, request} = WFRequestFormatter.form_schedule_request(params)
assert request.service == ServiceType.value(:GIT_HUB)
+ assert request.label == "main"
assert request.repo.branch_name == "main"
assert request.repo.commit_sha == "773d5c953bd68cc97efa81d2e014449336265fb4"
assert {:ok, _} = UUID.info(request.request_token)
From 02cc44ad923035d2a0d02b2e70051ae42d2dc8d1 Mon Sep 17 00:00:00 2001
From: Veljko Maksimovic <45179708+VeljkoMaksimovic@users.noreply.github.com>
Date: Wed, 16 Jul 2025 16:15:25 +0200
Subject: [PATCH 30/87] toil(guard): Change abuseprevention address in guard
(#437)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
https://github.com/renderedtext/tasks/issues/8283
## ✅ Checklist
- [ ] I have tested this change
- [ ] This change requires documentation update
---
guard/templates/blocked.html.eex | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/guard/templates/blocked.html.eex b/guard/templates/blocked.html.eex
index 1164da0b6..ae2737d82 100644
--- a/guard/templates/blocked.html.eex
+++ b/guard/templates/blocked.html.eex
@@ -59,7 +59,7 @@
If you are trying to join an existing Semaphore organization, please ask your organization owner or admin to send you an invite.
- If you're new to Semaphore, please reach out to us at abuseprevention@semaphoreci.com, preferably from a business email address, let us know how you login, and we will review your request.
+ If you're new to Semaphore, please reach out to us at abuseprevention@semaphore.io, preferably from a business email address, let us know how you login, and we will review your request.
From 531d9008e35697fcea57d7d056013a12f42a27b9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Rados=C5=82aw=20Wo=C5=BAniak?=
<184065+radwo@users.noreply.github.com>
Date: Thu, 17 Jul 2025 12:33:12 +0200
Subject: [PATCH 31/87] Update get credentials script (#428)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
The place of storing credentials was changed in this PR
https://github.com/semaphoreio/semaphore/pull/338
## ✅ Checklist
- [x] I have tested this change
- [x] This change requires documentation update
---
LOCAL-DEVELOPMENT.md | 6 +++---
helm-chart/templates/NOTES.txt | 2 +-
2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/LOCAL-DEVELOPMENT.md b/LOCAL-DEVELOPMENT.md
index 0061f65eb..0b3cb5a33 100644
--- a/LOCAL-DEVELOPMENT.md
+++ b/LOCAL-DEVELOPMENT.md
@@ -61,9 +61,9 @@ For development and testing, you can run Semaphore locally using Minikube:
```bash
# Get credentials
- kubectl get secret root-user -n default -o jsonpath='{.data.email}' | base64 -d
- kubectl get secret root-user -n default -o jsonpath='{.data.password}' | base64 -d
- kubectl get secret root-user -n default -o jsonpath='{.data.token}' | base64 -d
+ kubectl get secret semaphore-authentication -n default -o jsonpath='{.data.ROOT_USER_EMAIL}' | base64 -d
+ kubectl get secret semaphore-authentication -n default -o jsonpath='{.data.ROOT_USER_PASSWORD}' | base64 -d
+ kubectl get secret semaphore-authentication -n default -o jsonpath='{.data.ROOT_USER_TOKEN}' | base64 -d
```
Open `https://id.semaphore.localhost` and log in!
diff --git a/helm-chart/templates/NOTES.txt b/helm-chart/templates/NOTES.txt
index 3c38aeaf3..e60283504 100644
--- a/helm-chart/templates/NOTES.txt
+++ b/helm-chart/templates/NOTES.txt
@@ -5,5 +5,5 @@ To start using the app, go to: https://id.{{ .Values.global.domain.name }}/login
You can fetch credentials for the login running this command:
-echo "Email: $(kubectl get secret {{ .Values.global.rootUser.secretName }} -n {{ .Release.Namespace }} -o jsonpath='{.data.email}' | base64 -d)"; echo "Password: $(kubectl get secret {{ .Values.global.rootUser.secretName }} -n {{ .Release.Namespace }} -o jsonpath='{.data.password}' | base64 -d)"; echo "API Token: $(kubectl get secret {{ .Values.global.rootUser.secretName }} -n {{ .Release.Namespace }} -o jsonpath='{.data.token}' | base64 -d)"
+echo "Email: $(kubectl get secret {{ include "secrets.authentication.name" . }} -n {{ .Release.Namespace }} -o jsonpath='{.data.ROOT_USER_EMAIL}' | base64 -d)"; echo "Password: $(kubectl get secret {{ include "secrets.authentication.name" . }} -n {{ .Release.Namespace }} -o jsonpath='{.data.ROOT_USER_PASSWORD}' | base64 -d)"; echo "API Token: $(kubectl get secret {{ include "secrets.authentication.name" . }} -n {{ .Release.Namespace }} -o jsonpath='{.data.ROOT_USER_TOKEN}' | base64 -d)"
=============================================================================================
From 0754cd708db707a424e1845b3bf029757e677542 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Rados=C5=82aw=20Wo=C5=BAniak?=
<184065+radwo@users.noreply.github.com>
Date: Thu, 17 Jul 2025 17:34:22 +0200
Subject: [PATCH 32/87] fix(public-api-gateway): add missing rabbitmq init
container (#439)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Service is using rabbitmq but is missing init container that waits for
rabbitmq pod bootup
## 📝 Description
## ✅ Checklist
- [x] I have tested this change
- [ ] ~This change requires documentation update~
---
public-api-gateway/helm/templates/dpl.yaml | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/public-api-gateway/helm/templates/dpl.yaml b/public-api-gateway/helm/templates/dpl.yaml
index 566f27579..6041c2508 100644
--- a/public-api-gateway/helm/templates/dpl.yaml
+++ b/public-api-gateway/helm/templates/dpl.yaml
@@ -40,6 +40,8 @@ spec:
- name: {{ . }}
{{- end }}
{{- end }}
+ initContainers:
+{{ include "initContainers.waitForRabbitMQ" . | indent 8 }}
automountServiceAccountToken: false
containers:
- name: "{{ .Chart.Name }}"
@@ -73,4 +75,4 @@ spec:
{{- if .Values.resources }}
resources:
{{ toYaml .Values.resources | indent 13 }}
-{{- end }}
\ No newline at end of file
+{{- end }}
From b1ace6de0299d2a1cb9e75b0edf340171f360bea Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Rados=C5=82aw=20Wo=C5=BAniak?=
<184065+radwo@users.noreply.github.com>
Date: Fri, 18 Jul 2025 10:18:36 +0200
Subject: [PATCH 33/87] config(public-api): increase memory resources (#440)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
During bootup, service is often killed by OOMKiller
## 📝 Description
## ✅ Checklist
- [x] I have tested this change
- [ ] ~This change requires documentation update~
---
public-api/v2/helm/values.yaml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/public-api/v2/helm/values.yaml b/public-api/v2/helm/values.yaml
index 442ccfb65..ccbaad19c 100644
--- a/public-api/v2/helm/values.yaml
+++ b/public-api/v2/helm/values.yaml
@@ -10,7 +10,7 @@ resources:
cpu: 500m
memory: 100Mi
limits:
- memory: 250Mi
+ memory: 300Mi
statsd:
resources:
@@ -19,4 +19,4 @@ statsd:
memory: "50Mi"
requests:
cpu: "5m"
- memory: "25Mi"
\ No newline at end of file
+ memory: "25Mi"
From c5fc907ee9f564b06e81d3c366502425d3bde713 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Miko=C5=82aj=20Kutryj?=
Date: Wed, 23 Jul 2025 13:32:20 +0200
Subject: [PATCH 34/87] toil(bootstrapper): fix CVE-2025-22868, bump go to 1.23
(#444)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
- Fixes CVE-2025-22868
- Bumps golang to 1.23
## ✅ Checklist
- [x] I have tested this change
- [ ] This change requires documentation update
---
bootstrapper/Dockerfile | 6 +++---
bootstrapper/go.mod | 6 +++---
bootstrapper/go.sum | 4 ++--
3 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/bootstrapper/Dockerfile b/bootstrapper/Dockerfile
index f8f9acb0f..c6c2e9948 100644
--- a/bootstrapper/Dockerfile
+++ b/bootstrapper/Dockerfile
@@ -1,4 +1,4 @@
-ARG GO_VERSION=1.22
+ARG GO_VERSION=1.23
ARG UBUNTU_VERSION=3.17.7
ARG ALPINE_VERSION=3.20.3
ARG BUILDER_IMAGE="golang:${GO_VERSION}"
@@ -12,7 +12,7 @@ ENV APP_NAME=${APP_NAME}
RUN echo "Build of $APP_NAME started"
RUN apt-get update -y && apt-get install --no-install-recommends -y ca-certificates unzip curl libc-bin libc6 \
- && apt-get clean && rm -f /var/lib/apt/lists/*_*
+ && apt-get clean && rm -f /var/lib/apt/lists/*_*
WORKDIR /app
COPY pkg pkg
@@ -63,4 +63,4 @@ COPY --from=builder --chown=nobody:root /app/build/${APP_NAME} /app/build/${APP_
USER nobody
-CMD [ "/bin/sh", "-c", "/app/build/${APP_NAME}" ]
\ No newline at end of file
+CMD [ "/bin/sh", "-c", "/app/build/${APP_NAME}" ]
diff --git a/bootstrapper/go.mod b/bootstrapper/go.mod
index e9b2bb488..f8a3254cc 100644
--- a/bootstrapper/go.mod
+++ b/bootstrapper/go.mod
@@ -1,8 +1,8 @@
module github.com/semaphoreio/semaphore/bootstrapper
-go 1.22.7
+go 1.23.0
-toolchain go1.22.9
+toolchain go1.24.3
require (
github.com/golang/protobuf v1.5.4
@@ -43,7 +43,7 @@ require (
github.com/stretchr/objx v0.5.2 // indirect
github.com/x448/float16 v0.8.4 // indirect
golang.org/x/net v0.33.0 // indirect
- golang.org/x/oauth2 v0.23.0 // indirect
+ golang.org/x/oauth2 v0.27.0 // indirect
golang.org/x/sys v0.28.0 // indirect
golang.org/x/term v0.27.0 // indirect
golang.org/x/text v0.21.0 // indirect
diff --git a/bootstrapper/go.sum b/bootstrapper/go.sum
index fca5c4e55..9ea308c9e 100644
--- a/bootstrapper/go.sum
+++ b/bootstrapper/go.sum
@@ -116,8 +116,8 @@ golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLL
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
-golang.org/x/oauth2 v0.23.0 h1:PbgcYx2W7i4LvjJWEbf0ngHV6qJYr86PkAV3bXdLEbs=
-golang.org/x/oauth2 v0.23.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
+golang.org/x/oauth2 v0.27.0 h1:da9Vo7/tDv5RH/7nZDz1eMGS/q1Vv1N/7FCrBhI9I3M=
+golang.org/x/oauth2 v0.27.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
From 00507cee2eb06531ba8dfd403a2959acf4d6cd36 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Rados=C5=82aw=20Wo=C5=BAniak?=
<184065+radwo@users.noreply.github.com>
Date: Thu, 24 Jul 2025 13:51:46 +0200
Subject: [PATCH 35/87] toil(front): treat ppl name as a plain txt (#445)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
Render the pipeline name as a plain text.
## ✅ Checklist
- [x] I have tested this change
- [ ] ~This change requires documentation update~
---
front/assets/js/test_results/util/interactive_pipeline_tree.tsx | 2 +-
.../front_web/templates/test_results/member/pipeline.html.eex | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/front/assets/js/test_results/util/interactive_pipeline_tree.tsx b/front/assets/js/test_results/util/interactive_pipeline_tree.tsx
index 1d4bd72cc..e6404d536 100644
--- a/front/assets/js/test_results/util/interactive_pipeline_tree.tsx
+++ b/front/assets/js/test_results/util/interactive_pipeline_tree.tsx
@@ -99,7 +99,7 @@ export const InteractivePipelineTree = (props: Props) => {
diff --git a/front/lib/front_web/templates/test_results/member/pipeline.html.eex b/front/lib/front_web/templates/test_results/member/pipeline.html.eex
index 26c869226..3423d31e0 100644
--- a/front/lib/front_web/templates/test_results/member/pipeline.html.eex
+++ b/front/lib/front_web/templates/test_results/member/pipeline.html.eex
@@ -8,7 +8,7 @@
window.InjectedDataByBackend.pollURL = "<%= workflow_path(@conn, :status, @workflow.id) %>";
window.InjectedDataByBackend.pipelineId = "<%= @selected_pipeline_id %>";
window.InjectedDataByBackend.pipelineStatus = "<%= raw FrontWeb.PipelineView.pipeline_status_large(@pipeline) %>";
- window.InjectedDataByBackend.pipelineName = "<%= @pipeline.name %>"
+ window.InjectedDataByBackend.pipelineName = "<%= raw @pipeline.name %>";
From 99ea842b017f6d9a3b6958a2120319902bd485b9 Mon Sep 17 00:00:00 2001
From: Dejan K
Date: Thu, 24 Jul 2025 16:35:53 +0200
Subject: [PATCH 36/87] feat(security-toolbox/docker): configure
vuln-severity-source (#446)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
Adds support for configuring which vulnerability severity source to use
when scanning Docker images.
## ✅ Checklist
- [x] I have tested this change
- [ ] This change requires documentation update
---
security-toolbox/docker | 4 ++++
security-toolbox/policies/docker/trivy_image.rb | 5 +++++
2 files changed, 9 insertions(+)
diff --git a/security-toolbox/docker b/security-toolbox/docker
index 35c216f35..500f29e5d 100755
--- a/security-toolbox/docker
+++ b/security-toolbox/docker
@@ -38,6 +38,10 @@ OptionParser.new do |parser|
args[:install_dependencies] = true
end
+ parser.on("-v", "--vuln-severity-source SOURCES", "Vulnerability severity source (e.g., nvd,auto)") do |vuln_severity_source|
+ args[:vuln_severity_source] = vuln_severity_source
+ end
+
parser.on("", "--scanners SCANNERS", "Which scanners to use") do |scanners|
args[:scanners] = scanners
end
diff --git a/security-toolbox/policies/docker/trivy_image.rb b/security-toolbox/policies/docker/trivy_image.rb
index 25c5c1660..306f68d1f 100644
--- a/security-toolbox/policies/docker/trivy_image.rb
+++ b/security-toolbox/policies/docker/trivy_image.rb
@@ -12,6 +12,7 @@ def initialize(args)
@severity = args[:severity] || "HIGH,CRITICAL"
@ignore_policy = args[:ignore_policy] || nil
@scanners = args[:scanners] || "vuln,secret,license,misconfig"
+ @vuln_severity_source = args[:vuln_severity_source]
@skip_files = args[:skip_files].to_s.split(",") || []
@skip_dirs = args[:skip_dirs].to_s.split(",") || []
@@ -35,6 +36,10 @@ def test
command << "--ignore-policy #{@ignore_policy}"
end
+ if @vuln_severity_source != nil
+ command << "--vuln-severity-source #{@vuln_severity_source}"
+ end
+
@skip_files.each do |skip_file|
command << "--skip-files #{skip_file}"
end
From 9c286a7b4fe454968dcc059078f34502b627f203 Mon Sep 17 00:00:00 2001
From: Tomas Fernandez
Date: Mon, 28 Jul 2025 14:09:06 +0100
Subject: [PATCH 37/87] docs: reorganize sidebars and readd infracost recipe
(#438)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 📝 Description
This change:
- Moves the cache page under the Workflow category
- Creates a recipe category to add integrations with 3rd party tools
- The Optimization category has been renamed to Containers; it only shows
docker-related pages
- The nginx routing config has been updated to maintain the old links
- Reintroduces the infracost integration/recipe page
- Re-enables collapsible categories on the sidebars
- Fixes some rendering issues: `` caused some
problems with the markdown rendering, so I removed the `<>` symbols
- Removed the announcement bar
See https://github.com/semaphoreio/semaphore/issues/409 for more details
## ✅ Checklist
- [X] I have tested this change
- [ ] This change requires documentation update
---
docs/default.conf | 25 +-
docs/docs/getting-started/changelog.md | 819 +++++++++---------
docs/docs/getting-started/faq.md | 36 +-
.../getting-started/migration/bitbucket.md | 12 +-
docs/docs/getting-started/migration/circle.md | 22 +-
.../migration/github-actions.md | 18 +-
.../getting-started/migration/overview.md | 14 +-
docs/docs/getting-started/migration/travis.md | 15 +-
docs/docs/reference/agent-aws-stack.md | 52 +-
docs/docs/reference/env-vars.md | 10 +-
docs/docs/reference/toolbox.md | 27 +-
.../{optimization => }/cache.md | 31 +-
.../_category_.json | 0
.../container-registry.md | 0
.../{optimization => containers}/docker.md | 2 +-
.../img/add-metric.jpg | Bin
.../img/change-skip-vs-run.jpg | Bin
.../img/custom-create.jpg | Bin
.../img/dockerhub-secret.jpg | Bin
.../img/freq-cd.jpg | Bin
.../img/freq-ci.jpg | Bin
.../img/insights-settings.jpg | Bin
.../img/insights-tab.jpg | Bin
.../img/new-metric.jpg | Bin
.../img/org-health-location.jpg | Bin
.../img/org-health-overview.jpg | Bin
.../img/perf-cd.jpg | Bin
.../img/perf-ci.jpg | Bin
.../img/rel-cd.jpg | Bin
.../img/rel-ci.jpg | Bin
.../img/select-date-range.jpg | Bin
.../img/workflow-monorepo.jpg | Bin
.../docs/using-semaphore/languages/android.md | 3 +-
.../languages/elixir-erlang.md | 7 +-
docs/docs/using-semaphore/languages/go.md | 2 +-
.../using-semaphore/languages/javascript.md | 7 +-
docs/docs/using-semaphore/languages/php.md | 6 +-
docs/docs/using-semaphore/languages/python.md | 4 +-
docs/docs/using-semaphore/languages/ruby.md | 7 +-
docs/docs/using-semaphore/languages/rust.md | 4 +-
docs/docs/using-semaphore/monorepo.md | 15 +-
docs/docs/using-semaphore/pipelines.md | 31 +-
.../recipes/img/infracost-key-secret.jpg | Bin 0 -> 115039 bytes
.../recipes/img/infracost-secret.jpg | Bin 0 -> 104229 bytes
.../docs/using-semaphore/recipes/infracost.md | 195 +++++
docs/docs/using-semaphore/self-hosted-aws.md | 23 +-
docs/docs/using-semaphore/self-hosted.md | 4 +-
docs/docusaurus.config.js | 14 +-
docs/sidebars.js | 32 +-
docs/src/components/FeatureCards/index.jsx | 4 +-
.../version-CE/getting-started/faq.md | 26 +-
.../getting-started/migration/bitbucket.md | 11 +-
.../getting-started/migration/circle.md | 19 +-
.../migration/github-actions.md | 16 +-
.../getting-started/migration/overview.md | 11 +-
.../getting-started/migration/travis.md | 13 +-
.../version-CE/reference/agent-aws-stack.md | 49 +-
.../version-CE/reference/env-vars.md | 14 +-
.../version-CE/reference/toolbox.md | 22 +-
.../{optimization => }/cache.md | 29 +-
.../_category_.json | 0
.../container-registry.md | 0
.../{optimization => containers}/docker.md | 0
.../img/add-metric.jpg | Bin
.../img/change-skip-vs-run.jpg | Bin
.../img/custom-create.jpg | Bin
.../img/dockerhub-secret.jpg | Bin
.../img/freq-cd.jpg | Bin
.../img/freq-ci.jpg | Bin
.../img/insights-settings.jpg | Bin
.../img/insights-tab.jpg | Bin
.../img/new-metric.jpg | Bin
.../img/org-health-location.jpg | Bin
.../img/org-health-overview.jpg | Bin
.../img/perf-cd.jpg | Bin
.../img/perf-ci.jpg | Bin
.../img/rel-cd.jpg | Bin
.../img/rel-ci.jpg | Bin
.../img/select-date-range.jpg | Bin
.../img/workflow-monorepo.jpg | Bin
.../version-CE/using-semaphore/pipelines.md | 26 +-
.../recipes/img/infracost-key-secret.jpg | Bin 0 -> 115039 bytes
.../recipes/img/infracost-secret.jpg | Bin 0 -> 104229 bytes
.../using-semaphore/recipes/infracost.md | 195 +++++
.../using-semaphore/self-hosted-aws.md | 2 +-
.../version-CE/using-semaphore/self-hosted.md | 4 +-
.../version-EE/getting-started/faq.md | 26 +-
.../getting-started/migration/bitbucket.md | 11 +-
.../getting-started/migration/circle.md | 19 +-
.../migration/github-actions.md | 16 +-
.../getting-started/migration/overview.md | 11 +-
.../getting-started/migration/travis.md | 13 +-
.../version-EE/reference/agent-aws-stack.md | 50 +-
.../version-EE/reference/env-vars.md | 10 +-
.../version-EE/reference/toolbox.md | 22 +-
.../{optimization => }/cache.md | 29 +-
.../_category_.json | 0
.../container-registry.md | 0
.../{optimization => containers}/docker.md | 0
.../img/add-metric.jpg | Bin
.../img/change-skip-vs-run.jpg | Bin
.../img/custom-create.jpg | Bin
.../img/dockerhub-secret.jpg | Bin
.../img/freq-cd.jpg | Bin
.../img/freq-ci.jpg | Bin
.../img/insights-settings.jpg | Bin
.../img/insights-tab.jpg | Bin
.../img/new-metric.jpg | Bin
.../img/org-health-location.jpg | Bin
.../img/org-health-overview.jpg | Bin
.../img/perf-cd.jpg | Bin
.../img/perf-ci.jpg | Bin
.../img/rel-cd.jpg | Bin
.../img/rel-ci.jpg | Bin
.../img/select-date-range.jpg | Bin
.../img/workflow-monorepo.jpg | Bin
.../version-EE/using-semaphore/pipelines.md | 31 +-
.../recipes/img/infracost-key-secret.jpg | Bin 0 -> 115039 bytes
.../recipes/img/infracost-secret.jpg | Bin 0 -> 104229 bytes
.../using-semaphore/recipes/infracost.md | 195 +++++
.../using-semaphore/self-hosted-aws.md | 2 +-
.../version-EE/using-semaphore/self-hosted.md | 4 +-
.../version-CE-sidebars.json | 28 +-
.../version-EE-sidebars.json | 28 +-
docs/yarn.lock | 40 +
125 files changed, 1441 insertions(+), 972 deletions(-)
rename docs/docs/using-semaphore/{optimization => }/cache.md (82%)
rename docs/docs/using-semaphore/{optimization => containers}/_category_.json (100%)
rename docs/docs/using-semaphore/{optimization => containers}/container-registry.md (100%)
rename docs/docs/using-semaphore/{optimization => containers}/docker.md (99%)
rename docs/docs/using-semaphore/{optimization => containers}/img/add-metric.jpg (100%)
rename docs/docs/using-semaphore/{optimization => containers}/img/change-skip-vs-run.jpg (100%)
rename docs/docs/using-semaphore/{optimization => containers}/img/custom-create.jpg (100%)
rename docs/docs/using-semaphore/{optimization => containers}/img/dockerhub-secret.jpg (100%)
rename docs/docs/using-semaphore/{optimization => containers}/img/freq-cd.jpg (100%)
rename docs/docs/using-semaphore/{optimization => containers}/img/freq-ci.jpg (100%)
rename docs/docs/using-semaphore/{optimization => containers}/img/insights-settings.jpg (100%)
rename docs/docs/using-semaphore/{optimization => containers}/img/insights-tab.jpg (100%)
rename docs/docs/using-semaphore/{optimization => containers}/img/new-metric.jpg (100%)
rename docs/docs/using-semaphore/{optimization => containers}/img/org-health-location.jpg (100%)
rename docs/docs/using-semaphore/{optimization => containers}/img/org-health-overview.jpg (100%)
rename docs/docs/using-semaphore/{optimization => containers}/img/perf-cd.jpg (100%)
rename docs/docs/using-semaphore/{optimization => containers}/img/perf-ci.jpg (100%)
rename docs/docs/using-semaphore/{optimization => containers}/img/rel-cd.jpg (100%)
rename docs/docs/using-semaphore/{optimization => containers}/img/rel-ci.jpg (100%)
rename docs/docs/using-semaphore/{optimization => containers}/img/select-date-range.jpg (100%)
rename docs/docs/using-semaphore/{optimization => containers}/img/workflow-monorepo.jpg (100%)
create mode 100644 docs/docs/using-semaphore/recipes/img/infracost-key-secret.jpg
create mode 100644 docs/docs/using-semaphore/recipes/img/infracost-secret.jpg
create mode 100644 docs/docs/using-semaphore/recipes/infracost.md
rename docs/versioned_docs/version-CE/using-semaphore/{optimization => }/cache.md (83%)
rename docs/versioned_docs/version-CE/using-semaphore/{optimization => containers}/_category_.json (100%)
rename docs/versioned_docs/version-CE/using-semaphore/{optimization => containers}/container-registry.md (100%)
rename docs/versioned_docs/version-CE/using-semaphore/{optimization => containers}/docker.md (100%)
rename docs/versioned_docs/version-CE/using-semaphore/{optimization => containers}/img/add-metric.jpg (100%)
rename docs/versioned_docs/version-CE/using-semaphore/{optimization => containers}/img/change-skip-vs-run.jpg (100%)
rename docs/versioned_docs/version-CE/using-semaphore/{optimization => containers}/img/custom-create.jpg (100%)
rename docs/versioned_docs/version-CE/using-semaphore/{optimization => containers}/img/dockerhub-secret.jpg (100%)
rename docs/versioned_docs/version-CE/using-semaphore/{optimization => containers}/img/freq-cd.jpg (100%)
rename docs/versioned_docs/version-CE/using-semaphore/{optimization => containers}/img/freq-ci.jpg (100%)
rename docs/versioned_docs/version-CE/using-semaphore/{optimization => containers}/img/insights-settings.jpg (100%)
rename docs/versioned_docs/version-CE/using-semaphore/{optimization => containers}/img/insights-tab.jpg (100%)
rename docs/versioned_docs/version-CE/using-semaphore/{optimization => containers}/img/new-metric.jpg (100%)
rename docs/versioned_docs/version-CE/using-semaphore/{optimization => containers}/img/org-health-location.jpg (100%)
rename docs/versioned_docs/version-CE/using-semaphore/{optimization => containers}/img/org-health-overview.jpg (100%)
rename docs/versioned_docs/version-CE/using-semaphore/{optimization => containers}/img/perf-cd.jpg (100%)
rename docs/versioned_docs/version-CE/using-semaphore/{optimization => containers}/img/perf-ci.jpg (100%)
rename docs/versioned_docs/version-CE/using-semaphore/{optimization => containers}/img/rel-cd.jpg (100%)
rename docs/versioned_docs/version-CE/using-semaphore/{optimization => containers}/img/rel-ci.jpg (100%)
rename docs/versioned_docs/version-CE/using-semaphore/{optimization => containers}/img/select-date-range.jpg (100%)
rename docs/versioned_docs/version-CE/using-semaphore/{optimization => containers}/img/workflow-monorepo.jpg (100%)
create mode 100644 docs/versioned_docs/version-CE/using-semaphore/recipes/img/infracost-key-secret.jpg
create mode 100644 docs/versioned_docs/version-CE/using-semaphore/recipes/img/infracost-secret.jpg
create mode 100644 docs/versioned_docs/version-CE/using-semaphore/recipes/infracost.md
rename docs/versioned_docs/version-EE/using-semaphore/{optimization => }/cache.md (83%)
rename docs/versioned_docs/version-EE/using-semaphore/{optimization => containers}/_category_.json (100%)
rename docs/versioned_docs/version-EE/using-semaphore/{optimization => containers}/container-registry.md (100%)
rename docs/versioned_docs/version-EE/using-semaphore/{optimization => containers}/docker.md (100%)
rename docs/versioned_docs/version-EE/using-semaphore/{optimization => containers}/img/add-metric.jpg (100%)
rename docs/versioned_docs/version-EE/using-semaphore/{optimization => containers}/img/change-skip-vs-run.jpg (100%)
rename docs/versioned_docs/version-EE/using-semaphore/{optimization => containers}/img/custom-create.jpg (100%)
rename docs/versioned_docs/version-EE/using-semaphore/{optimization => containers}/img/dockerhub-secret.jpg (100%)
rename docs/versioned_docs/version-EE/using-semaphore/{optimization => containers}/img/freq-cd.jpg (100%)
rename docs/versioned_docs/version-EE/using-semaphore/{optimization => containers}/img/freq-ci.jpg (100%)
rename docs/versioned_docs/version-EE/using-semaphore/{optimization => containers}/img/insights-settings.jpg (100%)
rename docs/versioned_docs/version-EE/using-semaphore/{optimization => containers}/img/insights-tab.jpg (100%)
rename docs/versioned_docs/version-EE/using-semaphore/{optimization => containers}/img/new-metric.jpg (100%)
rename docs/versioned_docs/version-EE/using-semaphore/{optimization => containers}/img/org-health-location.jpg (100%)
rename docs/versioned_docs/version-EE/using-semaphore/{optimization => containers}/img/org-health-overview.jpg (100%)
rename docs/versioned_docs/version-EE/using-semaphore/{optimization => containers}/img/perf-cd.jpg (100%)
rename docs/versioned_docs/version-EE/using-semaphore/{optimization => containers}/img/perf-ci.jpg (100%)
rename docs/versioned_docs/version-EE/using-semaphore/{optimization => containers}/img/rel-cd.jpg (100%)
rename docs/versioned_docs/version-EE/using-semaphore/{optimization => containers}/img/rel-ci.jpg (100%)
rename docs/versioned_docs/version-EE/using-semaphore/{optimization => containers}/img/select-date-range.jpg (100%)
rename docs/versioned_docs/version-EE/using-semaphore/{optimization => containers}/img/workflow-monorepo.jpg (100%)
create mode 100644 docs/versioned_docs/version-EE/using-semaphore/recipes/img/infracost-key-secret.jpg
create mode 100644 docs/versioned_docs/version-EE/using-semaphore/recipes/img/infracost-secret.jpg
create mode 100644 docs/versioned_docs/version-EE/using-semaphore/recipes/infracost.md
diff --git a/docs/default.conf b/docs/default.conf
index 014c65573..89d741382 100644
--- a/docs/default.conf
+++ b/docs/default.conf
@@ -49,6 +49,9 @@ server {
application/vnd.ms-fontobject
image/svg+xml;
+ location ~ ^/using-semaphore/optimization/cache?$ {return 301 /using-semaphore/cache;}
+ location ~ ^/using-semaphore/optimization/docker?$ {return 301 /using-semaphore/containers/docker;}
+ location ~ ^/using-semaphore/optimization/container-registry?$ {return 301 /using-semaphore/containers/container-registry;}
location ~ ^/account-management/billing-faq/?$ {return 301 /getting-started/faq#billing;}
location ~ ^/account-management/connecting-bitbucket-and-semaphore/?$ {return 301 /using-semaphore/connect-bitbucket;}
location ~ ^/account-management/connecting-github-and-semaphore/?$ {return 301 /using-semaphore/connect-github;}
@@ -56,13 +59,13 @@ server {
location ~ ^/account-management/permission-levels/?$ {return 301 /using-semaphore/rbac;}
location ~ ^/account-management/working-with-github-sso/?$ {return 301 /using-semaphore/github-sso;}
location ~ ^/advanced-ci/flaky-tests/?$ {return 301 /using-semaphore/tests/flaky-tests;}
- location ~ ^/ci-cd-environment/android-images/?$ {return 301 /using-semaphore/optimization/container-registry#android;}
+ location ~ ^/ci-cd-environment/android-images/?$ {return 301 /using-semaphore/containers/container-registry#android;}
location ~ ^/ci-cd-environment/aws-support/?$ {return 301 /using-semaphore/self-hosted-aws;}
location ~ ^/ci-cd-environment/choosing-between-a-vm-and-docker-based-environment/?$ {return 301 /using-semaphore/pipelines#docker-environments;}
location ~ ^/ci-cd-environment/configure-self-hosted-agent/?$ {return 301 /using-semaphore/self-hosted;}
location ~ ^/ci-cd-environment/custom-ci-cd-environment-with-docker/?$ {return 301 /using-semaphore/pipelines#docker-environments;}
- location ~ ^/ci-cd-environment/docker-authentication/?$ {return 301 /using-semaphore/optimization/docker#auth;}
- location ~ ^/ci-cd-environment/docker-layer-caching/?$ {return 301 /using-semaphore/optimization/docker#caching;}
+ location ~ ^/ci-cd-environment/docker-authentication/?$ {return 301 /using-semaphore/containers/docker#auth;}
+ location ~ ^/ci-cd-environment/docker-layer-caching/?$ {return 301 /using-semaphore/containers/docker#caching;}
location ~ ^/ci-cd-environment/environment-variables/?$ {return 301 /reference/env-vars;}
location ~ ^/ci-cd-environment/install-self-hosted-agent/?$ {return 301 /using-semaphore/self-hosted-install;}
location ~ ^/ci-cd-environment/job-environment/?$ {return 301 /using-semaphore/jobs;}
@@ -77,19 +80,19 @@ server {
location ~ ^/ci-cd-environment/self-hosted-agents-overview/?$ {return 301 /using-semaphore/self-hosted;}
location ~ ^/ci-cd-environment/sem-service-managing-databases-and-services-on-linux/?$ {return 301 /reference/toolbox#sem-service;}
location ~ ^/ci-cd-environment/sem-version-managing-language-versions-on-linux/?$ {return 301 /reference/toolbox#sem-version;}
- location ~ ^/ci-cd-environment/semaphore-registry-images/?$ {return 301 /using-semaphore/optimization/container-registry;}
- location ~ ^/ci-cd-environment/set-up-caching-on-aws-s3/?$ {return 301 /using-semaphore/optimization/cache#aws;}
+ location ~ ^/ci-cd-environment/semaphore-registry-images/?$ {return 301 /using-semaphore/containers/container-registry;}
+ location ~ ^/ci-cd-environment/set-up-caching-on-aws-s3/?$ {return 301 /using-semaphore/containers/cache#aws;}
location ~ ^/ci-cd-environment/ubuntu-18.04-image/?$ {return 301 /reference/os-ubuntu;}
location ~ ^/ci-cd-environment/ubuntu-20.04-image/?$ {return 301 /reference/os-ubuntu#ubuntu2004;}
location ~ ^/ci-cd-environment/ubuntu-22.04-arm-image/?$ {return 301 /reference/os-ubuntu#ubuntu2204-arm;}
location ~ ^/ci-cd-environment/ubuntu-22.04-image/?$ {return 301 /reference/os-ubuntu#ubuntu2204-x86;}
- location ~ ^/ci-cd-environment/working-with-docker/?$ {return 301 /using-semaphore/optimization/docker;}
+ location ~ ^/ci-cd-environment/working-with-docker/?$ {return 301 /using-semaphore/containers/docker;}
location ~ ^/article/75-debugging-with-ssh-access/?$ {return 301 /using-semaphore/jobs#ssh-into-agent;}
location ~ ^/essentials/artifacts/?$ {return 301 /using-semaphore/artifacts;}
location ~ ^/essentials/auto-cancel-previous-pipelines-on-a-new-push/?$ {return 301 /using-semaphore/pipelines#auto-cancel;}
location ~ ^/essentials/build-matrix/?$ {return 301 /using-semaphore/jobs#matrix;}
location ~ ^/essentials/building-monorepo-projects/?$ {return 301 /using-semaphore/monorepo;}
- location ~ ^/essentials/caching-dependencies-and-directories/?$ {return 301 /using-semaphore/optimization/cache;}
+ location ~ ^/essentials/caching-dependencies-and-directories/?$ {return 301 /using-semaphore/cache;}
location ~ ^/essentials/concepts/?$ {return 301 /using-semaphore/workflows;}
location ~ ^/essentials/configuring-status-checks/?$ {return 301 /using-semaphore/projects#status-checks;}
location ~ ^/essentials/debugging-with-ssh-access/?$ {return 301 /using-semaphore/jobs#debug-jobs;}
@@ -204,14 +207,14 @@ server {
location ~ ^/examples/laravel-php-continuous-integration/?$ {return 301 /using-semaphore/workflows;}
location ~ ^/examples/netlify-continuous-deployment/?$ {return 301 /using-semaphore/workflows;}
location ~ ^/examples/node-js-and-typescript-continuous-integration/?$ {return 301 /using-semaphore/workflows;}
- location ~ ^/examples/publishing-docker-images-on-dockerhub/?$ {return 301 /using-semaphore/optimization/docker#auth;}
- location ~ ^/examples/pushing-docker-images-to-aws-elastic-container-registry-ecr/?$ {return 301 /using-semaphore/optimization/docker#auth;}
- location ~ ^/examples/pushing-docker-images-to-google-container-registry-gcr/?$ {return 301 /using-semaphore/optimization/docker#auth;}
+ location ~ ^/examples/publishing-docker-images-on-dockerhub/?$ {return 301 /using-semaphore/containers/docker#auth;}
+ location ~ ^/examples/pushing-docker-images-to-aws-elastic-container-registry-ecr/?$ {return 301 /using-semaphore/containers/docker#auth;}
+ location ~ ^/examples/pushing-docker-images-to-google-container-registry-gcr/?$ {return 301 /using-semaphore/containers/docker#auth;}
location ~ ^/examples/rails-continuous-integration/?$ {return 301 /using-semaphore/workflows;}
location ~ ^/examples/scala-play-continuous-integration/?$ {return 301 /using-semaphore/workflows;}
location ~ ^/examples/testflight-ios-app-distribution/?$ {return 301 /using-semaphore/workflows;}
location ~ ^/examples/tutorials-and-example-projects/?$ {return 301 /using-semaphore/workflows;}
- location ~ ^/examples/using-docker-compose-in-ci/?$ {return 301 /using-semaphore/optimization/docker;}
+ location ~ ^/examples/using-docker-compose-in-ci/?$ {return 301 /using-semaphore/containers/docker;}
location ~ ^/examples/using-terraform-with-google-cloud/?$ {return 301 /using-semaphore/workflows;}
location ~ ^/examples/zeit-now-continuous-deployment/?$ {return 301 /using-semaphore/workflows;}
location ~ ^/faq/migration-guide-for-semaphore-classic-users/?$ {return 301 /using-semaphore/workflows;}
diff --git a/docs/docs/getting-started/changelog.md b/docs/docs/getting-started/changelog.md
index f5c933ea3..eb8c54056 100644
--- a/docs/docs/getting-started/changelog.md
+++ b/docs/docs/getting-started/changelog.md
@@ -68,7 +68,7 @@ To learn more about this image, check our [macOS Xcode 16](https://docs.semaphor
- nvm 0.40.1 -> 0.40.2
- pip3 24.2 -> 25.0
- postgres 14.13 -> 14.17
-- python3 3.12.6 -> 3.13.2
+- python3 3.12.6 -> 3.13.2
- ruby system 3.3.2 -> 3.4.2
- ruby rbenv 3.3.5 -> 3.2.8
- Simulator iOS 18.2 -> 18.4
@@ -541,14 +541,15 @@ To learn more about these images check our [Ubuntu 20.04](../reference/os-ubuntu
**(Improved) macos-xcode15 (apple silicon) image update**
-**(New)** Packages:
+**(New)** Packages:
+
- Xcode 15.4 (default)
**(Updated)** :
- iOS 17.5
- watchOS 10.5
-- tvOS 17.5
+- tvOS 17.5
**(Improved) Ubuntu 20.04, Ubuntu 22.04 and Ubuntu 22.04 ARM image update**
@@ -1301,7 +1302,6 @@ The Ubuntu 18.04 image has been deprecated.
- Fastlane 2.211.0 -> 2.212.1
- Flutter 3.7.0 -> 3.7.10
-
**(Improved) Ubuntu 20.04 image update**
**(New)** Packages
@@ -1432,7 +1432,6 @@ To learn more about this image, check our [ubuntu 20.04 page](../reference/os-ub
- Java OpenJDK 17.0.5 -> 17.0.6
- Terraform 1.3.9 -> 1.4.2
-
:::info
The Ubuntu 18.04 image has been deprecated.
@@ -1569,7 +1568,6 @@ To learn more about this image, check our [ubuntu 20.04 page](../reference/os-ub
- Kubectl 1.26.0 -> 1.26.1
- Pip 22.3.1 -> 23.0
-
:::info
The Ubuntu 18.04 image has been deprecated.
@@ -1588,7 +1586,6 @@ The Ubuntu 18.04 image has been deprecated.
- Flutter 3.3.0 -> 3.7.0
- NodeJS 16 -> 18
-
## Week of January 16, 2023
**(Improved) Ubuntu 20.04 image update**
@@ -1853,7 +1850,6 @@ The Ubuntu 18.04 image has been deprecated.
## Week of November 14, 2022
-
:::info
macOS Xcode 12 is deprecated.
@@ -1936,11 +1932,11 @@ The Ubuntu 18.04 image has been deprecated.
- Google Chrome 106 -> 107
- ChromeDriver 106 -> 107
- Helm 3.9.4 -> 3.10.1
-- Heroku 7.64.0 -> 7.65.0
+- Heroku 7.64.0 -> 7.65.0
- Kubectl 1.25.2 -> 1.25.3
- Sbt 1.7.2 -> 1.7.3
- Terraform 1.3.1 -> 1.3.3
-- Pip 22.2.2 -> 22.3
+- Pip 22.2.2 -> 22.3
:::info
@@ -2026,7 +2022,7 @@ The Ubuntu 18.04 image has been deprecated.
## Week of September 26, 2022
-**(New) macos-xcode14 image released **
+**(New) macos-xcode14 image released**
**(Updated)** Updated packages:
@@ -2034,7 +2030,7 @@ The Ubuntu 18.04 image has been deprecated.
- Kernel version: 21.6.0
- Fastlane: 2.210.0
- Cocoapods: 1.11.3
-- Nvm : 9.39.1
+- Nvm : 9.39.1
- Java: openjdk17
- Postgres: 14.5
- iOS SDK: 16.0
@@ -2136,6 +2132,7 @@ The Ubuntu 18.04 image has been deprecated.
**(Changed)** packages:
`sem-version firefox` versions available:
+
- Firefox 68.9.0esr: `68`, `esr-old`
- Firefox 78.1.0esr: `78`, `default`, `esr`
- Firefox 102.2.0esr: `102`, `esr-new`, `esr-latest`
@@ -2177,6 +2174,7 @@ To learn more about this image, check our [ubuntu 20.04 page](../reference/os-ub
**(Changed)** packages:
`sem-version firefox` versions available:
+
- Firefox 68.9.0esr: `68`, `esr-old`
- Firefox 78.1.0esr: `78`, `default`, `esr`
- Firefox 102.2.0esr: `102`, `esr-new`, `esr-latest`
@@ -2263,7 +2261,7 @@ The Ubuntu 18.04 image has been deprecated.
- Kernel 5.13.0-52-generic -> 5.15.0.41-generic
- Aws-cli 1.25.26 -> 1.25.34
- Aws-cli 2.7.14 -> 2.7.17
-- Erlang 23.3.4.15 -> 23.3.4.16
+- Erlang 23.3.4.15 -> 23.3.4.16
- Erlang 25.0.2 -> 25.0.3
- Github-cli 2.13.0 -> 2.14.2
- Git 2.37.0 -> 2.37.1
@@ -2411,9 +2409,11 @@ The Ubuntu 18.04 image has been deprecated.
**(Improved) macOS Xcode 13 image update**
**(New)** Packages:
+
- Xcode 13.4.1
**(Removed)** Packages:
+
- Xcode 13.1
- Xcode 13.4
@@ -2424,7 +2424,6 @@ The Ubuntu 18.04 image has been deprecated.
- Homebrew 3 -> 3.5.1
- PostgreSQL 14.2 -> 14.3
-
**(Improved) Ubuntu 20.04 image update**
**(New)** Packages:
@@ -2656,7 +2655,7 @@ The Ubuntu 18.04 image has been deprecated.
- Gradle 7.3 -> 7.4.2
- Java OpenJDK 11.0.14 -> 11.0.15
- Java OpenJDK 17.0.2 -> 17.0.3
-- NodeJS 16.14.2 -> 16.15.0
+- NodeJS 16.14.2 -> 16.15.0
- Npm 8.5.0 -> 8.5.5
- Heroku 7.60.1 -> 7.60.2
- Terraform 1.1.8 -> 1.1.9
@@ -2679,7 +2678,7 @@ To learn more about this image, check our [ubuntu 20.04 page](../reference/os-ub
- Gradle 7.3 -> 7.4.2
- Java OpenJDK 11.0.14 -> 11.0.15
- Java OpenJDK 17.0.2 -> 17.0.3
-- NodeJS 16.14.2 -> 16.15.0
+- NodeJS 16.14.2 -> 16.15.0
- Npm 8.5.0 -> 8.5.5
- Heroku 7.60.1 -> 7.60.2
- Terraform 1.1.8 -> 1.1.9
@@ -2735,11 +2734,11 @@ To learn more about this image, check our [ubuntu 20.04 page](../reference/os-ub
- Aws-cli 1.22.86 -> 1.22.96
- Aws-cli 2.5.1 -> 2.5.5
- Azure-cli 2.34.1 -> 2.35.0
-- Github-cli 2.7.0 -> 2.8.0
+- Github-cli 2.7.0 -> 2.8.0
- Git 2.35.1 -> 2.35.3
- Bazel 5.1.0 -> 5.1.1
- Helm 3.8.1 -> 3.8.2
-- Pypy 7.3.8 -> 7.3.9
+- Pypy 7.3.8 -> 7.3.9
- Pypy3 7.3.8 -> 7.3.9
- Venv 20.13.0 -> 20.14.1
- Kerl 2.2.3 -> 2.3.0
@@ -2759,9 +2758,11 @@ The Ubuntu 18.04 image has been deprecated.
**(Improved) macOS Xcode 13 image update**
**(New)** Packages:
+
- Xcode 13.3
**(Removed)** Packages:
+
- Xcode 13.0
**(Updated)** Updated packages:
@@ -2864,9 +2865,9 @@ To learn more about this image, check our [ubuntu 20.04 page](../reference/os-ub
- Aws-cli 1.22.67 -> 1.22.77
- Aws-cli 2.4.23 -> 2.4.27
-- Github-cli 2.5.2 -> 2.6.0
+- Github-cli 2.5.2 -> 2.6.0
- Docker 20.10.12 -> 20.10.13
-- Helm 3.8.0 -> 3.8.1
+- Helm 3.8.0 -> 3.8.1
- Heroku 7.59.2 -> 7.59.4
- Java 11.0.13 -> 11.0.14
- Java 17.0.1 -> 17.0.2
@@ -2957,7 +2958,7 @@ To learn more about this image, check our [ubuntu 20.04 page](../reference/os-ub
- Go 1.17.7
- Elixir 1.13.3 (default)
-- Ruby 3.1.1
+- Ruby 3.1.1
- PHP
- 7.4.28 (default)
- 8.0.16
@@ -2970,7 +2971,7 @@ To learn more about this image, check our [ubuntu 20.04 page](../reference/os-ub
- Github-cli 2.5.0 > 2.5.1
- Git-lfs 3.0.2 -> 3.1.2
- Nodejs 16.13.2 -> 16.14.0
-- Npm 8.1.2 -> 8.3.1
+- Npm 8.1.2 -> 8.3.1
- Pypy 7.3.6 -> 7.3.8
- Pypy3 7.3.7 > 7.3.8
@@ -2995,8 +2996,8 @@ The Ubuntu 18.04 image has been deprecated.
- Helm 3.7.2 -> 3.8.0
- Virtualenv 16.0.0 -> 20.13.0
- Pip 21.3.1 -> 22.0.3
-- Erlang
- - 23.3.4.10 -> 23.3.4.11
+- Erlang
+ - 23.3.4.10 -> 23.3.4.11
- 24.2 -> 24.2.1
To learn more about this image, check our [ubuntu 20.04 page](../reference/os-ubuntu-images/ubuntu-2004-image).
@@ -3015,7 +3016,7 @@ To learn more about this image, check our [ubuntu 20.04 page](../reference/os-ub
- Virtualenv 16.0.0 -> 20.13.0
- Pip (for Python 3.6) 20.3.4 -> 21.3.1
- Pip (for Python 3.7 and above) 20.3.4 -> 22.0.3
-- Erlang
+- Erlang
- 23.3.4.10 -> 23.3.4.11
- 24.2 -> 24.2.1
@@ -3103,8 +3104,8 @@ The Ubuntu 18.04 image has been deprecated.
- Erlang 24.2 (default)
- Elixir 1.13.1 (default)
- Ruby 3.1.0
-- PHP
- - 7.4.27 (default)
+- PHP
+ - 7.4.27 (default)
- 8.0.14
**(Updated)** Updated packages:
@@ -3138,7 +3139,7 @@ To learn more about this image, check our [ubuntu 20.04 page](../reference/os-ub
- Yarn 1.22.15 -> 1.22.17
- Java 8u292 -> 8u312
- Java 11.0.11 -> 11.0.13
-- Kerl 2.1.2 -> 2.2.3
+- Kerl 2.1.2 -> 2.2.3
- Rebar 3.15.2 -> 3.17.0
- Erlang
- 22.3.4.23-> 22.3.4.24
@@ -3155,6 +3156,7 @@ The Ubuntu 18.04 image has been deprecated.
**(Improved) macOS Xcode 13 image update**
**(New)** Packages:
+
- Xcode 13.2.1
**(Updated)** Updated packages:
@@ -3240,7 +3242,7 @@ To learn more about this image, check our [ubuntu 20.04 page](../reference/os-ub
**(New)** Packages:
- PHP 7.3.33, 7.4.26 (default), 8.0.13
-- Ruby 2.6.9, 2.7.5 (default), 3.0.3
+- Ruby 2.6.9, 2.7.5 (default), 3.0.3
- Github-cli 2.2.0
**(Updated)** Updated packages:
@@ -3273,8 +3275,8 @@ The Ubuntu 18.04 image has been deprecated.
- Aws-cli 1.21.7 -> 1.22.5
- Google Chrome 95 -> 96
- ChromeDriver 95 -> 96
-- Erlang
- - 23.3.4.8 -> 23.3.4.9
+- Erlang
+ - 23.3.4.8 -> 23.3.4.9
- 24.1.3 -> 24.1.5 (default)
Default Elixir version changed from 1.11.4 to 1.12.3
@@ -3288,9 +3290,9 @@ To learn more about this image, check our [ubuntu 20.04 page](../reference/os-ub
- Azure-cli 2.29.2 -> 2.30.0
- Google Chrome 95 -> 96
- ChromeDriver 95 -> 96
-- Erlang
- - 22.3.4.22 -> 22.3.4.23
- - 23.3.4.8 -> 23.3.4.9
+- Erlang
+ - 22.3.4.22 -> 22.3.4.23
+ - 23.3.4.8 -> 23.3.4.9
- 24.1.3 -> 24.1.5 (default)
:::info
@@ -3360,6 +3362,7 @@ The Ubuntu 18.04 image has been deprecated.
**(Improved) macOS Xcode 13 image update**
**(New)** Packages:
+
- Xcode 13.1
- PostgreSQL 14
@@ -3371,7 +3374,6 @@ The Ubuntu 18.04 image has been deprecated.
- Node v16.6.1 -> v17.0.1
- Yarn 1.22.11 -> 1.22.17
-
## Week of October 18, 2021
**(Improved) Ubuntu 20.04 image update**
@@ -3409,10 +3411,11 @@ The Ubuntu 18.04 image has been deprecated.
**(Improved) macOS Xcode 12 image update**
**(New)** Packages:
+
- Xcode 12.5.1
Updated packages:
-
+
- Flutter 2.5.1 -> 2.5.2
- Node 16.6.1 -> 16.10.0
@@ -3479,7 +3482,7 @@ The Ubuntu 18.04 image has been deprecated.
**(Improved) macOS Xcode 12 image update**
Updated packages:
-
+
- Cocoapods 1.10.2 -> 1.11.2
- Fastlane 2.191.0 -> 2.195.0
- Flutter 2.0.6 -> 2.5.1
@@ -3494,6 +3497,7 @@ To learn more about this image, check our macOS Xcode 12 page.
- Android:30-flutter image was updated, it now contains flutter version 2.2.3 and 2.5.1(default)
**(Improved) Toolbox update**
+
- Toolbox/sem-version can switch between pre-installed flutter versions 2.5.1(default) and 2.2.3
when using semaphore's android:30-flutter image. Syntax: `sem-version flutter [2.2.3|2.5.1]`
@@ -3539,7 +3543,6 @@ The Ubuntu 18.04 image has been deprecated.
The macOS Xcode 11 image will be removed in one month.
We advise all users to switch to the macOS Xcode 12 image.
-
**(Improved) Ubuntu 20.04 image update**
**(Updated)** Updated packages:
@@ -3608,7 +3611,7 @@ The Ubuntu 18.04 image has been deprecated.
**(Updated)** **Big Sur** version 11.4 updated to 11.5.1
Updated packages:
-
+
- Cocoapods 1.10.1 -> 1.10.2
- Fastlane 2.182.0 -> 2.191.0
- Xcode 12.5 -> 12.5.1
@@ -3616,7 +3619,6 @@ Updated packages:
To learn more about this image, check our macOS Xcode 12 page.
-
**(Improved) Ubuntu 18.04 image update**
- ~~**(Deprecated)** Python 2.7 has reached [End of Life](https://www.python.org/doc/sunset-python-2/) and is no longer maintained, it will be removed from the Bionic image at the beginning of September 2021.~~
@@ -3730,10 +3732,10 @@ Take the advantage of this new feature now to:
To learn more about how to configure the Test Summary feature, check our [docummentation](../using-semaphore/tests/test-reports).
**(New) Parameterized promotions (beta)**
-With parameterized promotions, you can re-use a single promotion pipeline and use environment variables to achieve different results based on the variable values.
+With parameterized promotions, you can re-use a single promotion pipeline and use environment variables to achieve different results based on the variable values.
- Define the parameter name and description in YAML or Workflow Builder.
-- Assign parameter values when manually promoting.
+- Assign parameter values when manually promoting.
- Use parameter values in promoted pipeline names, secret names, pipeline queues, or as environment variables.
This feature is in **private beta**. If you would like to try it out, please [reach out to us](mailto:feedback@semaphoreci.com?subject=Parameterized%20promotions%20-%20beta%20request&body=I%20would%20like%20to%20join%20the%20private%20beta%20for%20parameterized%20promotions%20feature...).
@@ -3793,7 +3795,7 @@ To learn more about this image, check our [ubuntu 20.04 page](../reference/os-ub
- Kerl 1.8.4 -> 2.1.2
- Rebar3 3.12.0 -> 3.15.2
-- Azure-cli 2.24.1 -> 2.24.2
+- Azure-cli 2.24.1 -> 2.24.2
- Aws-cli 1.19.89 -> 1.19.92
- Docker 20.10.5 -> 20.10.7
- Docker-compose 1.28.2 -> 1.29.2
@@ -3807,6 +3809,7 @@ The Ubuntu 18.04 image has been deprecated.
:::
## Week of June 7, 2021
+
**(Improved) Ubuntu 18.04 image update**
**(New)** Packages:
@@ -3819,10 +3822,11 @@ The Ubuntu 18.04 image has been deprecated.
- Chromedriver 90 -> 91
## Week of May 31, 2021
+
**(New) Granular repository access via GitHub App**
Semaphore [GitHub App](https://docs.semaphoreci.com/account-management/connecting-github-and-semaphore/#connecting-a-repository-via-github-app) has been introduced as a new way of connecting GitHub repositories with Semaphore.
-Users can now connect Semaphore through the GitHub App instead of via personal access token, and choose which repositories they want to grant access to.
+Users can now connect Semaphore through the GitHub App instead of via personal access token, and choose which repositories they want to grant access to.
Find out more about the difference(s) between GitHub App and personal access token in our [documentation](../using-semaphore/connect-github).
@@ -3867,6 +3871,7 @@ The Ubuntu 18.04 image has been deprecated.
:::
## Week of May 24, 2021
+
**(Improved) Ubuntu 18.04 image update**
**(New)** Packages:
@@ -3886,6 +3891,7 @@ The Ubuntu 18.04 image has been deprecated.
:::
## Week of May 17, 2021
+
**(Improved) Ubuntu 18.04 image update**
**(New)** Packages:
@@ -3907,9 +3913,8 @@ The Ubuntu 18.04 image has been deprecated.
- sem-version fails the job if the switch was unsuccessful
- sem-version **beta** introduced for macOS-based agents, and ruby 2.5.9, 2.6.7, 2.7.3, 3.0.1 versions are also available.
-
-
## Week of May 10, 2021
+
**(Improved) Ubuntu 18.04 image update**
**(New)** Tools:
@@ -3939,7 +3944,7 @@ The Ubuntu 18.04 image has been deprecated.
**(Updated)** MacOS Catalina upgraded to **Big Sur**
Updated packages:
-
+
- Homebrew 3.1.4 -> 3.1.5
- Flutter 2.0.4 -> 2.0.6
- Fastlane 2.172.0 -> 2.182.0
@@ -3951,9 +3956,10 @@ Updated packages:
To learn more about this image, check our macOS Xcode 12 page.
## Week of April 26, 2021
+
**(Improved) macOS Xcode 12 image update**
**(Updated)** Updated packages:
-
+
- Homebrew 2.4.3 -> 3.1.4
- Rbenv can install ruby 2.6.7, 2.7.3, 3.0.1
@@ -3983,7 +3989,6 @@ The Ubuntu 18.04 image has been deprecated.
:::
-
## Week of April 12, 2021
**(Improved) Ubuntu 18.04 image update**
@@ -3999,7 +4004,7 @@ The Ubuntu 18.04 image has been deprecated.
**(Improved) macOS Xcode 12 image update**
**(Updated)** Updated packages:
-
+
- Curl 7.64.1 -> 7.71.1
- Flutter 1.17 -> 2.0.4
@@ -4007,24 +4012,23 @@ To learn more about this image, check our macOS Xcode 12 page.
**(Updated)** Docker image:
-New convenience Docker images are available in the [Semaphore Container Registry](../using-semaphore/optimization/container-registry).
+New convenience Docker images are available in the [Semaphore Container Registry](../using-semaphore/containers/container-registry).
A detailed list can be found in the [Docker images changelog](https://github.com/semaphoreci/docker-images/blob/master/CHANGELOG_04-2021).
-
## Week of April 5, 2021
**(Improved) Monorepo support shipped to GA**
A new set of improvements focused on monorepo support were shipped:
- **Initialization step** - Runs at the start of each monorepo pipeline and compiles the workflow, ensuring that misconfigurations are detected before any job starts.
-- **UI indicator** - A new UI element shows the initialization log, making troubleshooting fast and easy.
-- **Exclude parameter** - A new option in change_in adds the ability to define which folders or files to skip.
+- **UI indicator** - A new UI element shows the initialization log, making troubleshooting fast and easy.
+- **Exclude parameter** - A new option in change_in adds the ability to define which folders or files to skip.
- **Glob pattern support** - change_in conditions have been further extended to allow the use of wildcards.
- **Clearer error messages** - Improved error messages for the most common misconfiguration issues.
-- **Improved stability** - All compilation errors arising from edge cases have been eliminated, making these features more reliable.
+- **Improved stability** - All compilation errors arising from edge cases have been eliminated, making these features more reliable.
-You can read more about this feature in this [blog post](https://semaphoreci.com/blog/monorepo-support-available).
+You can read more about this feature in this [blog post](https://semaphoreci.com/blog/monorepo-support-available).
Additional related reading:
@@ -4032,7 +4036,6 @@ Additional related reading:
- [Monorepo docs](../using-semaphore/monorepo)
- [change_in docs](../reference/conditions-dsl#change-in)
-
## Week of March 29, 2021
**(Improved) Ubuntu 18.04 image update**
@@ -4041,7 +4044,7 @@ Additional related reading:
- Erlang 23.3
**(Updated)** Updated packages:
-
+
- Aws-cli 1.19.25 -> 1.19.39
- Azure-cli 2.20.0 -> 2.21.0
- Git 2.30 -> 2.31
@@ -4059,8 +4062,8 @@ The Ubuntu 18.04 image has been deprecated.
**(Updated)** Sem-service addons:
- elasticsearch (7.10.0, 7.11.2, 7.12.0)
-- mysql (5.7.33, 8.0.23)
-- mongo (4.2.13, 4.4.1, 4.4.4)
+- mysql (5.7.33, 8.0.23)
+- mongo (4.2.13, 4.4.1, 4.4.4)
- postgres (10.16, 11.11, 12.6, 13.2)
- rabbitmq (3.8.14)
- redis (6.2.1)
@@ -4072,22 +4075,24 @@ The Ubuntu 18.04 image has been deprecated.
You can find out more about Semaphore toolbox by checking the [Toolbox Reference](../reference/toolbox).
## Week of March 22, 2021
+
**(Improved) New UI updates**
Based on user feedback, the UI was updated to make navigation even more intuitive.
-Here are some of the changes that have been introduced:
+Here are some of the changes that have been introduced:
- Top right page menu links have been redesigned as tabs.
- New breadcrumbs will always be present on the page.
-- Several new visual queues have been introduced to draw attention to active elements.
+- Several new visual cues have been introduced to draw attention to active elements.
- Updated fonts and icons make it easier to find key information on the page.
If you have any suggestions for further improvements to our UI, we encourage you to send them to [our feedback inbox](mailto:feedback@semaphoreci.com?subject=UI%20Feedback).
## Week of March 08, 2021
-**(Improved) Ubuntu 18.04 image update**
+
+**(Improved) Ubuntu 18.04 image update**
**(Updated)** Updated packages:
-
+
- Docker 20.10.04 -> 20.10.05
- Aws-cli 1.19.18 -> 1.19.25
- Chromedriver 88 -> 89
@@ -4101,10 +4106,11 @@ The Ubuntu 18.04 image has been deprecated.
:::
## Week of March 01, 2021
-**(Improved) Ubuntu 18.04 image update**
+
+**(Improved) Ubuntu 18.04 image update**
**(Updated)** Updated packages:
-
+
- Aws-cli 1.19.3 -> 1.19.18
- Azure-cli 2.18.0 -> 2.20.0
- Docker 20.10.03 -> 20.10.04
@@ -4119,24 +4125,26 @@ The Ubuntu 18.04 image has been deprecated.
:::
## Week of February 22, 2021
+
**(Improved) Artifacts CLI fixes and improvements:**
- `push` and `pull` commands now have an output on success.
- Fixed the issue where the name of the file got changed to the parent directory name in some cases.
-- Fixed the issue where the dot at the start of the directory name was not preserved.
+- Fixed the issue where the dot at the start of the directory name was not preserved.
- If the `SEMAPHORE_WORKFLOW_ID` variable is not set, `artifact push workflow` will now fail.
- Added a new alias for `artifact yank`; from now on artifacts can also be deleted by running `artifact delete`.
Check our [Artifact CLI](../reference/toolbox#artifact) documentation to learn more about artifacts.
## Week of February 15, 2021
+
**(Improved) Ubuntu 18.04 image update**
**(New)** New language version:
- Python 3.9
**(Updated)** Updated packages:
-
+
- Aws-cli 1.18.216 -> 1.19.3
- Azure-cli 2.17.1 -> 2.18.0
- Chrome 87 -> 88
@@ -4153,17 +4161,18 @@ The Ubuntu 18.04 image has been deprecated.
:::
## Week of February 1, 2021
+
**(Improved) macOS Xcode 12 image update**
**(New)** Xcode 12.4 has been installed and set as the default version. To switch to any other supported version simply use `xcversion select `.
**(Updated)** Updated packages:
-
+
- Fastlane 2.170.0 -> 2.172.0
- Cocoapods 1.10.0 -> 1.10.1
To learn more about this image, check our macOS Xcode 12 page.
**(Updated)** Docker image:
-New convenience Docker images are available in the [Semaphore Container Registry](../using-semaphore/optimization/container-registry).
+New convenience Docker images are available in the [Semaphore Container Registry](../using-semaphore/containers/container-registry).
A detailed list can be found in the [Docker images changelog](https://github.com/semaphoreci/docker-images/blob/master/CHANGELOG_02-2021#L237).
**Minor improvements and fixes:**
@@ -4172,6 +4181,7 @@ A detailed list can be found in the [Docker images changelog](https://github.com
- **(Improved)** Rendering speed has been improved for some logs on the _job page_.
## Week of January 18, 2021
+
**(Improved) Ubuntu 18.04 image update**
**(Reverted)** Docker 20.10.1 -> 19.03.14
@@ -4202,6 +4212,7 @@ The Ubuntu 18.04 image has been deprecated.
- **(Improved)** Artifacts will now open in-browser (depending on mime type)
## Week of January 11, 2021
+
**(Improved) Ubuntu 18.04 image update**
**(New)** Language versions:
@@ -4224,30 +4235,30 @@ The Ubuntu 18.04 image has been deprecated.
- Pypy 7.3.2 -> 7.3.3
- Pip 20.2 -> 20.3.3
-
:::info
The Ubuntu 18.04 image has been deprecated.
:::
-
## Week of December 21, 2020
+
**(Improved) macOS Xcode 12 image update**
**(Removed)** Xcode 12.0 and Xcode 12.0.1 are no longer supported in the macOS Xcode 12 image.
**(New)** Xcode 12.3 was added to the image.
**(Updated)** Updated packages:
-
+
- Fastlane 2.169.0 -> 2.170.0
Note that the default Xcode version was changed to 12.3 for the macOS Xcode 12 image.
**Minor improvements and fixes:**
-- **(Fixed)** Fixed the issue where email invites were not sent to some collaborators during the project creation phase.
+- **(Fixed)** Fixed the issue where email invites were not sent to some collaborators during the project creation phase.
- **(Fixed)** Fixed the issue where multiple instances of a project are created when a user clicks multiple times when choosing a repository.
## Week of December 14, 2020
+
**(Improved) Ubuntu 18.04 image update**
**(New)** Language versions:
@@ -4267,17 +4278,17 @@ The Ubuntu 18.04 image has been deprecated.
**(Improved) macOS Xcode 11 and 12 image update**
**(Fix)** Fixed the missing Safari browser issue.
**(Updated)** Updated packages:
-
+
- Fastlane 2.167.0 -> 2.169.0
-To learn more about our image macOS images, check our macOS Xcode 12 and macOS Xcode 11 documentation.
+To learn more about our macOS images, check our macOS Xcode 12 and macOS Xcode 11 documentation.
## Week of November 23, 2020
**(Improved) Ubuntu 18.04 image update**
**(New)** Language versions:
-- Added elixir 1.11.2
+- Added elixir 1.11.2
**(Updated)** Language versions:
@@ -4298,36 +4309,38 @@ The Ubuntu 18.04 image has been deprecated.
:::
## Week of November 16, 2020
+
**(New) Added ability to overwrite branch whitelist rules**
-On the project page, click on the three dot icon in the top right corner of the workflow list to build a branch that is otherwise blocked by the whitelist rule.
-
+On the project page, click on the three dot icon in the top right corner of the workflow list to build a branch that is otherwise blocked by the whitelist rule.
+
Read our [Workflow triggers](../using-semaphore/workflows#workflow-triggers) documentation to find out how the branch whitelist works.
-
+
**(Improved) macOS Xcode 11 image update**
**(Updated)** Updated packages:
-
+
- Cocoapods 1.9.3 -> 1.10.0
- Fastlane 2.162.0 -> 2.167
- Flutter v1.22.1 -> 1.22.4
-
+
To learn more about this image, check our macOS Xcode 11 page.
## Week of November 9, 2020
+
**(Updated) New UI available to everyone**
The new UI is available to all users. Minor performance updates and fixes have been implemented:
-
-- **(Fixed)** fixed the issue with job logs auto-scrolling to top of the failed command output, instead of to the bottom.
+
+- **(Fixed)** fixed the issue with job logs auto-scrolling to top of the failed command output, instead of to the bottom.
- **(Fixed)** fixed the issue with manual promotions being triggered based on promotion name.
-
+
You can read more about the new UI in this [blog post](https://semaphoreci.com/blog/refreshed-new-ui-for-a-greater-experience).
We appreciate your feedback and encourage you to send any suggestions to [our feedback inbox](mailto:feedback@semaphoreci.com?subject=Beta%20Feedback).
-
+
**(Improved) macOS Xcode 12 image update**
**(Updated)** Xcode 12.2 installed, to switch version simply use `xcversion select 12.2`
**(Deprecated)** Xcode 12.0 will be removed from the images with the next release.
**(Updated)** Updated packages:
-
+
- Cocoapods 1.9.3 -> 1.10.0
To learn more about this image, check our macOS Xcode 12 page.
@@ -4349,7 +4362,7 @@ We appreciate your feedback and encourage you to send any suggestions to [our fe
**(New) Semaphore Container Registry**
For your convenience, we have introduced the Semaphore Container Registry. Inside the Semaphore environment, you can pull these Docker images without any restrictions or limitations.
-You can find the list of supported images in [our documentation](../using-semaphore/optimization/container-registry).
+You can find the list of supported images in [our documentation](../using-semaphore/containers/container-registry).
**(Improved) macOS Xcode 12 image update**
**(Updated)** Xcode 12.1 installed. To switch version simply use `xcversion select 12.1`
@@ -4379,6 +4392,7 @@ The Ubuntu 18.04 image has been deprecated.
:::
## Week of October 19, 2020
+
**(Updated) New UI - Everyone's latest work page**
We updated the behavior on the main page (Everyone's latest work) to exclude workflows that the current project user doesn't have access to.
If a user doesn't have access to a project, or the project was deleted, that project's workflows won't appear in their workflow list.
@@ -4405,12 +4419,14 @@ The Ubuntu 18.04 image has been deprecated.
**(Updated)** Xcode 12.0.1 installed. To switch version simply use `xcversion select 12.0.1`
**(Updated)** Updated packages:
- - Fastlane 2.158.0 -> 2.162.0
- - Flutter v1.20.2 -> v1.22.1
+
+- Fastlane 2.158.0 -> 2.162.0
+- Flutter v1.20.2 -> v1.22.1
To learn more about this image, check our macOS Xcode 12 page.
## Week of October 5, 2020
+
**(Improved) Ubuntu 18.04 image update**
**(New)** Language versions:
@@ -4455,7 +4471,7 @@ You can access this feature by clicking on the **New project** button in the top
## Week of September 21, 2020
-**(New) Ubuntu 18.04 image additions**
+**(New) Ubuntu 18.04 image additions**
- Added the ability to change `kubectl` version with the sem-version tool: `sem-version kubectl [version]`
- Added elixir versions: 1.10.0, 1.10.2, 1.10.3
@@ -4492,12 +4508,14 @@ If you want to try out the new UI, but it hasn't yet rolled out to your organiza
**(Removed)** Xcode 11.2.1 removed from the images.
**(Updated)** Updated packages:
- - Fastlane 2.149.1 -> 2.158.0
- - Flutter v1.20.2 -> v1.20.3
+
+- Fastlane 2.149.1 -> 2.158.0
+- Flutter v1.20.2 -> v1.20.3
To learn more about this image, check our macOS Xcode 11 page.
-**(New) Ubuntu 18.04 image additions**
+**(New) Ubuntu 18.04 image additions**
+
- Added Go 1.15
- Added azure-cli, version 2.11.1
@@ -4519,6 +4537,7 @@ The Ubuntu 18.04 image has been deprecated.
:::
## Week of August 31, 2020
+
**(Improved) New Semaphore UI public beta release**
After a successful private beta, the new Semaphore UI has been rolled out to a larger number of users.
The new UI brings several improvements:
@@ -4539,6 +4558,7 @@ If you want to try out the new UI, but it hasn't yet rolled out to your organiza
To learn more about this image, check our macOS Xcode 11 page.
## Week of August 24, 2020
+
**(New) Information on promoter is stored in related environment variable**
Inside a Semaphore job you can now see who initiated a promotion by checking the value of the `SEMAPHORE_PIPELINE_PROMOTED_BY` environment variable.
@@ -4547,13 +4567,13 @@ Information on all available enviroment variables can be found in our [documenta
**(Improved) Ubuntu 18.04 image update**
**(Updated)** Language versions:
- - Aws-cli 1.18.112 ->1.18.124
- - Elixir 1.10.4 now uses Erlang 23.0.3
- - Heroku 7.42.6 -> 7.42.8
- - Java 8u252 -> 8u265
- - PHP 7.3.20 -> 7.3.21
- - PHP 7.4.8 -> 7.4.9
- - Pypy 5.8.0 -> 7.3.1
+- Aws-cli 1.18.112 ->1.18.124
+- Elixir 1.10.4 now uses Erlang 23.0.3
+- Heroku 7.42.6 -> 7.42.8
+- Java 8u252 -> 8u265
+- PHP 7.3.20 -> 7.3.21
+- PHP 7.4.8 -> 7.4.9
+- Pypy 5.8.0 -> 7.3.1
:::info
@@ -4564,16 +4584,18 @@ The Ubuntu 18.04 image has been deprecated.
**(Improved) macOS Xcode11 image update**
**(Updated)** Updated packages:
- - Fastlane 2.149.1 -> 2.156.1
- - Flutter v1.17.3 -> v1.20.2
+- Fastlane 2.149.1 -> 2.156.1
+- Flutter v1.17.3 -> v1.20.2
To learn more about this image, check our macOS Xcode 11 page.
**Minor improvements and fixes:**
- - **(Fixed)** Fixed an issue where `[skip ci]` was ignored for pull request triggers.
- - **(Improved)** Deleting a git branch will now stop any queued or running pipeline started on that branch.
-
+
+- **(Fixed)** Fixed an issue where `[skip ci]` was ignored for pull request triggers.
+- **(Improved)** Deleting a git branch will now stop any queued or running pipeline started on that branch.
+
## Week of August 17, 2020
+
**(New) Install-package command**
"Toolbox" command line tools have been extended to include the `install-package` utility.
The `install-package` command can help with package installations by automatically caching the desired packages and their dependencies.
@@ -4581,6 +4603,7 @@ The `install-package` command can help with package installations by automatical
To find out more, please check the [install-package](https://docs.semaphoreci.com/reference/toolbox-reference/#install-package) documentation page.
## Week of August 10, 2020
+
**(New) Configurable pipeline queues**
We added the option to override the default queue and create custom queues for pipelines.
Pipelines can be configured to run sequentially or in parallel, based on various conditions.
@@ -4591,14 +4614,14 @@ To learn how to use this feature, check the [pipeline queues](../using-semaphore
**(Improved) Ubuntu 18.04 image update**
-**(New)** Added Firefox version 78.1.0esr.
+**(New)** Added Firefox version 78.1.0esr.
Added ability to change firefox version with the `sem-version` tool: `sem-version firefox [52|68|78]`
**(Updated)** Language versions:
- - Aws-cli 1.18.104 ->1.18.112
- - Heroku 7.42.5 -> 7.42.6
- - Google-cloud-sdk downgraded 300 -> 297
+- Aws-cli 1.18.104 ->1.18.112
+- Heroku 7.42.5 -> 7.42.6
+- Google-cloud-sdk downgraded 300 -> 297
:::info
@@ -4614,15 +4637,15 @@ The Ubuntu 18.04 image has been deprecated.
**(Updated)** Language versions:
- - Aws-cli 1.18.96 -> 1.18.104
- - Go 1.13.12 -> 1.13.14
- - Go 1.14.4 -> 1.14.6
- - Heroku 7.42.2 -> 7.42.5
- - OpenJDK 11.0.7 -> 11.0.8
- - PHP 7.2.31 -> 7.2.32
- - PHP 7.3.19 -> 7.3.20
- - PHP 7.4.7 -> 7.4.8
- - Chrome_driver 83 > 84
+- Aws-cli 1.18.96 -> 1.18.104
+- Go 1.13.12 -> 1.13.14
+- Go 1.14.4 -> 1.14.6
+- Heroku 7.42.2 -> 7.42.5
+- OpenJDK 11.0.7 -> 11.0.8
+- PHP 7.2.31 -> 7.2.32
+- PHP 7.3.19 -> 7.3.20
+- PHP 7.4.7 -> 7.4.8
+- Chrome_driver 83 -> 84
:::info
@@ -4653,84 +4676,86 @@ For getting started with an Android project on Semaphore please see this [guide]
## Week of July 13, 2020
- Updates to the ubuntu1804 image:
- - Aws-cli 1.18.90 -> 1.18.96
- - Elixir 1.8.1 -> 1.8.2
- - Elixir 1.9.3 -> 1.9.4
- - Elixir 1.10.3 -> 1.10.4
- - Heroku 7.42.1 -> 7.42.2
- - PHP 7.3.18 -> 7.3.19
- - PHP 7.4.6 -> 7.4.7
+ - Aws-cli 1.18.90 -> 1.18.96
+ - Elixir 1.8.1 -> 1.8.2
+ - Elixir 1.9.3 -> 1.9.4
+ - Elixir 1.10.3 -> 1.10.4
+ - Heroku 7.42.1 -> 7.42.2
+ - PHP 7.3.18 -> 7.3.19
+ - PHP 7.4.6 -> 7.4.7
## Week of June 30, 2020
- Additions to the ubuntu1804 image
- - Sysstat
+ - Sysstat
- Updates to the ubuntu1804 image:
- - Aws-cli 1.18.77 -> 1.18.90
- - Docker-compose 1.24.1 -> 1.26.0
+ - Aws-cli 1.18.77 -> 1.18.90
+ - Docker-compose 1.24.1 -> 1.26.0
- The macos-xcode11 image update.
The following packages were updated:
- - Fastlane 1.50.1
- - Flutter 1.17.5
+ - Fastlane 1.50.1
+ - Flutter 1.17.5
- A new convenience Docker image available in Semaphore's [dockerhub](https://hub.docker.com/u/semaphoreci).
A detailed list can be found in the [Docker images changelog](https://github.com/semaphoreci/docker-images/blob/master/CHANGELOG#L2939).
## Week of June 15, 2020
- Updates to the macos-xcode11 image:
- - flutter v1.17.3
- - cocoapods 1.9.3
- - nodejs v13.12.0
- - yarn v1.22.4
- - fastlane 2.149.1
+ - flutter v1.17.3
+ - cocoapods 1.9.3
+ - nodejs v13.12.0
+ - yarn v1.22.4
+ - fastlane 2.149.1
- Updates to the ubuntu1804 image:
- - Aws-cli 1.18.68 -> 1.18.77
- - Firefox 68.4esr -> 68.9.0esr
- - Git 2.26 -> 2.27
- - Go 1.13.10 -> 1.13.12
- - Go 1.14.2 -> 1.14.4
- - Heroku 7.41.1 -> 7.42.1
-
+ - Aws-cli 1.18.68 -> 1.18.77
+ - Firefox 68.4esr -> 68.9.0esr
+ - Git 2.26 -> 2.27
+ - Go 1.13.10 -> 1.13.12
+ - Go 1.14.2 -> 1.14.4
+ - Heroku 7.41.1 -> 7.42.1
+
## Week of June 8, 2020
-- The macOS Mojave image (macOS-mojave-xcode11) has been deprecated.
+
+- The macOS Mojave image (macOS-mojave-xcode11) has been deprecated.
## Week of June 1, 2020
- Additions to the Ubuntu1804 image
- - Erlang 23.0
+ - Erlang 23.0
- Updates to the Ubuntu1804 image:
- - Aws-cli 1.18.59 -> 1.18.68
- - Chromedriver 81 -> 83
- - Google Chrome 81 -> 83
- - PHP 7.2.29 -> 7.2.31
- - PHP 7.3.16 -> 7.3.18
- - PHP 7.4.4 -> 7.4.6
+ - Aws-cli 1.18.59 -> 1.18.68
+ - Chromedriver 81 -> 83
+ - Google Chrome 81 -> 83
+ - PHP 7.2.29 -> 7.2.31
+ - PHP 7.3.16 -> 7.3.18
+ - PHP 7.4.4 -> 7.4.6
## Week of May 18, 2020
+
- New convenience Docker images available in Semaphore's DockerHub Account.
A detailed list can be found in the [Docker images changelog](https://github.com/semaphoreci/docker-images/blob/master/CHANGELOG#L2421)
- Updates to the macOS Catalina Xcode11:
- - Xcode 11.5 installed
- - Flutter version update to 1.17.1
- - Fastlane gem version update to 2.148.1
+ - Xcode 11.5 installed
+ - Flutter version update to 1.17.1
+ - Fastlane gem version update to 2.148.1
- Additions to the Ubuntu1804 image:
- - Doctl 1.43.0
+ - Doctl 1.43.0
- Updates to the Ubuntu1804 image:
- - Aws-cli 1.18.42 -> 1.18.59
- - Git-lfs 2.10.0 -> 2.11.0
- - Go 1.13.9 -> 1.13.10
- - Go 1.14.1 -> 1.14.2
- - Heroku 7.39.3 -> 7.41.1
- - PHP 7.2.28 -> 7.2.29
- - PHP 7.3.15 -> 7.3.16
- - PHP 7.4.3 -> 7.4.4
- - Sbt 0.13.17 -> 1.3.10
+ - Aws-cli 1.18.42 -> 1.18.59
+ - Git-lfs 2.10.0 -> 2.11.0
+ - Go 1.13.9 -> 1.13.10
+ - Go 1.14.1 -> 1.14.2
+ - Heroku 7.39.3 -> 7.41.1
+ - PHP 7.2.28 -> 7.2.29
+ - PHP 7.3.15 -> 7.3.16
+ - PHP 7.4.3 -> 7.4.4
+ - Sbt 0.13.17 -> 1.3.10
## Week of May 11, 2020
@@ -4743,88 +4768,89 @@ A detailed list can be found in the [Docker images changelog](https://github.com
- Names of Semaphore Secrets are now restricted to alphanumeric characters, dashes, dots, and `@`.
Regex: `^[@: -._a-zA-Z0-9]+$`).
- Updates to the macOS image:
- - Flutter version updated to 1.17.0, image spec macOS Catalina Xcode11
+ - Flutter version updated to 1.17.0, image spec macOS Catalina Xcode11
- New convenience Docker images available in Semaphore's DockerHub Account.
A detailed list can be found in the [Docker images changelog](https://github.com/semaphoreci/docker-images/blob/master/CHANGELOG#L2164).
## Week of April 27, 2020
+
- New macOS image available - macOS Catalina Xcode11
- Updates to the Ubuntu1804 image:
- - Aws-cli 1.18.39 -> 1.18.42
- - Jruby 9.1.17.0 -> 9.2.11.1
- - Heroku 7.39.1 -> 7.39.3
+ - Aws-cli 1.18.39 -> 1.18.42
+ - Jruby 9.1.17.0 -> 9.2.11.1
+ - Heroku 7.39.1 -> 7.39.3
## Week of April 20, 2020
- Updates to the macOS image:
- - `macos-mojave-xcode11` Xcode 11.4.1 installed. To switch version use
+ - `macos-mojave-xcode11` Xcode 11.4.1 installed. To switch version use
`xcversion select 11.4.1`, image spec macOS Mojave Xcode11
- - `macos-mojave-xcode11` Xcode 11.1 and Xcode 11.4 removed from the image.
+ - `macos-mojave-xcode11` Xcode 11.1 and Xcode 11.4 removed from the image.
- Updates to the Ubuntu1804 image:
- - TCP port 8000 is no longer occupied.
+ - TCP port 8000 is no longer occupied.
## Week of April 13, 2020
- Updates to the Ubuntu1804 image:
- - Aws-cli 1.18.32 -> 1.18.39
- - Google Chrome 80 -> 81
- - Chromedriver 80 -> 81
- - Erlang 22.2 -> 22.3
- - Go 1.13.8 -> 1.13.9
- - Go 1.14.0 -> 1.14.1
- - Ruby 2.4.9 -> 2.4.10
- - Ruby 2.5.7 -> 2.5.8
- - Ruby 2.6.5 -> 2.6.6
- - Ruby 2.7.0 -> 2.7.1
+ - Aws-cli 1.18.32 -> 1.18.39
+ - Google Chrome 80 -> 81
+ - Chromedriver 80 -> 81
+ - Erlang 22.2 -> 22.3
+ - Go 1.13.8 -> 1.13.9
+ - Go 1.14.0 -> 1.14.1
+ - Ruby 2.4.9 -> 2.4.10
+ - Ruby 2.5.7 -> 2.5.8
+ - Ruby 2.6.5 -> 2.6.6
+ - Ruby 2.7.0 -> 2.7.1
## Week of April 6, 2020
- Updates to the macOS image:
- - `macos-mojave-xcode11` Xcode 11.4 installed. To switch version use
+ - `macos-mojave-xcode11` Xcode 11.4 installed. To switch version use
`xcversion select 11.4`, image spec macOS Mojave Xcode11
- - `macos-mojave-xcode11` Xcode 11.0 removed from the image.
- - installed packages:
- - google-chrome
- - firefox
- - microsoft-edge
+ - `macos-mojave-xcode11` Xcode 11.0 removed from the image.
+ - installed packages:
+ - google-chrome
+ - firefox
+ - microsoft-edge
- A new convenience Docker image is available in Semaphore's [dockerhub](https://hub.docker.com/u/semaphoreci).
A detailed list can be found in the [Docker images changelog](https://github.com/semaphoreci/docker-images/blob/master/CHANGELOG#L1345).
## Week of March 30, 2020
- Updates to the Ubuntu1804 image:
- - Aws-cli 1.18.21 -> 1.18.28
- - Git 2.25 -> 2.26
- - Heroku 7.39.0 -> 7.39.1
+ - Aws-cli 1.18.21 -> 1.18.28
+ - Git 2.25 -> 2.26
+ - Heroku 7.39.0 -> 7.39.1
- Semaphore Toolbox:
- - [autocache](../using-semaphore/optimization/cache) support for Golang (>= 1.11)
+ - [autocache](../using-semaphore/cache) support for Golang (>= 1.11)
## Week of March 16, 2020
- Additions to the Ubuntu1804 image:
- - Elixir 1.10.2
- - Go 1.14
+ - Elixir 1.10.2
+ - Go 1.14
- Updates to the Ubuntu1804 image:
- - Aws-cli 1.18.7 -> 1.18.21
- - Heroku 7.38.2 -> 7.39.0
- - Yarn 1.21.1 -> 1.22.4
+ - Aws-cli 1.18.7 -> 1.18.21
+ - Heroku 7.38.2 -> 7.39.0
+ - Yarn 1.21.1 -> 1.22.4
## Week of March 02, 2020
- Additions to the Ubuntu1804 image:
- - Elixir 1.10.1
- - PHP 7.4.3
+ - Elixir 1.10.1
+ - PHP 7.4.3
- Updates to the Ubuntu1804 image:
- - Aws-cli 1.17.17 -> 1.18.7
- - Erlang 22.1 -> 22.2
- - Go 1.12.10 -> 1.12.17
- - Go 1.13.1 -> 1.13.8
- - Heroku 7.38.1 -> 7.38.2
- - Maven 3.5.4 -> 3.6.3
- - PHP 7.1.32 -> 7.1.33
- - PHP 7.2.27 -> 7.2.28
- - PHP 7.3.11 -> 7.3.15
- - Scala 2.12.7 -> 2.12.10
+ - Aws-cli 1.17.17 -> 1.18.7
+ - Erlang 22.1 -> 22.2
+ - Go 1.12.10 -> 1.12.17
+ - Go 1.13.1 -> 1.13.8
+ - Heroku 7.38.1 -> 7.38.2
+ - Maven 3.5.4 -> 3.6.3
+ - PHP 7.1.32 -> 7.1.33
+ - PHP 7.2.27 -> 7.2.28
+ - PHP 7.3.11 -> 7.3.15
+ - Scala 2.12.7 -> 2.12.10
## Week of February 24, 2020
@@ -4836,14 +4862,14 @@ A detailed list can be found in the [Docker images changelog](https://github.com
- Workflow page: clicking on the list of pipeline runs will open and focus on
that part of the workflow.
- Updates to the Ubuntu1804 image:
- - Aws-cli 1.17.9 -> 1.17.17
- - Chrome 79 -> 80
- - Chromedriver 79 -> 80
- - Heroku 7.37.0 -> 7.38.1
- - Nvm 8.16.2 -> 8.17.0
- - Nvm 10.17.0 -> 10.19.0
- - Nvm 12.13.0 -> 12.16.0
- - Npm 6.4.1 -> 6.13.4
+ - Aws-cli 1.17.9 -> 1.17.17
+ - Chrome 79 -> 80
+ - Chromedriver 79 -> 80
+ - Heroku 7.37.0 -> 7.38.1
+ - Nvm 8.16.2 -> 8.17.0
+ - Nvm 10.17.0 -> 10.19.0
+ - Nvm 12.13.0 -> 12.16.0
+ - Npm 6.4.1 -> 6.13.4
## Week of February 10, 2020
@@ -4856,37 +4882,38 @@ A detailed list can be found in the [Docker images changelog](https://github.com
- Jobs export `SEMAPHORE_WORKFLOW_TRIGGERED_BY_HOOK`, `SEMAPHORE_WORKFLOW_TRIGGERED_BY_SCHEDULE`, and
`SEMAPHORE_WORKFLOW_TRIGGERED_BY_API` [environment variables](../reference/env-vars).
- Environment setup commands in jobs now have descriptive names. The new command names are:
- - `Exporting environment variables`
- - `Injecting Files`
- - `Setting up the Semaphore Toolbox`
- - `Starting an ssh-agent`
- - `Connecting to cache`
- - You can now override the default Docker command and entrypoint in attached containers.
+ - `Exporting environment variables`
+ - `Injecting Files`
+ - `Setting up the Semaphore Toolbox`
+ - `Starting an ssh-agent`
+ - `Connecting to cache`
+- You can now override the default Docker command and entrypoint in attached containers.
## Week of February 3, 2020
- Additions to the Ubuntu1804 image:
- - libmaxminddb0
- - libmaxminddb-dev
+ - libmaxminddb0
+ - libmaxminddb-dev
- Ubuntu1804 kernel settings changes:
- - vm.max_map_count=262144
- - fs.inotify.max_user_instances=524288
- - fs.inotify.max_user_watches=524288
- - fs.inotify.max_queued_events=524288
+ - vm.max_map_count=262144
+ - fs.inotify.max_user_instances=524288
+ - fs.inotify.max_user_watches=524288
+ - fs.inotify.max_queued_events=524288
- Updates to the Ubuntu1804 image:
- - Heroku 7.35.1 -> 7.37.0
- - Java8 u232 -> u242
- - Java11 11.0.5 -> 11.0.6
- - Git-lfs 2.9.2 -> 2.10.0
- - Aws-cli 1.17.2 -> 1.17.9
+ - Heroku 7.35.1 -> 7.37.0
+ - Java8 u232 -> u242
+ - Java11 11.0.5 -> 11.0.6
+ - Git-lfs 2.9.2 -> 2.10.0
+ - Aws-cli 1.17.2 -> 1.17.9
## Week of January 27, 2020
+
- Updates to the macOS image:
- - installed packages:
- - `usbmuxd`
- - `libimobiledevice`
- - `ideviceinstaller`
- - `ios-deploy`
+ - installed packages:
+ - `usbmuxd`
+ - `libimobiledevice`
+ - `ideviceinstaller`
+ - `ios-deploy`
- A new convenience Docker image is available in Semaphore's
DockerHub Account: android with `flutter` preinstalled.
A detailed list can be found in the [Docker images changelog](https://github.com/semaphoreci/docker-images/blob/master/CHANGELOG#L792).
@@ -4896,27 +4923,27 @@ A detailed list can be found in the [Docker images changelog](https://github.com
- New: "Jump to a branch or pull request" quick search modal dialog
is available on the project page.
- Additions to the ubuntu1804 image:
- - Python 3.8
- - Snapd
+ - Python 3.8
+ - Snapd
- Updates to the Ubuntu1804 image:
- - Awc-cli 1.16 -> 1.17.1
- - Firefox esr 68.2.0 -> 68.4.1
- - Geckodriver 0.21.0 -> 0.26.0
- - Java 8 u201 -> u232
- - Java 11.0.2 -> 11.0.5
- - Heroku 7.35.0 -> 7.35.1
- - Git 2.24 -> 2.25
- - Git-LFS 2.9.1 -> 2.9.2
- - Phpunit 7.5.18 -> 7.5.20
+ - Aws-cli 1.16 -> 1.17.1
+ - Firefox esr 68.2.0 -> 68.4.1
+ - Geckodriver 0.21.0 -> 0.26.0
+ - Java 8 u201 -> u232
+ - Java 11.0.2 -> 11.0.5
+ - Heroku 7.35.0 -> 7.35.1
+ - Git 2.24 -> 2.25
+ - Git-LFS 2.9.1 -> 2.9.2
+ - Phpunit 7.5.18 -> 7.5.20
- Updates to the macOS image:
- - `macos-mojave-xcode11` Xcode 11.3.1 installed. To switch version use
+ - `macos-mojave-xcode11` Xcode 11.3.1 installed. To switch version use
`xcversion select 11.3.1`, image spec macOS Mojave Xcode11
- - installed packages:
- - carthage
- - updated gems:
- - fastlane (2.140.0)
- - cocoapods (1.8.4)
+ - installed packages:
+ - carthage
+ - updated gems:
+ - fastlane (2.140.0)
+ - cocoapods (1.8.4)
- New convenience Docker images are available in Semaphore's DockerHub Account.
A detailed list can be found in the [Docker images changelog](https://github.com/semaphoreci/docker-images/blob/master/CHANGELOG#L528).
- We have retroactively updated previous entries in the changelog to mention new features and improvements.
@@ -4936,7 +4963,7 @@ A detailed list can be found in the [Docker images changelog](https://github.com
rather than for every new line. Several bugs, e.g. related to dealing with very
large lines and low baud rate, have been fixed.
- Additions to ubuntu1804 image
- - Ruby 2.7.0
+ - Ruby 2.7.0
## Week of December 16, 2019
@@ -4946,13 +4973,13 @@ A detailed list can be found in the [Docker images changelog](https://github.com
project](https://github.com/semaphoreci-demos/semaphore-demo-react-native) is
available.
- Updates to the Ubuntu1804 image
- - Git-lfs 2.9.1 -> 2.9.2
- - Yarn 1.19.2 -> 1.21.1
- - Chrome 78 -> 79
- - Chromedriver 78 -> 79
- - PhpUnit 7.5.17 -> 7.5.18
- - Phpbrew 1.23.1 -> 1.24.1
- - APT: removed ppa jonathonf/python-2.7
+ - Git-lfs 2.9.1 -> 2.9.2
+ - Yarn 1.19.2 -> 1.21.1
+ - Chrome 78 -> 79
+ - Chromedriver 78 -> 79
+ - PhpUnit 7.5.17 -> 7.5.18
+ - Phpbrew 1.23.1 -> 1.24.1
+ - APT: removed ppa jonathonf/python-2.7
## Week of December 9, 2019
@@ -4963,75 +4990,76 @@ A detailed list can be found in the [Docker images changelog](https://github.com
- New: [Open source organizations are available](https://semaphoreci.com/blog/free-open-source-cicd). Each open source organization receives unlimited CI/CD minutes for building public repositories, including Linux, Docker, and macOS-based environments.
- New: [Status badges](../using-semaphore/projects#badges).
- Updates to the Ubuntu1804 image
- - Git-lfs 2.9.0 -> 2.9.1
- - Yarn 1.19.1 -> 1.19.2
+ - Git-lfs 2.9.0 -> 2.9.1
+ - Yarn 1.19.1 -> 1.19.2
- Additions to the Ubuntu1804 image
- - Elixir versions 1.9.2, 1.9.3, 1.9.4
+ - Elixir versions 1.9.2, 1.9.3, 1.9.4
## Week of November 18, 2019
- New: [Monorepo support](../using-semaphore/monorepo).
- Updates to the Ubuntu1804 image
- - Git 2.23 -> 2.24
- - Heroku 7.33 -> 7.35
+ - Git 2.23 -> 2.24
+ - Heroku 7.33 -> 7.35
- Additions to the Ubuntu1804 image
- - ImageMagick 8:6.9.7.4
+ - ImageMagick 8:6.9.7.4
## Week of November 11, 2019
+
- macOS image updates:
- - `macos-mojave-xcode11` Xcode 11.2.1 installed. To switch version use
+ - `macos-mojave-xcode11` Xcode 11.2.1 installed. To switch version use
`xcversion select 11.2.1`, image spec macOS Mojave Xcode11
- - nvm pre-installed in `macos-mojave-xcode11` and `macos-mojave-xcode10`, image spec: macOS Mojave Xcode10
- - packages:
+ - nvm pre-installed in `macos-mojave-xcode11` and `macos-mojave-xcode10`, image spec: macOS Mojave Xcode10
+ - packages:
- fastlane 2.135.2
- cocoapods 1.8.4
## Week of November 4, 2019
- - [Status badges](../using-semaphore/projects#badges) are available.
- - Semaphore toolbox:
- - Fixed an issue with cache corruption during parallel uploads.
- - New [environment variables available in Semaphore jobs](../reference/env-vars):
- - `SEMAPHORE_AGENT_MACHINE_TYPE`
- - `SEMAPHORE_AGENT_MACHINE_OS_IMAGE`
- - `SEMAPHORE_AGENT_MACHINE_ENVIRONMENT_TYPE`
- - When creating a project, you can select from a collection of ready-made
+- [Status badges](../using-semaphore/projects#badges) are available.
+- Semaphore toolbox:
+ - Fixed an issue with cache corruption during parallel uploads.
+- New [environment variables available in Semaphore jobs](../reference/env-vars):
+ - `SEMAPHORE_AGENT_MACHINE_TYPE`
+ - `SEMAPHORE_AGENT_MACHINE_OS_IMAGE`
+ - `SEMAPHORE_AGENT_MACHINE_ENVIRONMENT_TYPE`
+- When creating a project, you can select from a collection of ready-made
configuration recipes.
## Week of October 21, 2019
- Updates to the Ubuntu1804 image
- - Pip 19.2 -> 19.3.1
- - Nvm 8.11.3 -> 8.16.2
- - Npm 5.6.0 -> 6.4.1
- - Chrome 77 -> 78
- - Chromedriver 77 -> 78
- - Git-lfs 2.8.0 -> 2.9.0
- - Phpunit 7.15.6 -> 7.15.7
- - Firefox-esr 60 -> 68
- - Ruby 2.3.7 -> 2.3.8
- - Ruby 2.4.4 -> 2.4.9
- - Ruby 2.5.1 -> 2.5.7
- - Ruby 2.6.4 -> 2.6.5
+ - Pip 19.2 -> 19.3.1
+ - Nvm 8.11.3 -> 8.16.2
+ - Npm 5.6.0 -> 6.4.1
+ - Chrome 77 -> 78
+ - Chromedriver 77 -> 78
+ - Git-lfs 2.8.0 -> 2.9.0
+ - Phpunit 7.15.6 -> 7.15.7
+ - Firefox-esr 60 -> 68
+ - Ruby 2.3.7 -> 2.3.8
+ - Ruby 2.4.4 -> 2.4.9
+ - Ruby 2.5.1 -> 2.5.7
+ - Ruby 2.6.4 -> 2.6.5
- Additions
- - Nvm 10.17
- - Nvm 12.3
+ - Nvm 10.17
+ - Nvm 12.3
## Week of October 14, 2019
- Updates to the Ubuntu1804 image
- - Heroku 7.30.0 -> 7.33.3
- - Elixir 1.8.1 -> 1.8.2
- - Elixir 1.9.0 -> 1.9.1
- - Erlang 22.0 -> 22.1
- - Yarn 1.17.3 -> 1.19.1
- - Kerl 1.3.4 -> 1.8.4
- - Rebar3 3.6.1 -> 3.12.0
+ - Heroku 7.30.0 -> 7.33.3
+ - Elixir 1.8.1 -> 1.8.2
+ - Elixir 1.9.0 -> 1.9.1
+ - Erlang 22.0 -> 22.1
+ - Yarn 1.17.3 -> 1.19.1
+ - Kerl 1.3.4 -> 1.8.4
+ - Rebar3 3.6.1 -> 3.12.0
## Week of October 7, 2019
- Updates to the macOS image:
- - `macos-mojave-xcode11` Xcode 11.1 installed. To switch version use
+ - `macos-mojave-xcode11` Xcode 11.1 installed. To switch version use
`xcversion select 11.1`, image spec macOS Mojave Xcode11
- You can change which blocks and pipelines send status checks to pull requests on GitHub. [See how here](../reference/semaphore-cli).
@@ -5052,25 +5080,25 @@ A detailed list can be found in the [Docker images changelog](https://github.com
## Week of September 23, 2019
- New macOS `os_image` types:
- - `macos-mojave-xcode10` with Xcode 10.3 and 10.2.1 installed, macOS Mojave Xcode10 image spec.
- - `macos-mojave-xcode11` with Xcode 11.0 installed, macOS Mojave Xcode11 image spec.
- - `macos-mojave` is no longer available.
+ - `macos-mojave-xcode10` with Xcode 10.3 and 10.2.1 installed, macOS Mojave Xcode10 image spec.
+ - `macos-mojave-xcode11` with Xcode 11.0 installed, macOS Mojave Xcode11 image spec.
+ - `macos-mojave` is no longer available.
- Mojave system update:
- - ProductVersion: 10.14.6
- - BuildVersion: 18G95
- - Kernel Version: Darwin 18.7.0
+ - ProductVersion: 10.14.6
+ - BuildVersion: 18G95
+ - Kernel Version: Darwin 18.7.0
- You can now change the initial pipeline file of your project, from `.semaphore/semaphore.yml` to a custom path. This allows you to create multiple Semaphore projects based on the same GitHub repository. The new option is available on the project settings page, or [by editing pipeline_file property of a project](../reference/semaphore-cli#sem-edit) via the release v0.15.0 of the CLI.
- Workflow Builder now supports setting [job parallelism](../reference/pipeline-yaml#parallelism-in-jobs). The new option is available by clicking on a block and expanding the advanced configuration section for the job you want to replicate with the parallelism feature.
- A new version of CLI v0.16.0 has been released.
- - You can change the initial pipeline file of your project.
- - A fix for debug jobs from Pull Request/Tags has been released. This bug
+ - You can change the initial pipeline file of your project.
+ - A fix for debug jobs from Pull Request/Tags has been released. This bug
caused differences in Environment Variables between job and debug sessions.
- Visual Workflow Builder is now part of the project setup.
## Week of September 16, 2019
- New feature: Artifacts. Persistent storage of final CI/CD deliverables, intermediary assets, and files for debugging. Now in public beta.
- - [Learn more about use cases](../using-semaphore/artifacts) and [how to use the artifacts CLI](../reference/toolbox#artifact).
+ - [Learn more about use cases](../using-semaphore/artifacts) and [how to use the artifacts CLI](../reference/toolbox#artifact).
## Week of September 9, 2019
@@ -5089,57 +5117,57 @@ A detailed list can be found in the [Docker images changelog](https://github.com
## Week of August 19, 2019
- New feature: the [`parallelism` property](../reference/pipeline-yaml#parallelism-in-jobs), used to easily generate parallel jobs.
-- Docker-based agents can now [use private container images from any registry](../using-semaphore/optimization/docker#registries).
+- Docker-based agents can now [use private container images from any registry](../using-semaphore/containers/docker#registries).
## Week of August 12, 2019
- Updates to the Ubuntu 18.04 VM image:
- - Chrome and ChromeDriver updated to version 76
- - docker-ce updated to 19.03.1
- - git-lfs updated to 2.8.0
- - heroku updated to 7.27.1
- - java 8 updated to u222
- - java 11 updated to 11.0.4
- - phpunit updated to 7.5.14
- - pip updated to 19.2.1
- - yarn updated to 1.17.3
+ - Chrome and ChromeDriver updated to version 76
+ - docker-ce updated to 19.03.1
+ - git-lfs updated to 2.8.0
+ - heroku updated to 7.27.1
+ - java 8 updated to u222
+ - java 11 updated to 11.0.4
+ - phpunit updated to 7.5.14
+ - pip updated to 19.2.1
+ - yarn updated to 1.17.3
## Week of August 5, 2019
- New feature: [Global job configuration](../reference/pipeline-yaml#global-job-config). Used to define common configuration and apply it across all blocks in a pipeline.
- You can now whitelist contributors who can run Semaphore workflows for your organization when they submit a pull request from a fork. You can also whitelist secrets to be exposed. See your project's Settings in
the web UI to do this.
-- Docker-based agents can now use [private container images](../using-semaphore/optimization/docker#registries) from Google Container Registry.
-- [Dependency caching](../using-semaphore/optimization/cache) is now much simpler. Just write `cache restore` and `cache store` and Semaphore will configure for common language dependencies.
+- Docker-based agents can now use [private container images](../using-semaphore/containers/docker#registries) from Google Container Registry.
+- [Dependency caching](../using-semaphore/cache) is now much simpler. Just write `cache restore` and `cache store` and Semaphore will configure for common language dependencies.
- macOS platform:
- - Flutter version update to v1.8.3
- - New image spec - macOS Mojave
+ - Flutter version update to v1.8.3
+ - New image spec - macOS Mojave
## Week of July 29, 2019
- New features: [Pull request and Git tag support](../using-semaphore/workflows#workflow-triggers). Have full control over which GitHub triggers new workflows. Choose from default branch only, any push to any branch, push to pull requests, and push to pull requests from forked repositories.
- - As a bonus, you can turn off exposure of secrets in forked pull requests.
- - The project page can now show activity from branches, pull requests, and
+ - As a bonus, you can turn off exposure of secrets in forked pull requests.
+ - The project page can now show activity from branches, pull requests, and
tags separately.
- New feature: [Auto-cancel pipeline strategies](../reference/pipeline-yaml#auto-cancel). You can stop running a pipelines when there are newer commits in the repo. To do this use the new `auto_cancel` property in your pipeline configuration.
- macOS platform:
- - Xcode 11 Beta version update 5 (11M382q).
- - Xcode 10.3 with default simulators preinstalled on Mojave image.
- - Flutter version updated to v1.7.8+hotfix.4.
- - Fastlane version updated to 2.128.1.
- - Cocoapods version updated to 1.7.5.
- - New image spec - macOS Mojave
+ - Xcode 11 Beta version update 5 (11M382q).
+ - Xcode 10.3 with default simulators preinstalled on Mojave image.
+ - Flutter version updated to v1.7.8+hotfix.4.
+ - Fastlane version updated to 2.128.1.
+ - Cocoapods version updated to 1.7.5.
+ - New image spec - macOS Mojave
- New [environment variables available in Semaphore jobs](../reference/env-vars):
- - `SEMAPHORE_GIT_REPO_SLUG`
- - `SEMAPHORE_GIT_REF_TYPE`
- - `SEMAPHORE_GIT_REF`
- - `SEMAPHORE_GIT_COMMIT_RANGE`
- - `SEMAPHORE_GIT_TAG_NAME`
- - `SEMAPHORE_GIT_PR_SLUG`
- - `SEMAPHORE_GIT_PR_SHA`
- - `SEMAPHORE_GIT_PR_NUMBER`
- - `SEMAPHORE_GIT_PR_NAME`
- - `SEMAPHORE_ORGANIZATION_URL`
+ - `SEMAPHORE_GIT_REPO_SLUG`
+ - `SEMAPHORE_GIT_REF_TYPE`
+ - `SEMAPHORE_GIT_REF`
+ - `SEMAPHORE_GIT_COMMIT_RANGE`
+ - `SEMAPHORE_GIT_TAG_NAME`
+ - `SEMAPHORE_GIT_PR_SLUG`
+ - `SEMAPHORE_GIT_PR_SHA`
+ - `SEMAPHORE_GIT_PR_NUMBER`
+ - `SEMAPHORE_GIT_PR_NAME`
+ - `SEMAPHORE_ORGANIZATION_URL`
## Week of July 22, 2019
@@ -5147,8 +5175,8 @@ A detailed list can be found in the [Docker images changelog](https://github.com
- New feature: [fail-fast on the first failure](../using-semaphore/pipelines#fail-fast). Now you can stop everything in your pipeline as soon as a failure is detected, or stops only the jobs and blocks in your pipeline that haven't yet started.
- A new global sidebar that uses less screen real estate, and lets you star projects and dashboards so they appear on top of the list. Also, it loads really fast.
- A new version of CLI v0.14.1 has been released.
- - You can now configure pull-request related settings by editing the project
- - A fix for the race condition between toolbox installation and debug session
+ - You can now configure pull-request related settings by editing the project
+ - A fix for the race condition between toolbox installation and debug session
initialization has been released. This bug manifested as "unknown command checkout" if the debug session entry happened before the toolbox installation finished in the machine.
## Week of July 15, 2019
@@ -5158,20 +5186,20 @@ A detailed list can be found in the [Docker images changelog](https://github.com
## Week of July 1, 2019
-- AWS ECR support for Docker-based environments: host your private Docker images on AWS and use them to define your [custom CI/CD environment](../using-semaphore/optimization/docker#registries) on Semaphore.
+- AWS ECR support for Docker-based environments: host your private Docker images on AWS and use them to define your [custom CI/CD environment](../using-semaphore/containers/docker#registries) on Semaphore.
- [Skip CI](../using-semaphore/workflows#skip): If you add `[skip ci]` or `[ci skip]` to your Git commit message, Semaphore will not trigger a new workflow.
- The context of [Github Status checks](https://developer.github.com/v3/repos/statuses/) has been changed to include information about build source, which can be one of the following:
- - `ci/semaphoreci/push`
- - `ci/semaphoreci/pr`
- - `ci/semaphoreci/tag`
+ - `ci/semaphoreci/push`
+ - `ci/semaphoreci/pr`
+ - `ci/semaphoreci/tag`
[Please update your settings on GitHub](https://help.github.com/en/articles/enabling-required-status-checks) if you are using protected branches with required status checks.
## Week of June 24, 2019
- macOS platform:
- - Xcode 11 Beta with default simulators preinstalled on Mojave image.
- - macOS Mojave updated to 10.14.5.
+ - Xcode 11 Beta with default simulators preinstalled on Mojave image.
+ - macOS Mojave updated to 10.14.5.
## Week of June 10, 2019
@@ -5184,25 +5212,25 @@ A detailed list can be found in the [Docker images changelog](https://github.com
- Launched support for skipping blocks based on conditions, e.g. `branch != 'master'`. Read more about [skipping blocks](../using-semaphore/jobs#skip-run) and the introduction of the [Conditions domain specific language](../reference/conditions-dsl) that allows the expression of complex conditional rules in your pipelines.
- Owners and admins can now set [Budget Alerts](./faq#budget-alerts).
- New Semaphore approved convenience Docker images have been released:
- - [Alpine](https://hub.docker.com/r/semaphoreci/alpine)
- - [Android](https://hub.docker.com/r/semaphoreci/android)
- - [Clojure](https://hub.docker.com/r/semaphoreci/clojure)
- - [Elixir](https://hub.docker.com/r/semaphoreci/elixir)
- - [Golang](https://hub.docker.com/r/semaphoreci/golang)
- - [Haskell](https://hub.docker.com/r/semaphoreci/haskell)
- - [Node](https://hub.docker.com/r/semaphoreci/node)
- - [Openjdk](https://hub.docker.com/r/semaphoreci/openjdk)
- - [Php](https://hub.docker.com/r/semaphoreci/php)
- - [Python](https://hub.docker.com/r/semaphoreci/python)
- - [Ruby](https://hub.docker.com/r/semaphoreci/ruby)
- - [Rust](https://hub.docker.com/r/semaphoreci/rust)
- - [Ubuntu](https://hub.docker.com/r/semaphoreci/ubuntu)
+ - [Alpine](https://hub.docker.com/r/semaphoreci/alpine)
+ - [Android](https://hub.docker.com/r/semaphoreci/android)
+ - [Clojure](https://hub.docker.com/r/semaphoreci/clojure)
+ - [Elixir](https://hub.docker.com/r/semaphoreci/elixir)
+ - [Golang](https://hub.docker.com/r/semaphoreci/golang)
+ - [Haskell](https://hub.docker.com/r/semaphoreci/haskell)
+ - [Node](https://hub.docker.com/r/semaphoreci/node)
+ - [Openjdk](https://hub.docker.com/r/semaphoreci/openjdk)
+ - [Php](https://hub.docker.com/r/semaphoreci/php)
+ - [Python](https://hub.docker.com/r/semaphoreci/python)
+ - [Ruby](https://hub.docker.com/r/semaphoreci/ruby)
+ - [Rust](https://hub.docker.com/r/semaphoreci/rust)
+ - [Ubuntu](https://hub.docker.com/r/semaphoreci/ubuntu)
- Version `v0.13.0` of the Semaphore CLI has been released.
- - `sem debug job` works without configuring the CLI with an SSH key. Keys are now generated server side.
- - `sem attach` can attach to any running job without the need to inject public SSH keys as part of your Pipeline configuration.
- - Debugging and attaching to jobs works for Docker-based CI/CD environments
- - Read the updated documentation rearding [Debugging with SSH Access](../using-semaphore/jobs#debug-jobs).
+ - `sem debug job` works without configuring the CLI with an SSH key. Keys are now generated server side.
+ - `sem attach` can attach to any running job without the need to inject public SSH keys as part of your Pipeline configuration.
+ - Debugging and attaching to jobs works for Docker-based CI/CD environments
+ - Read the updated documentation regarding [Debugging with SSH Access](../using-semaphore/jobs#debug-jobs).
Upgrade to the latest CLI version:
@@ -5214,19 +5242,19 @@ curl https://storage.googleapis.com/sem-cli-releases/get.sh | bash
- [iOS support is in GA](https://semaphoreci.com/blog/introducing-ios-cicd): Semaphore now supports building, testing, and deploying applications for any Apple device.
- macOS platform:
- - Xcode upgraded to 10.2.1
+ - Xcode upgraded to 10.2.1
- New feature: [schedule CI/CD workflows](../using-semaphore/tasks) using standard Cron syntax.
## Week of Apr 22, 2019
- [Fastlane plugin](https://github.com/semaphoreci/fastlane-plugin-semaphore) is now available.
- Platform updates:
- - Chrome 74, ChromeDriver 74
- - Heroku 7.24.1
- - Git-lfs 2.7.2
- - Pip 19.1
- - Phpunit 7.5.9
- - Removed Oracle Java 7,9,10; Java 8 and 11 are now based on OpenJDK.
+ - Chrome 74, ChromeDriver 74
+ - Heroku 7.24.1
+ - Git-lfs 2.7.2
+ - Pip 19.1
+ - Phpunit 7.5.9
+ - Removed Oracle Java 7,9,10; Java 8 and 11 are now based on OpenJDK.
## Week of Apr 15, 2019
@@ -5237,7 +5265,6 @@ curl https://storage.googleapis.com/sem-cli-releases/get.sh | bash
- Jobs now export `PAGER=cat`. This prevents some commands from waiting indefinitely for user input, such as `git log`.
- Job logs are now fully UTF-8 compliant.
-
## Week of Apr 8, 2019
- New feature: [Run jobs inside a custom Docker container](../using-semaphore/pipelines#docker-environments) (beta).
@@ -5247,11 +5274,11 @@ curl https://storage.googleapis.com/sem-cli-releases/get.sh | bash
## Week of Mar 25, 2019
- Platform updates:
- - Chrome 73
- - Elixir 1.8.1
- - Go 1.12.1
- - Ruby versions >=2.6.0 have bundler 2.0.1 and rubygems>3 preinstalled
- - Scala 2.12.7
+ - Chrome 73
+ - Elixir 1.8.1
+ - Go 1.12.1
+ - Ruby versions >=2.6.0 have bundler 2.0.1 and rubygems>3 preinstalled
+ - Scala 2.12.7
## Week of Mar 18, 2019
@@ -5260,8 +5287,8 @@ curl https://storage.googleapis.com/sem-cli-releases/get.sh | bash
## Week of Mar 12, 2019
- Platform updates:
- - Heroku 7.22.4
- - Libvirt, qemu, virsh are now part of the Ubuntu VM image, with virtual network `192.168.123.0/24`
+ - Heroku 7.22.4
+ - Libvirt, qemu, virsh are now part of the Ubuntu VM image, with virtual network `192.168.123.0/24`
## Week of Feb 25, 2019
@@ -5273,14 +5300,14 @@ curl https://storage.googleapis.com/sem-cli-releases/get.sh | bash
a red box.
- Fixed an issue with sliders on Linux/Chrome.
- Platform additions:
- - Go 1.12
- - libvirt-bin, qemu-kvm, and virtinst
+ - Go 1.12
+ - libvirt-bin, qemu-kvm, and virtinst
- Platform updates:
- - git 2.21
- - git-lfs 2.7.1
- - gradle 5.2
- - heroku to 7.22.2
- - sbt 0.13.17
+ - git 2.21
+ - git-lfs 2.7.1
+ - gradle 5.2
+ - heroku to 7.22.2
+ - sbt 0.13.17
a handy page of practical examples of CI/CD pipelines, with links to open source repositories that you can copy and use.
## Week of Feb 18, 2019
@@ -5298,16 +5325,16 @@ curl https://storage.googleapis.com/sem-cli-releases/get.sh | bash
is now available in Semaphore 2.0! Using the command-line interface
remains an option, of course.
- Platform:
- - Added new APT mirrors for faster apt-get installations in the Ubuntu1804 image.
- - Chrome updated to 72.
- - Heroku CLI updated to 7.21.
+ - Added new APT mirrors for faster apt-get installations in the Ubuntu1804 image.
+ - Chrome updated to 72.
+ - Heroku CLI updated to 7.21.
## Week of Feb 4, 2019
- Platform:
- - ChromeDriver updated to 2.46.
- - Added Ruby 2.6.0, 2.6.1.
- - If a repository contains a `.ruby-version` file, Semaphore automatically fetches
+ - ChromeDriver updated to 2.46.
+ - Added Ruby 2.6.0, 2.6.1.
+ - If a repository contains a `.ruby-version` file, Semaphore automatically fetches
a pre-built version of the specified Ruby.
## Week of Jan 28, 2019
diff --git a/docs/docs/getting-started/faq.md b/docs/docs/getting-started/faq.md
index 30805c591..f3ccd709e 100644
--- a/docs/docs/getting-started/faq.md
+++ b/docs/docs/getting-started/faq.md
@@ -96,7 +96,7 @@ In order to change your credit card or PayPal information, follow these steps:
### Can I change my billing information?
-Yes. If you want to change the recipient name, company name, address, phone number, billing email, VAT ID, or country on the invoice, please get in touch with us at: support@semaphoreci.com
+Yes. If you want to change the recipient name, company name, address, phone number, billing email, VAT ID, or country on the invoice, please get in touch with us at: `support@semaphoreci.com`
:::note
@@ -108,7 +108,7 @@ Billing change requests must originate from a user with Admin or Owner permissio
Not directly from the Semaphore website. After a subscription has been purchased, users can't add or change a VAT number (VAT ID) from the UI.
-If you wish to change the VAT number associated with your subscription, please get in touch with support@semaphoreci.com with the VAT number you want to add and we will gladly make it happen.
+If you wish to change the VAT number associated with your subscription, please get in touch with `support@semaphoreci.com` with the VAT number you want to add and we will gladly make it happen.
For security reasons, these requests **must originate from an email** linked to a user with the [Owner role](../using-semaphore/rbac#org-owner), or from the current Billing contact associated with the organization’s FastSpring subscription. This ensuress that only authorized individuals can make changes to the billing information.
@@ -120,15 +120,15 @@ You can also find your invoices at the bottom of the **Plans & Billing** in your
### What is your refund policy?
-Apart from cases of extended downtime (multiple hours in a day, or multiple days in a month), we do not offer refunds.
+Apart from cases of extended downtime (multiple hours in a day, or multiple days in a month), we do not offer refunds.
-We will, however, consider requests for refunds in extenuating circumstances. If you would like to request a refund, please email us at billing@semaphoreci.com and our team will do what we can to work out a solution.
+We will, however, consider requests for refunds in extenuating circumstances. If you would like to request a refund, please email us at `billing@semaphoreci.com` and our team will do what we can to work out a solution.
Please include the affected Workflow ID when contacting our Billing team regarding refunds.
### Why are you still charging my old credit card?
-If you’ve added a new credit card to the subscription, but the old one is still being charged, it means that the new credit card wasn't properly marked for usage.
+If you’ve added a new credit card to the subscription, but the old one is still being charged, it means that the new credit card wasn't properly marked for usage.
Here’s how to do that:
@@ -182,6 +182,7 @@ Yes. To do that, follow these steps:
git submodule init
git submodule update
```
+
2. Append the these commands in the [epilogue](../using-semaphore/jobs#epilogue)
```shell
@@ -190,11 +191,11 @@ Yes. To do that, follow these steps:
-Make sure that Semaphore has permissions to clone your submodules repository.
+Make sure that Semaphore has permissions to clone your submodules repository.
### Can I redeliver webhooks from Github to Semaphore?
-Yes. Rarely Semaphore does not receive a webhook from GitHub. This results in a workflow not being triggered. When this happens, you can redeliver the webhook to trigger the workflow.
+Yes. Rarely, Semaphore does not receive a webhook from GitHub. This results in a workflow not being triggered. When this happens, you can redeliver the webhook to trigger the workflow.
These are the steps to redeliver webhooks from Github:
@@ -207,7 +208,7 @@ These are the steps to redeliver webhooks from Github:
### Can I send a comment on a pull request on GitHub from a workflow?
-Yes. You can use the [GitHub API](https://docs.github.com/en/rest/issues?apiVersion=2022-11-28#create-an-issue-comment) to comment on pull requests.
+Yes. You can use the [GitHub API](https://docs.github.com/en/rest/issues?apiVersion=2022-11-28#create-an-issue-comment) to comment on pull requests.
For example:
@@ -288,7 +289,7 @@ Yes. Semaphore provides template support on [parameterized promotions](../using-
### Can I change the timezone?
-The default timezone is UTC. The timezone can be changed in 2 ways in Linux agents:
+The default timezone is UTC. The timezone can be changed in 2 ways in Linux agents:
- Assign a different value to the TZ environment variable:
@@ -365,15 +366,13 @@ While an issue is ongoing, you might consider using a shorter [execution_time_li
:::
-
### Why is my job failing if all commands have passed?
This can happen because of code coverage tools, e.g. simplecov, which can be set to fail the test suite if a [minimum coverage level is not achieved](https://github.com/simplecov-ruby/simplecov#minimum-coverage).
-
### Why are tests passing locally but not on Semaphore?
-The main reason for this behavior is differences in the stacks. As a first step, ensure that the same versions of languages, services, tools, and frameworks such as Selenium, browser drivers, Capybara, Cypress are used both locally and in the CI environment.
+The main reason for this behavior is differences in the stacks. As a first step, ensure that the same versions of languages, services, tools, and frameworks such as Selenium, browser drivers, Capybara, Cypress are used both locally and in the CI environment.
To achieve this, use [sem-service](../reference/toolbox#sem-service), [sem-version](../reference/toolbox#sem-version), and the OS package manager. Environment variables can also lead to unexpected behaviors, for instance, Semaphore will set `CI=true` by default.
@@ -389,7 +388,7 @@ You might be hitting the quota limitation. To see your activity across the organ
2. Select Activity Monitor
3. Check your agent usage, jobs won't start until a suitable agent is free
-You can also run [`sem get jobs`](../reference/semaphore-cli#sem-get-job) to display all running jobs to confirm how much of the quota is being used.
+You can also run [`sem get jobs`](../reference/semaphore-cli#sem-get-job) to display all running jobs to confirm how much of the quota is being used.
### Why does my job fail when I specify "exit 0" in commands?
@@ -411,7 +410,7 @@ Some commands like `bash -e` or `set -x otrace` may override this behavior and m
This is commonly due to a rate-limit of third-party providers such as Docker Hub. These services limit how many unauthenticated pulls you can do in an hour, often based on IP. The machine you are running your jobs on may have been provisioned for another user, resulting in that particular IP being rate-limited.
-You can bypass this issue by creating a free account on Docker Hub, and then [authenticating with Docker](../using-semaphore/optimization/docker#auth) within the job. This way, the [pulls are limited by your account (100 per hour)](https://docs.docker.com/docker-hub/usage/), and not by the IP of the machine.
+You can bypass this issue by creating a free account on Docker Hub, and then [authenticating with Docker](../using-semaphore/containers/docker#auth) within the job. This way, the [pulls are limited by your account (100 per hour)](https://docs.docker.com/docker-hub/usage/), and not by the IP of the machine.
:::tip
@@ -423,7 +422,6 @@ If you cannot authenticate, you can use other third-party Docker registries such
If you have a pull request stuck when using GitHub, check if you have renamed the pipeline recently. If you did, see how to [fix stuck PRs on GitHub](../using-semaphore/connect-github#stuck-pr)
-
## Project
### Can I transfer ownership of a project?
@@ -442,7 +440,7 @@ To change the project ownership:
After project ownership has been transferred, you need to push a new commit. Old workflows cannot be re-run after transferring ownership.
-If you come across any issues, please reach out to support@semaphoreci.com and include the name of the project and the GitHub/Bitbucket username of the new owner in your message.
+If you come across any issues, please reach out to `support@semaphoreci.com` and include the name of the project and the GitHub/Bitbucket/GitLab username of the new owner in your message.
### Can I rename a project?
@@ -474,7 +472,6 @@ Deleting a project cannot be reversed.
:::
-
### Can I change the visibility of a project?
Yes. To make the project visible or private follow these steps:
@@ -484,7 +481,6 @@ Yes. To make the project visible or private follow these steps:
3. Click the link next to **Public** or **Private** to toggle the visibility
4. Press **Save Changes**
-
### Why can't I make my project private?
This might be a limitation related to the plan your organization is using. Open-source and free plans cannot create private projects.
@@ -506,13 +502,12 @@ If you are using a [filter for contributors](../using-semaphore/workflows#projec
Approving forked pull requests is limited to new comments only and is not possible for comment edits. Due to security concerns, `/sem-approve` will work only once. Subsequent pushes to the forked pull request must be approved again.
-
### How do I fix the error "Revision: COMMIT_SHA not found. Exiting"
This happens when the repository receives pushed while Semaphore is still processing the incoming webhook. For example, when someone modifies or removes with a `git rebase` or `git commit --amend` command followed by a `git push --force` shortly after.
You can prevent this error by enabling the [auto-cancel](../using-semaphore/pipelines#auto-cancel) option in the pipeline.
-
+
### Why are my workflows not running in parallel?
Git pushes to the same branch are [queued](../using-semaphore/pipelines#pipeline-queues) by default. Pushes to different branches do run in parallel. You can use [named queues in your pipelines](../using-semaphore/pipelines#named-queues) to better control how workflows are parallelized or activate [auto-cancel](../using-semaphore/pipelines#auto-cancel) to stop running pipelines when new pushes arrive to the queue.
@@ -571,4 +566,3 @@ Enabling the `set -e` option in the Bash shell causes autocomplete to fail and e
### Why are my secrets empty?
We have discontinued exposing secret content via the CLI, API, and web interface to ensure enhanced security measures. Retrieval of secret values is now exclusively available through the job mechanism.
-
diff --git a/docs/docs/getting-started/migration/bitbucket.md b/docs/docs/getting-started/migration/bitbucket.md
index 1b732f186..63a0bfc38 100644
--- a/docs/docs/getting-started/migration/bitbucket.md
+++ b/docs/docs/getting-started/migration/bitbucket.md
@@ -33,10 +33,8 @@ Checkout is implicit in all Travis CI workflows by default.
-
Semaphore does not clone the repository by default. This is because there are certain scenarios in which you don't need the code or you want to customize the cloning process.
-
To clone the repository in Semaphore we only need to execute [`checkout`](../../reference/toolbox#checkout).
```shell
@@ -46,7 +44,6 @@ checkout
cat README.md
```
-
@@ -92,7 +89,6 @@ pipelines:
-
In Semaphore, we use the [artifact](../../reference/toolbox#artifact) command to download and upload files to the artifact store.
The following command stores `test.log` from any job:
@@ -109,7 +105,6 @@ artifact pull workflow test.log
See [artifacts](../../using-semaphore/artifacts) for more details.
-
@@ -153,7 +148,7 @@ bundle install
cache store
```
-See [caching](../../using-semaphore/optimization/cache) for more details.
+See [caching](../../using-semaphore/cache) for more details.
@@ -161,7 +156,6 @@ See [caching](../../using-semaphore/optimization/cache) for more details.
We often need to activate specific language or tool versions to ensure consistent builds.
-
@@ -261,14 +255,12 @@ pipelines:
-
-In Semaphore, we create the [secret](../../using-semaphore/secrets) at the organization or project level and activate it on a block.
+In Semaphore, we create the [secret](../../using-semaphore/secrets) at the organization or project level and activate it on a block.
The secret contents are automatically injected as environment variables in all jobs contained on that block.

-
diff --git a/docs/docs/getting-started/migration/circle.md b/docs/docs/getting-started/migration/circle.md
index 3e8c0ee03..1c141d2eb 100644
--- a/docs/docs/getting-started/migration/circle.md
+++ b/docs/docs/getting-started/migration/circle.md
@@ -75,15 +75,14 @@ global_job_config:
-
### Language versions
-Both CircleCI and Semaphore allow you to use specific language versions.
+Both CircleCI and Semaphore allow you to use specific language versions.
-CircleCI uses a language-specific setup orb.
+CircleCI uses a language-specific setup orb.
The following example sets the Ruby version to `3.3.4`
@@ -101,10 +100,11 @@ jobs:
version: '3.3.4'
# highlight-end
```
+
-Semaphore uses [sem-version](../../reference/toolbox#sem-version) to activate or switch language versions in the CI environment.
+Semaphore uses [sem-version](../../reference/toolbox#sem-version) to activate or switch language versions in the CI environment.
The following example activates Ruby v3.3.4, any commands after the example run on this Ruby version.
@@ -124,7 +124,6 @@ jobs:
-
### Caching
Both CircleCI and Semaphore support manual file caching.
@@ -167,7 +166,7 @@ The following commands, when added to a job downloads, cache, and install Gems i
- cache store
```
-See [caching](../../using-semaphore/optimization/cache) for more details.
+See [caching](../../using-semaphore/cache) for more details.
@@ -177,7 +176,6 @@ See [caching](../../using-semaphore/optimization/cache) for more details.
-
### Database and services
Both CircleCI and Semaphore support starting databases and services via Docker containers.
@@ -219,7 +217,7 @@ jobs:
- sem-service start redis
# highlight-end
```
-
+
@@ -291,7 +289,7 @@ Secrets inject sensitive data and credentials into the workflow securely.
-CircleCI uses contexts instead of secrets. You must create the context and its value through the UI.
+CircleCI uses contexts instead of secrets. You must create the context and its value through the UI.
Then, you can use the `context` keyword to include it in your jobs.
```yaml
@@ -308,7 +306,7 @@ workflows:
-On Semaphore, we create the [secret](../../using-semaphore/secrets) at the organization or project level and activate it on a block.
+On Semaphore, we create the [secret](../../using-semaphore/secrets) at the organization or project level and activate it on a block.
The secret's contents are automatically injected as environment variables in all jobs in that block.
@@ -335,13 +333,12 @@ global_job_config:
-On Semaphore, we create the [secret](../../using-semaphore/secrets) at the organization or project level and activate it on a block.
+On Semaphore, we create the [secret](../../using-semaphore/secrets) at the organization or project level and activate it on a block.
The secret's contents are automatically injected as environment variables in all jobs in that block.

-
@@ -543,7 +540,6 @@ blocks:
-
## See also
- [Migration guide for CircleCI](./circle)
diff --git a/docs/docs/getting-started/migration/github-actions.md b/docs/docs/getting-started/migration/github-actions.md
index e109ddf3a..2965f0c1f 100644
--- a/docs/docs/getting-started/migration/github-actions.md
+++ b/docs/docs/getting-started/migration/github-actions.md
@@ -74,15 +74,14 @@ global_job_config:
-
### Language versions
-Both Github Actions and Semaphore allow you to use specific language versions.
+Both Github Actions and Semaphore allow you to use specific language versions.
-GitHub Actions uses a language-specific setup action.
+GitHub Actions uses a language-specific setup action.
The following example sets the Ruby version to `3.3.4`
@@ -94,10 +93,11 @@ jobs:
with:
ruby-version: '3.3.4'
```
+
-Semaphore uses [sem-version](../../reference/toolbox#sem-version) to activate or switch language versions in the CI environment.
+Semaphore uses [sem-version](../../reference/toolbox#sem-version) to activate or switch language versions in the CI environment.
The following example activates Ruby v3.3.4, any commands after the example run on this Ruby version.
@@ -116,7 +116,6 @@ jobs:
-
### Caching
Both GitHub Actions and Semaphore support manual file caching.
@@ -150,7 +149,7 @@ The following commands, when added to a job downloads, cache, and install Gems i
- cache store
```
-See [caching](../../using-semaphore/optimization/cache) for more details.
+See [caching](../../using-semaphore/cache) for more details.
@@ -160,7 +159,6 @@ See [caching](../../using-semaphore/optimization/cache) for more details.
-
### Database and services
Both Github Actions and Semaphore support starting databases and services via Docker containers.
@@ -199,7 +197,7 @@ jobs:
- sem-service start postgres
- sem-service start redis
```
-
+
@@ -310,13 +308,12 @@ global_job_config:
-On Semaphore, we create the [secret](../../using-semaphore/secrets) at the organization or project level and activate it on a block.
+On Semaphore, we create the [secret](../../using-semaphore/secrets) at the organization or project level and activate it on a block.
The secret's contents are automatically injected as environment variables in all jobs in that block.

-
@@ -479,7 +476,6 @@ blocks:
-
## See also
- [Migration guide for CircleCI](./circle)
diff --git a/docs/docs/getting-started/migration/overview.md b/docs/docs/getting-started/migration/overview.md
index 2d531f126..19355086e 100644
--- a/docs/docs/getting-started/migration/overview.md
+++ b/docs/docs/getting-started/migration/overview.md
@@ -27,7 +27,7 @@ Here is the recommended plan to migrate from any CI provider to Semaphore.
Write down your goals and reasons for migrating your CI to Semaphore. For example, you might wish to reduce costs or speed up your builds. Setting expectations from the get-go will make the whole migration process clearer and more straightforward.
- If at any point in the process, you have doubts, contact us at support@semaphoreci.com. We want this process to be as smooth and painless as possible.
+ If at any point in the process, you have doubts, contact us at `support@semaphoreci.com`. We want this process to be as smooth and painless as possible.
2. Compare features
@@ -36,7 +36,7 @@ Here is the recommended plan to migrate from any CI provider to Semaphore.
- Must have
- Nice to have
- Optional
-
+
Compare the list against [Semaphore Features](https://semaphoreci.com/pricing). Some of the requirements can be implemented in several ways by combining several features.
3. Create a proof of concept in Semaphore
@@ -45,35 +45,33 @@ Here is the recommended plan to migrate from any CI provider to Semaphore.
- [Connect your GitHub](../../using-semaphore/connect-github) or [connect your BitBucket](../../using-semaphore/connect-bitbucket) repository to Semaphore
- Configure a CI pipeline, the objective is to reach a green build. See the [Guided Tour](../guided-tour) to get an overview of Semaphore
-
+
The following pages describe key features you might need for the migration:
- [How to create jobs](../../using-semaphore/jobs)
- [Persist data with Artifacts](../../using-semaphore/artifacts)
- [How to use Docker Environments](../../using-semaphore/pipelines#docker-environments)
- - [How to use the Cache](../../using-semaphore/optimization/cache)
+ - [How to use the Cache](../../using-semaphore/cache)
- [Using promotions for Continuous Delivery](../../using-semaphore/promotions)
- [How to specify language versions](../../reference/toolbox#sem-version)
-
4. Optimize performance
Once your project is building on Semaphore, begin optimizing for performance.
- Pick the best [machine type](../../reference/machine-types) for the CI pipeline
- - Learn and implement the optimization strategies like [caching](../../using-semaphore/optimization/cache), [fail-fast](../../using-semaphore/pipelines#fail-fast), and [auto-cancel](../../using-semaphore/pipelines#auto-cancel)
+ - Learn and implement the optimization strategies like [caching](../../using-semaphore/cache), [fail-fast](../../using-semaphore/pipelines#fail-fast), and [auto-cancel](../../using-semaphore/pipelines#auto-cancel)
5. Optimize costs
Once you have your project running, you can track your spending in the [Plans & Billing page](../../using-semaphore/billing).
The figure to optimize is the cost-per-pipeline. In other words, you should balance the cost of running your CI pipeline against the expected performance.
-
+
You want to focus on the CI pipeline because it accounts for the vast majority of your usage, so optimizing this value has the most impact on your costs.
You can optimize your costs by trying different [machine types](../../reference/machine-types), using [skip conditions](../../using-semaphore/jobs#skip-run), testing different levels of [job parallelism](../../using-semaphore/jobs#job-parallelism) and exploring [pipeline queues](../../using-semaphore/pipelines#pipeline-queues)
-
6. Onboard your team
Once satisfied with your CI workflow it's time to onboard the rest of your team:
diff --git a/docs/docs/getting-started/migration/travis.md b/docs/docs/getting-started/migration/travis.md
index 135e0ff24..382150837 100644
--- a/docs/docs/getting-started/migration/travis.md
+++ b/docs/docs/getting-started/migration/travis.md
@@ -37,7 +37,6 @@ Checkout is implicit in all Travis CI workflows by default.
Semaphore does not clone the repository by default. This is because there are certain scenarios in which you don't need the code or you want to customize the cloning process.
-
To clone the repository in Semaphore we only need to execute [`checkout`](../../reference/toolbox#checkout).
```shell
@@ -68,7 +67,7 @@ addons:
paths:
- $HOME/project/test.log
# highlight-end
-```
+```
@@ -122,19 +121,19 @@ bundle install
cache store
```
-See [caching](../../using-semaphore/optimization/cache) for more details.
+See [caching](../../using-semaphore/cache) for more details.
### Language versions
-Both Travis CI and Semaphore allow you to use specific language versions.
+Both Travis CI and Semaphore allow you to use specific language versions.
-Travis CI uses a language-specific setup keyword.
+Travis CI uses a language-specific setup keyword.
The following example sets the Ruby version to `3.3.4`
@@ -147,7 +146,7 @@ rvm:
-Semaphore uses [sem-version](../../reference/toolbox#sem-version) to activate or switch language versions in the CI environment.
+Semaphore uses [sem-version](../../reference/toolbox#sem-version) to activate or switch language versions in the CI environment.
The following example activates Ruby v3.3.4, any commands after the example run on this Ruby version.
@@ -182,7 +181,7 @@ The following example starts Redis on the default port (6379)
```shell
sem-service start redis
```
-
+
@@ -210,7 +209,7 @@ Using encrypted files uses a different system that's a bit more convoluted.
In Semaphore, secrets are stored on the Semaphore organization or project. Encryption and decryption are automatically handled for environment variables and files.
-First, we create a [secret](../../using-semaphore/secrets) at the organization or project level and activate it on a block.
+First, we create a [secret](../../using-semaphore/secrets) at the organization or project level and activate it on a block.
The secret contents are automatically injected as environment variables in all jobs contained on that block.
diff --git a/docs/docs/reference/agent-aws-stack.md b/docs/docs/reference/agent-aws-stack.md
index 77af843f5..af47a803b 100644
--- a/docs/docs/reference/agent-aws-stack.md
+++ b/docs/docs/reference/agent-aws-stack.md
@@ -14,7 +14,6 @@ import Steps from '@site/src/components/Steps';
This page describes all the settings available to configure [AWS Autoscaler Stack](../using-semaphore/self-hosted-aws).
-
## Overview
The AWS Autoscaler Stack accepts configuration settings in two ways:
@@ -54,7 +53,6 @@ The AWS Autoscaler Stack accepts configuration settings in two ways:
See [Autoscaling with AWS](../using-semaphore/self-hosted-aws) to learn more.
-
## Required parameters
### Endpoint {#endpoint}
@@ -65,12 +63,11 @@ The endpoint the agent uses for registration and sync with your Semaphore organi
If this parameter is not set, you must configure [`SEMAPHORE_ORGANIZATION`](#organization).
-
### Organization {#organization}
- **Parameter name**: `SEMAPHORE_ORGANIZATION`
-The name of your organization.
+The name of your organization.
If [`SEMAPHORE_ENDPOINT`](#endpoint) is not set, this parameter is used to generate the endpoint URL. In this case, the agent assumes the endpoint is `.semaphoreci.com`.
@@ -80,14 +77,12 @@ If [`SEMAPHORE_ENDPOINT`] is not set, this parameter is used to generate the end
The name of the stack. This is the stack name used in Cloudformation and as a prefix to name all the stack resources. When deploying multiple stacks for multiple agent types, different stack names are required.
-
### Agent token {#agent-token}
- **Parameter name**: `SEMAPHORE_AGENT_TOKEN_PARAMETER_NAME`
The AWS SSM parameter name contains the Semaphore agent [registration token](../using-semaphore/self-hosted-install#register-agent).
-
## Optional parameters
Here's the converted markdown documentation based on the original table you provided:
@@ -98,7 +93,6 @@ Here's the converted markdown documentation based on the original table you prov
Path to a JSON file containing the parameters to use. This is an alternative to using environment variables for setting the stack's configuration parameters.
-
### Agent instance type {#agent-instance-type}
- **Parameter name**: `SEMAPHORE_AGENT_INSTANCE_TYPE`
@@ -106,7 +100,6 @@ Path to a JSON file containing the parameters to use. This is an alternative to
AWS instance type used for the agents. See the available instance type on [AWS docs](https://aws.amazon.com/ec2/instance-types/).
-
### Auto-scaling group minimum size {#asg-min-size}
- **Parameter name**: `SEMAPHORE_AGENT_ASG_MIN_SIZE`
@@ -114,7 +107,6 @@ AWS instance type used for the agents. See the available instance type on [AWS d
Minimum size for the auto-scaling group.
-
### Auto-scaling group maximum size {#asg-max-size}
- **Parameter name**: `SEMAPHORE_AGENT_ASG_MAX_SIZE`
@@ -122,7 +114,6 @@ Minimum size for the auto-scaling group.
Maximum size for the auto-scaling group.
-
### Auto-scaling group desired capacity {#asg-desired}
- **Parameter name**: `SEMAPHORE_AGENT_ASG_DESIRED`
@@ -130,7 +121,6 @@ Maximum size for the auto-scaling group.
Desired capacity for the auto-scaling group.
-
### Use dynamic scaling {#use-dynamic-scaling}
- **Parameter name**: `SEMAPHORE_AGENT_USE_DYNAMIC_SCALING`
@@ -147,14 +137,12 @@ Security Group ID to use for agent instances. If not specified, a security group
- an egress rule allowing all outbound traffic
- an ingress rule for SSH if [`SEMAPHORE_AGENT_KEY_NAME`](#key-name) is specified
-
### Key name {#key-name}
- **Parameter name**: `SEMAPHORE_AGENT_KEY_NAME`
Key name to access agents through SSH. If not specified, no SSH inbound access is allowed.
-
### Disconnect after job {#disconnect-after-job}
- **Parameter name**: `SEMAPHORE_AGENT_DISCONNECT_AFTER_JOB`
@@ -162,23 +150,20 @@ Key name to access agents through SSH. If not specified, no SSH inbound access i
If true, the agent disconnects after completing a job.
-
### Disconnect after idle timeout {#disconnect-after-idle-timeout}
- **Parameter name**: `SEMAPHORE_AGENT_DISCONNECT_AFTER_IDLE_TIMEOUT`
- **default value**: `300`
-Number of seconds of idleness after which the agent is shut down.
+Number of seconds of idleness after which the agent is shut down.
Setting this to 0 disables the scaling down behavior for the stack since the agents do not shut down due to idleness.
-
### Cache bucket name {#cache-bucket-name}
- **Parameter name**: `SEMAPHORE_AGENT_CACHE_BUCKET_NAME`
-Existing S3 bucket name to use for caching. If this is not set, [caching](../using-semaphore/optimization/cache) does not work.
-
+Existing S3 bucket name to use for caching. If this is not set, [caching](../using-semaphore/cache) does not work.
### Token KMS key {#token-kms-key}
@@ -186,80 +171,70 @@ Existing S3 bucket name to use for caching. If this is not set, [caching](../usi
KMS key id used to encrypt and decrypt `SEMAPHORE_AGENT_TOKEN_PARAMETER_NAME`. If nothing is given, the default `alias/aws/ssm` key is assumed.
-
### VPC ID {#vpc-id}
- **Parameter name**: `SEMAPHORE_AGENT_VPC_ID`
The ID of an existing VPC to use when launching agent instances. By default, this is blank, and the default VPC on your AWS account is used.
-
### Subnets {#subnets}
- **Parameter name**: `SEMAPHORE_AGENT_SUBNETS`
-Comma-separated list of existing VPC subnet IDs where EC2 instances are to run. This is required when using [`SEMAPHORE_AGENT_VPC_ID`](#vpc-id).
+Comma-separated list of existing VPC subnet IDs where EC2 instances are to run. This is required when using [`SEMAPHORE_AGENT_VPC_ID`](#vpc-id).
If `SEMAPHORE_AGENT_SUBNETS` is set and [`SEMAPHORE_AGENT_VPC_ID`](#vpc-id) is blank, the subnets are ignored, and the default VPC is used. This means that private and public subnets are possible, but isolated subnets cannot be used.
-
### AMI {#ami}
- **Parameter name**: `SEMAPHORE_AGENT_AMI`
The AMI is used for all instances. If empty, the stack uses the default AMIs, looking them up by name. If the default AMI isn't sufficient, you can use your own AMIs, but they need to be based on the stack's default AMI.
-
### OS type {#os-type}
- **Parameter name**: `SEMAPHORE_AGENT_OS`
-The OS type for agents.
+The OS type for agents.
-Possible values:
+Possible values:
-- `ubuntu-focal`
+- `ubuntu-focal`
- `windows`
-
### Architecture type {#architecture-type}
- **Parameter name**: `SEMAPHORE_AGENT_ARCH`
The arch type for agents. Possible values:
-- `x86_64`
+- `x86_64`
- `arm64`
-
### Availability zones {#availability-zones}
- **Parameter name**: `SEMAPHORE_AGENT_AZS`
A comma-separated list of availability zones to use for the auto-scaling group.
-
### Managed policy names {#managed-policy-names}
- **Parameter name**: `SEMAPHORE_AGENT_MANAGED_POLICY_NAMES`
A comma-separated list of custom IAM policy names to attach to the instance profile role.
-
### ASG metrics {#asg-metrics}
- **Parameter name**: `SEMAPHORE_AGENT_ASG_METRICS`
A comma-separated list of ASG metrics to collect. Available metrics can be found on the [AWS CDK Documentation](https://docs.aws.amazon.com/cdk/api/v2/docs/aws-cdk-lib.aws_autoscaling.CfnAutoScalingGroup.MetricsCollectionProperty.html).
-
### Volume name {#volume-name}
- **Parameter name**: `SEMAPHORE_AGENT_VOLUME_NAME`
The EBS volume's device name to use for a custom volume. If this is not set, the EC2 instances are assigned the EBS volume based on the AMI.
-
### Volume type {#volume-type}
- **Parameter name**: `SEMAPHORE_AGENT_VOLUME_TYPE`
@@ -267,7 +242,6 @@ The EBS volume's device name to use for a custom volume. If this is not set, the
The EBS volume's type, when using [`SEMAPHORE_AGENT_VOLUME_NAME`](#volume-name).
-
### Volume size {#volume-size}
- **Parameter name**: `SEMAPHORE_AGENT_VOLUME_SIZE`
@@ -275,47 +249,41 @@ The EBS volume's type, when using [`SEMAPHORE_AGENT_VOLUME_NAME`](#volume-name).
The EBS volume's size, in GB, when using [`SEMAPHORE_AGENT_VOLUME_NAME`](#volume-name).
-
### License configuration ARN {#license-configuration-arn}
- **Parameter name**: `SEMAPHORE_AGENT_LICENSE_CONFIGURATION_ARN`
The license configuration ARN is associated with the AMI used by the stack.
-
### Mac family {#mac-family}
- **Parameter name**: `SEMAPHORE_AGENT_MAC_FAMILY`
The EC2 Mac instance family to use. Possible values: `mac1` and `mac2`.
-
### Mac dedicated hosts {#mac-dedicated-hosts}
- **Parameter name**: `SEMAPHORE_AGENT_MAC_DEDICATED_HOSTS`
A comma-separated list of dedicated host IDs to include in the host resource group.
-
### Tags {#tags}
- **Parameter name**: `SEMAPHORE_AGENT_TAGS`
-A comma-separated list of key-value pairs of tags to be added to all resources created for the stack.
+A comma-separated list of key-value pairs of tags to be added to all resources created for the stack.
For example: `Name:Something,Category:SomethingElse`.
-
### Use pre-signed URL {#use-pre-signed-url}
- **Parameter name**: `SEMAPHORE_AGENT_USE_PRE_SIGNED_URL`
- **default value**: `false`
-If true, use a pre-signed AWS STS GetCallerIdentity URL for agent registration.
+If true, use a pre-signed AWS STS GetCallerIdentity URL for agent registration.
See [agent type configuration](../using-semaphore/self-hosted-install#name-sts) to learn how to configure this security feature.
-
## See also
- [How to use self-hosted agents](../using-semaphore/self-hosted)
diff --git a/docs/docs/reference/env-vars.md b/docs/docs/reference/env-vars.md
index c6c555a24..ba55d6e2f 100644
--- a/docs/docs/reference/env-vars.md
+++ b/docs/docs/reference/env-vars.md
@@ -229,7 +229,6 @@ Holds `true` if the workflow was triggered using the [Semaphore API](../referenc
-
The variable is `false` if the workflow is triggered by a Git push, pull request, or via [Tasks](../using-semaphore/tasks).
### Workflow is triggered by hook {#workflow-triggered-by-hook}
@@ -324,7 +323,6 @@ Used only when running [`checkout --use-cache`](./toolbox#cache-full-clone). It
- **Environment variable**: `SEMAPHORE_GIT_CACHE_KEEP`
- **Example**: `1`
-
Used only when running [`checkout --use-cache`](./toolbox#cache-full-clone). It sets how many copies of the repository should be maintained in the Semaphore Git Cache. Older copies are automatically deleted.
The default value is 0, which means that Semaphore maintains only 1 copy of the repository. If you set it to 1, Semaphore will maintain 2 copies of the repository.
@@ -404,7 +402,6 @@ Present only for builds where `SEMAPHORE_GIT_REF_TYPE=pull-request`
The number of the Pull Request.
-
:::note
Present only for builds where `SEMAPHORE_GIT_REF_TYPE=pull-request`
@@ -446,7 +443,6 @@ Present only for builds where `SEMAPHORE_GIT_REF_TYPE=pull-request`
The name of the directory that contains the files of the repository linked to the current Semaphore project.
-
### Repository name {#git-repo-name}
- **Environment variable**: `SEMAPHORE_GIT_REPO_NAME`
@@ -543,7 +539,7 @@ The total duration of the pipeline including queuing time expressed in seconds.
## Cache variables {#cache-variables}
-These variables are used to access the [cache](../using-semaphore/optimization/cache).
+These variables are used to access the [cache](../using-semaphore/cache).
### Cache URL {#cache-url}
@@ -568,7 +564,7 @@ The path in the server to the SSH key file to access the cache server.
## Semaphore Docker registry variables {#registry-variables}
-These variables can be used to access the [Semaphore Docker registry](../using-semaphore/optimization/docker).
+These variables can be used to access the [Semaphore Docker registry](../using-semaphore/containers/docker).
### Username {#registry-username}
@@ -615,7 +611,7 @@ The path to the log file during the initialization job.
## See also
-- [Docker optimization](../using-semaphore/optimization/docker)
+- [Docker containers](../using-semaphore/containers/docker)
- [How to configure jobs](../using-semaphore/jobs)
- [Semaphore pipelines](../using-semaphore/pipelines)
- [Pipeline YAML reference](./pipeline-yaml)
diff --git a/docs/docs/reference/toolbox.md b/docs/docs/reference/toolbox.md
index 09adc0871..65d13f052 100644
--- a/docs/docs/reference/toolbox.md
+++ b/docs/docs/reference/toolbox.md
@@ -41,11 +41,11 @@ The available namespaces are:
See [artifact namespaces](../using-semaphore/artifacts#namespaces) for more details.
The optional flags are:
+
- `--force` or `-f`: overwrite file or directory if already exists
- `--destination` or `-d`: pull or yank the file into a different path
- `--verbose` or `-v`: verbose logging
-
### Examples
```shell title="Artifact usage examples"
@@ -83,7 +83,7 @@ The uploaded files must meet the following requirements:
- File names cannot contain non-URI-encodable characters like `{, }, |, \, ^, ~, [, ]`
- Files cannot be named `.` or `...`
-You can workaround these limitations by compressing the file with tar before pushing it to the artifact store. For example:
+You can work around these limitations by compressing the file with tar before pushing it to the artifact store. For example:
```shell title="Creating a tarball before storing the artifact"
tar -czvf example.tar.gz ~/example
@@ -99,7 +99,7 @@ tar -xzf example.tar.gz
## cache {#cache}
-The cache tool lets you interact with your project's [Semaphore cache](../using-semaphore/optimization/cache).
+The cache tool lets you interact with your project's [Semaphore cache](../using-semaphore/cache).
The syntax is:
@@ -160,7 +160,6 @@ The supported options for `--cleanup-by` are:
- `STORE_TIME`: (default) delete oldest files first
- `ACCESS_TIME`: delete oldest accessed files first
-
### Environment variables {#cache-env-vars}
The cache tool depends on the following environment variables:
@@ -222,7 +221,7 @@ The checkout command uses the following environment variables.
## checksum {#checksum}
-This tool takes a single argument which is the file to checksum. It outputs the MD5 checksum of the file's contents. This tool is useful for tagging [artifacts](../using-semaphore/artifacts) or generating [cache keys](../using-semaphore/optimization/cache).
+This tool takes a single argument which is the file to checksum. It outputs the MD5 checksum of the file's contents. This tool is useful for tagging [artifacts](../using-semaphore/artifacts) or generating [cache keys](../using-semaphore/cache).
The syntax is:
@@ -241,7 +240,6 @@ $ checksum package-lock.json
The `install-package` tool is used to manage Ubuntu packages you may need for your jobs. It downloads and caches packages in a way that can be quickly reinstalled over and over again in different jobs. This is a convenient tool, you can still use `sudo` to install packages using the system's package manager.
-
The syntax is:
```shell title="install-package syntax"
@@ -254,8 +252,8 @@ Where command is one of the following:
- `update`: Retrieve new lists of packages
- `upgrade`: Perform an upgrade
-- `install`: Install new packages
-- `reinstall`: Reinstall packages
+- `install`: Install new packages
+- `reinstall`: Reinstall packages
- `remove`: Remove packages
- `purge`: Remove packages and config files
- `autoremove`: Remove automatically all unused packages
@@ -276,7 +274,7 @@ You can supply multiple packages with their versions in the same invocation:
install-package install mongodb-clients=3.6.8 mysql-client=8.0.36-0ubuntu0.20.04.1
```
-The tool integrates with the [Semaphore cache](../using-semaphore/optimization/cache) to save, retrieve, and update the Deb packages as needed.
+The tool integrates with the [Semaphore cache](../using-semaphore/cache) to save, retrieve, and update the Deb packages as needed.
You can reinstall the packages in a different job within the same project with:
@@ -363,6 +361,7 @@ $ sem-context get ReleaseVersion
```
Exit status codes:
+
- 0: key retrieved successfully
- 1: key not found
- 2: connection to the artifacts server failed
@@ -377,6 +376,7 @@ sem-context delete ReleaseVersion
```
Exit status codes:
+
- 0: key deleted successfully
- 1: key not found
- 2: connection to the artifacts server failed
@@ -384,7 +384,7 @@ Exit status codes:
## sem-service {#sem-service}
-The `sem-service` tool manages databases and other useful services in Ubuntu-based environments.
+The `sem-service` tool manages databases and other useful services in Ubuntu-based environments.
:::info
@@ -436,10 +436,9 @@ When starting `mysql` or `postgres` services you can provide the following optio
- On `postgres` defaults to a blank string
- `--db=` database name to create and default to
-
### Container registry images {#sem-service-container}
-The `sem-service` tool pulls images from the [Semaphore Container Registry](../using-semaphore/optimization/container-registry).
+The `sem-service` tool pulls images from the [Semaphore Container Registry](../using-semaphore/containers/container-registry).
## sem-version {#sem-version}
@@ -529,7 +528,7 @@ The test-results CLI is open-sourced and available on [semaphoreci/test-results]
### Merging test results {#test-result-merge}
-To use the test result feature you must add the following command at the end of every test job.
+To use the test result feature you must add the following command at the end of every test job.
The syntax is:
@@ -593,5 +592,5 @@ kubectl apply -f deployment.yml
## See also
- [Semaphore command line tool reference](./semaphore-cli)
-- [Working with Docker](../using-semaphore/optimization/docker)
+- [Working with Docker](../using-semaphore/containers/docker)
- [Environment variable reference](./env-vars)
diff --git a/docs/docs/using-semaphore/optimization/cache.md b/docs/docs/using-semaphore/cache.md
similarity index 82%
rename from docs/docs/using-semaphore/optimization/cache.md
rename to docs/docs/using-semaphore/cache.md
index 03c596e28..6e83a3ba7 100644
--- a/docs/docs/using-semaphore/optimization/cache.md
+++ b/docs/docs/using-semaphore/cache.md
@@ -17,18 +17,18 @@ The cache provides fast and convenient storage for your jobs. Use the cache to s
## Overview
-Semaphore provides a [cache tool](../../reference/toolbox#cache) in all jobs to reuse files your project depends on but are not part of the repository.
+Semaphore provides a [cache tool](../reference/toolbox#cache) in all jobs to reuse files your project depends on but are not part of the repository.
Typical uses of the cache are:
- to propagate a file from one block to the next
- to reuse dependencies that are normally downloaded from the internet, like NPM modules
-Semaphore creates a separate cache for every [project](../projects). For jobs running on Semaphore Cloud, the total cache size is 9.6GB. Older files are automatically deleted after 30 days or when the cache fills up.
+Semaphore creates a separate cache for every [project](./projects). For jobs running on Semaphore Cloud, the total cache size is 9.6GB. Older files are automatically deleted after 30 days or when the cache fills up.
:::note
-Users running Semaphore On-Premise or [self-hosted agents](../self-hosted) need to [configure a custom backend](#custom-backends) to use the cache.
+Users running Semaphore On-Premise or [self-hosted agents](./self-hosted) need to [configure a custom backend](#custom-backends) to use the cache.
:::
@@ -53,7 +53,6 @@ The cache tools recognize the following languages and dependency managers. See [
When using one of the supported dependency managers:
-
1. Run `cache restore` to restore the latest files from the cache
@@ -73,6 +72,7 @@ cache restore
npm install
cache store
```
+
@@ -92,12 +92,13 @@ cache restore
bundle install --path vendor/bundle
cache store
```
+
:::warning
-Avoid using `cache store` in the [prologue](../pipelines#prologue) as this can cause file corruption due to multiple jobs trying to write the same key simultaneously. Instead, use `cache store` in the individual job commands.
+Avoid using `cache store` in the [prologue](./pipelines#prologue) as this can cause file corruption due to multiple jobs trying to write the same key simultaneously. Instead, use `cache store` in the individual job commands.
:::
@@ -121,7 +122,7 @@ cache restore
Where keys are again a comma-separated lists of keys. Semaphore searches for the keys in the order provided and restores the first match to the working directory.
-### Using multiple keys
+### Using multiple keys
It's recommended to use multiple keys to increase the chances of matching a key. The following example uses two keys:
@@ -183,7 +184,7 @@ cache clear
## Custom backends {#custom-backends}
-The cache storage is available for all Semaphore Cloud users. If you're running a different version such as On-Premise or [self-hosted agents](../self-hosted), the cache might not be available.
+The cache storage is available for all Semaphore Cloud users. If you're running a different version such as On-Premise or [self-hosted agents](./self-hosted), the cache might not be available.
For these cases, you need to provide storage. This section explains how to configure custom storage in other platforms.
@@ -195,11 +196,11 @@ To provision the storage, follow these steps:
-1. Create and configure an S3 bucket as explained in [How to set up caching on self-hosted agents](../self-hosted-configure#aws-cache)
-2. Configure the following [environment variables](../jobs#environment-variables) in your job
+1. Create and configure an S3 bucket as explained in [How to set up caching on self-hosted agents](./self-hosted-configure#aws-cache)
+2. Configure the following [environment variables](./jobs#environment-variables) in your job
- `SEMAPHORE_CACHE_BACKEND` set its value to "s3"
- `SEMAPHORE_CACHE_S3_BUCKET` set its value to the S3 bucket name
-3. Create a [secret](../secrets) with the following credentials
+3. Create a [secret](./secrets) with the following credentials
- `AWS_ACCESS_KEY_ID`: the key for an IAM account with access to the bucket
- `AWS_SECRET_ACCESS_KEY`: the secret key for the account
- `AWS_DEFAULT_REGION`: the region where the bucket is located
@@ -215,7 +216,7 @@ To provision storage, follow these steps
1. [Create a Google Cloud Bucket](https://cloud.google.com/storage/docs/creating-buckets)
-2. Configure the following [environment variables](../jobs#environment-variables) in your job
+2. Configure the following [environment variables](./jobs#environment-variables) in your job
- `SEMAPHORE_CACHE_BACKEND` set it to "gcs"
- `SEMAPHORE_CACHE_GCS_BUCKET` set it to your Google Cloud bucket name
3. Provide the Google Cloud Application Default Credentials. See [How Application Default Credentials work](https://cloud.google.com/docs/authentication/application-default-credentials) to learn more
@@ -226,16 +227,16 @@ To provision storage, follow these steps
You can provide an SFTP server to provide custom storage for the cache.
-To use SFTP, define the following [environment variables](../jobs#environment-variables) in your job:
+To use SFTP, define the following [environment variables](./jobs#environment-variables) in your job:
- `SEMAPHORE_CACHE_BACKEND`: set its value to "sftp"
- `SEMAPHORE_CACHE_URL`: the IP address and port number of the SFTP server, e.g. "1.2.3.4:29920"
- `SEMAPHORE_CACHE_USERNAME`: the username used to connect to the server
- `SEMAPHORE_CACHE_PRIVATE_KEY_PATH`: the path of the private SSH key used to connect to the SFTP server
-In addition, you must create a [secret](../secrets) to store the private SSH key and expose it inside the job.
+In addition, you must create a [secret](./secrets) to store the private SSH key and expose it inside the job.
## See also
-- [Using cache in jobs](../jobs#cache)
-- [Cache toolbox reference](../../reference/toolbox#cache)
+- [Using cache in jobs](./jobs#cache)
+- [Cache toolbox reference](../reference/toolbox#cache)
diff --git a/docs/docs/using-semaphore/optimization/_category_.json b/docs/docs/using-semaphore/containers/_category_.json
similarity index 100%
rename from docs/docs/using-semaphore/optimization/_category_.json
rename to docs/docs/using-semaphore/containers/_category_.json
diff --git a/docs/docs/using-semaphore/optimization/container-registry.md b/docs/docs/using-semaphore/containers/container-registry.md
similarity index 100%
rename from docs/docs/using-semaphore/optimization/container-registry.md
rename to docs/docs/using-semaphore/containers/container-registry.md
diff --git a/docs/docs/using-semaphore/optimization/docker.md b/docs/docs/using-semaphore/containers/docker.md
similarity index 99%
rename from docs/docs/using-semaphore/optimization/docker.md
rename to docs/docs/using-semaphore/containers/docker.md
index 6d779a7d9..ef1ce055b 100644
--- a/docs/docs/using-semaphore/optimization/docker.md
+++ b/docs/docs/using-semaphore/containers/docker.md
@@ -3,7 +3,7 @@ description: Build and deploy Docker containers
sidebar_position: 5
---
-# Working with Docker
+# Building Docker Images
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
diff --git a/docs/docs/using-semaphore/optimization/img/add-metric.jpg b/docs/docs/using-semaphore/containers/img/add-metric.jpg
similarity index 100%
rename from docs/docs/using-semaphore/optimization/img/add-metric.jpg
rename to docs/docs/using-semaphore/containers/img/add-metric.jpg
diff --git a/docs/docs/using-semaphore/optimization/img/change-skip-vs-run.jpg b/docs/docs/using-semaphore/containers/img/change-skip-vs-run.jpg
similarity index 100%
rename from docs/docs/using-semaphore/optimization/img/change-skip-vs-run.jpg
rename to docs/docs/using-semaphore/containers/img/change-skip-vs-run.jpg
diff --git a/docs/docs/using-semaphore/optimization/img/custom-create.jpg b/docs/docs/using-semaphore/containers/img/custom-create.jpg
similarity index 100%
rename from docs/docs/using-semaphore/optimization/img/custom-create.jpg
rename to docs/docs/using-semaphore/containers/img/custom-create.jpg
diff --git a/docs/docs/using-semaphore/optimization/img/dockerhub-secret.jpg b/docs/docs/using-semaphore/containers/img/dockerhub-secret.jpg
similarity index 100%
rename from docs/docs/using-semaphore/optimization/img/dockerhub-secret.jpg
rename to docs/docs/using-semaphore/containers/img/dockerhub-secret.jpg
diff --git a/docs/docs/using-semaphore/optimization/img/freq-cd.jpg b/docs/docs/using-semaphore/containers/img/freq-cd.jpg
similarity index 100%
rename from docs/docs/using-semaphore/optimization/img/freq-cd.jpg
rename to docs/docs/using-semaphore/containers/img/freq-cd.jpg
diff --git a/docs/docs/using-semaphore/optimization/img/freq-ci.jpg b/docs/docs/using-semaphore/containers/img/freq-ci.jpg
similarity index 100%
rename from docs/docs/using-semaphore/optimization/img/freq-ci.jpg
rename to docs/docs/using-semaphore/containers/img/freq-ci.jpg
diff --git a/docs/docs/using-semaphore/optimization/img/insights-settings.jpg b/docs/docs/using-semaphore/containers/img/insights-settings.jpg
similarity index 100%
rename from docs/docs/using-semaphore/optimization/img/insights-settings.jpg
rename to docs/docs/using-semaphore/containers/img/insights-settings.jpg
diff --git a/docs/docs/using-semaphore/optimization/img/insights-tab.jpg b/docs/docs/using-semaphore/containers/img/insights-tab.jpg
similarity index 100%
rename from docs/docs/using-semaphore/optimization/img/insights-tab.jpg
rename to docs/docs/using-semaphore/containers/img/insights-tab.jpg
diff --git a/docs/docs/using-semaphore/optimization/img/new-metric.jpg b/docs/docs/using-semaphore/containers/img/new-metric.jpg
similarity index 100%
rename from docs/docs/using-semaphore/optimization/img/new-metric.jpg
rename to docs/docs/using-semaphore/containers/img/new-metric.jpg
diff --git a/docs/docs/using-semaphore/optimization/img/org-health-location.jpg b/docs/docs/using-semaphore/containers/img/org-health-location.jpg
similarity index 100%
rename from docs/docs/using-semaphore/optimization/img/org-health-location.jpg
rename to docs/docs/using-semaphore/containers/img/org-health-location.jpg
diff --git a/docs/docs/using-semaphore/optimization/img/org-health-overview.jpg b/docs/docs/using-semaphore/containers/img/org-health-overview.jpg
similarity index 100%
rename from docs/docs/using-semaphore/optimization/img/org-health-overview.jpg
rename to docs/docs/using-semaphore/containers/img/org-health-overview.jpg
diff --git a/docs/docs/using-semaphore/optimization/img/perf-cd.jpg b/docs/docs/using-semaphore/containers/img/perf-cd.jpg
similarity index 100%
rename from docs/docs/using-semaphore/optimization/img/perf-cd.jpg
rename to docs/docs/using-semaphore/containers/img/perf-cd.jpg
diff --git a/docs/docs/using-semaphore/optimization/img/perf-ci.jpg b/docs/docs/using-semaphore/containers/img/perf-ci.jpg
similarity index 100%
rename from docs/docs/using-semaphore/optimization/img/perf-ci.jpg
rename to docs/docs/using-semaphore/containers/img/perf-ci.jpg
diff --git a/docs/docs/using-semaphore/optimization/img/rel-cd.jpg b/docs/docs/using-semaphore/containers/img/rel-cd.jpg
similarity index 100%
rename from docs/docs/using-semaphore/optimization/img/rel-cd.jpg
rename to docs/docs/using-semaphore/containers/img/rel-cd.jpg
diff --git a/docs/docs/using-semaphore/optimization/img/rel-ci.jpg b/docs/docs/using-semaphore/containers/img/rel-ci.jpg
similarity index 100%
rename from docs/docs/using-semaphore/optimization/img/rel-ci.jpg
rename to docs/docs/using-semaphore/containers/img/rel-ci.jpg
diff --git a/docs/docs/using-semaphore/optimization/img/select-date-range.jpg b/docs/docs/using-semaphore/containers/img/select-date-range.jpg
similarity index 100%
rename from docs/docs/using-semaphore/optimization/img/select-date-range.jpg
rename to docs/docs/using-semaphore/containers/img/select-date-range.jpg
diff --git a/docs/docs/using-semaphore/optimization/img/workflow-monorepo.jpg b/docs/docs/using-semaphore/containers/img/workflow-monorepo.jpg
similarity index 100%
rename from docs/docs/using-semaphore/optimization/img/workflow-monorepo.jpg
rename to docs/docs/using-semaphore/containers/img/workflow-monorepo.jpg
diff --git a/docs/docs/using-semaphore/languages/android.md b/docs/docs/using-semaphore/languages/android.md
index 7cea92bbe..cb772eff7 100644
--- a/docs/docs/using-semaphore/languages/android.md
+++ b/docs/docs/using-semaphore/languages/android.md
@@ -80,7 +80,6 @@ To configure your Android container using YAML, follow these steps:
-
@@ -184,7 +183,7 @@ blocks:
## Building custom images {#custom-images}
-You can find the pre-build Android images in the [Semaphore Container Registry](../optimization/container-registry).
+You can find the pre-build Android images in the [Semaphore Container Registry](../containers/container-registry).
The image definition and Dockerfiles for these images can be found in the repository [semaphoreci/docker-images](https://github.com/semaphoreci/docker-images). Clone this repository to build your own image variants.
diff --git a/docs/docs/using-semaphore/languages/elixir-erlang.md b/docs/docs/using-semaphore/languages/elixir-erlang.md
index 7229fe25b..2b6313e06 100644
--- a/docs/docs/using-semaphore/languages/elixir-erlang.md
+++ b/docs/docs/using-semaphore/languages/elixir-erlang.md
@@ -29,7 +29,7 @@ sem-version elixir 1.16
### Using Docker containers {#containers}
-Semaphore distributes the pre-built `semaphoreci:elixir` image on the [Semaphore Container Registry](../../using-semaphore/optimization/container-registry#elixir). Find Dockerfiles to build your custom images in the [semaphoreci/docker-images](https://github.com/semaphoreci/docker-images) repository.
+Semaphore distributes the pre-built `semaphoreci:elixir` image on the [Semaphore Container Registry](../../using-semaphore/containers/container-registry#elixir). Find Dockerfiles to build your custom images in the [semaphoreci/docker-images](https://github.com/semaphoreci/docker-images) repository.
## How to cache Elixir dependencies {#cache}
@@ -55,14 +55,13 @@ checkout
cache restore
```
-
## How to set up test reports {#test-results1}
This section explains how to set up [test reports](../../using-semaphore/tests/test-reports) (and flaky tests) for Elixir and mix.
-1. Add junit-formatter to your `mix.exs` dependencies
+1. Add junit-formatter to your `mix.exs` dependencies
```elixir
defp deps do
@@ -129,7 +128,6 @@ This section explains how to set up [test reports](../../using-semaphore/tests/t
-
## How to change Erlang versions {#erlang-version}
Elixir is available on Linux [Ubuntu](../../reference/os-ubuntu) machines and [Docker Environments](../../using-semaphore/pipelines#docker-environments).
@@ -164,4 +162,3 @@ sem-version erlang 25
erlc hello.erl
erl -noshell -s hello helloWorld -s init stop
```
-
diff --git a/docs/docs/using-semaphore/languages/go.md b/docs/docs/using-semaphore/languages/go.md
index efa1b10b7..e0655e561 100644
--- a/docs/docs/using-semaphore/languages/go.md
+++ b/docs/docs/using-semaphore/languages/go.md
@@ -42,7 +42,7 @@ go version
The `sem-version` tool does not work on Docker containers. You must use a pre-built Docker image with the language versions you need and run the job using [Docker environments](../../using-semaphore/pipelines#docker-environments).
-You can use the pre-build [Go images](../../using-semaphore/optimization/container-registry#go) or build your own. Find Dockerfiles to build your custom images in the [semaphoreci/docker-images](https://github.com/semaphoreci/docker-images) repository.
+You can use the pre-build [Go images](../../using-semaphore/containers/container-registry#go) or build your own. Find Dockerfiles to build your custom images in the [semaphoreci/docker-images](https://github.com/semaphoreci/docker-images) repository.
## How to use GOPATH {#gopath}
diff --git a/docs/docs/using-semaphore/languages/javascript.md b/docs/docs/using-semaphore/languages/javascript.md
index 0d17ad0e4..41b64212c 100644
--- a/docs/docs/using-semaphore/languages/javascript.md
+++ b/docs/docs/using-semaphore/languages/javascript.md
@@ -19,7 +19,6 @@ Node.js is pre-installed in the Linux and macOS Semaphore environments. You can
You may also use Docker images.
-
## How to select Node versions {#switch}
Change the active Node.js versions on Linux and macOS with [sem-version](../../reference/toolbox#sem-version).
@@ -45,8 +44,7 @@ sem-version node --lts carbon
The `sem-version` tool does not work on Docker containers. You must use a pre-built Docker image with the language versions you need and run the job using [Docker environments](../../using-semaphore/pipelines#docker-environments).
-You can use the pre-built [Node images](../../using-semaphore/optimization/container-registry#node) or build your own. Find Dockerfiles to build your custom images in the [semaphoreci/docker-images](https://github.com/semaphoreci/docker-images) repository.
-
+You can use the pre-built [Node images](../../using-semaphore/containers/container-registry#node) or build your own. Find Dockerfiles to build your custom images in the [semaphoreci/docker-images](https://github.com/semaphoreci/docker-images) repository.
## How to cache Node dependencies {#cache}
@@ -84,7 +82,7 @@ To perform semantic releases, follow these steps
-1. Create a [Secret] with your GitHub Token.
+1. Create a [Secret] with your GitHub Token.
- The token should have write permissions on the repository
- The secret name should be `semantic-release-credentials`
@@ -153,6 +151,7 @@ This section explains how to set up [test reports](../../using-semaphore/tests/t
```shell
test-results publish junit.xml
```
+
diff --git a/docs/docs/using-semaphore/languages/php.md b/docs/docs/using-semaphore/languages/php.md
index b8999883b..56c92da95 100644
--- a/docs/docs/using-semaphore/languages/php.md
+++ b/docs/docs/using-semaphore/languages/php.md
@@ -35,7 +35,7 @@ phpbrew --no-progress install 8.2.20
The `sem-version` tool does not work on Docker containers. You must use a pre-built Docker image with the language versions you need and run the job using [Docker environments](../../using-semaphore/pipelines#docker-environments).
-You can use the pre-build [PHP images](../../using-semaphore/optimization/container-registry#php) or build your own. Find Dockerfiles to build your custom images in the [semaphoreci/docker-images](https://github.com/semaphoreci/docker-images) repository.
+You can use the pre-build [PHP images](../../using-semaphore/containers/container-registry#php) or build your own. Find Dockerfiles to build your custom images in the [semaphoreci/docker-images](https://github.com/semaphoreci/docker-images) repository.
## How to cache dependencies {#caching}
@@ -108,7 +108,3 @@ This section explains how to set up [test reports](../../using-semaphore/tests/t
-
-
-
-
diff --git a/docs/docs/using-semaphore/languages/python.md b/docs/docs/using-semaphore/languages/python.md
index 2edba5bd8..f7841e022 100644
--- a/docs/docs/using-semaphore/languages/python.md
+++ b/docs/docs/using-semaphore/languages/python.md
@@ -29,8 +29,7 @@ sem-version python 3.12
The `sem-version` tool does not work on Docker containers. You must use a pre-built Docker image with the language versions you need and run the job using [Docker environments](../../using-semaphore/pipelines#docker-environments).
-You can use the pre-build [Python images](../../using-semaphore/optimization/container-registry#python) or build your own. Find Dockerfiles to build your custom images in the [semaphoreci/docker-images](https://github.com/semaphoreci/docker-images) repository.
-
+You can use the pre-build [Python images](../../using-semaphore/containers/container-registry#python) or build your own. Find Dockerfiles to build your custom images in the [semaphoreci/docker-images](https://github.com/semaphoreci/docker-images) repository.
## How to cache packages {#caching}
@@ -103,4 +102,3 @@ This section explains how to set up [test reports](../../using-semaphore/tests/t
-
diff --git a/docs/docs/using-semaphore/languages/ruby.md b/docs/docs/using-semaphore/languages/ruby.md
index 7606548bd..6d2c2d6cf 100644
--- a/docs/docs/using-semaphore/languages/ruby.md
+++ b/docs/docs/using-semaphore/languages/ruby.md
@@ -35,7 +35,7 @@ rbenv install --list
The `sem-version` tool does not work on Docker containers. You must use a pre-built Docker image with the language versions you need and run the job using [Docker environments](../../using-semaphore/pipelines#docker-environments).
-You can use the pre-build [Ruby images](../../using-semaphore/optimization/container-registry#ruby) or build your own. Find Dockerfiles to build your custom images in the [semaphoreci/docker-images](https://github.com/semaphoreci/docker-images) repository.
+You can use the pre-build [Ruby images](../../using-semaphore/containers/container-registry#ruby) or build your own. Find Dockerfiles to build your custom images in the [semaphoreci/docker-images](https://github.com/semaphoreci/docker-images) repository.
## How to cache Gems {#caching}
@@ -85,6 +85,7 @@ This section explains how to set up [test reports](../../using-semaphore/tests/t
--out junit.xml
--format documentation
```
+
- Or, changing the `rspec` invocation
```shell
@@ -134,7 +135,7 @@ This section explains how to set up [test reports](../../using-semaphore/tests/t
## How to parallelize tests
-You can run RSpec and Cucumber tests in parallel automatically using [job parallelism](../../using-semaphore/jobs#job-parallelism).
+You can run RSpec and Cucumber tests in parallel automatically using [job parallelism](../../using-semaphore/jobs#job-parallelism).
For RSpec, follow these steps:
@@ -163,5 +164,3 @@ On Cucumber, we use `cucumber_booster` instead:
```
-
-
diff --git a/docs/docs/using-semaphore/languages/rust.md b/docs/docs/using-semaphore/languages/rust.md
index 50e62dcdb..aa86b78cc 100644
--- a/docs/docs/using-semaphore/languages/rust.md
+++ b/docs/docs/using-semaphore/languages/rust.md
@@ -19,7 +19,7 @@ Rust is not preinstalled on Linux Semaphore environments. You must use [Docker E
## How to compile Rust code {#compile}
-You may use one of the pre-built [Rust images](../../using-semaphore/optimization/container-registry#rust) to run the jobs in a Docker-based environment.
+You may use one of the pre-built [Rust images](../../using-semaphore/containers/container-registry#rust) to run the jobs in a Docker-based environment.
@@ -27,7 +27,7 @@ You may use one of the pre-built [Rust images](../../using-semaphore/optimizatio
2. Select the pipeline
3. Select **Docker Containers** in the **Environment Type**
4. Select one of the available machines
-5. Type the name of a [Rust image](../../using-semaphore/optimization/container-registry#rust), e.g. `semaphoreci/rust:1.75`
+5. Type the name of a [Rust image](../../using-semaphore/containers/container-registry#rust), e.g. `semaphoreci/rust:1.75`

diff --git a/docs/docs/using-semaphore/monorepo.md b/docs/docs/using-semaphore/monorepo.md
index ce28bb6b0..e78331a9c 100644
--- a/docs/docs/using-semaphore/monorepo.md
+++ b/docs/docs/using-semaphore/monorepo.md
@@ -21,7 +21,7 @@ A [monorepo](https://semaphoreci.com/blog/what-is-monorepo) is a repository that
Semaphore can detect changes between commits, allowing you to set up fine-grained jobs that only run when the underlying code changes. Skipping jobs covering unchanged code can greatly speed testing and reduce costs on big codebases.
-:::note
+:::note
The `change_in` expressions are evaluated in the [pipeline initialization job](./pipelines#init-job).
@@ -129,7 +129,6 @@ To enable change detection, follow these steps.
-
1. Open the **Workflow Editor** for your Semaphore project
@@ -158,7 +157,7 @@ Conditions are ignored by default when you change the pipeline file. So, the ver
3. Add `run.when` under the block
4. Type the [change condition](#condition), e.g. `change_in('/frontend', {default_branch: 'main'})`
5. Repeat the process for the other blocks that need conditions
-6. Push the pipeline file to the remote repository
+6. Push the pipeline file to the remote repository
@@ -228,7 +227,7 @@ All paths are relative to the root of the repository.
You can use change detection in [promotions](./pipelines#connecting-pipelines). This is useful when you have continuous delivery or deployment pipelines that only need to run when certain folders or files in your project change.
-With change detection, you can set up smarter deployment pipelines. Imagine you have web and mobile apps in the same repository. The process for deploying each component is different: for a web app you might use a [Docker container](./optimization/docker), the Android app is deployed to the Google Store, while the iOS version goes to Apple.
+With change detection, you can set up smarter deployment pipelines. Imagine you have web and mobile apps in the same repository. The process for deploying each component is different: for a web app you might use a [Docker container](./containers/docker), the Android app is deployed to the Google Store, while the iOS version goes to Apple.
With change detection on promotions, you can activate the correct deployment pipeline based on what component has changed in the last push.
@@ -261,7 +260,7 @@ To use change detection, follow these steps:
3. Add `auto_promote.when` under the block
4. Type the [change condition](#condition), e.g. `change_in('/frontend', {default_branch: 'main'})`
5. Repeat the process for the other promotions that need conditions
-6. Push the pipeline file to the remote repository
+6. Push the pipeline file to the remote repository
@@ -329,7 +328,6 @@ Conditions are ignored by default when you change the pipeline file. So, the ver
:::
-
## Conditions options {#condition}
This section describes the available options for change detection. Note that the conditions are not limited to `change_in`. See the [conditions DSL reference](../reference/conditions-dsl) to view all available conditions.
@@ -431,6 +429,7 @@ The `options` is an optional hashmap to change the change detection behavior. Fo
```text title="Using main instead of master"
change_in('/backend/', {default_branch: 'main'})
```
+
The most common options are:
The supported options are:
@@ -481,7 +480,7 @@ branch =~ '^hotfix/' and change_in('/backend/', {default_branch: 'main'})
## Demo project {#demo}
-This section showcases how to use `change_in` in a working demo project.
+This section showcases how to use `change_in` in a working demo project.
The project is a microservice application consisting of three components. Each component is located in a separate folder:
@@ -594,10 +593,10 @@ blocks:
commands:
- go test ./...
```
+
-
## See also
- [How to create pipelines](./pipelines)
diff --git a/docs/docs/using-semaphore/pipelines.md b/docs/docs/using-semaphore/pipelines.md
index 43ab3d8d0..9265e0c22 100644
--- a/docs/docs/using-semaphore/pipelines.md
+++ b/docs/docs/using-semaphore/pipelines.md
@@ -4,7 +4,6 @@ description: Connect blocks to get things done
# Pipelines
-
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
import Available from '@site/src/components/Available';
@@ -17,7 +16,7 @@ A pipeline is a group of connected blocks. This page explains what pipelines are
Pipelines are groups of blocks that can be connected via dependencies to define their execution order.
-Pipelines are also the *unit of configuration*. Each pipeline is encoded as separate a YAML file in the `.semaphore` folder.
+Pipelines are also the *unit of configuration*. Each pipeline is encoded as separate a YAML file in the `.semaphore` folder.
For reference, here is an example pipeline with its respective YAML.
@@ -163,7 +162,7 @@ For more information, see the [Promotions page](./promotions).
## Pipeline settings {#settings}
-Pipeline settings are applied to all its blocks. You can change pipeline settings with the editor or directly in the YAML.
+Pipeline settings are applied to all its blocks. You can change pipeline settings with the editor or directly in the YAML.
### Agents {#agents}
@@ -174,7 +173,7 @@ Semaphore Cloud provides the following agent types in x86 and ARM architectures:
- [Linux](../reference/machine-types#linux) Virtual Machines
- [Docker containers](#docker-environments) running on Linux
- [Apple macOS](../reference/machine-types#macos) Machines
-- [Windows](./self-hosted-install) Virtual Machines (only for self-hosted agents)
+- [Windows](./self-hosted-install) Virtual Machines (only for self-hosted agents)
You can add your own machines by [installing self-hosted agents](./self-hosted).
@@ -199,7 +198,6 @@ The available hardware changes depending on the type of environment you selected
-
1. Add the `agent` and `machine` keys
@@ -234,18 +232,17 @@ blocks:
-
### Docker containers {#docker-environments}
:::tip
-If you want to build and run Docker images in your jobs, check the [working with Docker page](./optimization/docker).
+If you want to build and run Docker images in your jobs, check the [working with Docker page](./containers/docker).
:::
Jobs can run inside Docker containers. This allows you to define a custom-build environment with pre-installed tools and dependencies needed for your project. You can enable this setting in the pipeline agent or in the [block agent override](./jobs#agent-override).
-You can run multiple containers at the same time. The job runs in the first container (called `main`) and attaches the other containers to the same network. This is similar to how containers inside a Kubernetes pod communicate.
+You can run multiple containers at the same time. The job runs in the first container (called `main`) and attaches the other containers to the same network. This is similar to how containers inside a Kubernetes pod communicate.
The network addresses of all containers are mapped to their names. Let's say you have two containers, "main" and "mysql", you can connect to the database from main with:
@@ -319,7 +316,7 @@ To use images in private repositories see [Private Docker Registries](#docker-pr
:::info
-Semaphore provides a [public Docker registry](./optimization/container-registry) for popular images.
+Semaphore provides a [public Docker registry](./containers/container-registry) for popular images.
:::
@@ -419,6 +416,7 @@ blocks:
commands:
- npm run build
```
+
@@ -462,6 +460,7 @@ blocks:
commands:
- npm run build
```
+
@@ -575,10 +574,10 @@ blocks:
commands:
- npm run build
```
+
-
### YAML file path {#yaml-path}
This option overrides the location of the pipeline file. This option is not available for the default pipeline (located at `.semaphore/semaphore.yml`).
@@ -678,7 +677,6 @@ after_pipeline:
-
## Private Docker Registries {#docker-private}
If the images you need for your [docker environment](#docker-environments) are not publicly available, you need to provide authentication credentials in your pipeline. This feature is only available by editing the pipeline YAML directly.
@@ -746,7 +744,6 @@ To pull images from a private AWS Elastic Container Registry (ECR), follow these
### Images in Google GCR {#docker-gcr}
-
To pull images from a private Google Container Registry (GCR), follow these steps:
@@ -841,14 +838,13 @@ To pull images from any arbitrary Docker registry, follow these steps:
-
## Pipeline queues {#pipeline-queues}
Queues allow you to control the order in which pipelines run. Semaphore pipelines can run sequentially or in parallel. For example, you can run CI pipelines in parallel on the main branch, while limiting deployment pipelines to run one at at time to prevent conflicts or race conditions.
### Default and named queues {#named-queues}
-Semaphore creates a queue for each Git push or pull requests. All workflows sharing the same commit SHA belong in the same queue and run sequentially.
+Semaphore creates a queue for each Git push or pull requests. All workflows sharing the same commit SHA belong in the same queue and run sequentially.
In other words, every time you re-run a workflow, create a pull request, push a tag, or start a [promotion](./pipelines#connecting-pipelines), the pipeline is added to the end of the same-commit queue.
@@ -964,7 +960,7 @@ blocks:
### Conditional queues {#conditional-queues}
-You can use conditional statements to assign pipelines based on parameters like branch name or tag name.
+You can use conditional statements to assign pipelines based on parameters like branch name or tag name.
The following example uses three rules:
@@ -1046,7 +1042,6 @@ To change the global time limit for all jobs in a pipeline, follow these steps:
-
1. Open the pipeline YAML
@@ -1090,13 +1085,13 @@ See [job time limit](./jobs#job-duration) to change the maximum duration for a s
You can work around the queue limit by assigning pipelines to [named queues](#named-queues).
-If you have a use case in which this limit is too constraining, please contact us at support@semaphoreci.com and we will try to work out a solution.
+If you have a use case in which this limit is too constraining, please contact us at `support@semaphoreci.com` and we will try to work out a solution.
### Max blocks per pipeline {#max-blocks}
There is a hard limit of 100 blocks per pipeline.
-This limit is not adjustable. If you have a use case in which this limit is too constraining, please contact us at support@semaphoreci.com and we will try to work out a solution.
+This limit is not adjustable. If you have a use case in which this limit is too constraining, please contact us at `support@semaphoreci.com` and we will try to work out a solution.
## See also
diff --git a/docs/docs/using-semaphore/recipes/img/infracost-key-secret.jpg b/docs/docs/using-semaphore/recipes/img/infracost-key-secret.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..f825c51b395296487cb6dd5a96ab5e53e2962297
GIT binary patch
literal 115039
zcmeFZ2Ut_-x-h!vRumCXsskek2uSZR4$VRlLqM7ggc=}$1VV>#9Hj{iBAvhpV}L|p
zkP<>w>0JyVLMYOu_ul@6;>_Oro_)?f_uPB_^E_w1JXz~qZ~xw}iQG^FF|bf4rmzY;2n_L&K?bO
z2jOA>V623Bpu7Np*#pE2`FMF8!p}hXJPLFWgmVsIr!R2jA#C>re)pNjA~HH5tj0))QL!^5&agj30fHi`PvP5WWDyISyzY$Pv$L6!Pv@yFHF5t?Nhl
zPqru!`eJwA+ZB3<2lX?Yb9T6Sq@O|3-`T_r#Ge51+87s8Ll9;KVMDYlyD
zXRSm0XC9RI-NU?m3*u1@2)*w?7|a(#f+I@zFb;?Ocy}}ieU1aeYiF06hq7Qy7~Xq1
zo7@0lFjfp>Fx$`mgLz?CbU<7`iVs7A1Nz>fZ-+7Lv3Jnc0bwxjjK>@h(8K(Kc*YAJ
zUf&-S$f~kvXgo(oRk*U|lGyE?uWTtljofpNI7W
zeT{H!>e}uXEnW`3USJ&uw2&TtC>JMZuS?g#Ukry!HxV$&tCwW1$;bo1;qT8O4*;wf
zfBs#e<81pA_izXR)N2k74l4e{-KqhALPfAkApMEEq67eJCjp>|1oK8=zUX5-yfJ}2
z#~HBoT?F_6A>ay_9~nRaPzAmNbbwoc5nuwC1NXuD>I~L%6yO5{0uO-4KqT-B5D%mP
z89)}01LOg(03uKcya5`3R^S~#0cc?D83$&7MPMD+p`)W?p*u~-PIr+`fbKG#IGqfg
z5}gK}4xK*TT{?5RALyLu5Om&jfpib)BI%yerO-X6%cU!%BhkH~Yo_a>>!ll}o1$B$
z+oorrKSh6@o{wIbUW#6c{yP0_dJ}qUdMA2MdVl(#>7(hB=rieG(wEY|p>L(9(0`<#
zp!QjY%VhCo4U`SxdWXNYI2g{kv@R4DbVT+NOk)81p
zqXbxkZ!kg`VT_)P!HiEBQy6m?OBw4J$&AB{i;VkBrZZh3ta$@pfdd!r-
zgkvgUst4QBIMX^aGczZ%2(u#C7R{OA%mK_#n6b@;I=^nE!qW(nxiItNlPYRw?K52Z?
z<>ZeilTQAAvgzdT$*oi8PKlk;I%Rdr`&9I)>{G8#^`2TeefqTU>F-Wkoc1~$eLDAa
z?dieOo2=}tlB_pb?O8)uQ&~${-?Pr2IezBynd@inpYcDFaHi-?=b72F$Il9%)jDf?
zHu!AnS<+eR*)=wHHfc5^Hg~osYIcIYq7)F
zAF}_(Ue7*$p834U`J3nA=OfSOo$olm$Z?KCj>DA0mm`&JFPU%Yft=c4Py*o(xA1Kf<O=Xq3lU_22#uXy@+>3PL?@A3xlX7RT1uJQ5n>G65+CG*wt&GK{df5#8!
zf5u9HT@&4xdHyz*X2#N?o1%DDO
z5F8LXA*3YaDD+I|t|&U@w}3GrCXF)R7uoT6f62pj9yG$%vmf|tW%s`TtVDL94p>+mFcST
zRm9cIt5k_o64xa#5(J51Nlr<9$)6;NlJipEN?Az7N;OLDUz5M)cI~%oH0g8Fx1@iP
zE|*@G5tD(*q{?*5o|e^>4UsLAU6K=*bC64y`ykIQZy+Bk|5koaK~VvvkgqVM_^sj(
zifM{erSnR6l%6OxDKja5uNi9L3La8
z=jtCdE@}Lr@vFv1&2KbqH8V9wz7zV+{yW@vlh;MB!>_-*KL7o-?@`~Ee7~Wkq7|g|
zTAM*zPdh@pL+6|hR3}AeP*+gbNtd9zs3)%%p!fO)(~a9VVsB7y^4)~p%)PmIOYv6F
zt-9N%ZbNRT+#b;v(?{u-8_*lvGKe$iH@s|!FeDlQMmLS(j0WzA-0{3qVa#lN$2iq^
z;;!`Fz`KnQc8CoG4_P<)-Xz9^2E79HhQ2jDV`^oZYr0{kZT8gc<2|W+LHAnCxy@b7
z%Pd$dOf7I0YnD2e@s{IO@>Y+ndaOmPeXX1CbKiHnUuAQ~#@43j2c{p)e!%~*Yinei
zX}e~3(=N?!5vB!8gw5E0XaCH8%0b=Xsl%kBy5m#FPfqGiaZXdtn$8K%vo2aLDK1O!
z8}JPHhO3clw(EhLnOlMTG4~(bNr-a@S416>AL)zi@(}lU?D5f4#WUV>5p^4dL(`$H
z(Pdue!GV68_Z9C)-oqFTOe$v6$JD3Dm(3U9+vX?c7w$Lduj~IJfH43T@HX(9zz2cD
zLDz$x2h#=H1-}gu3i&x?{71bXa~_;{;QFBbr)xhw{b}`~*~9Xm`F{TK=g~)cj|h)h
zAEO>qLsdgF!WhF`!aBlb!;{1JBkUtuBCkazMD9L;J!y%Oj!KF;h<1$bh*5}1|AqM%
z#4kOunz1=g*`E489gQ=HD}8q9S;Vu|c$@g<1i6Ha#1o0$i64{nlgg66P5vc$H^n)H
zlKOpWK^kvbc-jWm9{WE1yY&1FzKqC>?O$Df{qS7xd1$_!t5`!J9Dk((2{AJng))e2M($1)K$uzXQK}|2|##
zLt)RW+ppdhDHY`xUoK87VK0d&r7QI-T_idaKbD!7b&+n7-j=JDmsChrWLJJunOenB
z6;pkp`seC{8vmNL*B-BD-#EP)eQWcUR(r3uyUw_-qyBb%Q-f|pZR7REnkMz8%4U^j
zQj1axu~nh9v`xOPq+Pzfq(h;jv{SLO?49zv@-DTm>i3%O-;lM*4c#}pTPcQ=t{!O5
z2dWkI;|Kc>pL*STm-;Y$JG3ABnffCJ&JH9D@(yMWi4GNfRQOmmtUcT|0vVx=!bWGt
z&||yfKTn*VNSG9u%>5+usd7qp>fN;E^!SX&%+BoNIkvg9`786q3*RlYE#6xkUqUS%
zEI(PfxRSjpxB7PN&f4I*`})pC#OB4#oGqoT#%;6h$sM0vmffU1(Y^Bh+xz_o?gs}R
zwkX@rV*v*6O7G|h0L$3`aM~OI*x!NU2g@(>!_OR_XUsy14-ce^9f
z-`&2)CCtRY6LI2*sBYg$s1fV@X&H_cDdn?a)9_7n!x;BVaoP`
zni?n6d{uniJ=`6l2%q$mbxY*B_ksNa!8>45MH*v
z5(u==p^7g$w85nVl#7R#3leeZP^YaO(%Va2P!ObFI#TZC;`pcbzl*^k<#Q1JmfgYE
z<)5%0QvPH=6jr%+*8z=m^FB1ME+{Q0Cnfz^)n}UdzeD?z1Vy-j{>pq7J2aqj$Hmvd
z&0O2X-2s9AqE=Ss+CPx*{S8@0UhZ$Cp=bw`7SbIQ2DL(9CM-b1M^Oih
zPwJ2I10VjV0S*YT*im5l9?-zq3^OAWGZP~-GZQll3-hrPXHT3se*DCF)-$Kha-F|$
zf$KacCpX_^0d5{4UQW(SS1$<(i-?MeUKEhHCUNE3qrVf!POzUi!G49C
zll#hlIvuIX=%oCkW89(J`>m9n=AQU~{Br07uzhTO!ylKv!Alj?o_n$x2OpGT1I(i01CN^f4bL{88k>-~z
zW1l{MHNq(*|2@n>R?!aS<^7`Q{RIUjGe`6r^60l(rVnc`>--#7{C0TpS-gl7uC5!j
z#C_<)VfwSN($O-jD3b8S;0h+)zdbuqoYvWtJEl0hsou^`dn)qJ;3GEgMecEeg+c%G0
z4IcaPzntS`d}dN|a?~#n)VlToFuPi3yf8Dr_23u5X%agh$yneru?iy8QJqhBDF$!v
zN2R=>$f(Use>`_=8>fd2vf}%T_*W4AstJEJhrjxS|J@zqV>buOIovo`uxPZ4=dDb=;JY^FXj5WW&sJq7Nj(oS
zYL)E;l60bCoy7SEb}f4r%t^e{l9qTeT+VV`qyOQKz`vZ1&ofIjr9Hu4Ma~H-Q+CsJ
zo@qmPwB64ZkSoxyHkNeQHeMQk_b+SvzsG1ht>D+Q?HZKmzFxNlYdqB$$0l98apT%7
zl}mzb;-imcI
zb9#7=;fb;h(X^sqhPWEUIu0?Eo902a7E9%w4ld}ntE6t83y$1lx
zMFoH4it^AB+A>s_a*q%<$dhKYsKMG87PD`yy0ElRJ)Jh*v>Ex6QbnBAJ*tvoSkc;5
zE5Gb)^0J`oq;XBv5In23SjU*B?RJSrdLi%iTL0i&$~+7j2N%n)Z%V=s1W_Qgv7|(i
z9;KoLJ`tm~&i3JS0pg=b1c54P(7v9TH0b19Q@JiH&wr=HDg(AQFP485Hr*=?jd0>m
z$;E}IuDD_iS7h1GuWxLGvxet-#+-KXD+ASCBVo0&5h}V&a?0f0&*kI9yIuV6g
z!l^xz{LWJ<_wdS<1H?(>Ku&Tn>nxEl#ks_t(~laHHwrg
zvUyAx7S`Px2uTfo>E_A>Cu9-zwV_Z;?Tt2yX4a?`3ibf`vtPgng6I7-NjDaKPRiI3
zJ}8RKqw?)ars^3qksF*1s4ElP`>xgAm>KXxD257oFaRBaQ*e*e8U4Qk!x
zeN}D8%blY`ecE**1WWYk*ox5%lgqy}?sUI%)LPS&Qrv&CWYl(e*kZCv8)8K^Ql;tA
z>Sa}ZC!Cs^>bkw!SWU{%CLj6OE#DU+2D1e2G&VJ+xKqGYKUwKH{Qk?%dUgYuyOvW{
zSf3m9ZKUvc{14`>L?kZ1y)8mCKSF(#EL#a#%IfkB)uUc%D$u1|Tghp}=709bPmxwp
z9-{U0(gthqNP#^?u*&G1R=?BGb!Dxy^~PYw))C+2aTv}cP+D)HTSP7x$dz({z%>uH~=_#<@ZCi
zJTAk2p}+F4cP`8MlLcb)%o@q_vqR4}ZZ%ut0xA?g1-*ZDxS{$#Kbg4jj}lk*W+zx<
z=T$>A2Rjb;djZCf1p6QXlHivgj9-qjL1$U#uOM>vlXz@vwp4JwlirW`Y}2trTwQ%xYy2;~w?z`Y^AK~-T#SSr7A;;oaS
zt$We_j=^!wlcp4pSHHhf8Sg8j`Y
z@JM-RSKn;J`b68hQB1vLH7DdJ$B&*Omv^&fR0d-*$7TZ6Us4s3mJK*~{s6>;Isj(3
z9gi=a?XwH4qN;G6^Bm${!es=o7_<#nb2TIp#PfQyGva8KWZrF`xm_FA0iKSXIo3K(
z$S;HMgP0(
zVvtLO>{O^KS)eCr(kC!KhZ8cnhYVLL2ruK23f=Vi5$)w_ZCZ|UB_Z%va1kzp1WfKk
z9R+HMwo}~+_U$CE8klC-?X`>L$*N!>nIt|#&*`ZuihQte|3dxT{JhWES1ARTeMuM8
zupavg?vZ6%3WZm2kNtX7kN%aL8i6#^Qt=?>Yilx{N}q4v7Vvr*
z``y1H{(qZ?gIlj`ZubBPP=5GZK=e@OMp68sOLRv=ub3}uC!f8f%U=@Vn({vlaTOdl
zdxaT#UPLnqtiEzHDca2`T}iJ(G{;RV*{mf`+N`*1!ezO`aA=>fvT3?Mv17IIMgZRz+R8?}Wf3;9;PiQ^_uraJQhgV!uq@X}!EKEe73v(=U5IAS
zR#wP|k5oxB%($5(1?nT7?hOUUV6E9Gs!2V!I?s-IDp^xewr70a-Mlh)QIoAZt)!~v
zEU@hHpO4C(CCQrbtiJ?mVwLIFP0^o|mTF?%c%3pf(|5XJ1E)gC{Z!ySnzT>srRuT$;?`5R`P7$vS@kx0hypV2TJG&ockYd2p3#r$
zH-li;yCv*bU;zm=oZ~esIq4s^0#Z*L4Nb4ToGuUA>4G<4Bw9-_I_K}>;2EhHXE18?
z+2Kl|w}$gnUe5Wbbnss_?1DDW;RByF20@mz5#xTw
zTWce5Djxum@B1SZw-kS|4|{pPBo>}pDrt-vu!*#0?+D)-{!fgi|MR8(Hxl$&M>X`C
z*Ow}b`t^|dZ%n{hZCh;)0CdPo%{Ibrs=wp_*ibwGR1W~$grv36bmC-C$j0(sO&y@t
z?;6bZ{!>p`%t`t+hz_AdpDI1nj!VYF))lKSjh7~d9RSq^~xbpYesX+#ey!;$ZJ=3xy$x=oMrP