feat!: move BQ project/dataset/table to inputs


BREAKING CHANGE: We now require the BQ project/dataset/table to be provided as inputs.
mathieudi authored Mar 7, 2025
2 parents 9855470 + 4e02cb7 commit 80f40f7
Showing 7 changed files with 59 additions and 9 deletions.
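Because all three new inputs are declared with `required: true`, omitting any of them now fails the action at startup. Below is a minimal sketch (not part of this commit; `resolveDestination` is a hypothetical helper) of what the startup lookup amounts to, using `@actions/core`:

```typescript
// Sketch only: illustrates why omitting any of the new inputs is a breaking
// failure. `resolveDestination` is a hypothetical helper, not code from this repo.
import * as core from '@actions/core';

interface Destination {
  projectId: string;
  dataset: string;
  table: string;
}

function resolveDestination(): Destination {
  // With `required: true`, @actions/core throws when the calling workflow
  // does not supply the input, so upgraded callers must pass all three.
  return {
    projectId: core.getInput('project_id', { required: true }),
    dataset: core.getInput('dataset', { required: true }),
    table: core.getInput('table', { required: true }),
  };
}
```

Workflows that previously relied on the hard-coded `side-dw.github.ci_analytics` destination now have to pass these values explicitly, as the README diff below shows.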
15 changes: 15 additions & 0 deletions README.md
@@ -13,6 +13,21 @@ At the end of a workflow call this action to send ci analytics to big query
```yaml
- uses: reside-eng/workflow-ci-analytics-action@v1
with:
# Google project ID where to send data
#
# Required: true
project_id: ''

# Bigquery dataset where to send data
#
# Required: true
dataset: ''

# Bigquery table where to send data
#
# Required: true
table: ''

# Timestamp when the job was created
#
# Required: true
9 changes: 9 additions & 0 deletions action.yml
@@ -1,6 +1,15 @@
name: 'Workflow CI Analytics'
description: 'Send Github Data Analytics to the Data Warehouse'
inputs:
project_id:
description: 'Google project ID where to send data'
required: true
dataset:
description: 'Bigquery dataset where to send data'
required: true
table:
description: 'Bigquery table where to send data'
required: true
created_at:
description: 'Timestamp when the job was created'
required: true
14 changes: 10 additions & 4 deletions dist/index.js
@@ -68014,6 +68014,9 @@ Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.Inputs = void 0;
var Inputs;
(function (Inputs) {
Inputs["ProjectId"] = "project_id";
Inputs["Dataset"] = "dataset";
Inputs["Table"] = "table";
Inputs["CreatedAt"] = "created_at";
Inputs["StartedAt"] = "started_at";
Inputs["CompletedAt"] = "completed_at";
@@ -68077,7 +68080,7 @@ const core = __importStar(__nccwpck_require__(7484));
const github_1 = __nccwpck_require__(3228);
const bigquery_1 = __nccwpck_require__(676);
const inputs_1 = __nccwpck_require__(8422);
async function sendToBigQuery(analyticsObject) {
async function sendToBigQuery(analyticsObject, projectId, datasetName, tableName) {
const client = new bigquery_1.BigQuery();
const schema = 'Created_At:string, Started_At:string, Completed_At:string, MatrixName:string, MatrixValue:string, Result:string, Draft:boolean, JobLink:string';
const options = {
@@ -68090,13 +68093,16 @@ async function sendToBigQuery(analyticsObject) {
},
};
const table = await client
.dataset('github', { projectId: 'side-dw' })
.table('ci_analytics');
.dataset(datasetName, { projectId })
.table(tableName);
core.info(`Retrieved table ${table.id}`);
table.insert(analyticsObject);
}
async function pipeline() {
core.info('Successfully triggering CI Analytics action');
const projectId = core.getInput(inputs_1.Inputs.ProjectId, { required: true });
const dataset = core.getInput(inputs_1.Inputs.Dataset, { required: true });
const table = core.getInput(inputs_1.Inputs.Table, { required: true });
const createdAt = core.getInput(inputs_1.Inputs.CreatedAt, { required: true });
const startedAt = core.getInput(inputs_1.Inputs.StartedAt, { required: true });
const completedAt = core.getInput(inputs_1.Inputs.CompletedAt, { required: true });
@@ -68152,7 +68158,7 @@ async function pipeline() {
};
core.info('Analytics Object: ');
core.info(JSON.stringify(analyticsObject, null, 2));
await sendToBigQuery(analyticsObject);
await sendToBigQuery(analyticsObject, projectId, dataset, table);
core.info('Successfully Set CI Analytics in bigquery');
}
function handleError(err) {
3 changes: 3 additions & 0 deletions dist/inputs.d.ts
@@ -1,4 +1,7 @@
export declare enum Inputs {
ProjectId = "project_id",
Dataset = "dataset",
Table = "table",
CreatedAt = "created_at",
StartedAt = "started_at",
CompletedAt = "completed_at",
3 changes: 3 additions & 0 deletions src/inputs.ts
@@ -1,4 +1,7 @@
export enum Inputs {
ProjectId = 'project_id',
Dataset = 'dataset',
Table = 'table',
CreatedAt = 'created_at',
StartedAt = 'started_at',
CompletedAt = 'completed_at',
8 changes: 7 additions & 1 deletion src/main.test.ts
@@ -53,6 +53,12 @@ describe('GitHub Action - CI Analytics', () => {
.mockImplementation((name: string) => {
if (Object.values(Inputs).includes(name as Inputs)) {
switch (name) {
case Inputs.ProjectId:
return 'test';
case Inputs.Dataset:
return 'github';
case Inputs.Table:
return 'ci_analytics';
case Inputs.CreatedAt:
return mockExpectedAnalytics.created_at;
case Inputs.StartedAt:
@@ -145,7 +151,7 @@ describe('GitHub Action - CI Analytics', () => {

// Validate BigQuery interaction
expect(bigQueryMock.dataset).toHaveBeenCalledWith('github', {
projectId: 'side-dw',
projectId: 'test',
});
expect(datasetMock.table).toHaveBeenCalledWith('ci_analytics');
expect(insertMock).toHaveBeenCalledWith(mockExpectedAnalytics);
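The visible hunk starts inside the mock's switch statement; here is a minimal self-contained sketch of the stubbing pattern it relies on (assuming Jest, with the `spyOn` setup that sits above the hunk reconstructed as an assumption), returning the same values the new cases use:

```typescript
// Sketch of the getInput stub the test builds on; the spyOn line is outside
// the visible hunk, so this reconstruction is an assumption.
import * as core from '@actions/core';
import { Inputs } from './inputs';

jest.spyOn(core, 'getInput').mockImplementation((name: string) => {
  switch (name) {
    case Inputs.ProjectId:
      return 'test';
    case Inputs.Dataset:
      return 'github';
    case Inputs.Table:
      return 'ci_analytics';
    default:
      return '';
  }
});
```

These stubbed values are what the assertions on `bigQueryMock.dataset` and `datasetMock.table` check against.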
16 changes: 12 additions & 4 deletions src/main.ts
@@ -31,7 +31,12 @@ export type AnalyticsObject = {
runner_type: string;
};

async function sendToBigQuery(analyticsObject: AnalyticsObject): Promise<void> {
async function sendToBigQuery(
analyticsObject: AnalyticsObject,
projectId: string,
datasetName: string,
tableName: string,
): Promise<void> {
const client = new BigQuery();

const schema =
@@ -48,8 +53,8 @@ async function sendToBigQuery(analyticsObject: AnalyticsObject): Promise<void> {

// Create a new table in the dataset
const table = await client
.dataset('github', { projectId: 'side-dw' })
.table('ci_analytics');
.dataset(datasetName, { projectId })
.table(tableName);

core.info(`Retrieved table ${table.id}`);

@@ -61,6 +66,9 @@
*/
async function pipeline(): Promise<void> {
core.info('Successfully triggering CI Analytics action');
const projectId = core.getInput(Inputs.ProjectId, { required: true });
const dataset = core.getInput(Inputs.Dataset, { required: true });
const table = core.getInput(Inputs.Table, { required: true });
const createdAt = core.getInput(Inputs.CreatedAt, { required: true });
const startedAt = core.getInput(Inputs.StartedAt, { required: true });
const completedAt = core.getInput(Inputs.CompletedAt, { required: true });
@@ -139,7 +147,7 @@ async function pipeline(): Promise<void> {
core.info('Analytics Object: ');
core.info(JSON.stringify(analyticsObject, null, 2));

await sendToBigQuery(analyticsObject);
await sendToBigQuery(analyticsObject, projectId, dataset, table);
core.info('Successfully Set CI Analytics in bigquery');
}

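Pulling the hunks together, a minimal sketch of the parametrized write path (a reading of the diff, not the file verbatim; the row type is trimmed to a generic record for illustration):

```typescript
// Consolidated sketch of the new write path; AnalyticsRow stands in for the
// full AnalyticsObject type defined in src/main.ts.
import * as core from '@actions/core';
import { BigQuery } from '@google-cloud/bigquery';

type AnalyticsRow = Record<string, string | boolean>;

async function sendToBigQuery(
  row: AnalyticsRow,
  projectId: string,
  datasetName: string,
  tableName: string,
): Promise<void> {
  const client = new BigQuery();
  // The destination is now fully caller-controlled instead of the previously
  // hard-coded side-dw.github.ci_analytics.
  const table = client.dataset(datasetName, { projectId }).table(tableName);
  core.info(`Writing to ${projectId}.${datasetName}.${tableName}`);
  // Awaiting the insert so a failed write rejects; the compiled output above
  // calls insert without awaiting it.
  await table.insert(row);
}
```

It is invoked from `pipeline()` exactly as the diff shows: `await sendToBigQuery(analyticsObject, projectId, dataset, table);`.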
