Skip to content

Commit

Permalink
Add demo images upload support
Browse files Browse the repository at this point in the history
  • Loading branch information
HuakunShen committed May 28, 2024
1 parent 337100d commit 8c5b207
Show file tree
Hide file tree
Showing 14 changed files with 221 additions and 104 deletions.
6 changes: 6 additions & 0 deletions ci/build-upload.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import {
buildWithDockerAndValidate,
checkPackagesValidity,
parsePackageJson,
uploadImage,
uploadTarballToS3,
uploadTarballToSupabaseStorage,
} from "./src/utils";
Expand Down Expand Up @@ -120,6 +121,10 @@ for (const buildResult of buildResults) {
// continue;
// }
const filesize = fs.statSync(buildResult.tarballPath).size;
// Resolve demo image paths relative to the extension directory.
const demoImgPaths = buildResult.pkg.jarvis.demoImages
  .map((p) => join(buildResult.extPath, p))
  // fix: fs.existsSync was passed as a bare function reference (always truthy),
  // so nonexistent image paths were never filtered out — it must be called with the path
  .filter((p) => fs.existsSync(p));
const imgStoragePaths = await Promise.all(demoImgPaths.map((p) => uploadImage(p))); // file storage paths

const supabasePath = await uploadTarballToSupabaseStorage(
buildResult.tarballPath,
Expand All @@ -143,6 +148,7 @@ for (const buildResult of buildResults) {
packagejson: buildResult.pkg,
size: filesize,
tarball_path: supabasePath,
demo_images_paths: imgStoragePaths,
},
]);
if (error) {
Expand Down
1 change: 1 addition & 0 deletions ci/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
"@aws-sdk/client-s3": "^3.583.0",
"@supabase/supabase-js": "^2.43.4",
"jarvis-api": "0.0.2-alpha.3",
"sharp": "^0.33.4",
"zod": "^3.23.8"
}
}
159 changes: 119 additions & 40 deletions ci/src/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,16 +12,15 @@ import path, { join } from "path";
import { DOCKER_BUILD_ENTRYPOINT, REPO_ROOT } from "./constant";
import { spawn, exec } from "node:child_process";
import { supabase } from "./supabase";
import sharp from "sharp";

/**
* Package Name can be scoped or not
* Use regex to extract package name
* @param packageName
* @param version
*/
export function computeTarballName(
packageName: string,
version: string,
): string {
export function computeTarballName(packageName: string, version: string): string {
const scoped = packageName.startsWith("@");
if (scoped) {
const [scope, name] = packageName.split("/");
Expand All @@ -32,9 +31,7 @@ export function computeTarballName(
}

export function parsePackageJson(pkgJsonPath: string) {
const parse = ExtPackageJson.safeParse(
JSON.parse(fs.readFileSync(pkgJsonPath, "utf8")),
);
const parse = ExtPackageJson.safeParse(JSON.parse(fs.readFileSync(pkgJsonPath, "utf8")));
if (parse.error) {
console.error(`Error parsing ${pkgJsonPath}: ${parse.error}`);
process.exit(1);
Expand All @@ -45,30 +42,37 @@ export function parsePackageJson(pkgJsonPath: string) {

export function checkPackagesValidity(extPaths: string[]) {
/* ------------------- make sure package.json is parseable ------------------ */
const pkgs = extPaths.map((ext) =>
parsePackageJson(join(ext, "package.json")),
);
const pkgs = extPaths.map((ext) => parsePackageJson(join(ext, "package.json")));

/* --------------------- make sure identifier is unique --------------------- */
const identifiers = pkgs.map((pkg) => pkg.jarvis.identifier);
const uniqueIdentifiers = new Set(identifiers);
if (identifiers.length !== uniqueIdentifiers.size) {
console.error("Identifiers are not unique");
// find the duplicates
const duplicates = identifiers.filter(
(item, index) => identifiers.indexOf(item) !== index,
);
const duplicates = identifiers.filter((item, index) => identifiers.indexOf(item) !== index);
console.error("duplicates", duplicates);
process.exit(1);
}

/* ----------------------- Check Demo Images Existence ---------------------- */
for (const extPath of extPaths) {
const pkg = parsePackageJson(join(extPath, "package.json"));
for (const imgPath of pkg.jarvis.demoImages) {
const imgFullPath = join(extPath, imgPath);
if (!fs.existsSync(imgFullPath)) {
console.error(`Demo Image not found: ${imgFullPath} in ${extPath}`);
process.exit(1);
}
}
}

/* ------ make sure there is no tarball .tgz file in the each extension ----- */
for (const extPath of extPaths) {
const files = fs.readdirSync(extPath);
const tgzFiles = files.filter((file) => file.endsWith(".tgz"));
if (tgzFiles.length > 0) {
console.error(
`Extension ${extPath} contains tarball files: ${tgzFiles.join(", ")}`,
);
console.error(`Extension ${extPath} contains tarball files: ${tgzFiles.join(", ")}`);
console.error(
"If you are developing, run scripts/clean.sh to remove all .tgz file in the top level of each extension",
);
Expand All @@ -77,14 +81,9 @@ export function checkPackagesValidity(extPaths: string[]) {
}
}

/**
* Compute SHA-1 checksum of a file
* @param filePath
* @returns
*/
export function computeShasum1(filePath: string): Promise<string> {
export function computeFileHash(filePath: string, algorithm: string): Promise<string> {
return new Promise((resolve, reject) => {
const hash = crypto.createHash("sha1");
const hash = crypto.createHash(algorithm);
const stream = fs.createReadStream(filePath);

stream.on("data", (data) => {
Expand All @@ -102,6 +101,14 @@ export function computeShasum1(filePath: string): Promise<string> {
});
}

/** Convenience wrapper: hex-encoded SHA-1 checksum of the file at `filePath`. */
export async function computeFileSha1(filePath: string): Promise<string> {
  return computeFileHash(filePath, "sha1");
}

/** Convenience wrapper: hex-encoded SHA-512 checksum of the file at `filePath`. */
export async function computeFileSha512(filePath: string): Promise<string> {
  return computeFileHash(filePath, "sha512");
}

/**
* Docker is used to build each individual extension for safety
* Packages could potentially modify other extensions if they share environment.
Expand Down Expand Up @@ -152,9 +159,7 @@ export function buildWithDocker(extPath: string): Promise<{
}

if (dataStr.includes("npm notice filename:")) {
const tarballFilename = dataStr.match(
/npm notice filename:\s+([^\s]+)/,
);
const tarballFilename = dataStr.match(/npm notice filename:\s+([^\s]+)/);
if (tarballFilename) {
stderrTarballFilename = tarballFilename[1];
console.log("Parsed tarball:", stderrTarballFilename);
Expand All @@ -172,10 +177,7 @@ export function buildWithDocker(extPath: string): Promise<{
});
subprocess.on("close", (code) => {
console.log(`child process exited with code ${code}`);
if (
stderrShasum.trim().length === 0 ||
stderrTarballFilename.trim().length === 0
) {
if (stderrShasum.trim().length === 0 || stderrTarballFilename.trim().length === 0) {
return reject("shasum or tarball filename not found");
}
if (code !== 0) {
Expand All @@ -201,17 +203,15 @@ export type BuildResult = {
* @param extPath Extension Path
* @returns
*/
export function buildWithDockerAndValidate(
extPath: string,
): Promise<BuildResult> {
export function buildWithDockerAndValidate(extPath: string): Promise<BuildResult> {
return buildWithDocker(extPath)
.then((res) => {
const parsedTarballPath = join(extPath, res.stderrTarballFilename);
if (!fs.existsSync(parsedTarballPath)) {
console.error(`Tarball not found: ${parsedTarballPath}`);
process.exit(1);
}
return computeShasum1(parsedTarballPath).then((computedShasum) => {
return computeFileSha1(parsedTarballPath).then((computedShasum) => {
if (computedShasum !== res.stderrShasum) {
console.error(
`Shasum mismatch: Computed(${computedShasum}) !== Output from docker(${res.stderrShasum})`,
Expand Down Expand Up @@ -296,12 +296,10 @@ export async function uploadTarballToSupabaseStorage(
const tarball = fs.readFileSync(tarballPath);
console.log("uploading to supabase storage");

const { data, error } = await supabase.storage
.from("extensions")
.upload(key, tarball, {
cacheControl: "3600",
upsert: true,
});
const { data, error } = await supabase.storage.from("extensions").upload(key, tarball, {
cacheControl: "3600",
upsert: true,
});
if (error) {
console.error("Failed to upload tarball to supabase storage");
console.error(error);
Expand All @@ -310,3 +308,84 @@ export async function uploadTarballToSupabaseStorage(
console.log("Tarball uploaded to supabase storage");
return data.path;
}

/**
 * Hash an in-memory buffer with the given algorithm.
 * @param buffer raw bytes to digest
 * @param algorithm digest algorithm to use
 * @returns lowercase hex-encoded digest string
 */
export function computeHash(buffer: Buffer, algorithm: "sha1" | "sha256" | "sha512") {
  return crypto.createHash(algorithm).update(buffer).digest("hex");
}

/**
 * Upload an extension demo image, deduplicated by content hash.
 *
 * The image is re-encoded to JPEG (and downscaled to fit within a 720px height
 * when the source file is larger than 200KB), then:
 *   1. If an image with the same sha512 already exists in `ext_demo_images`,
 *      its existing storage path is returned (no re-upload).
 *   2. Otherwise the JPEG is uploaded to Supabase storage, mirrored to S3
 *      (best-effort), and a row is inserted into `ext_demo_images`.
 *
 * @param imagePath path to the image file on disk; the process exits if it is missing
 * @returns the storage path of the (possibly pre-existing) uploaded image
 * @throws if the Supabase storage upload or the database insert fails
 */
export async function uploadImage(imagePath: string) {
  // make sure imagePath exists and is a file
  if (!fs.existsSync(imagePath)) {
    console.error(`Image not found: ${imagePath}`);
    process.exit(1);
  }
  // Re-encode to JPEG; only resize when the original is over 200KB so that
  // already-small images keep their native resolution.
  const imageSize = fs.statSync(imagePath).size;
  let pipeline = sharp(imagePath);
  if (imageSize > 200 * 1024) {
    pipeline = pipeline.resize({
      height: 720,
      fit: sharp.fit.inside,
      withoutEnlargement: true,
    });
  }
  const img = await pipeline.jpeg().toBuffer();
  const imgSha512 = computeHash(img, "sha512");

  /* ----------------------- Check if image exists in db ---------------------- */
  const dbRes = await supabase.from("ext_demo_images").select("*").eq("sha512", imgSha512);
  if (dbRes.data && dbRes.data.length > 0) {
    return dbRes.data[0].image_path;
  }

  /* --------------------- Upload to supabase file storage -------------------- */
  const key = `ext-images/${imgSha512}.jpeg`;

  const { data, error } = await supabase.storage.from("extensions").upload(key, img, {
    cacheControl: "3600",
    upsert: true,
  });
  if (error) {
    console.error(error);
    throw new Error("Failed to upload image to supabase storage.");
  }

  /* ------------------------------ Upload to S3 ------------------------------ */
  const s3Client = new S3Client({
    endpoint: z.string().parse(process.env.S3_ENDPOINT),
    region: "auto",
    credentials: {
      accessKeyId: z.string().parse(process.env.S3_ACCESS_KEY_ID),
      secretAccessKey: z.string().parse(process.env.S3_SECRET_ACCESS_KEY),
    },
  });
  try {
    await s3Client.send(
      new PutObjectCommand({
        Bucket: "jarvis-extensions",
        Key: key,
        Body: img,
        // was "application/jpeg": the registered MIME type for JPEG is "image/jpeg"
        ContentType: "image/jpeg",
      }),
    );
  } catch (err) {
    // Best-effort mirror: preserve the original log-and-continue behavior when
    // only the S3 copy fails (Supabase storage is the source of truth for the
    // returned path). The previous message said "tarball" — it is an image.
    console.error("Failed to upload image to S3");
    console.error(err);
  }

  /* ------------------------- Insert into database -------------------------- */
  const { error: insertError } = await supabase
    .from("ext_demo_images")
    .insert([{ sha512: imgSha512, image_path: data.path }]);
  if (insertError) {
    console.error(insertError);
    throw new Error("Failed to insert image into database");
  }

  // `data` is guaranteed non-null here (the error branch above threw).
  return data.path;
}
Loading

0 comments on commit 8c5b207

Please sign in to comment.