diff --git a/CHANGELOG.md b/CHANGELOG.md
index a8f54d105..fe6650a37 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -36,6 +36,8 @@ Other improvements:
   that command, not root help.
 - a new `spago init --subpackage foo` option to initialize a sub-project in
   the current workspace.
+- #1110: `spago publish` will now install packages returned by the registry solver
+  before trying to build with them.
 
 ## [0.21.0] - 2023-05-04
 
diff --git a/core/src/Prelude.purs b/core/src/Prelude.purs
index 5047a48d2..e41556bdc 100644
--- a/core/src/Prelude.purs
+++ b/core/src/Prelude.purs
@@ -34,7 +34,7 @@ import Data.Show.Generic (genericShow) as Extra
 import Data.Traversable (for, traverse) as Extra
 import Data.TraversableWithIndex (forWithIndex) as Extra
 import Data.Tuple (Tuple(..), fst, snd) as Extra
-import Data.Tuple.Nested ((/\)) as Extra
+import Data.Tuple.Nested (type (/\), (/\)) as Extra
 import Effect (Effect) as Extra
 import Effect.Aff (Aff, Error) as Extra
 import Effect.Aff.Class (class MonadAff, liftAff) as Extra
diff --git a/src/Spago/Command/Fetch.purs b/src/Spago/Command/Fetch.purs
index 7e4afd43c..29c2f3bb0 100644
--- a/src/Spago/Command/Fetch.purs
+++ b/src/Spago/Command/Fetch.purs
@@ -7,6 +7,7 @@ module Spago.Command.Fetch
   , getTransitiveDeps
   , getTransitiveDepsFromRegistry
   , getWorkspacePackageDeps
+  , fetchPackagesToLocalCache
   , run
   , toAllDependencies
   , writeNewLockfile
@@ -83,7 +84,7 @@ run :: forall a. FetchOpts -> Spago (FetchEnv a) PackageTransitiveDeps
 run { packages: packagesRequestedToInstall, ensureRanges, isTest, isRepl } = do
   logDebug $ "Requested to install these packages: " <> printJson (CJ.array PackageName.codec) packagesRequestedToInstall
 
-  { workspace: currentWorkspace, offline } <- ask
+  { workspace: currentWorkspace } <- ask
 
   let
     getPackageConfigPath errorMessageEnd = do
@@ -192,88 +193,7 @@ run { packages: packagesRequestedToInstall, ensureRanges, isTest, isRepl } = do
 
   -- then for every package we have we try to download it, and copy it in the local cache
   logInfo "Downloading dependencies..."
-
-  parallelise $ (flip map) (Map.toUnfoldable depsToFetch :: Array (Tuple PackageName Package)) \(Tuple name package) -> do
-    let localPackageLocation = Config.getPackageLocation name package
-    -- first of all, we check if we have the package in the local cache. If so, we don't even do the work
-    unlessM (FS.exists localPackageLocation) case package of
-      GitPackage gitPackage -> getGitPackageInLocalCache name gitPackage
-      RegistryVersion v -> do
-        -- if the version comes from the registry then we have a longer list of things to do
-        let versionString = Registry.Version.print v
-        let packageVersion = PackageName.print name <> "@" <> versionString
-        -- get the metadata for the package, so we have access to the hash and other info
-        metadata <- Registry.getMetadata name
-        case (metadata >>= (\(Metadata meta) -> Either.note "Didn't find version in the metadata file" $ Map.lookup v meta.published)) of
-          Left err -> die $ "Couldn't read metadata, reason:\n " <> err
-          Right versionMetadata -> do
-            logDebug $ "Metadata read: " <> printJson Metadata.publishedMetadataCodec versionMetadata
-            -- then check if we have a tarball cached. If not, download it
-            let globalCachePackagePath = Path.concat [ Paths.globalCachePath, "packages", PackageName.print name ]
-            let archivePath = Path.concat [ globalCachePackagePath, versionString <> ".tar.gz" ]
-            FS.mkdirp globalCachePackagePath
-            -- We need to see if the tarball is there, and if we can decompress it.
-            -- This is because if Spago is killed while it's writing the tar, then it might leave it corrupted.
-            -- By checking that it's broken we can try to redownload it here.
-            tarExists <- FS.exists archivePath
-            -- unpack the tars in a temp folder, then move to local cache
-            let tarInnerFolder = PackageName.print name <> "-" <> Version.print v
-            tempDir <- mkTemp
-            FS.mkdirp tempDir
-            tarIsGood <-
-              if tarExists then do
-                logDebug $ "Trying to unpack archive to temp folder: " <> tempDir
-                map (either (const false) (const true)) $ liftEffect $ Tar.extract { filename: archivePath, cwd: tempDir }
-              else
-                pure false
-            case tarExists, tarIsGood, offline of
-              true, true, _ -> pure unit -- Tar exists and is good, and we already unpacked it. Happy days!
-              _, _, Offline -> die $ "Package " <> packageVersion <> " is not in the local cache, and Spago is running in offline mode - can't make progress."
-              _, _, Online -> do
-                let packageUrl = "https://packages.registry.purescript.org/" <> PackageName.print name <> "/" <> versionString <> ".tar.gz"
-                logInfo $ "Fetching package " <> packageVersion
-                response <- liftAff $ withBackoff' do
-                  res <- Http.request
-                    ( Http.defaultRequest
-                        { method = Left Method.GET
-                        , responseFormat = Response.arrayBuffer
-                        , url = packageUrl
-                        }
-                    )
-                  -- If we get a 503, we want the backoff to kick in, so we wait here and we'll eventually be retried
-                  case res of
-                    Right { status } | status == StatusCode 503 -> Aff.delay (Aff.Milliseconds 30_000.0)
-                    _ -> pure unit
-                  pure res
-                case response of
-                  Nothing -> die $ "Couldn't reach the registry at " <> packageUrl
-                  Just (Left err) -> die $ "Couldn't fetch package " <> packageVersion <> ":\n " <> Http.printError err
-                  Just (Right { status, body }) | status /= StatusCode 200 -> do
-                    (buf :: Buffer) <- liftEffect $ Buffer.fromArrayBuffer body
-                    bodyString <- liftEffect $ Buffer.toString Encoding.UTF8 buf
-                    die $ "Couldn't fetch package " <> packageVersion <> ", status was not ok " <> show status <> ", got answer:\n " <> bodyString
-                  Just (Right r@{ body: archiveArrayBuffer }) -> do
-                    logDebug $ "Got status: " <> show r.status
-                    -- check the size and hash of the tar against the metadata
-                    archiveBuffer <- liftEffect $ Buffer.fromArrayBuffer archiveArrayBuffer
-                    archiveSize <- liftEffect $ Buffer.size archiveBuffer
-                    archiveSha <- liftEffect $ Sha256.hashBuffer archiveBuffer
-                    unless (Int.toNumber archiveSize == versionMetadata.bytes) do
-                      die $ "Archive fetched for " <> packageVersion <> " has a different size (" <> show archiveSize <> ") than expected (" <> show versionMetadata.bytes <> ")"
-                    unless (archiveSha == versionMetadata.hash) do
-                      die $ "Archive fetched for " <> packageVersion <> " has a different hash (" <> Sha256.print archiveSha <> ") than expected (" <> Sha256.print versionMetadata.hash <> ")"
-                    -- if everything's alright we stash the tar in the global cache
-                    logDebug $ "Fetched archive for " <> packageVersion <> ", saving it in the global cache: " <> archivePath
-                    FS.writeFile archivePath archiveBuffer
-                    logDebug $ "Unpacking archive to temp folder: " <> tempDir
-                    (liftEffect $ Tar.extract { filename: archivePath, cwd: tempDir }) >>= case _ of
-                      Right _ -> pure unit
-                      Left err -> die [ "Failed to decode downloaded package " <> packageVersion <> ", error:", show err ]
-                    logDebug $ "Moving extracted file to local cache:" <> localPackageLocation
-                    FS.moveSync { src: (Path.concat [ tempDir, tarInnerFolder ]), dst: localPackageLocation }
-      -- Local package, no work to be done
-      LocalPackage _ -> pure unit
-      WorkspacePackage _ -> pure unit
+  fetchPackagesToLocalCache depsToFetch
 
   -- We return the dependencies, going through the lockfile write if we need to
   -- (we return them from inside there because we need to update the commit hashes)
@@ -281,6 +201,91 @@ run { packages: packagesRequestedToInstall, ensureRanges, isTest, isRepl } = do
     Right _lockfile -> pure dependencies
     Left reason -> writeNewLockfile reason dependencies
 
+fetchPackagesToLocalCache :: ∀ a. Map PackageName Package -> Spago (FetchEnv a) Unit
+fetchPackagesToLocalCache packages = do
+  { offline } <- ask
+  parallelise $ packages # Map.toUnfoldable <#> \(Tuple name package) -> do
+    let localPackageLocation = Config.getPackageLocation name package
+    -- first of all, we check if we have the package in the local cache. If so, we don't even do the work
+    unlessM (FS.exists localPackageLocation) case package of
+      GitPackage gitPackage -> getGitPackageInLocalCache name gitPackage
+      RegistryVersion v -> do
+        -- if the version comes from the registry then we have a longer list of things to do
+        let versionString = Registry.Version.print v
+        let packageVersion = PackageName.print name <> "@" <> versionString
+        -- get the metadata for the package, so we have access to the hash and other info
+        metadata <- Registry.getMetadata name
+        case (metadata >>= (\(Metadata meta) -> Either.note "Didn't find version in the metadata file" $ Map.lookup v meta.published)) of
+          Left err -> die $ "Couldn't read metadata, reason:\n " <> err
+          Right versionMetadata -> do
+            logDebug $ "Metadata read: " <> printJson Metadata.publishedMetadataCodec versionMetadata
+            -- then check if we have a tarball cached. If not, download it
+            let globalCachePackagePath = Path.concat [ Paths.globalCachePath, "packages", PackageName.print name ]
+            let archivePath = Path.concat [ globalCachePackagePath, versionString <> ".tar.gz" ]
+            FS.mkdirp globalCachePackagePath
+            -- We need to see if the tarball is there, and if we can decompress it.
+            -- This is because if Spago is killed while it's writing the tar, then it might leave it corrupted.
+            -- By checking that it's broken we can try to redownload it here.
+            tarExists <- FS.exists archivePath
+            -- unpack the tars in a temp folder, then move to local cache
+            let tarInnerFolder = PackageName.print name <> "-" <> Version.print v
+            tempDir <- mkTemp
+            FS.mkdirp tempDir
+            tarIsGood <-
+              if tarExists then do
+                logDebug $ "Trying to unpack archive to temp folder: " <> tempDir
+                map (either (const false) (const true)) $ liftEffect $ Tar.extract { filename: archivePath, cwd: tempDir }
+              else
+                pure false
+            case tarExists, tarIsGood, offline of
+              true, true, _ -> pure unit -- Tar exists and is good, and we already unpacked it. Happy days!
+              _, _, Offline -> die $ "Package " <> packageVersion <> " is not in the local cache, and Spago is running in offline mode - can't make progress."
+              _, _, Online -> do
+                let packageUrl = "https://packages.registry.purescript.org/" <> PackageName.print name <> "/" <> versionString <> ".tar.gz"
+                logInfo $ "Fetching package " <> packageVersion
+                response <- liftAff $ withBackoff' do
+                  res <- Http.request
+                    ( Http.defaultRequest
+                        { method = Left Method.GET
+                        , responseFormat = Response.arrayBuffer
+                        , url = packageUrl
+                        }
+                    )
+                  -- If we get a 503, we want the backoff to kick in, so we wait here and we'll eventually be retried
+                  case res of
+                    Right { status } | status == StatusCode 503 -> Aff.delay (Aff.Milliseconds 30_000.0)
+                    _ -> pure unit
+                  pure res
+                case response of
+                  Nothing -> die $ "Couldn't reach the registry at " <> packageUrl
+                  Just (Left err) -> die $ "Couldn't fetch package " <> packageVersion <> ":\n " <> Http.printError err
+                  Just (Right { status, body }) | status /= StatusCode 200 -> do
+                    (buf :: Buffer) <- liftEffect $ Buffer.fromArrayBuffer body
+                    bodyString <- liftEffect $ Buffer.toString Encoding.UTF8 buf
+                    die $ "Couldn't fetch package " <> packageVersion <> ", status was not ok " <> show status <> ", got answer:\n " <> bodyString
+                  Just (Right r@{ body: archiveArrayBuffer }) -> do
+                    logDebug $ "Got status: " <> show r.status
+                    -- check the size and hash of the tar against the metadata
+                    archiveBuffer <- liftEffect $ Buffer.fromArrayBuffer archiveArrayBuffer
+                    archiveSize <- liftEffect $ Buffer.size archiveBuffer
+                    archiveSha <- liftEffect $ Sha256.hashBuffer archiveBuffer
+                    unless (Int.toNumber archiveSize == versionMetadata.bytes) do
+                      die $ "Archive fetched for " <> packageVersion <> " has a different size (" <> show archiveSize <> ") than expected (" <> show versionMetadata.bytes <> ")"
+                    unless (archiveSha == versionMetadata.hash) do
+                      die $ "Archive fetched for " <> packageVersion <> " has a different hash (" <> Sha256.print archiveSha <> ") than expected (" <> Sha256.print versionMetadata.hash <> ")"
+                    -- if everything's alright we stash the tar in the global cache
+                    logDebug $ "Fetched archive for " <> packageVersion <> ", saving it in the global cache: " <> archivePath
+                    FS.writeFile archivePath archiveBuffer
+                    logDebug $ "Unpacking archive to temp folder: " <> tempDir
+                    (liftEffect $ Tar.extract { filename: archivePath, cwd: tempDir }) >>= case _ of
+                      Right _ -> pure unit
+                      Left err -> die [ "Failed to decode downloaded package " <> packageVersion <> ", error:", show err ]
+                    logDebug $ "Moving extracted file to local cache:" <> localPackageLocation
+                    FS.moveSync { src: (Path.concat [ tempDir, tarInnerFolder ]), dst: localPackageLocation }
+      -- Local package, no work to be done
+      LocalPackage _ -> pure unit
+      WorkspacePackage _ -> pure unit
+
 lookupInCache :: ∀ a k v. Ord k => k -> Ref.Ref (Map k v) -> Spago a (Maybe v)
 lookupInCache key cacheRef = liftEffect $ Ref.read cacheRef >>= Map.lookup key >>> pure
 
diff --git a/src/Spago/Command/Ls.purs b/src/Spago/Command/Ls.purs
index f3a1bdda8..c4f645cef 100644
--- a/src/Spago/Command/Ls.purs
+++ b/src/Spago/Command/Ls.purs
@@ -18,7 +18,6 @@ import Data.Codec.JSON.Record as CJ.Record
 import Data.Foldable (elem)
 import Data.Map (filterKeys)
 import Data.Map as Map
-import Data.Tuple.Nested (type (/\))
 import Record as Record
 import Registry.Internal.Codec (packageMap)
 import Registry.PackageName as PackageName
diff --git a/src/Spago/Command/Publish.purs b/src/Spago/Command/Publish.purs
index 5d9d87f50..f8b88207b 100644
--- a/src/Spago/Command/Publish.purs
+++ b/src/Spago/Command/Publish.purs
@@ -354,6 +354,7 @@ publish _args = do
       -- from the solver (this is because the build might terminate the process, and we shall output the errors first)
       logInfo "Building again with the build plan from the solver..."
       let buildPlanDependencies = map Config.RegistryVersion resolutions
+      Fetch.fetchPackagesToLocalCache buildPlanDependencies
       builtAgain <- runBuild { selected, dependencies: Map.singleton selected.package.name { core: buildPlanDependencies, test: Map.empty } }
         ( Build.run
             { depsOnly: false
diff --git a/test-fixtures/publish/1110-solver-different-version/.gitignore b/test-fixtures/publish/1110-solver-different-version/.gitignore
new file mode 100644
index 000000000..a4314d5a6
--- /dev/null
+++ b/test-fixtures/publish/1110-solver-different-version/.gitignore
@@ -0,0 +1,2 @@
+.spago/
+output/
diff --git a/test-fixtures/publish/1110-solver-different-version/expected-stderr.txt b/test-fixtures/publish/1110-solver-different-version/expected-stderr.txt
new file mode 100644
index 000000000..783486f27
--- /dev/null
+++ b/test-fixtures/publish/1110-solver-different-version/expected-stderr.txt
@@ -0,0 +1,30 @@
+Reading Spago workspace configuration...
+
+✓ Selecting package to build: aaa
+
+Downloading dependencies...
+Building...
+           Src   Lib   All
+Warnings     0     0     0
+Errors       0     0     0
+
+✓ Build succeeded.
+
+Passed preliminary checks.
+‼ Spago is in offline mode - not pushing the git tag v0.0.1
+Building again with the build plan from the solver...
+Building...
+[1 of 3] Compiling Effect.Console
+[3 of 3] Compiling Lib
+[2 of 3] Compiling Effect.Class.Console
+           Src   Lib   All
+Warnings     0     0     0
+Errors       0     0     0
+
+✓ Build succeeded.
+
+
+✓ Ready for publishing. Calling the registry..
+
+
+✘ Spago is offline - not able to call the Registry.
diff --git a/test-fixtures/publish/1110-solver-different-version/failure-stderr.txt b/test-fixtures/publish/1110-solver-different-version/failure-stderr.txt
new file mode 100644
index 000000000..db7c26a40
--- /dev/null
+++ b/test-fixtures/publish/1110-solver-different-version/failure-stderr.txt
@@ -0,0 +1,39 @@
+Reading Spago workspace configuration...
+
+✓ Selecting package to build: aaa
+
+Downloading dependencies...
+Building...
+[1 of 3] Compiling Effect.Console
+[3 of 3] Compiling Lib
+[2 of 3] Compiling Effect.Class.Console
+           Src   Lib   All
+Warnings     0     0     0
+Errors       0     0     0
+
+✓ Build succeeded.
+
+Passed preliminary checks.
+‼ Spago is in offline mode - not pushing the git tag v0.0.1
+Building again with the build plan from the solver...
+Building...
+[1 of 3] Compiling Effect.Console
+[ERROR 1/1 MissingFFIModule] .spago/p/console-6.1.0/src/Effect/Console.purs:1:1
+
+  v
+  1  module Effect.Console where
+  2
+  3  import Control.Bind (discard, bind, pure)
+  ...
+  86  result <- inner
+  87  groupEnd
+  88  pure result
+  ^
+
+  The foreign module implementation for module Effect.Console is missing.
+
+           Src   Lib   All
+Warnings     0     0     0
+Errors       0     1     1
+
+✘ Failed to build.
diff --git a/test-fixtures/publish/1110-solver-different-version/spago.yaml b/test-fixtures/publish/1110-solver-different-version/spago.yaml
new file mode 100644
index 000000000..644cc11fe
--- /dev/null
+++ b/test-fixtures/publish/1110-solver-different-version/spago.yaml
@@ -0,0 +1,18 @@
+package:
+  name: aaa
+  dependencies:
+    - console: ">=6.0.0 <7.0.0"
+    - effect: ">=4.0.0 <5.0.0"
+    - prelude: ">=6.0.1 <7.0.0"
+    - maybe: ">=6.0.0 <7.0.0"
+  publish:
+    version: 0.0.1
+    license: MIT
+    location:
+      githubOwner: purescript
+      githubRepo: aaa
+workspace:
+  packageSet:
+    registry: 58.0.0
+  extraPackages:
+    console: "6.0.0"
diff --git a/test-fixtures/publish/1110-solver-different-version/src/Main.purs b/test-fixtures/publish/1110-solver-different-version/src/Main.purs
new file mode 100644
index 000000000..549e1681a
--- /dev/null
+++ b/test-fixtures/publish/1110-solver-different-version/src/Main.purs
@@ -0,0 +1,10 @@
+module Lib where
+
+import Prelude
+
+import Data.Maybe (Maybe(..), isNothing)
+import Effect (Effect)
+import Effect.Console (logShow)
+
+printNothing :: Effect Unit
+printNothing = logShow $ isNothing Nothing
diff --git a/test/Prelude.purs b/test/Prelude.purs
index 1ab419ae0..9477ac586 100644
--- a/test/Prelude.purs
+++ b/test/Prelude.purs
@@ -12,6 +12,7 @@ import Data.String as String
 import Effect.Aff as Aff
 import Effect.Class.Console (log)
 import Effect.Class.Console as Console
+import Node.FS.Aff as FS.Aff
 import Node.Library.Execa (ExecaResult)
 import Node.Path (dirname)
 import Node.Path as Path
@@ -75,6 +76,9 @@ withTempDir = Aff.bracket createTempDir cleanupTempDir
   cleanupTempDir { oldCwd } = do
     liftEffect $ Process.chdir oldCwd
 
+rmRf :: ∀ m. MonadAff m => FilePath -> m Unit
+rmRf dir = liftAff $ FS.Aff.rm' dir { force: true, recursive: true, maxRetries: 5, retryDelay: 1000 }
+
 shouldEqual
   :: forall m t
    . MonadThrow Error m
diff --git a/test/Spago/Build/Monorepo.purs b/test/Spago/Build/Monorepo.purs
index e1f9c2fab..d7115c949 100644
--- a/test/Spago/Build/Monorepo.purs
+++ b/test/Spago/Build/Monorepo.purs
@@ -6,7 +6,6 @@ import Data.Array as Array
 import Data.String (Pattern(..))
 import Data.String as String
 import Effect.Aff (bracket)
-import Node.FS.Aff as FS.Aff
 import Node.Path as Path
 import Node.Process as Process
 import Spago.Cmd as Cmd
@@ -357,5 +356,3 @@ spec = Spec.describe "monorepo" do
     res <- Cmd.exec "git" args opts
     res # shouldBeSuccess
     pure $ Cmd.getStdout res
-
-  rmRf dir = liftAff $ FS.Aff.rm' dir { force: true, recursive: true, maxRetries: 5, retryDelay: 1000 }
diff --git a/test/Spago/Publish.purs b/test/Spago/Publish.purs
index 16c8626de..36bdd247c 100644
--- a/test/Spago/Publish.purs
+++ b/test/Spago/Publish.purs
@@ -2,6 +2,9 @@ module Test.Spago.Publish (spec) where
 
 import Test.Prelude
 
+import Data.String as String
+import Data.String.Regex as Regex
+import Data.String.Regex.Flags as RF
 import Node.Platform as Platform
 import Node.Process as Process
 import Spago.Cmd as Cmd
@@ -47,7 +50,7 @@ spec = Spec.around withTempDir do
       FS.copyFile { src: fixture "publish.purs", dst: "src/Main.purs" }
      spago [ "build" ] >>= shouldBeSuccess
       doTheGitThing
-      git [ "remote", "set-url", "origin", "git@github.com:purescript/bbb.git" ] >>= shouldBeSuccess
+      git [ "remote", "set-url", "origin", "git@github.com:purescript/bbb.git" ]
       spago [ "publish", "--offline" ] >>= shouldBeFailureErr (fixture "publish-invalid-location.txt")
 
    Spec.it "fails if a core dependency is not in the registry" \{ spago, fixture } -> do
@@ -74,18 +77,68 @@ spec = Spec.around withTempDir do
       spago [ "fetch" ] >>= shouldBeSuccess
       spago [ "publish", "--offline" ] >>= shouldBeFailureErr (fixture "publish.txt")
 
+    Spec.it "#1110 installs versions of packages that are returned by the registry solver, but not present in cache" \{ spago, fixture } -> do
+      let
+        shouldBeFailureErr' file = checkOutputs'
+          { stdoutFile: Nothing
+          , stderrFile: Just file
+          , result: isLeft
+          , sanitize:
+              String.trim
+                >>> withForwardSlashes
+                >>> String.replaceAll (String.Pattern "\r\n") (String.Replacement "\n")
+                >>> Regex.replace buildOrderRegex "[x of 3] Compiling module-name"
+          }
+
+        -- We have to ignore lines like "[1 of 3] Compiling Effect.Console" when
+        -- comparing output, because the compiler will always compile in
+        -- different order, depending on how the system resources happened to
+        -- align at the moment of the test run.
+        buildOrderRegex = unsafeFromRight $ Regex.regex
+          "\\[\\d of 3\\] Compiling (Effect\\.Console|Effect\\.Class\\.Console|Lib)" RF.global
+
+      FS.copyTree { src: fixture "publish/1110-solver-different-version", dst: "." }
+      spago [ "build" ] >>= shouldBeSuccess
+      doTheGitThing
+      spago [ "fetch" ] >>= shouldBeSuccess
+
+      -- The local `spago.yaml` specifies `console: 6.0.0` in `extraPackages`,
+      -- so that's what should be in local cache after running `fetch`.
+      -- Importantly, `console-6.1.0` should not be there yet.
+      FS.exists ".spago/p/console-6.0.0" >>= (_ `shouldEqual` true)
+      FS.exists ".spago/p/console-6.1.0" >>= (_ `shouldEqual` false)
+
+      spago [ "publish", "--offline" ] >>= shouldBeFailureErr' (fixture "publish/1110-solver-different-version/expected-stderr.txt")
+
+      -- When `publish` runs, it uses the registry solver, which returns
+      -- `console-6.1.0` version, so `publish` should fetch that into local
+      -- cache and build with it.
+      FS.exists ".spago/p/console-6.1.0" >>= (_ `shouldEqual` true)
+
+      -- Now screw up the `console-6.1.0` package in the local cache, so that it
+      -- doesn't compile anymore, and check that the relevant compile error
+      -- happens on publish.
+      FS.unlink ".spago/p/console-6.1.0/src/Effect/Console.js"
+      rmRf ".spago/p/console-6.1.0/output"
+      spago [ "publish", "--offline" ] >>= shouldBeFailureErr' (fixture "publish/1110-solver-different-version/failure-stderr.txt")
+
 doTheGitThing :: Aff Unit
 doTheGitThing = do
-  git [ "init" ] >>= shouldBeSuccess
-  git [ "config", "user.name", "test-user" ] >>= shouldBeSuccess
-  git [ "config", "user.email", "test-user@aol.com" ] >>= shouldBeSuccess
-  git [ "config", "commit.gpgSign", "false" ] >>= shouldBeSuccess
-  git [ "config", "tag.gpgSign", "false" ] >>= shouldBeSuccess
-  git [ "add", "." ] >>= shouldBeSuccess
-  git [ "commit", "-m", "first" ] >>= shouldBeSuccess
-  git [ "tag", "v0.0.1" ] >>= shouldBeSuccess
-  git [ "remote", "add", "origin", "git@github.com:purescript/aaa.git" ] >>= shouldBeSuccess
-
-git :: Array String -> Aff (Either ExecResult ExecResult)
-git args = Cmd.exec "git" args
-  $ Cmd.defaultExecOptions { pipeStdout = false, pipeStderr = false, pipeStdin = StdinNewPipe }
+  git [ "init" ]
+  git [ "config", "user.name", "test-user" ]
+  git [ "config", "user.email", "test-user@aol.com" ]
+  git [ "config", "commit.gpgSign", "false" ]
+  git [ "config", "tag.gpgSign", "false" ]
+  git [ "add", "." ]
+  git [ "commit", "-m", "first" ]
+  git [ "tag", "v0.0.1" ]
+  git [ "remote", "add", "origin", "git@github.com:purescript/aaa.git" ]
+
+git :: Array String -> Aff Unit
+git = git' Nothing
+
+git' :: Maybe FilePath -> Array String -> Aff Unit
+git' cwd args =
+  Cmd.exec "git" args
+    (Cmd.defaultExecOptions { pipeStdout = false, pipeStderr = false, pipeStdin = StdinNewPipe, cwd = cwd })
+    >>= shouldBeSuccess