From 1daafe68a47377c4619503be93a06c7dc05c51d0 Mon Sep 17 00:00:00 2001 From: Martin Schaef Date: Thu, 27 Jan 2022 12:58:52 -0500 Subject: [PATCH] Initial commit (#1) Initial commit of aws-codeguru-cli --- .github/workflows/guru-reviewer.yml | 52 ++++ .github/workflows/java-compatible.yml | 32 +++ .github/workflows/self-test-and-release.yml | 57 ++++ .gitignore | 8 + README.md | 106 +++++++- build.gradle | 94 +++++++ gradle/wrapper/gradle-wrapper.jar | Bin 0 -> 59203 bytes gradle/wrapper/gradle-wrapper.properties | 5 + gradlew | 185 +++++++++++++ gradlew.bat | 89 ++++++ settings.gradle | 3 + .../com/amazonaws/gurureviewercli/Main.java | 256 ++++++++++++++++++ .../adapter/ArtifactAdapter.java | 133 +++++++++ .../adapter/AssociationAdapter.java | 166 ++++++++++++ .../gurureviewercli/adapter/GitAdapter.java | 135 +++++++++ .../adapter/ResultsAdapter.java | 135 +++++++++ .../gurureviewercli/adapter/ScanAdapter.java | 196 ++++++++++++++ .../exceptions/GuruCliException.java | 26 ++ .../gurureviewercli/model/Configuration.java | 45 +++ .../gurureviewercli/model/ErrorCodes.java | 31 +++ .../gurureviewercli/model/GitMetaData.java | 31 +++ .../gurureviewercli/model/Recommendation.java | 55 ++++ .../gurureviewercli/model/ScanMetaData.java | 35 +++ .../gurureviewercli/util/JsonUtil.java | 45 +++ .../amazonaws/gurureviewercli/util/Log.java | 58 ++++ .../gurureviewercli/util/ZipUtils.java | 102 +++++++ src/main/resources/log4j2.properties | 18 ++ .../adapter/ArtifactAdapterTest.java | 67 +++++ .../adapter/AssociationAdapterTest.java | 188 +++++++++++++ .../adapter/GitAdapterTest.java | 116 ++++++++ .../adapter/ResultsAdapterTest.java | 28 ++ .../adapter/ScanAdapterTest.java | 77 ++++++ test-data/fresh-repo-no-remote/git/HEAD | 1 + test-data/fresh-repo-no-remote/git/config | 7 + .../fresh-repo-no-remote/git/description | 1 + .../git/hooks/applypatch-msg.sample | 15 + .../git/hooks/commit-msg.sample | 24 ++ .../git/hooks/fsmonitor-watchman.sample | 114 ++++++++ .../git/hooks/post-update.sample | 8 + .../git/hooks/pre-applypatch.sample | 14 + .../git/hooks/pre-commit.sample | 49 ++++ .../git/hooks/pre-merge-commit.sample | 13 + .../git/hooks/pre-push.sample | 53 ++++ .../git/hooks/pre-rebase.sample | 169 ++++++++++++ .../git/hooks/pre-receive.sample | 24 ++ .../git/hooks/prepare-commit-msg.sample | 42 +++ .../git/hooks/update.sample | 128 +++++++++ .../fresh-repo-no-remote/git/info/exclude | 6 + test-data/fresh-repo-with-remote/git/HEAD | 1 + test-data/fresh-repo-with-remote/git/config | 10 + .../fresh-repo-with-remote/git/description | 1 + .../git/hooks/applypatch-msg.sample | 15 + .../git/hooks/commit-msg.sample | 24 ++ .../git/hooks/fsmonitor-watchman.sample | 114 ++++++++ .../git/hooks/post-update.sample | 8 + .../git/hooks/pre-applypatch.sample | 14 + .../git/hooks/pre-commit.sample | 49 ++++ .../git/hooks/pre-merge-commit.sample | 13 + .../git/hooks/pre-push.sample | 53 ++++ .../git/hooks/pre-rebase.sample | 169 ++++++++++++ .../git/hooks/pre-receive.sample | 24 ++ .../git/hooks/prepare-commit-msg.sample | 42 +++ .../git/hooks/update.sample | 128 +++++++++ .../fresh-repo-with-remote/git/info/exclude | 6 + test-data/one-commit/git/COMMIT_EDITMSG | 1 + test-data/one-commit/git/HEAD | 1 + test-data/one-commit/git/config | 10 + test-data/one-commit/git/description | 1 + .../git/hooks/applypatch-msg.sample | 15 + .../one-commit/git/hooks/commit-msg.sample | 24 ++ .../git/hooks/fsmonitor-watchman.sample | 114 ++++++++ .../one-commit/git/hooks/post-update.sample | 8 + .../git/hooks/pre-applypatch.sample | 14 
+ .../one-commit/git/hooks/pre-commit.sample | 49 ++++ .../git/hooks/pre-merge-commit.sample | 13 + .../one-commit/git/hooks/pre-push.sample | 53 ++++ .../one-commit/git/hooks/pre-rebase.sample | 169 ++++++++++++ .../one-commit/git/hooks/pre-receive.sample | 24 ++ .../git/hooks/prepare-commit-msg.sample | 42 +++ test-data/one-commit/git/hooks/update.sample | 128 +++++++++ test-data/one-commit/git/index | Bin 0 -> 137 bytes test-data/one-commit/git/info/exclude | 6 + test-data/one-commit/git/logs/HEAD | 1 + .../one-commit/git/logs/refs/heads/master | 1 + .../69/e978a2558e8b47b25058af8e10482831feded6 | Bin 0 -> 53 bytes .../9a/71f81a4b4754b686fd37cbb3c72d0250d344aa | Bin 0 -> 29 bytes .../cd/b0fcad7400610b1d1797a326a89414525160fe | 2 + test-data/one-commit/git/refs/heads/master | 1 + test-data/one-commit/test.txt | 2 + .../recommendations/recommendations.json | 62 +++++ test-data/two-commits/git/COMMIT_EDITMSG | 1 + test-data/two-commits/git/HEAD | 1 + test-data/two-commits/git/config | 10 + test-data/two-commits/git/description | 1 + .../git/hooks/applypatch-msg.sample | 15 + .../two-commits/git/hooks/commit-msg.sample | 24 ++ .../git/hooks/fsmonitor-watchman.sample | 114 ++++++++ .../two-commits/git/hooks/post-update.sample | 8 + .../git/hooks/pre-applypatch.sample | 14 + .../two-commits/git/hooks/pre-commit.sample | 49 ++++ .../git/hooks/pre-merge-commit.sample | 13 + .../two-commits/git/hooks/pre-push.sample | 53 ++++ .../two-commits/git/hooks/pre-rebase.sample | 169 ++++++++++++ .../two-commits/git/hooks/pre-receive.sample | 24 ++ .../git/hooks/prepare-commit-msg.sample | 42 +++ test-data/two-commits/git/hooks/update.sample | 128 +++++++++ test-data/two-commits/git/index | Bin 0 -> 137 bytes test-data/two-commits/git/info/exclude | 6 + test-data/two-commits/git/logs/HEAD | 2 + .../two-commits/git/logs/refs/heads/master | 2 + .../69/e978a2558e8b47b25058af8e10482831feded6 | Bin 0 -> 53 bytes .../7f/112b196b963ff72675febdbb97da5204f9497e | Bin 0 -> 27 bytes .../8e/ce465b7ecf8337bf767c9602d21bb92f2fad8a | 2 + .../96/f7fe170993c91f42a92e4ce8b1663d18ae7f54 | Bin 0 -> 53 bytes .../9a/71f81a4b4754b686fd37cbb3c72d0250d344aa | Bin 0 -> 29 bytes .../cd/b0fcad7400610b1d1797a326a89414525160fe | 2 + test-data/two-commits/git/refs/heads/master | 1 + test-data/two-commits/test.txt | 2 + 118 files changed, 5331 insertions(+), 6 deletions(-) create mode 100644 .github/workflows/guru-reviewer.yml create mode 100644 .github/workflows/java-compatible.yml create mode 100644 .github/workflows/self-test-and-release.yml create mode 100644 .gitignore create mode 100644 build.gradle create mode 100644 gradle/wrapper/gradle-wrapper.jar create mode 100644 gradle/wrapper/gradle-wrapper.properties create mode 100755 gradlew create mode 100644 gradlew.bat create mode 100644 settings.gradle create mode 100644 src/main/java/com/amazonaws/gurureviewercli/Main.java create mode 100644 src/main/java/com/amazonaws/gurureviewercli/adapter/ArtifactAdapter.java create mode 100644 src/main/java/com/amazonaws/gurureviewercli/adapter/AssociationAdapter.java create mode 100644 src/main/java/com/amazonaws/gurureviewercli/adapter/GitAdapter.java create mode 100644 src/main/java/com/amazonaws/gurureviewercli/adapter/ResultsAdapter.java create mode 100644 src/main/java/com/amazonaws/gurureviewercli/adapter/ScanAdapter.java create mode 100644 src/main/java/com/amazonaws/gurureviewercli/exceptions/GuruCliException.java create mode 100644 src/main/java/com/amazonaws/gurureviewercli/model/Configuration.java create mode 100644 
src/main/java/com/amazonaws/gurureviewercli/model/ErrorCodes.java create mode 100644 src/main/java/com/amazonaws/gurureviewercli/model/GitMetaData.java create mode 100644 src/main/java/com/amazonaws/gurureviewercli/model/Recommendation.java create mode 100644 src/main/java/com/amazonaws/gurureviewercli/model/ScanMetaData.java create mode 100644 src/main/java/com/amazonaws/gurureviewercli/util/JsonUtil.java create mode 100644 src/main/java/com/amazonaws/gurureviewercli/util/Log.java create mode 100644 src/main/java/com/amazonaws/gurureviewercli/util/ZipUtils.java create mode 100644 src/main/resources/log4j2.properties create mode 100644 src/test/java/com/amazonaws/gurureviewercli/adapter/ArtifactAdapterTest.java create mode 100644 src/test/java/com/amazonaws/gurureviewercli/adapter/AssociationAdapterTest.java create mode 100644 src/test/java/com/amazonaws/gurureviewercli/adapter/GitAdapterTest.java create mode 100644 src/test/java/com/amazonaws/gurureviewercli/adapter/ResultsAdapterTest.java create mode 100644 src/test/java/com/amazonaws/gurureviewercli/adapter/ScanAdapterTest.java create mode 100644 test-data/fresh-repo-no-remote/git/HEAD create mode 100644 test-data/fresh-repo-no-remote/git/config create mode 100644 test-data/fresh-repo-no-remote/git/description create mode 100755 test-data/fresh-repo-no-remote/git/hooks/applypatch-msg.sample create mode 100755 test-data/fresh-repo-no-remote/git/hooks/commit-msg.sample create mode 100755 test-data/fresh-repo-no-remote/git/hooks/fsmonitor-watchman.sample create mode 100755 test-data/fresh-repo-no-remote/git/hooks/post-update.sample create mode 100755 test-data/fresh-repo-no-remote/git/hooks/pre-applypatch.sample create mode 100755 test-data/fresh-repo-no-remote/git/hooks/pre-commit.sample create mode 100755 test-data/fresh-repo-no-remote/git/hooks/pre-merge-commit.sample create mode 100755 test-data/fresh-repo-no-remote/git/hooks/pre-push.sample create mode 100755 test-data/fresh-repo-no-remote/git/hooks/pre-rebase.sample create mode 100755 test-data/fresh-repo-no-remote/git/hooks/pre-receive.sample create mode 100755 test-data/fresh-repo-no-remote/git/hooks/prepare-commit-msg.sample create mode 100755 test-data/fresh-repo-no-remote/git/hooks/update.sample create mode 100644 test-data/fresh-repo-no-remote/git/info/exclude create mode 100644 test-data/fresh-repo-with-remote/git/HEAD create mode 100644 test-data/fresh-repo-with-remote/git/config create mode 100644 test-data/fresh-repo-with-remote/git/description create mode 100755 test-data/fresh-repo-with-remote/git/hooks/applypatch-msg.sample create mode 100755 test-data/fresh-repo-with-remote/git/hooks/commit-msg.sample create mode 100755 test-data/fresh-repo-with-remote/git/hooks/fsmonitor-watchman.sample create mode 100755 test-data/fresh-repo-with-remote/git/hooks/post-update.sample create mode 100755 test-data/fresh-repo-with-remote/git/hooks/pre-applypatch.sample create mode 100755 test-data/fresh-repo-with-remote/git/hooks/pre-commit.sample create mode 100755 test-data/fresh-repo-with-remote/git/hooks/pre-merge-commit.sample create mode 100755 test-data/fresh-repo-with-remote/git/hooks/pre-push.sample create mode 100755 test-data/fresh-repo-with-remote/git/hooks/pre-rebase.sample create mode 100755 test-data/fresh-repo-with-remote/git/hooks/pre-receive.sample create mode 100755 test-data/fresh-repo-with-remote/git/hooks/prepare-commit-msg.sample create mode 100755 test-data/fresh-repo-with-remote/git/hooks/update.sample create mode 100644 
test-data/fresh-repo-with-remote/git/info/exclude create mode 100644 test-data/one-commit/git/COMMIT_EDITMSG create mode 100644 test-data/one-commit/git/HEAD create mode 100644 test-data/one-commit/git/config create mode 100644 test-data/one-commit/git/description create mode 100755 test-data/one-commit/git/hooks/applypatch-msg.sample create mode 100755 test-data/one-commit/git/hooks/commit-msg.sample create mode 100755 test-data/one-commit/git/hooks/fsmonitor-watchman.sample create mode 100755 test-data/one-commit/git/hooks/post-update.sample create mode 100755 test-data/one-commit/git/hooks/pre-applypatch.sample create mode 100755 test-data/one-commit/git/hooks/pre-commit.sample create mode 100755 test-data/one-commit/git/hooks/pre-merge-commit.sample create mode 100755 test-data/one-commit/git/hooks/pre-push.sample create mode 100755 test-data/one-commit/git/hooks/pre-rebase.sample create mode 100755 test-data/one-commit/git/hooks/pre-receive.sample create mode 100755 test-data/one-commit/git/hooks/prepare-commit-msg.sample create mode 100755 test-data/one-commit/git/hooks/update.sample create mode 100644 test-data/one-commit/git/index create mode 100644 test-data/one-commit/git/info/exclude create mode 100644 test-data/one-commit/git/logs/HEAD create mode 100644 test-data/one-commit/git/logs/refs/heads/master create mode 100644 test-data/one-commit/git/objects/69/e978a2558e8b47b25058af8e10482831feded6 create mode 100644 test-data/one-commit/git/objects/9a/71f81a4b4754b686fd37cbb3c72d0250d344aa create mode 100644 test-data/one-commit/git/objects/cd/b0fcad7400610b1d1797a326a89414525160fe create mode 100644 test-data/one-commit/git/refs/heads/master create mode 100644 test-data/one-commit/test.txt create mode 100644 test-data/recommendations/recommendations.json create mode 100644 test-data/two-commits/git/COMMIT_EDITMSG create mode 100644 test-data/two-commits/git/HEAD create mode 100644 test-data/two-commits/git/config create mode 100644 test-data/two-commits/git/description create mode 100755 test-data/two-commits/git/hooks/applypatch-msg.sample create mode 100755 test-data/two-commits/git/hooks/commit-msg.sample create mode 100755 test-data/two-commits/git/hooks/fsmonitor-watchman.sample create mode 100755 test-data/two-commits/git/hooks/post-update.sample create mode 100755 test-data/two-commits/git/hooks/pre-applypatch.sample create mode 100755 test-data/two-commits/git/hooks/pre-commit.sample create mode 100755 test-data/two-commits/git/hooks/pre-merge-commit.sample create mode 100755 test-data/two-commits/git/hooks/pre-push.sample create mode 100755 test-data/two-commits/git/hooks/pre-rebase.sample create mode 100755 test-data/two-commits/git/hooks/pre-receive.sample create mode 100755 test-data/two-commits/git/hooks/prepare-commit-msg.sample create mode 100755 test-data/two-commits/git/hooks/update.sample create mode 100644 test-data/two-commits/git/index create mode 100644 test-data/two-commits/git/info/exclude create mode 100644 test-data/two-commits/git/logs/HEAD create mode 100644 test-data/two-commits/git/logs/refs/heads/master create mode 100644 test-data/two-commits/git/objects/69/e978a2558e8b47b25058af8e10482831feded6 create mode 100644 test-data/two-commits/git/objects/7f/112b196b963ff72675febdbb97da5204f9497e create mode 100644 test-data/two-commits/git/objects/8e/ce465b7ecf8337bf767c9602d21bb92f2fad8a create mode 100644 test-data/two-commits/git/objects/96/f7fe170993c91f42a92e4ce8b1663d18ae7f54 create mode 100644 
test-data/two-commits/git/objects/9a/71f81a4b4754b686fd37cbb3c72d0250d344aa create mode 100644 test-data/two-commits/git/objects/cd/b0fcad7400610b1d1797a326a89414525160fe create mode 100644 test-data/two-commits/git/refs/heads/master create mode 100644 test-data/two-commits/test.txt diff --git a/.github/workflows/guru-reviewer.yml b/.github/workflows/guru-reviewer.yml new file mode 100644 index 0000000..d3f1f20 --- /dev/null +++ b/.github/workflows/guru-reviewer.yml @@ -0,0 +1,52 @@ +# Created using https://github.com/aws-samples/aws-codeguru-reviewer-cicd-cdk-sample +name: Analyze with CodeGuru Reviewer + +on: [push] + +permissions: + id-token: write + contents: read + security-events: write + +jobs: + build: + name: Run CodeGuru Reviewer + runs-on: ubuntu-latest + steps: + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v1 + continue-on-error: true + id: iam-role + with: + role-to-assume: arn:aws:iam::048169001733:role/GuruGitHubCICDRole + aws-region: us-west-2 + + - uses: actions/checkout@v2 + if: steps.iam-role.outcome == 'success' + with: + fetch-depth: 0 + + - name: Set up JDK 1.8 + if: steps.iam-role.outcome == 'success' + uses: actions/setup-java@v1 + with: + java-version: 1.8 + - name: Build project + if: steps.iam-role.outcome == 'success' + run: ./gradlew clean installDist + + - name: CodeGuru Reviewer + if: steps.iam-role.outcome == 'success' + id: codeguru + uses: aws-actions/codeguru-reviewer@v1.1 + continue-on-error: false + with: + s3_bucket: codeguru-reviewer-build-artifacts-048169001733-us-west-2 + build_path: ./build/libs + + - name: Upload review result + if: steps.iam-role.outcome == 'success' && steps.codeguru.outcome == 'success' + continue-on-error: true + uses: github/codeql-action/upload-sarif@v1 + with: + sarif_file: codeguru-results.sarif.json diff --git a/.github/workflows/java-compatible.yml b/.github/workflows/java-compatible.yml new file mode 100644 index 0000000..59fd9a5 --- /dev/null +++ b/.github/workflows/java-compatible.yml @@ -0,0 +1,32 @@ + +name: Build with different JDKs + +on: + push: + branches: + - main + +permissions: + id-token: write + contents: write + +jobs: + build: + runs-on: ubuntu-latest + strategy: + matrix: + java: [ '8', '11', '16', '17' ] + name: Java ${{ matrix.Java }} build + + steps: + - uses: actions/checkout@v2 + - name: Setup java + uses: actions/setup-java@v2 + with: + distribution: 'temurin' + java-version: ${{ matrix.java }} + - name: Build project + run: ./gradlew clean installDist + - name: Run cli + run: ./build/install/aws-codeguru-cli/bin/aws-codeguru-cli + diff --git a/.github/workflows/self-test-and-release.yml b/.github/workflows/self-test-and-release.yml new file mode 100644 index 0000000..fa35adc --- /dev/null +++ b/.github/workflows/self-test-and-release.yml @@ -0,0 +1,57 @@ +# Created using https://github.com/aws-samples/aws-codeguru-reviewer-cicd-cdk-sample +name: Self-test and release + +on: + push: + branches: + - main + +permissions: + id-token: write + contents: write + +jobs: + build: + name: Build, self-test, release + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + + - name: Set up JDK 1.8 + uses: actions/setup-java@v1 + with: + java-version: 1.8 + - name: Build project + run: ./gradlew clean installDist distZip + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v1 + continue-on-error: true + id: iam-role + with: + role-to-assume: arn:aws:iam::048169001733:role/GuruGitHubCICDRole + aws-region: 
us-west-2 + + - name: Self Test + if: steps.iam-role.outcome == 'success' + run: | + ./build/install/aws-codeguru-cli/bin/aws-codeguru-cli --region us-west-2 -r . -s src/main/java -b build/libs -c HEAD^:HEAD --no-prompt + + - name: Get Release Version + run: | + echo "::set-output name=TAG_NAME::$(./gradlew properties -q | grep "version:" | awk '{print $2}')" + id: version + + - name: Release + uses: softprops/action-gh-release@v1 + with: + tag_name: ${{ steps.version.outputs.TAG_NAME }} + prerelease: false + draft: false + body: > + Version ${{ steps.version.outputs.TAG_NAME }} of the AWS CodeGuru Reviewer CLI. + files: | + ./build/distributions/aws-codeguru-cli.zip + LICENSE \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..a02e891 --- /dev/null +++ b/.gitignore @@ -0,0 +1,8 @@ +build +.gradle +.vscode +.guru +code-guru +.DS_Store +.idea +test-output* diff --git a/README.md b/README.md index 847260c..b93d186 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,106 @@ -## My Project +# CodeGuru Reviewer CLI Wrapper +Simple CLI wrapper for CodeGuru Reviewer that provides a one-line command to scan a local clone of a repository and +receive results. This CLI wraps the [AWS CLI](https://aws.amazon.com/cli/) commands to communicate with +[AWS CodeGuru Reviewer](https://aws.amazon.com/codeguru/). Using CodeGuru Reviewer may generate metering fees +in your AWS account. See the [CodeGuru Reviewer pricing](https://aws.amazon.com/codeguru/pricing/) for details. -TODO: Fill this README out! +### Before you start -Be sure to: +Before we start, let's make sure that you can access an AWS account from your computer. +Follow the credential setup process for the [AWS CLI](https://github.com/aws/aws-cli#configuration). +The credentials must have at least the following permissions: -* Change the title in this README -* Edit your repository description on GitHub +```json +{ + "Version": "2012-10-17", + "Statement": [ + { + "Action": [ + "codeguru-reviewer:ListRepositoryAssociations", + "codeguru-reviewer:AssociateRepository", + "codeguru-reviewer:DescribeRepositoryAssociation", + "codeguru-reviewer:CreateCodeReview", + "codeguru-reviewer:DescribeCodeReview", + "codeguru-reviewer:ListRecommendations" + ], + "Resource": "*", + "Effect": "Allow" + }, + { + "Action": [ + "s3:CreateBucket", + "s3:GetBucket*", + "s3:List*", + "s3:GetObject", + "s3:PutObject", + "s3:DeleteObject" + ], + "Resource": [ + "arn:aws:s3:::codeguru-reviewer-cli-*", + "arn:aws:s3:::codeguru-reviewer-cli-*/*" + ], + "Effect": "Allow" + } + ] +} +``` + + +### Download the CLI and scan an Example + +You can download the [aws-codeguru-cli](releases/download/latest/aws-codeguru-cli.zip) from the releases section. +Download the latest version and add it to your `PATH`: +``` +curl -OL https://github.com/martinschaef/aws-codeguru-cli/releases/download/latest/aws-codeguru-cli.zip +unzip aws-codeguru-cli.zip +export PATH=$PATH:./aws-codeguru-cli/bin +``` + +Now, let's download an example project (requires Maven): +``` +git clone https://github.com/aws-samples/amazon-codeguru-reviewer-sample-app +cd amazon-codeguru-reviewer-sample-app +mvn clean compile +``` +After compiling, we can run CodeGuru with: +``` +aws-codeguru-cli --root-dir ./ --build target/classes --src src --output ./output +open output/codeguru-report.html +``` +where `--root-dir .` specifies the root of the project that we want to analyze. The option `--build target/classes` states that the build artifacts are located under `./target/classes` and `--src` says that we only want to analyze source files that are +located under `./src`. The option `--output ./output` specifies where CodeGuru should write its recommendations. By default, +CodeGuru produces a JSON and an HTML report. + +You can provide your own bucket name using the `--bucket-name` option. Note that, currently, CodeGuru Reviewer only +accepts bucket names that start with the prefix `codeguru-reviewer-`.
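+For example, assuming you want to use a bucket named `codeguru-reviewer-my-example-bucket` (a placeholder; any name with the required prefix works), a sketch of the scan above with a custom bucket could look like:
+```
+aws-codeguru-cli --root-dir ./ --build target/classes --src src --output ./output --bucket-name codeguru-reviewer-my-example-bucket
+```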
+ +### Running from CI/CD + +You can use this CLI to run CodeGuru from inside your CI/CD pipeline. See [this action](.github/workflows/self-test-and-release.yml#L30-L41) as an example. First, you need credentials for a role with the permissions mentioned above. If you already scanned +the repository once with the CLI, the S3 bucket has been created, and you do not need the `s3:CreateBucket` permission anymore. + +Then you can run the CLI in non-interactive mode using the `--no-prompt` option. Further, you can specify a region and +AWS profile using the `--region` and `--profile` options as needed: +``` +aws-codeguru-cli --region [BUCKET REGION] --no-prompt -r ./ ... +``` +Obtaining the commit range works differently for different CI/CD providers. For example, GitHub provides the relevant +commits via environment variables such as `${{ github.event.before }}` and `${{ github.event.after }}`.
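+In a GitHub Actions workflow, for example, these variables can be passed directly to the `-c` option. A minimal sketch of such a step's `run:` command, reusing the region and the source and build paths from the self-test workflow above (adjust them for your own project):
+```
+aws-codeguru-cli --region us-west-2 --no-prompt -r ./ -s src/main/java -b build/libs -c ${{ github.event.before }}:${{ github.event.after }}
+```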
+ +### Build from Source + +To build the project, you need Java 8 or later. Check out this repository and run: +``` +./gradlew installDist +``` +and now run your local build with: +``` +./build/install/aws-codeguru-cli/bin/aws-codeguru-cli +``` +you can run a self-test with: +``` +./build/install/aws-codeguru-cli/bin/aws-codeguru-cli -r . -s src/main/java -b build/libs -c HEAD^:HEAD +``` ## Security @@ -14,4 +109,3 @@ See [CONTRIBUTING](CONTRIBUTING.md#security-issue-notifications) for more inform ## License This project is licensed under the Apache-2.0 License. - diff --git a/build.gradle b/build.gradle new file mode 100644 index 0000000..02b4726 --- /dev/null +++ b/build.gradle @@ -0,0 +1,94 @@ + +buildscript { + repositories { + maven { + url "https://plugins.gradle.org/m2/" + } + } + dependencies { + classpath "com.github.spotbugs.snom:spotbugs-gradle-plugin:5.0.3" + } +} + +/* + Applies core Gradle plugins, which are ones built into Gradle itself. +*/ +plugins { + // Java for compile and unit test of Java source files. Read more at: + // https://docs.gradle.org/current/userguide/java_plugin.html + id 'java' + + // Checkstyle for style checks and reports on Java source files. Read more at: + // https://docs.gradle.org/current/userguide/checkstyle_plugin.html + id 'checkstyle' + + id 'application' +} + +// SpotBugs for quality checks and reports of source files. Read more at: +// https://spotbugs.readthedocs.io/en/stable/gradle.html +apply plugin: 'com.github.spotbugs' + +checkstyle { + sourceSets = [sourceSets.main] + ignoreFailures = false +} + +spotbugs { + ignoreFailures.set(false) +} + +repositories { + mavenCentral() +} + +defaultTasks('installDist') + +version = '0.0.1' +jar.archiveName = "${jar.baseName}.${jar.extension}" +distZip.archiveName = "${jar.baseName}.zip" + +application { + mainClass = 'com.amazonaws.gurureviewercli.Main' +} + +dependencies { + implementation 'software.amazon.awssdk:s3:2.17.113' + implementation 'software.amazon.awssdk:sts:2.17.113' + implementation 'software.amazon.awssdk:codegurureviewer:2.17.113' + implementation 'software.amazon.awssdk:sdk-core:2.17.113' + + implementation 'com.fasterxml.jackson.core:jackson-databind:2.13.0' + implementation 'com.fasterxml.jackson.core:jackson-core:2.13.0' + + implementation 'com.beust:jcommander:1.81' + + implementation 'org.eclipse.jgit:org.eclipse.jgit:5.13.0.202109080827-r' + + implementation 'org.apache.logging.log4j:log4j-core:2.17.1' + implementation 'org.slf4j:slf4j-nop:2.0.0-alpha5' + + // For Java 9+ + implementation 'javax.xml.bind:jaxb-api:2.3.1' + + implementation 'org.commonmark:commonmark:0.18.1' + + implementation 'org.beryx:text-io:3.4.1' + + implementation 'com.google.code.findbugs:jsr305:3.0.2' + + compileOnly 'org.projectlombok:lombok:1.18.22' + annotationProcessor 'org.projectlombok:lombok:1.18.22' + + testCompileOnly 'org.projectlombok:lombok:1.18.22' + testAnnotationProcessor 'org.projectlombok:lombok:1.18.22' + + testImplementation 'org.junit.jupiter:junit-jupiter-api:5.8.2' + testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.8.2' + testImplementation 'org.mockito:mockito-junit-jupiter:4.2.0' +} + +test { + useJUnitPlatform() +} + diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 0000000000000000000000000000000000000000..e708b1c023ec8b20f512888fe07c5bd3ff77bb8f GIT binary patch literal 59203
z=arrL$ZqxuE{%yV)14Kd>k}j7pxZ6#$Dz8$@WV5p8kTqN<-7W)Q7Gt2{KoOPK_tZ| zf2WG~O5@{qPI+W<4f_;reuFVdO^5`ADC1!JQE|N`s3cq@(0WB!n0uh@*c{=LAd;~} zyGK@hbF-Oo+!nN)@i*O(`@FA#u?o=~e{`4O#5}z&=UkU*50fOrzi11D^&FOqe>wii z?*k+2|EcUs;Gx{!@KBT~>PAwLrIDT7Th=Utu?~?np@t^gFs?zgX=D${RwOY^WGh-+ z+#4$066ISh8eYW#FXWp~S`<*%O^ZuItL1Tyqt8#tZ zY120E;^VG`!lZn&3sPd$RkdHpU#|w+bYV)pJC|SH9g%|5IkxVTQcBA4CL0}$&}ef@ zW^Vtj%M;;_1xxP9x#ex17&4N*{ksO*_4O}xYu(p*JkL#yr}@7b)t5X?%CY<+s5_MJ zuiqt+N_;A(_)%lumoyRFixWa-M7qK_9s6<1X?JDa9fP!+_6u~~M$5L=ipB=7(j#f< zZ34J%=bs549%~_mA(|={uZNs_0?o7;-LBP(ZRnkd{-^|2|=4vUTmtByHL8 zEph`(LSEzQj68a+`d$V<45J7cyv^#|^|%fD#si1Nx!4NW*`l*{->HEWNh6-|g>-=r zXmQ|-i}Ku$ndUeHQ^&ieT!Lf}vf6GaqW9$DJ2NWrqwPY%%4nip$@vK$nRp*_C-v<| zuKz~ZyN&<%!NS26&x?jhy+@awJipMQ-8(X4#Ae5??U<1QMt1l9R=w9fAnEF}NYu$2 z>6}Vkc zIb*A?G*z8^IvibmBKn_u^5&T_1oey0gZS2~obf(#xk=erZGTEdQnt3DMGM+0oPwss zj5zXD;(oWhB_T@~Ig#9@v)AKtXu3>Inmgf@A|-lD-1U>cNyl3h?ADD9)GG4}zUGPk zZzaXe!~Kf?<~@$G?Uql3t8jy9{2!doq4=J}j9ktTxss{p6!9UdjyDERlA*xZ!=Q)KDs5O)phz>Vq3BNGoM(H|=1*Q4$^2fTZw z(%nq1P|5Rt81}SYJpEEzMPl5VJsV5&4e)ZWKDyoZ>1EwpkHx-AQVQc8%JMz;{H~p{=FXV>jIxvm4X*qv52e?Y-f%DJ zxEA165GikEASQ^fH6K#d!Tpu2HP{sFs%E=e$gYd$aj$+xue6N+Wc(rAz~wUsk2`(b z8Kvmyz%bKQxpP}~baG-rwYcYCvkHOi zlkR<=>ZBTU*8RF_d#Bl@zZsRIhx<%~Z@Z=ik z>adw3!DK(8R|q$vy{FTxw%#xliD~6qXmY^7_9kthVPTF~Xy1CfBqbU~?1QmxmU=+k z(ggxvEuA;0e&+ci-zQR{-f7aO{O(Pz_OsEjLh_K>MbvoZ4nxtk5u{g@nPv)cgW_R} z9}EA4K4@z0?7ue}Z(o~R(X&FjejUI2g~08PH1E4w>9o{)S(?1>Z0XMvTb|;&EuyOE zGvWNpYX)Nv<8|a^;1>bh#&znEcl-r!T#pn= z4$?Yudha6F%4b>*8@=BdtXXY4N+`U4Dmx$}>HeVJk-QdTG@t!tVT#0(LeV0gvqyyw z2sEp^9eY0N`u10Tm4n8No&A=)IeEC|gnmEXoNSzu!1<4R<%-9kY_8~5Ej?zRegMn78wuMs#;i&eUA0Zk_RXQ3b&TT} z;SCI=7-FUB@*&;8|n>(_g^HGf3@QODE3LpmX~ELnymQm{Sx9xrKS zK29p~?v@R$0=v6Dr5aW>-!{+h@?Q58|Kz8{{W`%J+lDAdb&M5VHrX_mDY;1-JLnf)ezmPau$)1;=`-FU=-r-83tX=C`S#}GZufju zQ>sXNT0Ny=k@nc%cFnvA_i4SC)?_ORXHq8B4D%el1uPX`c~uG#S1M7C+*MMqLw78E zhY2dI8@+N^qrMI1+;TUda(vGqGSRyU{Fnm`aqrr7bz42c5xsOO-~oZpkzorD1g}Y<6rk&3>PsSGy}W?MtqFky@A(X# zIuNZK0cK?^=;PUAu>j0#HtjbHCV*6?jzA&OoE$*Jlga*}LF`SF?WLhv1O|zqC<>*> zYB;#lsYKx0&kH@BFpW8n*yDcc6?;_zaJs<-jPSkCsSX-!aV=P5kUgF@Nu<{a%#K*F z134Q{9|YX7X(v$62_cY3^G%t~rD>Q0z@)1|zs)vjJ6Jq9;7#Ki`w+eS**En?7;n&7 zu==V3T&eFboN3ZiMx3D8qYc;VjFUk_H-WWCau(VFXSQf~viH0L$gwD$UfFHqNcgN`x}M+YQ6RnN<+@t>JUp#)9YOkqst-Ga?{FsDpEeX0(5v{0J~SEbWiL zXC2}M4?UH@u&|;%0y`eb33ldo4~z-x8zY!oVmV=c+f$m?RfDC35mdQ2E>Pze7KWP- z>!Bh<&57I+O_^s}9Tg^k)h7{xx@0a0IA~GAOt2yy!X%Q$1rt~LbTB6@Du!_0%HV>N zlf)QI1&gvERKwso23mJ!Ou6ZS#zCS5W`gxE5T>C#E|{i<1D35C222I33?Njaz`On7 zi<+VWFP6D{e-{yiN#M|Jgk<44u1TiMI78S5W`Sdb5f+{zu34s{CfWN7a3Cf^@L%!& zN$?|!!9j2c)j$~+R6n#891w-z8(!oBpL2K=+%a$r2|~8-(vQj5_XT`<0Ksf;oP+tz z9CObS!0m)Tgg`K#xBM8B(|Z)Wb&DYL{WTYv`;A=q6~Nnx2+!lTIXtj8J7dZE!P_{z z#f8w6F}^!?^KE#+ZDv+xd5O&3EmomZzsv?>E-~ygGum45fk!SBN&|eo1rKw^?aZJ4 E2O(~oYXATM literal 0 HcmV?d00001 diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 0000000..d2880ba --- /dev/null +++ b/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,5 @@ +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-7.3.2-bin.zip +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists diff --git a/gradlew b/gradlew new file mode 100755 index 0000000..4f906e0 --- /dev/null +++ b/gradlew @@ -0,0 +1,185 @@ +#!/usr/bin/env sh + +# +# Copyright 2015 the original author or authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +############################################################################## +## +## Gradle start up script for UN*X +## +############################################################################## + +# Attempt to set APP_HOME +# Resolve links: $0 may be a link +PRG="$0" +# Need this for relative symlinks. +while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG=`dirname "$PRG"`"/$link" + fi +done +SAVED="`pwd`" +cd "`dirname \"$PRG\"`/" >/dev/null +APP_HOME="`pwd -P`" +cd "$SAVED" >/dev/null + +APP_NAME="Gradle" +APP_BASE_NAME=`basename "$0"` + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD="maximum" + +warn () { + echo "$*" +} + +die () { + echo + echo "$*" + echo + exit 1 +} + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "`uname`" in + CYGWIN* ) + cygwin=true + ;; + Darwin* ) + darwin=true + ;; + MINGW* ) + msys=true + ;; + NONSTOP* ) + nonstop=true + ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD="java" + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then + MAX_FD_LIMIT=`ulimit -H -n` + if [ $? -eq 0 ] ; then + if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then + MAX_FD="$MAX_FD_LIMIT" + fi + ulimit -n $MAX_FD + if [ $? 
-ne 0 ] ; then + warn "Could not set maximum file descriptor limit: $MAX_FD" + fi + else + warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" + fi +fi + +# For Darwin, add options to specify how the application appears in the dock +if $darwin; then + GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" +fi + +# For Cygwin or MSYS, switch paths to Windows format before running java +if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then + APP_HOME=`cygpath --path --mixed "$APP_HOME"` + CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + + JAVACMD=`cygpath --unix "$JAVACMD"` + + # We build the pattern for arguments to be converted via cygpath + ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` + SEP="" + for dir in $ROOTDIRSRAW ; do + ROOTDIRS="$ROOTDIRS$SEP$dir" + SEP="|" + done + OURCYGPATTERN="(^($ROOTDIRS))" + # Add a user-defined pattern to the cygpath arguments + if [ "$GRADLE_CYGPATTERN" != "" ] ; then + OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" + fi + # Now convert the arguments - kludge to limit ourselves to /bin/sh + i=0 + for arg in "$@" ; do + CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` + CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option + + if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition + eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` + else + eval `echo args$i`="\"$arg\"" + fi + i=`expr $i + 1` + done + case $i in + 0) set -- ;; + 1) set -- "$args0" ;; + 2) set -- "$args0" "$args1" ;; + 3) set -- "$args0" "$args1" "$args2" ;; + 4) set -- "$args0" "$args1" "$args2" "$args3" ;; + 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; + 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; + esac +fi + +# Escape application args +save () { + for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done + echo " " +} +APP_ARGS=`save "$@"` + +# Collect all arguments for the java command, following the shell quoting and substitution rules +eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" + +exec "$JAVACMD" "$@" diff --git a/gradlew.bat b/gradlew.bat new file mode 100644 index 0000000..ac1b06f --- /dev/null +++ b/gradlew.bat @@ -0,0 +1,89 @@ +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. 
+@rem + +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto execute + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto execute + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! 
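+rem For illustration only (not part of the stock Gradle wrapper script), a caller that needs the
+rem script's own exit code could run something like:
+rem     set GRADLE_EXIT_CONSOLE=1
+rem     call gradlew.bat build
+rem With the variable set, the check below runs "exit 1" so the console session sees the script's failure code.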
+if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/settings.gradle b/settings.gradle new file mode 100644 index 0000000..93602bf --- /dev/null +++ b/settings.gradle @@ -0,0 +1,3 @@ +// Always define a settings file: +// https://docs.gradle.org/current/userguide/organizing_gradle_projects.html#always_define_a_settings_file +rootProject.name = 'aws-codeguru-cli' diff --git a/src/main/java/com/amazonaws/gurureviewercli/Main.java b/src/main/java/com/amazonaws/gurureviewercli/Main.java new file mode 100644 index 0000000..fa3ed79 --- /dev/null +++ b/src/main/java/com/amazonaws/gurureviewercli/Main.java @@ -0,0 +1,256 @@ +package com.amazonaws.gurureviewercli; + + +import java.net.URI; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import com.beust.jcommander.JCommander; +import com.beust.jcommander.Parameter; +import com.beust.jcommander.ParameterException; +import lombok.val; +import org.beryx.textio.TextIO; +import org.beryx.textio.system.SystemTextTerminal; +import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider; +import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider; +import software.amazon.awssdk.auth.credentials.ProfileCredentialsProvider; +import software.amazon.awssdk.core.exception.SdkClientException; +import software.amazon.awssdk.regions.Region; +import software.amazon.awssdk.services.codegurureviewer.CodeGuruReviewerClient; +import software.amazon.awssdk.services.codegurureviewer.model.RecommendationSummary; +import software.amazon.awssdk.services.s3.S3Client; +import software.amazon.awssdk.services.s3.model.DeleteObjectRequest; +import software.amazon.awssdk.services.sts.StsClient; + +import com.amazonaws.gurureviewercli.adapter.GitAdapter; +import com.amazonaws.gurureviewercli.adapter.ResultsAdapter; +import com.amazonaws.gurureviewercli.adapter.ScanAdapter; +import com.amazonaws.gurureviewercli.exceptions.GuruCliException; +import com.amazonaws.gurureviewercli.model.Configuration; +import com.amazonaws.gurureviewercli.model.ErrorCodes; +import com.amazonaws.gurureviewercli.model.GitMetaData; +import com.amazonaws.gurureviewercli.model.ScanMetaData; +import com.amazonaws.gurureviewercli.util.Log; + +public class Main { + private static final String REVIEWER_ENDPOINT_PATTERN = "https://codeguru-reviewer.%s.amazonaws.com"; + + @Parameter(names = {"--region"}, + description = "Region where CodeGuru Reviewer will run.", + required = false) + private String regionName = "us-east-1"; + + @Parameter(names = {"--profile"}, + description = "Use a named profile to get AWS Credentials", + required = false) + private String profileName; + + @Parameter(names = {"--commit-range", "-c"}, + description = "Range of commits to analyze separated by ':'. For example HEAD^:HEAD ", + required = false) + private String commitRange; + + @Parameter(names = {"--no-prompt"}, + description = "Run in non-interactive mode.", + required = false) + private boolean noPrompt; + + @Parameter(names = {"--root-dir", "-r"}, + description = "The root directory of the project that should be analyzed.", + required = true) + private String repoDir; + + @Parameter(names = {"--src", "-s"}, + description = "Source directories to be analyzed. Can be used multiple times.") + private List sourceDirs; + + @Parameter(names = {"--build", "-b"}, + description = "Directory of all build artifacts. 
Can be used multiple times.") + private List buildDirs; + + @Parameter(names = {"--output", "-o"}, + description = "Output directory.") + private String outputDir = "./code-guru"; + + @Parameter(names = {"--bucket-name"}, + description = "Name of S3 bucket that source and build artifacts will be uploaded to for analysis." + + " The bucket name has to be prefixed with 'codeguru-reviewer-'. If no bucket name" + + " is provided, the CLI will create a bucket automatically.") + private String bucketName; + + @Parameter(names = {"--kms-key-id", "-kms"}, + description = "KMS Key ID to encrypt source and build artifacts in S3") + private String kmsKeyId; + + public static void main(String[] argv) { + val textIO = new TextIO(new SystemTextTerminal()); + + val main = new Main(); + val jCommander = JCommander.newBuilder() + .addObject(main) + .build(); + if (argv.length == 0) { + jCommander.usage(); + return; + } + try { + jCommander.parse(argv); + val config = Configuration.builder() + .textIO(textIO) + .interactiveMode(!main.noPrompt) + .bucketName(main.bucketName) + .build(); + main.validateInitialConfig(config); + // try to build the AWS client objects first. + main.createAWSClients(config); + + String repoName = config.getRootDir().toFile().getName(); + config.setRepoName(repoName); + + // check if repo is valid git. + val gitMetaData = main.readGitMetaData(config, Paths.get(main.repoDir).normalize()); + + ScanMetaData scanMetaData = null; + List results = new ArrayList<>(); + try { + scanMetaData = ScanAdapter.startScan(config, gitMetaData, main.sourceDirs, main.buildDirs); + results.addAll(ScanAdapter.fetchResults(config, scanMetaData)); + } finally { + if (scanMetaData != null) { + // try to clean up objects from S3. + main.tryDeleteS3Object(config.getS3Client(), + scanMetaData.getBucketName(), + scanMetaData.getSourceKey()); + main.tryDeleteS3Object(config.getS3Client(), + scanMetaData.getBucketName(), + scanMetaData.getBuildKey()); + } + } + + val outputPath = Paths.get(main.outputDir); + if (!outputPath.toFile().exists()) { + if (!outputPath.toFile().mkdirs()) { + Log.error("Failed to create output directory %s.", outputPath); + } + } + ResultsAdapter.saveResults(outputPath, results, scanMetaData); + Log.info("Analysis finished."); + } catch (GuruCliException e) { + Log.error("%s: %s", e.getErrorCode(), e.getMessage()); + e.printStackTrace(); + } catch (ParameterException e) { + Log.error(e); + jCommander.usage(); + System.exit(1); + } catch (Exception e) { + e.printStackTrace(); + Log.error(e); + System.exit(2); + } + System.exit(0); + } + + protected GitMetaData readGitMetaData(final Configuration config, final Path repoRoot) { + if (commitRange != null) { + val commits = commitRange.split(":"); + if (commits.length != 2) { + throw new GuruCliException(ErrorCodes.GIT_INVALID_COMMITS, + "Invalid value for --commit-range. 
Use '[before commit]:[after commit]'."); + } + config.setBeforeCommit(commits[0]); + config.setAfterCommit(commits[1]); + } + + return GitAdapter.getGitMetaData(config, repoRoot); + } + + private void validateInitialConfig(final Configuration config) { + if (config.getBucketName() != null && !config.getBucketName().startsWith("codeguru-reviewer-")) { + throw new GuruCliException(ErrorCodes.BAD_BUCKET_NAME, + config.getBucketName() + " is not a valid bucket name for CodeGuru."); + } + if (!Paths.get(repoDir).toFile().isDirectory()) { + throw new GuruCliException(ErrorCodes.DIR_NOT_FOUND, + repoDir + " is not a valid directory."); + } + config.setRootDir(Paths.get(repoDir).toAbsolutePath().normalize()); + if (this.sourceDirs == null || this.sourceDirs.isEmpty()) { + this.sourceDirs = Arrays.asList(config.getRootDir().toString()); + } + sourceDirs.forEach(sourceDir -> { + if (!Paths.get(sourceDir).toFile().isDirectory()) { + throw new GuruCliException(ErrorCodes.DIR_NOT_FOUND, + sourceDir + " is not a valid directory."); + } + }); + if (this.buildDirs != null) { + buildDirs.forEach(buildDir -> { + if (!Paths.get(buildDir).toFile().isDirectory()) { + throw new GuruCliException(ErrorCodes.DIR_NOT_FOUND, + buildDir + " is not a valid directory."); + } + }); + } + config.setKeyId(this.kmsKeyId); + } + + private void tryDeleteS3Object(final S3Client s3Client, final String s3Bucket, final String s3Key) { + try { + if (s3Key != null) { + s3Client.deleteObject(DeleteObjectRequest.builder().bucket(s3Bucket).key(s3Key).build()); + } + } catch (Exception e) { + Log.warn("Failed to delete %s from %s. Please delete the object by hand.", s3Key, s3Bucket); + } + } + + protected void createAWSClients(final Configuration config) { + val credentials = getCredentials(); + try { + config.setRegion(regionName); + val callerIdentity = + StsClient.builder() + .credentialsProvider(credentials) + .region(Region.of(regionName)) + .build().getCallerIdentity(); + config.setAccountId(callerIdentity.account()); + config.setGuruFrontendService(getNewGuruClient(credentials)); + config.setS3Client(getS3Client(credentials)); + } catch (IllegalArgumentException e) { + // profile could not be found + throw new GuruCliException(ErrorCodes.AWS_INIT_ERROR, + "Error accessing the provided profile. " + this.profileName + + "Ensure that the spelling is correct and" + + " that the role has access to CodeGuru and S3."); + } catch (SdkClientException e) { + throw new GuruCliException(ErrorCodes.AWS_INIT_ERROR, + "No AWS credentials found. 
Use 'aws configure' to set them up.");
+        }
+    }
+
+    private AwsCredentialsProvider getCredentials() {
+        if (profileName == null || profileName.replaceAll("\\s+", "").length() == 0) {
+            return DefaultCredentialsProvider.create();
+        }
+        return ProfileCredentialsProvider.create(profileName);
+    }
+
+    private CodeGuruReviewerClient getNewGuruClient(AwsCredentialsProvider credentialsProvider) {
+        final String endpoint = String.format(REVIEWER_ENDPOINT_PATTERN, regionName);
+        return CodeGuruReviewerClient.builder()
+                                     .credentialsProvider(credentialsProvider)
+                                     .endpointOverride(URI.create(endpoint))
+                                     .region(Region.of(regionName))
+                                     .build();
+    }
+
+    private S3Client getS3Client(AwsCredentialsProvider credentialsProvider) {
+        return S3Client.builder()
+                       .credentialsProvider(credentialsProvider)
+                       .region(Region.of(regionName))
+                       .build();
+    }
+}
diff --git a/src/main/java/com/amazonaws/gurureviewercli/adapter/ArtifactAdapter.java b/src/main/java/com/amazonaws/gurureviewercli/adapter/ArtifactAdapter.java
new file mode 100644
index 0000000..e48c4d7
--- /dev/null
+++ b/src/main/java/com/amazonaws/gurureviewercli/adapter/ArtifactAdapter.java
@@ -0,0 +1,133 @@
+package com.amazonaws.gurureviewercli.adapter;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.List;
+import java.util.UUID;
+import java.util.stream.Collectors;
+
+import lombok.val;
+import software.amazon.awssdk.services.s3.S3Client;
+import software.amazon.awssdk.services.s3.model.PutObjectRequest;
+
+import com.amazonaws.gurureviewercli.model.Configuration;
+import com.amazonaws.gurureviewercli.model.ScanMetaData;
+import com.amazonaws.gurureviewercli.util.ZipUtils;
+
+/**
+ * Utility class to zip and upload source and build artifacts to S3.
+ */
+public final class ArtifactAdapter {
+
+    /**
+     * Zip and upload source and build artifacts to S3.
+     *
+     * @param config The current {@link Configuration}
+     * @param tempDir A temp directory where files can be copied to and zipped. Will be deleted after completion.
+     * @param repositoryDir The root directory of the repo to analyze
+     * @param sourceDirs The list of source directories under repositoryDir.
+     * @param buildDirs The list of build directories (can be empty).
+     * @param bucketName The name of the S3 bucket that should be used for the upload.
+     * @return Metadata about what was zipped and uploaded.
+     * @throws IOException If writing to tempDir fails.
+     */
+    public static ScanMetaData zipAndUpload(final Configuration config,
+                                            final Path tempDir,
+                                            final Path repositoryDir,
+                                            final List<String> sourceDirs,
+                                            final List<String> buildDirs,
+                                            final String bucketName) throws IOException {
+        try {
+            val sourceDirsAndGit = new ArrayList<>(sourceDirs);
+            if (config.getBeforeCommit() != null && config.getAfterCommit() != null) {
+                // only add the git folder if a commit range is provided.
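+                // (Inference from the surrounding code: the uploaded git history is what the commit-range
+                //  scan diffs against, so .git is only included when a range was given.)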
+ sourceDirsAndGit.add(repositoryDir.resolve(".git").toAbsolutePath().toString()); + } + final String sourceKey = + zipAndUploadDir("analysis-src-" + UUID.randomUUID(), sourceDirsAndGit, repositoryDir, + bucketName, tempDir, config.getAccountId(), config.getS3Client()); + final String buildKey; + if (buildDirs != null && !buildDirs.isEmpty()) { + for (val buildDir : buildDirs) { + if (!Paths.get(buildDir).toFile().isDirectory()) { + throw new FileNotFoundException("Provided build directory not found " + buildDir); + } + } + buildKey = + zipAndUploadDir("analysis-bin-" + UUID.randomUUID(), buildDirs, + bucketName, tempDir, config.getAccountId(), config.getS3Client()); + } else { + buildKey = null; + } + return ScanMetaData.builder() + .bucketName(bucketName) + .repositoryRoot(repositoryDir) + .sourceDirectories(sourceDirs.stream().map(Paths::get).collect(Collectors.toList())) + .sourceKey(sourceKey) + .buildKey(buildKey) + .build(); + } finally { + // Delete the temp dir. + try (val walker = Files.walk(tempDir)) { + walker.sorted(Comparator.reverseOrder()) + .map(Path::toFile) + .forEach(File::delete); + } + } + } + + + private static String zipAndUploadDir(final String artifactName, + final List dirNames, + final String bucketName, + final Path tempDir, + final String accountId, + final S3Client s3Client) throws IOException { + return zipAndUploadDir(artifactName, dirNames, null, bucketName, tempDir, accountId, s3Client); + } + + private static String zipAndUploadDir(final String artifactName, + final List dirNames, + final Path rootDir, + final String bucketName, + final Path tempDir, + final String accountId, + final S3Client s3Client) throws IOException { + if (dirNames != null) { + for (val dirName : dirNames) { + if (!Paths.get(dirName).toAbsolutePath().toFile().isDirectory()) { + throw new IOException("Not a valid directory: " + dirName); + } + } + val zipFileName = artifactName + ".zip"; + val zipFile = tempDir.resolve(zipFileName).toAbsolutePath(); + val s3Key = zipFileName; + if (!zipFile.toFile().isFile()) { + if (rootDir != null) { + ZipUtils.pack(dirNames, rootDir, zipFile.toString()); + } else { + ZipUtils.pack(dirNames, zipFile.toString()); + } + } + val putObjectRequest = PutObjectRequest.builder() + .bucket(bucketName) + .key(s3Key) + .expectedBucketOwner(accountId) + .build(); + s3Client.putObject(putObjectRequest, zipFile); + return s3Key; + } + return null; + } + + + private ArtifactAdapter() { + // do not instantiate + } +} diff --git a/src/main/java/com/amazonaws/gurureviewercli/adapter/AssociationAdapter.java b/src/main/java/com/amazonaws/gurureviewercli/adapter/AssociationAdapter.java new file mode 100644 index 0000000..6f5d6d7 --- /dev/null +++ b/src/main/java/com/amazonaws/gurureviewercli/adapter/AssociationAdapter.java @@ -0,0 +1,166 @@ +package com.amazonaws.gurureviewercli.adapter; + +import java.util.concurrent.TimeUnit; + +import lombok.val; +import software.amazon.awssdk.services.codegurureviewer.model.AssociateRepositoryRequest; +import software.amazon.awssdk.services.codegurureviewer.model.DescribeRepositoryAssociationRequest; +import software.amazon.awssdk.services.codegurureviewer.model.DescribeRepositoryAssociationResponse; +import software.amazon.awssdk.services.codegurureviewer.model.EncryptionOption; +import software.amazon.awssdk.services.codegurureviewer.model.KMSKeyDetails; +import software.amazon.awssdk.services.codegurureviewer.model.ListRepositoryAssociationsRequest; +import software.amazon.awssdk.services.codegurureviewer.model.ProviderType; 
+import software.amazon.awssdk.services.codegurureviewer.model.Repository; +import software.amazon.awssdk.services.codegurureviewer.model.RepositoryAssociation; +import software.amazon.awssdk.services.codegurureviewer.model.RepositoryAssociationState; +import software.amazon.awssdk.services.codegurureviewer.model.S3Repository; +import software.amazon.awssdk.services.s3.model.CreateBucketRequest; +import software.amazon.awssdk.services.s3.model.HeadBucketRequest; +import software.amazon.awssdk.services.s3.model.NoSuchBucketException; + +import com.amazonaws.gurureviewercli.exceptions.GuruCliException; +import com.amazonaws.gurureviewercli.model.Configuration; +import com.amazonaws.gurureviewercli.model.ErrorCodes; +import com.amazonaws.gurureviewercli.util.Log; + +/** + * Utility class to get or create a CodeGuru Reviewer Repository association. + */ +public final class AssociationAdapter { + + private static final String BUCKET_NAME_PATTERN = "codeguru-reviewer-cli-%s-%s"; + private static final long WAIT_TIME_IN_SECONDS = 1L; + + /** + * Get or create a CodeGuru Repository Association (and, if necessary an S3 bucket). + * + * @param config The {@link Configuration} with name of repo, account, and region. + * @return A CodeGuru Repository association. + */ + public static RepositoryAssociation getAssociatedGuruRepo(final Configuration config) { + val guruFrontendService = config.getGuruFrontendService(); + val repositoryAssociationsRequest = + ListRepositoryAssociationsRequest.builder() + .providerTypes(ProviderType.S3_BUCKET) + .names(config.getRepoName()) + .build(); + val associationResults = guruFrontendService.listRepositoryAssociations(repositoryAssociationsRequest); + if (associationResults.repositoryAssociationSummaries().size() == 1) { + val summary = associationResults.repositoryAssociationSummaries().get(0); + val describeAssociationRequest = + DescribeRepositoryAssociationRequest.builder().associationArn(summary.associationArn()).build(); + val association = + guruFrontendService.describeRepositoryAssociation(describeAssociationRequest) + .repositoryAssociation(); + if (!RepositoryAssociationState.ASSOCIATED.equals(association.state())) { + val msg = String.format("Repository association in unexpected state %s: %s", + association.state(), + association.stateReason()); + throw new GuruCliException(ErrorCodes.ASSOCIATION_FAILED, msg); + } + if (config.getKeyId() != null && + !config.getKeyId().equals(association.kmsKeyDetails().kmsKeyId())) { + val msg = String.format("Provided KMS Key alias %s for repository %s does " + + "not match existing key: %s", + config.getKeyId(), + association.name(), + association.kmsKeyDetails().kmsKeyId()); + throw new GuruCliException(ErrorCodes.ASSOCIATION_FAILED, msg); + } + return association; + } else if (associationResults.repositoryAssociationSummaries().isEmpty()) { + return createBucketAndAssociation(config); + } else { + throw new RuntimeException("Found more than one matching association: " + associationResults); + } + } + + private static RepositoryAssociation createBucketAndAssociation(final Configuration config) { + final String bucketName; + if (config.getBucketName() != null) { + if (!config.getBucketName().startsWith("codeguru-reviewer-")) { + throw new GuruCliException(ErrorCodes.BAD_BUCKET_NAME, + config.getBucketName() + " is not a valid bucket name for CodeGuru."); + } + bucketName = config.getBucketName(); + } else { + bucketName = String.format(BUCKET_NAME_PATTERN, config.getAccountId(), config.getRegion()); + } + try { + 
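+            // Probe for the bucket first: headBucket throws NoSuchBucketException when the bucket does not
+            // exist yet, which is handled below by offering to create it.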
config.getS3Client().headBucket(HeadBucketRequest.builder().bucket(bucketName).build()); + } catch (NoSuchBucketException e) { + Log.info("CodeGuru Reviewer requires an S3 bucket to upload the analysis artifacts to."); + val createBucket = + !config.isInteractiveMode() || + config.getTextIO() + .newBooleanInputReader() + .withTrueInput("y") + .withFalseInput("n") + .read("Do you want to create a new S3 bucket: " + bucketName, bucketName); + if (createBucket) { + Log.info("Creating new bucket: %s", bucketName); + config.getS3Client().createBucket(CreateBucketRequest.builder().bucket(bucketName).build()); + } else { + throw new GuruCliException(ErrorCodes.USER_ABORT, "CodeGuru needs an S3 bucket to continue."); + } + + } + + val repository = Repository.builder() + .s3Bucket(S3Repository.builder() + .bucketName(bucketName) + .name(config.getRepoName()) + .build()) + .build(); + + AssociateRepositoryRequest associateRequest; + if (config.getKeyId() != null) { + val keyDetails = KMSKeyDetails.builder() + .encryptionOption(EncryptionOption.CUSTOMER_MANAGED_CMK) + .kmsKeyId(config.getKeyId()) + .build(); + associateRequest = AssociateRepositoryRequest.builder() + .repository(repository) + .kmsKeyDetails(keyDetails) + .build(); + } else { + associateRequest = AssociateRepositoryRequest.builder().repository(repository).build(); + } + val associateResponse = config.getGuruFrontendService().associateRepository(associateRequest); + val associationArn = associateResponse.repositoryAssociation().associationArn(); + Log.print("Creating association "); + DescribeRepositoryAssociationRequest associationRequest = + DescribeRepositoryAssociationRequest.builder().associationArn(associationArn).build(); + + DescribeRepositoryAssociationResponse associationResponse = + config.getGuruFrontendService().describeRepositoryAssociation(associationRequest); + while (associationResponse != null) { + val association = associationResponse.repositoryAssociation(); + if (RepositoryAssociationState.ASSOCIATED.equals(association.state())) { + Log.println(" done"); + Log.print("Created new repository association: "); + Log.awsUrl("?region=%s#/ciworkflows/associationdetails/%s", config.getRegion(), + association.associationArn()); + return association; + } else if (RepositoryAssociationState.ASSOCIATING.equals(association.state())) { + Log.print("."); + try { + Thread.sleep(TimeUnit.SECONDS.toMillis(WAIT_TIME_IN_SECONDS)); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } else { + val msg = String.format("Repository association in unexpected state %s: %s", + association.state(), + association.stateReason()); + throw new GuruCliException(ErrorCodes.ASSOCIATION_FAILED, msg); + } + associationResponse = config.getGuruFrontendService().describeRepositoryAssociation(associationRequest); + } + throw new GuruCliException(ErrorCodes.ASSOCIATION_FAILED, "Unexpected error during association"); + } + + private AssociationAdapter() { + // do not instantiate + } +} diff --git a/src/main/java/com/amazonaws/gurureviewercli/adapter/GitAdapter.java b/src/main/java/com/amazonaws/gurureviewercli/adapter/GitAdapter.java new file mode 100644 index 0000000..94819e8 --- /dev/null +++ b/src/main/java/com/amazonaws/gurureviewercli/adapter/GitAdapter.java @@ -0,0 +1,135 @@ +package com.amazonaws.gurureviewercli.adapter; + +import javax.annotation.Nonnull; +import java.io.IOException; +import java.nio.file.Path; + +import lombok.val; +import org.eclipse.jgit.api.Git; +import org.eclipse.jgit.api.errors.GitAPIException; 
+import org.eclipse.jgit.lib.ObjectReader; +import org.eclipse.jgit.lib.Repository; +import org.eclipse.jgit.revwalk.RevWalk; +import org.eclipse.jgit.storage.file.FileRepositoryBuilder; +import org.eclipse.jgit.treewalk.CanonicalTreeParser; + +import com.amazonaws.gurureviewercli.exceptions.GuruCliException; +import com.amazonaws.gurureviewercli.model.Configuration; +import com.amazonaws.gurureviewercli.model.ErrorCodes; +import com.amazonaws.gurureviewercli.model.GitMetaData; +import com.amazonaws.gurureviewercli.util.Log; + +/** + * Util to sanity-check if a repo is a valid git repository that can be analyzed by CodeGuru. + */ +public final class GitAdapter { + + @Nonnull + public static GitMetaData getGitMetaData(final Configuration config, final Path pathToRepo) { + val gitDir = pathToRepo.toAbsolutePath().normalize().resolve(".git"); + if (!gitDir.toFile().isDirectory()) { + // if the directory is not under version control, return a dummy object. + return GitMetaData.builder() + .repoRoot(pathToRepo) + .userName("nobody") + .currentBranch("unknown") + .build(); + } + return tryGetMetaData(config, pathToRepo.toAbsolutePath().normalize().resolve(".git")); + } + + @Nonnull + protected static GitMetaData tryGetMetaData(final Configuration config, final Path gitDir) { + if (!gitDir.toFile().isDirectory()) { + throw new GuruCliException(ErrorCodes.GIT_INVALID_DIR); + } + val builder = new FileRepositoryBuilder(); + try (val repository = builder.setGitDir(gitDir.toFile()).findGitDir().build()) { + val userName = repository.getConfig().getString("user", null, "email"); + val urlString = repository.getConfig().getString("remote", "origin", "url"); + val branchName = repository.getBranch(); + if (branchName == null) { + throw new GuruCliException(ErrorCodes.GIT_BRANCH_MISSING); + } + + val metadata = GitMetaData.builder() + .currentBranch(branchName) + .userName(userName) + .repoRoot(gitDir.getParent()) + .remoteUrl(urlString) + .build(); + + if (config.getBeforeCommit() == null || config.getAfterCommit() == null) { + // ask if commits should be inferred or if the entire repo should be scanned. 
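+                // e.g. invoking the CLI with "--commit-range HEAD^:HEAD" (see Main) limits the scan to the
+                // changes in the most recent commit instead of a full repository analysis.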
+ Log.warn("CodeGuru will perform a full repository analysis if you do not provide a commit range."); + Log.warn("For pricing details see: https://aws.amazon.com/codeguru/pricing/"); + val doPackageScan = + !config.isInteractiveMode() || + config.getTextIO() + .newBooleanInputReader() + .withTrueInput("y") + .withFalseInput("n") + .read("Do you want to perform a full repository analysis?"); + if (doPackageScan) { + return metadata; + } else { + throw new GuruCliException(ErrorCodes.USER_ABORT, "Use --commit-range to set a commit range"); + } + } + + validateCommits(config, repository); + metadata.setBeforeCommit(config.getBeforeCommit()); + metadata.setAfterCommit(config.getAfterCommit()); + + return metadata; + + } catch (IOException | GitAPIException e) { + throw new GuruCliException(ErrorCodes.GIT_INVALID_DIR, "Cannot read " + gitDir, e); + } + } + + private static boolean validateCommits(final Configuration config, final Repository repo) + throws GitAPIException { + + val beforeTreeIter = treeForCommitId(repo, config.getBeforeCommit()); + val afterTreeIter = treeForCommitId(repo, config.getAfterCommit()); + + // Resolve git constants, such as HEAD^^ to the actual commit hash + config.setBeforeCommit(resolveSha(repo, config.getBeforeCommit())); + config.setAfterCommit(resolveSha(repo, config.getAfterCommit())); + + val diffEntries = new Git(repo).diff().setOldTree(beforeTreeIter).setNewTree(afterTreeIter).call(); + if (diffEntries.isEmpty()) { + throw new GuruCliException(ErrorCodes.GIT_EMPTY_DIFF); + } + + return true; + } + + private static String resolveSha(final Repository repo, final String commitName) { + try { + return repo.resolve(commitName).getName(); + } catch (Throwable e) { + throw new GuruCliException(ErrorCodes.GIT_INVALID_COMMITS, "Invalid commit " + commitName); + } + } + + private static CanonicalTreeParser treeForCommitId(final Repository repo, final String commitId) { + try (RevWalk walk = new RevWalk(repo)) { + val commit = walk.parseCommit(repo.resolve(commitId)); + val treeId = commit.getTree().getId(); + try (ObjectReader reader = repo.newObjectReader()) { + return new CanonicalTreeParser(null, reader, treeId); + } + } catch (NullPointerException e) { + throw new GuruCliException(ErrorCodes.GIT_INVALID_COMMITS, "Not a valid commit id " + commitId, e); + } catch (IOException e) { + throw new GuruCliException(ErrorCodes.GIT_INVALID_COMMITS, "Cannot parse commit id " + commitId, e); + } + } + + private GitAdapter() { + // do not instantiate + } +} + diff --git a/src/main/java/com/amazonaws/gurureviewercli/adapter/ResultsAdapter.java b/src/main/java/com/amazonaws/gurureviewercli/adapter/ResultsAdapter.java new file mode 100644 index 0000000..e7c687a --- /dev/null +++ b/src/main/java/com/amazonaws/gurureviewercli/adapter/ResultsAdapter.java @@ -0,0 +1,135 @@ +package com.amazonaws.gurureviewercli.adapter; + +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.nio.charset.StandardCharsets; +import java.nio.file.Path; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; + +import lombok.val; +import org.commonmark.node.Node; +import org.commonmark.parser.Parser; +import org.commonmark.renderer.html.HtmlRenderer; +import software.amazon.awssdk.services.codegurureviewer.model.RecommendationSummary; + +import com.amazonaws.gurureviewercli.model.ScanMetaData; +import com.amazonaws.gurureviewercli.util.JsonUtil; +import com.amazonaws.gurureviewercli.util.Log; + +/** + * Util to 
save Guru recommendations to disk and convert them to HTML.
+ */
+public final class ResultsAdapter {
+
+    public static void saveResults(final Path outputDir,
+                                   final List<RecommendationSummary> results,
+                                   final ScanMetaData scanMetaData) throws IOException {
+        val jsonFile = outputDir.resolve("recommendations.json");
+        JsonUtil.storeRecommendations(results, jsonFile);
+        Log.info("Recommendations in JSON format written to:%n%s", jsonFile.normalize().toUri());
+        createHtmlReport(outputDir, scanMetaData, results);
+    }
+
+    private static void createHtmlReport(final Path outputDir,
+                                         final ScanMetaData scanMetaData,
+                                         final List<RecommendationSummary> recommendations) throws IOException {
+
+        int validFindings = 0;
+        // sort by file name and line number
+        sortByFileName(recommendations);
+
+        Parser parser = Parser.builder().build();
+        HtmlRenderer renderer = HtmlRenderer.builder().build();
+
+        val htmlFile = outputDir.resolve("codeguru-report.html");
+        try (OutputStreamWriter writer =
+                 new OutputStreamWriter(new FileOutputStream(htmlFile.toFile()), StandardCharsets.UTF_8)) {
+
+            writer.write("<!DOCTYPE html>\n<html>\n");
+            writer.write("<body>\n");
+            writer.write("<h2>CodeGuru Reviewer Recommendations</h2>\n");
+            val awsUrlPrefix = "https://console.aws.amazon.com/codeguru/reviewer";
+            val associationUrl = String.format("%s?region=%s#/ciworkflows/associationdetails/%s",
+                                               awsUrlPrefix, scanMetaData.getRegion(), scanMetaData.getAssociationArn());
+            val scanUrl = String.format("%s?region=%s#/codereviews/details/%s",
+                                        awsUrlPrefix, scanMetaData.getRegion(), scanMetaData.getCodeReviewArn());
+
+            writer.write(renderer.render(parser.parse(String.format("**CodeGuru Repository ARN**: [%s](%s)%n",
+                                                                    scanMetaData.getAssociationArn(),
+                                                                    associationUrl))));
+            writer.write(renderer.render(parser.parse(String.format("**CodeGuru Scan ARN**: [%s](%s)%n",
+                                                                    scanMetaData.getCodeReviewArn(),
+                                                                    scanUrl))));
+            writer.write("\n<br/><hr/><br/>\n");
+
+            for (val recommendation : recommendations) {
+                val filePath = scanMetaData.getRepositoryRoot().resolve(recommendation.filePath()).toAbsolutePath();
+                if (filePath == null || !filePath.toFile().isFile()) {
+                    if (filePath != null && !(filePath.endsWith(".") || filePath.endsWith("/"))) {
+                        Log.warn("Dropping finding because file not found on disk: %s", filePath);
+                    }
+                    continue;
+                }
+                validFindings++;
+                String lineMsg;
+                if (recommendation.endLine() == null
+                    || recommendation.startLine().equals(recommendation.endLine())) {
+                    lineMsg = String.format("### In: [%s](%s) L%d %n",
+                                            filePath, filePath.toUri(),
+                                            recommendation.startLine());
+                } else {
+                    lineMsg = String.format("### In: [%s](%s) L%d - L%d %n",
+                                            filePath, filePath.toUri(),
+                                            recommendation.startLine(),
+                                            recommendation.endLine());
+                }
+
+                Node document = parser.parse(lineMsg);
+                writer.write(renderer.render(document));
+
+                document = parser.parse("**Issue:** " + recommendation.description());
+                writer.write(renderer.render(document));
+
+                writer.write(String.format("<p><b>Severity: %s</b></p>", recommendation.severity()));
+
+                if (recommendation.ruleMetadata() != null && recommendation.ruleMetadata().ruleId() != null) {
+                    val manifest = recommendation.ruleMetadata();
+                    writer.write(String.format("<p><b>Rule ID: %s</b></p>", manifest.ruleId()));
+                    writer.write(String.format("<p><b>Rule Name: %s</b></p>", manifest.ruleName()));
+                    document = parser.parse("**Description:** " + manifest.longDescription());
+                    writer.write(renderer.render(document));
+                    if (manifest.ruleTags() != null && !manifest.ruleTags().isEmpty()) {
+                        val mdList = manifest.ruleTags().stream()
+                            .map(s -> String.format("- %s%n", s))
+                            .collect(Collectors.joining());
+                        document = parser.parse("**Tags:**\n" + mdList);
+                        writer.write(renderer.render(document));
+                    }
+                }
+                writer.write("\n<br/><hr/><br/>\n");
+            }
+            writer.write("</body>\n");
+            writer.write("</html>\n");
+        }
+        Log.info("Report with %d recommendations written to:%n%s", validFindings, htmlFile.normalize().toUri());
+    }
+
+    private static void sortByFileName(final List<RecommendationSummary> recommendations) {
+        Collections.sort(recommendations, (o1, o2) -> {
+            int pathComp = o1.filePath().compareTo(o2.filePath());
+            if (pathComp == 0) {
+                return o1.startLine().compareTo(o2.startLine());
+            }
+            return pathComp;
+        });
+    }
+
+    private ResultsAdapter() {
+        // do not instantiate
+    }
+}
diff --git a/src/main/java/com/amazonaws/gurureviewercli/adapter/ScanAdapter.java b/src/main/java/com/amazonaws/gurureviewercli/adapter/ScanAdapter.java
new file mode 100644
index 0000000..d3088af
--- /dev/null
+++ b/src/main/java/com/amazonaws/gurureviewercli/adapter/ScanAdapter.java
@@ -0,0 +1,196 @@
+package com.amazonaws.gurureviewercli.adapter;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+
+import com.beust.jcommander.internal.Nullable;
+import lombok.val;
+import software.amazon.awssdk.services.codegurureviewer.CodeGuruReviewerClient;
+import software.amazon.awssdk.services.codegurureviewer.model.AnalysisType;
+import software.amazon.awssdk.services.codegurureviewer.model.CodeArtifacts;
+import software.amazon.awssdk.services.codegurureviewer.model.CodeReviewType;
+import software.amazon.awssdk.services.codegurureviewer.model.CommitDiffSourceCodeType;
+import software.amazon.awssdk.services.codegurureviewer.model.CreateCodeReviewRequest;
+import software.amazon.awssdk.services.codegurureviewer.model.DescribeCodeReviewRequest;
+import software.amazon.awssdk.services.codegurureviewer.model.DescribeCodeReviewResponse;
+import software.amazon.awssdk.services.codegurureviewer.model.EventInfo;
+import software.amazon.awssdk.services.codegurureviewer.model.JobState;
+import software.amazon.awssdk.services.codegurureviewer.model.ListRecommendationsRequest;
+import software.amazon.awssdk.services.codegurureviewer.model.RecommendationSummary;
+import software.amazon.awssdk.services.codegurureviewer.model.RepositoryAnalysis;
+import software.amazon.awssdk.services.codegurureviewer.model.RepositoryAssociation;
+import software.amazon.awssdk.services.codegurureviewer.model.RepositoryHeadSourceCodeType;
+import software.amazon.awssdk.services.codegurureviewer.model.RequestMetadata;
+import software.amazon.awssdk.services.codegurureviewer.model.S3BucketRepository;
+import software.amazon.awssdk.services.codegurureviewer.model.S3RepositoryDetails;
+import software.amazon.awssdk.services.codegurureviewer.model.SourceCodeType;
+import software.amazon.awssdk.services.codegurureviewer.model.ValidationException;
+import software.amazon.awssdk.services.codegurureviewer.model.VendorName;
+
+import com.amazonaws.gurureviewercli.model.Configuration;
+import com.amazonaws.gurureviewercli.model.GitMetaData;
+import com.amazonaws.gurureviewercli.model.ScanMetaData;
+import com.amazonaws.gurureviewercli.util.Log;
+
+
+/**
+ * Wraps the commands to start a code-review and to poll and download the results.
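+ * <p>Typical usage, mirroring how {@code Main} drives a scan (a sketch; error handling and S3 cleanup omitted):</p>
+ * <pre>{@code
+ *   ScanMetaData scanMetaData = ScanAdapter.startScan(config, gitMetaData, sourceDirs, buildDirs);
+ *   List<RecommendationSummary> results = ScanAdapter.fetchResults(config, scanMetaData);
+ * }</pre>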
+ */ +public final class ScanAdapter { + + private static final String SCAN_PREFIX_NAME = "codeguru-reviewer-cli-"; + + private static final long WAIT_TIME_IN_SECONDS = 2L; + + public static ScanMetaData startScan(final Configuration config, + final GitMetaData gitMetaData, + final List sourceDirs, + final List buildDirs) throws IOException { + val association = AssociationAdapter.getAssociatedGuruRepo(config); + val bucketName = association.s3RepositoryDetails().bucketName(); + Log.info("Starting analysis of %s with association %s and S3 bucket %s", + config.getRootDir(), association.associationArn(), bucketName); + + try { + val tempDir = Files.createTempDirectory("artifact-packing-dir"); + val metadata = ArtifactAdapter.zipAndUpload(config, tempDir, config.getRootDir(), + sourceDirs, buildDirs, bucketName); + + val request = createRepoAnalysisRequest(gitMetaData, metadata.getSourceKey(), + metadata.getBuildKey(), association); + + val response = config.getGuruFrontendService().createCodeReview(request); + if (response == null) { + throw new RuntimeException("Failed to start scan: " + request); + } + + Log.print("Started new CodeGuru Reviewer scan: "); + Log.awsUrl("?region=%s#/codereviews/details/%s", config.getRegion(), + response.codeReview().codeReviewArn()); + + metadata.setCodeReviewArn(response.codeReview().codeReviewArn()); + metadata.setAssociationArn(association.associationArn()); + metadata.setRegion(config.getRegion()); + return metadata; + } catch (ValidationException e) { + throw new RuntimeException(e); + } + } + + public static List fetchResults(final Configuration config, + final ScanMetaData scanMetaData) { + val reviewARN = scanMetaData.getCodeReviewArn(); + val describeReviewRequest = DescribeCodeReviewRequest.builder().codeReviewArn(reviewARN).build(); + DescribeCodeReviewResponse response = config.getGuruFrontendService().describeCodeReview(describeReviewRequest); + while (response != null) { + val state = response.codeReview().state(); + if (JobState.COMPLETED.equals(state)) { + Log.println(":)"); + return downloadResults(config.getGuruFrontendService(), reviewARN); + } else if (JobState.PENDING.equals(state)) { + Log.print("."); + try { + Thread.sleep(TimeUnit.SECONDS.toMillis(WAIT_TIME_IN_SECONDS)); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } else if (JobState.FAILED.equals(state)) { + val msg = String.format("CodeGuru scan failed for ARN %s: %s%nCheck the AWS Console for more detail", + reviewARN, response.codeReview().stateReason()); + throw new RuntimeException(msg); + } else { + val msg = String.format("CodeGuru scan is in an unexpected state %s: %s%n" + + "Check the AWS Console for more detail", + state, response.codeReview().stateReason()); + throw new RuntimeException(msg); + } + response = config.getGuruFrontendService().describeCodeReview(describeReviewRequest); + } + throw new RuntimeException("Unable to find information for scan " + reviewARN); + } + + private static List downloadResults(final CodeGuruReviewerClient guruFrontendService, + final String reviewARN) { + val recommendations = new ArrayList(); + val listRequest = ListRecommendationsRequest.builder().codeReviewArn(reviewARN).build(); + guruFrontendService.listRecommendationsPaginator(listRequest) + .forEach(resp -> recommendations.addAll(resp.recommendationSummaries())); + return recommendations; + } + + private static CreateCodeReviewRequest createRepoAnalysisRequest(final GitMetaData gitMetaData, + final String sourceKey, + final @Nullable String 
buildArtifactKey, + final RepositoryAssociation association) { + final CodeArtifacts codeArtifacts; + final AnalysisType[] analysisTypes; + if (buildArtifactKey == null) { + codeArtifacts = CodeArtifacts.builder().sourceCodeArtifactsObjectKey(sourceKey).build(); + analysisTypes = new AnalysisType[]{AnalysisType.CODE_QUALITY}; + } else { + codeArtifacts = CodeArtifacts.builder().sourceCodeArtifactsObjectKey(sourceKey) + .buildArtifactsObjectKey(buildArtifactKey) + .build(); + analysisTypes = new AnalysisType[]{AnalysisType.SECURITY, AnalysisType.CODE_QUALITY}; + } + + val s3repoDetails = S3RepositoryDetails.builder().bucketName(association.s3RepositoryDetails() + .bucketName()) + .codeArtifacts(codeArtifacts).build(); + val s3repo = S3BucketRepository.builder().name(association.name()) + .details(s3repoDetails).build(); + + val sourceCodeType = getSourceCodeType(s3repo, gitMetaData); + + val repoAnalysis = RepositoryAnalysis.builder().sourceCodeType(sourceCodeType).build(); + + val reviewType = CodeReviewType.builder().repositoryAnalysis(repoAnalysis) + .analysisTypes(analysisTypes) + .build(); + + return CreateCodeReviewRequest.builder().type(reviewType) + .name(SCAN_PREFIX_NAME + UUID.randomUUID().toString()) + .repositoryAssociationArn(association.associationArn()) + .build(); + } + + private static SourceCodeType getSourceCodeType(final S3BucketRepository s3BucketRepository, + final GitMetaData gitMetaData) { + + val hasDiff = gitMetaData.getBeforeCommit() != null && gitMetaData.getAfterCommit() != null; + val eventInfo = hasDiff ? EventInfo.builder().name("push").build() : + EventInfo.builder().name("schedule").build(); + val requestMetaData = RequestMetadata.builder().requestId(gitMetaData.getPullRequestId()) + .eventInfo(eventInfo) + .requester(gitMetaData.getUserName()) + .vendorName(VendorName.GIT_HUB) + .build(); + if (hasDiff) { + val commitDiff = CommitDiffSourceCodeType.builder().sourceCommit(gitMetaData.getAfterCommit()) + .destinationCommit(gitMetaData.getBeforeCommit()) + .build(); + val repoHead = + RepositoryHeadSourceCodeType.builder().branchName(gitMetaData.getCurrentBranch()).build(); + return SourceCodeType.builder().s3BucketRepository(s3BucketRepository) + .commitDiff(commitDiff) + .repositoryHead(repoHead) + .requestMetadata(requestMetaData) + .build(); + } else { + val repoHead = + RepositoryHeadSourceCodeType.builder().branchName(gitMetaData.getCurrentBranch()).build(); + return SourceCodeType.builder().s3BucketRepository(s3BucketRepository) + .repositoryHead(repoHead) + .requestMetadata(requestMetaData) + .build(); + } + } + + private ScanAdapter() { + // do not instantiate + } +} diff --git a/src/main/java/com/amazonaws/gurureviewercli/exceptions/GuruCliException.java b/src/main/java/com/amazonaws/gurureviewercli/exceptions/GuruCliException.java new file mode 100644 index 0000000..a15d4cd --- /dev/null +++ b/src/main/java/com/amazonaws/gurureviewercli/exceptions/GuruCliException.java @@ -0,0 +1,26 @@ +package com.amazonaws.gurureviewercli.exceptions; + +import lombok.Getter; + +import com.amazonaws.gurureviewercli.model.ErrorCodes; + +public class GuruCliException extends RuntimeException { + + public GuruCliException(final ErrorCodes errorCode) { + this.errorCode = errorCode; + } + + public GuruCliException(final ErrorCodes errorCode, final String msg) { + super(msg); + this.errorCode = errorCode; + } + + public GuruCliException(final ErrorCodes errorCode, final String msg, final Throwable cause) { + super(msg, cause); + this.errorCode = errorCode; + } + + 
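/** The {@link ErrorCodes} value identifying why the CLI failed. */ +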
@Getter + private ErrorCodes errorCode; + +} diff --git a/src/main/java/com/amazonaws/gurureviewercli/model/Configuration.java b/src/main/java/com/amazonaws/gurureviewercli/model/Configuration.java new file mode 100644 index 0000000..1d74227 --- /dev/null +++ b/src/main/java/com/amazonaws/gurureviewercli/model/Configuration.java @@ -0,0 +1,45 @@ +package com.amazonaws.gurureviewercli.model; + +import javax.annotation.Nullable; +import java.nio.file.Path; + +import lombok.Builder; +import lombok.Data; +import org.beryx.textio.TextIO; +import software.amazon.awssdk.services.codegurureviewer.CodeGuruReviewerClient; +import software.amazon.awssdk.services.s3.S3Client; + +/** + * Class to hold all shared configuration data. This object is mutable and information is added as it becomes + * available. + */ +@Data +@Builder +public class Configuration { + + private boolean interactiveMode; + + private CodeGuruReviewerClient guruFrontendService; + + private S3Client s3Client; + + private String accountId; + + private String region; + + private String repoName; + + private String keyId; + + private Path rootDir; + + private TextIO textIO; + + private String bucketName; + + private @Nullable + String beforeCommit; + + private @Nullable + String afterCommit; +} diff --git a/src/main/java/com/amazonaws/gurureviewercli/model/ErrorCodes.java b/src/main/java/com/amazonaws/gurureviewercli/model/ErrorCodes.java new file mode 100644 index 0000000..75f9311 --- /dev/null +++ b/src/main/java/com/amazonaws/gurureviewercli/model/ErrorCodes.java @@ -0,0 +1,31 @@ +package com.amazonaws.gurureviewercli.model; + +import lombok.Getter; + +/** + * Error Codes for the CLI. + */ +public enum ErrorCodes { + + ASSOCIATION_FAILED("Failed to associate with CodeGuru"), + GIT_INVALID_DIR("Invalid Git Directory"), + GIT_BRANCH_MISSING("Cannot determine Git branch"), + DIR_NOT_FOUND("Provided path is not a valid directory"), + GIT_INVALID_COMMITS("Not a valid commit"), + GIT_EMPTY_DIFF("Git Diff is empty"), + AWS_INIT_ERROR("Failed to initialize AWS API"), + BAD_BUCKET_NAME("CodeGuru Reviewer expects bucket names to start with codeguru-reviewer-"), + USER_ABORT("Abort"); + + @Getter + final String errorMessage; + + ErrorCodes(String msg) { + this.errorMessage = msg; + } + + @Override + public String toString() { + return errorMessage; + } +} diff --git a/src/main/java/com/amazonaws/gurureviewercli/model/GitMetaData.java b/src/main/java/com/amazonaws/gurureviewercli/model/GitMetaData.java new file mode 100644 index 0000000..e06fd77 --- /dev/null +++ b/src/main/java/com/amazonaws/gurureviewercli/model/GitMetaData.java @@ -0,0 +1,31 @@ +package com.amazonaws.gurureviewercli.model; + +import javax.annotation.Nullable; +import java.nio.file.Path; + +import lombok.Builder; +import lombok.Data; + +/** + * Metadata collected about the analyzed git repo. 
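+ * Includes the repository root, current branch, remote URL, and, for diff scans, the before and after commits.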
+ */ +@Builder +@Data +public class GitMetaData { + + private Path repoRoot; + + private String userName; + + private String currentBranch; + + @Builder.Default + private String pullRequestId = "0"; + + private @Nullable String remoteUrl; + + private @Nullable String beforeCommit; + + private @Nullable String afterCommit; + +} diff --git a/src/main/java/com/amazonaws/gurureviewercli/model/Recommendation.java b/src/main/java/com/amazonaws/gurureviewercli/model/Recommendation.java new file mode 100644 index 0000000..0bd0c23 --- /dev/null +++ b/src/main/java/com/amazonaws/gurureviewercli/model/Recommendation.java @@ -0,0 +1,55 @@ +package com.amazonaws.gurureviewercli.model; + +import java.util.List; + +import lombok.Data; +import lombok.NoArgsConstructor; +import lombok.val; +import software.amazon.awssdk.services.codegurureviewer.model.RecommendationSummary; + +/** + * Serializable recommendation class + */ +@Data +@NoArgsConstructor +public class Recommendation { + + private String filePath; + private String recommendationId; + private Integer startLine; + private Integer endLine; + private String description; + private String recommendationCategory; + private RuleMetadata ruleMetadata; + private String severity; + + @Data + static final class RuleMetadata { + private String ruleId; + private String ruleName; + private String shortDescription; + private String longDescription; + private List ruleTags; + } + + public RecommendationSummary toRecommendationSummary() { + val rm = software.amazon.awssdk.services.codegurureviewer.model. + RuleMetadata.builder() + .ruleId(ruleMetadata.ruleId) + .longDescription(ruleMetadata.longDescription) + .shortDescription(ruleMetadata.shortDescription) + .ruleName(ruleMetadata.ruleName) + .ruleTags(ruleMetadata.ruleTags) + .build(); + return RecommendationSummary.builder() + .description(description) + .recommendationId(recommendationId) + .recommendationCategory(recommendationCategory) + .filePath(filePath) + .startLine(startLine) + .endLine(endLine) + .severity(severity) + .ruleMetadata(rm) + .build(); + } +} diff --git a/src/main/java/com/amazonaws/gurureviewercli/model/ScanMetaData.java b/src/main/java/com/amazonaws/gurureviewercli/model/ScanMetaData.java new file mode 100644 index 0000000..0af13b2 --- /dev/null +++ b/src/main/java/com/amazonaws/gurureviewercli/model/ScanMetaData.java @@ -0,0 +1,35 @@ +package com.amazonaws.gurureviewercli.model; + +import java.nio.file.Path; +import java.util.List; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +/** + * Data class to store information about a started CodeGuru Review. 
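+ * Holds the association and code-review ARNs, the S3 bucket and object keys of the uploaded artifacts, and the local repository root and source directories.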
+ */ +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class ScanMetaData { + + private String associationArn; + + private String codeReviewArn; + + private String region; + + private Path repositoryRoot; + + private List sourceDirectories; + + private String bucketName; + + private String sourceKey; + + private String buildKey; +} diff --git a/src/main/java/com/amazonaws/gurureviewercli/util/JsonUtil.java b/src/main/java/com/amazonaws/gurureviewercli/util/JsonUtil.java new file mode 100644 index 0000000..23327bb --- /dev/null +++ b/src/main/java/com/amazonaws/gurureviewercli/util/JsonUtil.java @@ -0,0 +1,45 @@ +package com.amazonaws.gurureviewercli.util; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.List; +import java.util.stream.Collectors; + +import com.fasterxml.jackson.annotation.JsonAutoDetect; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.PropertyAccessor; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.MapperFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; +import lombok.NonNull; +import software.amazon.awssdk.services.codegurureviewer.model.RecommendationSummary; + +import com.amazonaws.gurureviewercli.model.Recommendation; + +/** + * Util class to load and store CodeGuru Reviewer recommendations as JSON. + */ +public final class JsonUtil { + + private static final ObjectMapper OBJECT_MAPPER = + new ObjectMapper().configure(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES, true) + .enable(SerializationFeature.INDENT_OUTPUT) + .setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY) + .setSerializationInclusion(JsonInclude.Include.NON_EMPTY); + + public static List<RecommendationSummary> loadRecommendations(@NonNull final Path jsonFile) throws IOException { + return OBJECT_MAPPER.readValue(jsonFile.toFile(), new TypeReference<List<Recommendation>>() { + }) + .stream().map(Recommendation::toRecommendationSummary).collect(Collectors.toList()); + } + + public static void storeRecommendations(@NonNull final List<RecommendationSummary> recommendations, + @NonNull final Path targetFile) throws IOException { + OBJECT_MAPPER.writeValue(targetFile.toFile(), recommendations); + } + + private JsonUtil() { + // do not initialize utility + } +} diff --git a/src/main/java/com/amazonaws/gurureviewercli/util/Log.java b/src/main/java/com/amazonaws/gurureviewercli/util/Log.java new file mode 100644 index 0000000..52552c3 --- /dev/null +++ b/src/main/java/com/amazonaws/gurureviewercli/util/Log.java @@ -0,0 +1,58 @@ +package com.amazonaws.gurureviewercli.util; + +import org.beryx.textio.TextTerminal; +import org.beryx.textio.system.SystemTextTerminal; + +public final class Log { + + private static final String TEXT_RESET = "\u001B[0m"; + private static final String TEXT_BLACK = "\u001B[30m"; + private static final String TEXT_RED = "\u001B[31m"; + private static final String TEXT_GREEN = "\u001B[32m"; + private static final String TEXT_YELLOW = "\u001B[33m"; + private static final String TEXT_BLUE = "\u001B[34m"; + private static final String TEXT_PURPLE = "\u001B[35m"; + private static final String TEXT_CYAN = "\u001B[36m"; + private static final String TEXT_WHITE = "\u001B[37m"; + + private static final String AWS_URL_PREFIX = "https://console.aws.amazon.com/codeguru/reviewer"; + + // can be overridden + private static TextTerminal<?> terminal = new SystemTextTerminal(); + + public static void setTerminal(final TextTerminal<?> t) { + terminal = t; + } + + public static void print(final String format, final Object... args) { + terminal.printf(format, args); + } + + public static void println(final String format, final Object... args) { + terminal.printf(format + "%n", args); + } + + public static void info(final String format, final Object... args) { + terminal.printf(TEXT_GREEN + format + TEXT_RESET + "%n", args); + } + + public static void warn(final String format, final Object... args) { + terminal.printf(TEXT_YELLOW + format + TEXT_RESET + "%n", args); + } + + public static void error(final String format, final Object... args) { + terminal.printf(TEXT_RED + format + TEXT_RESET + "%n", args); + } + + public static void awsUrl(final String format, final Object... args) { + terminal.printf(TEXT_CYAN + AWS_URL_PREFIX + format + TEXT_RESET + "%n", args); + } + + public static void error(final Throwable t) { + terminal.println(TEXT_RED + t.getMessage() + TEXT_RESET); + } + + private Log() { + // do not initialize + } +} diff --git a/src/main/java/com/amazonaws/gurureviewercli/util/ZipUtils.java b/src/main/java/com/amazonaws/gurureviewercli/util/ZipUtils.java new file mode 100644 index 0000000..c06d22b --- /dev/null +++ b/src/main/java/com/amazonaws/gurureviewercli/util/ZipUtils.java @@ -0,0 +1,102 @@ +package com.amazonaws.gurureviewercli.util; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.List; +import java.util.zip.ZipEntry; +import java.util.zip.ZipOutputStream; + +import lombok.extern.log4j.Log4j2; +import lombok.val; + +/** + * Util class for creating zip archives. + */ +@Log4j2 +public final class ZipUtils { + + /** + * Zip the source directories into a destination zip file. + * + * @param sourceDirPaths source dir paths + * @param zipFilePath destination zip file + * @throws IOException io exception + */ + public static void pack(final List<String> sourceDirPaths, final String zipFilePath) throws IOException { + Path p = Files.createFile(Paths.get(zipFilePath).normalize().toAbsolutePath()); + try (ZipOutputStream zs = new ZipOutputStream(Files.newOutputStream(p))) { + for (val sourceDirPath : sourceDirPaths) { + Path pp = Paths.get(sourceDirPath).normalize().toAbsolutePath(); + try (val walk = Files.walk(pp)) { + walk.filter(path -> !Files.isDirectory(path)) + .forEach(path -> { + val normalizedPath = path.normalize().toAbsolutePath(); + val relpath = pp.relativize(normalizedPath).toString(); + ZipEntry zipEntry = new ZipEntry(relpath); + try { + zs.putNextEntry(zipEntry); + zs.write(Files.readAllBytes(path)); + zs.closeEntry(); + } catch (Exception e) { + log.error("Skipping file {} because of error: {}", path, e.getMessage()); + } + }); + } + } + } + } + + /** + * Zip the source directories into a destination zip file, using a shared relative root for the entry names. + * + * @param sourceDirPaths source dir paths + * @param relativeRoot A shared parent of the sourceDirPaths that should be used for all entries.
+ * @param zipFilePath destination zip file + * @throws IOException io exception + */ + public static void pack(final List sourceDirPaths, + final Path relativeRoot, + final String zipFilePath) throws IOException { + sourceDirPaths.stream().forEach(p -> { + val child = Paths.get(p).toAbsolutePath().normalize(); + val parent = relativeRoot.toAbsolutePath().normalize(); + if (!child.startsWith(parent)) { + val msg = String.format("Folder %s is not a subfolder of %s", child, parent); + throw new RuntimeException(msg); + } + }); + Path p = Files.createFile(Paths.get(zipFilePath)); + try (ZipOutputStream zs = new ZipOutputStream(Files.newOutputStream(p))) { + for (val sourceDirPath : sourceDirPaths) { + Path pp = Paths.get(sourceDirPath).normalize().toAbsolutePath(); + try (val walk = Files.walk(pp)) { + walk.filter(path -> !Files.isDirectory(path)) + .forEach(path -> { + val normalizedPath = path.normalize().toAbsolutePath(); + val relPath = relativeRoot.toAbsolutePath() + .normalize() + .relativize(normalizedPath) + .normalize().toString(); + ZipEntry zipEntry = new ZipEntry(relPath); + try { + zs.putNextEntry(zipEntry); + zs.write(Files.readAllBytes(path)); + zs.closeEntry(); + } catch (Exception e) { + log.error("Skipping file {} because of error: {}", path, e.getMessage()); + } + }); + } + } + } + } + + /** + * private construct. + */ + private ZipUtils() { + } + +} diff --git a/src/main/resources/log4j2.properties b/src/main/resources/log4j2.properties new file mode 100644 index 0000000..c2ca2e0 --- /dev/null +++ b/src/main/resources/log4j2.properties @@ -0,0 +1,18 @@ +# Log4j 2 properties file, this will be automatically loaded by Log4j when the application starts +# Read more: https://logging.apache.org/log4j/2.x/manual/configuration.html#Configuration_with_Properties +dest = err +status = error +name = PropertiesConfig + +appenders = console + +# Configuration for printing to console +appender.console.type = Console +appender.console.name = STDOUT +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d %p - %m%n + +# Root logger +rootLogger.level = info +rootLogger.appenderRefs = stdout +rootLogger.appenderRef.stdout.ref = STDOUT diff --git a/src/test/java/com/amazonaws/gurureviewercli/adapter/ArtifactAdapterTest.java b/src/test/java/com/amazonaws/gurureviewercli/adapter/ArtifactAdapterTest.java new file mode 100644 index 0000000..2d179d7 --- /dev/null +++ b/src/test/java/com/amazonaws/gurureviewercli/adapter/ArtifactAdapterTest.java @@ -0,0 +1,67 @@ +package com.amazonaws.gurureviewercli.adapter; + +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.zip.ZipFile; + +import lombok.val; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Assumptions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.stubbing.Answer; +import software.amazon.awssdk.services.s3.S3Client; +import software.amazon.awssdk.services.s3.model.PutObjectRequest; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; + +import com.amazonaws.gurureviewercli.model.Configuration; + +@ExtendWith(MockitoExtension.class) +class ArtifactAdapterTest { + + @Mock + private S3Client s3client; + + @Test + public void test_zipAndUpload_happyCaseSourceOnly() throws Exception{ + val 
repoDir = Paths.get("./"); + // skip the test if the test container stripped to the top level .git folder + Assumptions.assumeTrue(repoDir.resolve(".git").toFile().isDirectory()); + val tempDir = Files.createTempDirectory("test_zipAndUpload_happyCase"); + val bucketName = "some-bucket"; + + final List sourceDirs = Arrays.asList("src"); + final List buildDirs = Collections.emptyList(); + val config = Configuration.builder() + .s3Client(s3client) + .build(); + Answer answer = invocationOnMock -> { + System.err.println(invocationOnMock); + Path filePath = invocationOnMock.getArgument(1); + Assertions.assertTrue(filePath.toFile().isFile()); + try (val zipFile = new ZipFile(filePath.toFile())) { + val entries = zipFile.entries(); + while (entries.hasMoreElements()) { + val s = entries.nextElement().getName(); + val original = repoDir.resolve(s).toFile(); + Assertions.assertTrue(original.isFile(), "Not a valid file: " + original); + Assertions.assertFalse(s.startsWith("..")); + } + } + return null; + }; + doAnswer(answer).when(s3client).putObject(any(PutObjectRequest.class), any(Path.class)); + + val metaData = ArtifactAdapter.zipAndUpload(config, tempDir, repoDir, sourceDirs, buildDirs, bucketName); + Assertions.assertNull(metaData.getBuildKey()); + Assertions.assertNotNull(metaData.getSourceKey()); + } + +} \ No newline at end of file diff --git a/src/test/java/com/amazonaws/gurureviewercli/adapter/AssociationAdapterTest.java b/src/test/java/com/amazonaws/gurureviewercli/adapter/AssociationAdapterTest.java new file mode 100644 index 0000000..ed0b096 --- /dev/null +++ b/src/test/java/com/amazonaws/gurureviewercli/adapter/AssociationAdapterTest.java @@ -0,0 +1,188 @@ +package com.amazonaws.gurureviewercli.adapter; + +import java.util.Collections; + +import lombok.val; +import org.beryx.textio.TextIO; +import org.beryx.textio.mock.MockTextTerminal; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import software.amazon.awssdk.services.codegurureviewer.CodeGuruReviewerClient; +import software.amazon.awssdk.services.codegurureviewer.model.AssociateRepositoryRequest; +import software.amazon.awssdk.services.codegurureviewer.model.AssociateRepositoryResponse; +import software.amazon.awssdk.services.codegurureviewer.model.DescribeRepositoryAssociationRequest; +import software.amazon.awssdk.services.codegurureviewer.model.DescribeRepositoryAssociationResponse; +import software.amazon.awssdk.services.codegurureviewer.model.ListRepositoryAssociationsRequest; +import software.amazon.awssdk.services.codegurureviewer.model.ListRepositoryAssociationsResponse; +import software.amazon.awssdk.services.codegurureviewer.model.RepositoryAssociation; +import software.amazon.awssdk.services.codegurureviewer.model.RepositoryAssociationState; +import software.amazon.awssdk.services.codegurureviewer.model.RepositoryAssociationSummary; +import software.amazon.awssdk.services.s3.S3Client; +import software.amazon.awssdk.services.s3.model.HeadBucketRequest; +import software.amazon.awssdk.services.s3.model.HeadBucketResponse; +import software.amazon.awssdk.services.s3.model.NoSuchBucketException; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.when; + +import com.amazonaws.gurureviewercli.exceptions.GuruCliException; +import com.amazonaws.gurureviewercli.model.Configuration; +import com.amazonaws.gurureviewercli.model.ErrorCodes; + 
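+/** + * Tests for {@link AssociationAdapter}: re-using an existing association and creating a new association + * against an existing bucket, a freshly created bucket, and with interactive confirmation or abort. + */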
+@ExtendWith(MockitoExtension.class) +class AssociationAdapterTest { + + @Mock + private CodeGuruReviewerClient guruFrontendService; + + @Mock + private S3Client s3client; + + @Test + public void test_getAssociatedGuruRepo_associationExists() { + val fakeArn = "123"; + val expected = RepositoryAssociation.builder() + .associationArn(fakeArn) + .state(RepositoryAssociationState.ASSOCIATED) + .build(); + val summary = RepositoryAssociationSummary.builder() + .associationArn(fakeArn) + .state(RepositoryAssociationState.ASSOCIATED) + .build(); + val response = ListRepositoryAssociationsResponse.builder().repositoryAssociationSummaries(summary).build(); + when(guruFrontendService.listRepositoryAssociations(any(ListRepositoryAssociationsRequest.class))) + .thenReturn(response); + val describeResponse = DescribeRepositoryAssociationResponse.builder().repositoryAssociation(expected).build(); + when(guruFrontendService.describeRepositoryAssociation(any(DescribeRepositoryAssociationRequest.class))) + .thenReturn(describeResponse); + val config = Configuration.builder() + .guruFrontendService(guruFrontendService) + .repoName("some-repo-name") + .build(); + val association = AssociationAdapter.getAssociatedGuruRepo(config); + Assertions.assertEquals(expected.associationArn(), association.associationArn()); + } + + @Test + public void test_getAssociatedGuruRepo_createNewWithExistingBucket() { + val bucketName = "some-bucket"; + val fakeArn = "123"; + val expected = RepositoryAssociation.builder() + .associationArn(fakeArn) + .state(RepositoryAssociationState.ASSOCIATED) + .build(); + val emptyListResponse = + ListRepositoryAssociationsResponse.builder() + .repositoryAssociationSummaries(Collections.emptyList()) + .build(); + when(guruFrontendService.listRepositoryAssociations(any(ListRepositoryAssociationsRequest.class))) + .thenReturn(emptyListResponse); + when(s3client.headBucket(any(HeadBucketRequest.class))).thenReturn(HeadBucketResponse.builder().build()); + when(guruFrontendService.associateRepository(any(AssociateRepositoryRequest.class))) + .thenReturn(AssociateRepositoryResponse.builder().repositoryAssociation(expected).build()); + when(guruFrontendService.describeRepositoryAssociation(any(DescribeRepositoryAssociationRequest.class))) + .thenReturn(DescribeRepositoryAssociationResponse.builder().repositoryAssociation(expected).build()); + val config = Configuration.builder() + .guruFrontendService(guruFrontendService) + .interactiveMode(false) + .s3Client(s3client) + .repoName("some-repo-name") + .build(); + val association = AssociationAdapter.getAssociatedGuruRepo(config); + Assertions.assertEquals(expected.associationArn(), association.associationArn()); + } + + @Test + public void test_getAssociatedGuruRepo_createNewWithCreateBucket() { + // Same test as test_getAssociatedGuruRepo_createNewWithExistingBucket since creating the bucket does not + // return anything + val bucketName = "some-bucket"; + val fakeArn = "123"; + val expected = RepositoryAssociation.builder() + .associationArn(fakeArn) + .state(RepositoryAssociationState.ASSOCIATED) + .build(); + val emptyListResponse = + ListRepositoryAssociationsResponse.builder() + .repositoryAssociationSummaries(Collections.emptyList()) + .build(); + when(guruFrontendService.listRepositoryAssociations(any(ListRepositoryAssociationsRequest.class))) + .thenReturn(emptyListResponse); + when(s3client.headBucket(any(HeadBucketRequest.class))).thenThrow(NoSuchBucketException.class); + 
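// The bucket does not exist yet, so the adapter is expected to create it before associating the repository. +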
when(guruFrontendService.associateRepository(any(AssociateRepositoryRequest.class))) + .thenReturn(AssociateRepositoryResponse.builder().repositoryAssociation(expected).build()); + when(guruFrontendService.describeRepositoryAssociation(any(DescribeRepositoryAssociationRequest.class))) + .thenReturn(DescribeRepositoryAssociationResponse.builder().repositoryAssociation(expected).build()); + val config = Configuration.builder() + .guruFrontendService(guruFrontendService) + .interactiveMode(false) + .s3Client(s3client) + .repoName("some-repo-name") + .build(); + val association = AssociationAdapter.getAssociatedGuruRepo(config); + Assertions.assertEquals(expected.associationArn(), association.associationArn()); + } + + @Test + public void test_getAssociatedGuruRepo_createNewWithCreateBucketInteractive() { + val bucketName = "some-bucket"; + val fakeArn = "123"; + val expected = RepositoryAssociation.builder() + .associationArn(fakeArn) + .state(RepositoryAssociationState.ASSOCIATED) + .build(); + + val emptyListResponse = + ListRepositoryAssociationsResponse.builder() + .repositoryAssociationSummaries(Collections.emptyList()) + .build(); + when(guruFrontendService.listRepositoryAssociations(any(ListRepositoryAssociationsRequest.class))) + .thenReturn(emptyListResponse); + when(s3client.headBucket(any(HeadBucketRequest.class))).thenThrow(NoSuchBucketException.class); + when(guruFrontendService.associateRepository(any(AssociateRepositoryRequest.class))) + .thenReturn(AssociateRepositoryResponse.builder().repositoryAssociation(expected).build()); + when(guruFrontendService.describeRepositoryAssociation(any(DescribeRepositoryAssociationRequest.class))) + .thenReturn(DescribeRepositoryAssociationResponse.builder().repositoryAssociation(expected).build()); + + val mockTerminal = new MockTextTerminal(); + mockTerminal.getInputs().add("y"); + + val config = Configuration.builder() + .guruFrontendService(guruFrontendService) + .interactiveMode(true) + .s3Client(s3client) + .repoName("some-repo-name") + .textIO(new TextIO(mockTerminal)) + .build(); + val association = AssociationAdapter.getAssociatedGuruRepo(config); + Assertions.assertEquals(expected.associationArn(), association.associationArn()); + } + + @Test + public void test_getAssociatedGuruRepo_createNewWithCreateBucketInteractiveAbort() { + val bucketName = "some-bucket"; + val emptyListResponse = + ListRepositoryAssociationsResponse.builder() + .repositoryAssociationSummaries(Collections.emptyList()) + .build(); + when(guruFrontendService.listRepositoryAssociations(any(ListRepositoryAssociationsRequest.class))) + .thenReturn(emptyListResponse); + when(s3client.headBucket(any(HeadBucketRequest.class))).thenThrow(NoSuchBucketException.class); + + val mockTerminal = new MockTextTerminal(); + mockTerminal.getInputs().add("n"); + + val config = Configuration.builder() + .guruFrontendService(guruFrontendService) + .interactiveMode(true) + .s3Client(s3client) + .repoName("some-repo-name") + .textIO(new TextIO(mockTerminal)) + .build(); + GuruCliException ret = Assertions.assertThrows(GuruCliException.class, () -> + AssociationAdapter.getAssociatedGuruRepo(config)); + Assertions.assertEquals(ErrorCodes.USER_ABORT, ret.getErrorCode()); + } +} \ No newline at end of file diff --git a/src/test/java/com/amazonaws/gurureviewercli/adapter/GitAdapterTest.java b/src/test/java/com/amazonaws/gurureviewercli/adapter/GitAdapterTest.java new file mode 100644 index 0000000..d6328bc --- /dev/null +++ 
b/src/test/java/com/amazonaws/gurureviewercli/adapter/GitAdapterTest.java @@ -0,0 +1,116 @@ +package com.amazonaws.gurureviewercli.adapter; + +import java.nio.file.Path; +import java.nio.file.Paths; + +import lombok.val; +import org.beryx.textio.TextIO; +import org.beryx.textio.mock.MockTextTerminal; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import com.amazonaws.gurureviewercli.exceptions.GuruCliException; +import com.amazonaws.gurureviewercli.model.Configuration; +import com.amazonaws.gurureviewercli.model.ErrorCodes; + +class GitAdapterTest { + + private static final Path RESOURCE_ROOT = Paths.get("test-data"); + + @Test + public void test_getGitMetaData_notARepo() { + val repo = RESOURCE_ROOT.resolve("fresh-repo-without-remote"); + GuruCliException ret = Assertions.assertThrows(GuruCliException.class, () -> + GitAdapter.tryGetMetaData(configWithoutCommits(repo), repo.resolve("notgit"))); + Assertions.assertEquals(ErrorCodes.GIT_INVALID_DIR, ret.getErrorCode()); + } + + @Test + public void test_getGitMetaData_noRemote() throws Exception { + val repo = RESOURCE_ROOT.resolve("fresh-repo-no-remote"); + val metadata = GitAdapter.tryGetMetaData(configWithoutCommits(repo), repo.resolve("git")); + Assertions.assertNull(metadata.getRemoteUrl()); + Assertions.assertNotNull(metadata.getCurrentBranch()); + Assertions.assertEquals(repo, metadata.getRepoRoot()); + } + + + @Test + public void test_getGitMetaData_oneCommit_packageScan() { + val repo = RESOURCE_ROOT.resolve("one-commit"); + val mockTerminal = new MockTextTerminal(); + mockTerminal.getInputs().add("y"); + val config = Configuration.builder() + .textIO(new TextIO(mockTerminal)) + .interactiveMode(true) + .build(); + val gitMetaData = GitAdapter.tryGetMetaData(config, repo.resolve("git")); + Assertions.assertNotNull(gitMetaData); + Assertions.assertNull(gitMetaData.getBeforeCommit()); + Assertions.assertNull(gitMetaData.getAfterCommit()); + Assertions.assertEquals("master", gitMetaData.getCurrentBranch()); + Assertions.assertEquals("git@amazon.com:username/new_repo", gitMetaData.getRemoteUrl()); + } + + @Test + public void test_getGitMetaData_oneCommit_packageScanAbort() { + val repo = RESOURCE_ROOT.resolve("one-commit"); + val mockTerminal = new MockTextTerminal(); + mockTerminal.getInputs().add("n"); + val config = Configuration.builder() + .textIO(new TextIO(mockTerminal)) + .interactiveMode(true) + .build(); + GuruCliException ret = Assertions.assertThrows(GuruCliException.class, () -> + GitAdapter.tryGetMetaData(config, repo.resolve("git"))); + Assertions.assertEquals(ErrorCodes.USER_ABORT, ret.getErrorCode()); + + } + + @Test + public void test_getGitMetaData_twoCommits_validCommits() { + val repo = RESOURCE_ROOT.resolve("two-commits"); + val config = configWithoutCommits(repo); + config.setBeforeCommit("cdb0fcad7400610b1d1797a326a89414525160fe"); + config.setAfterCommit("8ece465b7ecf8337bf767c9602d21bb92f2fad8a"); + val gitMetaData = GitAdapter.tryGetMetaData(config, repo.resolve("git")); + Assertions.assertNotNull(gitMetaData); + Assertions.assertNotNull(gitMetaData.getBeforeCommit()); + Assertions.assertNotNull(gitMetaData.getAfterCommit()); + Assertions.assertEquals("master", gitMetaData.getCurrentBranch()); + Assertions.assertEquals("git@amazon.com:username/new_repo", gitMetaData.getRemoteUrl()); + } + + @Test + public void test_getGitMetaData_twoCommits_commitShortHand() { + val repo = RESOURCE_ROOT.resolve("two-commits"); + val config = configWithoutCommits(repo); + 
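// Short-hand revisions such as HEAD^ should be accepted just like full commit hashes. +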
config.setBeforeCommit("HEAD^"); + config.setAfterCommit("HEAD"); + val gitMetaData = GitAdapter.tryGetMetaData(config, repo.resolve("git")); + Assertions.assertNotNull(gitMetaData); + Assertions.assertNotNull(gitMetaData.getBeforeCommit()); + Assertions.assertNotNull(gitMetaData.getAfterCommit()); + Assertions.assertEquals("master", gitMetaData.getCurrentBranch()); + Assertions.assertEquals("git@amazon.com:username/new_repo", gitMetaData.getRemoteUrl()); + } + + @Test + public void test_getGitMetaData_twoCommits_invalidCommits() { + val repo = RESOURCE_ROOT.resolve("two-commits"); + val config = configWithoutCommits(repo); + config.setBeforeCommit("thisIsNotACommitHash"); + config.setAfterCommit("8ece465b7ecf8337bf767c9602d21bb92f2fad8a"); + + Exception ret = Assertions.assertThrows(Exception.class, () -> + GitAdapter.tryGetMetaData(config, repo.resolve("git"))); + Assertions.assertTrue(ret.getMessage().contains("Not a valid commit id ")); + } + + private Configuration configWithoutCommits(final Path workingDir) { + return Configuration.builder() + .textIO(new TextIO(new MockTextTerminal())) + .interactiveMode(false) + .build(); + } +} \ No newline at end of file diff --git a/src/test/java/com/amazonaws/gurureviewercli/adapter/ResultsAdapterTest.java b/src/test/java/com/amazonaws/gurureviewercli/adapter/ResultsAdapterTest.java new file mode 100644 index 0000000..3293955 --- /dev/null +++ b/src/test/java/com/amazonaws/gurureviewercli/adapter/ResultsAdapterTest.java @@ -0,0 +1,28 @@ +package com.amazonaws.gurureviewercli.adapter; + +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.Collections; + +import lombok.val; +import org.junit.jupiter.api.Test; + +import com.amazonaws.gurureviewercli.model.ScanMetaData; +import com.amazonaws.gurureviewercli.util.JsonUtil; + +class ResultsAdapterTest { + + @Test + void saveResults() throws Exception { + val recommendations = + JsonUtil.loadRecommendations(Paths.get("test-data/recommendations/recommendations.json")); + val scanMetaData = ScanMetaData.builder() + .repositoryRoot(Paths.get("./").toAbsolutePath().normalize()) + .associationArn("123") + .codeReviewArn("456") + .sourceDirectories(Collections.emptyList()) + .build(); + val outDir = Files.createTempDirectory(Paths.get("./"), "test-output"); + ResultsAdapter.saveResults(outDir, recommendations, scanMetaData); + } +} \ No newline at end of file diff --git a/src/test/java/com/amazonaws/gurureviewercli/adapter/ScanAdapterTest.java b/src/test/java/com/amazonaws/gurureviewercli/adapter/ScanAdapterTest.java new file mode 100644 index 0000000..0e45a59 --- /dev/null +++ b/src/test/java/com/amazonaws/gurureviewercli/adapter/ScanAdapterTest.java @@ -0,0 +1,77 @@ +package com.amazonaws.gurureviewercli.adapter; + +import java.nio.file.Paths; +import java.util.Arrays; +import java.util.List; + +import lombok.val; +import org.junit.jupiter.api.Assumptions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import software.amazon.awssdk.services.codegurureviewer.CodeGuruReviewerClient; +import software.amazon.awssdk.services.codegurureviewer.model.CodeReview; +import software.amazon.awssdk.services.codegurureviewer.model.CreateCodeReviewRequest; +import software.amazon.awssdk.services.codegurureviewer.model.CreateCodeReviewResponse; +import software.amazon.awssdk.services.codegurureviewer.model.DescribeRepositoryAssociationRequest; +import 
software.amazon.awssdk.services.codegurureviewer.model.DescribeRepositoryAssociationResponse; +import software.amazon.awssdk.services.codegurureviewer.model.ListRepositoryAssociationsRequest; +import software.amazon.awssdk.services.codegurureviewer.model.ListRepositoryAssociationsResponse; +import software.amazon.awssdk.services.codegurureviewer.model.RepositoryAssociation; +import software.amazon.awssdk.services.codegurureviewer.model.RepositoryAssociationState; +import software.amazon.awssdk.services.codegurureviewer.model.RepositoryAssociationSummary; +import software.amazon.awssdk.services.codegurureviewer.model.S3RepositoryDetails; +import software.amazon.awssdk.services.s3.S3Client; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.when; + +import com.amazonaws.gurureviewercli.model.Configuration; +import com.amazonaws.gurureviewercli.model.GitMetaData; + +@ExtendWith(MockitoExtension.class) +class ScanAdapterTest { + + @Mock + private CodeGuruReviewerClient guruFrontendService; + + @Mock + private S3Client s3client; + + @Test + public void test_startScan_HappyCase() throws Exception { + // skip the test if the test container stripped to the top level .git folder + Assumptions.assumeTrue(Paths.get("./.git").toFile().isDirectory()); + val fakeArn = "123"; + val bucketName = "some-bucket"; + val repoDetails = S3RepositoryDetails.builder().bucketName(bucketName).build(); + val expected = RepositoryAssociation.builder().associationArn(fakeArn) + .s3RepositoryDetails(repoDetails) + .state(RepositoryAssociationState.ASSOCIATED) + .build(); + val summary = RepositoryAssociationSummary.builder() + .associationArn(fakeArn) + .state(RepositoryAssociationState.ASSOCIATED) + .build(); + val response = ListRepositoryAssociationsResponse.builder().repositoryAssociationSummaries(summary).build(); + when(guruFrontendService.listRepositoryAssociations(any(ListRepositoryAssociationsRequest.class))) + .thenReturn(response); + val describeResponse = DescribeRepositoryAssociationResponse.builder().repositoryAssociation(expected).build(); + when(guruFrontendService.describeRepositoryAssociation(any(DescribeRepositoryAssociationRequest.class))) + .thenReturn(describeResponse); + val review = CodeReview.builder().codeReviewArn(fakeArn).build(); + val crResponse = CreateCodeReviewResponse.builder().codeReview(review).build(); + when(guruFrontendService.createCodeReview(any(CreateCodeReviewRequest.class))).thenReturn(crResponse); + + val config = Configuration.builder() + .guruFrontendService(guruFrontendService) + .s3Client(s3client) + .build(); + val gitMetaData = GitMetaData.builder() + .repoRoot(Paths.get("./")) + .build(); + List sourceDirs = Arrays.asList("src"); + List buildDirs = Arrays.asList(); + ScanAdapter.startScan(config, gitMetaData, sourceDirs, buildDirs); + } +} \ No newline at end of file diff --git a/test-data/fresh-repo-no-remote/git/HEAD b/test-data/fresh-repo-no-remote/git/HEAD new file mode 100644 index 0000000..cb089cd --- /dev/null +++ b/test-data/fresh-repo-no-remote/git/HEAD @@ -0,0 +1 @@ +ref: refs/heads/master diff --git a/test-data/fresh-repo-no-remote/git/config b/test-data/fresh-repo-no-remote/git/config new file mode 100644 index 0000000..6c9406b --- /dev/null +++ b/test-data/fresh-repo-no-remote/git/config @@ -0,0 +1,7 @@ +[core] + repositoryformatversion = 0 + filemode = true + bare = false + logallrefupdates = true + ignorecase = true + precomposeunicode = true diff --git a/test-data/fresh-repo-no-remote/git/description 
b/test-data/fresh-repo-no-remote/git/description new file mode 100644 index 0000000..498b267 --- /dev/null +++ b/test-data/fresh-repo-no-remote/git/description @@ -0,0 +1 @@ +Unnamed repository; edit this file 'description' to name the repository. diff --git a/test-data/fresh-repo-no-remote/git/hooks/applypatch-msg.sample b/test-data/fresh-repo-no-remote/git/hooks/applypatch-msg.sample new file mode 100755 index 0000000..a5d7b84 --- /dev/null +++ b/test-data/fresh-repo-no-remote/git/hooks/applypatch-msg.sample @@ -0,0 +1,15 @@ +#!/bin/sh +# +# An example hook script to check the commit log message taken by +# applypatch from an e-mail message. +# +# The hook should exit with non-zero status after issuing an +# appropriate message if it wants to stop the commit. The hook is +# allowed to edit the commit message file. +# +# To enable this hook, rename this file to "applypatch-msg". + +. git-sh-setup +commitmsg="$(git rev-parse --git-path hooks/commit-msg)" +test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} +: diff --git a/test-data/fresh-repo-no-remote/git/hooks/commit-msg.sample b/test-data/fresh-repo-no-remote/git/hooks/commit-msg.sample new file mode 100755 index 0000000..b58d118 --- /dev/null +++ b/test-data/fresh-repo-no-remote/git/hooks/commit-msg.sample @@ -0,0 +1,24 @@ +#!/bin/sh +# +# An example hook script to check the commit log message. +# Called by "git commit" with one argument, the name of the file +# that has the commit message. The hook should exit with non-zero +# status after issuing an appropriate message if it wants to stop the +# commit. The hook is allowed to edit the commit message file. +# +# To enable this hook, rename this file to "commit-msg". + +# Uncomment the below to add a Signed-off-by line to the message. +# Doing this in a hook is a bad idea in general, but the prepare-commit-msg +# hook is more suited to it. +# +# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') +# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" + +# This example catches duplicate Signed-off-by lines. + +test "" = "$(grep '^Signed-off-by: ' "$1" | + sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { + echo >&2 Duplicate Signed-off-by lines. + exit 1 +} diff --git a/test-data/fresh-repo-no-remote/git/hooks/fsmonitor-watchman.sample b/test-data/fresh-repo-no-remote/git/hooks/fsmonitor-watchman.sample new file mode 100755 index 0000000..e673bb3 --- /dev/null +++ b/test-data/fresh-repo-no-remote/git/hooks/fsmonitor-watchman.sample @@ -0,0 +1,114 @@ +#!/usr/bin/perl + +use strict; +use warnings; +use IPC::Open2; + +# An example hook script to integrate Watchman +# (https://facebook.github.io/watchman/) with git to speed up detecting +# new and modified files. +# +# The hook is passed a version (currently 1) and a time in nanoseconds +# formatted as a string and outputs to stdout all files that have been +# modified since the given time. Paths must be relative to the root of +# the working tree and separated by a single NUL. +# +# To enable this hook, rename this file to "query-watchman" and set +# 'git config core.fsmonitor .git/hooks/query-watchman' +# +my ($version, $time) = @ARGV; + +# Check the hook interface version + +if ($version == 1) { + # convert nanoseconds to seconds + $time = int $time / 1000000000; +} else { + die "Unsupported query-fsmonitor hook version '$version'.\n" . 
+ "Falling back to scanning...\n"; +} + +my $git_work_tree; +if ($^O =~ 'msys' || $^O =~ 'cygwin') { + $git_work_tree = Win32::GetCwd(); + $git_work_tree =~ tr/\\/\//; +} else { + require Cwd; + $git_work_tree = Cwd::cwd(); +} + +my $retry = 1; + +launch_watchman(); + +sub launch_watchman { + + my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty') + or die "open2() failed: $!\n" . + "Falling back to scanning...\n"; + + # In the query expression below we're asking for names of files that + # changed since $time but were not transient (ie created after + # $time but no longer exist). + # + # To accomplish this, we're using the "since" generator to use the + # recency index to select candidate nodes and "fields" to limit the + # output to file names only. Then we're using the "expression" term to + # further constrain the results. + # + # The category of transient files that we want to ignore will have a + # creation clock (cclock) newer than $time_t value and will also not + # currently exist. + + my $query = <<" END"; + ["query", "$git_work_tree", { + "since": $time, + "fields": ["name"], + "expression": ["not", ["allof", ["since", $time, "cclock"], ["not", "exists"]]] + }] + END + + print CHLD_IN $query; + close CHLD_IN; + my $response = do {local $/; }; + + die "Watchman: command returned no output.\n" . + "Falling back to scanning...\n" if $response eq ""; + die "Watchman: command returned invalid output: $response\n" . + "Falling back to scanning...\n" unless $response =~ /^\{/; + + my $json_pkg; + eval { + require JSON::XS; + $json_pkg = "JSON::XS"; + 1; + } or do { + require JSON::PP; + $json_pkg = "JSON::PP"; + }; + + my $o = $json_pkg->new->utf8->decode($response); + + if ($retry > 0 and $o->{error} and $o->{error} =~ m/unable to resolve root .* directory (.*) is not watched/) { + print STDERR "Adding '$git_work_tree' to watchman's watch list.\n"; + $retry--; + qx/watchman watch "$git_work_tree"/; + die "Failed to make watchman watch '$git_work_tree'.\n" . + "Falling back to scanning...\n" if $? != 0; + + # Watchman will always return all files on the first query so + # return the fast "everything is dirty" flag to git and do the + # Watchman query just to get it over with now so we won't pay + # the cost in git to look up each individual file. + print "/\0"; + eval { launch_watchman() }; + exit 0; + } + + die "Watchman: $o->{error}.\n" . + "Falling back to scanning...\n" if $o->{error}; + + binmode STDOUT, ":utf8"; + local $, = "\0"; + print @{$o->{files}}; +} diff --git a/test-data/fresh-repo-no-remote/git/hooks/post-update.sample b/test-data/fresh-repo-no-remote/git/hooks/post-update.sample new file mode 100755 index 0000000..ec17ec1 --- /dev/null +++ b/test-data/fresh-repo-no-remote/git/hooks/post-update.sample @@ -0,0 +1,8 @@ +#!/bin/sh +# +# An example hook script to prepare a packed repository for use over +# dumb transports. +# +# To enable this hook, rename this file to "post-update". + +exec git update-server-info diff --git a/test-data/fresh-repo-no-remote/git/hooks/pre-applypatch.sample b/test-data/fresh-repo-no-remote/git/hooks/pre-applypatch.sample new file mode 100755 index 0000000..4142082 --- /dev/null +++ b/test-data/fresh-repo-no-remote/git/hooks/pre-applypatch.sample @@ -0,0 +1,14 @@ +#!/bin/sh +# +# An example hook script to verify what is about to be committed +# by applypatch from an e-mail message. +# +# The hook should exit with non-zero status after issuing an +# appropriate message if it wants to stop the commit. 
+# +# To enable this hook, rename this file to "pre-applypatch". + +. git-sh-setup +precommit="$(git rev-parse --git-path hooks/pre-commit)" +test -x "$precommit" && exec "$precommit" ${1+"$@"} +: diff --git a/test-data/fresh-repo-no-remote/git/hooks/pre-commit.sample b/test-data/fresh-repo-no-remote/git/hooks/pre-commit.sample new file mode 100755 index 0000000..6a75641 --- /dev/null +++ b/test-data/fresh-repo-no-remote/git/hooks/pre-commit.sample @@ -0,0 +1,49 @@ +#!/bin/sh +# +# An example hook script to verify what is about to be committed. +# Called by "git commit" with no arguments. The hook should +# exit with non-zero status after issuing an appropriate message if +# it wants to stop the commit. +# +# To enable this hook, rename this file to "pre-commit". + +if git rev-parse --verify HEAD >/dev/null 2>&1 +then + against=HEAD +else + # Initial commit: diff against an empty tree object + against=$(git hash-object -t tree /dev/null) +fi + +# If you want to allow non-ASCII filenames set this variable to true. +allownonascii=$(git config --bool hooks.allownonascii) + +# Redirect output to stderr. +exec 1>&2 + +# Cross platform projects tend to avoid non-ASCII filenames; prevent +# them from being added to the repository. We exploit the fact that the +# printable range starts at the space character and ends with tilde. +if [ "$allownonascii" != "true" ] && + # Note that the use of brackets around a tr range is ok here, (it's + # even required, for portability to Solaris 10's /usr/bin/tr), since + # the square bracket bytes happen to fall in the designated range. + test $(git diff --cached --name-only --diff-filter=A -z $against | + LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 +then + cat <<\EOF +Error: Attempt to add a non-ASCII file name. + +This can cause problems if you want to work with people on other platforms. + +To be portable it is advisable to rename the file. + +If you know what you are doing you can disable this check using: + + git config hooks.allownonascii true +EOF + exit 1 +fi + +# If there are whitespace errors, print the offending file names and fail. +exec git diff-index --check --cached $against -- diff --git a/test-data/fresh-repo-no-remote/git/hooks/pre-merge-commit.sample b/test-data/fresh-repo-no-remote/git/hooks/pre-merge-commit.sample new file mode 100755 index 0000000..399eab1 --- /dev/null +++ b/test-data/fresh-repo-no-remote/git/hooks/pre-merge-commit.sample @@ -0,0 +1,13 @@ +#!/bin/sh +# +# An example hook script to verify what is about to be committed. +# Called by "git merge" with no arguments. The hook should +# exit with non-zero status after issuing an appropriate message to +# stderr if it wants to stop the merge commit. +# +# To enable this hook, rename this file to "pre-merge-commit". + +. git-sh-setup +test -x "$GIT_DIR/hooks/pre-commit" && + exec "$GIT_DIR/hooks/pre-commit" +: diff --git a/test-data/fresh-repo-no-remote/git/hooks/pre-push.sample b/test-data/fresh-repo-no-remote/git/hooks/pre-push.sample new file mode 100755 index 0000000..6187dbf --- /dev/null +++ b/test-data/fresh-repo-no-remote/git/hooks/pre-push.sample @@ -0,0 +1,53 @@ +#!/bin/sh + +# An example hook script to verify what is about to be pushed. Called by "git +# push" after it has checked the remote status, but before anything has been +# pushed. If this script exits with a non-zero status nothing will be pushed. 
+# +# This hook is called with the following parameters: +# +# $1 -- Name of the remote to which the push is being done +# $2 -- URL to which the push is being done +# +# If pushing without using a named remote those arguments will be equal. +# +# Information about the commits which are being pushed is supplied as lines to +# the standard input in the form: +# +# +# +# This sample shows how to prevent push of commits where the log message starts +# with "WIP" (work in progress). + +remote="$1" +url="$2" + +z40=0000000000000000000000000000000000000000 + +while read local_ref local_sha remote_ref remote_sha +do + if [ "$local_sha" = $z40 ] + then + # Handle delete + : + else + if [ "$remote_sha" = $z40 ] + then + # New branch, examine all commits + range="$local_sha" + else + # Update to existing branch, examine new commits + range="$remote_sha..$local_sha" + fi + + # Check for WIP commit + commit=`git rev-list -n 1 --grep '^WIP' "$range"` + if [ -n "$commit" ] + then + echo >&2 "Found WIP commit in $local_ref, not pushing" + exit 1 + fi + fi +done + +exit 0 diff --git a/test-data/fresh-repo-no-remote/git/hooks/pre-rebase.sample b/test-data/fresh-repo-no-remote/git/hooks/pre-rebase.sample new file mode 100755 index 0000000..6cbef5c --- /dev/null +++ b/test-data/fresh-repo-no-remote/git/hooks/pre-rebase.sample @@ -0,0 +1,169 @@ +#!/bin/sh +# +# Copyright (c) 2006, 2008 Junio C Hamano +# +# The "pre-rebase" hook is run just before "git rebase" starts doing +# its job, and can prevent the command from running by exiting with +# non-zero status. +# +# The hook is called with the following parameters: +# +# $1 -- the upstream the series was forked from. +# $2 -- the branch being rebased (or empty when rebasing the current branch). +# +# This sample shows how to prevent topic branches that are already +# merged to 'next' branch from getting rebased, because allowing it +# would result in rebasing already published history. + +publish=next +basebranch="$1" +if test "$#" = 2 +then + topic="refs/heads/$2" +else + topic=`git symbolic-ref HEAD` || + exit 0 ;# we do not interrupt rebasing detached HEAD +fi + +case "$topic" in +refs/heads/??/*) + ;; +*) + exit 0 ;# we do not interrupt others. + ;; +esac + +# Now we are dealing with a topic branch being rebased +# on top of master. Is it OK to rebase it? + +# Does the topic really exist? +git show-ref -q "$topic" || { + echo >&2 "No such branch $topic" + exit 1 +} + +# Is topic fully merged to master? +not_in_master=`git rev-list --pretty=oneline ^master "$topic"` +if test -z "$not_in_master" +then + echo >&2 "$topic is fully merged to master; better remove it." + exit 1 ;# we could allow it, but there is no point. +fi + +# Is topic ever merged to next? If so you should not be rebasing it. +only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` +only_next_2=`git rev-list ^master ${publish} | sort` +if test "$only_next_1" = "$only_next_2" +then + not_in_topic=`git rev-list "^$topic" master` + if test -z "$not_in_topic" + then + echo >&2 "$topic is already up to date with master" + exit 1 ;# we could allow it, but there is no point. 
+ else + exit 0 + fi +else + not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` + /usr/bin/perl -e ' + my $topic = $ARGV[0]; + my $msg = "* $topic has commits already merged to public branch:\n"; + my (%not_in_next) = map { + /^([0-9a-f]+) /; + ($1 => 1); + } split(/\n/, $ARGV[1]); + for my $elem (map { + /^([0-9a-f]+) (.*)$/; + [$1 => $2]; + } split(/\n/, $ARGV[2])) { + if (!exists $not_in_next{$elem->[0]}) { + if ($msg) { + print STDERR $msg; + undef $msg; + } + print STDERR " $elem->[1]\n"; + } + } + ' "$topic" "$not_in_next" "$not_in_master" + exit 1 +fi + +<<\DOC_END + +This sample hook safeguards topic branches that have been +published from being rewound. + +The workflow assumed here is: + + * Once a topic branch forks from "master", "master" is never + merged into it again (either directly or indirectly). + + * Once a topic branch is fully cooked and merged into "master", + it is deleted. If you need to build on top of it to correct + earlier mistakes, a new topic branch is created by forking at + the tip of the "master". This is not strictly necessary, but + it makes it easier to keep your history simple. + + * Whenever you need to test or publish your changes to topic + branches, merge them into "next" branch. + +The script, being an example, hardcodes the publish branch name +to be "next", but it is trivial to make it configurable via +$GIT_DIR/config mechanism. + +With this workflow, you would want to know: + +(1) ... if a topic branch has ever been merged to "next". Young + topic branches can have stupid mistakes you would rather + clean up before publishing, and things that have not been + merged into other branches can be easily rebased without + affecting other people. But once it is published, you would + not want to rewind it. + +(2) ... if a topic branch has been fully merged to "master". + Then you can delete it. More importantly, you should not + build on top of it -- other people may already want to + change things related to the topic as patches against your + "master", so if you need further changes, it is better to + fork the topic (perhaps with the same name) afresh from the + tip of "master". + +Let's look at this example: + + o---o---o---o---o---o---o---o---o---o "next" + / / / / + / a---a---b A / / + / / / / + / / c---c---c---c B / + / / / \ / + / / / b---b C \ / + / / / / \ / + ---o---o---o---o---o---o---o---o---o---o---o "master" + + +A, B and C are topic branches. + + * A has one fix since it was merged up to "next". + + * B has finished. It has been fully merged up to "master" and "next", + and is ready to be deleted. + + * C has not merged to "next" at all. + +We would want to allow C to be rebased, refuse A, and encourage +B to be deleted. + +To compute (1): + + git rev-list ^master ^topic next + git rev-list ^master next + + if these match, topic has not merged in next at all. + +To compute (2): + + git rev-list master..topic + + if this is empty, it is fully merged to "master". + +DOC_END diff --git a/test-data/fresh-repo-no-remote/git/hooks/pre-receive.sample b/test-data/fresh-repo-no-remote/git/hooks/pre-receive.sample new file mode 100755 index 0000000..a1fd29e --- /dev/null +++ b/test-data/fresh-repo-no-remote/git/hooks/pre-receive.sample @@ -0,0 +1,24 @@ +#!/bin/sh +# +# An example hook script to make use of push options. +# The example simply echoes all push options that start with 'echoback=' +# and rejects all pushes when the "reject" push option is used. +# +# To enable this hook, rename this file to "pre-receive". 
+ +if test -n "$GIT_PUSH_OPTION_COUNT" +then + i=0 + while test "$i" -lt "$GIT_PUSH_OPTION_COUNT" + do + eval "value=\$GIT_PUSH_OPTION_$i" + case "$value" in + echoback=*) + echo "echo from the pre-receive-hook: ${value#*=}" >&2 + ;; + reject) + exit 1 + esac + i=$((i + 1)) + done +fi diff --git a/test-data/fresh-repo-no-remote/git/hooks/prepare-commit-msg.sample b/test-data/fresh-repo-no-remote/git/hooks/prepare-commit-msg.sample new file mode 100755 index 0000000..10fa14c --- /dev/null +++ b/test-data/fresh-repo-no-remote/git/hooks/prepare-commit-msg.sample @@ -0,0 +1,42 @@ +#!/bin/sh +# +# An example hook script to prepare the commit log message. +# Called by "git commit" with the name of the file that has the +# commit message, followed by the description of the commit +# message's source. The hook's purpose is to edit the commit +# message file. If the hook fails with a non-zero status, +# the commit is aborted. +# +# To enable this hook, rename this file to "prepare-commit-msg". + +# This hook includes three examples. The first one removes the +# "# Please enter the commit message..." help message. +# +# The second includes the output of "git diff --name-status -r" +# into the message, just before the "git status" output. It is +# commented because it doesn't cope with --amend or with squashed +# commits. +# +# The third example adds a Signed-off-by line to the message, that can +# still be edited. This is rarely a good idea. + +COMMIT_MSG_FILE=$1 +COMMIT_SOURCE=$2 +SHA1=$3 + +/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE" + +# case "$COMMIT_SOURCE,$SHA1" in +# ,|template,) +# /usr/bin/perl -i.bak -pe ' +# print "\n" . `git diff --cached --name-status -r` +# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;; +# *) ;; +# esac + +# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') +# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE" +# if test -z "$COMMIT_SOURCE" +# then +# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE" +# fi diff --git a/test-data/fresh-repo-no-remote/git/hooks/update.sample b/test-data/fresh-repo-no-remote/git/hooks/update.sample new file mode 100755 index 0000000..80ba941 --- /dev/null +++ b/test-data/fresh-repo-no-remote/git/hooks/update.sample @@ -0,0 +1,128 @@ +#!/bin/sh +# +# An example hook script to block unannotated tags from entering. +# Called by "git receive-pack" with arguments: refname sha1-old sha1-new +# +# To enable this hook, rename this file to "update". +# +# Config +# ------ +# hooks.allowunannotated +# This boolean sets whether unannotated tags will be allowed into the +# repository. By default they won't be. +# hooks.allowdeletetag +# This boolean sets whether deleting tags will be allowed in the +# repository. By default they won't be. +# hooks.allowmodifytag +# This boolean sets whether a tag may be modified after creation. By default +# it won't be. +# hooks.allowdeletebranch +# This boolean sets whether deleting branches will be allowed in the +# repository. By default they won't be. +# hooks.denycreatebranch +# This boolean sets whether remotely creating branches will be denied +# in the repository. By default this is allowed. +# + +# --- Command line +refname="$1" +oldrev="$2" +newrev="$3" + +# --- Safety check +if [ -z "$GIT_DIR" ]; then + echo "Don't run this script from the command line." 
>&2 + echo " (if you want, you could supply GIT_DIR then run" >&2 + echo " $0 )" >&2 + exit 1 +fi + +if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then + echo "usage: $0 " >&2 + exit 1 +fi + +# --- Config +allowunannotated=$(git config --bool hooks.allowunannotated) +allowdeletebranch=$(git config --bool hooks.allowdeletebranch) +denycreatebranch=$(git config --bool hooks.denycreatebranch) +allowdeletetag=$(git config --bool hooks.allowdeletetag) +allowmodifytag=$(git config --bool hooks.allowmodifytag) + +# check for no description +projectdesc=$(sed -e '1q' "$GIT_DIR/description") +case "$projectdesc" in +"Unnamed repository"* | "") + echo "*** Project description file hasn't been set" >&2 + exit 1 + ;; +esac + +# --- Check types +# if $newrev is 0000...0000, it's a commit to delete a ref. +zero="0000000000000000000000000000000000000000" +if [ "$newrev" = "$zero" ]; then + newrev_type=delete +else + newrev_type=$(git cat-file -t $newrev) +fi + +case "$refname","$newrev_type" in + refs/tags/*,commit) + # un-annotated tag + short_refname=${refname##refs/tags/} + if [ "$allowunannotated" != "true" ]; then + echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 + echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 + exit 1 + fi + ;; + refs/tags/*,delete) + # delete tag + if [ "$allowdeletetag" != "true" ]; then + echo "*** Deleting a tag is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/tags/*,tag) + # annotated tag + if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 + then + echo "*** Tag '$refname' already exists." >&2 + echo "*** Modifying a tag is not allowed in this repository." >&2 + exit 1 + fi + ;; + refs/heads/*,commit) + # branch + if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then + echo "*** Creating a branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/heads/*,delete) + # delete branch + if [ "$allowdeletebranch" != "true" ]; then + echo "*** Deleting a branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/remotes/*,commit) + # tracking branch + ;; + refs/remotes/*,delete) + # delete tracking branch + if [ "$allowdeletebranch" != "true" ]; then + echo "*** Deleting a tracking branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + *) + # Anything else (is there anything else?) + echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 + exit 1 + ;; +esac + +# --- Finished +exit 0 diff --git a/test-data/fresh-repo-no-remote/git/info/exclude b/test-data/fresh-repo-no-remote/git/info/exclude new file mode 100644 index 0000000..a5196d1 --- /dev/null +++ b/test-data/fresh-repo-no-remote/git/info/exclude @@ -0,0 +1,6 @@ +# git ls-files --others --exclude-from=.git/info/exclude +# Lines that start with '#' are comments. 
+# For a project mostly in C, the following would be a good set of +# exclude patterns (uncomment them if you want to use them): +# *.[oa] +# *~ diff --git a/test-data/fresh-repo-with-remote/git/HEAD b/test-data/fresh-repo-with-remote/git/HEAD new file mode 100644 index 0000000..cb089cd --- /dev/null +++ b/test-data/fresh-repo-with-remote/git/HEAD @@ -0,0 +1 @@ +ref: refs/heads/master diff --git a/test-data/fresh-repo-with-remote/git/config b/test-data/fresh-repo-with-remote/git/config new file mode 100644 index 0000000..0344a2e --- /dev/null +++ b/test-data/fresh-repo-with-remote/git/config @@ -0,0 +1,10 @@ +[core] + repositoryformatversion = 0 + filemode = true + bare = false + logallrefupdates = true + ignorecase = true + precomposeunicode = true +[remote "origin"] + url = git@amazon.com:username/new_repo + fetch = +refs/heads/*:refs/remotes/origin/* diff --git a/test-data/fresh-repo-with-remote/git/description b/test-data/fresh-repo-with-remote/git/description new file mode 100644 index 0000000..498b267 --- /dev/null +++ b/test-data/fresh-repo-with-remote/git/description @@ -0,0 +1 @@ +Unnamed repository; edit this file 'description' to name the repository. diff --git a/test-data/fresh-repo-with-remote/git/hooks/applypatch-msg.sample b/test-data/fresh-repo-with-remote/git/hooks/applypatch-msg.sample new file mode 100755 index 0000000..a5d7b84 --- /dev/null +++ b/test-data/fresh-repo-with-remote/git/hooks/applypatch-msg.sample @@ -0,0 +1,15 @@ +#!/bin/sh +# +# An example hook script to check the commit log message taken by +# applypatch from an e-mail message. +# +# The hook should exit with non-zero status after issuing an +# appropriate message if it wants to stop the commit. The hook is +# allowed to edit the commit message file. +# +# To enable this hook, rename this file to "applypatch-msg". + +. git-sh-setup +commitmsg="$(git rev-parse --git-path hooks/commit-msg)" +test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} +: diff --git a/test-data/fresh-repo-with-remote/git/hooks/commit-msg.sample b/test-data/fresh-repo-with-remote/git/hooks/commit-msg.sample new file mode 100755 index 0000000..b58d118 --- /dev/null +++ b/test-data/fresh-repo-with-remote/git/hooks/commit-msg.sample @@ -0,0 +1,24 @@ +#!/bin/sh +# +# An example hook script to check the commit log message. +# Called by "git commit" with one argument, the name of the file +# that has the commit message. The hook should exit with non-zero +# status after issuing an appropriate message if it wants to stop the +# commit. The hook is allowed to edit the commit message file. +# +# To enable this hook, rename this file to "commit-msg". + +# Uncomment the below to add a Signed-off-by line to the message. +# Doing this in a hook is a bad idea in general, but the prepare-commit-msg +# hook is more suited to it. +# +# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') +# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" + +# This example catches duplicate Signed-off-by lines. + +test "" = "$(grep '^Signed-off-by: ' "$1" | + sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { + echo >&2 Duplicate Signed-off-by lines. 
+ exit 1 +} diff --git a/test-data/fresh-repo-with-remote/git/hooks/fsmonitor-watchman.sample b/test-data/fresh-repo-with-remote/git/hooks/fsmonitor-watchman.sample new file mode 100755 index 0000000..e673bb3 --- /dev/null +++ b/test-data/fresh-repo-with-remote/git/hooks/fsmonitor-watchman.sample @@ -0,0 +1,114 @@ +#!/usr/bin/perl + +use strict; +use warnings; +use IPC::Open2; + +# An example hook script to integrate Watchman +# (https://facebook.github.io/watchman/) with git to speed up detecting +# new and modified files. +# +# The hook is passed a version (currently 1) and a time in nanoseconds +# formatted as a string and outputs to stdout all files that have been +# modified since the given time. Paths must be relative to the root of +# the working tree and separated by a single NUL. +# +# To enable this hook, rename this file to "query-watchman" and set +# 'git config core.fsmonitor .git/hooks/query-watchman' +# +my ($version, $time) = @ARGV; + +# Check the hook interface version + +if ($version == 1) { + # convert nanoseconds to seconds + $time = int $time / 1000000000; +} else { + die "Unsupported query-fsmonitor hook version '$version'.\n" . + "Falling back to scanning...\n"; +} + +my $git_work_tree; +if ($^O =~ 'msys' || $^O =~ 'cygwin') { + $git_work_tree = Win32::GetCwd(); + $git_work_tree =~ tr/\\/\//; +} else { + require Cwd; + $git_work_tree = Cwd::cwd(); +} + +my $retry = 1; + +launch_watchman(); + +sub launch_watchman { + + my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty') + or die "open2() failed: $!\n" . + "Falling back to scanning...\n"; + + # In the query expression below we're asking for names of files that + # changed since $time but were not transient (ie created after + # $time but no longer exist). + # + # To accomplish this, we're using the "since" generator to use the + # recency index to select candidate nodes and "fields" to limit the + # output to file names only. Then we're using the "expression" term to + # further constrain the results. + # + # The category of transient files that we want to ignore will have a + # creation clock (cclock) newer than $time_t value and will also not + # currently exist. + + my $query = <<" END"; + ["query", "$git_work_tree", { + "since": $time, + "fields": ["name"], + "expression": ["not", ["allof", ["since", $time, "cclock"], ["not", "exists"]]] + }] + END + + print CHLD_IN $query; + close CHLD_IN; + my $response = do {local $/; }; + + die "Watchman: command returned no output.\n" . + "Falling back to scanning...\n" if $response eq ""; + die "Watchman: command returned invalid output: $response\n" . + "Falling back to scanning...\n" unless $response =~ /^\{/; + + my $json_pkg; + eval { + require JSON::XS; + $json_pkg = "JSON::XS"; + 1; + } or do { + require JSON::PP; + $json_pkg = "JSON::PP"; + }; + + my $o = $json_pkg->new->utf8->decode($response); + + if ($retry > 0 and $o->{error} and $o->{error} =~ m/unable to resolve root .* directory (.*) is not watched/) { + print STDERR "Adding '$git_work_tree' to watchman's watch list.\n"; + $retry--; + qx/watchman watch "$git_work_tree"/; + die "Failed to make watchman watch '$git_work_tree'.\n" . + "Falling back to scanning...\n" if $? != 0; + + # Watchman will always return all files on the first query so + # return the fast "everything is dirty" flag to git and do the + # Watchman query just to get it over with now so we won't pay + # the cost in git to look up each individual file. 
+ print "/\0"; + eval { launch_watchman() }; + exit 0; + } + + die "Watchman: $o->{error}.\n" . + "Falling back to scanning...\n" if $o->{error}; + + binmode STDOUT, ":utf8"; + local $, = "\0"; + print @{$o->{files}}; +} diff --git a/test-data/fresh-repo-with-remote/git/hooks/post-update.sample b/test-data/fresh-repo-with-remote/git/hooks/post-update.sample new file mode 100755 index 0000000..ec17ec1 --- /dev/null +++ b/test-data/fresh-repo-with-remote/git/hooks/post-update.sample @@ -0,0 +1,8 @@ +#!/bin/sh +# +# An example hook script to prepare a packed repository for use over +# dumb transports. +# +# To enable this hook, rename this file to "post-update". + +exec git update-server-info diff --git a/test-data/fresh-repo-with-remote/git/hooks/pre-applypatch.sample b/test-data/fresh-repo-with-remote/git/hooks/pre-applypatch.sample new file mode 100755 index 0000000..4142082 --- /dev/null +++ b/test-data/fresh-repo-with-remote/git/hooks/pre-applypatch.sample @@ -0,0 +1,14 @@ +#!/bin/sh +# +# An example hook script to verify what is about to be committed +# by applypatch from an e-mail message. +# +# The hook should exit with non-zero status after issuing an +# appropriate message if it wants to stop the commit. +# +# To enable this hook, rename this file to "pre-applypatch". + +. git-sh-setup +precommit="$(git rev-parse --git-path hooks/pre-commit)" +test -x "$precommit" && exec "$precommit" ${1+"$@"} +: diff --git a/test-data/fresh-repo-with-remote/git/hooks/pre-commit.sample b/test-data/fresh-repo-with-remote/git/hooks/pre-commit.sample new file mode 100755 index 0000000..6a75641 --- /dev/null +++ b/test-data/fresh-repo-with-remote/git/hooks/pre-commit.sample @@ -0,0 +1,49 @@ +#!/bin/sh +# +# An example hook script to verify what is about to be committed. +# Called by "git commit" with no arguments. The hook should +# exit with non-zero status after issuing an appropriate message if +# it wants to stop the commit. +# +# To enable this hook, rename this file to "pre-commit". + +if git rev-parse --verify HEAD >/dev/null 2>&1 +then + against=HEAD +else + # Initial commit: diff against an empty tree object + against=$(git hash-object -t tree /dev/null) +fi + +# If you want to allow non-ASCII filenames set this variable to true. +allownonascii=$(git config --bool hooks.allownonascii) + +# Redirect output to stderr. +exec 1>&2 + +# Cross platform projects tend to avoid non-ASCII filenames; prevent +# them from being added to the repository. We exploit the fact that the +# printable range starts at the space character and ends with tilde. +if [ "$allownonascii" != "true" ] && + # Note that the use of brackets around a tr range is ok here, (it's + # even required, for portability to Solaris 10's /usr/bin/tr), since + # the square bracket bytes happen to fall in the designated range. + test $(git diff --cached --name-only --diff-filter=A -z $against | + LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 +then + cat <<\EOF +Error: Attempt to add a non-ASCII file name. + +This can cause problems if you want to work with people on other platforms. + +To be portable it is advisable to rename the file. + +If you know what you are doing you can disable this check using: + + git config hooks.allownonascii true +EOF + exit 1 +fi + +# If there are whitespace errors, print the offending file names and fail. 
+exec git diff-index --check --cached $against -- diff --git a/test-data/fresh-repo-with-remote/git/hooks/pre-merge-commit.sample b/test-data/fresh-repo-with-remote/git/hooks/pre-merge-commit.sample new file mode 100755 index 0000000..399eab1 --- /dev/null +++ b/test-data/fresh-repo-with-remote/git/hooks/pre-merge-commit.sample @@ -0,0 +1,13 @@ +#!/bin/sh +# +# An example hook script to verify what is about to be committed. +# Called by "git merge" with no arguments. The hook should +# exit with non-zero status after issuing an appropriate message to +# stderr if it wants to stop the merge commit. +# +# To enable this hook, rename this file to "pre-merge-commit". + +. git-sh-setup +test -x "$GIT_DIR/hooks/pre-commit" && + exec "$GIT_DIR/hooks/pre-commit" +: diff --git a/test-data/fresh-repo-with-remote/git/hooks/pre-push.sample b/test-data/fresh-repo-with-remote/git/hooks/pre-push.sample new file mode 100755 index 0000000..6187dbf --- /dev/null +++ b/test-data/fresh-repo-with-remote/git/hooks/pre-push.sample @@ -0,0 +1,53 @@ +#!/bin/sh + +# An example hook script to verify what is about to be pushed. Called by "git +# push" after it has checked the remote status, but before anything has been +# pushed. If this script exits with a non-zero status nothing will be pushed. +# +# This hook is called with the following parameters: +# +# $1 -- Name of the remote to which the push is being done +# $2 -- URL to which the push is being done +# +# If pushing without using a named remote those arguments will be equal. +# +# Information about the commits which are being pushed is supplied as lines to +# the standard input in the form: +# +# +# +# This sample shows how to prevent push of commits where the log message starts +# with "WIP" (work in progress). + +remote="$1" +url="$2" + +z40=0000000000000000000000000000000000000000 + +while read local_ref local_sha remote_ref remote_sha +do + if [ "$local_sha" = $z40 ] + then + # Handle delete + : + else + if [ "$remote_sha" = $z40 ] + then + # New branch, examine all commits + range="$local_sha" + else + # Update to existing branch, examine new commits + range="$remote_sha..$local_sha" + fi + + # Check for WIP commit + commit=`git rev-list -n 1 --grep '^WIP' "$range"` + if [ -n "$commit" ] + then + echo >&2 "Found WIP commit in $local_ref, not pushing" + exit 1 + fi + fi +done + +exit 0 diff --git a/test-data/fresh-repo-with-remote/git/hooks/pre-rebase.sample b/test-data/fresh-repo-with-remote/git/hooks/pre-rebase.sample new file mode 100755 index 0000000..6cbef5c --- /dev/null +++ b/test-data/fresh-repo-with-remote/git/hooks/pre-rebase.sample @@ -0,0 +1,169 @@ +#!/bin/sh +# +# Copyright (c) 2006, 2008 Junio C Hamano +# +# The "pre-rebase" hook is run just before "git rebase" starts doing +# its job, and can prevent the command from running by exiting with +# non-zero status. +# +# The hook is called with the following parameters: +# +# $1 -- the upstream the series was forked from. +# $2 -- the branch being rebased (or empty when rebasing the current branch). +# +# This sample shows how to prevent topic branches that are already +# merged to 'next' branch from getting rebased, because allowing it +# would result in rebasing already published history. + +publish=next +basebranch="$1" +if test "$#" = 2 +then + topic="refs/heads/$2" +else + topic=`git symbolic-ref HEAD` || + exit 0 ;# we do not interrupt rebasing detached HEAD +fi + +case "$topic" in +refs/heads/??/*) + ;; +*) + exit 0 ;# we do not interrupt others. 
+ ;; +esac + +# Now we are dealing with a topic branch being rebased +# on top of master. Is it OK to rebase it? + +# Does the topic really exist? +git show-ref -q "$topic" || { + echo >&2 "No such branch $topic" + exit 1 +} + +# Is topic fully merged to master? +not_in_master=`git rev-list --pretty=oneline ^master "$topic"` +if test -z "$not_in_master" +then + echo >&2 "$topic is fully merged to master; better remove it." + exit 1 ;# we could allow it, but there is no point. +fi + +# Is topic ever merged to next? If so you should not be rebasing it. +only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` +only_next_2=`git rev-list ^master ${publish} | sort` +if test "$only_next_1" = "$only_next_2" +then + not_in_topic=`git rev-list "^$topic" master` + if test -z "$not_in_topic" + then + echo >&2 "$topic is already up to date with master" + exit 1 ;# we could allow it, but there is no point. + else + exit 0 + fi +else + not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` + /usr/bin/perl -e ' + my $topic = $ARGV[0]; + my $msg = "* $topic has commits already merged to public branch:\n"; + my (%not_in_next) = map { + /^([0-9a-f]+) /; + ($1 => 1); + } split(/\n/, $ARGV[1]); + for my $elem (map { + /^([0-9a-f]+) (.*)$/; + [$1 => $2]; + } split(/\n/, $ARGV[2])) { + if (!exists $not_in_next{$elem->[0]}) { + if ($msg) { + print STDERR $msg; + undef $msg; + } + print STDERR " $elem->[1]\n"; + } + } + ' "$topic" "$not_in_next" "$not_in_master" + exit 1 +fi + +<<\DOC_END + +This sample hook safeguards topic branches that have been +published from being rewound. + +The workflow assumed here is: + + * Once a topic branch forks from "master", "master" is never + merged into it again (either directly or indirectly). + + * Once a topic branch is fully cooked and merged into "master", + it is deleted. If you need to build on top of it to correct + earlier mistakes, a new topic branch is created by forking at + the tip of the "master". This is not strictly necessary, but + it makes it easier to keep your history simple. + + * Whenever you need to test or publish your changes to topic + branches, merge them into "next" branch. + +The script, being an example, hardcodes the publish branch name +to be "next", but it is trivial to make it configurable via +$GIT_DIR/config mechanism. + +With this workflow, you would want to know: + +(1) ... if a topic branch has ever been merged to "next". Young + topic branches can have stupid mistakes you would rather + clean up before publishing, and things that have not been + merged into other branches can be easily rebased without + affecting other people. But once it is published, you would + not want to rewind it. + +(2) ... if a topic branch has been fully merged to "master". + Then you can delete it. More importantly, you should not + build on top of it -- other people may already want to + change things related to the topic as patches against your + "master", so if you need further changes, it is better to + fork the topic (perhaps with the same name) afresh from the + tip of "master". + +Let's look at this example: + + o---o---o---o---o---o---o---o---o---o "next" + / / / / + / a---a---b A / / + / / / / + / / c---c---c---c B / + / / / \ / + / / / b---b C \ / + / / / / \ / + ---o---o---o---o---o---o---o---o---o---o---o "master" + + +A, B and C are topic branches. + + * A has one fix since it was merged up to "next". + + * B has finished. It has been fully merged up to "master" and "next", + and is ready to be deleted. 
+ + * C has not merged to "next" at all. + +We would want to allow C to be rebased, refuse A, and encourage +B to be deleted. + +To compute (1): + + git rev-list ^master ^topic next + git rev-list ^master next + + if these match, topic has not merged in next at all. + +To compute (2): + + git rev-list master..topic + + if this is empty, it is fully merged to "master". + +DOC_END diff --git a/test-data/fresh-repo-with-remote/git/hooks/pre-receive.sample b/test-data/fresh-repo-with-remote/git/hooks/pre-receive.sample new file mode 100755 index 0000000..a1fd29e --- /dev/null +++ b/test-data/fresh-repo-with-remote/git/hooks/pre-receive.sample @@ -0,0 +1,24 @@ +#!/bin/sh +# +# An example hook script to make use of push options. +# The example simply echoes all push options that start with 'echoback=' +# and rejects all pushes when the "reject" push option is used. +# +# To enable this hook, rename this file to "pre-receive". + +if test -n "$GIT_PUSH_OPTION_COUNT" +then + i=0 + while test "$i" -lt "$GIT_PUSH_OPTION_COUNT" + do + eval "value=\$GIT_PUSH_OPTION_$i" + case "$value" in + echoback=*) + echo "echo from the pre-receive-hook: ${value#*=}" >&2 + ;; + reject) + exit 1 + esac + i=$((i + 1)) + done +fi diff --git a/test-data/fresh-repo-with-remote/git/hooks/prepare-commit-msg.sample b/test-data/fresh-repo-with-remote/git/hooks/prepare-commit-msg.sample new file mode 100755 index 0000000..10fa14c --- /dev/null +++ b/test-data/fresh-repo-with-remote/git/hooks/prepare-commit-msg.sample @@ -0,0 +1,42 @@ +#!/bin/sh +# +# An example hook script to prepare the commit log message. +# Called by "git commit" with the name of the file that has the +# commit message, followed by the description of the commit +# message's source. The hook's purpose is to edit the commit +# message file. If the hook fails with a non-zero status, +# the commit is aborted. +# +# To enable this hook, rename this file to "prepare-commit-msg". + +# This hook includes three examples. The first one removes the +# "# Please enter the commit message..." help message. +# +# The second includes the output of "git diff --name-status -r" +# into the message, just before the "git status" output. It is +# commented because it doesn't cope with --amend or with squashed +# commits. +# +# The third example adds a Signed-off-by line to the message, that can +# still be edited. This is rarely a good idea. + +COMMIT_MSG_FILE=$1 +COMMIT_SOURCE=$2 +SHA1=$3 + +/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE" + +# case "$COMMIT_SOURCE,$SHA1" in +# ,|template,) +# /usr/bin/perl -i.bak -pe ' +# print "\n" . `git diff --cached --name-status -r` +# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;; +# *) ;; +# esac + +# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') +# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE" +# if test -z "$COMMIT_SOURCE" +# then +# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE" +# fi diff --git a/test-data/fresh-repo-with-remote/git/hooks/update.sample b/test-data/fresh-repo-with-remote/git/hooks/update.sample new file mode 100755 index 0000000..80ba941 --- /dev/null +++ b/test-data/fresh-repo-with-remote/git/hooks/update.sample @@ -0,0 +1,128 @@ +#!/bin/sh +# +# An example hook script to block unannotated tags from entering. +# Called by "git receive-pack" with arguments: refname sha1-old sha1-new +# +# To enable this hook, rename this file to "update". 
+# +# Config +# ------ +# hooks.allowunannotated +# This boolean sets whether unannotated tags will be allowed into the +# repository. By default they won't be. +# hooks.allowdeletetag +# This boolean sets whether deleting tags will be allowed in the +# repository. By default they won't be. +# hooks.allowmodifytag +# This boolean sets whether a tag may be modified after creation. By default +# it won't be. +# hooks.allowdeletebranch +# This boolean sets whether deleting branches will be allowed in the +# repository. By default they won't be. +# hooks.denycreatebranch +# This boolean sets whether remotely creating branches will be denied +# in the repository. By default this is allowed. +# + +# --- Command line +refname="$1" +oldrev="$2" +newrev="$3" + +# --- Safety check +if [ -z "$GIT_DIR" ]; then + echo "Don't run this script from the command line." >&2 + echo " (if you want, you could supply GIT_DIR then run" >&2 + echo " $0 )" >&2 + exit 1 +fi + +if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then + echo "usage: $0 " >&2 + exit 1 +fi + +# --- Config +allowunannotated=$(git config --bool hooks.allowunannotated) +allowdeletebranch=$(git config --bool hooks.allowdeletebranch) +denycreatebranch=$(git config --bool hooks.denycreatebranch) +allowdeletetag=$(git config --bool hooks.allowdeletetag) +allowmodifytag=$(git config --bool hooks.allowmodifytag) + +# check for no description +projectdesc=$(sed -e '1q' "$GIT_DIR/description") +case "$projectdesc" in +"Unnamed repository"* | "") + echo "*** Project description file hasn't been set" >&2 + exit 1 + ;; +esac + +# --- Check types +# if $newrev is 0000...0000, it's a commit to delete a ref. +zero="0000000000000000000000000000000000000000" +if [ "$newrev" = "$zero" ]; then + newrev_type=delete +else + newrev_type=$(git cat-file -t $newrev) +fi + +case "$refname","$newrev_type" in + refs/tags/*,commit) + # un-annotated tag + short_refname=${refname##refs/tags/} + if [ "$allowunannotated" != "true" ]; then + echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 + echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 + exit 1 + fi + ;; + refs/tags/*,delete) + # delete tag + if [ "$allowdeletetag" != "true" ]; then + echo "*** Deleting a tag is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/tags/*,tag) + # annotated tag + if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 + then + echo "*** Tag '$refname' already exists." >&2 + echo "*** Modifying a tag is not allowed in this repository." >&2 + exit 1 + fi + ;; + refs/heads/*,commit) + # branch + if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then + echo "*** Creating a branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/heads/*,delete) + # delete branch + if [ "$allowdeletebranch" != "true" ]; then + echo "*** Deleting a branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/remotes/*,commit) + # tracking branch + ;; + refs/remotes/*,delete) + # delete tracking branch + if [ "$allowdeletebranch" != "true" ]; then + echo "*** Deleting a tracking branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + *) + # Anything else (is there anything else?) 
+ echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 + exit 1 + ;; +esac + +# --- Finished +exit 0 diff --git a/test-data/fresh-repo-with-remote/git/info/exclude b/test-data/fresh-repo-with-remote/git/info/exclude new file mode 100644 index 0000000..a5196d1 --- /dev/null +++ b/test-data/fresh-repo-with-remote/git/info/exclude @@ -0,0 +1,6 @@ +# git ls-files --others --exclude-from=.git/info/exclude +# Lines that start with '#' are comments. +# For a project mostly in C, the following would be a good set of +# exclude patterns (uncomment them if you want to use them): +# *.[oa] +# *~ diff --git a/test-data/one-commit/git/COMMIT_EDITMSG b/test-data/one-commit/git/COMMIT_EDITMSG new file mode 100644 index 0000000..dddc242 --- /dev/null +++ b/test-data/one-commit/git/COMMIT_EDITMSG @@ -0,0 +1 @@ +hello hello diff --git a/test-data/one-commit/git/HEAD b/test-data/one-commit/git/HEAD new file mode 100644 index 0000000..cb089cd --- /dev/null +++ b/test-data/one-commit/git/HEAD @@ -0,0 +1 @@ +ref: refs/heads/master diff --git a/test-data/one-commit/git/config b/test-data/one-commit/git/config new file mode 100644 index 0000000..0344a2e --- /dev/null +++ b/test-data/one-commit/git/config @@ -0,0 +1,10 @@ +[core] + repositoryformatversion = 0 + filemode = true + bare = false + logallrefupdates = true + ignorecase = true + precomposeunicode = true +[remote "origin"] + url = git@amazon.com:username/new_repo + fetch = +refs/heads/*:refs/remotes/origin/* diff --git a/test-data/one-commit/git/description b/test-data/one-commit/git/description new file mode 100644 index 0000000..498b267 --- /dev/null +++ b/test-data/one-commit/git/description @@ -0,0 +1 @@ +Unnamed repository; edit this file 'description' to name the repository. diff --git a/test-data/one-commit/git/hooks/applypatch-msg.sample b/test-data/one-commit/git/hooks/applypatch-msg.sample new file mode 100755 index 0000000..a5d7b84 --- /dev/null +++ b/test-data/one-commit/git/hooks/applypatch-msg.sample @@ -0,0 +1,15 @@ +#!/bin/sh +# +# An example hook script to check the commit log message taken by +# applypatch from an e-mail message. +# +# The hook should exit with non-zero status after issuing an +# appropriate message if it wants to stop the commit. The hook is +# allowed to edit the commit message file. +# +# To enable this hook, rename this file to "applypatch-msg". + +. git-sh-setup +commitmsg="$(git rev-parse --git-path hooks/commit-msg)" +test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} +: diff --git a/test-data/one-commit/git/hooks/commit-msg.sample b/test-data/one-commit/git/hooks/commit-msg.sample new file mode 100755 index 0000000..b58d118 --- /dev/null +++ b/test-data/one-commit/git/hooks/commit-msg.sample @@ -0,0 +1,24 @@ +#!/bin/sh +# +# An example hook script to check the commit log message. +# Called by "git commit" with one argument, the name of the file +# that has the commit message. The hook should exit with non-zero +# status after issuing an appropriate message if it wants to stop the +# commit. The hook is allowed to edit the commit message file. +# +# To enable this hook, rename this file to "commit-msg". + +# Uncomment the below to add a Signed-off-by line to the message. +# Doing this in a hook is a bad idea in general, but the prepare-commit-msg +# hook is more suited to it. +# +# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') +# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" + +# This example catches duplicate Signed-off-by lines. 
+ +test "" = "$(grep '^Signed-off-by: ' "$1" | + sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { + echo >&2 Duplicate Signed-off-by lines. + exit 1 +} diff --git a/test-data/one-commit/git/hooks/fsmonitor-watchman.sample b/test-data/one-commit/git/hooks/fsmonitor-watchman.sample new file mode 100755 index 0000000..e673bb3 --- /dev/null +++ b/test-data/one-commit/git/hooks/fsmonitor-watchman.sample @@ -0,0 +1,114 @@ +#!/usr/bin/perl + +use strict; +use warnings; +use IPC::Open2; + +# An example hook script to integrate Watchman +# (https://facebook.github.io/watchman/) with git to speed up detecting +# new and modified files. +# +# The hook is passed a version (currently 1) and a time in nanoseconds +# formatted as a string and outputs to stdout all files that have been +# modified since the given time. Paths must be relative to the root of +# the working tree and separated by a single NUL. +# +# To enable this hook, rename this file to "query-watchman" and set +# 'git config core.fsmonitor .git/hooks/query-watchman' +# +my ($version, $time) = @ARGV; + +# Check the hook interface version + +if ($version == 1) { + # convert nanoseconds to seconds + $time = int $time / 1000000000; +} else { + die "Unsupported query-fsmonitor hook version '$version'.\n" . + "Falling back to scanning...\n"; +} + +my $git_work_tree; +if ($^O =~ 'msys' || $^O =~ 'cygwin') { + $git_work_tree = Win32::GetCwd(); + $git_work_tree =~ tr/\\/\//; +} else { + require Cwd; + $git_work_tree = Cwd::cwd(); +} + +my $retry = 1; + +launch_watchman(); + +sub launch_watchman { + + my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty') + or die "open2() failed: $!\n" . + "Falling back to scanning...\n"; + + # In the query expression below we're asking for names of files that + # changed since $time but were not transient (ie created after + # $time but no longer exist). + # + # To accomplish this, we're using the "since" generator to use the + # recency index to select candidate nodes and "fields" to limit the + # output to file names only. Then we're using the "expression" term to + # further constrain the results. + # + # The category of transient files that we want to ignore will have a + # creation clock (cclock) newer than $time_t value and will also not + # currently exist. + + my $query = <<" END"; + ["query", "$git_work_tree", { + "since": $time, + "fields": ["name"], + "expression": ["not", ["allof", ["since", $time, "cclock"], ["not", "exists"]]] + }] + END + + print CHLD_IN $query; + close CHLD_IN; + my $response = do {local $/; }; + + die "Watchman: command returned no output.\n" . + "Falling back to scanning...\n" if $response eq ""; + die "Watchman: command returned invalid output: $response\n" . + "Falling back to scanning...\n" unless $response =~ /^\{/; + + my $json_pkg; + eval { + require JSON::XS; + $json_pkg = "JSON::XS"; + 1; + } or do { + require JSON::PP; + $json_pkg = "JSON::PP"; + }; + + my $o = $json_pkg->new->utf8->decode($response); + + if ($retry > 0 and $o->{error} and $o->{error} =~ m/unable to resolve root .* directory (.*) is not watched/) { + print STDERR "Adding '$git_work_tree' to watchman's watch list.\n"; + $retry--; + qx/watchman watch "$git_work_tree"/; + die "Failed to make watchman watch '$git_work_tree'.\n" . + "Falling back to scanning...\n" if $? 
!= 0; + + # Watchman will always return all files on the first query so + # return the fast "everything is dirty" flag to git and do the + # Watchman query just to get it over with now so we won't pay + # the cost in git to look up each individual file. + print "/\0"; + eval { launch_watchman() }; + exit 0; + } + + die "Watchman: $o->{error}.\n" . + "Falling back to scanning...\n" if $o->{error}; + + binmode STDOUT, ":utf8"; + local $, = "\0"; + print @{$o->{files}}; +} diff --git a/test-data/one-commit/git/hooks/post-update.sample b/test-data/one-commit/git/hooks/post-update.sample new file mode 100755 index 0000000..ec17ec1 --- /dev/null +++ b/test-data/one-commit/git/hooks/post-update.sample @@ -0,0 +1,8 @@ +#!/bin/sh +# +# An example hook script to prepare a packed repository for use over +# dumb transports. +# +# To enable this hook, rename this file to "post-update". + +exec git update-server-info diff --git a/test-data/one-commit/git/hooks/pre-applypatch.sample b/test-data/one-commit/git/hooks/pre-applypatch.sample new file mode 100755 index 0000000..4142082 --- /dev/null +++ b/test-data/one-commit/git/hooks/pre-applypatch.sample @@ -0,0 +1,14 @@ +#!/bin/sh +# +# An example hook script to verify what is about to be committed +# by applypatch from an e-mail message. +# +# The hook should exit with non-zero status after issuing an +# appropriate message if it wants to stop the commit. +# +# To enable this hook, rename this file to "pre-applypatch". + +. git-sh-setup +precommit="$(git rev-parse --git-path hooks/pre-commit)" +test -x "$precommit" && exec "$precommit" ${1+"$@"} +: diff --git a/test-data/one-commit/git/hooks/pre-commit.sample b/test-data/one-commit/git/hooks/pre-commit.sample new file mode 100755 index 0000000..6a75641 --- /dev/null +++ b/test-data/one-commit/git/hooks/pre-commit.sample @@ -0,0 +1,49 @@ +#!/bin/sh +# +# An example hook script to verify what is about to be committed. +# Called by "git commit" with no arguments. The hook should +# exit with non-zero status after issuing an appropriate message if +# it wants to stop the commit. +# +# To enable this hook, rename this file to "pre-commit". + +if git rev-parse --verify HEAD >/dev/null 2>&1 +then + against=HEAD +else + # Initial commit: diff against an empty tree object + against=$(git hash-object -t tree /dev/null) +fi + +# If you want to allow non-ASCII filenames set this variable to true. +allownonascii=$(git config --bool hooks.allownonascii) + +# Redirect output to stderr. +exec 1>&2 + +# Cross platform projects tend to avoid non-ASCII filenames; prevent +# them from being added to the repository. We exploit the fact that the +# printable range starts at the space character and ends with tilde. +if [ "$allownonascii" != "true" ] && + # Note that the use of brackets around a tr range is ok here, (it's + # even required, for portability to Solaris 10's /usr/bin/tr), since + # the square bracket bytes happen to fall in the designated range. + test $(git diff --cached --name-only --diff-filter=A -z $against | + LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 +then + cat <<\EOF +Error: Attempt to add a non-ASCII file name. + +This can cause problems if you want to work with people on other platforms. + +To be portable it is advisable to rename the file. + +If you know what you are doing you can disable this check using: + + git config hooks.allownonascii true +EOF + exit 1 +fi + +# If there are whitespace errors, print the offending file names and fail. 
+exec git diff-index --check --cached $against -- diff --git a/test-data/one-commit/git/hooks/pre-merge-commit.sample b/test-data/one-commit/git/hooks/pre-merge-commit.sample new file mode 100755 index 0000000..399eab1 --- /dev/null +++ b/test-data/one-commit/git/hooks/pre-merge-commit.sample @@ -0,0 +1,13 @@ +#!/bin/sh +# +# An example hook script to verify what is about to be committed. +# Called by "git merge" with no arguments. The hook should +# exit with non-zero status after issuing an appropriate message to +# stderr if it wants to stop the merge commit. +# +# To enable this hook, rename this file to "pre-merge-commit". + +. git-sh-setup +test -x "$GIT_DIR/hooks/pre-commit" && + exec "$GIT_DIR/hooks/pre-commit" +: diff --git a/test-data/one-commit/git/hooks/pre-push.sample b/test-data/one-commit/git/hooks/pre-push.sample new file mode 100755 index 0000000..6187dbf --- /dev/null +++ b/test-data/one-commit/git/hooks/pre-push.sample @@ -0,0 +1,53 @@ +#!/bin/sh + +# An example hook script to verify what is about to be pushed. Called by "git +# push" after it has checked the remote status, but before anything has been +# pushed. If this script exits with a non-zero status nothing will be pushed. +# +# This hook is called with the following parameters: +# +# $1 -- Name of the remote to which the push is being done +# $2 -- URL to which the push is being done +# +# If pushing without using a named remote those arguments will be equal. +# +# Information about the commits which are being pushed is supplied as lines to +# the standard input in the form: +# +# +# +# This sample shows how to prevent push of commits where the log message starts +# with "WIP" (work in progress). + +remote="$1" +url="$2" + +z40=0000000000000000000000000000000000000000 + +while read local_ref local_sha remote_ref remote_sha +do + if [ "$local_sha" = $z40 ] + then + # Handle delete + : + else + if [ "$remote_sha" = $z40 ] + then + # New branch, examine all commits + range="$local_sha" + else + # Update to existing branch, examine new commits + range="$remote_sha..$local_sha" + fi + + # Check for WIP commit + commit=`git rev-list -n 1 --grep '^WIP' "$range"` + if [ -n "$commit" ] + then + echo >&2 "Found WIP commit in $local_ref, not pushing" + exit 1 + fi + fi +done + +exit 0 diff --git a/test-data/one-commit/git/hooks/pre-rebase.sample b/test-data/one-commit/git/hooks/pre-rebase.sample new file mode 100755 index 0000000..6cbef5c --- /dev/null +++ b/test-data/one-commit/git/hooks/pre-rebase.sample @@ -0,0 +1,169 @@ +#!/bin/sh +# +# Copyright (c) 2006, 2008 Junio C Hamano +# +# The "pre-rebase" hook is run just before "git rebase" starts doing +# its job, and can prevent the command from running by exiting with +# non-zero status. +# +# The hook is called with the following parameters: +# +# $1 -- the upstream the series was forked from. +# $2 -- the branch being rebased (or empty when rebasing the current branch). +# +# This sample shows how to prevent topic branches that are already +# merged to 'next' branch from getting rebased, because allowing it +# would result in rebasing already published history. + +publish=next +basebranch="$1" +if test "$#" = 2 +then + topic="refs/heads/$2" +else + topic=`git symbolic-ref HEAD` || + exit 0 ;# we do not interrupt rebasing detached HEAD +fi + +case "$topic" in +refs/heads/??/*) + ;; +*) + exit 0 ;# we do not interrupt others. + ;; +esac + +# Now we are dealing with a topic branch being rebased +# on top of master. Is it OK to rebase it? + +# Does the topic really exist? 
+git show-ref -q "$topic" || { + echo >&2 "No such branch $topic" + exit 1 +} + +# Is topic fully merged to master? +not_in_master=`git rev-list --pretty=oneline ^master "$topic"` +if test -z "$not_in_master" +then + echo >&2 "$topic is fully merged to master; better remove it." + exit 1 ;# we could allow it, but there is no point. +fi + +# Is topic ever merged to next? If so you should not be rebasing it. +only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` +only_next_2=`git rev-list ^master ${publish} | sort` +if test "$only_next_1" = "$only_next_2" +then + not_in_topic=`git rev-list "^$topic" master` + if test -z "$not_in_topic" + then + echo >&2 "$topic is already up to date with master" + exit 1 ;# we could allow it, but there is no point. + else + exit 0 + fi +else + not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` + /usr/bin/perl -e ' + my $topic = $ARGV[0]; + my $msg = "* $topic has commits already merged to public branch:\n"; + my (%not_in_next) = map { + /^([0-9a-f]+) /; + ($1 => 1); + } split(/\n/, $ARGV[1]); + for my $elem (map { + /^([0-9a-f]+) (.*)$/; + [$1 => $2]; + } split(/\n/, $ARGV[2])) { + if (!exists $not_in_next{$elem->[0]}) { + if ($msg) { + print STDERR $msg; + undef $msg; + } + print STDERR " $elem->[1]\n"; + } + } + ' "$topic" "$not_in_next" "$not_in_master" + exit 1 +fi + +<<\DOC_END + +This sample hook safeguards topic branches that have been +published from being rewound. + +The workflow assumed here is: + + * Once a topic branch forks from "master", "master" is never + merged into it again (either directly or indirectly). + + * Once a topic branch is fully cooked and merged into "master", + it is deleted. If you need to build on top of it to correct + earlier mistakes, a new topic branch is created by forking at + the tip of the "master". This is not strictly necessary, but + it makes it easier to keep your history simple. + + * Whenever you need to test or publish your changes to topic + branches, merge them into "next" branch. + +The script, being an example, hardcodes the publish branch name +to be "next", but it is trivial to make it configurable via +$GIT_DIR/config mechanism. + +With this workflow, you would want to know: + +(1) ... if a topic branch has ever been merged to "next". Young + topic branches can have stupid mistakes you would rather + clean up before publishing, and things that have not been + merged into other branches can be easily rebased without + affecting other people. But once it is published, you would + not want to rewind it. + +(2) ... if a topic branch has been fully merged to "master". + Then you can delete it. More importantly, you should not + build on top of it -- other people may already want to + change things related to the topic as patches against your + "master", so if you need further changes, it is better to + fork the topic (perhaps with the same name) afresh from the + tip of "master". + +Let's look at this example: + + o---o---o---o---o---o---o---o---o---o "next" + / / / / + / a---a---b A / / + / / / / + / / c---c---c---c B / + / / / \ / + / / / b---b C \ / + / / / / \ / + ---o---o---o---o---o---o---o---o---o---o---o "master" + + +A, B and C are topic branches. + + * A has one fix since it was merged up to "next". + + * B has finished. It has been fully merged up to "master" and "next", + and is ready to be deleted. + + * C has not merged to "next" at all. + +We would want to allow C to be rebased, refuse A, and encourage +B to be deleted. 
+ +To compute (1): + + git rev-list ^master ^topic next + git rev-list ^master next + + if these match, topic has not merged in next at all. + +To compute (2): + + git rev-list master..topic + + if this is empty, it is fully merged to "master". + +DOC_END diff --git a/test-data/one-commit/git/hooks/pre-receive.sample b/test-data/one-commit/git/hooks/pre-receive.sample new file mode 100755 index 0000000..a1fd29e --- /dev/null +++ b/test-data/one-commit/git/hooks/pre-receive.sample @@ -0,0 +1,24 @@ +#!/bin/sh +# +# An example hook script to make use of push options. +# The example simply echoes all push options that start with 'echoback=' +# and rejects all pushes when the "reject" push option is used. +# +# To enable this hook, rename this file to "pre-receive". + +if test -n "$GIT_PUSH_OPTION_COUNT" +then + i=0 + while test "$i" -lt "$GIT_PUSH_OPTION_COUNT" + do + eval "value=\$GIT_PUSH_OPTION_$i" + case "$value" in + echoback=*) + echo "echo from the pre-receive-hook: ${value#*=}" >&2 + ;; + reject) + exit 1 + esac + i=$((i + 1)) + done +fi diff --git a/test-data/one-commit/git/hooks/prepare-commit-msg.sample b/test-data/one-commit/git/hooks/prepare-commit-msg.sample new file mode 100755 index 0000000..10fa14c --- /dev/null +++ b/test-data/one-commit/git/hooks/prepare-commit-msg.sample @@ -0,0 +1,42 @@ +#!/bin/sh +# +# An example hook script to prepare the commit log message. +# Called by "git commit" with the name of the file that has the +# commit message, followed by the description of the commit +# message's source. The hook's purpose is to edit the commit +# message file. If the hook fails with a non-zero status, +# the commit is aborted. +# +# To enable this hook, rename this file to "prepare-commit-msg". + +# This hook includes three examples. The first one removes the +# "# Please enter the commit message..." help message. +# +# The second includes the output of "git diff --name-status -r" +# into the message, just before the "git status" output. It is +# commented because it doesn't cope with --amend or with squashed +# commits. +# +# The third example adds a Signed-off-by line to the message, that can +# still be edited. This is rarely a good idea. + +COMMIT_MSG_FILE=$1 +COMMIT_SOURCE=$2 +SHA1=$3 + +/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE" + +# case "$COMMIT_SOURCE,$SHA1" in +# ,|template,) +# /usr/bin/perl -i.bak -pe ' +# print "\n" . `git diff --cached --name-status -r` +# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;; +# *) ;; +# esac + +# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') +# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE" +# if test -z "$COMMIT_SOURCE" +# then +# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE" +# fi diff --git a/test-data/one-commit/git/hooks/update.sample b/test-data/one-commit/git/hooks/update.sample new file mode 100755 index 0000000..80ba941 --- /dev/null +++ b/test-data/one-commit/git/hooks/update.sample @@ -0,0 +1,128 @@ +#!/bin/sh +# +# An example hook script to block unannotated tags from entering. +# Called by "git receive-pack" with arguments: refname sha1-old sha1-new +# +# To enable this hook, rename this file to "update". +# +# Config +# ------ +# hooks.allowunannotated +# This boolean sets whether unannotated tags will be allowed into the +# repository. By default they won't be. +# hooks.allowdeletetag +# This boolean sets whether deleting tags will be allowed in the +# repository. 
By default they won't be. +# hooks.allowmodifytag +# This boolean sets whether a tag may be modified after creation. By default +# it won't be. +# hooks.allowdeletebranch +# This boolean sets whether deleting branches will be allowed in the +# repository. By default they won't be. +# hooks.denycreatebranch +# This boolean sets whether remotely creating branches will be denied +# in the repository. By default this is allowed. +# + +# --- Command line +refname="$1" +oldrev="$2" +newrev="$3" + +# --- Safety check +if [ -z "$GIT_DIR" ]; then + echo "Don't run this script from the command line." >&2 + echo " (if you want, you could supply GIT_DIR then run" >&2 + echo " $0 )" >&2 + exit 1 +fi + +if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then + echo "usage: $0 " >&2 + exit 1 +fi + +# --- Config +allowunannotated=$(git config --bool hooks.allowunannotated) +allowdeletebranch=$(git config --bool hooks.allowdeletebranch) +denycreatebranch=$(git config --bool hooks.denycreatebranch) +allowdeletetag=$(git config --bool hooks.allowdeletetag) +allowmodifytag=$(git config --bool hooks.allowmodifytag) + +# check for no description +projectdesc=$(sed -e '1q' "$GIT_DIR/description") +case "$projectdesc" in +"Unnamed repository"* | "") + echo "*** Project description file hasn't been set" >&2 + exit 1 + ;; +esac + +# --- Check types +# if $newrev is 0000...0000, it's a commit to delete a ref. +zero="0000000000000000000000000000000000000000" +if [ "$newrev" = "$zero" ]; then + newrev_type=delete +else + newrev_type=$(git cat-file -t $newrev) +fi + +case "$refname","$newrev_type" in + refs/tags/*,commit) + # un-annotated tag + short_refname=${refname##refs/tags/} + if [ "$allowunannotated" != "true" ]; then + echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 + echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 + exit 1 + fi + ;; + refs/tags/*,delete) + # delete tag + if [ "$allowdeletetag" != "true" ]; then + echo "*** Deleting a tag is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/tags/*,tag) + # annotated tag + if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 + then + echo "*** Tag '$refname' already exists." >&2 + echo "*** Modifying a tag is not allowed in this repository." >&2 + exit 1 + fi + ;; + refs/heads/*,commit) + # branch + if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then + echo "*** Creating a branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/heads/*,delete) + # delete branch + if [ "$allowdeletebranch" != "true" ]; then + echo "*** Deleting a branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/remotes/*,commit) + # tracking branch + ;; + refs/remotes/*,delete) + # delete tracking branch + if [ "$allowdeletebranch" != "true" ]; then + echo "*** Deleting a tracking branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + *) + # Anything else (is there anything else?) 
+ echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 + exit 1 + ;; +esac + +# --- Finished +exit 0 diff --git a/test-data/one-commit/git/index b/test-data/one-commit/git/index new file mode 100644 index 0000000000000000000000000000000000000000..35feea2033672bac7685baaa288393e82482cea4 GIT binary patch literal 137 zcmZ?q402{*U|<4b#>AbB-)ckh}ftH%Yag9Sb%4AvJbu6V344tc*50HH-Ks{jB1 literal 0 HcmV?d00001 diff --git a/test-data/one-commit/git/info/exclude b/test-data/one-commit/git/info/exclude new file mode 100644 index 0000000..a5196d1 --- /dev/null +++ b/test-data/one-commit/git/info/exclude @@ -0,0 +1,6 @@ +# git ls-files --others --exclude-from=.git/info/exclude +# Lines that start with '#' are comments. +# For a project mostly in C, the following would be a good set of +# exclude patterns (uncomment them if you want to use them): +# *.[oa] +# *~ diff --git a/test-data/one-commit/git/logs/HEAD b/test-data/one-commit/git/logs/HEAD new file mode 100644 index 0000000..b14cfbc --- /dev/null +++ b/test-data/one-commit/git/logs/HEAD @@ -0,0 +1 @@ +0000000000000000000000000000000000000000 cdb0fcad7400610b1d1797a326a89414525160fe Martin Schaef 1639514623 -0500 commit (initial): hello hello diff --git a/test-data/one-commit/git/logs/refs/heads/master b/test-data/one-commit/git/logs/refs/heads/master new file mode 100644 index 0000000..b14cfbc --- /dev/null +++ b/test-data/one-commit/git/logs/refs/heads/master @@ -0,0 +1 @@ +0000000000000000000000000000000000000000 cdb0fcad7400610b1d1797a326a89414525160fe Martin Schaef 1639514623 -0500 commit (initial): hello hello diff --git a/test-data/one-commit/git/objects/69/e978a2558e8b47b25058af8e10482831feded6 b/test-data/one-commit/git/objects/69/e978a2558e8b47b25058af8e10482831feded6 new file mode 100644 index 0000000000000000000000000000000000000000..47ebcf66c4a82656b828ee5d65dc7438d73ce217 GIT binary patch literal 53 zcmbvpu6NUf) literal 0 HcmV?d00001 diff --git a/test-data/one-commit/git/objects/9a/71f81a4b4754b686fd37cbb3c72d0250d344aa b/test-data/one-commit/git/objects/9a/71f81a4b4754b686fd37cbb3c72d0250d344aa new file mode 100644 index 0000000000000000000000000000000000000000..3f54fe4d7688da045ffe5c32eee685d09d6e9c01 GIT binary patch literal 29 lcmbù J߶ßR«wPÞD§¬×®ÒI){\þ+…·­Á¹âUF9u \ No newline at end of file diff --git a/test-data/one-commit/git/refs/heads/master b/test-data/one-commit/git/refs/heads/master new file mode 100644 index 0000000..4a5dc69 --- /dev/null +++ b/test-data/one-commit/git/refs/heads/master @@ -0,0 +1 @@ +cdb0fcad7400610b1d1797a326a89414525160fe diff --git a/test-data/one-commit/test.txt b/test-data/one-commit/test.txt new file mode 100644 index 0000000..9a71f81 --- /dev/null +++ b/test-data/one-commit/test.txt @@ -0,0 +1,2 @@ +hello world + diff --git a/test-data/recommendations/recommendations.json b/test-data/recommendations/recommendations.json new file mode 100644 index 0000000..7a6bfdb --- /dev/null +++ b/test-data/recommendations/recommendations.json @@ -0,0 +1,62 @@ +[ { + "FilePath" : "src/main/java/com/amazonaws/gurureviewercli/Main.java", + "RecommendationId" : "1509b96226f77f1d3bad3f53c42f56c09ce16ac1f3713d7007c3a6871ea71d5d", + "StartLine" : 161, + "EndLine" : 161, + "Description" : "**Problem**: While wrapping the caught exception into a custom one, information about the caught exception is being lost, including information about the stack trace of the exception.\n\n**Fix**: If the caught exception object does not contain sensitive information, consider passing it as the 
\"rootCause\" or inner exception parameter to the constructor of the new exception before throwing the new exception. (Note that not all exception constructors support inner exceptions. Use a wrapper exception that supports inner exceptions.) \n[Learn more](https://www.ibm.com/support/pages/best-practice-catching-and-re-throwing-java-exceptions)\nSimilar issue at line numbers 166.", + "RecommendationCategory" : "JavaBestPractices", + "RuleMetadata" : { + "RuleId" : "java/throw-exception-with-trace@v1.0", + "RuleName" : "Stack trace not included in re-thrown exception", + "ShortDescription" : "When re-throwing an exception, make sure to include the stack trace.", + "LongDescription" : "When re-throwing an exception, make sure to include the stack trace. Otherwise pertinent debug information is lost.", + "RuleTags" : [ "cwe-755", "java" ] + }, + "Severity" : "Medium" +}, { + "FilePath" : "src/main/java/com/amazonaws/gurureviewercli/Main.java", + "RecommendationId" : "4d2c43618a2dac129818bef77093730e84a4e139eef3f0166334657503ecd88d", + "StartLine" : 154, + "EndLine" : 154, + "Description" : "AWS Region is set using a `String`. To explicitly set a publicly available region, we recommend that you use the [Regions](https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/index.html?com/amazonaws/regions/Regions.html) enum.\n\n[Learn more](https://docs.aws.amazon.com/sdk-for-java/v1/developer-guide/java-dg-region-selection.html#region-selection-choose-region)", + "RecommendationCategory" : "AWSBestPractices", + "RuleMetadata" : { + "RuleId" : "java/aws-region-enumeration@v1.0", + "RuleName" : "Use an enum to specify an AWS Region", + "ShortDescription" : "To minimize the risk of error, use an enum instead of a string to specify an AWS Region.", + "LongDescription" : "Use a `Regions` enum instead of a string to specify an AWS Region. This can minimize the risk of error.", + "RuleTags" : [ "aws", "java" ] + }, + "Severity" : "Medium" +}, { + "FilePath" : "src/main/java/com/amazonaws/gurureviewercli/Main.java", + "RecommendationId" : "5ea0fef84e3623ae3c98ee10e25ab39899f1bfbe1a99e7ec374c31ae58d21cfe", + "StartLine" : 194, + "EndLine" : 194, + "Description" : "AWS Region is set using a `String`. To explicitly set a publicly available region, we recommend that you use the [Regions](https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/index.html?com/amazonaws/regions/Regions.html) enum.\n\n[Learn more](https://docs.aws.amazon.com/sdk-for-java/v1/developer-guide/java-dg-region-selection.html#region-selection-choose-region)", + "RecommendationCategory" : "AWSBestPractices", + "RuleMetadata" : { + "RuleId" : "java/aws-region-enumeration@v1.0", + "RuleName" : "Use an enum to specify an AWS Region", + "ShortDescription" : "To minimize the risk of error, use an enum instead of a string to specify an AWS Region.", + "LongDescription" : "Use a `Regions` enum instead of a string to specify an AWS Region. This can minimize the risk of error.", + "RuleTags" : [ "aws", "java" ] + }, + "Severity" : "Medium" +}, { + "FilePath" : "src/main/java/com/amazonaws/gurureviewercli/adapter/ScanAdapter.java", + "RecommendationId" : "ce6d28872c691f82a95099dd2e04eff0da58f23d13162c8b0421a0c46e03d163", + "StartLine" : 86, + "EndLine" : 86, + "Description" : "**Problem**: InterruptedException is ignored. This can delay thread shutdown and clear the thread’s interrupt status. 
Only code that implements a thread’s interruption policy can swallow an interruption request.\n\n**Fix**: Rethrow the InterruptedException or reinterrupt the current thread using *Thread.currentThread().interrupt()* so that higher-level interrupt handlers can function correctly.\n If you are wrapping the InterruptedException inside a RuntimeException, call *Thread.currentThread().interrupt()* before throwing the RuntimeException.\n\nLearn more about [interrupts](https://docs.oracle.com/javase/tutorial/essential/concurrency/interrupt.html) and [dealing with InterruptedException](https://www.ibm.com/developerworks/java/library/j-jtp05236/index.html?ca=drs-#N10187)\n", + "RecommendationCategory" : "JavaBestPractices", + "RuleMetadata" : { } +}, { + "FilePath" : ".", + "RecommendationId" : "security-1f1a4dd101b245a24f50b51d8f45862b8db66e1fee0aef4a2d0be46", + "StartLine" : 1, + "EndLine" : 1, + "Description" : "New security detectors are periodically added, so consider regular security-analysis to keep your code secure.\nThe build artifacts for some of your source code are missing. Only source code that was uploaded with its accompanying build artifact files was analyzed for security recommendations. All of your source code was analyzed for source code quality recommendations because code quality analysis doesn’t require build artifacts.", + "RecommendationCategory" : "CodeMaintenanceIssues", + "RuleMetadata" : { } +} ] \ No newline at end of file diff --git a/test-data/two-commits/git/COMMIT_EDITMSG b/test-data/two-commits/git/COMMIT_EDITMSG new file mode 100644 index 0000000..0c1f383 --- /dev/null +++ b/test-data/two-commits/git/COMMIT_EDITMSG @@ -0,0 +1 @@ +one more commit diff --git a/test-data/two-commits/git/HEAD b/test-data/two-commits/git/HEAD new file mode 100644 index 0000000..cb089cd --- /dev/null +++ b/test-data/two-commits/git/HEAD @@ -0,0 +1 @@ +ref: refs/heads/master diff --git a/test-data/two-commits/git/config b/test-data/two-commits/git/config new file mode 100644 index 0000000..0344a2e --- /dev/null +++ b/test-data/two-commits/git/config @@ -0,0 +1,10 @@ +[core] + repositoryformatversion = 0 + filemode = true + bare = false + logallrefupdates = true + ignorecase = true + precomposeunicode = true +[remote "origin"] + url = git@amazon.com:username/new_repo + fetch = +refs/heads/*:refs/remotes/origin/* diff --git a/test-data/two-commits/git/description b/test-data/two-commits/git/description new file mode 100644 index 0000000..498b267 --- /dev/null +++ b/test-data/two-commits/git/description @@ -0,0 +1 @@ +Unnamed repository; edit this file 'description' to name the repository. diff --git a/test-data/two-commits/git/hooks/applypatch-msg.sample b/test-data/two-commits/git/hooks/applypatch-msg.sample new file mode 100755 index 0000000..a5d7b84 --- /dev/null +++ b/test-data/two-commits/git/hooks/applypatch-msg.sample @@ -0,0 +1,15 @@ +#!/bin/sh +# +# An example hook script to check the commit log message taken by +# applypatch from an e-mail message. +# +# The hook should exit with non-zero status after issuing an +# appropriate message if it wants to stop the commit. The hook is +# allowed to edit the commit message file. +# +# To enable this hook, rename this file to "applypatch-msg". + +. 
git-sh-setup +commitmsg="$(git rev-parse --git-path hooks/commit-msg)" +test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} +: diff --git a/test-data/two-commits/git/hooks/commit-msg.sample b/test-data/two-commits/git/hooks/commit-msg.sample new file mode 100755 index 0000000..b58d118 --- /dev/null +++ b/test-data/two-commits/git/hooks/commit-msg.sample @@ -0,0 +1,24 @@ +#!/bin/sh +# +# An example hook script to check the commit log message. +# Called by "git commit" with one argument, the name of the file +# that has the commit message. The hook should exit with non-zero +# status after issuing an appropriate message if it wants to stop the +# commit. The hook is allowed to edit the commit message file. +# +# To enable this hook, rename this file to "commit-msg". + +# Uncomment the below to add a Signed-off-by line to the message. +# Doing this in a hook is a bad idea in general, but the prepare-commit-msg +# hook is more suited to it. +# +# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') +# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" + +# This example catches duplicate Signed-off-by lines. + +test "" = "$(grep '^Signed-off-by: ' "$1" | + sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { + echo >&2 Duplicate Signed-off-by lines. + exit 1 +} diff --git a/test-data/two-commits/git/hooks/fsmonitor-watchman.sample b/test-data/two-commits/git/hooks/fsmonitor-watchman.sample new file mode 100755 index 0000000..e673bb3 --- /dev/null +++ b/test-data/two-commits/git/hooks/fsmonitor-watchman.sample @@ -0,0 +1,114 @@ +#!/usr/bin/perl + +use strict; +use warnings; +use IPC::Open2; + +# An example hook script to integrate Watchman +# (https://facebook.github.io/watchman/) with git to speed up detecting +# new and modified files. +# +# The hook is passed a version (currently 1) and a time in nanoseconds +# formatted as a string and outputs to stdout all files that have been +# modified since the given time. Paths must be relative to the root of +# the working tree and separated by a single NUL. +# +# To enable this hook, rename this file to "query-watchman" and set +# 'git config core.fsmonitor .git/hooks/query-watchman' +# +my ($version, $time) = @ARGV; + +# Check the hook interface version + +if ($version == 1) { + # convert nanoseconds to seconds + $time = int $time / 1000000000; +} else { + die "Unsupported query-fsmonitor hook version '$version'.\n" . + "Falling back to scanning...\n"; +} + +my $git_work_tree; +if ($^O =~ 'msys' || $^O =~ 'cygwin') { + $git_work_tree = Win32::GetCwd(); + $git_work_tree =~ tr/\\/\//; +} else { + require Cwd; + $git_work_tree = Cwd::cwd(); +} + +my $retry = 1; + +launch_watchman(); + +sub launch_watchman { + + my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty') + or die "open2() failed: $!\n" . + "Falling back to scanning...\n"; + + # In the query expression below we're asking for names of files that + # changed since $time but were not transient (ie created after + # $time but no longer exist). + # + # To accomplish this, we're using the "since" generator to use the + # recency index to select candidate nodes and "fields" to limit the + # output to file names only. Then we're using the "expression" term to + # further constrain the results. + # + # The category of transient files that we want to ignore will have a + # creation clock (cclock) newer than $time_t value and will also not + # currently exist. 
+ + my $query = <<" END"; + ["query", "$git_work_tree", { + "since": $time, + "fields": ["name"], + "expression": ["not", ["allof", ["since", $time, "cclock"], ["not", "exists"]]] + }] + END + + print CHLD_IN $query; + close CHLD_IN; + my $response = do {local $/; }; + + die "Watchman: command returned no output.\n" . + "Falling back to scanning...\n" if $response eq ""; + die "Watchman: command returned invalid output: $response\n" . + "Falling back to scanning...\n" unless $response =~ /^\{/; + + my $json_pkg; + eval { + require JSON::XS; + $json_pkg = "JSON::XS"; + 1; + } or do { + require JSON::PP; + $json_pkg = "JSON::PP"; + }; + + my $o = $json_pkg->new->utf8->decode($response); + + if ($retry > 0 and $o->{error} and $o->{error} =~ m/unable to resolve root .* directory (.*) is not watched/) { + print STDERR "Adding '$git_work_tree' to watchman's watch list.\n"; + $retry--; + qx/watchman watch "$git_work_tree"/; + die "Failed to make watchman watch '$git_work_tree'.\n" . + "Falling back to scanning...\n" if $? != 0; + + # Watchman will always return all files on the first query so + # return the fast "everything is dirty" flag to git and do the + # Watchman query just to get it over with now so we won't pay + # the cost in git to look up each individual file. + print "/\0"; + eval { launch_watchman() }; + exit 0; + } + + die "Watchman: $o->{error}.\n" . + "Falling back to scanning...\n" if $o->{error}; + + binmode STDOUT, ":utf8"; + local $, = "\0"; + print @{$o->{files}}; +} diff --git a/test-data/two-commits/git/hooks/post-update.sample b/test-data/two-commits/git/hooks/post-update.sample new file mode 100755 index 0000000..ec17ec1 --- /dev/null +++ b/test-data/two-commits/git/hooks/post-update.sample @@ -0,0 +1,8 @@ +#!/bin/sh +# +# An example hook script to prepare a packed repository for use over +# dumb transports. +# +# To enable this hook, rename this file to "post-update". + +exec git update-server-info diff --git a/test-data/two-commits/git/hooks/pre-applypatch.sample b/test-data/two-commits/git/hooks/pre-applypatch.sample new file mode 100755 index 0000000..4142082 --- /dev/null +++ b/test-data/two-commits/git/hooks/pre-applypatch.sample @@ -0,0 +1,14 @@ +#!/bin/sh +# +# An example hook script to verify what is about to be committed +# by applypatch from an e-mail message. +# +# The hook should exit with non-zero status after issuing an +# appropriate message if it wants to stop the commit. +# +# To enable this hook, rename this file to "pre-applypatch". + +. git-sh-setup +precommit="$(git rev-parse --git-path hooks/pre-commit)" +test -x "$precommit" && exec "$precommit" ${1+"$@"} +: diff --git a/test-data/two-commits/git/hooks/pre-commit.sample b/test-data/two-commits/git/hooks/pre-commit.sample new file mode 100755 index 0000000..6a75641 --- /dev/null +++ b/test-data/two-commits/git/hooks/pre-commit.sample @@ -0,0 +1,49 @@ +#!/bin/sh +# +# An example hook script to verify what is about to be committed. +# Called by "git commit" with no arguments. The hook should +# exit with non-zero status after issuing an appropriate message if +# it wants to stop the commit. +# +# To enable this hook, rename this file to "pre-commit". + +if git rev-parse --verify HEAD >/dev/null 2>&1 +then + against=HEAD +else + # Initial commit: diff against an empty tree object + against=$(git hash-object -t tree /dev/null) +fi + +# If you want to allow non-ASCII filenames set this variable to true. +allownonascii=$(git config --bool hooks.allownonascii) + +# Redirect output to stderr. 
+exec 1>&2 + +# Cross platform projects tend to avoid non-ASCII filenames; prevent +# them from being added to the repository. We exploit the fact that the +# printable range starts at the space character and ends with tilde. +if [ "$allownonascii" != "true" ] && + # Note that the use of brackets around a tr range is ok here, (it's + # even required, for portability to Solaris 10's /usr/bin/tr), since + # the square bracket bytes happen to fall in the designated range. + test $(git diff --cached --name-only --diff-filter=A -z $against | + LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 +then + cat <<\EOF +Error: Attempt to add a non-ASCII file name. + +This can cause problems if you want to work with people on other platforms. + +To be portable it is advisable to rename the file. + +If you know what you are doing you can disable this check using: + + git config hooks.allownonascii true +EOF + exit 1 +fi + +# If there are whitespace errors, print the offending file names and fail. +exec git diff-index --check --cached $against -- diff --git a/test-data/two-commits/git/hooks/pre-merge-commit.sample b/test-data/two-commits/git/hooks/pre-merge-commit.sample new file mode 100755 index 0000000..399eab1 --- /dev/null +++ b/test-data/two-commits/git/hooks/pre-merge-commit.sample @@ -0,0 +1,13 @@ +#!/bin/sh +# +# An example hook script to verify what is about to be committed. +# Called by "git merge" with no arguments. The hook should +# exit with non-zero status after issuing an appropriate message to +# stderr if it wants to stop the merge commit. +# +# To enable this hook, rename this file to "pre-merge-commit". + +. git-sh-setup +test -x "$GIT_DIR/hooks/pre-commit" && + exec "$GIT_DIR/hooks/pre-commit" +: diff --git a/test-data/two-commits/git/hooks/pre-push.sample b/test-data/two-commits/git/hooks/pre-push.sample new file mode 100755 index 0000000..6187dbf --- /dev/null +++ b/test-data/two-commits/git/hooks/pre-push.sample @@ -0,0 +1,53 @@ +#!/bin/sh + +# An example hook script to verify what is about to be pushed. Called by "git +# push" after it has checked the remote status, but before anything has been +# pushed. If this script exits with a non-zero status nothing will be pushed. +# +# This hook is called with the following parameters: +# +# $1 -- Name of the remote to which the push is being done +# $2 -- URL to which the push is being done +# +# If pushing without using a named remote those arguments will be equal. +# +# Information about the commits which are being pushed is supplied as lines to +# the standard input in the form: +# +# +# +# This sample shows how to prevent push of commits where the log message starts +# with "WIP" (work in progress). 
+ +remote="$1" +url="$2" + +z40=0000000000000000000000000000000000000000 + +while read local_ref local_sha remote_ref remote_sha +do + if [ "$local_sha" = $z40 ] + then + # Handle delete + : + else + if [ "$remote_sha" = $z40 ] + then + # New branch, examine all commits + range="$local_sha" + else + # Update to existing branch, examine new commits + range="$remote_sha..$local_sha" + fi + + # Check for WIP commit + commit=`git rev-list -n 1 --grep '^WIP' "$range"` + if [ -n "$commit" ] + then + echo >&2 "Found WIP commit in $local_ref, not pushing" + exit 1 + fi + fi +done + +exit 0 diff --git a/test-data/two-commits/git/hooks/pre-rebase.sample b/test-data/two-commits/git/hooks/pre-rebase.sample new file mode 100755 index 0000000..6cbef5c --- /dev/null +++ b/test-data/two-commits/git/hooks/pre-rebase.sample @@ -0,0 +1,169 @@ +#!/bin/sh +# +# Copyright (c) 2006, 2008 Junio C Hamano +# +# The "pre-rebase" hook is run just before "git rebase" starts doing +# its job, and can prevent the command from running by exiting with +# non-zero status. +# +# The hook is called with the following parameters: +# +# $1 -- the upstream the series was forked from. +# $2 -- the branch being rebased (or empty when rebasing the current branch). +# +# This sample shows how to prevent topic branches that are already +# merged to 'next' branch from getting rebased, because allowing it +# would result in rebasing already published history. + +publish=next +basebranch="$1" +if test "$#" = 2 +then + topic="refs/heads/$2" +else + topic=`git symbolic-ref HEAD` || + exit 0 ;# we do not interrupt rebasing detached HEAD +fi + +case "$topic" in +refs/heads/??/*) + ;; +*) + exit 0 ;# we do not interrupt others. + ;; +esac + +# Now we are dealing with a topic branch being rebased +# on top of master. Is it OK to rebase it? + +# Does the topic really exist? +git show-ref -q "$topic" || { + echo >&2 "No such branch $topic" + exit 1 +} + +# Is topic fully merged to master? +not_in_master=`git rev-list --pretty=oneline ^master "$topic"` +if test -z "$not_in_master" +then + echo >&2 "$topic is fully merged to master; better remove it." + exit 1 ;# we could allow it, but there is no point. +fi + +# Is topic ever merged to next? If so you should not be rebasing it. +only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` +only_next_2=`git rev-list ^master ${publish} | sort` +if test "$only_next_1" = "$only_next_2" +then + not_in_topic=`git rev-list "^$topic" master` + if test -z "$not_in_topic" + then + echo >&2 "$topic is already up to date with master" + exit 1 ;# we could allow it, but there is no point. + else + exit 0 + fi +else + not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` + /usr/bin/perl -e ' + my $topic = $ARGV[0]; + my $msg = "* $topic has commits already merged to public branch:\n"; + my (%not_in_next) = map { + /^([0-9a-f]+) /; + ($1 => 1); + } split(/\n/, $ARGV[1]); + for my $elem (map { + /^([0-9a-f]+) (.*)$/; + [$1 => $2]; + } split(/\n/, $ARGV[2])) { + if (!exists $not_in_next{$elem->[0]}) { + if ($msg) { + print STDERR $msg; + undef $msg; + } + print STDERR " $elem->[1]\n"; + } + } + ' "$topic" "$not_in_next" "$not_in_master" + exit 1 +fi + +<<\DOC_END + +This sample hook safeguards topic branches that have been +published from being rewound. + +The workflow assumed here is: + + * Once a topic branch forks from "master", "master" is never + merged into it again (either directly or indirectly). + + * Once a topic branch is fully cooked and merged into "master", + it is deleted. 
If you need to build on top of it to correct + earlier mistakes, a new topic branch is created by forking at + the tip of the "master". This is not strictly necessary, but + it makes it easier to keep your history simple. + + * Whenever you need to test or publish your changes to topic + branches, merge them into "next" branch. + +The script, being an example, hardcodes the publish branch name +to be "next", but it is trivial to make it configurable via +$GIT_DIR/config mechanism. + +With this workflow, you would want to know: + +(1) ... if a topic branch has ever been merged to "next". Young + topic branches can have stupid mistakes you would rather + clean up before publishing, and things that have not been + merged into other branches can be easily rebased without + affecting other people. But once it is published, you would + not want to rewind it. + +(2) ... if a topic branch has been fully merged to "master". + Then you can delete it. More importantly, you should not + build on top of it -- other people may already want to + change things related to the topic as patches against your + "master", so if you need further changes, it is better to + fork the topic (perhaps with the same name) afresh from the + tip of "master". + +Let's look at this example: + + o---o---o---o---o---o---o---o---o---o "next" + / / / / + / a---a---b A / / + / / / / + / / c---c---c---c B / + / / / \ / + / / / b---b C \ / + / / / / \ / + ---o---o---o---o---o---o---o---o---o---o---o "master" + + +A, B and C are topic branches. + + * A has one fix since it was merged up to "next". + + * B has finished. It has been fully merged up to "master" and "next", + and is ready to be deleted. + + * C has not merged to "next" at all. + +We would want to allow C to be rebased, refuse A, and encourage +B to be deleted. + +To compute (1): + + git rev-list ^master ^topic next + git rev-list ^master next + + if these match, topic has not merged in next at all. + +To compute (2): + + git rev-list master..topic + + if this is empty, it is fully merged to "master". + +DOC_END diff --git a/test-data/two-commits/git/hooks/pre-receive.sample b/test-data/two-commits/git/hooks/pre-receive.sample new file mode 100755 index 0000000..a1fd29e --- /dev/null +++ b/test-data/two-commits/git/hooks/pre-receive.sample @@ -0,0 +1,24 @@ +#!/bin/sh +# +# An example hook script to make use of push options. +# The example simply echoes all push options that start with 'echoback=' +# and rejects all pushes when the "reject" push option is used. +# +# To enable this hook, rename this file to "pre-receive". + +if test -n "$GIT_PUSH_OPTION_COUNT" +then + i=0 + while test "$i" -lt "$GIT_PUSH_OPTION_COUNT" + do + eval "value=\$GIT_PUSH_OPTION_$i" + case "$value" in + echoback=*) + echo "echo from the pre-receive-hook: ${value#*=}" >&2 + ;; + reject) + exit 1 + esac + i=$((i + 1)) + done +fi diff --git a/test-data/two-commits/git/hooks/prepare-commit-msg.sample b/test-data/two-commits/git/hooks/prepare-commit-msg.sample new file mode 100755 index 0000000..10fa14c --- /dev/null +++ b/test-data/two-commits/git/hooks/prepare-commit-msg.sample @@ -0,0 +1,42 @@ +#!/bin/sh +# +# An example hook script to prepare the commit log message. +# Called by "git commit" with the name of the file that has the +# commit message, followed by the description of the commit +# message's source. The hook's purpose is to edit the commit +# message file. If the hook fails with a non-zero status, +# the commit is aborted. 
+# +# To enable this hook, rename this file to "prepare-commit-msg". + +# This hook includes three examples. The first one removes the +# "# Please enter the commit message..." help message. +# +# The second includes the output of "git diff --name-status -r" +# into the message, just before the "git status" output. It is +# commented because it doesn't cope with --amend or with squashed +# commits. +# +# The third example adds a Signed-off-by line to the message, that can +# still be edited. This is rarely a good idea. + +COMMIT_MSG_FILE=$1 +COMMIT_SOURCE=$2 +SHA1=$3 + +/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE" + +# case "$COMMIT_SOURCE,$SHA1" in +# ,|template,) +# /usr/bin/perl -i.bak -pe ' +# print "\n" . `git diff --cached --name-status -r` +# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;; +# *) ;; +# esac + +# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') +# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE" +# if test -z "$COMMIT_SOURCE" +# then +# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE" +# fi diff --git a/test-data/two-commits/git/hooks/update.sample b/test-data/two-commits/git/hooks/update.sample new file mode 100755 index 0000000..80ba941 --- /dev/null +++ b/test-data/two-commits/git/hooks/update.sample @@ -0,0 +1,128 @@ +#!/bin/sh +# +# An example hook script to block unannotated tags from entering. +# Called by "git receive-pack" with arguments: refname sha1-old sha1-new +# +# To enable this hook, rename this file to "update". +# +# Config +# ------ +# hooks.allowunannotated +# This boolean sets whether unannotated tags will be allowed into the +# repository. By default they won't be. +# hooks.allowdeletetag +# This boolean sets whether deleting tags will be allowed in the +# repository. By default they won't be. +# hooks.allowmodifytag +# This boolean sets whether a tag may be modified after creation. By default +# it won't be. +# hooks.allowdeletebranch +# This boolean sets whether deleting branches will be allowed in the +# repository. By default they won't be. +# hooks.denycreatebranch +# This boolean sets whether remotely creating branches will be denied +# in the repository. By default this is allowed. +# + +# --- Command line +refname="$1" +oldrev="$2" +newrev="$3" + +# --- Safety check +if [ -z "$GIT_DIR" ]; then + echo "Don't run this script from the command line." >&2 + echo " (if you want, you could supply GIT_DIR then run" >&2 + echo " $0 )" >&2 + exit 1 +fi + +if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then + echo "usage: $0 " >&2 + exit 1 +fi + +# --- Config +allowunannotated=$(git config --bool hooks.allowunannotated) +allowdeletebranch=$(git config --bool hooks.allowdeletebranch) +denycreatebranch=$(git config --bool hooks.denycreatebranch) +allowdeletetag=$(git config --bool hooks.allowdeletetag) +allowmodifytag=$(git config --bool hooks.allowmodifytag) + +# check for no description +projectdesc=$(sed -e '1q' "$GIT_DIR/description") +case "$projectdesc" in +"Unnamed repository"* | "") + echo "*** Project description file hasn't been set" >&2 + exit 1 + ;; +esac + +# --- Check types +# if $newrev is 0000...0000, it's a commit to delete a ref. 
+zero="0000000000000000000000000000000000000000" +if [ "$newrev" = "$zero" ]; then + newrev_type=delete +else + newrev_type=$(git cat-file -t $newrev) +fi + +case "$refname","$newrev_type" in + refs/tags/*,commit) + # un-annotated tag + short_refname=${refname##refs/tags/} + if [ "$allowunannotated" != "true" ]; then + echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 + echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 + exit 1 + fi + ;; + refs/tags/*,delete) + # delete tag + if [ "$allowdeletetag" != "true" ]; then + echo "*** Deleting a tag is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/tags/*,tag) + # annotated tag + if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 + then + echo "*** Tag '$refname' already exists." >&2 + echo "*** Modifying a tag is not allowed in this repository." >&2 + exit 1 + fi + ;; + refs/heads/*,commit) + # branch + if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then + echo "*** Creating a branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/heads/*,delete) + # delete branch + if [ "$allowdeletebranch" != "true" ]; then + echo "*** Deleting a branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/remotes/*,commit) + # tracking branch + ;; + refs/remotes/*,delete) + # delete tracking branch + if [ "$allowdeletebranch" != "true" ]; then + echo "*** Deleting a tracking branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + *) + # Anything else (is there anything else?) + echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 + exit 1 + ;; +esac + +# --- Finished +exit 0 diff --git a/test-data/two-commits/git/index b/test-data/two-commits/git/index new file mode 100644 index 0000000000000000000000000000000000000000..f3e11cf3fc1e0750a1fad5253d8add10695b8514 GIT binary patch literal 137 zcmZ?q402{*U|<4b#>AaWawe4uCNP?jfq{ipWaT;rhQ=j=EjAN?YD9pTyIxRRGJBf+ zceT=gdv{O26~yw>vyOqIB(=CiucV@cfgvQw)fK2tlEF~HfNR?Kf8v~zPs%&3)bn|< gG0j$DU46*Cn^)Hs*>~m!#0$Th*eWf1XW6e-0LMlw!T 1639514623 -0500 commit (initial): hello hello +cdb0fcad7400610b1d1797a326a89414525160fe 8ece465b7ecf8337bf767c9602d21bb92f2fad8a Martin Schaef 1639514670 -0500 commit: one more commit diff --git a/test-data/two-commits/git/logs/refs/heads/master b/test-data/two-commits/git/logs/refs/heads/master new file mode 100644 index 0000000..2d5af8d --- /dev/null +++ b/test-data/two-commits/git/logs/refs/heads/master @@ -0,0 +1,2 @@ +0000000000000000000000000000000000000000 cdb0fcad7400610b1d1797a326a89414525160fe Martin Schaef 1639514623 -0500 commit (initial): hello hello +cdb0fcad7400610b1d1797a326a89414525160fe 8ece465b7ecf8337bf767c9602d21bb92f2fad8a Martin Schaef 1639514670 -0500 commit: one more commit diff --git a/test-data/two-commits/git/objects/69/e978a2558e8b47b25058af8e10482831feded6 b/test-data/two-commits/git/objects/69/e978a2558e8b47b25058af8e10482831feded6 new file mode 100644 index 0000000000000000000000000000000000000000..47ebcf66c4a82656b828ee5d65dc7438d73ce217 GIT binary patch literal 53 zcmbvpu6NUf) literal 0 HcmV?d00001 diff --git a/test-data/two-commits/git/objects/7f/112b196b963ff72675febdbb97da5204f9497e b/test-data/two-commits/git/objects/7f/112b196b963ff72675febdbb97da5204f9497e new file mode 100644 index 0000000000000000000000000000000000000000..f1f2ae16d7874a6011ba237a5bc40960ae8380d7 GIT binary patch literal 27 jcmb9V=y!@Ff%bxC`m0Y(JQGaVW=0>mdu`J|6Q&0-`?HRZw0aZ L^sEB_H`Ne&{~#8# literal 0 
HcmV?d00001 diff --git a/test-data/two-commits/git/objects/9a/71f81a4b4754b686fd37cbb3c72d0250d344aa b/test-data/two-commits/git/objects/9a/71f81a4b4754b686fd37cbb3c72d0250d344aa new file mode 100644 index 0000000000000000000000000000000000000000..3f54fe4d7688da045ffe5c32eee685d09d6e9c01 GIT binary patch literal 29 lcmbù J߶ßR«wPÞD§¬×®ÒI){\þ+…·­Á¹âUF9u \ No newline at end of file diff --git a/test-data/two-commits/git/refs/heads/master b/test-data/two-commits/git/refs/heads/master new file mode 100644 index 0000000..7fdf54b --- /dev/null +++ b/test-data/two-commits/git/refs/heads/master @@ -0,0 +1 @@ +8ece465b7ecf8337bf767c9602d21bb92f2fad8a diff --git a/test-data/two-commits/test.txt b/test-data/two-commits/test.txt new file mode 100644 index 0000000..7f112b1 --- /dev/null +++ b/test-data/two-commits/test.txt @@ -0,0 +1,2 @@ +hello git +
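Aside: the JavaBestPractices entries in the recommendations.json test fixture above describe two concrete fixes — pass the caught exception as the cause ("rootCause") when wrapping it so the stack trace is preserved, and call Thread.currentThread().interrupt() before wrapping an InterruptedException so the interrupt status is not lost. A minimal illustrative sketch of both patterns follows; the class and method names are hypothetical and not taken from this repository.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

class RecommendationPatternsExample {

    // Pattern 1: keep the stack trace by passing the caught exception
    // as the cause of the new exception instead of discarding it.
    static byte[] readConfig(Path path) {
        try {
            return Files.readAllBytes(path);
        } catch (IOException e) {
            // Wrapping without 'e' would lose the original stack trace.
            throw new IllegalStateException("Failed to read config: " + path, e);
        }
    }

    // Pattern 2: restore the interrupt flag before wrapping an
    // InterruptedException, so higher-level interrupt handlers still work.
    static void pause(long millis) {
        try {
            Thread.sleep(millis);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new RuntimeException("Interrupted while waiting", e);
        }
    }

    public static void main(String[] args) {
        pause(10L);
        System.out.println("done");
    }
}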