Merge branch 'main' into a2-7-3-undocumented-function-scope
lcartey authored Feb 14, 2024
2 parents 2b9036b + e204fc4 commit 4cffce3
Showing 155 changed files with 1,632 additions and 861 deletions.
58 changes: 26 additions & 32 deletions .github/workflows/update-release-status.yml
@@ -1,12 +1,5 @@
name: "Update Release Status"
on:
check_run:
types:
- completed
- rerequested
branches:
- "rc/**"

workflow_dispatch:
inputs:
head-sha:
@@ -20,40 +13,36 @@ permissions:
checks: write
contents: write

env:
HEAD_SHA: ${{ inputs.head-sha }}

jobs:
validate-check-runs:
runs-on: ubuntu-22.04
outputs:
status: ${{ steps.set-output.outputs.status }}
check-run-head-sha: ${{ steps.set-output.outputs.check-run-head-sha }}
conclusion: ${{ steps.set-output.outputs.conclusion }}
steps:
- name: Determine check run head SHA
env:
HEAD_SHA_FROM_EVENT: ${{ github.event.check_run.head_sha }}
HEAD_SHA_FROM_INPUTS: ${{ inputs.head-sha }}
run: |
if [[ $GITHUB_EVENT_NAME == "workflow_dispatch" ]]; then
echo "CHECK_RUN_HEAD_SHA=$HEAD_SHA_FROM_INPUTS" >> "$GITHUB_ENV"
else
echo "CHECK_RUN_HEAD_SHA=$HEAD_SHA_FROM_EVENT" >> "$GITHUB_ENV"
fi
- name: Checkout
uses: actions/checkout@v4
with:
ref: ${{ env.CHECK_RUN_HEAD_SHA }}
ref: ${{ inputs.head-sha }}

- name: Get release status check run
id: get-check-run
if: (github.event_name == 'check_run' && github.event.check_run.conclusion == 'success' && github.event.check_run.name != github.workflow) || github.event_name == 'workflow_dispatch'
env:
GITHUB_TOKEN: ${{ github.token }}
run: |
check_run_info=$(gh api \
--header "Accept: application/vnd.github+json" \
--header "X-GitHub-Api-Version: 2022-11-28" \
--jq '.check_runs[] | select(.name == "release-status") | {id: .id, status: .status, conclusion: .conclusion}' \
/repos/$GITHUB_REPOSITORY/commits/$CHECK_RUN_HEAD_SHA/check-runs)
/repos/$GITHUB_REPOSITORY/commits/$HEAD_SHA/check-runs)
if [[ -z "$check_run_info" ]]; then
echo "No release status check run found"
exit 1
fi
check_run_id=$(echo "$check_run_info" | jq -r '.id')
check_run_status=$(echo "$check_run_info" | jq -r '.status')
@@ -64,19 +53,22 @@ jobs:
echo "CHECK_RUN_CONCLUSION=$check_run_conclusion" >> "$GITHUB_ENV"
- name: Reset release status
if: env.CHECK_RUN_STATUS == 'completed' && ((github.event_name == 'check_run' && github.event.action == 'rerequested') || github.event_name == 'workflow_dispatch')
if: env.CHECK_RUN_STATUS == 'completed'
env:
GITHUB_TOKEN: ${{ github.token }}
run: |
CHECK_RUN_ID=$(gh api \
check_run_id=$(gh api \
--header "Accept: application/vnd.github+json" \
--header "X-GitHub-Api-Version: 2022-11-28" \
--field name="release-status" \
--field head_sha="$CHECK_RUN_HEAD_SHA" \
--field head_sha="$HEAD_SHA" \
--jq ".id" \
/repos/$GITHUB_REPOSITORY/check-runs)
echo "Created release status check run with id $CHECK_RUN_ID"
echo "Created release status check run with id $check_run_id"
# Reset the status to in progress.
echo "CHECK_RUN_STATUS=in_progress" >> "$GITHUB_ENV"
echo "CHECK_RUN_ID=$check_run_id" >> "$GITHUB_ENV"
- name: Check all runs completed
if: env.CHECK_RUN_STATUS != 'completed'
@@ -87,10 +79,12 @@
--header "Accept: application/vnd.github+json" \
--header "X-GitHub-Api-Version: 2022-11-28" \
--jq '.check_runs | map(select(.name != "release-status"))' \
/repos/$GITHUB_REPOSITORY/commits/$CHECK_RUN_HEAD_SHA/check-runs)
/repos/$GITHUB_REPOSITORY/commits/$HEAD_SHA/check-runs)
status_stats=$(echo "$check_runs" | jq -r '. | {failed: (map(select(.conclusion == "failure")) | length), pending: (map(select(.status != "completed")) | length) }')
echo "status_stats=$status_stats"
failed=$(echo "$status_stats" | jq -r '.failed')
pending=$(echo "$status_stats" | jq -r '.pending')
@@ -101,7 +95,6 @@ jobs:
if: env.CHECK_RUNS_PENDING == '0' && env.CHECK_RUN_STATUS != 'completed'
env:
GITHUB_TOKEN: ${{ github.token }}
CHECK_RUNS_FAILED: ${{ env.check-runs-failed }}
run: |
if [[ "$CHECK_RUNS_FAILED" == "0" ]]; then
echo "All check runs succeeded"
@@ -123,22 +116,23 @@ jobs:
--input - \
/repos/$GITHUB_REPOSITORY/check-runs/$CHECK_RUN_ID
echo "RELEASE_STATUS_CONCLUSION=$conclusion" >> "$GITHUB_ENV"
- name: Set output
id: set-output
run: |
echo "conclusion=$RELEASE_STATUS_CONCLUSION" >> "$GITHUB_OUTPUT"
if [[ "$CHECK_RUNS_PENDING" == "0" ]]; then
echo "status=completed" >> "$GITHUB_OUTPUT"
else
echo "status=in_progress" >> "$GITHUB_OUTPUT"
fi
echo "check-run-head-sha=$CHECK_RUN_HEAD_SHA" >> "$GITHUB_OUTPUT"
update-release:
needs: validate-check-runs
if: needs.validate-check-runs.outputs.status == 'completed'
if: needs.validate-check-runs.outputs.status == 'completed' && needs.validate-check-runs.outputs.conclusion == 'success'
uses: ./.github/workflows/update-release.yml
with:
head-sha: ${{ needs.validate-check-runs.outputs.check-run-head-sha }}
head-sha: ${{ inputs.head-sha }}
secrets:
AUTOMATION_PRIVATE_KEY: ${{ secrets.AUTOMATION_PRIVATE_KEY }}
2 changes: 1 addition & 1 deletion .github/workflows/update-release.yml
@@ -30,7 +30,7 @@ jobs:
- name: Checkout
uses: actions/checkout@v4
with:
ref: ${{ inputs.head-sha }}
fetch-depth: 0 # We need the full history to compute the changelog

- name: Install Python
uses: actions/setup-python@v4
@@ -14,16 +14,14 @@
import cpp
import codingstandards.c.cert
import codingstandards.cpp.dataflow.DataFlow
import DataFlow::PathGraph
import NonArrayPointerToArrayIndexingExprFlow::PathGraph

/**
* A data-flow configuration that tracks flow from an `AddressOfExpr` of a variable
* of `PointerType` that is not also an `ArrayType` to a `PointerArithmeticOrArrayExpr`
*/
class NonArrayPointerToArrayIndexingExprConfig extends DataFlow::Configuration {
NonArrayPointerToArrayIndexingExprConfig() { this = "ArrayToArrayIndexConfig" }

override predicate isSource(DataFlow::Node source) {
module NonArrayPointerToArrayIndexingExprConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node source) {
exists(AddressOfExpr ao, Type t |
source.asExpr() = ao and
not ao.getOperand() instanceof ArrayExpr and
@@ -35,15 +33,15 @@ class NonArrayPointerToArrayIndexingExprConfig extends DataFlow::Configuration {
)
}

override predicate isSink(DataFlow::Node sink) {
predicate isSink(DataFlow::Node sink) {
exists(PointerArithmeticOrArrayExpr ae |
sink.asExpr() = ae.getPointerOperand() and
not sink.asExpr() instanceof Literal and
not ae.isNonPointerOperandZero()
)
}

override predicate isBarrierOut(DataFlow::Node node) {
predicate isBarrierOut(DataFlow::Node node) {
// the default interprocedural data-flow model flows through any field or array assignment
// expressions to the qualifier (array base, pointer dereferenced, or qualifier) instead of the
// individual element or field that the assignment modifies. this default behaviour causes
@@ -63,6 +61,9 @@ class NonArrayPointerToArrayIndexingExprConfig extends DataFlow::Configuration {
}
}

module NonArrayPointerToArrayIndexingExprFlow =
DataFlow::Global<NonArrayPointerToArrayIndexingExprConfig>;

class PointerArithmeticOrArrayExpr extends Expr {
Expr operand;

@@ -101,9 +102,11 @@ class PointerArithmeticOrArrayExpr extends Expr {
predicate isNonPointerOperandZero() { operand.(Literal).getValue().toInt() = 0 }
}

from DataFlow::PathNode source, DataFlow::PathNode sink
from
NonArrayPointerToArrayIndexingExprFlow::PathNode source,
NonArrayPointerToArrayIndexingExprFlow::PathNode sink
where
not isExcluded(sink.getNode().asExpr(),
InvalidMemory2Package::doNotUsePointerArithmeticOnNonArrayObjectPointersQuery()) and
any(NonArrayPointerToArrayIndexingExprConfig cfg).hasFlowPath(source, sink)
NonArrayPointerToArrayIndexingExprFlow::flowPath(source, sink)
select sink, source, sink, "Pointer arithmetic on non-array object pointer."
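
The query diffs in this commit all apply the same migration recipe: a class extending the deprecated DataFlow::Configuration (with its characteristic predicate and string identifier) becomes a module implementing DataFlow::ConfigSig, the module is instantiated with DataFlow::Global<...>, and the query imports the instantiated module's PathGraph in place of DataFlow::PathGraph. The following is a minimal sketch of that recipe, not part of the commit; the query id, the source/sink choices, and the semmle.code.cpp.dataflow.new.DataFlow import path are illustrative assumptions (the queries in this commit import codingstandards.cpp.dataflow.DataFlow instead).

/**
 * Illustrative sketch of the shared migration recipe (not part of this commit).
 * @kind path-problem
 * @id cpp/sketch/new-dataflow-api
 */

import cpp
// Assumed import path for this sketch; the commit's queries import
// codingstandards.cpp.dataflow.DataFlow instead.
import semmle.code.cpp.dataflow.new.DataFlow
// As in the migrated queries, the path graph comes from the instantiated flow
// module (defined below) rather than from DataFlow::PathGraph.
import ExampleFlow::PathGraph

// A module implementing DataFlow::ConfigSig replaces the class that extended
// DataFlow::Configuration: no characteristic predicate, no string identifier,
// and no `override` on isSource/isSink.
module ExampleConfig implements DataFlow::ConfigSig {
  predicate isSource(DataFlow::Node source) { source.asExpr() instanceof AddressOfExpr }

  predicate isSink(DataFlow::Node sink) { sink.asExpr() = any(ArrayExpr ae).getArrayBase() }
}

// The global flow computation is obtained by instantiating the parameterized
// DataFlow::Global module with the configuration.
module ExampleFlow = DataFlow::Global<ExampleConfig>;

from ExampleFlow::PathNode source, ExampleFlow::PathNode sink
where ExampleFlow::flowPath(source, sink)
select sink.getNode(), source, sink, "Address-of expression flows to an array base (sketch only)."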
@@ -15,7 +15,7 @@ import cpp
import codingstandards.c.cert
import codingstandards.c.Pointers
import codingstandards.cpp.dataflow.TaintTracking
import DataFlow::PathGraph
import ScaledIntegerPointerArithmeticFlow::PathGraph

/**
* An expression which invokes the `offsetof` macro or `__builtin_offsetof` operation.
@@ -69,12 +69,10 @@ class ScaledIntegerExpr extends Expr {
* A data-flow configuration modeling data-flow from a `ScaledIntegerExpr` to a
* `PointerArithmeticExpr` where the pointer does not point to a 1-byte type.
*/
class ScaledIntegerPointerArithmeticConfig extends DataFlow::Configuration {
ScaledIntegerPointerArithmeticConfig() { this = "ScaledIntegerPointerArithmeticConfig" }
module ScaledIntegerPointerArithmeticConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node src) { src.asExpr() instanceof ScaledIntegerExpr }

override predicate isSource(DataFlow::Node src) { src.asExpr() instanceof ScaledIntegerExpr }

override predicate isSink(DataFlow::Node sink) {
predicate isSink(DataFlow::Node sink) {
exists(PointerArithmeticExpr pa |
// exclude pointers to 1-byte types as they do not scale
pa.getPointer().getFullyConverted().getType().(DerivedType).getBaseType().getSize() != 1 and
@@ -83,9 +81,13 @@ class ScaledIntegerPointerArithmeticConfig extends DataFlow::Configuration {
}
}

from ScaledIntegerPointerArithmeticConfig config, DataFlow::PathNode src, DataFlow::PathNode sink
module ScaledIntegerPointerArithmeticFlow = DataFlow::Global<ScaledIntegerPointerArithmeticConfig>;

from
ScaledIntegerPointerArithmeticFlow::PathNode src,
ScaledIntegerPointerArithmeticFlow::PathNode sink
where
not isExcluded(sink.getNode().asExpr(),
Pointers2Package::doNotAddOrSubtractAScaledIntegerToAPointerQuery()) and
config.hasFlowPath(src, sink)
ScaledIntegerPointerArithmeticFlow::flowPath(src, sink)
select sink, src, sink, "Scaled integer used in pointer arithmetic."
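
A further detail is visible in this query's diff: with a class-based configuration the from clause had to bind a configuration value (from ScaledIntegerPointerArithmeticConfig config ... where config.hasFlowPath(src, sink)), while the previous query reached the same result through any(...).hasFlowPath(...). With the module-based API both idioms collapse into one qualified call, and nothing configuration-related is bound in from. A short sketch of just that call-site change, reusing the ExampleConfig/ExampleFlow placeholders from the sketch above:

// Old style (class-based configuration), in either of the two idioms used by
// the original queries in this commit:
//   from ExampleConfig config, DataFlow::PathNode src, DataFlow::PathNode sink
//   where config.hasFlowPath(src, sink)
// or, without binding a configuration variable:
//   where any(ExampleConfig config).hasFlowPath(src, sink)

// New style: the flow computation is a module, so the predicate is referenced
// by its qualified name and no configuration value appears in `from`.
from ExampleFlow::PathNode src, ExampleFlow::PathNode sink
where ExampleFlow::flowPath(src, sink)
select sink.getNode(), src, sink, "Example flow (sketch only)."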
16 changes: 8 additions & 8 deletions c/cert/src/rules/CON30-C/CleanUpThreadSpecificStorage.ql
@@ -18,10 +18,8 @@ import codingstandards.cpp.Concurrency
import codingstandards.cpp.dataflow.TaintTracking
import codingstandards.cpp.dataflow.DataFlow

class TssCreateToTssDeleteDataFlowConfiguration extends DataFlow::Configuration {
TssCreateToTssDeleteDataFlowConfiguration() { this = "TssCreateToTssDeleteDataFlowConfiguration" }

override predicate isSource(DataFlow::Node node) {
module TssCreateToTssDeleteConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node node) {
exists(TSSCreateFunctionCall tsc, Expr e |
// the only requirement of the source is that at some point
// it refers to the key of a create statement
@@ -30,7 +28,7 @@ class TssCreateToTssDeleteDataFlowConfiguration extends DataFlow::Configuration
)
}

override predicate isSink(DataFlow::Node node) {
predicate isSink(DataFlow::Node node) {
exists(TSSDeleteFunctionCall tsd, Expr e |
// the only requirement of a sink is that at some point
// it references the key of a delete call.
@@ -40,15 +38,17 @@ class TssCreateToTssDeleteDataFlowConfiguration extends DataFlow::Configuration
}
}

module TssCreateToTssDeleteFlow = DataFlow::Global<TssCreateToTssDeleteConfig>;

from TSSCreateFunctionCall tcfc
where
not isExcluded(tcfc, Concurrency4Package::cleanUpThreadSpecificStorageQuery()) and
// all calls to `tss_create` must be bookended by calls to tss_delete
// even if a thread is not created.
not exists(TssCreateToTssDeleteDataFlowConfiguration config |
config.hasFlow(DataFlow::definitionByReferenceNodeFromArgument(tcfc.getKey()), _)
not (
TssCreateToTssDeleteFlow::flow(DataFlow::definitionByReferenceNodeFromArgument(tcfc.getKey()), _)
or
config.hasFlow(DataFlow::exprNode(tcfc.getKey()), _)
TssCreateToTssDeleteFlow::flow(DataFlow::exprNode(tcfc.getKey()), _)
)
or
// if a thread is created, we must check additional items
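
Unlike the path-problem queries above, CleanUpThreadSpecificStorage.ql only asks whether any flow exists: TssCreateToTssDeleteFlow::flow(...) is used under negation to find tss_create calls whose key never reaches a tss_delete call, so no PathNode or PathGraph import is needed. Below is a self-contained sketch of that flow-under-negation shape; the function names (malloc/free standing in for the thread-specific-storage calls) and the import path are illustrative assumptions.

import cpp
// Assumed import path for this sketch; the commit's queries import
// codingstandards.cpp.dataflow.DataFlow instead.
import semmle.code.cpp.dataflow.new.DataFlow

module AllocToFreeConfig implements DataFlow::ConfigSig {
  predicate isSource(DataFlow::Node source) {
    source.asExpr().(FunctionCall).getTarget().hasName("malloc")
  }

  predicate isSink(DataFlow::Node sink) {
    sink.asExpr() = any(FunctionCall free | free.getTarget().hasName("free")).getArgument(0)
  }
}

module AllocToFreeFlow = DataFlow::Global<AllocToFreeConfig>;

// `flow` is the plain reachability predicate of the instantiated module;
// `flowPath` is only needed when the query reports the path itself. Used under
// negation, `flow` reports sources from which no sink is ever reached.
from FunctionCall alloc
where
  alloc.getTarget().hasName("malloc") and
  not AllocToFreeFlow::flow(DataFlow::exprNode(alloc), _)
select alloc, "Allocation whose result never flows to a matching free (sketch only)."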
@@ -15,9 +15,8 @@ import cpp
import codingstandards.c.cert
import codingstandards.cpp.Alignment
import codingstandards.cpp.dataflow.DataFlow
import codingstandards.cpp.dataflow.DataFlow2
import semmle.code.cpp.rangeanalysis.SimpleRangeAnalysis
import DataFlow::PathGraph
import ExprWithAlignmentToCStyleCastFlow::PathGraph

/**
* An expression with a type that has defined alignment requirements
@@ -96,8 +95,7 @@ class UnconvertedCastFromNonVoidPointerExpr extends Expr {
*/
class DefaultAlignedPointerExpr extends UnconvertedCastFromNonVoidPointerExpr, ExprWithAlignment {
DefaultAlignedPointerExpr() {
not any(AllocationOrAddressOfExprToUnconvertedCastFromNonVoidPointerExprConfig config)
.hasFlowTo(DataFlow::exprNode(this))
not AllocationOrAddressOfExprToUnconvertedCastFromNonVoidPointerExprFlow::flowTo(DataFlow::exprNode(this))
}

override int getAlignment() { result = this.getType().(PointerType).getBaseType().getAlignment() }
@@ -118,43 +116,37 @@ class DefaultAlignedPointerExpr extends UnconvertedCastFromNonVoidPointerExpr, E
* to exclude an `DefaultAlignedPointerAccessExpr` as a source if a preceding source
* defined by this configuration provides more accurate alignment information.
*/
class AllocationOrAddressOfExprToUnconvertedCastFromNonVoidPointerExprConfig extends DataFlow2::Configuration
module AllocationOrAddressOfExprToUnconvertedCastFromNonVoidPointerExprConfig implements
DataFlow::ConfigSig
{
AllocationOrAddressOfExprToUnconvertedCastFromNonVoidPointerExprConfig() {
this = "AllocationOrAddressOfExprToUnconvertedCastFromNonVoidPointerExprConfig"
}

override predicate isSource(DataFlow::Node source) {
predicate isSource(DataFlow::Node source) {
source.asExpr() instanceof AddressOfAlignedVariableExpr or
source.asExpr() instanceof DefinedAlignmentAllocationExpr
}

override predicate isSink(DataFlow::Node sink) {
predicate isSink(DataFlow::Node sink) {
sink.asExpr() instanceof UnconvertedCastFromNonVoidPointerExpr
}
}

module AllocationOrAddressOfExprToUnconvertedCastFromNonVoidPointerExprFlow =
DataFlow::Global<AllocationOrAddressOfExprToUnconvertedCastFromNonVoidPointerExprConfig>;

/**
* A data-flow configuration for analysing the flow of `ExprWithAlignment` pointer expressions
* to casts which perform pointer type conversions and potentially create pointer alignment issues.
*/
class ExprWithAlignmentToCStyleCastConfiguration extends DataFlow::Configuration {
ExprWithAlignmentToCStyleCastConfiguration() {
this = "ExprWithAlignmentToCStyleCastConfiguration"
}
module ExprWithAlignmentToCStyleCastConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node source) { source.asExpr() instanceof ExprWithAlignment }

override predicate isSource(DataFlow::Node source) {
source.asExpr() instanceof ExprWithAlignment
}

override predicate isSink(DataFlow::Node sink) {
predicate isSink(DataFlow::Node sink) {
exists(CStyleCast cast |
cast.getUnderlyingType() instanceof PointerType and
cast.getUnconverted() = sink.asExpr()
)
}

override predicate isBarrierOut(DataFlow::Node node) {
predicate isBarrierOut(DataFlow::Node node) {
// the default interprocedural data-flow model flows through any array assignment expressions
// to the qualifier (array base or pointer dereferenced) instead of the individual element
// that the assignment modifies. this default behaviour causes false positives for any future
@@ -169,12 +161,15 @@ class ExprWithAlignmentToCStyleCastConfiguration extends DataFlow::Configuration
}
}

module ExprWithAlignmentToCStyleCastFlow = DataFlow::Global<ExprWithAlignmentToCStyleCastConfig>;

from
DataFlow::PathNode source, DataFlow::PathNode sink, ExprWithAlignment expr, CStyleCast cast,
ExprWithAlignmentToCStyleCastFlow::PathNode source,
ExprWithAlignmentToCStyleCastFlow::PathNode sink, ExprWithAlignment expr, CStyleCast cast,
Type toBaseType, int alignmentFrom, int alignmentTo
where
not isExcluded(cast, Pointers3Package::doNotCastPointerToMoreStrictlyAlignedPointerTypeQuery()) and
any(ExprWithAlignmentToCStyleCastConfiguration config).hasFlowPath(source, sink) and
ExprWithAlignmentToCStyleCastFlow::flowPath(source, sink) and
source.getNode().asExpr() = expr and
sink.getNode().asExpr() = cast.getUnconverted() and
toBaseType = cast.getActualType().(PointerType).getBaseType() and
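
A final point about this file: the old version needed a second copy of the data-flow library (DataFlow2::Configuration, via the now-removed import codingstandards.cpp.dataflow.DataFlow2) because the results of the helper configuration feed into the definitions used by the main configuration. With parameterized modules, every DataFlow::Global<...> instantiation is independent, so both flow computations now sit behind a single import: the helper consulted through flowTo, the main one driving the alert through flowPath. Below is a hedged, self-contained sketch of that two-configuration shape; names, sources, sinks, and the import path are illustrative assumptions, not the query's real logic.

/**
 * Illustrative sketch: two independent flow computations in one query,
 * with no DataFlow2 import (not part of this commit).
 * @kind path-problem
 * @id cpp/sketch/two-flow-computations
 */

import cpp
// Assumed import path for this sketch; the commit's queries import
// codingstandards.cpp.dataflow.DataFlow instead.
import semmle.code.cpp.dataflow.new.DataFlow
import MainFlow::PathGraph

// Helper configuration, playing the role the DataFlow2-based configuration
// played in the old version of the alignment query.
module AddressTakenConfig implements DataFlow::ConfigSig {
  predicate isSource(DataFlow::Node source) { source.asExpr() instanceof AddressOfExpr }

  predicate isSink(DataFlow::Node sink) { sink.asExpr() instanceof VariableAccess }
}

module AddressTakenFlow = DataFlow::Global<AddressTakenConfig>;

// Main configuration; its source definition consults the helper flow, much as
// DefaultAlignedPointerExpr consults the helper flow's flowTo predicate above.
module MainConfig implements DataFlow::ConfigSig {
  predicate isSource(DataFlow::Node source) {
    source.asExpr() instanceof VariableAccess and
    not AddressTakenFlow::flowTo(source)
  }

  predicate isSink(DataFlow::Node sink) { sink.asExpr() = any(CStyleCast c).getUnconverted() }
}

module MainFlow = DataFlow::Global<MainConfig>;

from MainFlow::PathNode source, MainFlow::PathNode sink
where MainFlow::flowPath(source, sink)
select sink.getNode(), source, sink, "Variable access not reached by the helper flow is cast (sketch only)."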
