Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions internal/server/spanner/client.go
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ type SpannerClient interface {
SearchNodes(ctx context.Context, query string, types []string) ([]*SearchNode, error)
ResolveByID(ctx context.Context, nodes []string, in, out string) (map[string][]string, error)
Sparql(ctx context.Context, nodes []types.Node, queries []*types.Query, opts *types.QueryOptions) ([][]string, error)
GetVariableMetadata(ctx context.Context, ids []string) (map[string][]*VariableMetadata, error)
Id() string
Start()
Close()
Expand Down
87 changes: 87 additions & 0 deletions internal/server/spanner/golden/query/get_variable_metadata.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,87 @@
{
"Count_Household_FamilyHousehold": [
{
"VariableMeasured": "Count_Household_FamilyHousehold",
"ImportName": "CensusACS5YearSurvey",
"FacetId": "1457828370",
"ObservationPeriod": "",
"MeasurementMethod": "CensusACS5yrSurvey",
"Unit": "",
"ScalingFactor": "",
"IsDcAggregate": false,
"TotalObservations": 2547106,
"ObservedPlaces": 220438,
"MinDate": "2011",
"MaxDate": "2023",
"PlaceTypes": [
"AdministrativeArea1",
"AdministrativeArea2",
"AdministrativeArea4",
"AdministrativeArea5",
"Borough",
"CensusCoreBasedStatisticalArea",
"CensusCountyDivision",
"CensusDivision",
"CensusTract",
"CensusZipCodeTabulationArea",
"City",
"CongressionalDistrict",
"Country",
"County",
"ElementarySchoolDistrict",
"HighSchoolDistrict",
"Neighborhood",
"Place",
"ProvisionalNode",
"SchoolDistrict",
"State",
"StateComponent",
"Town",
"Village"
]
}
],
"Count_Household_HasComputer": [
{
"VariableMeasured": "Count_Household_HasComputer",
"ImportName": "CensusACS5YearSurvey",
"FacetId": "1457828370",
"ObservationPeriod": "",
"MeasurementMethod": "CensusACS5yrSurvey",
"Unit": "",
"ScalingFactor": "",
"IsDcAggregate": false,
"TotalObservations": 2805865,
"ObservedPlaces": 511517,
"MinDate": "2017",
"MaxDate": "2023",
"PlaceTypes": [
"AdministrativeArea1",
"AdministrativeArea2",
"AdministrativeArea4",
"AdministrativeArea5",
"Borough",
"CensusBlockGroup",
"CensusCoreBasedStatisticalArea",
"CensusCountyDivision",
"CensusDivision",
"CensusTract",
"CensusZipCodeTabulationArea",
"City",
"CongressionalDistrict",
"Country",
"County",
"ElementarySchoolDistrict",
"HighSchoolDistrict",
"Neighborhood",
"Place",
"ProvisionalNode",
"SchoolDistrict",
"State",
"StateComponent",
"Town",
"Village"
]
}
]
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
SELECT
variable_measured,
import_name,
facet_id,
observation_period,
measurement_method,
unit,
scaling_factor,
is_dc_aggregate,
total_observations,
observed_places,
min_date,
max_date,
place_types
FROM
VariableMetadata
WHERE
variable_measured IN ('Count_Household_FamilyHousehold','Count_Household_HasComputer')
12 changes: 12 additions & 0 deletions internal/server/spanner/golden/query_builder_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -125,6 +125,18 @@ func TestSparqlQuery(t *testing.T) {
}
}

// TestGetVariableMetadataQuery checks the generated variable-metadata SQL
// against the ".sql" golden file for each test case.
func TestGetVariableMetadataQuery(t *testing.T) {
	t.Parallel()

	for _, tc := range variableMetadataTestCases {
		sqlGolden := tc.golden + ".sql"

		runQueryBuilderGoldenTest(t, sqlGolden, func(ctx context.Context) (interface{}, error) {
			return spanner.GetVariableMetadataQuery(tc.ids), nil
		})
	}
}

// runQueryBuilderGoldenTest is a helper function that performs the golden file validation.
func runQueryBuilderGoldenTest(t *testing.T, goldenFile string, fn goldenTestFunc) {
t.Helper()
Expand Down
10 changes: 10 additions & 0 deletions internal/server/spanner/golden/query_cases_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -410,3 +410,13 @@ var sparqlTestCases = []struct {
golden: "sparql_dcid_california",
},
}

// variableMetadataTestCases drives both the query-builder and the query
// golden tests for variable metadata: ids are the variable DCIDs queried,
// golden is the base name of the expected golden file (".sql" / ".json"
// suffixes are appended by the individual tests).
var variableMetadataTestCases = []struct {
	ids    []string
	golden string
}{
	{
		ids:    []string{"Count_Household_FamilyHousehold", "Count_Household_HasComputer"},
		golden: "get_variable_metadata",
	},
}
17 changes: 17 additions & 0 deletions internal/server/spanner/golden/query_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -190,6 +190,23 @@ func TestSparql(t *testing.T) {
}
}

// TestGetVariableMetadata runs GetVariableMetadata against a test Spanner
// client and compares the result with the ".json" golden file per case.
// The test is a no-op when no test client is available.
func TestGetVariableMetadata(t *testing.T) {
	client := test.NewSpannerClient()
	if client == nil {
		return
	}

	t.Parallel()

	for _, tc := range variableMetadataTestCases {
		jsonGolden := tc.golden + ".json"

		runQueryGoldenTest(t, jsonGolden, func(ctx context.Context) (interface{}, error) {
			return client.GetVariableMetadata(ctx, tc.ids)
		})
	}
}

// runQueryGoldenTest is a helper function that performs the golden file validation.
func runQueryGoldenTest(t *testing.T, goldenFile string, fn goldenTestFunc) {
t.Helper()
Expand Down
17 changes: 17 additions & 0 deletions internal/server/spanner/model.go
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,23 @@ type ResolutionCandidate struct {
Candidate string `spanner:"candidate"`
}

// VariableMetadata struct represents a single row in the VariableMetadata table.
// Field tags map struct fields to Spanner column names; see the golden file
// get_variable_metadata.json for example values.
type VariableMetadata struct {
	// VariableMeasured is the statistical variable DCID this row describes.
	VariableMeasured string `spanner:"variable_measured"`
	ImportName       string `spanner:"import_name"`
	FacetId          string `spanner:"facet_id"`
	// ObservationPeriod, Unit and ScalingFactor may be empty strings when
	// not set for the facet (see golden data).
	ObservationPeriod string `spanner:"observation_period"`
	MeasurementMethod string `spanner:"measurement_method"`
	Unit              string `spanner:"unit"`
	ScalingFactor     string `spanner:"scaling_factor"`
	IsDcAggregate     bool   `spanner:"is_dc_aggregate"`
	// TotalObservations and ObservedPlaces are aggregate counts for this facet.
	TotalObservations int64  `spanner:"total_observations"`
	ObservedPlaces    int64  `spanner:"observed_places"`
	// MinDate/MaxDate bound the observation dates (e.g. "2011", "2023").
	MinDate string `spanner:"min_date"`
	MaxDate string `spanner:"max_date"`
	// PlaceTypes lists the types of places with observations for this facet.
	PlaceTypes []string `spanner:"place_types"`
}

// SpannerConfig struct to hold the YAML configuration to a spanner database.
type SpannerConfig struct {
Project string `yaml:"project"`
Expand Down
25 changes: 25 additions & 0 deletions internal/server/spanner/query.go
Original file line number Diff line number Diff line change
Expand Up @@ -214,6 +214,31 @@ func (sc *spannerDatabaseClient) Sparql(ctx context.Context, nodes []types.Node,
return sc.queryDynamic(ctx, *query)
}

// GetVariableMetadata fetches VariableMetadata rows for the given variable
// DCIDs and groups them by variable. A variable may map to multiple rows
// (one per facet). Returns an empty, non-nil map when ids is empty; returns
// a nil map and the error if the underlying query fails.
func (sc *spannerDatabaseClient) GetVariableMetadata(ctx context.Context, ids []string) (map[string][]*VariableMetadata, error) {
	// Pre-size for the common case of at least one row group per id.
	idToMetadata := make(map[string][]*VariableMetadata, len(ids))
	if len(ids) == 0 {
		return idToMetadata, nil
	}

	err := sc.queryStructs(
		ctx,
		*GetVariableMetadataQuery(ids),
		func() interface{} {
			// Fresh destination struct for each scanned row.
			return &VariableMetadata{}
		},
		func(rowStruct interface{}) {
			// Group each row under its measured variable.
			varMeta := rowStruct.(*VariableMetadata)
			id := varMeta.VariableMeasured
			idToMetadata[id] = append(idToMetadata[id], varMeta)
		},
	)
	if err != nil {
		return nil, err
	}

	return idToMetadata, nil
}

// fetchAndUpdateTimestamp queries Spanner and updates the timestamp.
func (sc *spannerDatabaseClient) fetchAndUpdateTimestamp(ctx context.Context) error {
iter := sc.client.Single().Query(ctx, *GetCompletionTimestampQuery())
Expand Down
9 changes: 9 additions & 0 deletions internal/server/spanner/query_builder.go
Original file line number Diff line number Diff line change
Expand Up @@ -340,6 +340,15 @@ func SparqlQuery(nodes []types.Node, queries []*types.Query, opts *types.QueryOp
}, nil
}

// GetVariableMetadataQuery builds the Spanner statement that selects
// VariableMetadata rows filtered to the given variable DCIDs.
func GetVariableMetadataQuery(ids []string) *spanner.Statement {
	// getIdStatement renders the id-filter fragment (an IN clause per the
	// golden SQL) plus any bound parameters it needs.
	getIds, params := getIdStatement(ids)

	return &spanner.Statement{
		// statements.getVariableMetadata ends with "variable_measured %s",
		// so the fragment completes the WHERE clause.
		SQL:    fmt.Sprintf(statements.getVariableMetadata, getIds),
		Params: params,
	}
}

// generateSafeAliasMap generates a map of safe aliases for SPARQL queries.
func generateSafeAliasMap(queries []*types.Query) map[string]string {
safeAliasMap := make(map[string]string)
Expand Down
20 changes: 20 additions & 0 deletions internal/server/spanner/statements.go
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,8 @@ var statements = struct {
nodeFilter string
// Generic triple pattern.
triple string
// Get variable metadata.
getVariableMetadata string
}{
getCompletionTimestamp: ` SELECT
CompletionTimestamp
Expand Down Expand Up @@ -292,4 +294,22 @@ var statements = struct {
WHERE
%[1]s.subject_id IN UNNEST(@%[1]s)`,
triple: `(%[1]s:Node%[2]s)-[:Edge {predicate: @predicate%[3]d}]->(%[4]s:Node%[5]s)`,
getVariableMetadata: ` SELECT
variable_measured,
import_name,
facet_id,
observation_period,
measurement_method,
unit,
scaling_factor,
is_dc_aggregate,
total_observations,
observed_places,
min_date,
max_date,
place_types
FROM
VariableMetadata
WHERE
variable_measured %s`,
}
Loading