diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 014c2e1..b8ca628 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -5,3 +5,7 @@ updates: directory: / schedule: interval: daily + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" diff --git a/.github/workflows/golangci-lint.yml b/.github/workflows/golangci-lint.yml new file mode 100644 index 0000000..11cd4b7 --- /dev/null +++ b/.github/workflows/golangci-lint.yml @@ -0,0 +1,25 @@ +name: golangci-lint +on: + push: + branches: + - master + - main + pull_request: + +permissions: + contents: read + +jobs: + golangci: + name: lint + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-go@v4 + with: + go-version: "1.21" + cache: false + - name: golangci-lint + uses: golangci/golangci-lint-action@v3 + with: + version: v1.55.1 diff --git a/.golangci.yml b/.golangci.yml new file mode 100755 index 0000000..69ce224 --- /dev/null +++ b/.golangci.yml @@ -0,0 +1,61 @@ +service: + golangci-lint-version: v1.55.1 + +run: + tests: false + skip-dirs: + - allure + - mocks + skip-files: + - ".*easyjson\\.go$" +output: + print-issued-lines: false + +linters-settings: + lll: + line-length: 140 + gocritic: + disabled-checks: + - hugeParam + enabled-tags: + - performance + - style + - experimental + gosec: + excludes: + - G204 + - G306 + stylecheck: + checks: ["all", "-ST1003", "-ST1016", "-ST1020", "-ST1021", "-ST1022"] + revive: + rules: + # Should be enabled after fixing underscore package names. + - name: var-naming + disabled: true + +linters: + enable: + - bodyclose + - goconst + - gocritic + - gocyclo + - goimports + - revive + - goprintffuncname + - gosec + - gosimple + - govet + - ineffassign + - lll + - misspell + - nakedret + - nlreturn + - nolintlint + - rowserrcheck + - staticcheck + - stylecheck + - typecheck + - unconvert + - unparam + - unused + - gas diff --git a/Makefile b/Makefile index d4529dd..c280621 100644 --- a/Makefile +++ b/Makefile @@ -24,3 +24,6 @@ build: @build test: go test ./... 
+ +lint: + docker run --rm -v $(PWD):/app -w /app golangci/golangci-lint:v1.55.1 golangci-lint run -v \ No newline at end of file diff --git a/README-ru.md b/README-ru.md index d54baa0..8794419 100644 --- a/README-ru.md +++ b/README-ru.md @@ -10,7 +10,7 @@ Gonkey протестирует ваши сервисы, используя их - моки для имитации внешних сервисов - можно подключить к проекту как библиотеку и запускать вместе с юнит-тестами - запись результата тестов в виде отчета [Allure](http://allure.qatools.ru/) -- имеется [JSON-schema](#json-schema) для автодополнения и валидации YAML-файлов Gonkey +- имеется [JSON-schema](#json-schema) для автодополнения и валидации YAML-файлов Gonkey ## Содержание @@ -26,6 +26,7 @@ Gonkey протестирует ваши сервисы, используя их - [Из результатов предыдущего запроса](#из-результатов-предыдущего-запроса) - [Из результата текущего запроса](#из-результата-текущего-запроса) - [В переменных окружения или в env-файле](#в-переменных-окружения-или-в-env-файле) + - [В cases](#в-cases) - [Загрузка файлов](#загрузка-файлов) - [Фикстуры](#фикстуры) - [Удаление данных из таблиц](#удаление-данных-из-таблиц) @@ -50,6 +51,9 @@ Gonkey протестирует ваши сервисы, используя их - [Описание ответа на запрос в Базу данных](#описание-ответа-на-запрос-в-базу-данных) - [Параметризация при запросах в Базу данных](#параметризация-при-запросах-в-базу-данных) - [Игнорирование порядка записей в ответе на запрос в базу данных](#игнорирование-порядка-записей-в-ответе-на-запрос-в-базу-данных) +- [JSON-schema](#json-schema) + - [Настройка на IDE Jetbrains](#настройка-на-ide-jetbrains) + - [Настройка на IDE VSCode](#настройка-на-ide-vscode) ## Использование консольной утилиты @@ -691,6 +695,7 @@ tables: Для хранилища Aerospike также поддерживается заливка тестовых данных. Для этого важно не забыть при запуске gonkey как CLI-приложение использовать флаг `-db-type aerospike`, а при использовании в качестве библиотеки в конфигурации раннера: `DbType: fixtures.Aerospike`. Формат файлов с фикстурами для Aerospike отличается, но смысл остаётся прежним: + ```yaml sets: set1: @@ -714,6 +719,7 @@ sets: ``` Также поддерживаются шаблоны: + ```yaml templates: base_tmpl: @@ -1620,6 +1626,7 @@ Example: ### Формат описания запросов Для описания запросов к БД в тесте, можно использовать legacy-формат: + ```yaml - name: my test ... @@ -1631,6 +1638,7 @@ Example: ``` Но, более предпочтительным будет следующий формат: + ```yaml - name: my test ... @@ -1759,25 +1767,25 @@ Example: ``` ## JSON-schema + Для упрощения написания тестов на Gonkey, используйте [файл со схемой](https://raw.githubusercontent.com/lamoda/gonkey/master/gonkey.json) Он добавляет in-line документацию и авто-дополнение в IDE которые это поддерживают. - Пример работы в IDE Jetbrains: ![Example Jetbrains](https://i.imgur.com/oYuPuR3.gif) Пример работы в IDE VSCode: ![Example Jetbrains](https://i.imgur.com/hBIGjP9.gif) - ### Настройка на IDE Jetbrains -Скачайте [файл со схемой](https://raw.githubusercontent.com/lamoda/gonkey/master/gonkey.json). + +Скачайте [файл со схемой](https://raw.githubusercontent.com/lamoda/gonkey/master/gonkey.json). В настройках Languages & Frameworks > Schemas and DTDs > JSON Schema Mappings ![Jetbrains IDE Settings](https://i.imgur.com/xkO22by.png) -Добавьте новую схему +Добавьте новую схему ![Add schema](https://i.imgur.com/XHw14GJ.png) @@ -1789,7 +1797,7 @@ Example: ![Mapping](https://i.imgur.com/iFjm0Ld.png) -Выберите то что удобно для вас. +Выберите то что удобно для вас. 
![Mapping pattern](https://i.imgur.com/WIK6sZW.png) @@ -1803,7 +1811,7 @@ Example: ### Настройка на IDE VSCode -Для начала вам нужно установить плагин для работы с YAML +Для начала вам нужно установить плагин для работы с YAML Откройте меню Code(File)->Preferences->Extensions ![VSCode Preferences](https://i.imgur.com/X7bk5Kh.png) @@ -1813,15 +1821,16 @@ Example: ![Yaml Extension](https://i.imgur.com/57onioF.png) Откройте меню Code(File)->Preferences->Settings -Наберите YAML:Schemas и нажмите на ссылку _Edit in settings.json_ +Наберите YAML:Schemas и нажмите на ссылку *Edit in settings.json* ![Yaml link](https://i.imgur.com/IEwxWyG.png) -Добавьте маппинг файла и путь к схеме +Добавьте маппинг файла и путь к схеме + ``` "yaml.schemas": { "C:\\Users\\Leo\\gonkey.json": ["*.gonkey.yaml"] } ``` -В примере выше, схема из файла C:\Users\Leo\gonkey.json будет применяться ко всем файлам +В примере выше, схема из файла C:\Users\Leo\gonkey.json будет применяться ко всем файлам с расширением .gonkey.yaml diff --git a/README.md b/README.md index 32333d8..34ae68a 100644 --- a/README.md +++ b/README.md @@ -28,6 +28,7 @@ Capabilities: - [From the response of the previous test](#from-the-response-of-the-previous-test) - [From the response of currently running test](#from-the-response-of-currently-running-test) - [From environment variables or from env-file](#from-environment-variables-or-from-env-file) + - [From cases](#from-cases) - [Files uploading](#files-uploading) - [Fixtures](#fixtures) - [Deleting data from tables](#deleting-data-from-tables) @@ -52,6 +53,9 @@ Capabilities: - [Definition of DB request response](#definition-of-db-request-response) - [DB request parameterization](#db-request-parameterization) - [Ignoring ordering in DB response](#ignoring-ordering-in-db-response) +- [JSON-schema](#json-schema) + - [Setup in Jetbrains IDE](#setup-in-jetbrains-ide) + - [Setup is VSCode IDE](#setup-is-vscode-ide) ## Using the CLI @@ -133,7 +137,7 @@ func TestFuncCases(t *testing.T) { } ``` -Starts from version 1.18.3, externally written fixture loader may be used for loading test data, if gonkey used as a library. +Starts from version 1.18.3, externally written fixture loader may be used for loading test data, if gonkey used as a library. To start using the custom loader, you need to import the custom module, that contains implementation of fixtures.Loader interface. Example with a redis fixtures loader: @@ -746,11 +750,11 @@ While using gonkey as a CLI application do not forget the flag `-db-type redis`. List of supported data structures: - - Plain key/value - - Set - - Hash - - List - - ZSet (sorted set) +- Plain key/value +- Set +- Hash +- List +- ZSet (sorted set) Fixture file example: @@ -1622,6 +1626,7 @@ The response can contain several records. Those records are compared to the expe ### Test Format You can use legacy style for run sql queries, like this: + ```yaml - name: my test ... @@ -1633,6 +1638,7 @@ You can use legacy style for run sql queries, like this: ``` But, for now, already acceptable style is: + ```yaml - name: my test ... @@ -1760,22 +1766,21 @@ Example: ``` ## JSON-schema + Use [file with schema](https://raw.githubusercontent.com/lamoda/gonkey/master/gonkey.json) to add syntax highlight to your favourite IDE and write Gonkey tests more easily. It adds in-line documentation and auto-completion to any IDE that supports it. 
- - Example in Jetbrains IDE: ![Example Jetbrains](https://i.imgur.com/oYuPuR3.gif) Example in VSCode IDE: ![Example Jetbrains](https://i.imgur.com/hBIGjP9.gif) - ### Setup in Jetbrains IDE + Download [file with schema](https://raw.githubusercontent.com/lamoda/gonkey/master/gonkey.json). -Open preferences File->Preferences +Open preferences File->Preferences In Languages & Frameworks > Schemas and DTDs > JSON Schema Mappings ![Jetbrains IDE Settings](https://i.imgur.com/xkO22by.png) @@ -1817,14 +1822,15 @@ Look for YAML and install YAML Language Support by Red Hat Open Settings by going to Code(File)->Preferences->Settings -Open Schema Settings by typing YAML:Schemas and click on _Edit in settings.json_ +Open Schema Settings by typing YAML:Schemas and click on *Edit in settings.json* ![Yaml link](https://i.imgur.com/IEwxWyG.png) Add file match to apply the JSON on YAML files. + ``` "yaml.schemas": { "C:\\Users\\Leo\\gonkey.json": ["*.gonkey.yaml"] } ``` -In the example above the JSON schema stored in C:\Users\Leo\gonkey.json will be applied on all the files that ends with .gonkey.yaml \ No newline at end of file +In the example above the JSON schema stored in C:\Users\Leo\gonkey.json will be applied on all the files that ends with .gonkey.yaml diff --git a/checker/response_body/response_body.go b/checker/response_body/response_body.go index f347a24..8a38fce 100644 --- a/checker/response_body/response_body.go +++ b/checker/response_body/response_body.go @@ -32,13 +32,14 @@ func (c *ResponseBodyChecker) Check(t models.TestInterface, result *models.Resul errs = append(errs, checkErrs...) } else { // compare bodies as leaf nodes - errs = append(errs, compare.Compare(expectedBody, result.ResponseBody, compare.CompareParams{})...) + errs = append(errs, compare.Compare(expectedBody, result.ResponseBody, compare.Params{})...) 
} } if !foundResponse { err := fmt.Errorf("server responded with status %d", result.ResponseStatusCode) errs = append(errs, err) } + return errs, nil } @@ -60,7 +61,7 @@ func compareJsonBody(t models.TestInterface, expectedBody string, result *models return []error{errors.New("could not parse response")}, nil } - params := compare.CompareParams{ + params := compare.Params{ IgnoreValues: !t.NeedsCheckingValues(), IgnoreArraysOrdering: t.IgnoreArraysOrdering(), DisallowExtraFields: t.DisallowExtraFields(), diff --git a/checker/response_db/response_db.go b/checker/response_db/response_db.go index 30ba3ea..d7108cb 100644 --- a/checker/response_db/response_db.go +++ b/checker/response_db/response_db.go @@ -80,19 +80,20 @@ func (c *ResponseDbChecker) check( // compare responses length if err := compareDbResponseLength(t.DbResponseJson(), actualDbResponse, t.DbQueryString()); err != nil { errors = append(errors, err) + return errors, nil } // compare responses as json lists - expectedItems, err := toJsonArray(t.DbResponseJson(), "expected", testName) + expectedItems, err := toJSONArray(t.DbResponseJson(), "expected", testName) if err != nil { return nil, err } - actualItems, err := toJsonArray(actualDbResponse, "actual", testName) + actualItems, err := toJSONArray(actualDbResponse, "actual", testName) if err != nil { return nil, err } - errs := compare.Compare(expectedItems, actualItems, compare.CompareParams{ + errs := compare.Compare(expectedItems, actualItems, compare.Params{ IgnoreArraysOrdering: ignoreOrdering, }) @@ -101,11 +102,11 @@ func (c *ResponseDbChecker) check( return errors, nil } -func toJsonArray(items []string, qual, testName string) ([]interface{}, error) { - var itemJSONs []interface{} +func toJSONArray(items []string, qual, testName string) ([]interface{}, error) { + itemJSONs := make([]interface{}, 0, len(items)) for i, row := range items { - var itemJson interface{} - if err := json.Unmarshal([]byte(row), &itemJson); err != nil { + var itemJSON interface{} + if err := json.Unmarshal([]byte(row), &itemJSON); err != nil { return nil, fmt.Errorf( "invalid JSON in the %s DB response for test %s:\n row #%d:\n %s\n error:\n%s", qual, @@ -115,8 +116,9 @@ func toJsonArray(items []string, qual, testName string) ([]interface{}, error) { err.Error(), ) } - itemJSONs = append(itemJSONs, itemJson) + itemJSONs = append(itemJSONs, itemJSON) } + return itemJSONs, nil } @@ -132,11 +134,11 @@ func compareDbResponseLength(expected, actual []string, query interface{}) error color.CyanString("%v", pretty.Compare(expected, actual)), ) } + return err } func newQuery(dbQuery string, db *sql.DB) ([]string, error) { - var dbResponse []string var jsonString string diff --git a/checker/response_header/response_header.go b/checker/response_header/response_header.go index 316d525..925ef06 100644 --- a/checker/response_header/response_header.go +++ b/checker/response_header/response_header.go @@ -28,11 +28,12 @@ func (c *ResponseHeaderChecker) Check(t models.TestInterface, result *models.Res actualValues, ok := result.ResponseHeaders[k] if !ok { errs = append(errs, fmt.Errorf("response does not include expected header %s", k)) + continue } found := false for _, actualValue := range actualValues { - e := compare.Compare(v, actualValue, compare.CompareParams{}) + e := compare.Compare(v, actualValue, compare.Params{}) if len(e) == 0 { found = true } diff --git a/cmd_runner/cmd_runner.go b/cmd_runner/cmd_runner.go index c5fce78..806eaf4 100644 --- a/cmd_runner/cmd_runner.go +++ b/cmd_runner/cmd_runner.go @@ 
-1,3 +1,4 @@ +//go:build !windows // +build !windows package cmd_runner @@ -14,7 +15,7 @@ import ( ) func CmdRun(scriptPath string, timeout int) error { - //by default timeout should be 3s + // by default timeout should be 3s if timeout <= 0 { timeout = 3 } diff --git a/compare/compare.go b/compare/compare.go index 613a019..4283e33 100644 --- a/compare/compare.go +++ b/compare/compare.go @@ -8,7 +8,7 @@ import ( "github.com/fatih/color" ) -type CompareParams struct { +type Params struct { IgnoreValues bool `json:"ignoreValues" yaml:"ignoreValues"` IgnoreArraysOrdering bool `json:"ignoreArraysOrdering" yaml:"ignoreArraysOrdering"` DisallowExtraFields bool `json:"disallowExtraFields" yaml:"disallowExtraFields"` @@ -23,18 +23,23 @@ const ( regex ) +const ( + arrayType = "array" + mapType = "map" +) + var regexExprRx = regexp.MustCompile(`^\$matchRegexp\((.+)\)$`) // Compare compares values as plain text // It can be compared several ways: -// - Pure values: should be equal -// - Regex: try to compile 'expected' as regex and match 'actual' with it +// - Pure values: should be equal +// - Regex: try to compile 'expected' as regex and match 'actual' with it // It activates on following syntax: $matchRegexp(%EXPECTED_VALUE%) -func Compare(expected, actual interface{}, params CompareParams) []error { +func Compare(expected, actual interface{}, params Params) []error { return compareBranch("$", expected, actual, ¶ms) } -func compareBranch(path string, expected, actual interface{}, params *CompareParams) []error { +func compareBranch(path string, expected, actual interface{}, params *Params) []error { expectedType := getType(expected) actualType := getType(actual) var errors []error @@ -42,6 +47,7 @@ func compareBranch(path string, expected, actual interface{}, params *ComparePar // compare types if leafMatchType(expected) != regex && expectedType != actualType { errors = append(errors, makeError(path, "types do not match", expectedType, actualType)) + return errors } @@ -51,12 +57,13 @@ func compareBranch(path string, expected, actual interface{}, params *ComparePar } // compare arrays - if actualType == "array" { + if actualType == arrayType { expectedArray := convertToArray(expected) actualArray := convertToArray(actual) if len(expectedArray) != len(actualArray) { errors = append(errors, makeError(path, "array lengths do not match", len(expectedArray), len(actualArray))) + return errors } @@ -76,12 +83,13 @@ func compareBranch(path string, expected, actual interface{}, params *ComparePar } // compare maps - if actualType == "map" { + if actualType == mapType { expectedRef := reflect.ValueOf(expected) actualRef := reflect.ValueOf(actual) if params.DisallowExtraFields && expectedRef.Len() != actualRef.Len() { errors = append(errors, makeError(path, "map lengths do not match", expectedRef.Len(), actualRef.Len())) + return errors } @@ -92,6 +100,7 @@ func compareBranch(path string, expected, actual interface{}, params *ComparePar if params.failFast { return errors } + continue } @@ -117,12 +126,14 @@ func getType(value interface{}) string { if value == nil { return "nil" } + rt := reflect.TypeOf(value) - if rt.Kind() == reflect.Slice || rt.Kind() == reflect.Array { + switch { + case rt.Kind() == reflect.Slice || rt.Kind() == reflect.Array: return "array" - } else if rt.Kind() == reflect.Map { + case rt.Kind() == reflect.Map: return "map" - } else { + default: return rt.String() } } @@ -132,7 +143,6 @@ func isScalarType(t string) bool { } func compareLeafs(path string, expected, actual interface{}) 
[]error { - var errors []error switch leafMatchType(expected) { @@ -150,7 +160,6 @@ func compareLeafs(path string, expected, actual interface{}) []error { } func comparePure(path string, expected, actual interface{}) (errors []error) { - if expected != actual { errors = append(errors, makeError(path, "values do not match", expected, actual)) } @@ -159,10 +168,10 @@ func comparePure(path string, expected, actual interface{}) (errors []error) { } func compareRegex(path string, expected, actual interface{}) (errors []error) { - regexExpr, ok := expected.(string) if !ok { errors = append(errors, makeError(path, "type mismatch", "string", reflect.TypeOf(expected))) + return errors } @@ -171,11 +180,13 @@ func compareRegex(path string, expected, actual interface{}) (errors []error) { rx, err := regexp.Compile(retrieveRegexStr(regexExpr)) if err != nil { errors = append(errors, makeError(path, "can not compile regex", nil, "error")) + return errors } if !rx.MatchString(value) { errors = append(errors, makeError(path, "value does not match regex", expected, actual)) + return errors } @@ -183,7 +194,6 @@ func compareRegex(path string, expected, actual interface{}) (errors []error) { } func retrieveRegexStr(expr string) string { - if matches := regexExprRx.FindStringSubmatch(expr); matches != nil { return matches[1] } @@ -221,11 +231,12 @@ func convertToArray(array interface{}) []interface{} { for i := 0; i < ref.Len(); i++ { interfaceSlice = append(interfaceSlice, ref.Index(i).Interface()) } + return interfaceSlice } // For every elem in "expected" try to find elem in "actual". Returns arrays without matching. -func getUnmatchedArrays(expected, actual []interface{}, params *CompareParams) ([]interface{}, []interface{}) { +func getUnmatchedArrays(expected, actual []interface{}, params *Params) (expectedUnmatched, actualUnmatched []interface{}) { expectedError := make([]interface{}, 0) failfastParams := *params @@ -242,6 +253,7 @@ func getUnmatchedArrays(expected, actual []interface{}, params *CompareParams) ( actual[i] = actual[len(actual)-1] } actual = actual[:len(actual)-1] + break } } diff --git a/compare/compare_query.go b/compare/compare_query.go index 1422535..00794cc 100644 --- a/compare/compare_query.go +++ b/compare/compare_query.go @@ -5,19 +5,20 @@ import ( "regexp" ) -func CompareQuery(expected, actual []string) (bool, error) { +func Query(expected, actual []string) (bool, error) { if len(expected) != len(actual) { return false, fmt.Errorf("expected and actual query params have different lengths") } remove := func(array []string, i int) []string { array[i] = array[len(array)-1] + return array[:len(array)-1] } - var expectedCopy = make([]string, len(expected)) + expectedCopy := make([]string, len(expected)) copy(expectedCopy, expected) - var actualCopy = make([]string, len(actual)) + actualCopy := make([]string, len(actual)) copy(actualCopy, actual) for len(expectedCopy) != 0 { @@ -39,6 +40,7 @@ func CompareQuery(expected, actual []string) (bool, error) { if found { expectedCopy = remove(expectedCopy, i) actualCopy = remove(actualCopy, j) + break } } diff --git a/compare/compare_query_test.go b/compare/compare_query_test.go index b7e8b17..9e2d818 100644 --- a/compare/compare_query_test.go +++ b/compare/compare_query_test.go @@ -37,7 +37,7 @@ func TestCompareQuery(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - ok, err := CompareQuery(tt.expectedQuery, tt.actualQuery) + ok, err := Query(tt.expectedQuery, tt.actualQuery) if err != nil { t.Error(err) } diff 
--git a/compare/compare_test.go b/compare/compare_test.go index f41042f..1ccb24a 100644 --- a/compare/compare_test.go +++ b/compare/compare_test.go @@ -20,7 +20,7 @@ func makeErrorString(path, msg string, expected, actual interface{}) string { } func TestCompareNils(t *testing.T) { - errors := Compare(nil, nil, CompareParams{}) + errors := Compare(nil, nil, Params{}) if len(errors) != 0 { t.Error( "must return no errors", @@ -31,7 +31,7 @@ func TestCompareNils(t *testing.T) { } func TestCompareNilWithNonNil(t *testing.T) { - errors := Compare("", nil, CompareParams{}) + errors := Compare("", nil, Params{}) if errors[0].Error() != makeErrorString("$", "types do not match", "string", "nil") { t.Error( "must return one error", @@ -42,7 +42,7 @@ func TestCompareNilWithNonNil(t *testing.T) { } func TestCompareEqualStrings(t *testing.T) { - errors := Compare("1", "1", CompareParams{}) + errors := Compare("1", "1", Params{}) if len(errors) != 0 { t.Error( "must return no errors", @@ -53,7 +53,7 @@ func TestCompareEqualStrings(t *testing.T) { } func TestCompareDifferStrings(t *testing.T) { - errors := Compare("1", "2", CompareParams{}) + errors := Compare("1", "2", Params{}) if errors[0].Error() != makeErrorString("$", "values do not match", 1, 2) { t.Error( "must return one error", @@ -64,7 +64,7 @@ func TestCompareDifferStrings(t *testing.T) { } func TestCompareEqualIntegers(t *testing.T) { - errors := Compare(1, 1, CompareParams{}) + errors := Compare(1, 1, Params{}) if len(errors) != 0 { t.Error( "must return no errors", @@ -75,7 +75,7 @@ func TestCompareEqualIntegers(t *testing.T) { } func TestCompareDifferIntegers(t *testing.T) { - errors := Compare(1, 2, CompareParams{}) + errors := Compare(1, 2, Params{}) if errors[0].Error() != makeErrorString("$", "values do not match", 1, 2) { t.Error( "must return one error", @@ -86,7 +86,7 @@ func TestCompareDifferIntegers(t *testing.T) { } func TestCheckRegexMach(t *testing.T) { - errors := Compare("$matchRegexp(x.+z)", "xyyyz", CompareParams{}) + errors := Compare("$matchRegexp(x.+z)", "xyyyz", Params{}) if len(errors) != 0 { t.Error( "must return no errors", @@ -97,7 +97,7 @@ func TestCheckRegexMach(t *testing.T) { } func TestCheckRegexNotMach(t *testing.T) { - errors := Compare("$matchRegexp(x.+z)", "ayyyb", CompareParams{}) + errors := Compare("$matchRegexp(x.+z)", "ayyyb", Params{}) if errors[0].Error() != makeErrorString("$", "value does not match regex", "$matchRegexp(x.+z)", "ayyyb") { t.Error( @@ -109,7 +109,7 @@ func TestCheckRegexNotMach(t *testing.T) { } func TestCheckRegexCantCompile(t *testing.T) { - errors := Compare("$matchRegexp((?x))", "2", CompareParams{}) + errors := Compare("$matchRegexp((?x))", "2", Params{}) if errors[0].Error() != makeErrorString("$", "can not compile regex", nil, "error") { t.Error( "must return one error", @@ -122,7 +122,7 @@ func TestCheckRegexCantCompile(t *testing.T) { func TestCompareEqualArrays(t *testing.T) { array1 := []string{"1", "2"} array2 := []string{"1", "2"} - errors := Compare(array1, array2, CompareParams{}) + errors := Compare(array1, array2, Params{}) if len(errors) != 0 { t.Error( "must return no errors", @@ -135,7 +135,7 @@ func TestCompareEqualArrays(t *testing.T) { func TestCompareEqualArraysWithDifferentElementsOrder(t *testing.T) { array1 := []string{"1", "2"} array2 := []string{"2", "1"} - errors := Compare(array1, array2, CompareParams{IgnoreArraysOrdering: true}) + errors := Compare(array1, array2, Params{IgnoreArraysOrdering: true}) if len(errors) != 0 { t.Error( "must return no 
errors", @@ -148,7 +148,7 @@ func TestCompareEqualArraysWithDifferentElementsOrder(t *testing.T) { func TestCompareArraysDifferLengths(t *testing.T) { array1 := []string{"1", "2", "3"} array2 := []string{"1", "2"} - errors := Compare(array1, array2, CompareParams{}) + errors := Compare(array1, array2, Params{}) if errors[0].Error() != makeErrorString("$", "array lengths do not match", 3, 2) { t.Error( "must return one error", @@ -161,7 +161,7 @@ func TestCompareArraysDifferLengths(t *testing.T) { func TestCompareDifferArrays(t *testing.T) { array1 := []string{"1", "2"} array2 := []string{"1", "3"} - errors := Compare(array1, array2, CompareParams{}) + errors := Compare(array1, array2, Params{}) if errors[0].Error() != makeErrorString("$[1]", "values do not match", 2, 3) { t.Error( "must return one error", @@ -174,14 +174,14 @@ func TestCompareDifferArrays(t *testing.T) { func TestCompareArraysFewErrors(t *testing.T) { array1 := []string{"1", "2", "3"} array2 := []string{"1", "3", "4"} - errors := Compare(array1, array2, CompareParams{}) + errors := Compare(array1, array2, Params{}) assert.Len(t, errors, 2) } func TestCompareNestedEqualArrays(t *testing.T) { array1 := [][]string{{"1", "2"}, {"3", "4"}} array2 := [][]string{{"1", "2"}, {"3", "4"}} - errors := Compare(array1, array2, CompareParams{}) + errors := Compare(array1, array2, Params{}) if len(errors) != 0 { t.Error( "must return no errors", @@ -194,7 +194,7 @@ func TestCompareNestedEqualArrays(t *testing.T) { func TestCompareNestedDifferArrays(t *testing.T) { array1 := [][]string{{"1", "2"}, {"3", "4"}} array2 := [][]string{{"1", "2"}, {"3", "5"}} - errors := Compare(array1, array2, CompareParams{}) + errors := Compare(array1, array2, Params{}) if errors[0].Error() != makeErrorString("$[1][1]", "values do not match", 4, 5) { t.Error( "must return one error", @@ -205,11 +205,10 @@ func TestCompareNestedDifferArrays(t *testing.T) { } func TestCompareArraysWithRegex(t *testing.T) { - arrayExpected := []string{"2", "$matchRegexp(x.+z)"} arrayActual := []string{"2", "xyyyz"} - errors := Compare(arrayExpected, arrayActual, CompareParams{}) + errors := Compare(arrayExpected, arrayActual, Params{}) if len(errors) != 0 { t.Error( "must return no errors", @@ -220,11 +219,10 @@ func TestCompareArraysWithRegex(t *testing.T) { } func TestCompareArraysWithRegexMixedTypes(t *testing.T) { - arrayExpected := []string{"2", "$matchRegexp([0-9]+)"} arrayActual := []interface{}{"2", 123} - errors := Compare(arrayExpected, arrayActual, CompareParams{}) + errors := Compare(arrayExpected, arrayActual, Params{}) if len(errors) != 0 { t.Error( "must return no errors", @@ -235,11 +233,10 @@ func TestCompareArraysWithRegexMixedTypes(t *testing.T) { } func TestCompareArraysWithRegexNotMatch(t *testing.T) { - arrayExpected := []string{"2", "$matchRegexp(x.+z)"} arrayActual := []string{"2", "ayyyb"} - errors := Compare(arrayExpected, arrayActual, CompareParams{}) + errors := Compare(arrayExpected, arrayActual, Params{}) expectedErrors := makeErrorString("$[1]", "value does not match regex", "$matchRegexp(x.+z)", "ayyyb") if errors[0].Error() != expectedErrors { @@ -254,7 +251,7 @@ func TestCompareArraysWithRegexNotMatch(t *testing.T) { func TestCompareEqualMaps(t *testing.T) { array1 := map[string]string{"a": "1", "b": "2"} array2 := map[string]string{"a": "1", "b": "2"} - errors := Compare(array1, array2, CompareParams{}) + errors := Compare(array1, array2, Params{}) if len(errors) != 0 { t.Error( "must return no errors", @@ -263,11 +260,12 @@ func 
TestCompareEqualMaps(t *testing.T) { t.Fail() } } + func TestCompareMapsWithRegex(t *testing.T) { mapExpected := map[string]string{"a": "1", "b": "$matchRegexp(x.+z)"} mapActual := map[string]string{"a": "1", "b": "xyyyz"} - errors := Compare(mapExpected, mapActual, CompareParams{}) + errors := Compare(mapExpected, mapActual, Params{}) if len(errors) != 0 { t.Error( "must return no errors", @@ -281,7 +279,7 @@ func TestCompareMapsWithRegexNotMatch(t *testing.T) { mapExpected := map[string]string{"a": "1", "b": "$matchRegexp(x.+z)"} mapActual := map[string]string{"a": "1", "b": "ayyyb"} - errors := Compare(mapExpected, mapActual, CompareParams{}) + errors := Compare(mapExpected, mapActual, Params{}) expectedErrors := makeErrorString("$.b", "value does not match regex", "$matchRegexp(x.+z)", "ayyyb") if errors[0].Error() != expectedErrors { @@ -296,7 +294,7 @@ func TestCompareMapsWithRegexNotMatch(t *testing.T) { func TestCompareEqualMapsWithExtraFields(t *testing.T) { array1 := map[string]string{"a": "1", "b": "2"} array2 := map[string]string{"a": "1", "b": "2", "c": "3"} - errors := Compare(array1, array2, CompareParams{}) + errors := Compare(array1, array2, Params{}) if len(errors) != 0 { t.Error( "must return no errors", @@ -309,7 +307,7 @@ func TestCompareEqualMapsWithExtraFields(t *testing.T) { func TestCompareEqualMapsWithExtraFieldsCheckingEnabled(t *testing.T) { array1 := map[string]string{"a": "1", "b": "2"} array2 := map[string]string{"a": "1", "b": "2", "c": "3"} - errors := Compare(array1, array2, CompareParams{DisallowExtraFields: true}) + errors := Compare(array1, array2, Params{DisallowExtraFields: true}) if errors[0].Error() != makeErrorString("$", "map lengths do not match", 2, 3) { t.Error( "must return one error", @@ -322,7 +320,7 @@ func TestCompareEqualMapsWithExtraFieldsCheckingEnabled(t *testing.T) { func TestCompareEqualMapsWithDifferentKeysOrder(t *testing.T) { array1 := map[string]string{"a": "1", "b": "2"} array2 := map[string]string{"b": "2", "a": "1"} - errors := Compare(array1, array2, CompareParams{}) + errors := Compare(array1, array2, Params{}) if len(errors) != 0 { t.Error( "must return no errors", @@ -335,7 +333,7 @@ func TestCompareEqualMapsWithDifferentKeysOrder(t *testing.T) { func TestCompareMapsWithDifferentKeys(t *testing.T) { array1 := map[string]string{"a": "1", "b": "2"} array2 := map[string]string{"a": "1", "c": "2"} - errors := Compare(array1, array2, CompareParams{}) + errors := Compare(array1, array2, Params{}) expectedErr := makeErrorString("$", "key is missing", "b", "") if errors[0].Error() != expectedErr { t.Error( @@ -349,7 +347,7 @@ func TestCompareMapsWithDifferentKeys(t *testing.T) { func TestCompareMapsWithDifferentValues(t *testing.T) { array1 := map[string]string{"a": "1", "b": "2"} array2 := map[string]string{"a": "1", "b": "3"} - errors := Compare(array1, array2, CompareParams{}) + errors := Compare(array1, array2, Params{}) if errors[0].Error() != makeErrorString("$.b", "values do not match", 2, 3) { t.Error( "must return one error", @@ -362,14 +360,14 @@ func TestCompareMapsWithDifferentValues(t *testing.T) { func TestCompareMapsWithFewErrors(t *testing.T) { array1 := map[string]string{"a": "1", "b": "2", "c": "5"} array2 := map[string]string{"a": "1", "b": "3", "d": "4"} - errors := Compare(array1, array2, CompareParams{}) + errors := Compare(array1, array2, Params{}) assert.Len(t, errors, 2) } func TestCompareEqualNestedMaps(t *testing.T) { array1 := map[string]map[string]string{"a": {"i": "3", "j": "4"}, "b": {"k": "5", "l": 
"6"}} array2 := map[string]map[string]string{"a": {"i": "3", "j": "4"}, "b": {"k": "5", "l": "6"}} - errors := Compare(array1, array2, CompareParams{}) + errors := Compare(array1, array2, Params{}) if len(errors) != 0 { t.Error( "must return no errors", @@ -382,7 +380,7 @@ func TestCompareEqualNestedMaps(t *testing.T) { func TestCompareNestedMapsWithDifferentKeys(t *testing.T) { array1 := map[string]map[string]string{"a": {"i": "3", "j": "4"}, "b": {"k": "5", "l": "6"}} array2 := map[string]map[string]string{"a": {"i": "3", "j": "4"}, "b": {"l": "6"}} - errors := Compare(array1, array2, CompareParams{}) + errors := Compare(array1, array2, Params{}) expectedErr := makeErrorString("$.b", "key is missing", "k", "") if errors[0].Error() != expectedErr { t.Error( @@ -396,7 +394,7 @@ func TestCompareNestedMapsWithDifferentKeys(t *testing.T) { func TestCompareNestedMapsWithDifferentValues(t *testing.T) { array1 := map[string]map[string]string{"a": {"i": "3", "j": "4"}, "b": {"k": "5", "l": "6"}} array2 := map[string]map[string]string{"a": {"i": "3", "j": "4"}, "b": {"k": "5", "l": "7"}} - errors := Compare(array1, array2, CompareParams{}) + errors := Compare(array1, array2, Params{}) if errors[0].Error() != makeErrorString("$.b.l", "values do not match", 6, 7) { t.Error( "must return one error", @@ -410,7 +408,7 @@ func TestCompareEqualJsonScalars(t *testing.T) { var json1, json2 interface{} json.Unmarshal([]byte("1"), &json1) json.Unmarshal([]byte("1"), &json2) - errors := Compare(json1, json2, CompareParams{}) + errors := Compare(json1, json2, Params{}) if len(errors) != 0 { t.Error( "must return no errors", @@ -424,7 +422,7 @@ func TestCompareDifferJsonScalars(t *testing.T) { var json1, json2 interface{} json.Unmarshal([]byte("1"), &json1) json.Unmarshal([]byte("2"), &json2) - errors := Compare(json1, json2, CompareParams{}) + errors := Compare(json1, json2, Params{}) if errors[0].Error() != makeErrorString("$", "values do not match", 1, 2) { t.Error( "must return one error", @@ -458,7 +456,7 @@ func TestCompareEqualArraysWithIgnoreArraysOrdering(t *testing.T) { var json1, json2 interface{} json.Unmarshal([]byte(expectedArrayJson), &json1) json.Unmarshal([]byte(actualArrayJson), &json2) - errors := Compare(json1, json2, CompareParams{ + errors := Compare(json1, json2, Params{ IgnoreArraysOrdering: true, }) if len(errors) != 0 { @@ -474,7 +472,7 @@ func TestCompareEqualComplexJson(t *testing.T) { var json1, json2 interface{} json.Unmarshal([]byte(complexJson1), &json1) json.Unmarshal([]byte(complexJson1), &json2) // compare json with same json - errors := Compare(json1, json2, CompareParams{}) + errors := Compare(json1, json2, Params{}) if len(errors) != 0 { t.Error( "must return no errors", @@ -488,7 +486,7 @@ func TestCompareDifferComplexJson(t *testing.T) { var json1, json2 interface{} json.Unmarshal([]byte(complexJson1), &json1) json.Unmarshal([]byte(complexJson2), &json2) - errors := Compare(json1, json2, CompareParams{}) + errors := Compare(json1, json2, Params{}) expectedErr := makeErrorString( "$.paths./api/get-delivery-info.get.parameters[2].$ref", "values do not match", @@ -5416,6 +5414,7 @@ var complexJson1 = ` ] } ` + var complexJson2 = ` { "swagger": "2.0", diff --git a/fixtures/aerospike/aerospike.go b/fixtures/aerospike/aerospike.go index ed34eb2..4f994eb 100644 --- a/fixtures/aerospike/aerospike.go +++ b/fixtures/aerospike/aerospike.go @@ -21,8 +21,10 @@ type LoaderAerospike struct { debug bool } -type binMap map[string]interface{} -type set map[string]binMap +type ( + binMap 
map[string]interface{} + set map[string]binMap +) type fixture struct { Inherits []string @@ -60,6 +62,7 @@ func (l *LoaderAerospike) Load(names []string) error { return fmt.Errorf("unable to load fixture %s: %s", name, err.Error()) } } + return l.loadSets(&ctx) } @@ -74,6 +77,7 @@ func (l *LoaderAerospike) loadFile(name string, ctx *loadContext) error { for _, candidate := range candidates { if _, err = os.Stat(candidate); err == nil { file = candidate + break } } @@ -92,6 +96,7 @@ func (l *LoaderAerospike) loadFile(name string, ctx *loadContext) error { return err } ctx.files = append(ctx.files, file) + return l.loadYml(data, ctx) } @@ -163,6 +168,7 @@ func (l *LoaderAerospike) loadYml(data []byte, ctx *loadContext) error { } ctx.sets = append(ctx.sets, lt) } + return nil } @@ -234,17 +240,18 @@ func (l *LoaderAerospike) truncateSet(name string) error { func (l *LoaderAerospike) loadSet(ctx *loadContext, set loadedSet) error { // $extend keyword allows, to import values from a named row for key, binMap := range set.data { - if base, ok := binMap["$extend"]; ok { - baseName := base.(string) - baseBinMap, err := l.resolveReference(ctx.refsDefinition, baseName) - if err != nil { - return err - } - for k, v := range binMap { - baseBinMap[k] = v - } - set.data[key] = baseBinMap + if _, ok := binMap["$extend"]; !ok { + continue + } + baseName := binMap["$extend"].(string) + baseBinMap, err := l.resolveReference(ctx.refsDefinition, baseName) + if err != nil { + return err + } + for k, v := range binMap { + baseBinMap[k] = v } + set.data[key] = baseBinMap } for key, binmap := range set.data { @@ -267,10 +274,11 @@ func (l *LoaderAerospike) resolveReference(refs set, refName string) (binMap, er // by the way removing $-records from base row targetCopy := make(binMap, len(target)) for k, v := range target { - if len(k) == 0 || k[0] != '$' { + if k == "" || k[0] != '$' { targetCopy[k] = v } } + return targetCopy, nil } @@ -281,5 +289,6 @@ func inArray(needle string, haystack []string) bool { return true } } + return false } diff --git a/fixtures/loader.go b/fixtures/loader.go index 24b56ba..964fc89 100644 --- a/fixtures/loader.go +++ b/fixtures/loader.go @@ -43,7 +43,6 @@ type Loader interface { } func NewLoader(cfg *Config) Loader { - var loader Loader location := strings.TrimRight(cfg.Location, "/") diff --git a/fixtures/mysql/mysql.go b/fixtures/mysql/mysql.go index 65232cb..cfbb24f 100644 --- a/fixtures/mysql/mysql.go +++ b/fixtures/mysql/mysql.go @@ -21,7 +21,7 @@ type LoaderMysql struct { debug bool } -const errNoIdColumn = "Error 1054: Unknown column 'id' in 'where clause'" +const errNoIDColumn = "Error 1054: Unknown column 'id' in 'where clause'" type row map[string]interface{} @@ -85,6 +85,7 @@ func (l *LoaderMysql) loadFile(name string, ctx *loadContext) error { for _, candidate := range candidates { if _, err = os.Stat(candidate); err == nil { file = candidate + break } } @@ -104,6 +105,7 @@ func (l *LoaderMysql) loadFile(name string, ctx *loadContext) error { return err } ctx.files = append(ctx.files, file) + return l.loadYml(data, ctx) } @@ -131,8 +133,7 @@ func (l *LoaderMysql) loadYml(data []byte, ctx *loadContext) error { row := make(row, len(fields)) for _, field := range fields { key := field.Key.(string) - value, _ := field.Value.(interface{}) - row[key] = value + row[key] = field.Value } if base, ok := row["$extend"]; ok { @@ -149,8 +150,8 @@ func (l *LoaderMysql) loadYml(data []byte, ctx *loadContext) error { ctx.refsDefinition[name] = row if l.debug { - rowJson, _ := 
json.Marshal(row) - fmt.Printf("Populating ref %s as %s from template\n", name, string(rowJson)) + rowJSON, _ := json.Marshal(row) + fmt.Printf("Populating ref %s as %s from template\n", name, string(rowJSON)) } } @@ -174,6 +175,7 @@ func (l *LoaderMysql) loadYml(data []byte, ctx *loadContext) error { } ctx.tables = append(ctx.tables, lt) } + return nil } @@ -224,20 +226,20 @@ func (l *LoaderMysql) truncateTable(tx *sql.Tx, name string) error { } func (l *LoaderMysql) loadTable(tx *sql.Tx, ctx *loadContext, t string, rows table) error { - // $extend keyword allows to import values from a named row for i, row := range rows { - if base, ok := row["$extend"]; ok { - base := base.(string) - baseRow, err := l.resolveReference(ctx.refsDefinition, base) - if err != nil { - return err - } - for k, v := range row { - baseRow[k] = v - } - rows[i] = baseRow + if _, ok := row["$extend"]; !ok { + continue } + base := row["$extend"].(string) + baseRow, err := l.resolveReference(ctx.refsDefinition, base) + if err != nil { + return err + } + for k, v := range row { + baseRow[k] = v + } + rows[i] = baseRow } // issuing query @@ -297,21 +299,21 @@ func (l *LoaderMysql) loadRow(tx *sql.Tx, ctx *loadContext, t string, row row) e // add to references ctx.refsDefinition[name] = row if l.debug { - rowJson, _ := json.Marshal(insertedRowValue) + rowJSON, _ := json.Marshal(insertedRowValue) fmt.Printf( "Populating ref %s as %s from row definition\n", name, - string(rowJson), + string(rowJSON), ) } ctx.refsInserted[name] = insertedRowValue if l.debug { - valuesJson, _ := json.Marshal(insertedRowValue) + valuesJSON, _ := json.Marshal(insertedRowValue) fmt.Printf( "Populating ref %s as %s from inserted values\n", name, - string(valuesJson), + string(valuesJSON), ) } } @@ -351,19 +353,20 @@ func fetchRow(rows *sql.Rows) (row, error) { } func (l *LoaderMysql) insertedRows(tx *sql.Tx, insertRes sql.Result, t string) (*sql.Rows, error) { - lastId, err := insertRes.LastInsertId() + lastID, err := insertRes.LastInsertId() if err != nil { return nil, err } + //nolint:gosec // Obviously shouldn't be used with production DB. 
query := fmt.Sprintf("SELECT * FROM `%s` WHERE `id` = ?", t) - rows, err := tx.Query(query, lastId) + rows, err := tx.Query(query, lastID) if err != nil { // TODO: now we can take inserted rows only if they have column 'id' // later we can add possibility to specify name of PK column in fixture definition // Also, it's weak error check - if err.Error() == errNoIdColumn { + if err.Error() == errNoIDColumn { return nil, nil } @@ -376,8 +379,7 @@ func (l *LoaderMysql) insertedRows(tx *sql.Tx, insertRes sql.Result, t string) ( // buildInsertQuery builds SQL query for data insertion // based on values read from yaml func (l *LoaderMysql) buildInsertQuery(ctx *loadContext, t string, row row) (string, error) { - - var fields []string + fields := make([]string, 0, len(row)) for name := range row { if strings.HasPrefix(name, "$") { @@ -410,6 +412,7 @@ func (l *LoaderMysql) buildInsertQuery(ctx *loadContext, t string, row row) (str } query := "INSERT INTO `%s` (%s) VALUES %s" + return fmt.Sprintf( query, t, @@ -419,7 +422,6 @@ func (l *LoaderMysql) buildInsertQuery(ctx *loadContext, t string, row row) (str } func (l *LoaderMysql) rowInsertValue(ctx *loadContext, val interface{}) (string, error) { - // resolve references if stringValue, ok := val.(string); ok { if strings.HasPrefix(stringValue, "$") { @@ -427,6 +429,7 @@ func (l *LoaderMysql) rowInsertValue(ctx *loadContext, val interface{}) (string, if err != nil { return "", err } + return v, nil } } @@ -435,6 +438,7 @@ func (l *LoaderMysql) rowInsertValue(ctx *loadContext, val interface{}) (string, if err != nil { return "", err } + return dbValue, nil } @@ -447,16 +451,16 @@ func (l *LoaderMysql) resolveExpression(expr string, ctx *loadContext) (string, re := regexp.MustCompile(`^\$eval\((.+)\)$`) if matches := re.FindStringSubmatch(expr); matches != nil { return "(" + matches[1] + ")", nil - } else { - return "", fmt.Errorf("icorrect $eval() usage: %s", expr) } - } else { - value, err := l.resolveFieldReference(ctx.refsInserted, expr) - if err != nil { - return "", err - } - return toDbValue(value) + + return "", fmt.Errorf("icorrect $eval() usage: %s", expr) } + value, err := l.resolveFieldReference(ctx.refsInserted, expr) + if err != nil { + return "", err + } + + return toDbValue(value) } // resolveReference finds previously stored reference by its name @@ -469,17 +473,17 @@ func (l *LoaderMysql) resolveReference(refs rowsDict, refName string) (row, erro // by the way removing $-records from base row targetCopy := make(row, len(target)) for k, v := range target { - if len(k) == 0 || k[0] != '$' { + if k == "" || k[0] != '$' { targetCopy[k] = v } } + return targetCopy, nil } // resolveFieldReference finds previously stored reference by name // and return value of its field func (l *LoaderMysql) resolveFieldReference(refs rowsDict, ref string) (interface{}, error) { - parts := strings.SplitN(ref, ".", 2) if len(parts) < 2 || len(parts[0]) < 2 || len(parts[1]) < 1 { return nil, fmt.Errorf("invalid reference %s, correct form is $refName.field", ref) @@ -497,6 +501,7 @@ func (l *LoaderMysql) resolveFieldReference(refs rowsDict, ref string) (interfac if !ok { return nil, fmt.Errorf("undefined reference field %s", parts[1]) } + return value, nil } @@ -507,13 +512,13 @@ func inArray(needle string, haystack []string) bool { return true } } + return false } // toDbValue prepares value to be passed in SQL query // with respect to its type and converts it to string func toDbValue(value interface{}) (string, error) { - if value == nil { return "NULL", nil 
} @@ -535,14 +540,16 @@ func toDbValue(value interface{}) (string, error) { if err != nil { return "", err } + return quoteLiteral(string(encoded)), nil } // quoteLiteral properly escapes string to be safely // passed as a value in SQL query func quoteLiteral(s string) string { - s = strings.Replace(s, `'`, `''`, -1) - s = strings.Replace(s, `\`, `\\`, -1) + s = strings.ReplaceAll(s, `'`, `''`) + s = strings.ReplaceAll(s, `\`, `\\`) + return "'" + s + "'" } diff --git a/fixtures/postgres/postgres.go b/fixtures/postgres/postgres.go index 3983d59..565201c 100644 --- a/fixtures/postgres/postgres.go +++ b/fixtures/postgres/postgres.go @@ -47,16 +47,18 @@ func newTableName(source string) tableName { switch { case len(parts) == 1: parts = append(parts, parts[0]) + fallthrough case parts[0] == "": parts[0] = "public" } lt := tableName{schema: parts[0], name: parts[1]} + return lt } func (t *tableName) getFullName() string { - return fmt.Sprintf("\"%s\".\"%s\"", t.schema, t.name) + return fmt.Sprintf("%q.%q", t.schema, t.name) } type loadContext struct { @@ -86,6 +88,7 @@ func (f *LoaderPostgres) Load(names []string) error { return fmt.Errorf("unable to load fixture %s: %s", name, err.Error()) } } + return f.loadTables(&ctx) } @@ -100,6 +103,7 @@ func (f *LoaderPostgres) loadFile(name string, ctx *loadContext) error { for _, candidate := range candidates { if _, err = os.Stat(candidate); err == nil { file = candidate + break } } @@ -118,6 +122,7 @@ func (f *LoaderPostgres) loadFile(name string, ctx *loadContext) error { return err } ctx.files = append(ctx.files, file) + return f.loadYml(data, ctx) } @@ -150,8 +155,7 @@ func (f *LoaderPostgres) loadYml(data []byte, ctx *loadContext) error { row := make(row, len(fields)) for _, field := range fields { key := field.Key.(string) - value, _ := field.Value.(interface{}) - row[key] = value + row[key] = field.Value } if base, ok := row["$extend"]; ok { base := base.(string) @@ -166,8 +170,8 @@ func (f *LoaderPostgres) loadYml(data []byte, ctx *loadContext) error { } ctx.refsDefinition[name] = row if f.debug { - rowJson, _ := json.Marshal(row) - fmt.Printf("Populating ref %s as %s from template\n", name, string(rowJson)) + rowJSON, _ := json.Marshal(row) + fmt.Printf("Populating ref %s as %s from template\n", name, string(rowJSON)) } } @@ -199,6 +203,7 @@ func (f *LoaderPostgres) loadYml(data []byte, ctx *loadContext) error { } ctx.tables = append(ctx.tables, lt) } + return nil } @@ -254,23 +259,25 @@ func (f *LoaderPostgres) truncateTables(tx *sql.Tx, tables ...loadedTable) error if err != nil { return err } + return nil } func (f *LoaderPostgres) loadTable(ctx *loadContext, tx *sql.Tx, t tableName, rows table) error { // $extend keyword allows to import values from a named row for i, row := range rows { - if base, ok := row["$extend"]; ok { - base := base.(string) - baseRow, err := f.resolveReference(ctx.refsDefinition, base) - if err != nil { - return err - } - for k, v := range row { - baseRow[k] = v - } - rows[i] = baseRow + if _, ok := row["$extend"]; !ok { + continue + } + base := row["$extend"].(string) + baseRow, err := f.resolveReference(ctx.refsDefinition, base) + if err != nil { + return err + } + for k, v := range row { + baseRow[k] = v } + rows[i] = baseRow } // build SQL query, err := f.buildInsertQuery(ctx, t, rows) @@ -299,25 +306,25 @@ func (f *LoaderPostgres) loadTable(ctx *loadContext, tx *sql.Tx, t tableName, ro return fmt.Errorf("duplicating ref name %s", name) } // read values - var rowJson string - if err := 
insertedRows.Scan(&rowJson); err != nil { + var rowJSON string + if err := insertedRows.Scan(&rowJSON); err != nil { return err } // decode json values := make(map[string]interface{}) - if err := json.Unmarshal([]byte(rowJson), &values); err != nil { + if err := json.Unmarshal([]byte(rowJSON), &values); err != nil { return err } // add to references ctx.refsDefinition[name] = row if f.debug { - rowJson, _ := json.Marshal(row) - fmt.Printf("Populating ref %s as %s from row definition\n", name, string(rowJson)) + rowJSON, _ := json.Marshal(row) + fmt.Printf("Populating ref %s as %s from row definition\n", name, string(rowJSON)) } ctx.refsInserted[name] = values if f.debug { - valuesJson, _ := json.Marshal(values) - fmt.Printf("Populating ref %s as %s from inserted values\n", name, string(valuesJson)) + valuesJSON, _ := json.Marshal(values) + fmt.Printf("Populating ref %s as %s from inserted values\n", name, string(valuesJSON)) } } } @@ -329,6 +336,7 @@ func (f *LoaderPostgres) loadTable(ctx *loadContext, tx *sql.Tx, t tableName, ro if err := insertedRows.Err(); err != nil { return fmt.Errorf("failed to execute query. DB returned error:\n%s", err) } + return err } @@ -360,6 +368,7 @@ END$$ fmt.Println("Issuing SQL:", query) } _, err := tx.Exec(query) + return err } @@ -389,6 +398,7 @@ func (f *LoaderPostgres) buildInsertQuery(ctx *loadContext, t tableName, rows ta value, present := row[name] if !present { dbValuesRow[k] = "default" // default is a PostgreSQL keyword + continue } // resolve references @@ -399,6 +409,7 @@ func (f *LoaderPostgres) buildInsertQuery(ctx *loadContext, t tableName, rows ta if err != nil { return "", err } + continue } } @@ -416,6 +427,7 @@ func (f *LoaderPostgres) buildInsertQuery(ctx *loadContext, t tableName, rows ta } query := "INSERT INTO %s AS row (%s) VALUES %s RETURNING row_to_json(row)" + return fmt.Sprintf(query, t.getFullName(), strings.Join(fields, ", "), strings.Join(dbValues, ", ")), nil } @@ -428,16 +440,17 @@ func (f *LoaderPostgres) resolveExpression(expr string, ctx *loadContext) (strin re := regexp.MustCompile(`^\$eval\((.+)\)$`) if matches := re.FindStringSubmatch(expr); matches != nil { return "(" + matches[1] + ")", nil - } else { - return "", fmt.Errorf("icorrect $eval() usage: %s", expr) } - } else { - value, err := f.resolveFieldReference(ctx.refsInserted, expr) - if err != nil { - return "", nil - } - return toDbValue(value) + + return "", fmt.Errorf("icorrect $eval() usage: %s", expr) } + + value, err := f.resolveFieldReference(ctx.refsInserted, expr) + if err != nil { + return "", nil + } + + return toDbValue(value) } // resolveReference finds previously stored reference by its name @@ -450,10 +463,11 @@ func (f *LoaderPostgres) resolveReference(refs rowsDict, refName string) (row, e // by the way removing $-records from base row targetCopy := make(row, len(target)) for k, v := range target { - if len(k) == 0 || k[0] != '$' { + if k == "" || k[0] != '$' { targetCopy[k] = v } } + return targetCopy, nil } @@ -474,6 +488,7 @@ func (f *LoaderPostgres) resolveFieldReference(refs rowsDict, ref string) (inter if !ok { return nil, fmt.Errorf("undefined reference field %s", parts[1]) } + return value, nil } @@ -484,6 +499,7 @@ func inArray(needle string, haystack []string) bool { return true } } + return false } @@ -511,6 +527,7 @@ func toDbValue(value interface{}) (string, error) { if err != nil { return "", err } + return quoteLiteral(string(encoded)), nil } @@ -521,7 +538,8 @@ func quoteLiteral(s string) string { if strings.Contains(s, `\`) { p 
= "E" } - s = strings.Replace(s, `'`, `''`, -1) - s = strings.Replace(s, `\`, `\\`, -1) + s = strings.ReplaceAll(s, `'`, `''`) + s = strings.ReplaceAll(s, `\`, `\\`) + return p + `'` + s + `'` } diff --git a/fixtures/redis/parser/context.go b/fixtures/redis/parser/context.go index 464e07c..e8d6492 100644 --- a/fixtures/redis/parser/context.go +++ b/fixtures/redis/parser/context.go @@ -1,6 +1,6 @@ package parser -type context struct { +type Context struct { keyRefs map[string]Keys hashRefs map[string]HashRecordValue setRefs map[string]SetRecordValue @@ -8,8 +8,8 @@ type context struct { zsetRefs map[string]ZSetRecordValue } -func NewContext() *context { - return &context{ +func NewContext() *Context { + return &Context{ keyRefs: make(map[string]Keys), hashRefs: make(map[string]HashRecordValue), setRefs: make(map[string]SetRecordValue), diff --git a/fixtures/redis/parser/file.go b/fixtures/redis/parser/file.go index e662dcf..5d83953 100644 --- a/fixtures/redis/parser/file.go +++ b/fixtures/redis/parser/file.go @@ -1,88 +1,88 @@ package parser import ( - "errors" - "fmt" - "path/filepath" - "strings" + "errors" + "fmt" + "path/filepath" + "strings" ) var ( - ErrFixtureNotFound = errors.New("fixture not found") - ErrFixtureFileLoad = errors.New("failed to load fixture file") - ErrFixtureParseFile = errors.New("failed to parse fixture file") - ErrParserNotFound = errors.New("parser not found") + ErrFixtureNotFound = errors.New("fixture not found") + ErrFixtureFileLoad = errors.New("failed to load fixture file") + ErrFixtureParseFile = errors.New("failed to parse fixture file") + ErrParserNotFound = errors.New("parser not found") ) func loadError(fixtureName string, err error) error { - return fmt.Errorf("%w %s: %s", ErrFixtureFileLoad, fixtureName, err) + return fmt.Errorf("%w %s: %s", ErrFixtureFileLoad, fixtureName, err) } func parseError(fixtureName string, err error) error { - return fmt.Errorf("%w %s: %s", ErrFixtureParseFile, fixtureName, err) + return fmt.Errorf("%w %s: %s", ErrFixtureParseFile, fixtureName, err) } type fileParser struct { - locations []string + locations []string } -func New(locations []string) *fileParser{ - return &fileParser{ - locations: locations, - } +func New(locations []string) *fileParser { + return &fileParser{ + locations: locations, + } } -func (l *fileParser) ParseFiles(ctx *context, names []string) ([]*Fixture, error) { - var fileNameCache = make(map[string]struct{}) - var fixtures []*Fixture +func (l *fileParser) ParseFiles(ctx *Context, names []string) ([]*Fixture, error) { + fileNameCache := make(map[string]struct{}) + var fixtures []*Fixture - for _, name := range names { - for _, loc := range l.locations { - filename, err := l.getFirstExistsFileName(name, loc) - if err != nil { - return nil, loadError(name, err) - } - if _, ok := fileNameCache[filename]; ok { - continue - } + for _, name := range names { + for _, loc := range l.locations { + filename, err := l.getFirstExistsFileName(name, loc) + if err != nil { + return nil, loadError(name, err) + } + if _, ok := fileNameCache[filename]; ok { + continue + } - extension := strings.Replace(filepath.Ext(filename), ".", "", -1) - fixtureParser := GetParser(extension) - if fixtureParser == nil { - return nil, ErrParserNotFound - } - parserCopy := fixtureParser.Copy(l) + extension := strings.ReplaceAll(filepath.Ext(filename), ".", "") + fixtureParser := GetParser(extension) + if fixtureParser == nil { + return nil, ErrParserNotFound + } + parserCopy := fixtureParser.Copy(l) - fixture, err := 
parserCopy.Parse(ctx, filename) - if err != nil { - return nil, parseError(filename, err) - } + fixture, err := parserCopy.Parse(ctx, filename) + if err != nil { + return nil, parseError(filename, err) + } - fixtures = append(fixtures, fixture) - fileNameCache[filename] = struct{}{} - } - } + fixtures = append(fixtures, fixture) + fileNameCache[filename] = struct{}{} + } + } - return fixtures, nil + return fixtures, nil } -func (l *fileParser) getFirstExistsFileName(name string, location string) (string, error) { - candidates := []string{ - name, - fmt.Sprintf("%s.yaml", name), - fmt.Sprintf("%s.yml", name), - } +func (l *fileParser) getFirstExistsFileName(name, location string) (string, error) { + candidates := []string{ + name, + fmt.Sprintf("%s.yaml", name), + fmt.Sprintf("%s.yml", name), + } - for _, p := range candidates { - path := filepath.Join(location, p) - paths, err := filepath.Glob(path) - if err != nil { - return "", err - } - if len(paths) > 0 { - return paths[0], nil - } - } + for _, p := range candidates { + path := filepath.Join(location, p) + paths, err := filepath.Glob(path) + if err != nil { + return "", err + } + if len(paths) > 0 { + return paths[0], nil + } + } - return "", ErrFixtureNotFound + return "", ErrFixtureNotFound } diff --git a/fixtures/redis/parser/parser.go b/fixtures/redis/parser/parser.go index ddb3fe1..984c705 100644 --- a/fixtures/redis/parser/parser.go +++ b/fixtures/redis/parser/parser.go @@ -1,21 +1,20 @@ package parser type FixtureFileParser interface { - Parse(ctx *context, filename string) (*Fixture, error) - Copy(parser *fileParser) FixtureFileParser + Parse(ctx *Context, filename string) (*Fixture, error) + Copy(parser *fileParser) FixtureFileParser } var fixtureParsersRegistry = make(map[string]FixtureFileParser) func RegisterParser(format string, parser FixtureFileParser) { - fixtureParsersRegistry[format] = parser + fixtureParsersRegistry[format] = parser } func GetParser(format string) FixtureFileParser { - return fixtureParsersRegistry[format] + return fixtureParsersRegistry[format] } func init() { - RegisterParser("yaml", &redisYamlParser{}) + RegisterParser("yaml", &redisYamlParser{}) } - diff --git a/fixtures/redis/parser/parser_test.go b/fixtures/redis/parser/parser_test.go index d5299fc..9ff7db2 100644 --- a/fixtures/redis/parser/parser_test.go +++ b/fixtures/redis/parser/parser_test.go @@ -14,7 +14,7 @@ func TestRedisFixtureParser_Load(t *testing.T) { type want struct { fixtures []*Fixture - ctx *context + ctx *Context } tests := []struct { @@ -144,7 +144,7 @@ func TestRedisFixtureParser_Load(t *testing.T) { }, }, }, - ctx: &context{ + ctx: &Context{ keyRefs: map[string]Keys{}, setRefs: map[string]SetRecordValue{}, hashRefs: map[string]HashRecordValue{}, @@ -228,7 +228,7 @@ func TestRedisFixtureParser_Load(t *testing.T) { }, }, { - Name: "childList", + Name: "childList", Extend: "parentList", Values: []*ListValue{ {Value: Int(1)}, @@ -247,7 +247,7 @@ func TestRedisFixtureParser_Load(t *testing.T) { }, }, { - Name: "childZSet", + Name: "childZSet", Extend: "parentZSet", Values: []*ZSetValue{ {Value: Int(1), Score: 1.2}, @@ -353,7 +353,7 @@ func TestRedisFixtureParser_Load(t *testing.T) { }, }, }, - ctx: &context{ + ctx: &Context{ keyRefs: map[string]Keys{ "parentKeys": { Values: map[string]*KeyValue{ @@ -581,7 +581,7 @@ func TestRedisFixtureParser_Load(t *testing.T) { }, }, }, - ctx: &context{ + ctx: &Context{ keyRefs: map[string]Keys{ "parentKeys": { Values: map[string]*KeyValue{ @@ -714,7 +714,7 @@ func 
TestRedisFixtureParser_Load(t *testing.T) { if diff := cmp.Diff(test.want.fixtures, fixtures); diff != "" { t.Errorf("ParseFiles - unexpected diff in fixtures: %s", diff) } - if diff := cmp.Diff(test.want.ctx, ctx, cmp.AllowUnexported(context{})); diff != "" { + if diff := cmp.Diff(test.want.ctx, ctx, cmp.AllowUnexported(Context{})); diff != "" { t.Errorf("ParseFiles - unexpected diff in context: %s", diff) } }) diff --git a/fixtures/redis/parser/yaml.go b/fixtures/redis/parser/yaml.go index 96a199e..27a4364 100644 --- a/fixtures/redis/parser/yaml.go +++ b/fixtures/redis/parser/yaml.go @@ -1,478 +1,510 @@ package parser import ( - "errors" - "fmt" - "io/ioutil" + "errors" + "fmt" + "io/ioutil" - "gopkg.in/yaml.v3" + "gopkg.in/yaml.v3" ) type redisYamlParser struct { - fileParser *fileParser + fileParser *fileParser } func (p *redisYamlParser) Copy(fileParser *fileParser) FixtureFileParser { - cp := &(*p) - cp.fileParser = fileParser - return cp + deref := *p + cp := &deref + cp.fileParser = fileParser + + return cp } -func (p *redisYamlParser) extendKeys(ctx *context, child *Keys) error { - if child.Extend == "" { - return nil - } - parent, err := p.resolveKeyReference(ctx.keyRefs, child.Extend) - if err != nil { - return err - } - for k, v := range child.Values { - parent.Values[k] = v - } - child.Values = parent.Values - return nil +func (p *redisYamlParser) extendKeys(ctx *Context, child *Keys) error { + if child.Extend == "" { + return nil + } + parent, err := p.resolveKeyReference(ctx.keyRefs, child.Extend) + if err != nil { + return err + } + for k, v := range child.Values { + parent.Values[k] = v + } + child.Values = parent.Values + + return nil } func (p *redisYamlParser) copyKeyRecord(src *Keys) Keys { - keyRef := Keys{ - Values: make(map[string]*KeyValue, len(src.Values)), - } - for k, v := range src.Values { - var valueCopy *KeyValue - if v != nil { - valueCopy = &(*v) - } - keyRef.Values[k] = valueCopy - } - return keyRef + keyRef := Keys{ + Values: make(map[string]*KeyValue, len(src.Values)), + } + for k, v := range src.Values { + var valueCopy *KeyValue + if v != nil { + deref := *v + valueCopy = &deref + } + keyRef.Values[k] = valueCopy + } + + return keyRef } -func (p *redisYamlParser) extendSet(ctx *context, child *SetRecordValue) error { - if child.Extend == "" { - return nil - } - parent, err := p.resolveSetReference(ctx.setRefs, child.Extend) - if err != nil { - return err - } - var keys []interface{} - parentValuesMapped := make(map[interface{}]*SetValue) - for _, v := range parent.Values { - parentValuesMapped[v] = v - keys = append(keys, v) - } - for _, v := range child.Values { - if _, ok := parentValuesMapped[v]; !ok { - keys = append(keys, v) - } - parentValuesMapped[v] = v - } - setValues := make([]*SetValue, 0, len(parentValuesMapped)) - for _, k := range keys { - setValues = append(setValues, parentValuesMapped[k]) - } - child.Expiration = parent.Expiration - child.Values = setValues - return nil +func (p *redisYamlParser) extendSet(ctx *Context, child *SetRecordValue) error { + if child.Extend == "" { + return nil + } + parent, err := p.resolveSetReference(ctx.setRefs, child.Extend) + if err != nil { + return err + } + var keys []interface{} + parentValuesMapped := make(map[interface{}]*SetValue) + for _, v := range parent.Values { + parentValuesMapped[v] = v + keys = append(keys, v) + } + for _, v := range child.Values { + if _, ok := parentValuesMapped[v]; !ok { + keys = append(keys, v) + } + parentValuesMapped[v] = v + } + setValues := make([]*SetValue, 0, 
len(parentValuesMapped)) + for _, k := range keys { + setValues = append(setValues, parentValuesMapped[k]) + } + child.Expiration = parent.Expiration + child.Values = setValues + + return nil } -func (p *redisYamlParser) copySetRecord(src *SetRecordValue) SetRecordValue{ - setRef := SetRecordValue{ - Expiration: src.Expiration, - Values: make([]*SetValue, 0, len(src.Values)), - } - for _, v := range src.Values { - var valueCopy *SetValue - if v != nil { - valueCopy = &(*v) - } - setRef.Values = append(setRef.Values, valueCopy) - } - return setRef +func (p *redisYamlParser) copySetRecord(src *SetRecordValue) SetRecordValue { + setRef := SetRecordValue{ + Expiration: src.Expiration, + Values: make([]*SetValue, 0, len(src.Values)), + } + for _, v := range src.Values { + var valueCopy *SetValue + if v != nil { + deref := *v + valueCopy = &deref + } + setRef.Values = append(setRef.Values, valueCopy) + } + + return setRef } -func (p *redisYamlParser) extendHash(ctx *context, child *HashRecordValue) error { - if child.Extend == "" { - return nil - } - parent, err := p.resolveHashReference(ctx.hashRefs, child.Extend) - if err != nil { - return err - } - var keys []interface{} - parentValuesMapped := make(map[interface{}]*HashValue) - for _, v := range parent.Values { - parentValuesMapped[v.Key] = v - keys = append(keys, v.Key) - } - for _, v := range child.Values { - if _, ok := parentValuesMapped[v.Key]; !ok { - keys = append(keys, v.Key) - } - parentValuesMapped[v.Key] = v - } - hashValues := make([]*HashValue, 0, len(parentValuesMapped)) - for _, k := range keys { - hashValues = append(hashValues, parentValuesMapped[k]) - } - - child.Expiration = parent.Expiration - child.Values = hashValues - return nil +func (p *redisYamlParser) extendHash(ctx *Context, child *HashRecordValue) error { + if child.Extend == "" { + return nil + } + parent, err := p.resolveHashReference(ctx.hashRefs, child.Extend) + if err != nil { + return err + } + var keys []interface{} + parentValuesMapped := make(map[interface{}]*HashValue) + for _, v := range parent.Values { + parentValuesMapped[v.Key] = v + keys = append(keys, v.Key) + } + for _, v := range child.Values { + if _, ok := parentValuesMapped[v.Key]; !ok { + keys = append(keys, v.Key) + } + parentValuesMapped[v.Key] = v + } + hashValues := make([]*HashValue, 0, len(parentValuesMapped)) + for _, k := range keys { + hashValues = append(hashValues, parentValuesMapped[k]) + } + + child.Expiration = parent.Expiration + child.Values = hashValues + + return nil } func (p *redisYamlParser) copyHashRecord(src *HashRecordValue) HashRecordValue { - cpy := HashRecordValue{ - Expiration: src.Expiration, - Values: make([]*HashValue, 0, len(src.Values)), - } - for _, v := range src.Values { - var valueCopy *HashValue - if v != nil { - valueCopy = &(*v) - } - cpy.Values = append(cpy.Values, valueCopy) - } - return cpy + cpy := HashRecordValue{ + Expiration: src.Expiration, + Values: make([]*HashValue, 0, len(src.Values)), + } + for _, v := range src.Values { + var valueCopy *HashValue + if v != nil { + deref := *v + valueCopy = &deref + } + cpy.Values = append(cpy.Values, valueCopy) + } + + return cpy } -func (p *redisYamlParser) extendList(ctx *context, child *ListRecordValue) error { - if child.Extend == "" { - return nil - } - parent, err := p.resolveListReference(ctx.listRefs, child.Extend) - if err != nil { - return err - } - for _, v := range child.Values { - parent.Values = append(parent.Values, v) - } - child.Expiration = parent.Expiration - child.Values = 
parent.Values - return nil +func (p *redisYamlParser) extendList(ctx *Context, child *ListRecordValue) error { + if child.Extend == "" { + return nil + } + parent, err := p.resolveListReference(ctx.listRefs, child.Extend) + if err != nil { + return err + } + parent.Values = append(parent.Values, child.Values...) + child.Expiration = parent.Expiration + child.Values = parent.Values + + return nil } func (p *redisYamlParser) copyListRecord(src *ListRecordValue) ListRecordValue { - ref := ListRecordValue{ - Expiration: src.Expiration, - Values: make([]*ListValue, 0, len(src.Values)), - } - for _, v := range src.Values { - var valueCopy *ListValue - if v != nil { - valueCopy = &(*v) - } - ref.Values = append(ref.Values, valueCopy) - } - return ref + ref := ListRecordValue{ + Expiration: src.Expiration, + Values: make([]*ListValue, 0, len(src.Values)), + } + for _, v := range src.Values { + var valueCopy *ListValue + if v != nil { + deref := *v + valueCopy = &deref + } + ref.Values = append(ref.Values, valueCopy) + } + + return ref } -func (p *redisYamlParser) extendZSet(ctx *context, child *ZSetRecordValue) error { - if child.Extend == "" { - return nil - } - parent, err := p.resolveZSetReference(ctx.zsetRefs, child.Extend) - if err != nil { - return err - } - var keys []interface{} - parentValuesMapped := make(map[interface{}]*ZSetValue) - for _, v := range parent.Values { - parentValuesMapped[v] = v - keys = append(keys, v) - } - for _, v := range child.Values { - if _, ok := parentValuesMapped[v]; !ok { - keys = append(keys, v) - } - parentValuesMapped[v] = v - } - setValues := make([]*ZSetValue, 0, len(parentValuesMapped)) - for _, k := range keys { - setValues = append(setValues, parentValuesMapped[k]) - } - - child.Expiration = parent.Expiration - child.Values = setValues - return nil +func (p *redisYamlParser) extendZSet(ctx *Context, child *ZSetRecordValue) error { + if child.Extend == "" { + return nil + } + parent, err := p.resolveZSetReference(ctx.zsetRefs, child.Extend) + if err != nil { + return err + } + var keys []interface{} + parentValuesMapped := make(map[interface{}]*ZSetValue) + for _, v := range parent.Values { + parentValuesMapped[v] = v + keys = append(keys, v) + } + for _, v := range child.Values { + if _, ok := parentValuesMapped[v]; !ok { + keys = append(keys, v) + } + parentValuesMapped[v] = v + } + setValues := make([]*ZSetValue, 0, len(parentValuesMapped)) + for _, k := range keys { + setValues = append(setValues, parentValuesMapped[k]) + } + + child.Expiration = parent.Expiration + child.Values = setValues + + return nil } func (p *redisYamlParser) copyZSetRecord(src *ZSetRecordValue) ZSetRecordValue { - ref := ZSetRecordValue{ - Expiration: src.Expiration, - Values: make([]*ZSetValue, 0, len(src.Values)), - } - for _, v := range src.Values { - var valueCopy *ZSetValue - if v != nil { - valueCopy = &(*v) - } - ref.Values = append(ref.Values, valueCopy) - } - return ref + ref := ZSetRecordValue{ + Expiration: src.Expiration, + Values: make([]*ZSetValue, 0, len(src.Values)), + } + for _, v := range src.Values { + var valueCopy *ZSetValue + if v != nil { + deref := *v + valueCopy = &deref + } + ref.Values = append(ref.Values, valueCopy) + } + + return ref } -func (p *redisYamlParser) buildKeysTemplates(ctx *context, f Fixture) error { - for _, tplData := range f.Templates.Keys { - refName := tplData.Name - if refName == "" { - return errors.New("template $name is required") - } - if _, ok := ctx.keyRefs[refName]; ok { - return fmt.Errorf("unable to load template %s: 
duplicating ref name", refName) - } - if err := p.extendKeys(ctx, tplData); err != nil { - return err - } - ctx.keyRefs[refName] = p.copyKeyRecord(tplData) - } - return nil +func (p *redisYamlParser) buildKeysTemplates(ctx *Context, f Fixture) error { + for _, tplData := range f.Templates.Keys { + refName := tplData.Name + if refName == "" { + return errors.New("template $name is required") + } + if _, ok := ctx.keyRefs[refName]; ok { + return fmt.Errorf("unable to load template %s: duplicating ref name", refName) + } + if err := p.extendKeys(ctx, tplData); err != nil { + return err + } + ctx.keyRefs[refName] = p.copyKeyRecord(tplData) + } + + return nil } -func (p *redisYamlParser) buildSetTemplates(ctx *context, f Fixture) error { - for _, tplData := range f.Templates.Sets { - refName := tplData.Name - if refName == "" { - return errors.New("template $name is required") - } - if _, ok := ctx.setRefs[refName]; ok { - return fmt.Errorf("unable to load template %s: duplicating ref name", refName) - } - if err := p.extendSet(ctx, tplData); err != nil { - return err - } - ctx.setRefs[refName] = p.copySetRecord(tplData) - } - return nil +func (p *redisYamlParser) buildSetTemplates(ctx *Context, f Fixture) error { + for _, tplData := range f.Templates.Sets { + refName := tplData.Name + if refName == "" { + return errors.New("template $name is required") + } + if _, ok := ctx.setRefs[refName]; ok { + return fmt.Errorf("unable to load template %s: duplicating ref name", refName) + } + if err := p.extendSet(ctx, tplData); err != nil { + return err + } + ctx.setRefs[refName] = p.copySetRecord(tplData) + } + + return nil } -func (p *redisYamlParser) buildHashTemplates(ctx *context, f Fixture) error { - for _, tplData := range f.Templates.Hashes { - refName := tplData.Name - if refName == "" { - return errors.New("template $name is required") - } - if _, ok := ctx.hashRefs[refName]; ok { - return fmt.Errorf("unable to load template %s: duplicating ref name", refName) - } - if err := p.extendHash(ctx, tplData); err != nil { - return err - } - ctx.hashRefs[refName] = p.copyHashRecord(tplData) - } - return nil +func (p *redisYamlParser) buildHashTemplates(ctx *Context, f Fixture) error { + for _, tplData := range f.Templates.Hashes { + refName := tplData.Name + if refName == "" { + return errors.New("template $name is required") + } + if _, ok := ctx.hashRefs[refName]; ok { + return fmt.Errorf("unable to load template %s: duplicating ref name", refName) + } + if err := p.extendHash(ctx, tplData); err != nil { + return err + } + ctx.hashRefs[refName] = p.copyHashRecord(tplData) + } + + return nil } -func (p *redisYamlParser) buildListTemplates(ctx *context, f Fixture) error { - for _, tplData := range f.Templates.Lists { - refName := tplData.Name - if refName == "" { - return errors.New("template $name is required") - } - if _, ok := ctx.listRefs[refName]; ok { - return fmt.Errorf("unable to load template %s: duplicating ref name", refName) - } - if err := p.extendList(ctx, tplData); err != nil { - return err - } - ctx.listRefs[refName] = p.copyListRecord(tplData) - } - return nil +func (p *redisYamlParser) buildListTemplates(ctx *Context, f Fixture) error { + for _, tplData := range f.Templates.Lists { + refName := tplData.Name + if refName == "" { + return errors.New("template $name is required") + } + if _, ok := ctx.listRefs[refName]; ok { + return fmt.Errorf("unable to load template %s: duplicating ref name", refName) + } + if err := p.extendList(ctx, tplData); err != nil { + return err + } + 
ctx.listRefs[refName] = p.copyListRecord(tplData) + } + + return nil } -func (p *redisYamlParser) buildZSetTemplates(ctx *context, f Fixture) error { - for _, tplData := range f.Templates.ZSets { - refName := tplData.Name - if refName == "" { - return errors.New("template $name is required") - } - if _, ok := ctx.zsetRefs[refName]; ok { - return fmt.Errorf("unable to load template %s: duplicating ref name", refName) - } - if err := p.extendZSet(ctx, tplData); err != nil { - return err - } - ctx.zsetRefs[refName] = p.copyZSetRecord(tplData) - } - return nil +func (p *redisYamlParser) buildZSetTemplates(ctx *Context, f Fixture) error { + for _, tplData := range f.Templates.ZSets { + refName := tplData.Name + if refName == "" { + return errors.New("template $name is required") + } + if _, ok := ctx.zsetRefs[refName]; ok { + return fmt.Errorf("unable to load template %s: duplicating ref name", refName) + } + if err := p.extendZSet(ctx, tplData); err != nil { + return err + } + ctx.zsetRefs[refName] = p.copyZSetRecord(tplData) + } + + return nil } -func (p *redisYamlParser) buildTemplate(ctx *context, f Fixture) error { - if err := p.buildKeysTemplates(ctx, f); err != nil { - return err - } - if err := p.buildSetTemplates(ctx, f); err != nil { - return err - } - if err := p.buildHashTemplates(ctx, f); err != nil { - return err - } - if err := p.buildListTemplates(ctx, f); err != nil { - return err - } - if err := p.buildZSetTemplates(ctx, f); err != nil { - return err - } - return nil +func (p *redisYamlParser) buildTemplate(ctx *Context, f Fixture) error { + if err := p.buildKeysTemplates(ctx, f); err != nil { + return err + } + if err := p.buildSetTemplates(ctx, f); err != nil { + return err + } + if err := p.buildHashTemplates(ctx, f); err != nil { + return err + } + if err := p.buildListTemplates(ctx, f); err != nil { + return err + } + if err := p.buildZSetTemplates(ctx, f); err != nil { + return err + } + + return nil } func (p *redisYamlParser) resolveKeyReference(refs map[string]Keys, refName string) (*Keys, error) { - refTemplate, ok := refs[refName] - if !ok { - return nil, fmt.Errorf("ref not found: %s", refName) - } - cpy := p.copyKeyRecord(&refTemplate) - return &cpy, nil + refTemplate, ok := refs[refName] + if !ok { + return nil, fmt.Errorf("ref not found: %s", refName) + } + cpy := p.copyKeyRecord(&refTemplate) + + return &cpy, nil } func (p *redisYamlParser) resolveSetReference(refs map[string]SetRecordValue, refName string) (*SetRecordValue, error) { - refTemplate, ok := refs[refName] - if !ok { - return nil, fmt.Errorf("ref not found: %s", refName) - } - cpy := p.copySetRecord(&refTemplate) - return &cpy, nil + refTemplate, ok := refs[refName] + if !ok { + return nil, fmt.Errorf("ref not found: %s", refName) + } + cpy := p.copySetRecord(&refTemplate) + + return &cpy, nil } func (p *redisYamlParser) resolveHashReference(refs map[string]HashRecordValue, refName string) (*HashRecordValue, error) { - refTemplate, ok := refs[refName] - if !ok { - return nil, fmt.Errorf("ref not found: %s", refName) - } - cpy := p.copyHashRecord(&refTemplate) - return &cpy, nil + refTemplate, ok := refs[refName] + if !ok { + return nil, fmt.Errorf("ref not found: %s", refName) + } + cpy := p.copyHashRecord(&refTemplate) + + return &cpy, nil } func (p *redisYamlParser) resolveListReference(refs map[string]ListRecordValue, refName string) (*ListRecordValue, error) { - refTemplate, ok := refs[refName] - if !ok { - return nil, fmt.Errorf("ref not found: %s", refName) - } - cpy := 
p.copyListRecord(&refTemplate) - return &cpy, nil + refTemplate, ok := refs[refName] + if !ok { + return nil, fmt.Errorf("ref not found: %s", refName) + } + cpy := p.copyListRecord(&refTemplate) + + return &cpy, nil } func (p *redisYamlParser) resolveZSetReference(refs map[string]ZSetRecordValue, refName string) (*ZSetRecordValue, error) { - refTemplate, ok := refs[refName] - if !ok { - return nil, fmt.Errorf("ref not found: %s", refName) - } - cpy := p.copyZSetRecord(&refTemplate) - return &cpy, nil + refTemplate, ok := refs[refName] + if !ok { + return nil, fmt.Errorf("ref not found: %s", refName) + } + cpy := p.copyZSetRecord(&refTemplate) + + return &cpy, nil } -func (p *redisYamlParser) buildKeys(ctx *context, data *Keys) error { - if data == nil { - return nil - } - if err := p.extendKeys(ctx, data); err != nil { - return err - } - if data.Name != "" { - ctx.keyRefs[data.Name] = p.copyKeyRecord(data) - } - return nil +func (p *redisYamlParser) buildKeys(ctx *Context, data *Keys) error { + if data == nil { + return nil + } + if err := p.extendKeys(ctx, data); err != nil { + return err + } + if data.Name != "" { + ctx.keyRefs[data.Name] = p.copyKeyRecord(data) + } + + return nil } -func (p *redisYamlParser) buildSets(ctx *context, data *Sets) error { - if data == nil { - return nil - } - for _, v := range data.Values { - if err := p.extendSet(ctx, v); err != nil { - return fmt.Errorf("extend set error: %w", err) - } - if v.Name != "" { - ctx.setRefs[v.Name] = p.copySetRecord(v) - } - } - return nil +func (p *redisYamlParser) buildSets(ctx *Context, data *Sets) error { + if data == nil { + return nil + } + for _, v := range data.Values { + if err := p.extendSet(ctx, v); err != nil { + return fmt.Errorf("extend set error: %w", err) + } + if v.Name != "" { + ctx.setRefs[v.Name] = p.copySetRecord(v) + } + } + + return nil } -func (p *redisYamlParser) buildMaps(ctx *context, data *Hashes) error { - if data == nil { - return nil - } - for _, v := range data.Values { - if err := p.extendHash(ctx, v); err != nil { - return fmt.Errorf("extend hash error: %w", err) - } - if v.Name != "" { - ctx.hashRefs[v.Name] = p.copyHashRecord(v) - } - } - return nil +func (p *redisYamlParser) buildMaps(ctx *Context, data *Hashes) error { + if data == nil { + return nil + } + for _, v := range data.Values { + if err := p.extendHash(ctx, v); err != nil { + return fmt.Errorf("extend hash error: %w", err) + } + if v.Name != "" { + ctx.hashRefs[v.Name] = p.copyHashRecord(v) + } + } + + return nil } -func (p *redisYamlParser) buildLists(ctx *context, data *Lists) error { - if data == nil { - return nil - } - for _, v := range data.Values { - if err := p.extendList(ctx, v); err != nil { - return fmt.Errorf("extend list error: %w", err) - } - if v.Name != "" { - ctx.listRefs[v.Name] = p.copyListRecord(v) - } - } - return nil +func (p *redisYamlParser) buildLists(ctx *Context, data *Lists) error { + if data == nil { + return nil + } + for _, v := range data.Values { + if err := p.extendList(ctx, v); err != nil { + return fmt.Errorf("extend list error: %w", err) + } + if v.Name != "" { + ctx.listRefs[v.Name] = p.copyListRecord(v) + } + } + + return nil } -func (p *redisYamlParser) buildZSets(ctx *context, data *ZSets) error { - if data == nil { - return nil - } - for _, v := range data.Values { - if err := p.extendZSet(ctx, v); err != nil { - return fmt.Errorf("extend zset error: %w", err) - } - if v.Name != "" { - ctx.zsetRefs[v.Name] = p.copyZSetRecord(v) - } - } - return nil +func (p *redisYamlParser) 
buildZSets(ctx *Context, data *ZSets) error { + if data == nil { + return nil + } + for _, v := range data.Values { + if err := p.extendZSet(ctx, v); err != nil { + return fmt.Errorf("extend zset error: %w", err) + } + if v.Name != "" { + ctx.zsetRefs[v.Name] = p.copyZSetRecord(v) + } + } + + return nil } -func (p *redisYamlParser) Parse(ctx *context, filename string) (*Fixture, error) { - data, err := ioutil.ReadFile(filename) - if err != nil { - return nil, err - } - - var fixture Fixture - if err := yaml.Unmarshal(data, &fixture); err != nil { - return nil, err - } - - for _, parentFixture := range fixture.Inherits { - _, err := p.fileParser.ParseFiles(ctx, []string{parentFixture}) - if err != nil { - return nil, err - } - } - - if err = p.buildTemplate(ctx, fixture); err != nil { - return nil, err - } - - for _, databaseData := range fixture.Databases { - if err := p.buildKeys(ctx, databaseData.Keys); err != nil { - return nil, err - } - if err := p.buildMaps(ctx, databaseData.Hashes); err != nil { - return nil, err - } - if err := p.buildSets(ctx, databaseData.Sets); err != nil { - return nil, err - } - if err := p.buildLists(ctx, databaseData.Lists); err != nil { - return nil, err - } - if err := p.buildZSets(ctx, databaseData.ZSets); err != nil { - return nil, err - } - } - - return &fixture, nil +func (p *redisYamlParser) Parse(ctx *Context, filename string) (*Fixture, error) { + data, err := ioutil.ReadFile(filename) + if err != nil { + return nil, err + } + + var fixture Fixture + if err := yaml.Unmarshal(data, &fixture); err != nil { + return nil, err + } + + for _, parentFixture := range fixture.Inherits { + _, err := p.fileParser.ParseFiles(ctx, []string{parentFixture}) + if err != nil { + return nil, err + } + } + + err = p.buildTemplate(ctx, fixture) + if err != nil { + return nil, err + } + + for _, databaseData := range fixture.Databases { + if err := p.buildKeys(ctx, databaseData.Keys); err != nil { + return nil, err + } + if err := p.buildMaps(ctx, databaseData.Hashes); err != nil { + return nil, err + } + if err := p.buildSets(ctx, databaseData.Sets); err != nil { + return nil, err + } + if err := p.buildLists(ctx, databaseData.Lists); err != nil { + return nil, err + } + if err := p.buildZSets(ctx, databaseData.ZSets); err != nil { + return nil, err + } + } + + return &fixture, nil } diff --git a/fixtures/redis/redis.go b/fixtures/redis/redis.go index 46f7a6f..a13ae50 100644 --- a/fixtures/redis/redis.go +++ b/fixtures/redis/redis.go @@ -1,194 +1,202 @@ package redis import ( - "context" + "context" - "github.com/go-redis/redis/v9" - "github.com/lamoda/gonkey/fixtures/redis/parser" + "github.com/go-redis/redis/v9" + "github.com/lamoda/gonkey/fixtures/redis/parser" ) -type loader struct { - locations []string - client *redis.Client +type Loader struct { + locations []string + client *redis.Client } type LoaderOptions struct { - FixtureDir string - Redis *redis.Options + FixtureDir string + Redis *redis.Options } -func New(opts LoaderOptions) *loader { - client := redis.NewClient(opts.Redis) - return &loader{ - locations: []string{opts.FixtureDir}, - client: client, - } +func New(opts LoaderOptions) *Loader { + client := redis.NewClient(opts.Redis) + + return &Loader{ + locations: []string{opts.FixtureDir}, + client: client, + } } -func (l *loader) Load(names []string) error { - ctx := parser.NewContext() - fileParser := parser.New(l.locations) - fixtureList, err := fileParser.ParseFiles(ctx, names) - if err != nil { - return err - } - return l.loadData(fixtureList) 
+func (l *Loader) Load(names []string) error { + ctx := parser.NewContext() + fileParser := parser.New(l.locations) + fixtureList, err := fileParser.ParseFiles(ctx, names) + if err != nil { + return err + } + + return l.loadData(fixtureList) } -func (l *loader) loadKeys(ctx context.Context, pipe redis.Pipeliner, db parser.Database) error { - if db.Keys == nil { - return nil - } - for k, v := range db.Keys.Values { - if err := pipe.Set(ctx, k, v.Value.Value, v.Expiration).Err(); err != nil { - return err - } - } - return nil +func (l *Loader) loadKeys(ctx context.Context, pipe redis.Pipeliner, db parser.Database) error { + if db.Keys == nil { + return nil + } + for k, v := range db.Keys.Values { + if err := pipe.Set(ctx, k, v.Value.Value, v.Expiration).Err(); err != nil { + return err + } + } + + return nil } -func (l *loader) loadSets(ctx context.Context, pipe redis.Pipeliner, db parser.Database) error { - if db.Sets == nil { - return nil - } - for setKey, setRecord := range db.Sets.Values { - values := make([]interface{}, 0, len(setRecord.Values)) - for _, v := range setRecord.Values { - values = append(values, v.Value.Value) - } - if err := pipe.SAdd(ctx, setKey, values).Err(); err != nil { - return err - } - if setRecord.Expiration > 0 { - if err := pipe.Expire(ctx, setKey, setRecord.Expiration).Err(); err != nil { - return err - } - } - } - return nil +func (l *Loader) loadSets(ctx context.Context, pipe redis.Pipeliner, db parser.Database) error { + if db.Sets == nil { + return nil + } + for setKey, setRecord := range db.Sets.Values { + values := make([]interface{}, 0, len(setRecord.Values)) + for _, v := range setRecord.Values { + values = append(values, v.Value.Value) + } + if err := pipe.SAdd(ctx, setKey, values).Err(); err != nil { + return err + } + if setRecord.Expiration > 0 { + if err := pipe.Expire(ctx, setKey, setRecord.Expiration).Err(); err != nil { + return err + } + } + } + + return nil } -func (l *loader) loadHashes(ctx context.Context, pipe redis.Pipeliner, db parser.Database) error { - if db.Hashes == nil { - return nil - } - for key, record := range db.Hashes.Values { - values := make([]interface{}, 0, len(record.Values) * 2) - for _, v := range record.Values { - values = append(values, v.Key.Value, v.Value.Value) - } - if err := pipe.HSet(ctx, key, values...).Err(); err != nil { - return err - } - if record.Expiration > 0 { - if err := pipe.Expire(ctx, key, record.Expiration).Err(); err != nil { - return err - } - } - } - return nil +func (l *Loader) loadHashes(ctx context.Context, pipe redis.Pipeliner, db parser.Database) error { + if db.Hashes == nil { + return nil + } + for key, record := range db.Hashes.Values { + values := make([]interface{}, 0, len(record.Values)*2) + for _, v := range record.Values { + values = append(values, v.Key.Value, v.Value.Value) + } + if err := pipe.HSet(ctx, key, values...).Err(); err != nil { + return err + } + if record.Expiration > 0 { + if err := pipe.Expire(ctx, key, record.Expiration).Err(); err != nil { + return err + } + } + } + + return nil } -func (l *loader) loadLists(ctx context.Context, pipe redis.Pipeliner, db parser.Database) error{ - if db.Lists == nil { - return nil - } - for key, record := range db.Lists.Values { - values := make([]interface{}, 0, len(record.Values)) - for _, v := range record.Values { - values = append(values, v.Value.Value) - } - if err := pipe.RPush(ctx, key, values...).Err(); err != nil { - return err - } - if record.Expiration > 0 { - if err := pipe.Expire(ctx, key, record.Expiration).Err(); err 
!= nil { - return err - } - } - } - return nil +func (l *Loader) loadLists(ctx context.Context, pipe redis.Pipeliner, db parser.Database) error { + if db.Lists == nil { + return nil + } + for key, record := range db.Lists.Values { + values := make([]interface{}, 0, len(record.Values)) + for _, v := range record.Values { + values = append(values, v.Value.Value) + } + if err := pipe.RPush(ctx, key, values...).Err(); err != nil { + return err + } + if record.Expiration > 0 { + if err := pipe.Expire(ctx, key, record.Expiration).Err(); err != nil { + return err + } + } + } + + return nil } -func (l *loader) loadSortedSets(ctx context.Context, pipe redis.Pipeliner, db parser.Database) error { - if db.ZSets == nil { - return nil - } - for key, record := range db.ZSets.Values { - values := make([]redis.Z, 0, len(record.Values)) - for _, v := range record.Values { - values = append(values, redis.Z{ - Score: v.Score, - Member: v.Value.Value, - }) - } - if err := pipe.ZAdd(ctx, key, values...).Err(); err != nil { - return err - } - if record.Expiration > 0 { - if err := pipe.Expire(ctx, key, record.Expiration).Err(); err != nil { - return err - } - } - } - return nil +func (l *Loader) loadSortedSets(ctx context.Context, pipe redis.Pipeliner, db parser.Database) error { + if db.ZSets == nil { + return nil + } + for key, record := range db.ZSets.Values { + values := make([]redis.Z, 0, len(record.Values)) + for _, v := range record.Values { + values = append(values, redis.Z{ + Score: v.Score, + Member: v.Value.Value, + }) + } + if err := pipe.ZAdd(ctx, key, values...).Err(); err != nil { + return err + } + if record.Expiration > 0 { + if err := pipe.Expire(ctx, key, record.Expiration).Err(); err != nil { + return err + } + } + } + + return nil } -func (l *loader) loadRedisDatabase(ctx context.Context, dbID int, db parser.Database, needTruncate bool) error { - pipe := l.client.Pipeline() - err := pipe.Select(ctx, dbID).Err() - if err != nil { - return err - } +func (l *Loader) loadRedisDatabase(ctx context.Context, dbID int, db parser.Database, needTruncate bool) error { + pipe := l.client.Pipeline() + err := pipe.Select(ctx, dbID).Err() + if err != nil { + return err + } - if needTruncate { - if err := pipe.FlushDB(ctx).Err(); err != nil { - return err - } - } + if needTruncate { + if err := pipe.FlushDB(ctx).Err(); err != nil { + return err + } + } - if err := l.loadKeys(ctx, pipe, db); err != nil { - return err - } + if err := l.loadKeys(ctx, pipe, db); err != nil { + return err + } - if err := l.loadSets(ctx, pipe, db); err != nil { - return err - } + if err := l.loadSets(ctx, pipe, db); err != nil { + return err + } - if err := l.loadHashes(ctx, pipe, db); err != nil { - return err - } + if err := l.loadHashes(ctx, pipe, db); err != nil { + return err + } - if err := l.loadLists(ctx, pipe, db); err != nil { - return err - } + if err := l.loadLists(ctx, pipe, db); err != nil { + return err + } - if err := l.loadSortedSets(ctx, pipe, db); err != nil { - return err - } + if err := l.loadSortedSets(ctx, pipe, db); err != nil { + return err + } - if _, err := pipe.Exec(ctx); err != nil { - return err - } + if _, err := pipe.Exec(ctx); err != nil { + return err + } - return nil + return nil } -func (l *loader) loadData(fixtures []*parser.Fixture) error { - truncatedDatabases := make(map[int]struct{}) - - for _, redisFixture := range fixtures { - for dbID, db := range redisFixture.Databases { - var needTruncate bool - if _, ok := truncatedDatabases[dbID]; !ok { - truncatedDatabases[dbID] = struct{}{} - 
needTruncate = true - } - err := l.loadRedisDatabase(context.Background(), dbID, db, needTruncate) - if err != nil { - return err - } - } - } - return nil +func (l *Loader) loadData(fixtures []*parser.Fixture) error { + truncatedDatabases := make(map[int]struct{}) + + for _, redisFixture := range fixtures { + for dbID, db := range redisFixture.Databases { + var needTruncate bool + if _, ok := truncatedDatabases[dbID]; !ok { + truncatedDatabases[dbID] = struct{}{} + needTruncate = true + } + err := l.loadRedisDatabase(context.Background(), dbID, db, needTruncate) + if err != nil { + return err + } + } + } + + return nil } diff --git a/main.go b/main.go index 37aa543..d8797b2 100644 --- a/main.go +++ b/main.go @@ -91,6 +91,7 @@ func main() { func initStorages(cfg config) storages { db := initDB(cfg) aerospikeClient := initAerospike(cfg) + return storages{ db: db, aerospike: aerospikeClient, @@ -98,41 +99,44 @@ func initStorages(cfg config) storages { } func initLoaders(storages storages, cfg config) fixtures.Loader { + if cfg.FixturesLocation == "" { + return nil + } + var fixturesLoader fixtures.Loader - if cfg.FixturesLocation != "" { - if storages.db != nil || storages.aerospike != nil { - fixturesLoader = fixtures.NewLoader(&fixtures.Config{ - DB: storages.db, - Aerospike: storages.aerospike, - Location: cfg.FixturesLocation, - Debug: cfg.Debug, - DbType: fixtures.FetchDbType(cfg.DbType), - }) - } else if cfg.DbType == fixtures.RedisParam { - redisOptions, err := redis.ParseURL(cfg.RedisURL) - if err != nil { - log.Panic("redis_url attribute is not a valid URL") - } - fixturesLoader = redisLoader.New(redisLoader.LoaderOptions{ - FixtureDir: cfg.FixturesLocation, - Redis: redisOptions, - }) - } else { - log.Fatal(errors.New("you should specify db_dsn to load fixtures")) + switch { + case storages.db != nil || storages.aerospike != nil: + fixturesLoader = fixtures.NewLoader(&fixtures.Config{ + DB: storages.db, + Aerospike: storages.aerospike, + Location: cfg.FixturesLocation, + Debug: cfg.Debug, + DbType: fixtures.FetchDbType(cfg.DbType), + }) + case cfg.DbType == fixtures.RedisParam: + redisOptions, err := redis.ParseURL(cfg.RedisURL) + if err != nil { + log.Panic("redis_url attribute is not a valid URL") } + fixturesLoader = redisLoader.New(redisLoader.LoaderOptions{ + FixtureDir: cfg.FixturesLocation, + Redis: redisOptions, + }) + default: + log.Fatal(errors.New("you should specify db_dsn to load fixtures")) } + return fixturesLoader } func validateConfig(cfg *config) { if cfg.Host == "" { log.Fatal(errors.New("service hostname not provided")) - } else { - if !strings.HasPrefix(cfg.Host, "http://") && !strings.HasPrefix(cfg.Host, "https://") { - cfg.Host = "http://" + cfg.Host - } - cfg.Host = strings.TrimRight(cfg.Host, "/") } + if !strings.HasPrefix(cfg.Host, "http://") && !strings.HasPrefix(cfg.Host, "https://") { + cfg.Host = "http://" + cfg.Host + } + cfg.Host = strings.TrimRight(cfg.Host, "/") if cfg.TestsLocation == "" { log.Fatal(errors.New("no tests location provided")) @@ -163,7 +167,7 @@ func initRunner( Host: cfg.Host, FixturesLoader: fixturesLoader, Variables: variables.New(), - HttpProxyURL: proxyURL, + HTTPProxyURL: proxyURL, }, yaml_file.NewLoader(cfg.TestsLocation), handler.HandleTest, @@ -177,6 +181,7 @@ func initAerospike(cfg config) *aerospikeAdapter.Client { if err != nil { log.Fatal("Couldn't connect to aerospike: ", err) } + return aerospikeAdapter.New(client, namespace) } @@ -190,6 +195,7 @@ func initDB(cfg config) *sql.DB { if err != nil { log.Fatal(err) } + 
return db } @@ -202,7 +208,12 @@ func getConfig() config { flag.StringVar(&cfg.Host, "host", "", "Target system hostname") flag.StringVar(&cfg.TestsLocation, "tests", "", "Path to tests file or directory") flag.StringVar(&cfg.DbDsn, "db_dsn", "", "DSN for the fixtures database (WARNING! Db tables will be truncated)") - flag.StringVar(&cfg.AerospikeHost, "aerospike_host", "", "Aerospike host for fixtures in form of 'host:port/namespace' (WARNING! Aerospike sets will be truncated)") + flag.StringVar( + &cfg.AerospikeHost, + "aerospike_host", + "", + "Aerospike host for fixtures in form of 'host:port/namespace' (WARNING! Aerospike sets will be truncated)", + ) flag.StringVar(&cfg.RedisURL, "redis_url", "", "Redis server URL for fixture loading") flag.StringVar(&cfg.FixturesLocation, "fixtures", "", "Path to fixtures directory") flag.StringVar(&cfg.EnvFile, "env-file", "", "Path to env-file") @@ -217,6 +228,7 @@ func getConfig() config { ) flag.Parse() + return cfg } @@ -240,11 +252,13 @@ func parseAerospikeHost(dsn string) (address string, port int, namespace string) func proxyURLFromEnv() (*url.URL, error) { if os.Getenv("HTTP_PROXY") != "" { - httpUrl, err := url.Parse(os.Getenv("HTTP_PROXY")) + httpURL, err := url.Parse(os.Getenv("HTTP_PROXY")) if err != nil { return nil, err } - return httpUrl, nil + + return httpURL, nil } + return nil, nil } diff --git a/mocks/loader.go b/mocks/loader.go index 6a7ac86..d785885 100644 --- a/mocks/loader.go +++ b/mocks/loader.go @@ -358,8 +358,8 @@ func (l *Loader) loadConstraintOfKind(kind string, def map[interface{}]interface } } -func readCompareParams(def map[interface{}]interface{}) (compare.CompareParams, error) { - params := compare.CompareParams{ +func readCompareParams(def map[interface{}]interface{}) (compare.Params, error) { + params := compare.Params{ IgnoreArraysOrdering: true, } @@ -557,7 +557,7 @@ func (l *Loader) loadBodyMatchesTextConstraint(def map[interface{}]interface{}) } func validateMapKeys(m map[interface{}]interface{}, allowedKeys ...string) error { - for k, _ := range m { + for k := range m { k := k.(string) found := false for _, ak := range allowedKeys { diff --git a/mocks/request_constraint.go b/mocks/request_constraint.go index 29a8387..42faba7 100644 --- a/mocks/request_constraint.go +++ b/mocks/request_constraint.go @@ -22,25 +22,25 @@ type verifier interface { Verify(r *http.Request) []error } -type nopConstraint struct {} +type nopConstraint struct{} func (c *nopConstraint) Verify(r *http.Request) []error { return nil } type bodyMatchesXMLConstraint struct { - expectedBody interface{} - compareParams compare.CompareParams + expectedBody interface{} + compareParams compare.Params } -func newBodyMatchesXMLConstraint(expected string, params compare.CompareParams) (verifier, error) { +func newBodyMatchesXMLConstraint(expected string, params compare.Params) (verifier, error) { expectedBody, err := xmlparsing.Parse(expected) if err != nil { return nil, err } res := &bodyMatchesXMLConstraint{ - expectedBody: expectedBody, + expectedBody: expectedBody, compareParams: params, } return res, nil @@ -66,18 +66,18 @@ func (c *bodyMatchesXMLConstraint) Verify(r *http.Request) []error { } type bodyMatchesJSONConstraint struct { - expectedBody interface{} - compareParams compare.CompareParams + expectedBody interface{} + compareParams compare.Params } -func newBodyMatchesJSONConstraint(expected string, params compare.CompareParams) (verifier, error) { +func newBodyMatchesJSONConstraint(expected string, params compare.Params) (verifier, 
error) { var expectedBody interface{} err := json.Unmarshal([]byte(expected), &expectedBody) if err != nil { return nil, err } res := &bodyMatchesJSONConstraint{ - expectedBody: expectedBody, + expectedBody: expectedBody, compareParams: params, } return res, nil @@ -102,20 +102,20 @@ func (c *bodyMatchesJSONConstraint) Verify(r *http.Request) []error { } type bodyJSONFieldMatchesJSONConstraint struct { - path string - expected interface{} - compareParams compare.CompareParams + path string + expected interface{} + compareParams compare.Params } -func newBodyJSONFieldMatchesJSONConstraint(path, expected string, params compare.CompareParams) (verifier, error) { +func newBodyJSONFieldMatchesJSONConstraint(path, expected string, params compare.Params) (verifier, error) { var v interface{} err := json.Unmarshal([]byte(expected), &v) if err != nil { return nil, err } res := &bodyJSONFieldMatchesJSONConstraint{ - path: path, - expected: v, + path: path, + expected: v, compareParams: params, } return res, nil @@ -270,7 +270,7 @@ func (c *queryRegexpConstraint) Verify(r *http.Request) (errors []error) { continue } - if ok, err := compare.CompareQuery(want, got); err != nil { + if ok, err := compare.Query(want, got); err != nil { errors = append(errors, fmt.Errorf( "'%s' parameters comparison failed. \n %s'", key, err.Error(), )) @@ -339,7 +339,6 @@ func newBodyMatchesTextConstraint(body, re string) (verifier, error) { func (c *bodyMatchesTextConstraint) Verify(r *http.Request) []error { ioBody, err := ioutil.ReadAll(r.Body) - if err != nil { return []error{err} } diff --git a/models/result.go b/models/result.go index e5ca548..ba3398a 100644 --- a/models/result.go +++ b/models/result.go @@ -47,8 +47,8 @@ func (r *Result) AllureStatus() (string, error) { } var ( - status = "passed" - testErrors []error = nil + status = "passed" + testErrors []error ) if len(r.Errors) != 0 { @@ -61,6 +61,7 @@ func (r *Result) AllureStatus() (string, error) { for _, err := range testErrors { errText = errText + err.Error() + "\n" } + return status, errors.New(errText) } diff --git a/output/allure_report/allure.go b/output/allure_report/allure.go index 06aee2a..7770af7 100644 --- a/output/allure_report/allure.go +++ b/output/allure_report/allure.go @@ -37,13 +37,16 @@ func (a *Allure) EndSuite(end time.Time) error { return err } } - //remove first/current suite + // remove first/current suite a.Suites = a.Suites[1:] + return nil } -var currentState = map[*beans.Suite]*beans.TestCase{} -var currentStep = map[*beans.Suite]*beans.Step{} +var ( + currentState = map[*beans.Suite]*beans.TestCase{} + currentStep = map[*beans.Suite]*beans.Step{} +) func (a *Allure) StartCase(testName string, start time.Time) *beans.TestCase { test := beans.NewTestCase(testName, start) @@ -52,6 +55,7 @@ func (a *Allure) StartCase(testName string, start time.Time) *beans.TestCase { currentState[suite] = test currentStep[suite] = step suite.AddTest(test) + return test } @@ -68,7 +72,7 @@ func (a *Allure) CreateStep(name string, stepFunc func()) { a.StartStep(name, time.Now()) // if test error stepFunc() - //end + // end a.EndStep(status, time.Now()) } @@ -104,7 +108,7 @@ func (a *Allure) PendingCase(testName string, start time.Time) { a.EndCase("pending", errors.New("test ignored"), start) } -//utils +// utils func getBufferInfo(buf bytes.Buffer, typ string) (string, string) { // exts,err := mime.ExtensionsByType(typ) // if err != nil { @@ -115,7 +119,8 @@ func getBufferInfo(buf bytes.Buffer, typ string) (string, string) { func writeBuffer(pathDir 
string, buf bytes.Buffer, ext string) (string, error) { fileName := uuid.New().String() + `-attachment.` + ext - err := ioutil.WriteFile(filepath.Join(pathDir, fileName), buf.Bytes(), 0644) + err := ioutil.WriteFile(filepath.Join(pathDir, fileName), buf.Bytes(), 0o644) + return fileName, err } @@ -124,9 +129,10 @@ func writeSuite(pathDir string, suite *beans.Suite) error { if err != nil { return err } - err = ioutil.WriteFile(filepath.Join(pathDir, uuid.New().String()+`-testsuite.xml`), b, 0644) + err = ioutil.WriteFile(filepath.Join(pathDir, uuid.New().String()+`-testsuite.xml`), b, 0o644) if err != nil { return err } + return nil } diff --git a/output/allure_report/allure_report.go b/output/allure_report/allure_report.go index bb15b0e..995f838 100644 --- a/output/allure_report/allure_report.go +++ b/output/allure_report/allure_report.go @@ -17,14 +17,13 @@ type AllureReportOutput struct { func NewOutput(suiteName, reportLocation string) *AllureReportOutput { resultsDir, _ := filepath.Abs(reportLocation) - if err := os.Mkdir(resultsDir, 0777); err != nil { - // likely dir is already exists - } + _ = os.Mkdir(resultsDir, 0o777) a := Allure{ Suites: nil, TargetDir: resultsDir, } a.StartSuite(suiteName, time.Now()) + return &AllureReportOutput{ reportLocation: reportLocation, allure: a, @@ -65,5 +64,5 @@ func (o *AllureReportOutput) Process(t models.TestInterface, result *models.Resu } func (o *AllureReportOutput) Finalize() { - o.allure.EndSuite(time.Now()) + _ = o.allure.EndSuite(time.Now()) } diff --git a/output/allure_report/beans/suite.go b/output/allure_report/beans/suite.go index 17c378d..047832a 100644 --- a/output/allure_report/beans/suite.go +++ b/output/allure_report/beans/suite.go @@ -38,19 +38,19 @@ func NewSuite(name string, start time.Time) *Suite { // SetEnd set end time for suite func (s *Suite) SetEnd(endTime time.Time) { if !endTime.IsZero() { - //strict UTC + // strict UTC s.End = endTime.UTC().UnixNano() / 1000 } else { s.End = time.Now().UTC().UnixNano() / 1000 } } -//suite has test-cases? +// suite has test-cases? 
func (s Suite) HasTests() bool { return len(s.TestCases.Cases) > 0 } -//add test in suite +// add test in suite func (s *Suite) AddTest(test *TestCase) { s.TestCases.Cases = append(s.TestCases.Cases, test) } diff --git a/output/allure_report/beans/test.go b/output/allure_report/beans/test.go index 0729176..192fe16 100644 --- a/output/allure_report/beans/test.go +++ b/output/allure_report/beans/test.go @@ -5,7 +5,7 @@ import ( "time" ) -//start new test case +// start new test case func NewTestCase(name string, start time.Time) *TestCase { test := new(TestCase) test.Name = name @@ -49,7 +49,7 @@ func (t *TestCase) SetDescription(desc string) { t.Desc = desc } -func (t *TestCase) SetDescriptionOrDefaultValue(desc string, defVal string) { +func (t *TestCase) SetDescriptionOrDefaultValue(desc, defVal string) { if desc == "" { t.Desc = defVal } else { diff --git a/output/console_colored/console_colored.go b/output/console_colored/console_colored.go index a23c669..63e1486 100644 --- a/output/console_colored/console_colored.go +++ b/output/console_colored/console_colored.go @@ -23,7 +23,7 @@ func NewOutput(verbose bool) *ConsoleColoredOutput { } } -func (o *ConsoleColoredOutput) Process(t models.TestInterface, result *models.Result) error { +func (o *ConsoleColoredOutput) Process(_ models.TestInterface, result *models.Result) error { if !result.Passed() || o.verbose { text, err := renderResult(result) if err != nil { @@ -37,6 +37,7 @@ func (o *ConsoleColoredOutput) Process(t models.TestInterface, result *models.Re o.coloredPrintf("\n") } } + return nil } @@ -98,6 +99,7 @@ Errors: if err := t.Execute(&buffer, result); err != nil { return "", err } + return buffer.String(), nil } diff --git a/output/testing/testing.go b/output/testing/testing.go index e345baf..b95c7df 100644 --- a/output/testing/testing.go +++ b/output/testing/testing.go @@ -8,13 +8,13 @@ import ( "github.com/lamoda/gonkey/models" ) -type TestingOutput struct{} +type Output struct{} -func NewOutput() *TestingOutput { - return &TestingOutput{} +func NewOutput() *Output { + return &Output{} } -func (o *TestingOutput) Process(t models.TestInterface, result *models.Result) error { +func (o *Output) Process(_ models.TestInterface, result *models.Result) error { if !result.Passed() { text, err := renderResult(result) if err != nil { @@ -22,6 +22,7 @@ func (o *TestingOutput) Process(t models.TestInterface, result *models.Result) e } fmt.Println(text) } + return nil } @@ -87,5 +88,6 @@ Errors: if err := t.Execute(&buffer, result); err != nil { return "", err } + return buffer.String(), nil } diff --git a/runner/request.go b/runner/request.go index 1ce57a3..9e41e8e 100644 --- a/runner/request.go +++ b/runner/request.go @@ -18,7 +18,7 @@ import ( func newClient(proxyURL *url.URL) *http.Client { transport := &http.Transport{ - TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, + TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, //nolint:gosec // Client is only used for testing. 
Proxy: http.ProxyURL(proxyURL), } @@ -31,7 +31,6 @@ func newClient(proxyURL *url.URL) *http.Client { } func newRequest(host string, test models.TestInterface) (req *http.Request, err error) { - if test.GetForm() != nil { req, err = newMultipartRequest(host, test) if err != nil { @@ -52,7 +51,6 @@ func newRequest(host string, test models.TestInterface) (req *http.Request, err } func newMultipartRequest(host string, test models.TestInterface) (*http.Request, error) { - if test.ContentType() != "" && test.ContentType() != "multipart/form-data" { return nil, fmt.Errorf( "test has unexpected Content-Type: %s, expected: multipart/form-data", @@ -89,7 +87,6 @@ func newMultipartRequest(host string, test models.TestInterface) (*http.Request, req.Header.Set("Content-Type", w.FormDataContentType()) return req, nil - } func addFiles(files map[string]string, w *multipart.Writer) error { @@ -120,6 +117,7 @@ func addFile(path string, w *multipart.Writer, name string) error { if _, err = io.Copy(fw, f); err != nil { return err } + return nil } @@ -142,7 +140,6 @@ func addFields(params url.Values, w *multipart.Writer) error { } func newCommonRequest(host string, test models.TestInterface) (*http.Request, error) { - body, err := test.ToJSON() if err != nil { return nil, err @@ -161,7 +158,6 @@ func newCommonRequest(host string, test models.TestInterface) (*http.Request, er } func request(test models.TestInterface, b *bytes.Buffer, host string) (*http.Request, error) { - req, err := http.NewRequest( strings.ToUpper(test.GetMethod()), host+test.Path()+test.ToQuery(), @@ -172,12 +168,13 @@ func request(test models.TestInterface, b *bytes.Buffer, host string) (*http.Req } for k, v := range test.Headers() { - if strings.ToLower(k) == "host" { + if strings.EqualFold(k, "host") { req.Host = v } else { req.Header.Add(k, v) } } + return req, nil } @@ -185,7 +182,9 @@ func actualRequestBody(req *http.Request) string { if req.Body != nil { reqBodyStream, _ := req.GetBody() reqBody, _ := ioutil.ReadAll(reqBodyStream) + return string(reqBody) } + return "" } diff --git a/runner/runner.go b/runner/runner.go index c64013e..744bba8 100644 --- a/runner/runner.go +++ b/runner/runner.go @@ -25,11 +25,13 @@ type Config struct { Mocks *mocks.Mocks MocksLoader *mocks.Loader Variables *variables.Variables - HttpProxyURL *url.URL + HTTPProxyURL *url.URL } -type testExecutor func(models.TestInterface) (*models.Result, error) -type testHandler func(models.TestInterface, testExecutor) error +type ( + testExecutor func(models.TestInterface) (*models.Result, error) + testHandler func(models.TestInterface, testExecutor) error +) type Runner struct { loader testloader.LoaderInterface @@ -46,7 +48,7 @@ func New(config *Config, loader testloader.LoaderInterface, handler testHandler) config: config, loader: loader, testExecutionHandler: handler, - client: newClient(config.HttpProxyURL), + client: newClient(config.HTTPProxyURL), } } @@ -91,6 +93,7 @@ func (r *Runner) Run() error { return nil, err } } + return testResult, nil } err := r.testExecutionHandler(test, testExecutor) @@ -109,7 +112,6 @@ var ( ) func (r *Runner) executeTest(v models.TestInterface) (*models.Result, error) { - if v.GetStatus() != "" { if v.GetStatus() == "broken" { return &models.Result{Test: v}, errTestBroken @@ -221,15 +223,14 @@ func (r *Runner) executeTest(v models.TestInterface) (*models.Result, error) { } func (r *Runner) setVariablesFromResponse(t models.TestInterface, contentType, body string, statusCode int) error { - varTemplates := t.GetVariablesToSet() if 
varTemplates == nil { return nil } - isJson := strings.Contains(contentType, "json") && body != "" + isJSON := strings.Contains(contentType, "json") && body != "" - vars, err := variables.FromResponse(varTemplates[statusCode], body, isJson) + vars, err := variables.FromResponse(varTemplates[statusCode], body, isJSON) if err != nil { return err } @@ -249,5 +250,6 @@ func checkHasFocused(tests []models.TestInterface) bool { return true } } + return false } diff --git a/runner/runner_testing.go b/runner/runner_testing.go index 0697650..a009e3a 100644 --- a/runner/runner_testing.go +++ b/runner/runner_testing.go @@ -87,11 +87,11 @@ func RunWithTesting(t *testing.T, params *RunWithTestingParams) { var proxyURL *url.URL if os.Getenv("HTTP_PROXY") != "" { - httpUrl, err := url.Parse(os.Getenv("HTTP_PROXY")) + httpURL, err := url.Parse(os.Getenv("HTTP_PROXY")) if err != nil { t.Fatal(err) } - proxyURL = httpUrl + proxyURL = httpURL } runner := initRunner(t, params, mocksLoader, fixturesLoader, proxyURL) @@ -116,7 +116,13 @@ func RunWithTesting(t *testing.T, params *RunWithTestingParams) { } } -func initRunner(t *testing.T, params *RunWithTestingParams, mocksLoader *mocks.Loader, fixturesLoader fixtures.Loader, proxyURL *url.URL) *Runner { +func initRunner( + t *testing.T, + params *RunWithTestingParams, + mocksLoader *mocks.Loader, + fixturesLoader fixtures.Loader, + proxyURL *url.URL, +) *Runner { yamlLoader := yaml_file.NewLoader(params.TestsDir) yamlLoader.SetFileFilter(os.Getenv("GONKEY_FILE_FILTER")) @@ -128,11 +134,12 @@ func initRunner(t *testing.T, params *RunWithTestingParams, mocksLoader *mocks.L MocksLoader: mocksLoader, FixturesLoader: fixturesLoader, Variables: variables.New(), - HttpProxyURL: proxyURL, + HTTPProxyURL: proxyURL, }, yamlLoader, handler.HandleTest, ) + return runner } @@ -168,5 +175,6 @@ func (h testingHandler) HandleTest(test models.TestInterface, executeTest testEx t.Fail() } }) + return returnErr } diff --git a/storage/aerospike/aerospike.go b/storage/aerospike/aerospike.go index 8467d3d..371863c 100644 --- a/storage/aerospike/aerospike.go +++ b/storage/aerospike/aerospike.go @@ -10,7 +10,6 @@ type Client struct { } func New(client *aerospike.Client, namespace string) *Client { - return &Client{ Client: client, namespace: namespace, @@ -21,7 +20,7 @@ func (c *Client) Truncate(set string) error { return c.Client.Truncate(nil, c.namespace, set, nil) } -func (c *Client) InsertBinMap(set string, key string, binMap map[string]interface{}) error { +func (c *Client) InsertBinMap(set, key string, binMap map[string]interface{}) error { aerospikeKey, err := aerospike.NewKey(c.namespace, set, key) if err != nil { return err @@ -39,5 +38,6 @@ func prepareBins(binmap map[string]interface{}) []*aerospike.Bin { } bins = append(bins, aerospike.NewBin(binName, binData)) } + return bins } diff --git a/testloader/yaml_file/parser.go b/testloader/yaml_file/parser.go index 6f3bf50..0e9f2cf 100644 --- a/testloader/yaml_file/parser.go +++ b/testloader/yaml_file/parser.go @@ -18,7 +18,7 @@ const ( gonkeyProtectSubstitute = "!protect!" 
 )
 
-var gonkeyProtectTemplate = regexp.MustCompile("{{\\s*\\$")
+var gonkeyProtectTemplate = regexp.MustCompile(`{{\s*\$`)
 
 func parseTestDefinitionFile(absPath string) ([]Test, error) {
 	data, err := ioutil.ReadFile(absPath)
@@ -35,12 +35,13 @@ func parseTestDefinitionFile(absPath string) ([]Test, error) {
 
 	var tests []Test
 
-	for _, definition := range testDefinitions {
-		if testCases, err := makeTestFromDefinition(absPath, definition); err != nil {
+	for i := range testDefinitions {
+		testCases, err := makeTestFromDefinition(absPath, testDefinitions[i])
+		if err != nil {
 			return nil, err
-		} else {
-			tests = append(tests, testCases...)
 		}
+
+		tests = append(tests, testCases...)
 	}
 
 	return tests, nil
@@ -74,6 +75,7 @@ func substituteArgsToMap(tmpl map[string]string, args map[string]interface{}) (m
 			return nil, err
 		}
 	}
+
 	return res, nil
 }
 
diff --git a/testloader/yaml_file/test.go b/testloader/yaml_file/test.go
index 06784de..87b226f 100644
--- a/testloader/yaml_file/test.go
+++ b/testloader/yaml_file/test.go
@@ -60,11 +60,13 @@ func (t *Test) GetResponses() map[int]string {
 
 func (t *Test) GetResponse(code int) (string, bool) {
 	val, ok := t.Responses[code]
+
 	return val, ok
 }
 
 func (t *Test) GetResponseHeaders(code int) (map[string]string, bool) {
 	val, ok := t.ResponseHeaders[code]
+
 	return val, ok
 }
 
@@ -134,8 +136,7 @@ func (t *Test) Headers() map[string]string {
 
 // TODO: it might make sense to do support of case-insensitive checking
 func (t *Test) ContentType() string {
-	ct, _ := t.HeadersVal["Content-Type"]
-	return ct
+	return t.HeadersVal["Content-Type"]
 }
 
 func (t *Test) DbQueryString() string {
@@ -188,6 +189,7 @@ func (t *Test) SetQuery(val string) {
 func (t *Test) SetMethod(val string) {
 	t.Method = val
 }
+
 func (t *Test) SetPath(val string) {
 	t.RequestURL = val
 }
diff --git a/testloader/yaml_file/test_definition.go b/testloader/yaml_file/test_definition.go
index 06a1601..a06d023 100644
--- a/testloader/yaml_file/test_definition.go
+++ b/testloader/yaml_file/test_definition.go
@@ -23,7 +23,7 @@ type TestDefinition struct {
 	HeadersVal       map[string]string      `json:"headers" yaml:"headers"`
 	CookiesVal       map[string]string      `json:"cookies" yaml:"cookies"`
 	Cases            []CaseData             `json:"cases" yaml:"cases"`
-	ComparisonParams compare.CompareParams  `json:"comparisonParams" yaml:"comparisonParams"`
+	ComparisonParams compare.Params         `json:"comparisonParams" yaml:"comparisonParams"`
 	FixtureFiles     []string               `json:"fixtures" yaml:"fixtures"`
 	MocksDefinition  map[string]interface{} `json:"mocks" yaml:"mocks"`
 	PauseValue       int                    `json:"pause" yaml:"pause"`
@@ -76,7 +76,6 @@ There can be two types of data in yaml-file:
 		: ""
 */
 func (v *VariablesToSet) UnmarshalYAML(unmarshal func(interface{}) error) error {
-
 	res := make(map[int]map[string]string)
 
 	// try to unmarshall as plaint text
@@ -90,6 +89,7 @@ func (v *VariablesToSet) UnmarshalYAML(unmarshal func(interface{}) error) error
 		}
 
 		*v = res
+
 		return nil
 	}
 
@@ -99,5 +99,6 @@ func (v *VariablesToSet) UnmarshalYAML(unmarshal func(interface{}) error) error
 	}
 
 	*v = res
+
 	return nil
 }
diff --git a/testloader/yaml_file/yaml_file.go b/testloader/yaml_file/yaml_file.go
index 9fd3bc2..5511d61 100644
--- a/testloader/yaml_file/yaml_file.go
+++ b/testloader/yaml_file/yaml_file.go
@@ -26,10 +26,11 @@ func (l *YamlFileLoader) Load() ([]models.TestInterface, error) {
 	}
 
 	ret := make([]models.TestInterface, len(fileTests))
-	for i, test := range fileTests {
-		test := test
+	for i := range fileTests {
+		test := fileTests[i]
 		ret[i] = &test
 	}
+
 	return ret, nil
 }
 
@@ -42,6 +43,7 @@ func (l *YamlFileLoader) parseTestsWithCases(path string) ([]Test, error) {
 	if err != nil {
 		return nil, err
 	}
+
 	return l.lookupPath(path, stat)
 }
 
@@ -51,6 +53,7 @@ func (l *YamlFileLoader) lookupPath(path string, fi os.FileInfo) ([]Test, error)
 		if !l.fitsFilter(path) {
 			return []Test{}, nil
 		}
+
 		return parseTestDefinitionFile(path)
 	}
 	files, err := ioutil.ReadDir(path)
@@ -68,6 +71,7 @@ func (l *YamlFileLoader) lookupPath(path string, fi os.FileInfo) ([]Test, error)
 		}
 		tests = append(tests, moreTests...)
 	}
+
 	return tests, nil
 }
 
@@ -75,6 +79,7 @@ func (l *YamlFileLoader) fitsFilter(fileName string) bool {
 	if l.fileFilter == "" {
 		return true
 	}
+
 	return strings.Contains(fileName, l.fileFilter)
 }
 
diff --git a/variables/response_parser.go b/variables/response_parser.go
index 95d1965..f92f2ab 100644
--- a/variables/response_parser.go
+++ b/variables/response_parser.go
@@ -6,13 +6,12 @@ import (
 	"github.com/tidwall/gjson"
 )
 
-func FromResponse(varsToSet map[string]string, body string, isJson bool) (vars *Variables, err error) {
-
+func FromResponse(varsToSet map[string]string, body string, isJSON bool) (vars *Variables, err error) {
 	names, paths := split(varsToSet)
 
 	switch {
-	case isJson:
-		vars, err = fromJson(names, paths, body)
+	case isJSON:
+		vars, err = fromJSON(names, paths, body)
 		if err != nil {
 			return nil, err
 		}
@@ -24,11 +23,9 @@ func FromResponse(varsToSet map[string]string, body string, isJson bool) (vars *
 	}
 
 	return vars, nil
-
 }
 
-func fromJson(names, paths []string, body string) (*Variables, error) {
-
+func fromJSON(names, paths []string, body string) (*Variables, error) {
 	vars := New()
 	results := gjson.GetMany(body, paths...)
 
@@ -46,7 +43,6 @@ func fromJson(names, paths []string, body string) (*Variables, error) {
 }
 
 func fromPlainText(names []string, body string) (*Variables, error) {
-
 	if len(names) != 1 {
 
 		return nil, fmt.Errorf(
@@ -59,10 +55,9 @@ func fromPlainText(names []string, body string) (*Variables, error) {
 }
 
 // split returns keys and values of given map as separate slices
-func split(m map[string]string) ([]string, []string) {
-
-	values := make([]string, 0, len(m))
-	keys := make([]string, 0, len(m))
+func split(m map[string]string) (keys, values []string) {
+	values = make([]string, 0, len(m))
+	keys = make([]string, 0, len(m))
 
 	for k, v := range m {
 		keys = append(keys, k)
diff --git a/variables/variables.go b/variables/variables.go
index 2ef73f5..21bae00 100644
--- a/variables/variables.go
+++ b/variables/variables.go
@@ -37,7 +37,6 @@ func (vs *Variables) Set(name, value string) {
 }
 
 func (vs *Variables) Apply(t models.TestInterface) models.TestInterface {
-
 	newTest := t.Clone()
 
 	if vs == nil {
@@ -96,7 +95,6 @@ func usedVariables(str string) (res []string) {
 // perform replaces all variables in str to their values
 // and returns result string
 func (vs *Variables) perform(str string) string {
-
 	varNames := usedVariables(str)
 
 	for _, k := range varNames {
@@ -130,7 +128,6 @@ func (vs *Variables) performInterface(value interface{}) {
 }
 
 func (vs *Variables) get(name string) *Variable {
-
 	v := vs.variables[name]
 	if v == nil {
 		v = NewFromEnvironment(name)
@@ -140,32 +137,32 @@ func (vs *Variables) performForm(form *models.Form) *models.Form {
-
 	files := make(map[string]string, len(form.Files))
 
 	for k, v := range form.Files {
 		files[k] = vs.perform(v)
 	}
+
 	return &models.Form{Files: files}
 }
 
 func (vs *Variables) performHeaders(headers map[string]string) map[string]string {
-
 	res := make(map[string]string)
 
 	for k, v := range headers {
 		res[k] = vs.perform(v)
 	}
+
 	return res
 }
 
 func (vs *Variables) performResponses(responses map[int]string) map[int]string {
-
 	res := make(map[int]string)
 
 	for k, v := range responses {
 		res[k] = vs.perform(v)
 	}
+
 	return res
 }
diff --git a/xmlparsing/parser.go b/xmlparsing/parser.go
index 6adf915..d14b093 100644
--- a/xmlparsing/parser.go
+++ b/xmlparsing/parser.go
@@ -4,9 +4,9 @@ import (
 	"encoding/xml"
 )
 
-func Parse(rawXml string) (map[string]interface{}, error) {
+func Parse(rawXML string) (map[string]interface{}, error) {
 	var n node
-	if err := xml.Unmarshal([]byte(rawXml), &n); err != nil {
+	if err := xml.Unmarshal([]byte(rawXML), &n); err != nil {
 		return nil, err
 	}
 
@@ -43,6 +43,7 @@ func buildArray(nodes []node) []interface{} {
 	for i, n := range nodes {
 		arr[i] = buildNode(n)
 	}
+
 	return arr
 }
 
@@ -53,6 +54,7 @@ func buildNode(n node) interface{} {
 	if hasAttrs && hasChildren {
 		result := buildMap(n.Children)
 		result["-attrs"] = buildAttributes(n.Attrs)
+
 		return result
 	}
 
@@ -75,6 +77,7 @@ func buildAttributes(attrs []xml.Attr) map[string]string {
 	for _, attr := range attrs {
 		m[joinXMLName(attr.Name)] = attr.Value
 	}
+
 	return m
 }
 
@@ -89,13 +92,14 @@ func regroupNodesByName(nodes []node) map[string][]node {
 
 		grouped[name] = append(grouped[name], n)
 	}
+
 	return grouped
 }
 
-func joinXMLName(XMLName xml.Name) string {
-	name := XMLName.Local
-	if len(XMLName.Space) != 0 {
-		name = XMLName.Space + ":" + name
+func joinXMLName(xmlName xml.Name) string {
+	name := xmlName.Local
+	if xmlName.Space != "" {
+		name = xmlName.Space + ":" + name
 	}
 
 	return name