From 48f01b9306bbe5511d4e5da926ee6e9b9759263b Mon Sep 17 00:00:00 2001 From: Roshan Piyush Date: Fri, 22 Sep 2023 09:48:38 +0530 Subject: [PATCH 01/46] Fix health check of gateway service --- deploy/docker/docker-compose.yml | 2 +- deploy/docker/scripts/load.sh | 8 ++++++++ deploy/docker/scripts/save.sh | 8 ++++++++ 3 files changed, 17 insertions(+), 1 deletion(-) create mode 100755 deploy/docker/scripts/load.sh create mode 100755 deploy/docker/scripts/save.sh diff --git a/deploy/docker/docker-compose.yml b/deploy/docker/docker-compose.yml index a6d8ee10..af8593e6 100755 --- a/deploy/docker/docker-compose.yml +++ b/deploy/docker/docker-compose.yml @@ -245,7 +245,7 @@ services: #ports: # - "${LISTEN_IP:-127.0.0.1}:8443:443" # https healthcheck: - test: curl -vk 0.0.0.0:443 + test: "/bin/bash -c 'printf \"GET / HTTP/1.1\n\n\" > /dev/tcp/127.0.0.1/443; exit $?;'" interval: 15s timeout: 15s retries: 15 diff --git a/deploy/docker/scripts/load.sh b/deploy/docker/scripts/load.sh new file mode 100755 index 00000000..abbb8a98 --- /dev/null +++ b/deploy/docker/scripts/load.sh @@ -0,0 +1,8 @@ +docker load -i mailhog.tar +docker load -i gateway-service.tar +docker load -i crapi-identity.tar +docker load -i crapi-community.tar +docker load -i crapi-workshop.tar +docker load -i crapi-web.tar +docker load -i postgres.tar +docker load -i mongo.tar diff --git a/deploy/docker/scripts/save.sh b/deploy/docker/scripts/save.sh new file mode 100755 index 00000000..ca63187a --- /dev/null +++ b/deploy/docker/scripts/save.sh @@ -0,0 +1,8 @@ +docker save crapi/mailhog:develop -o mailhog.tar +docker save crapi/gateway-service:develop -o gateway-service.tar +docker save crapi/crapi-identity:develop -o crapi-identity.tar +docker save crapi/crapi-community:develop -o crapi-community.tar +docker save crapi/crapi-workshop:develop -o crapi-workshop.tar +docker save crapi/crapi-web:develop -o crapi-web.tar +docker save postgres:14 -o postgres.tar +docker save mongo:4.4 -o mongo.tar From b53be04f2a7b8e2115e49c258e9da97524affe16 Mon Sep 17 00:00:00 2001 From: Roshan Piyush Date: Fri, 22 Sep 2023 10:35:46 +0530 Subject: [PATCH 02/46] Update docker compose --- deploy/docker/docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deploy/docker/docker-compose.yml b/deploy/docker/docker-compose.yml index af8593e6..9b87e03d 100755 --- a/deploy/docker/docker-compose.yml +++ b/deploy/docker/docker-compose.yml @@ -245,7 +245,7 @@ services: #ports: # - "${LISTEN_IP:-127.0.0.1}:8443:443" # https healthcheck: - test: "/bin/bash -c 'printf \"GET / HTTP/1.1\n\n\" > /dev/tcp/127.0.0.1/443; exit $?;'" + test: echo -n "GET / HTTP/1.1\n\n" > /dev/tcp/api.mypremiumdealership.com/443 interval: 15s timeout: 15s retries: 15 From 90daf85d0e574eed4bb521c6532258a8ce53f887 Mon Sep 17 00:00:00 2001 From: Mathew Jose Mammoottil Date: Mon, 27 Nov 2023 15:45:41 +0530 Subject: [PATCH 03/46] jwt configurable --- deploy/docker/docker-compose.yml | 72 +++++++++---------- deploy/helm/templates/identity/config.yaml | 3 +- deploy/helm/values.yaml | 3 +- deploy/k8s/base/identity/config.yaml | 3 +- services/identity/.env | 1 + .../src/main/resources/application.properties | 2 +- 6 files changed, 44 insertions(+), 40 deletions(-) diff --git a/deploy/docker/docker-compose.yml b/deploy/docker/docker-compose.yml index 9b87e03d..5589ef6e 100755 --- a/deploy/docker/docker-compose.yml +++ b/deploy/docker/docker-compose.yml @@ -10,7 +10,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
- services: crapi-identity: @@ -39,6 +38,7 @@ services: - SMTP_FROM=no-reply@example.com - SMTP_AUTH=true - SMTP_STARTTLS=true + - JWT_EXPIRATION=604800000 - ENABLE_LOG4J=${ENABLE_LOG4J:-false} - API_GATEWAY_URL=https://api.mypremiumdealership.com - TLS_ENABLED=${TLS_ENABLED:-false} @@ -53,10 +53,10 @@ services: mongodb: condition: service_healthy healthcheck: - test: /app/health.sh - interval: 15s - timeout: 15s - retries: 15 + test: /app/health.sh + interval: 15s + timeout: 15s + retries: 15 deploy: resources: limits: @@ -92,10 +92,10 @@ services: crapi-identity: condition: service_healthy healthcheck: - test: /app/health.sh - interval: 15s - timeout: 15s - retries: 15 + test: /app/health.sh + interval: 15s + timeout: 15s + retries: 15 deploy: resources: limits: @@ -135,10 +135,10 @@ services: crapi-community: condition: service_healthy healthcheck: - test: /app/health.sh - interval: 15s - timeout: 15s - retries: 15 + test: /app/health.sh + interval: 15s + timeout: 15s + retries: 15 deploy: resources: limits: @@ -164,10 +164,10 @@ services: crapi-workshop: condition: service_healthy healthcheck: - test: curl 0.0.0.0:80/web/health - interval: 15s - timeout: 15s - retries: 15 + test: curl 0.0.0.0:80/web/health + interval: 15s + timeout: 15s + retries: 15 deploy: resources: limits: @@ -184,10 +184,10 @@ services: #ports: # - "${LISTEN_IP:-127.0.0.1}:5432:5432" healthcheck: - test: ["CMD-SHELL", "pg_isready"] - interval: 15s - timeout: 15s - retries: 15 + test: [ "CMD-SHELL", "pg_isready" ] + interval: 15s + timeout: 15s + retries: 15 volumes: - postgresql-data:/var/lib/postgresql/data/ deploy: @@ -205,11 +205,11 @@ services: #ports: # - "${LISTEN_IP:-127.0.0.1}:27017:27017" healthcheck: - test: echo 'db.runCommand("ping").ok' | mongo mongodb:27017/test --quiet - interval: 15s - timeout: 15s - retries: 15 - start_period: 20s + test: echo 'db.runCommand("ping").ok' | mongo mongodb:27017/test --quiet + interval: 15s + timeout: 15s + retries: 15 + start_period: 20s volumes: - mongodb-data:/data/db deploy: @@ -226,13 +226,13 @@ services: MH_MONGO_URI: admin:crapisecretpassword@mongodb:27017 MH_STORAGE: mongodb ports: - # - "127.0.0.1:1025:1025" # smtp server + # - "127.0.0.1:1025:1025" # smtp server - "${LISTEN_IP:-127.0.0.1}:8025:8025" # Mail ui healthcheck: - test: ["CMD", "nc", "-z", "localhost", "8025"] - interval: 15s - timeout: 15s - retries: 15 + test: [ "CMD", "nc", "-z", "localhost", "8025" ] + interval: 15s + timeout: 15s + retries: 15 deploy: resources: limits: @@ -245,11 +245,11 @@ services: #ports: # - "${LISTEN_IP:-127.0.0.1}:8443:443" # https healthcheck: - test: echo -n "GET / HTTP/1.1\n\n" > /dev/tcp/api.mypremiumdealership.com/443 - interval: 15s - timeout: 15s - retries: 15 - start_period: 15s + test: echo -n "GET / HTTP/1.1\n\n" > /dev/tcp/api.mypremiumdealership.com/443 + interval: 15s + timeout: 15s + retries: 15 + start_period: 15s deploy: resources: limits: diff --git a/deploy/helm/templates/identity/config.yaml b/deploy/helm/templates/identity/config.yaml index c79facdb..a6f989eb 100644 --- a/deploy/helm/templates/identity/config.yaml +++ b/deploy/helm/templates/identity/config.yaml @@ -26,7 +26,8 @@ data: SMTP_EMAIL: {{ .Values.identity.config.smtpEmail }} SMTP_PASS: {{ .Values.identity.config.smtpPass }} SMTP_FROM: {{ .Values.identity.config.smtpFrom }} - SMTP_AUTH: {{ .Values.identity.config.smtpAuth | quote }} + SMTP_AUTH: {{ .Values.identity.config.smtpAuth | 604800000 }} + JWT_EXPIRATION: {{ .Values.identity.config.jwtExpiration | quote }} SMTP_STARTTLS: {{ 
.Values.identity.config.smtpStartTLS | quote }} SERVER_PORT: {{ .Values.identity.port | quote }} API_GATEWAY_URL: {{ if .Values.apiGatewayServiceInstall }}"https://{{ .Values.apiGatewayService.service.name }}"{{ else }}{{ .Values.apiGatewayServiceUrl }}{{ end }} diff --git a/deploy/helm/values.yaml b/deploy/helm/values.yaml index d64a239a..41259177 100644 --- a/deploy/helm/values.yaml +++ b/deploy/helm/values.yaml @@ -8,7 +8,7 @@ enableShellInjection: true imagePullPolicy: Always apiGatewayServiceUrl: https://api.mypremiumdealership.com apiGatewayServiceInstall: true -apiGatewayPassword: +apiGatewayPassword: tlsEnabled: false waitForK8sResources: @@ -73,6 +73,7 @@ identity: smtpFrom: "no-reply@example.com" smtpAuth: true smtpStartTLS: true + jwtExpiration: 604800000 keyStoreType: PKCS12 keyStore: classpath:certs/server.p12 keyStorePassword: passw0rd diff --git a/deploy/k8s/base/identity/config.yaml b/deploy/k8s/base/identity/config.yaml index 39cf6d9c..0615da60 100644 --- a/deploy/k8s/base/identity/config.yaml +++ b/deploy/k8s/base/identity/config.yaml @@ -7,7 +7,7 @@ metadata: data: DB_HOST: postgresdb DB_DRIVER: postgresql - JWT_SECRET: crapi #Used for creating a JWT. Can be anything + JWT_SECRET: crapi #Used for creating a JWT. Can be anything DB_USER: admin DB_PASSWORD: crapisecretpassword DB_NAME: crapi @@ -24,5 +24,6 @@ data: SMTP_PASS: "xxxxxxxxxxxxxx" SMTP_FROM: "no-reply@example.com" SMTP_AUTH: "true" + JWT_EXPIRATION: "604800000" SMTP_STARTTLS: "true" SERVER_PORT: "8080" diff --git a/services/identity/.env b/services/identity/.env index 96026081..f4767d05 100644 --- a/services/identity/.env +++ b/services/identity/.env @@ -19,6 +19,7 @@ export SMTP_FROM=no-reply@example.com export ENABLE_SHELL_INJECTION=false export JWT_SECRET=crapi export SMTP_HOST=smtp.example.com +export JWT_EXPIRATION=604800000 export API_GATEWAY_URL=https://api.mypremiumdealership.com export TLS_ENABLED=false export TLS_KEYSTORE_TYPE=PKCS12 diff --git a/services/identity/src/main/resources/application.properties b/services/identity/src/main/resources/application.properties index ab8ee158..9a433948 100644 --- a/services/identity/src/main/resources/application.properties +++ b/services/identity/src/main/resources/application.properties @@ -15,7 +15,7 @@ spring.jpa.hibernate.ddl-auto=update app.jwksJson=${JWKS} -app.jwtExpiration=604800000 +app.jwtExpiration=${JWT_EXPIRATION} #Mail Configuration mail.from=${SMTP_FROM} From d9837f8e8173f888337cf8cd57f4cd420a4ecea5 Mon Sep 17 00:00:00 2001 From: Mathew Jose Mammoottil Date: Mon, 27 Nov 2023 17:18:54 +0530 Subject: [PATCH 04/46] fix bug --- deploy/helm/templates/identity/config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deploy/helm/templates/identity/config.yaml b/deploy/helm/templates/identity/config.yaml index a6f989eb..cce520de 100644 --- a/deploy/helm/templates/identity/config.yaml +++ b/deploy/helm/templates/identity/config.yaml @@ -26,8 +26,8 @@ data: SMTP_EMAIL: {{ .Values.identity.config.smtpEmail }} SMTP_PASS: {{ .Values.identity.config.smtpPass }} SMTP_FROM: {{ .Values.identity.config.smtpFrom }} - SMTP_AUTH: {{ .Values.identity.config.smtpAuth | 604800000 }} - JWT_EXPIRATION: {{ .Values.identity.config.jwtExpiration | quote }} + SMTP_AUTH: {{ .Values.identity.config.smtpAuth | quote }} + JWT_EXPIRATION: {{ .Values.identity.config.jwtExpiration | 604800000 }} SMTP_STARTTLS: {{ .Values.identity.config.smtpStartTLS | quote }} SERVER_PORT: {{ .Values.identity.port | quote }} API_GATEWAY_URL: {{ if 
.Values.apiGatewayServiceInstall }}"https://{{ .Values.apiGatewayService.service.name }}"{{ else }}{{ .Values.apiGatewayServiceUrl }}{{ end }} From 58dc77a718a573ef1a3c32bdfed80354b06aef0f Mon Sep 17 00:00:00 2001 From: Mathew Jose Mammoottil Date: Mon, 27 Nov 2023 17:21:49 +0530 Subject: [PATCH 05/46] fix bug - 2 --- deploy/helm/templates/identity/config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deploy/helm/templates/identity/config.yaml b/deploy/helm/templates/identity/config.yaml index cce520de..8bd427f4 100644 --- a/deploy/helm/templates/identity/config.yaml +++ b/deploy/helm/templates/identity/config.yaml @@ -27,7 +27,7 @@ data: SMTP_PASS: {{ .Values.identity.config.smtpPass }} SMTP_FROM: {{ .Values.identity.config.smtpFrom }} SMTP_AUTH: {{ .Values.identity.config.smtpAuth | quote }} - JWT_EXPIRATION: {{ .Values.identity.config.jwtExpiration | 604800000 }} + JWT_EXPIRATION: {{ .Values.identity.config.jwtExpiration | quote }} SMTP_STARTTLS: {{ .Values.identity.config.smtpStartTLS | quote }} SERVER_PORT: {{ .Values.identity.port | quote }} API_GATEWAY_URL: {{ if .Values.apiGatewayServiceInstall }}"https://{{ .Values.apiGatewayService.service.name }}"{{ else }}{{ .Values.apiGatewayServiceUrl }}{{ end }} From 07d7afc6dcc9a0b7734653214b88db752f57bba8 Mon Sep 17 00:00:00 2001 From: Mathew Jose Mammoottil Date: Mon, 27 Nov 2023 19:41:11 +0530 Subject: [PATCH 06/46] added long and short int convertor --- .../src/main/java/com/crapi/config/JwtProvider.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/services/identity/src/main/java/com/crapi/config/JwtProvider.java b/services/identity/src/main/java/com/crapi/config/JwtProvider.java index aeee3583..5826b40f 100644 --- a/services/identity/src/main/java/com/crapi/config/JwtProvider.java +++ b/services/identity/src/main/java/com/crapi/config/JwtProvider.java @@ -28,6 +28,7 @@ import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; +import java.math.BigDecimal; import java.net.URI; import java.net.URLConnection; import java.nio.charset.StandardCharsets; @@ -45,7 +46,7 @@ public class JwtProvider { private static final Logger logger = LoggerFactory.getLogger(JwtProvider.class); @Value("${app.jwtExpiration}") - private int jwtExpiration; + private String jwtExpiration; private KeyPair keyPair; @@ -82,11 +83,14 @@ public String getPublicJwkSet() { * @return generated token with expire date */ public String generateJwtToken(User user) { + int jwtExpirationInt; + if (jwtExpiration.contains("e+")) jwtExpirationInt = new BigDecimal(jwtExpiration).intValue(); + else jwtExpirationInt = Integer.parseInt(jwtExpiration); return Jwts.builder() .setSubject((user.getEmail())) .claim("role", user.getRole().getName()) .setIssuedAt(new Date()) - .setExpiration(new Date((new Date()).getTime() + jwtExpiration)) + .setExpiration(new Date((new Date()).getTime() + jwtExpirationInt)) .signWith(SignatureAlgorithm.RS256, this.keyPair.getPrivate()) .compact(); } From 450a40ffbd7662f4f023f699713fa5976f629872 Mon Sep 17 00:00:00 2001 From: Roshan Piyush Date: Thu, 7 Dec 2023 21:19:09 +0530 Subject: [PATCH 07/46] Fix Community Server open connections (#223) --- services/community/api/router/routes.go | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/services/community/api/router/routes.go b/services/community/api/router/routes.go index 026b5d4d..63032e5e 100644 --- a/services/community/api/router/routes.go +++ b/services/community/api/router/routes.go @@ 
-18,6 +18,7 @@ import ( "fmt" "net/http" "os" + "time" "crapi.proj/goservice/api/config" "crapi.proj/goservice/api/controllers" @@ -59,6 +60,12 @@ func (server *Server) InitializeRoutes() *mux.Router { func (server *Server) Run(addr string) { fmt.Println("Listening to port " + os.Getenv("SERVER_PORT")) + srv := &http.Server{ + Addr: addr, + Handler: server.Router, + ReadTimeout: 30 * time.Second, + WriteTimeout: 30 * time.Second, + } if utils.IsTLSEnabled() { // Check if env variable TLS_CERTIFICATE is set then use it as certificate else default to certs/server.crt certificate, is_cert := os.LookupEnv("TLS_CERTIFICATE") @@ -70,12 +77,12 @@ func (server *Server) Run(addr string) { if !is_key || key == "" { key = "certs/server.key" } - err := http.ListenAndServeTLS(addr, certificate, key, server.Router) + err := srv.ListenAndServeTLS(certificate, key) if err != nil { fmt.Println(err) } } else { - err := http.ListenAndServe(addr, server.Router) + err := srv.ListenAndServe() if err != nil { fmt.Println(err) } From c3d5c02811d17d9188c8ad6845dec56ea8b087bb Mon Sep 17 00:00:00 2001 From: Roshan Piyush Date: Thu, 7 Dec 2023 22:16:26 +0530 Subject: [PATCH 08/46] Update .env (#220) * Update .env * Update .env * Update .env --- deploy/docker/.env | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/deploy/docker/.env b/deploy/docker/.env index ec4df7da..d263946e 100644 --- a/deploy/docker/.env +++ b/deploy/docker/.env @@ -1,4 +1,8 @@ IDENTITY_SERVER_PORT=8080 COMMUNITY_SERVER_PORT=8087 WORKSHOP_SERVER_PORT=8000 -ENABLE_LOG4J=false \ No newline at end of file +ENABLE_SHELL_INJECTION=false +ENABLE_LOG4J=false +LISTEN_IP="127.0.0.1" +TLS_ENABLED=false +VERSION=latest From 6bfa7eb5550ddc316eeaf54f9b2b814227ca92f8 Mon Sep 17 00:00:00 2001 From: Roshan Piyush Date: Tue, 9 Jan 2024 16:24:41 +0530 Subject: [PATCH 09/46] Add pagination support for posts (#226) --- .../api/controllers/post_controller.go | 27 +++++++++++++++++-- services/community/api/models/post.go | 5 ++-- 2 files changed, 28 insertions(+), 4 deletions(-) diff --git a/services/community/api/controllers/post_controller.go b/services/community/api/controllers/post_controller.go index 646092b3..2372f2e5 100644 --- a/services/community/api/controllers/post_controller.go +++ b/services/community/api/controllers/post_controller.go @@ -18,10 +18,11 @@ import ( "encoding/json" "io/ioutil" "net/http" + "strconv" - "github.com/gorilla/mux" "crapi.proj/goservice/api/models" "crapi.proj/goservice/api/responses" + "github.com/gorilla/mux" ) //AddNewPost add post in database, @@ -72,8 +73,30 @@ func (s *Server) GetPostByID(w http.ResponseWriter, r *http.Request) { //GetPost Vulnerabilities func (s *Server) GetPost(w http.ResponseWriter, r *http.Request) { //post := models.Post{} + limit_param := r.URL.Query().Get("limit") + limit := 30 + err := error(nil) + if limit_param != "" { + // Parse limit_param and set to limit + limit, err = strconv.Atoi(limit_param) + if err != nil { + limit = 30 + } + } + if limit > 50 { + limit = 50 + } + + page_param := r.URL.Query().Get("page") + page := 0 + if page_param != "" { + page, err = strconv.Atoi(page_param) + if err != nil { + page = 0 + } + } + posts, err := models.FindAllPost(s.Client, page, limit) - posts, err := models.FindAllPost(s.Client) if err != nil { responses.ERROR(w, http.StatusInternalServerError, err) return diff --git a/services/community/api/models/post.go b/services/community/api/models/post.go index bd324cce..3d81537e 100644 --- a/services/community/api/models/post.go +++ 
b/services/community/api/models/post.go @@ -104,12 +104,13 @@ func GetPostByID(client *mongo.Client, ID string) (Post, error) { } //FindAllPost return all recent post -func FindAllPost(client *mongo.Client) ([]interface{}, error) { +func FindAllPost(client *mongo.Client, page int, limit int) ([]interface{}, error) { post := []Post{} options := options.Find() options.SetSort(bson.D{{"_id", -1}}) - options.SetLimit(10) + options.SetLimit(int64(limit)) + options.SetSkip(int64(page * limit)) collection := client.Database("crapi").Collection("post") cur, err := collection.Find(context.Background(), bson.D{}, options) if err != nil { From 4c8655ecad472d50308633b03b9d3501ed5b493d Mon Sep 17 00:00:00 2001 From: Roshan Piyush Date: Wed, 10 Jan 2024 02:20:48 +0530 Subject: [PATCH 10/46] Workshop management api and add limits on request (#227) * Workshop management user details api * Limit offset support for service req query * Update node version to 16 --- .github/workflows/ci.yml | 2 +- deploy/docker/docker-compose.yml | 2 +- deploy/helm/Chart.yaml | 2 +- deploy/helm/templates/identity/config.yaml | 2 +- deploy/helm/values.yaml | 2 +- openapi-spec/openapi-spec.json | 416 ++++- .../crAPI.postman_collection.json | 1576 +++++++++++++---- .../api/controllers/post_controller.go | 12 +- services/community/api/models/post.go | 4 +- .../src/main/resources/application.properties | 2 +- services/workshop/crapi/admin.py | 2 +- services/workshop/crapi/apps.py | 22 +- services/workshop/crapi/mechanic/models.py | 2 +- .../workshop/crapi/mechanic/serializers.py | 2 +- services/workshop/crapi/mechanic/tests.py | 2 +- services/workshop/crapi/mechanic/views.py | 23 +- .../workshop/crapi/merchant/serializers.py | 4 +- services/workshop/crapi/merchant/tests.py | 6 +- services/workshop/crapi/merchant/views.py | 7 +- .../workshop/crapi/migrations/0001_initial.py | 81 +- services/workshop/crapi/shop/models.py | 4 +- services/workshop/crapi/shop/serializers.py | 2 +- services/workshop/crapi/shop/tests.py | 2 +- services/workshop/crapi/shop/views.py | 6 +- services/workshop/crapi/urls.py | 1 + .../workshop/{ => crapi}/user/__init__.py | 0 services/workshop/{ => crapi}/user/models.py | 1 + .../{user/apps.py => crapi/user/sapps.py} | 0 .../workshop/{ => crapi}/user/serializers.py | 16 +- services/workshop/crapi/user/tests.py | 128 ++ .../__init__.py => crapi/user/urls.py} | 12 + services/workshop/crapi/user/views.py | 79 + services/workshop/crapi_site/settings.py | 4 +- services/workshop/requirements.txt | 3 +- .../workshop/user/migrations/0001_initial.py | 102 -- services/workshop/utils/jwt.py | 2 +- services/workshop/utils/messages.py | 2 + services/workshop/utils/mock_methods.py | 38 +- 38 files changed, 1963 insertions(+), 610 deletions(-) rename services/workshop/{ => crapi}/user/__init__.py (100%) rename services/workshop/{ => crapi}/user/models.py (99%) rename services/workshop/{user/apps.py => crapi/user/sapps.py} (100%) rename services/workshop/{ => crapi}/user/serializers.py (77%) create mode 100644 services/workshop/crapi/user/tests.py rename services/workshop/{user/migrations/__init__.py => crapi/user/urls.py} (66%) create mode 100644 services/workshop/crapi/user/views.py delete mode 100644 services/workshop/user/migrations/0001_initial.py diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4d7a41c1..0c4fddbc 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -177,7 +177,7 @@ jobs: - name: Install Node uses: actions/setup-node@v3 with: - node-version: 14 + node-version: 16 
- name: Install newman run: npm install -g newman diff --git a/deploy/docker/docker-compose.yml b/deploy/docker/docker-compose.yml index 5589ef6e..577f93d8 100755 --- a/deploy/docker/docker-compose.yml +++ b/deploy/docker/docker-compose.yml @@ -245,7 +245,7 @@ services: #ports: # - "${LISTEN_IP:-127.0.0.1}:8443:443" # https healthcheck: - test: echo -n "GET / HTTP/1.1\n\n" > /dev/tcp/api.mypremiumdealership.com/443 + test: echo -n "GET / HTTP/1.1\n\n" > /dev/tcp/127.0.0.1/443 interval: 15s timeout: 15s retries: 15 diff --git a/deploy/helm/Chart.yaml b/deploy/helm/Chart.yaml index bd2c14e3..2729c248 100644 --- a/deploy/helm/Chart.yaml +++ b/deploy/helm/Chart.yaml @@ -15,7 +15,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.1.1 +version: 0.1.4 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. Versions are not expected to diff --git a/deploy/helm/templates/identity/config.yaml b/deploy/helm/templates/identity/config.yaml index 8bd427f4..06dc2cb5 100644 --- a/deploy/helm/templates/identity/config.yaml +++ b/deploy/helm/templates/identity/config.yaml @@ -27,7 +27,7 @@ data: SMTP_PASS: {{ .Values.identity.config.smtpPass }} SMTP_FROM: {{ .Values.identity.config.smtpFrom }} SMTP_AUTH: {{ .Values.identity.config.smtpAuth | quote }} - JWT_EXPIRATION: {{ .Values.identity.config.jwtExpiration | quote }} + JWT_EXPIRATION: {{ .Values.jwtExpiration | quote }} SMTP_STARTTLS: {{ .Values.identity.config.smtpStartTLS | quote }} SERVER_PORT: {{ .Values.identity.port | quote }} API_GATEWAY_URL: {{ if .Values.apiGatewayServiceInstall }}"https://{{ .Values.apiGatewayService.service.name }}"{{ else }}{{ .Values.apiGatewayServiceUrl }}{{ end }} diff --git a/deploy/helm/values.yaml b/deploy/helm/values.yaml index 41259177..6735da7c 100644 --- a/deploy/helm/values.yaml +++ b/deploy/helm/values.yaml @@ -10,6 +10,7 @@ apiGatewayServiceUrl: https://api.mypremiumdealership.com apiGatewayServiceInstall: true apiGatewayPassword: tlsEnabled: false +jwtExpiration: 604800000 waitForK8sResources: image: groundnuty/k8s-wait-for:v1.3 @@ -73,7 +74,6 @@ identity: smtpFrom: "no-reply@example.com" smtpAuth: true smtpStartTLS: true - jwtExpiration: 604800000 keyStoreType: PKCS12 keyStore: classpath:certs/server.p12 keyStorePassword: passw0rd diff --git a/openapi-spec/openapi-spec.json b/openapi-spec/openapi-spec.json index 96f71227..cb774075 100644 --- a/openapi-spec/openapi-spec.json +++ b/openapi-spec/openapi-spec.json @@ -28,19 +28,19 @@ "properties" : { "email" : { "type" : "string", - "example": "Cristobal.Weissnat@example.com" + "example" : "Cristobal.Weissnat@example.com" }, "name" : { "type" : "string", - "example": "Cristobal.Weissnat" + "example" : "Cristobal.Weissnat" }, "number" : { "type" : "string", - "example": "6915656974" + "example" : "6915656974" }, "password" : { "type" : "string", - "example": "5hmb0gvyC__hVQg" + "example" : "5hmb0gvyC__hVQg" } } } @@ -97,11 +97,11 @@ "properties" : { "email" : { "type" : "string", - "example": "test@example.com" + "example" : "test@example.com" }, "password" : { "type" : "string", - "example": "Test!123" + "example" : "Test!123" } } } @@ -149,7 +149,7 @@ "properties" : { "email" : { "type" : "string", - "example": "adam007@example.com" + "example" : 
"adam007@example.com" } } } @@ -412,11 +412,11 @@ "properties" : { "new_email" : { "type" : "string", - "example": "Sofia.Predovic@example.com" + "example" : "Sofia.Predovic@example.com" }, "old_email" : { "type" : "string", - "example": "Cristobal.Weissnat@example.com" + "example" : "Cristobal.Weissnat@example.com" } } } @@ -460,11 +460,11 @@ }, "new_email" : { "type" : "string", - "example": "Danielle.Ankunding@example.com" + "example" : "Danielle.Ankunding@example.com" }, "token" : { "type" : "string", - "example": "T9O2s6i3C7o2E8l7X5Y4" + "example" : "T9O2s6i3C7o2E8l7X5Y4" } } } @@ -526,6 +526,18 @@ "video_name" : { }, "video_url" : { }, "picture_url" : { } + }, + "example" : { + "id" : 35, + "name" : "Jasen.Hamill", + "email" : "Jasen.Hamill@example.com", + "number" : "7005397357", + "picture_url" : null, + "video_url" : null, + "video_name" : null, + "available_credit" : 155, + "video_id" : 0, + "role" : "ROLE_USER" } } } @@ -677,7 +689,8 @@ "schema" : { "type" : "integer", "format" : "int64" - } + }, + "example" : 1 } ], "responses" : { "200" : { @@ -717,7 +730,8 @@ "schema" : { "type" : "integer", "format" : "int64" - } + }, + "example" : 10 } ], "requestBody" : { "content" : { @@ -767,7 +781,8 @@ "schema" : { "type" : "integer", "format" : "int64" - } + }, + "example" : 1 } ], "responses" : { "200" : { @@ -819,7 +834,8 @@ "schema" : { "type" : "integer", "format" : "int64" - } + }, + "example" : 1 } ], "responses" : { "200" : { @@ -851,7 +867,8 @@ "schema" : { "type" : "integer", "format" : "int64" - } + }, + "example" : 12345 } ], "responses" : { "200" : { @@ -1009,11 +1026,11 @@ "properties" : { "pincode" : { "type" : "string", - "example": "9896" + "example" : "9896" }, "vin" : { "type" : "string", - "example": "0IOJO38SMVL663989" + "example" : "0IOJO38SMVL663989" } } } @@ -1108,7 +1125,7 @@ "schema" : { "type" : "string", "format" : "uuid", - "example": "0be319f0-f0dd-44aa-af0b-af927f3a383f" + "example" : "0be319f0-f0dd-44aa-af0b-af927f3a383f" } } ] }, @@ -1174,7 +1191,7 @@ "required" : true, "schema" : { "type" : "string", - "example": "tiSTSUzh4BwtvYSLWPsqu9" + "example" : "tiSTSUzh4BwtvYSLWPsqu9" } } ] }, @@ -1195,11 +1212,11 @@ "properties" : { "content" : { "type" : "string", - "example": "Est maiores voluptas velit. Necessitatibus vero veniam quos nobis." + "example" : "Est maiores voluptas velit. Necessitatibus vero veniam quos nobis." }, "title" : { "type" : "string", - "example": "Velit quia minima." + "example" : "Velit quia minima." } } } @@ -1282,7 +1299,7 @@ "properties" : { "content" : { "type" : "string", - "example": "Porro aut ratione et." + "example" : "Porro aut ratione et." 
} } } @@ -1388,7 +1405,7 @@ "required" : true, "schema" : { "type" : "string", - "example": "tiSTSUzh4BwtvYSLWPsqu9" + "example" : "tiSTSUzh4BwtvYSLWPsqu9" } } ] }, @@ -1401,6 +1418,23 @@ "security" : [ { "bearerAuth" : [ ] } ], + "parameters" : [ { + "in" : "query", + "name" : "limit", + "required" : false, + "schema" : { + "type" : "integer", + "example" : "30" + } + }, { + "in" : "query", + "name" : "offset", + "required" : false, + "schema" : { + "type" : "integer", + "example" : "0" + } + } ], "responses" : { "200" : { "description" : "OK", @@ -1522,11 +1556,10 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Coupon" + "$ref" : "#/components/schemas/AddCouponRequest" } } - }, - "required" : true + } }, "responses" : { "200" : { @@ -1534,7 +1567,7 @@ "content" : { "application/json" : { "schema" : { - "type" : "string" + "$ref" : "#/components/schemas/AddCouponResponse" } } } @@ -1565,49 +1598,53 @@ "/community/api/v2/coupon/validate-coupon" : { "post" : { "operationId" : "validate_coupon", - "summary" : "Validate Coupon - Version 2", + "summary" : "Validate Coupon", "description" : "Used to validate the provided discount coupon code", "tags" : [ "Community / Coupon" ], "security" : [ { "bearerAuth" : [ ] } ], "requestBody" : { + "description" : "Coupon", "content" : { "application/json" : { "schema" : { - "type" : "object", - "required" : [ "coupon_code" ], - "properties" : { - "coupon_code" : { - "type" : "string", - "example": "TRAC075" - } - } + "$ref" : "#/components/schemas/ValidateCouponRequest" } } - } + }, + "required" : true }, "responses" : { "200" : { + "description" : "Validate coupon response", "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/Coupon" + "$ref" : "#/components/schemas/ValidateCouponResponse" } } - }, - "description" : "" + } + }, + "400" : { + "description" : "Bad Request", + "content" : { + "application/json" : { + "schema" : { + "type" : "string" + } + } + } }, "500" : { + "description" : "Internal Server Error", "content" : { "application/json" : { "schema" : { - "type" : "object", - "properties" : { } + "type" : "string" } } - }, - "description" : "" + } } } } @@ -1634,7 +1671,7 @@ "type" : "number" }, "products" : { - "$ref" : "#/components/schemas/Product" + "$ref" : "#/components/schemas/Products" } } } @@ -1724,6 +1761,11 @@ "type" : "number", "format" : "float" } + }, + "example" : { + "id" : 30, + "message" : "Order sent successfully.", + "credit" : 155 } } } @@ -1767,7 +1809,8 @@ "schema" : { "type" : "integer" }, - "required" : true + "required" : true, + "example" : 1 } ], "requestBody" : { "content" : { @@ -1850,7 +1893,8 @@ "schema" : { "type" : "integer" }, - "required" : true + "required" : true, + "example" : 1 } ], "responses" : { "200" : { @@ -1983,7 +2027,7 @@ "name" : "order_id", "schema" : { "type" : "integer", - "example": 33 + "example" : 33 }, "required" : true } ], @@ -2065,18 +2109,7 @@ "content" : { "application/json" : { "schema" : { - "type" : "object", - "required" : [ "amount", "coupon_code" ], - "properties" : { - "amount" : { - "type" : "number", - "example": 75 - }, - "coupon_code" : { - "type" : "string", - "example": "TRAC075" - } - } + "$ref" : "#/components/schemas/ApplyCouponRequest" } } } @@ -2086,16 +2119,7 @@ "content" : { "application/json" : { "schema" : { - "type" : "object", - "required" : [ "credit", "message" ], - "properties" : { - "credit" : { - "type" : "number" - }, - "message" : { - "type" : "string" - } - } + "$ref" : 
"#/components/schemas/ApplyCouponResponse" } } }, @@ -2154,6 +2178,101 @@ } } }, + "/workshop/api/management/users/all" : { + "get" : { + "operationId" : "get_workshop_users_all", + "summary" : "Get Workshop Users Detail", + "description" : "Used to get all the users in the workshop database.", + "tags" : [ "Workshop / Shop" ], + "security" : [ { + "bearerAuth" : [ ] + } ], + "parameters" : [ { + "in" : "query", + "name" : "limit", + "required" : false, + "schema" : { + "type" : "integer", + "example" : "30" + } + }, { + "in" : "query", + "name" : "offset", + "required" : false, + "schema" : { + "type" : "integer", + "example" : "0" + } + } ], + "responses" : { + "200" : { + "description" : "OK", + "headers" : { + "Transfer-Encoding" : { + "content" : { + "text/plain" : { + "schema" : { + "type" : "string" + }, + "example" : "chunked" + } + } + } + }, + "content" : { + "application/json" : { + "schema" : { + "type" : "object", + "required" : [ "users" ], + "properties" : { + "users" : { + "type" : "array", + "items" : { + "type" : "object", + "required" : [ "user", "available_credit" ], + "properties" : { + "available_credit" : { + "type" : "integer", + "format" : "float" + }, + "user" : { + "type" : "object", + "required" : [ "email", "number" ], + "properties" : { + "email" : { + "type" : "string" + }, + "number" : { + "type" : "string" + } + } + } + } + } + } + } + }, + "example" : { + "users" : [ { + "user" : { + "email" : "adam007@example.com", + "number" : "9876895423" + }, + "available_credit" : 100 + }, { + "user" : { + "email" : "pogba006@example.com", + "number" : "9876570006" + }, + "available_credit" : 100 + } ] + } + } + } + } + } + } + }, "/workshop/api/mechanic/" : { "get" : { "operationId" : "get_mechanics", @@ -2626,6 +2745,25 @@ "security" : [ { "bearerAuth" : [ ] } ], + "parameters" : [ { + "name" : "limit", + "in" : "query", + "required" : true, + "schema" : { + "type" : "integer", + "format" : "int32", + "example" : 30 + } + }, { + "name" : "offset", + "in" : "query", + "required" : true, + "schema" : { + "type" : "integer", + "format" : "int32", + "example" : 0 + } + } ], "responses" : { "200" : { "description" : "OK", @@ -2658,7 +2796,13 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/ServiceRequests" + "type" : "object", + "properties" : { + "message" : { + "type" : "string" + } + }, + "required" : [ "message" ] } } } @@ -2802,7 +2946,18 @@ "format" : "url" } }, - "required" : [ "image_url", "name", "price" ] + "required" : [ "image_url", "name", "price" ], + "example" : { + "name" : "WheelBase", + "image_url" : "http://example.com/wheelbase.png", + "price" : "10.12" + } + }, + "Products" : { + "type" : "array", + "items" : { + "$ref" : "#/components/schemas/Product" + } }, "Product" : { "type" : "object", @@ -2824,7 +2979,13 @@ "format" : "url" } }, - "required" : [ "id", "image_url", "name", "price" ] + "required" : [ "id", "image_url", "name", "price" ], + "example" : { + "id" : 1, + "name" : "Seat", + "price" : "10.00", + "image_url" : "images/seat.svg" + } }, "OrderStatusEnum" : { "enum" : [ "delivered", "return pending", "returned" ], @@ -2835,11 +2996,11 @@ "properties" : { "product_id" : { "type" : "integer", - "example": 1 + "example" : 1 }, "quantity" : { "type" : "integer", - "example": 1 + "example" : 1 } }, "required" : [ "product_id", "quantity" ] @@ -2959,19 +3120,19 @@ "maxLength" : 4, "minLength" : 3, "type" : "string", - "example": "9969" + "example" : "9969" }, "password" : { "maxLength" : 30, "minLength" : 5, 
"type" : "string", - "example": "5hmb0gvyC__hVQg" + "example" : "5hmb0gvyC__hVQg" }, "email" : { "maxLength" : 30, "minLength" : 5, "type" : "string", - "example": "Cristobal.Weissnat@example.com" + "example" : "Cristobal.Weissnat@example.com" } } }, @@ -3028,9 +3189,95 @@ "user" : { "$ref" : "#/components/schemas/User" } + }, + "example" : { + "id" : 1, + "video_name" : "abc.mp4", + "conversion_params" : "-v codec h264", + "profileVideo" : "data:image/jpeg;base64,aGFrZmhhcw==" + } + }, + "ApplyCouponRequest" : { + "type" : "object", + "properties" : { + "amount" : { + "type" : "integer" + }, + "coupon_code" : { + "type" : "string" + } + }, + "required" : [ "amount", "coupon_code" ], + "example" : { + "coupon_code" : "TRAC075", + "amount" : 75 } }, - "Coupon" : { + "ApplyCouponResponse" : { + "type" : "object", + "properties" : { + "credit" : { + "type" : "integer" + }, + "message" : { + "type" : "string" + } + }, + "required" : [ "credit", "message" ], + "example" : { + "credit" : 165, + "message" : "Coupon successfully applied!" + } + }, + "AddCouponRequest" : { + "type" : "object", + "properties" : { + "coupon_code" : { + "type" : "string" + }, + "amount" : { + "type" : "integer" + } + }, + "required" : [ "coupon_code", "amount" ], + "example" : { + "coupon_code" : "TRAC075", + "amount" : 75 + } + }, + "AddCouponResponse" : { + "type" : "object", + "properties" : { + "amount" : { + "type" : "string" + }, + "coupon_code" : { + "type" : "string" + }, + "createdAt" : { + "type" : "string" + } + }, + "required" : [ "amount", "coupon_code", "CreatedAt" ], + "example" : { + "coupon_code" : "TRAC075", + "amount" : "75", + "CreatedAt" : "2023-12-07T14:22:29.832Z" + } + }, + "ValidateCouponRequest" : { + "type" : "object", + "properties" : { + "coupon_code" : { + "type" : "string" + } + }, + "required" : [ "coupon_code" ], + "example" : { + "coupon_code" : "TRAC075" + } + }, + "ValidateCouponResponse" : { "type" : "object", "properties" : { "amount" : { @@ -3043,7 +3290,12 @@ "type" : "string" } }, - "required" : [ "amount", "coupon_code" ] + "required" : [ "amount", "coupon_code", "CreatedAt" ], + "example" : { + "coupon_code" : "TRAC075", + "amount" : "75", + "CreatedAt" : "2023-12-07T14:22:29.832Z" + } }, "ServiceRequests" : { "title" : "Service Requests", @@ -3137,4 +3389,4 @@ } } } -} +} \ No newline at end of file diff --git a/postman_collections/crAPI.postman_collection.json b/postman_collections/crAPI.postman_collection.json index ecf42d98..707898b5 100644 --- a/postman_collections/crAPI.postman_collection.json +++ b/postman_collections/crAPI.postman_collection.json @@ -5,71 +5,19 @@ "name": "crAPI Accepted", "description": "crAPI Training Scripts", "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", - "updatedAt": "2023-02-15T20:57:41.000Z" + "updatedAt": "2024-01-09T18:25:19.000Z", + "uid": "11111111-1111-1111-1111-111111111111", + "createdAt": null, + "lastUpdatedBy": null }, "item": [ - { - "name": "Reset Test Users", - "event": [ - { - "listen": "test", - "script": { - "exec": [ - "pm.test(\"Status code is 200\", function () {", - " pm.response.to.have.status(200);", - "});" - ], - "type": "text/javascript" - } - }, - { - "listen": "prerequest", - "script": { - "exec": [ - "" - ], - "type": "text/javascript" - } - } - ], - "request": { - "method": "POST", - "header": [ - { - "key": "User-Agent", - "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36" - }, - { - "key": "Content-Type", - 
"value": "application/json" - }, - { - "key": "Accept", - "value": "*/*" - } - ], - "url": { - "raw": "{{url}}/identity/api/auth/reset-test-users", - "host": [ - "{{url}}" - ], - "path": [ - "identity", - "api", - "auth", - "reset-test-users" - ] - } - }, - "response": [] - }, { "name": "Signup example.com", "event": [ { "listen": "test", "script": { - "id": "e857cde3-4585-4826-b7f4-c70ebb0eec03", + "id": "212edd4c-4269-4ec5-84f7-bcddae94fbf2", "exec": [ "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", @@ -81,7 +29,7 @@ { "listen": "prerequest", "script": { - "id": "4d9cddf8-786f-459f-9328-4e4d868532a7", + "id": "d91fc02a-4d94-4f60-a834-1508ba0d69a7", "exec": [ "const {Property} = require('postman-collection');", "var xff = Property.replaceSubstitutions('{{$randomIP}}');", @@ -114,7 +62,7 @@ } } ], - "id": "95011fcb-1cf0-482d-95f8-6d6a64458924", + "id": "73c92330-9c81-4526-a720-242a647bd1fa", "protocolProfileBehavior": { "disableBodyPruning": true }, @@ -154,7 +102,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-73c92330-9c81-4526-a720-242a647bd1fa" }, { "name": "Login", @@ -162,7 +111,6 @@ { "listen": "test", "script": { - "id": "b3dabfb0-9e1e-4bb1-9036-db271ea9be42", "exec": [ "", "pm.test(\"Status code is 200\", function () {", @@ -176,13 +124,13 @@ "});", "" ], - "type": "text/javascript" + "type": "text/javascript", + "id": "fc0cb347-ad16-46de-9985-207c5146d7c5" } }, { "listen": "prerequest", "script": { - "id": "dd734d51-fae8-4593-99ac-53b33a210333", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -203,11 +151,12 @@ "});\r", "" ], - "type": "text/javascript" + "type": "text/javascript", + "id": "c0a43741-3b36-48c0-be1a-9e5595f57c11" } } ], - "id": "6b3b62af-9854-40b8-a774-a0aad25723b9", + "id": "5f9c7b75-681c-43aa-9d80-448f7907669c", "protocolProfileBehavior": { "disableBodyPruning": true }, @@ -247,7 +196,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-5f9c7b75-681c-43aa-9d80-448f7907669c" }, { "name": "Verify JWT Token", @@ -261,11 +211,11 @@ "});" ], "type": "text/javascript", - "id": "e15ce32f-489c-4dd7-856f-302e4a0ddaca" + "id": "ed5c1a2a-44dd-4cd7-8adc-d094cd7fdae8" } } ], - "id": "227248d7-751c-4139-81d6-7e6bf00050ec", + "id": "c4ffff4d-e81c-405d-a8c0-caf9299b8020", "protocolProfileBehavior": { "disableBodyPruning": true }, @@ -297,7 +247,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-c4ffff4d-e81c-405d-a8c0-caf9299b8020" }, { "name": "Forgot Password", @@ -305,7 +256,6 @@ { "listen": "test", "script": { - "id": "77220831-6b67-48f2-abce-62229adddb7a", "exec": [ "", "pm.test(\"Status code is 200\", function () {", @@ -317,13 +267,13 @@ "});", "" ], - "type": "text/javascript" + "type": "text/javascript", + "id": "01bbbbc5-87cd-4337-af44-5a9433e4bf1c" } }, { "listen": "prerequest", "script": { - "id": "11239e51-1e5d-4433-a87a-f8a01f62e963", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -342,11 +292,12 @@ " value: xff\r", "});" ], - "type": "text/javascript" + "type": "text/javascript", + "id": "902385be-d824-4990-8e17-022fe8a0313f" } } ], - "id": "4561e464-0591-4acb-94cd-8e6868bb4d93", + "id": "fff7f353-9fbc-457f-97c1-42b7fa4b1024", "protocolProfileBehavior": { "disableBodyPruning": true }, @@ -386,7 +337,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-fff7f353-9fbc-457f-97c1-42b7fa4b1024" }, { "name": 
"SearchMailOTP", @@ -394,7 +346,6 @@ { "listen": "test", "script": { - "id": "bb7b2f05-7128-4f51-88bd-1f90de8c5da1", "exec": [ "", "", @@ -429,19 +380,19 @@ " console.log(otpmatch);", " var otp = otpmatch[1]", " console.log(\"OTP: \"+ otp);", - " pm.globals.set(\"OTP\", otp);", + " pm.collectionVariables.set(\"OTP\", otp);", " });", " ", "});", "" ], - "type": "text/javascript" + "type": "text/javascript", + "id": "eed6b360-55c8-43f2-ad6a-4f8e1e8a1da3" } }, { "listen": "prerequest", "script": { - "id": "1b1e2d29-0161-4461-b2af-d6ce6aa4bf3d", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -460,11 +411,12 @@ " value: xff\r", "});" ], - "type": "text/javascript" + "type": "text/javascript", + "id": "42443fb8-3a72-4d37-ab36-597308aff3d2" } } ], - "id": "38501ca0-531b-42ed-a3df-6a6a869a0eaf", + "id": "437e1354-581b-4a26-94a8-1ff508ac8420", "protocolProfileBehavior": { "disableBodyPruning": true }, @@ -514,7 +466,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-437e1354-581b-4a26-94a8-1ff508ac8420" }, { "name": "Check OTP", @@ -522,7 +475,6 @@ { "listen": "test", "script": { - "id": "03dc20b1-db94-425f-8cd6-8cef62437579", "exec": [ "", "pm.test(\"Status code is 200\", function () {", @@ -530,13 +482,13 @@ "});", "" ], - "type": "text/javascript" + "type": "text/javascript", + "id": "a8af689c-e601-43b1-bb64-0b8224908bce" } }, { "listen": "prerequest", "script": { - "id": "5c7e05d5-f2ee-4f56-82e9-b380b3c21781", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -555,11 +507,12 @@ " value: xff\r", "});" ], - "type": "text/javascript" + "type": "text/javascript", + "id": "ba8abbec-945d-48f3-9f9a-d407cfbc2571" } } ], - "id": "e93361b7-2a6a-42bb-9e9c-66c12c2cdcb6", + "id": "a45abeb7-f6c3-405d-bd01-3c79786d321b", "protocolProfileBehavior": { "disableBodyPruning": true }, @@ -600,7 +553,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-a45abeb7-f6c3-405d-bd01-3c79786d321b" }, { "name": "Resend vehicle email", @@ -608,7 +562,7 @@ { "listen": "test", "script": { - "id": "0e8904cd-5bf5-452d-9f76-c0942e0e1ddd", + "id": "9c1dc5cd-e1d6-4c3b-8bfd-52ae448c4595", "exec": [ "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", @@ -620,7 +574,7 @@ { "listen": "prerequest", "script": { - "id": "2edb89b6-a1f8-452d-a3d8-44c480bd1194", + "id": "3eebacdf-ed5a-43f7-9ed5-fce2bd2447ce", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -643,11 +597,21 @@ } } ], - "id": "b33396c1-6f43-405a-9d0f-2379cb1341d8", + "id": "598598c0-9905-4291-beef-3199c4a80ef7", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, "method": "POST", "header": [ { @@ -677,7 +641,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-598598c0-9905-4291-beef-3199c4a80ef7" }, { "name": "SearchMailVehicle", @@ -685,7 +650,6 @@ { "listen": "test", "script": { - "id": "04f79757-0b2e-4572-a378-9ed71916f2cf", "exec": [ "", "function dynamicSort(property) {", @@ -729,13 +693,13 @@ "});", "" ], - "type": "text/javascript" + "type": "text/javascript", + "id": "e4b022af-785b-4fe5-b6ef-65ec4ea8d7a1" } }, { "listen": "prerequest", "script": { - "id": "44894299-8190-41f3-b38d-02637870a0ce", "exec": [ "const 
{Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -754,11 +718,12 @@ " value: xff\r", "});" ], - "type": "text/javascript" + "type": "text/javascript", + "id": "70271721-08c8-41f9-a5a4-f3b83093dad4" } } ], - "id": "50487bec-79e0-470f-97cc-60229e96aa22", + "id": "c0ef62ef-8ca0-4eaf-a3d8-44c1aa41a407", "protocolProfileBehavior": { "disableBodyPruning": true }, @@ -808,7 +773,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-c0ef62ef-8ca0-4eaf-a3d8-44c1aa41a407" }, { "name": "Add Vehicle from Mail", @@ -816,7 +782,7 @@ { "listen": "test", "script": { - "id": "fdaef0e3-04c3-4de2-902b-f511a77d34de", + "id": "aeb0161e-8372-4da8-b151-1ab732099cf8", "exec": [ "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", @@ -828,7 +794,7 @@ { "listen": "prerequest", "script": { - "id": "c2209f89-7827-4f88-aa0a-55222576d643", + "id": "aa6ed8be-83b9-4071-9c2e-ad35c08050f7", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -851,11 +817,21 @@ } } ], - "id": "75ca2936-1a83-41ed-b9a6-361ec77b0158", + "id": "557cbaea-7709-4da6-8f18-36d05a49f3c5", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, "method": "POST", "header": [ { @@ -889,7 +865,96 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-557cbaea-7709-4da6-8f18-36d05a49f3c5" + }, + { + "name": "Get Dashboard", + "event": [ + { + "listen": "test", + "script": { + "id": "94c6588b-1056-4828-8c7f-309bdd80a557", + "exec": [ + "pm.test(\"Status code is 200\", function () {", + " pm.response.to.have.status(200);", + "});" + ], + "type": "text/javascript" + } + }, + { + "listen": "prerequest", + "script": { + "id": "15ed7adb-1256-4039-9d7a-2c81d5b9a764", + "exec": [ + "const {Property} = require('postman-collection');\r", + "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", + "if (!xff){\r", + " xff = Property.replaceSubstitutions('{{$randomIP}}');\r", + " console.log(\"X-Forwarded-For setting XFF to \"+xff);\r", + " \r", + "}\r", + "pm.collectionVariables.set(\"X-Forwarded-For\", xff);\r", + "pm.request.headers.remove('X-Forwarded-For');\r", + "pm.request.headers.add({ \r", + " // These keys appears when you set a header by hand. 
Just for fun they are here\r", + " disabled: false,\r", + " // Your header, effectively\r", + " key: 'X-Forwarded-For', \r", + " value: xff\r", + "});" + ], + "type": "text/javascript" + } + } + ], + "id": "51aeab4f-44d3-44f4-a68f-8ca534c76eb2", + "protocolProfileBehavior": { + "disableBodyPruning": true + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "User-Agent", + "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36" + }, + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "Accept", + "value": "*/*" + } + ], + "url": { + "raw": "{{url}}/identity/api/v2/user/dashboard", + "host": [ + "{{url}}" + ], + "path": [ + "identity", + "api", + "v2", + "user", + "dashboard" + ] + } + }, + "response": [], + "uid": "13872198-51aeab4f-44d3-44f4-a68f-8ca534c76eb2" }, { "name": "Get vehicles", @@ -897,7 +962,7 @@ { "listen": "test", "script": { - "id": "93cf51a1-5bf4-4147-b0b2-30e87faf8dd9", + "id": "94c6588b-1056-4828-8c7f-309bdd80a557", "exec": [ "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", @@ -911,7 +976,7 @@ { "listen": "prerequest", "script": { - "id": "d2d0e181-fe2a-4988-b879-6d59101d25bc", + "id": "15ed7adb-1256-4039-9d7a-2c81d5b9a764", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -934,11 +999,21 @@ } } ], - "id": "e5ceaf9c-4baa-45b5-b18e-83e8eebcf68e", + "id": "071e3d09-178d-4119-a4d2-cad20b48bbb5", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, "method": "GET", "header": [ { @@ -968,7 +1043,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-071e3d09-178d-4119-a4d2-cad20b48bbb5" }, { "name": "Get recent posts", @@ -976,7 +1052,7 @@ { "listen": "test", "script": { - "id": "7cf01ed9-84bb-4848-b26c-28ca4fb3a1f2", + "id": "38fb751f-d2f2-4868-9c73-fc603acfe23f", "exec": [ "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", @@ -988,7 +1064,7 @@ { "listen": "prerequest", "script": { - "id": "05b8694a-1c5e-472a-a360-b6b5c089c8fd", + "id": "9b6b4be9-5cf3-460a-a34a-e92899854e58", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -1011,11 +1087,21 @@ } } ], - "id": "eff155f5-d633-4b52-a3a9-493a6753068b", + "id": "3672b547-3a8f-4d57-aad2-3e180d7fa47c", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, "method": "GET", "header": [ { @@ -1032,7 +1118,7 @@ } ], "url": { - "raw": "{{url}}/community/api/v2/community/posts/recent", + "raw": "{{url}}/community/api/v2/community/posts/recent?limit=30&offset=0", "host": [ "{{url}}" ], @@ -1043,10 +1129,21 @@ "community", "posts", "recent" + ], + "query": [ + { + "key": "limit", + "value": "30" + }, + { + "key": "offset", + "value": "0" + } ] } }, - "response": [] + "response": [], + "uid": "13872198-3672b547-3a8f-4d57-aad2-3e180d7fa47c" }, { "name": "Create post ", @@ -1054,13 +1151,13 @@ { "listen": "test", "script": { - "id": "9f9b1e4d-c951-42eb-8858-1041141838b3", + "id": 
"b5458d72-88ae-423b-8ae2-46a26842fe38", "exec": [ "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", " var jsonData = JSON.parse(responseBody);", " var order = jsonData.id;", - " pm.globals.set(\"post_id\", order);", + " pm.collectionVariables.set(\"post_id\", order);", " console.log(order);", "});", "" @@ -1071,7 +1168,7 @@ { "listen": "prerequest", "script": { - "id": "0e710fbf-7bf4-4756-a98d-306ff290f5bd", + "id": "f2aa7cd1-923a-4608-81fd-131c19e5c42b", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -1094,11 +1191,21 @@ } } ], - "id": "239d5ac0-6a05-43a8-bd58-4645860c30a3", + "id": "36850ebe-8a64-42b2-aeff-d5a36d0005d9", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, "method": "POST", "header": [ { @@ -1132,7 +1239,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-36850ebe-8a64-42b2-aeff-d5a36d0005d9" }, { "name": "Get post", @@ -1140,7 +1248,7 @@ { "listen": "test", "script": { - "id": "6c112c0a-5544-425b-a268-edf153569c81", + "id": "49844a45-d740-4e48-b00b-bc89ffc12556", "exec": [ "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", @@ -1153,7 +1261,7 @@ { "listen": "prerequest", "script": { - "id": "83e07b19-d6a7-47df-a8d6-30e82973b52d", + "id": "2656982e-1f5a-4812-ba13-84539e8d5cd4", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -1176,11 +1284,21 @@ } } ], - "id": "4f2474b3-f0a1-4941-bded-afc46afec0d9", + "id": "6a434301-adbb-41ed-a2d9-b1599aac7dcc", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, "method": "GET", "header": [ { @@ -1211,7 +1329,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-6a434301-adbb-41ed-a2d9-b1599aac7dcc" }, { "name": "Post comment", @@ -1219,7 +1338,7 @@ { "listen": "test", "script": { - "id": "206f5f46-8078-4387-b33a-61fa50d53c4d", + "id": "4ed7e169-9c0a-4b2f-9900-931fde9cc253", "exec": [ "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", @@ -1231,7 +1350,7 @@ { "listen": "prerequest", "script": { - "id": "ec75a9d0-08d0-4296-bf9c-039b436472ef", + "id": "7f7a041c-4686-497f-af7c-69d182486153", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -1254,11 +1373,21 @@ } } ], - "id": "4f62b2d3-9645-4062-a675-b477d8cd45f4", + "id": "f4da30f3-5f68-457e-9f52-57e744d638a0", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, "method": "POST", "header": [ { @@ -1295,7 +1424,8 @@ }, "description": "Post comment" }, - "response": [] + "response": [], + "uid": "13872198-f4da30f3-5f68-457e-9f52-57e744d638a0" }, { "name": "Get vehicle location", @@ -1303,7 +1433,7 @@ { "listen": "test", "script": { - "id": "073fa9a9-f836-41b9-bde5-3db6e8d75790", + "id": "1d1f1d9e-34e2-462a-8469-f3a48bfce052", "exec": [ "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", @@ -1315,7 +1445,7 @@ { "listen": "prerequest", "script": { - "id": 
"336eec10-68c0-4ae2-9504-d68d97d65212", + "id": "f13d8ca4-39d1-4003-bf2e-8d31f3b4a178", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -1338,11 +1468,21 @@ } } ], - "id": "d9b2f169-5409-4cb4-88bb-56c9e28d575e", + "id": "7fe2e5da-ca18-46c5-b802-eb777841d76c", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, "method": "GET", "header": [ { @@ -1373,20 +1513,19 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-7fe2e5da-ca18-46c5-b802-eb777841d76c" }, { - "name": "Contact mechanic", + "name": "Get All Mechanics", "event": [ { "listen": "test", "script": { - "id": "ae15ee18-59d9-4171-b6f1-8d2c7fd1579a", + "id": "1d1f1d9e-34e2-462a-8469-f3a48bfce052", "exec": [ "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", - " var jsonData = pm.response.json();", - " pm.collectionVariables.set(\"report_id\", jsonData.response_from_mechanic_api.id);", "});" ], "type": "text/javascript" @@ -1395,7 +1534,7 @@ { "listen": "prerequest", "script": { - "id": "6b16a4ec-32b1-4c09-b052-ca2ab017f41b", + "id": "f13d8ca4-39d1-4003-bf2e-8d31f3b4a178", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -1418,20 +1557,26 @@ } } ], - "id": "76b2378b-1d7f-4519-a642-c45fb209e0d4", + "id": "76ff951b-e220-4784-a97b-8bf29a63b58f", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { - "method": "POST", + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", "header": [ - { - "key": "Authorization", - "value": "Bearer eyJhbGciOiJIUzUxMiJ9.eyJzdWIiOiJtb25zdGVyQGRhcmt3ZWIuY29tIiwiaWF0IjoxNjA4NTQzODI0LCJleHAiOjE2MDg2MzAyMjR9.AAIvhrtstlloPf7-NNCcmhEZm-XkO1aO3x3CLns1em7ft9U77B88LpK9jIrF8C-IH32JPWVazRFo0gBIBZ9FpA" - }, { "key": "User-Agent", - "value": "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_1_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36" + "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36" }, { "key": "Content-Type", @@ -1442,32 +1587,212 @@ "value": "*/*" } ], - "body": { - "mode": "raw", - "raw": "{\"mechanic_code\":\"TRAC_JHN\",\"problem_details\":\"Hi Jhon\",\"vin\":\"{{VIN}}\",\"mechanic_api\":\"{{url}}/workshop/api/mechanic/receive_report\",\"repeat_request_if_failed\":false,\"number_of_repeats\":1}" - }, "url": { - "raw": "{{url}}/workshop/api/merchant/contact_mechanic", + "raw": "{{url}}/workshop/api/mechanic", "host": [ "{{url}}" ], "path": [ "workshop", "api", - "merchant", + "mechanic" + ] + } + }, + "response": [], + "uid": "13872198-76ff951b-e220-4784-a97b-8bf29a63b58f" + }, + { + "name": "Contact mechanic", + "event": [ + { + "listen": "test", + "script": { + "id": "f0f39752-0671-4381-8df8-9cbe570e6925", + "exec": [ + "pm.test(\"Status code is 200\", function () {", + " pm.response.to.have.status(200);", + " var jsonData = pm.response.json();", + " pm.collectionVariables.set(\"report_id\", jsonData.response_from_mechanic_api.id);", + "});" + ], + "type": "text/javascript" + } + }, + { + "listen": "prerequest", + "script": { + "id": "d470cb7e-2d7a-45e8-b610-da6a0d820882", + "exec": [ + "const {Property} = require('postman-collection');\r", + "var xff = 
pm.collectionVariables.get('X-Forwarded-For');\r", + "if (!xff){\r", + " xff = Property.replaceSubstitutions('{{$randomIP}}');\r", + " console.log(\"X-Forwarded-For setting XFF to \"+xff);\r", + " \r", + "}\r", + "pm.collectionVariables.set(\"X-Forwarded-For\", xff);\r", + "pm.request.headers.remove('X-Forwarded-For');\r", + "pm.request.headers.add({ \r", + " // These keys appears when you set a header by hand. Just for fun they are here\r", + " disabled: false,\r", + " // Your header, effectively\r", + " key: 'X-Forwarded-For', \r", + " value: xff\r", + "});" + ], + "type": "text/javascript" + } + } + ], + "id": "66c46dc2-2386-44cf-890a-63d917d2ca23", + "protocolProfileBehavior": { + "disableBodyPruning": true + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "Authorization", + "value": "Bearer eyJhbGciOiJIUzUxMiJ9.eyJzdWIiOiJtb25zdGVyQGRhcmt3ZWIuY29tIiwiaWF0IjoxNjA4NTQzODI0LCJleHAiOjE2MDg2MzAyMjR9.AAIvhrtstlloPf7-NNCcmhEZm-XkO1aO3x3CLns1em7ft9U77B88LpK9jIrF8C-IH32JPWVazRFo0gBIBZ9FpA" + }, + { + "key": "User-Agent", + "value": "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_1_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36" + }, + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "Accept", + "value": "*/*" + } + ], + "body": { + "mode": "raw", + "raw": "{\r\n \"mechanic_code\": \"TRAC_JHN\",\r\n \"problem_details\": \"Hi Jhon\",\r\n \"vin\": \"{{VIN}}\",\r\n \"mechanic_api\": \"{{url}}/workshop/api/mechanic/receive_report\",\r\n \"repeat_request_if_failed\": false,\r\n \"number_of_repeats\": 1\r\n}" + }, + "url": { + "raw": "{{url}}/workshop/api/merchant/contact_mechanic", + "host": [ + "{{url}}" + ], + "path": [ + "workshop", + "api", + "merchant", "contact_mechanic" ] } }, - "response": [] + "response": [], + "uid": "13872198-66c46dc2-2386-44cf-890a-63d917d2ca23" }, { - "name": "GetReport", + "name": "Mechanic Login", + "event": [ + { + "listen": "test", + "script": { + "id": "ecefadd8-d1fb-4415-a0b2-cfcae8216ac6", + "exec": [ + "", + "pm.test(\"Status code is 200\", function () {", + " pm.response.to.have.status(200);", + " var jsonData = JSON.parse(responseBody);", + " pm.collectionVariables.set(\"mechanicToken\", jsonData.token);", + " var token = pm.collectionVariables.get(\"mechanicToken\");", + " pm.test(\"Token is set\", function () {", + " pm.expect(jsonData.token).to.eql(token);", + " });", + "});", + "" + ], + "type": "text/javascript" + } + }, + { + "listen": "prerequest", + "script": { + "id": "96148952-7c1a-4316-8577-b7b0cc59fdb4", + "exec": [ + "const {Property} = require('postman-collection');\r", + "var xff = Property.replaceSubstitutions('{{$randomIP}}');\r", + "pm.collectionVariables.set(\"X-Forwarded-For-Admin\", xff);\r", + "pm.request.headers.remove('X-Forwarded-For');\r", + "pm.request.headers.add({ \r", + " // These keys appears when you set a header by hand. 
Just for fun they are here\r", + " disabled: false,\r", + " // Your header, effectively\r", + " key: 'X-Forwarded-For', \r", + " value: xff\r", + "});" + ], + "type": "text/javascript" + } + } + ], + "id": "0eeb2452-e86b-48f3-bf4f-082703f39884", + "protocolProfileBehavior": { + "disableBodyPruning": true + }, + "request": { + "auth": { + "type": "noauth" + }, + "method": "POST", + "header": [ + { + "key": "User-Agent", + "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36" + }, + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "Accept", + "value": "*/*" + } + ], + "body": { + "mode": "raw", + "raw": "{\"email\":\"jhon@example.com\",\"password\":\"Admin1@#\"}" + }, + "url": { + "raw": "{{url}}/identity/api/auth/login", + "host": [ + "{{url}}" + ], + "path": [ + "identity", + "api", + "auth", + "login" + ] + } + }, + "response": [], + "uid": "13872198-0eeb2452-e86b-48f3-bf4f-082703f39884" + }, + { + "name": "Service Requests", "event": [ { "listen": "test", "script": { - "id": "ae15ee18-59d9-4171-b6f1-8d2c7fd1579a", + "id": "f0f39752-0671-4381-8df8-9cbe570e6925", "exec": [ "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", @@ -1479,7 +1804,7 @@ { "listen": "prerequest", "script": { - "id": "6b16a4ec-32b1-4c09-b052-ca2ab017f41b", + "id": "d470cb7e-2d7a-45e8-b610-da6a0d820882", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -1502,11 +1827,21 @@ } } ], - "id": "78b2749d-ecad-435f-b257-7d185f178682", + "id": "dc6c5d9a-9cd1-493e-9226-41cb0d50b3a1", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{mechanicToken}}", + "type": "string" + } + ] + }, "method": "GET", "header": [ { @@ -1528,7 +1863,108 @@ ], "body": { "mode": "raw", - "raw": "{\"mechanic_code\":\"TRAC_JHN\",\"problem_details\":\"Hi Jhon\",\"vin\":\"{{VIN}}\",\"mechanic_api\":\"{{url}}/workshop/api/mechanic/receive_report\",\"repeat_request_if_failed\":false,\"number_of_repeats\":1}" + "raw": "" + }, + "url": { + "raw": "{{url}}/workshop/api/mechanic/service_requests?limit=30&offset=0", + "host": [ + "{{url}}" + ], + "path": [ + "workshop", + "api", + "mechanic", + "service_requests" + ], + "query": [ + { + "key": "limit", + "value": "30" + }, + { + "key": "offset", + "value": "0" + } + ] + } + }, + "response": [], + "uid": "13872198-dc6c5d9a-9cd1-493e-9226-41cb0d50b3a1" + }, + { + "name": "GetReport", + "event": [ + { + "listen": "test", + "script": { + "id": "4f6d9666-0933-4e5c-a4a1-efdd2e2c49aa", + "exec": [ + "pm.test(\"Status code is 200\", function () {", + " pm.response.to.have.status(200);", + "});" + ], + "type": "text/javascript" + } + }, + { + "listen": "prerequest", + "script": { + "id": "b30d69f4-b8cc-4e58-9acc-fe49d38a8a89", + "exec": [ + "const {Property} = require('postman-collection');\r", + "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", + "if (!xff){\r", + " xff = Property.replaceSubstitutions('{{$randomIP}}');\r", + " console.log(\"X-Forwarded-For setting XFF to \"+xff);\r", + " \r", + "}\r", + "pm.collectionVariables.set(\"X-Forwarded-For\", xff);\r", + "pm.request.headers.remove('X-Forwarded-For');\r", + "pm.request.headers.add({ \r", + " // These keys appears when you set a header by hand. 
Just for fun they are here\r", + " disabled: false,\r", + " // Your header, effectively\r", + " key: 'X-Forwarded-For', \r", + " value: xff\r", + "});" + ], + "type": "text/javascript" + } + } + ], + "id": "a992c051-2e26-42f6-8aba-52005b766363", + "protocolProfileBehavior": { + "disableBodyPruning": true + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "Authorization", + "value": "Bearer eyJhbGciOiJIUzUxMiJ9.eyJzdWIiOiJtb25zdGVyQGRhcmt3ZWIuY29tIiwiaWF0IjoxNjA4NTQzODI0LCJleHAiOjE2MDg2MzAyMjR9.AAIvhrtstlloPf7-NNCcmhEZm-XkO1aO3x3CLns1em7ft9U77B88LpK9jIrF8C-IH32JPWVazRFo0gBIBZ9FpA" + }, + { + "key": "User-Agent", + "value": "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_1_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36" + }, + { + "key": "Accept", + "value": "*/*" + } + ], + "body": { + "mode": "raw", + "raw": "" }, "url": { "raw": "{{url}}/workshop/api/mechanic/mechanic_report?report_id={{report_id}}", @@ -1549,7 +1985,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-a992c051-2e26-42f6-8aba-52005b766363" }, { "name": "Get products", @@ -1557,7 +1994,7 @@ { "listen": "test", "script": { - "id": "943760c1-3ffe-4236-903a-50b40d20d1d6", + "id": "f58f20f7-878b-4ffa-b17a-835c779b561a", "exec": [ "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", @@ -1573,7 +2010,7 @@ { "listen": "prerequest", "script": { - "id": "8c7773ce-bfbf-4c89-90cb-1e8917fa3d78", + "id": "200fbc66-8abc-4d88-8d38-787ee390c27a", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -1596,11 +2033,21 @@ } } ], - "id": "c18a307b-b21b-4839-8403-3645e7d5b9c5", + "id": "f76df6ed-a560-4517-ae32-9bf7d7518896", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, "method": "GET", "header": [ { @@ -1629,7 +2076,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-f76df6ed-a560-4517-ae32-9bf7d7518896" }, { "name": "Validate coupon", @@ -1637,7 +2085,7 @@ { "listen": "test", "script": { - "id": "1dabfe9e-bb80-4eb3-a436-7c322af4ad14", + "id": "6c7102fd-abfd-44a1-bd70-aaf2d46ee7f1", "exec": [ "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", @@ -1649,7 +2097,7 @@ { "listen": "prerequest", "script": { - "id": "d43ae7a4-bf74-49e1-9099-ac44ef6a8cb3", + "id": "785896e4-8b48-4cec-957d-4b27ea74d5c3", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -1672,11 +2120,21 @@ } } ], - "id": "59d4b28e-3469-4791-b5e8-b72ab988b173", + "id": "443e66e0-34b9-4804-b761-bd62e7cc4288", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, "method": "POST", "header": [ { @@ -1714,7 +2172,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-443e66e0-34b9-4804-b761-bd62e7cc4288" }, { "name": "Apply coupon", @@ -1722,7 +2181,7 @@ { "listen": "prerequest", "script": { - "id": "f81179f1-dede-49ec-9e87-fe68a7436dc7", + "id": "51f8310b-533a-4f39-b7bc-c1da9906ae6f", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = 
pm.collectionVariables.get('X-Forwarded-For');\r", @@ -1745,11 +2204,21 @@ } } ], - "id": "693df9e3-9e63-4cc6-be0c-c0caeafa1eea", + "id": "b3bd0f55-5f8c-4ea6-bca3-94993a1d1804", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, "method": "POST", "header": [ { @@ -1782,7 +2251,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-b3bd0f55-5f8c-4ea6-bca3-94993a1d1804" }, { "name": "Create Order", @@ -1790,7 +2260,7 @@ { "listen": "test", "script": { - "id": "494bc4be-eb14-4d81-8d3d-e3a796024a50", + "id": "6a143464-3847-43eb-bed4-89cec5a83a72", "exec": [ "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", @@ -1806,7 +2276,7 @@ { "listen": "prerequest", "script": { - "id": "da7db3f5-09b9-4d88-8234-10253cd014a2", + "id": "71cce0c9-d934-4353-be0f-47a19817003e", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -1829,11 +2299,21 @@ } } ], - "id": "b87d7feb-f128-4206-a8b7-d7f40815a1b0", + "id": "776bef3d-b912-4847-a268-2e452f9b238b", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, "method": "POST", "header": [ { @@ -1866,7 +2346,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-776bef3d-b912-4847-a268-2e452f9b238b" }, { "name": "Get orders", @@ -1874,7 +2355,7 @@ { "listen": "test", "script": { - "id": "ec7ea74a-8799-4142-bdb7-ca74183b5455", + "id": "67a60d41-74e2-4d55-90bb-9434ed930515", "exec": [ "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", @@ -1886,7 +2367,7 @@ { "listen": "prerequest", "script": { - "id": "332c5a36-476b-4dfc-8e3c-c3da667914a3", + "id": "8737b250-4b38-4cd9-8da4-549bc83fcd8e", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -1909,11 +2390,21 @@ } } ], - "id": "9094e2a9-d3b7-4ecb-a66d-2a87ec9ee421", + "id": "d8b2a762-c4ab-460d-91a9-c1f666897cb0", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, "method": "GET", "header": [ { @@ -1943,7 +2434,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-d8b2a762-c4ab-460d-91a9-c1f666897cb0" }, { "name": "Get Order", @@ -1951,7 +2443,7 @@ { "listen": "test", "script": { - "id": "fe055072-70ae-4dcf-9eeb-d762d914f5d5", + "id": "63c33dcb-8e01-49e8-b76f-dc552707b126", "exec": [ "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", @@ -1963,7 +2455,7 @@ { "listen": "prerequest", "script": { - "id": "272887d9-2467-4bbd-8b6c-c9ab22d847bb", + "id": "ee8efbbf-d317-4a0a-b6c2-9af346d11dfd", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -1986,11 +2478,21 @@ } } ], - "id": "3d8817d4-0952-4ac1-84c1-3db35e28655c", + "id": "a42bff0b-3401-410e-855b-6fcff08178e1", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, "method": "GET", "header": [ { @@ -2020,7 +2522,8 @@ ] } }, - "response": [] + 
"response": [], + "uid": "13872198-a42bff0b-3401-410e-855b-6fcff08178e1" }, { "name": "Return order", @@ -2028,7 +2531,7 @@ { "listen": "test", "script": { - "id": "b23313b6-db41-4f4c-8444-f50a9ab45ce9", + "id": "95bf82ce-48af-4157-a4aa-8e11dcb386ef", "exec": [ "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", @@ -2040,7 +2543,7 @@ { "listen": "prerequest", "script": { - "id": "e56a6acb-682a-43e8-a6f5-c2a0a727d4fb", + "id": "e2c85c92-3844-459d-8355-4e182b9786ba", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -2063,11 +2566,21 @@ } } ], - "id": "5dc85a94-3f8e-463c-9b40-66bbf9fa3f8f", + "id": "9bb458ba-48cf-4dab-b9f3-4b9789095b72", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, "method": "POST", "header": [ { @@ -2103,7 +2616,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-9bb458ba-48cf-4dab-b9f3-4b9789095b72" }, { "name": "Add video", @@ -2111,7 +2625,7 @@ { "listen": "test", "script": { - "id": "5895f73f-db45-415e-b872-f48d57468fb2", + "id": "f25025f1-7dd9-4ff2-b1df-90c34e74915f", "exec": [ "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", @@ -2128,7 +2642,7 @@ { "listen": "prerequest", "script": { - "id": "276fb107-06e0-444c-abdf-fe45a03e9c9a", + "id": "ca302a6a-c9b1-44c1-9ecc-66744ee10ba7", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -2151,14 +2665,24 @@ } } ], - "id": "5fd1f6c8-6615-430a-9649-d6b069a2b9e2", + "id": "7bba9029-97f6-458e-9b7a-65edcd364ef8", "protocolProfileBehavior": { - "disableBodyPruning": true, "disabledSystemHeaders": { "content-type": true - } + }, + "disableBodyPruning": true }, "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, "method": "POST", "header": [ { @@ -2197,7 +2721,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-7bba9029-97f6-458e-9b7a-65edcd364ef8" }, { "name": "Get Video", @@ -2205,7 +2730,7 @@ { "listen": "test", "script": { - "id": "68aef3b9-bb21-4a31-ad68-9aadd266e719", + "id": "9610a1b1-fd62-4e42-9779-8c1cecb8b100", "exec": [ "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", @@ -2217,7 +2742,7 @@ { "listen": "prerequest", "script": { - "id": "db6cd700-8067-48d4-8059-3a7a7c71b2f0", + "id": "bc84846a-b858-49e9-b081-77b7c810596e", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -2240,11 +2765,21 @@ } } ], - "id": "76c10078-a6df-4c94-b1f5-e255f58c97dd", + "id": "e5d8f95f-404d-45f5-8df0-b524a2d9dece", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, "method": "GET", "header": [ { @@ -2275,7 +2810,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-e5d8f95f-404d-45f5-8df0-b524a2d9dece" }, { "name": "Change Video Name", @@ -2283,7 +2819,7 @@ { "listen": "test", "script": { - "id": "460d5f47-867d-4c03-b5fa-48d6af29d8bb", + "id": "529e0248-a28e-49bc-aafc-d31d0701aa90", "exec": [ "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", @@ -2295,7 
+2831,7 @@ { "listen": "prerequest", "script": { - "id": "ae043c78-80b8-40b0-8374-34cca47a7317", + "id": "7b194f02-d120-4d6a-b38e-1faf7606e8c0", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -2318,11 +2854,21 @@ } } ], - "id": "1e741c34-e320-4a6d-9249-013b6f0eacdb", + "id": "66ef9d3e-6d22-4a6f-b2f2-73aec8c5f4cc", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, "method": "PUT", "header": [ { @@ -2357,7 +2903,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-66ef9d3e-6d22-4a6f-b2f2-73aec8c5f4cc" }, { "name": "Share video", @@ -2365,7 +2912,7 @@ { "listen": "test", "script": { - "id": "cd8a41fb-9621-431b-8520-bbaf7644ca54", + "id": "6d7b66b9-ee50-4b0b-b529-742a494211eb", "exec": [ "pm.test(\"Status code is 403\", function () {", " pm.response.to.have.status(403);", @@ -2379,7 +2926,7 @@ { "listen": "prerequest", "script": { - "id": "1567ead8-846c-446b-b788-3b0594cfcdb2", + "id": "b91969ad-e768-40fd-b3e4-8bb525f5061f", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -2402,11 +2949,21 @@ } } ], - "id": "d465a93a-0fa5-4327-a683-9756f2e5fe32", + "id": "d74fc742-a4aa-4a80-b52a-f528d72164cb", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, "method": "GET", "header": [ { @@ -2443,7 +3000,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-d74fc742-a4aa-4a80-b52a-f528d72164cb" }, { "name": "Get user dashboard", @@ -2451,10 +3009,195 @@ { "listen": "test", "script": { - "id": "16d7908d-3cca-4e2f-9446-a9654d902984", + "id": "01630281-abcf-4238-b4a2-e38d2fec16c8", + "exec": [ + "pm.test(\"Status code is 200\", function () {", + " pm.response.to.have.status(200);", + "});" + ], + "type": "text/javascript" + } + }, + { + "listen": "prerequest", + "script": { + "id": "c7d793e8-e008-4199-ba9a-6e47ac54746c", + "exec": [ + "const {Property} = require('postman-collection');\r", + "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", + "if (!xff){\r", + " xff = Property.replaceSubstitutions('{{$randomIP}}');\r", + " console.log(\"X-Forwarded-For setting XFF to \"+xff);\r", + " \r", + "}\r", + "pm.collectionVariables.set(\"X-Forwarded-For\", xff);\r", + "pm.request.headers.remove('X-Forwarded-For');\r", + "pm.request.headers.add({ \r", + " // These keys appears when you set a header by hand. 
Just for fun they are here\r", + " disabled: false,\r", + " // Your header, effectively\r", + " key: 'X-Forwarded-For', \r", + " value: xff\r", + "});" + ], + "type": "text/javascript" + } + } + ], + "id": "1482944a-48e9-4cb0-8bb8-35914843b76f", + "protocolProfileBehavior": { + "disableBodyPruning": true + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "User-Agent", + "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36" + }, + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "Accept", + "value": "*/*" + } + ], + "url": { + "raw": "{{url}}/identity/api/v2/user/dashboard", + "host": [ + "{{url}}" + ], + "path": [ + "identity", + "api", + "v2", + "user", + "dashboard" + ] + } + }, + "response": [], + "uid": "13872198-1482944a-48e9-4cb0-8bb8-35914843b76f" + }, + { + "name": "Add new video", + "event": [ + { + "listen": "test", + "script": { + "id": "f25d8fc0-edb3-48f3-9721-0ac92825c681", + "exec": [ + "pm.test(\"Status code is 200\", function () {", + " pm.response.to.have.status(200);", + " var jsonData = JSON.parse(responseBody);", + " var video_id = jsonData.id;", + " pm.collectionVariables.set(\"video_id\", video_id);", + " console.log(video_id);", + "});", + "" + ], + "type": "text/javascript" + } + }, + { + "listen": "prerequest", + "script": { + "id": "3510de26-4ffe-4a20-b77c-55b8c441a185", + "exec": [ + "const {Property} = require('postman-collection');\r", + "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", + "if (!xff){\r", + " xff = Property.replaceSubstitutions('{{$randomIP}}');\r", + " console.log(\"X-Forwarded-For setting XFF to \"+xff);\r", + " \r", + "}\r", + "pm.collectionVariables.set(\"X-Forwarded-For\", xff);\r", + "pm.request.headers.remove('X-Forwarded-For');\r", + "pm.request.headers.add({ \r", + " // These keys appears when you set a header by hand. 
Just for fun they are here\r", + " disabled: false,\r", + " // Your header, effectively\r", + " key: 'X-Forwarded-For', \r", + " value: xff\r", + "});" + ], + "type": "text/javascript" + } + } + ], + "id": "e19ac072-9f09-4564-b23e-94729143cf78", + "protocolProfileBehavior": { + "disableBodyPruning": true + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "User-Agent", + "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36" + }, + { + "key": "Content-Type", + "value": "multipart/form-data; boundary=----WebKitFormBoundary7MA4YWxkTrZu0gW" + }, + { + "key": "Accept", + "value": "*/*" + } + ], + "body": { + "mode": "raw", + "raw": "\r\n------WebKitFormBoundary7MA4YWxkTrZu0gW\r\nContent-Disposition: form-data; name=\"file\"; filename=\"abc.mp4\"\r\nContent-Type: video/mp4\r\n\r\nhakfhas\r\n------WebKitFormBoundary7MA4YWxkTrZu0gW--" + }, + "url": { + "raw": "{{url}}/identity/api/v2/user/videos", + "host": [ + "{{url}}" + ], + "path": [ + "identity", + "api", + "v2", + "user", + "videos" + ] + } + }, + "response": [], + "uid": "13872198-e19ac072-9f09-4564-b23e-94729143cf78" + }, + { + "name": "Delete video", + "event": [ + { + "listen": "test", + "script": { + "id": "4569b343-c5be-4b72-9a94-f409dd6ce7bb", "exec": [ - "pm.test(\"Status code is 200\", function () {", - " pm.response.to.have.status(200);", + "pm.test(\"Status code is 403\", function () {", + " pm.response.to.have.status(404);", "});" ], "type": "text/javascript" @@ -2463,7 +3206,7 @@ { "listen": "prerequest", "script": { - "id": "9d59315a-e25d-4eb7-81db-684b043a074e", + "id": "4670b368-098a-4935-b67a-ab518d0e78ce", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -2486,12 +3229,22 @@ } } ], - "id": "eff513a9-38e7-4d22-bf56-967c360f1d1b", + "id": "68c9d559-1009-4a99-bdf8-7376ae05f0f2", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { - "method": "GET", + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "DELETE", "header": [ { "key": "User-Agent", @@ -2507,7 +3260,7 @@ } ], "url": { - "raw": "{{url}}/identity/api/v2/user/dashboard", + "raw": "{{url}}/identity/api/v2/user/videos/{{video_id}}", "host": [ "{{url}}" ], @@ -2516,45 +3269,44 @@ "api", "v2", "user", - "dashboard" + "videos", + "{{video_id}}" ] } }, - "response": [] + "response": [], + "uid": "13872198-68c9d559-1009-4a99-bdf8-7376ae05f0f2" }, { - "name": "Add new video", + "name": "Admin Login", "event": [ { "listen": "test", "script": { - "id": "5895f73f-db45-415e-b872-f48d57468fb2", "exec": [ + "", "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", " var jsonData = JSON.parse(responseBody);", - " var video_id = jsonData.id;", - " pm.collectionVariables.set(\"video_id\", video_id);", - " console.log(video_id);", + " pm.collectionVariables.set(\"adminToken\", jsonData.token);", + " var token = pm.collectionVariables.get(\"adminToken\");", + " pm.test(\"Token is set\", function () {", + " pm.expect(jsonData.token).to.eql(token);", + " });", "});", "" ], - "type": "text/javascript" + "type": "text/javascript", + "id": "ecefadd8-d1fb-4415-a0b2-cfcae8216ac6" } }, { "listen": "prerequest", "script": { - "id": "276fb107-06e0-444c-abdf-fe45a03e9c9a", "exec": [ 
"const {Property} = require('postman-collection');\r", - "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", - "if (!xff){\r", - " xff = Property.replaceSubstitutions('{{$randomIP}}');\r", - " console.log(\"X-Forwarded-For setting XFF to \"+xff);\r", - " \r", - "}\r", - "pm.collectionVariables.set(\"X-Forwarded-For\", xff);\r", + "var xff = Property.replaceSubstitutions('{{$randomIP}}');\r", + "pm.collectionVariables.set(\"X-Forwarded-For-Admin\", xff);\r", "pm.request.headers.remove('X-Forwarded-For');\r", "pm.request.headers.add({ \r", " // These keys appears when you set a header by hand. Just for fun they are here\r", @@ -2564,15 +3316,19 @@ " value: xff\r", "});" ], - "type": "text/javascript" + "type": "text/javascript", + "id": "96148952-7c1a-4316-8577-b7b0cc59fdb4" } } ], - "id": "aad230d3-1318-44ca-acd7-c5a2de4d80f6", + "id": "29cdda38-7bdf-471c-9a17-9217e4749ce2", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { + "auth": { + "type": "noauth" + }, "method": "POST", "header": [ { @@ -2581,7 +3337,7 @@ }, { "key": "Content-Type", - "value": "multipart/form-data; boundary=----WebKitFormBoundary7MA4YWxkTrZu0gW" + "value": "application/json" }, { "key": "Accept", @@ -2590,34 +3346,38 @@ ], "body": { "mode": "raw", - "raw": "\r\n------WebKitFormBoundary7MA4YWxkTrZu0gW\r\nContent-Disposition: form-data; name=\"file\"; filename=\"abc.mp4\"\r\nContent-Type: video/mp4\r\n\r\nhakfhas\r\n------WebKitFormBoundary7MA4YWxkTrZu0gW--" + "raw": "{\"email\":\"admin@example.com\",\"password\":\"Admin!123\"}" }, "url": { - "raw": "{{url}}/identity/api/v2/user/videos", + "raw": "{{url}}/identity/api/auth/login", "host": [ "{{url}}" ], "path": [ "identity", "api", - "v2", - "user", - "videos" + "auth", + "login" ] } }, - "response": [] + "response": [], + "uid": "13872198-29cdda38-7bdf-471c-9a17-9217e4749ce2" }, { - "name": "Delete video", + "name": "Add Product", "event": [ { "listen": "test", "script": { - "id": "68aef3b9-bb21-4a31-ad68-9aadd266e719", + "id": "f58f20f7-878b-4ffa-b17a-835c779b561a", "exec": [ - "pm.test(\"Status code is 403\", function () {", - " pm.response.to.have.status(404);", + "pm.test(\"Status code is 200\", function () {", + " pm.response.to.have.status(200);", + " var jsonData = pm.response.json();", + " var productId = jsonData.id;", + " pm.collectionVariables.set(\"product_id\", productId);", + " console.log(\"Product ID \", productId);", "});" ], "type": "text/javascript" @@ -2626,7 +3386,7 @@ { "listen": "prerequest", "script": { - "id": "f80b27fb-14f8-4b29-b3f1-3e7eb6407d28", + "id": "200fbc66-8abc-4d88-8d38-787ee390c27a", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -2649,12 +3409,22 @@ } } ], - "id": "339ba02c-b183-48f8-a2e9-f5c401de4167", + "id": "c9414a2a-aac8-4db6-be4d-b501931922d7", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { - "method": "DELETE", + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{adminToken}}", + "type": "string" + } + ] + }, + "method": "POST", "header": [ { "key": "User-Agent", @@ -2669,53 +3439,52 @@ "value": "*/*" } ], + "body": { + "mode": "raw", + "raw": "{\n \"name\": \"NewSeat\",\n \"price\": 100,\n \"image_url\": \"images/seat.svg\"\n}" + }, "url": { - "raw": "{{url}}/identity/api/v2/user/videos/{{video_id}}", + "raw": "{{url}}/workshop/api/shop/products", "host": [ "{{url}}" ], "path": [ - "identity", + "workshop", "api", - "v2", - "user", - "videos", - 
"{{video_id}}" + "shop", + "products" ] } }, - "response": [] + "response": [], + "uid": "13872198-c9414a2a-aac8-4db6-be4d-b501931922d7" }, { - "name": "Admin Login", + "name": "Delete video by admin", "event": [ { "listen": "test", "script": { - "id": "9f78ac74-9343-43a2-93b9-08ac1e0e3495", "exec": [ - "", "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", - " var jsonData = JSON.parse(responseBody);", - " pm.collectionVariables.set(\"adminToken\", jsonData.token);", - " var token = pm.collectionVariables.get(\"adminToken\");", - " pm.test(\"Token is set\", function () {", - " pm.expect(jsonData.token).to.eql(token);", - " });", - "});", - "" + "});" ], - "type": "text/javascript" + "type": "text/javascript", + "id": "7ec00694-305a-425c-a234-38349add316d" } }, { "listen": "prerequest", "script": { - "id": "08e257a1-3205-45e4-a75f-0c8f6cfe0113", "exec": [ "const {Property} = require('postman-collection');\r", - "var xff = Property.replaceSubstitutions('{{$randomIP}}');\r", + "var xff = pm.collectionVariables.get('X-Forwarded-For-Admin');\r", + "if (!xff){\r", + " xff = Property.replaceSubstitutions('{{$randomIP}}');\r", + " console.log(\"X-Forwarded-For setting XFF to \"+xff);\r", + " \r", + "}\r", "pm.collectionVariables.set(\"X-Forwarded-For-Admin\", xff);\r", "pm.request.headers.remove('X-Forwarded-For');\r", "pm.request.headers.add({ \r", @@ -2726,19 +3495,27 @@ " value: xff\r", "});" ], - "type": "text/javascript" + "type": "text/javascript", + "id": "20b3646f-e7ed-47c8-9aa0-d0ac7428f675" } } ], - "id": "7035a54a-536c-4dc7-ab8c-2b5d50af06a7", + "id": "62277a20-07b8-4954-8188-e41729cf4f39", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { "auth": { - "type": "noauth" + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{adminToken}}", + "type": "string" + } + ] }, - "method": "POST", + "method": "DELETE", "header": [ { "key": "User-Agent", @@ -2753,36 +3530,40 @@ "value": "*/*" } ], - "body": { - "mode": "raw", - "raw": "{\"email\":\"admin@example.com\",\"password\":\"Admin!123\"}" - }, "url": { - "raw": "{{url}}/identity/api/auth/login", + "raw": "{{url}}/identity/api/v2/admin/videos/{{video_id}}", "host": [ "{{url}}" ], "path": [ "identity", "api", - "auth", - "login" + "v2", + "admin", + "videos", + "{{video_id}}" ] } }, - "response": [] + "response": [], + "uid": "13872198-62277a20-07b8-4954-8188-e41729cf4f39" }, { - "name": "Delete video by admin", + "name": "ReAdd new video", "event": [ { "listen": "test", "script": { - "id": "68aef3b9-bb21-4a31-ad68-9aadd266e719", + "id": "77752d0e-50d9-4a7f-bf6c-80dabacd5563", "exec": [ "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", - "});" + " var jsonData = JSON.parse(responseBody);", + " var video_id = jsonData.id;", + " pm.collectionVariables.set(\"video_id\", video_id);", + " console.log(video_id);", + "});", + "" ], "type": "text/javascript" } @@ -2790,16 +3571,16 @@ { "listen": "prerequest", "script": { - "id": "b064fb58-e796-44da-8d55-460fac7f5213", + "id": "7e0b3d53-0e0e-4d88-829c-541c5ecdae2b", "exec": [ "const {Property} = require('postman-collection');\r", - "var xff = pm.collectionVariables.get('X-Forwarded-For-Admin');\r", + "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", "if (!xff){\r", " xff = Property.replaceSubstitutions('{{$randomIP}}');\r", " console.log(\"X-Forwarded-For setting XFF to \"+xff);\r", " \r", "}\r", - "pm.collectionVariables.set(\"X-Forwarded-For-Admin\", xff);\r", + 
"pm.collectionVariables.set(\"X-Forwarded-For\", xff);\r", "pm.request.headers.remove('X-Forwarded-For');\r", "pm.request.headers.add({ \r", " // These keys appears when you set a header by hand. Just for fun they are here\r", @@ -2813,7 +3594,7 @@ } } ], - "id": "b1895b03-74bc-4366-b974-88456ffc60c0", + "id": "0eaef3a1-e5b8-4756-bbe7-1eaefa4173d8", "protocolProfileBehavior": { "disableBodyPruning": true }, @@ -2823,12 +3604,12 @@ "bearer": [ { "key": "token", - "value": "{{adminToken}}", + "value": "{{token}}", "type": "string" } ] }, - "method": "DELETE", + "method": "POST", "header": [ { "key": "User-Agent", @@ -2836,15 +3617,19 @@ }, { "key": "Content-Type", - "value": "application/json" + "value": "multipart/form-data; boundary=----WebKitFormBoundary7MA4YWxkTrZu0gW" }, { "key": "Accept", "value": "*/*" } ], + "body": { + "mode": "raw", + "raw": "\r\n------WebKitFormBoundary7MA4YWxkTrZu0gW\r\nContent-Disposition: form-data; name=\"file\"; filename=\"abc.mp4\"\r\nContent-Type: video/mp4\r\n\r\nhakfhas\r\n------WebKitFormBoundary7MA4YWxkTrZu0gW--" + }, "url": { - "raw": "{{url}}/identity/api/v2/admin/videos/{{video_id}}", + "raw": "{{url}}/identity/api/v2/user/videos", "host": [ "{{url}}" ], @@ -2852,21 +3637,21 @@ "identity", "api", "v2", - "admin", - "videos", - "{{video_id}}" + "user", + "videos" ] } }, - "response": [] + "response": [], + "uid": "13872198-0eaef3a1-e5b8-4756-bbe7-1eaefa4173d8" }, { - "name": "ReAdd new video", + "name": "Get Workshop User Details", "event": [ { "listen": "test", "script": { - "id": "5895f73f-db45-415e-b872-f48d57468fb2", + "id": "77752d0e-50d9-4a7f-bf6c-80dabacd5563", "exec": [ "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", @@ -2883,7 +3668,7 @@ { "listen": "prerequest", "script": { - "id": "276fb107-06e0-444c-abdf-fe45a03e9c9a", + "id": "7e0b3d53-0e0e-4d88-829c-541c5ecdae2b", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -2906,12 +3691,22 @@ } } ], - "id": "c65d9d61-0670-488f-9de5-4c31174d2bd2", + "id": "b5d507ef-ece1-4b81-b3ab-67d14f15e56c", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { - "method": "POST", + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{adminToken}}", + "type": "string" + } + ] + }, + "method": "GET", "header": [ { "key": "User-Agent", @@ -2931,20 +3726,31 @@ "raw": "\r\n------WebKitFormBoundary7MA4YWxkTrZu0gW\r\nContent-Disposition: form-data; name=\"file\"; filename=\"abc.mp4\"\r\nContent-Type: video/mp4\r\n\r\nhakfhas\r\n------WebKitFormBoundary7MA4YWxkTrZu0gW--" }, "url": { - "raw": "{{url}}/identity/api/v2/user/videos", + "raw": "{{url}}/workshop/api/management/users/all?limit=30&offset=0", "host": [ "{{url}}" ], "path": [ - "identity", + "workshop", "api", - "v2", - "user", - "videos" + "management", + "users", + "all" + ], + "query": [ + { + "key": "limit", + "value": "30" + }, + { + "key": "offset", + "value": "0" + } ] } }, - "response": [] + "response": [], + "uid": "13872198-b5d507ef-ece1-4b81-b3ab-67d14f15e56c" }, { "name": "Change email", @@ -2952,7 +3758,7 @@ { "listen": "test", "script": { - "id": "2f5e6a1e-61b9-4f70-9358-77bb62890c36", + "id": "38a418ce-9e8e-4ceb-9ad5-ee89ee0af464", "exec": [ "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", @@ -2964,7 +3770,7 @@ { "listen": "prerequest", "script": { - "id": "596bd2dd-bbf9-40a8-a761-cdcfe43d85ec", + "id": "21130742-f16a-44cd-ac8b-190d25fcdf54", "exec": 
[ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -2987,11 +3793,21 @@ } } ], - "id": "c4a08b50-4d6d-4e89-9c3a-f39920170032", + "id": "14fa4db8-ea42-40ff-ae75-cb9b1c6f5f78", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, "method": "POST", "header": [ { @@ -3025,7 +3841,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-14fa4db8-ea42-40ff-ae75-cb9b1c6f5f78" }, { "name": "SearchMailVerificationToken", @@ -3033,7 +3850,6 @@ { "listen": "test", "script": { - "id": "fe2c7752-b716-4787-89c8-95587a2ad57e", "exec": [ "", "", @@ -3071,13 +3887,13 @@ "});", "" ], - "type": "text/javascript" + "type": "text/javascript", + "id": "e7a12935-7dcc-4524-a0ca-47500df1041d" } }, { "listen": "prerequest", "script": { - "id": "6c4ddaff-ff87-43fa-8acf-219a455dc3c3", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -3096,11 +3912,12 @@ " value: xff\r", "});" ], - "type": "text/javascript" + "type": "text/javascript", + "id": "8104da9e-1e65-4741-957c-1b60715198d3" } } ], - "id": "eae8cd29-3263-45e2-8c0d-9778a2cdc2db", + "id": "9bdb45a6-0cbc-416b-813b-c63513a9fb78", "protocolProfileBehavior": { "disableBodyPruning": true }, @@ -3150,7 +3967,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-9bdb45a6-0cbc-416b-813b-c63513a9fb78" }, { "name": "Verify email token", @@ -3158,7 +3976,7 @@ { "listen": "test", "script": { - "id": "1e128817-13c7-4d26-9094-bc5c5c8bc846", + "id": "9bd693bc-3f61-4b0e-a1de-fbb852f1bc1b", "exec": [ "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", @@ -3170,7 +3988,7 @@ { "listen": "prerequest", "script": { - "id": "32c4af06-26c1-4843-b928-242193ce338f", + "id": "20995566-89c9-424f-a135-8035330f6f22", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -3193,11 +4011,21 @@ } } ], - "id": "07386f04-f0e8-4ffd-9312-0789534eb826", + "id": "a2543f67-c10e-450e-8f43-1ce3bff4a5e4", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, "method": "POST", "header": [ { @@ -3231,7 +4059,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-a2543f67-c10e-450e-8f43-1ce3bff4a5e4" }, { "name": "Login Back", @@ -3239,7 +4068,6 @@ { "listen": "test", "script": { - "id": "efbd6318-e2ca-4a1c-9c7d-5f76124c039a", "exec": [ "", "pm.test(\"Status code is 200\", function () {", @@ -3253,13 +4081,13 @@ "});", "" ], - "type": "text/javascript" + "type": "text/javascript", + "id": "f63d459c-2004-4718-9d85-cfabe679c33f" } }, { "listen": "prerequest", "script": { - "id": "2c56ed4f-eb1c-4daa-ac6b-0e08ca6e3f28", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -3278,11 +4106,12 @@ " value: xff\r", "});" ], - "type": "text/javascript" + "type": "text/javascript", + "id": "113f6e16-a21c-4cfb-9c8e-da3dff482826" } } ], - "id": "5d58c4ba-58e5-4bb8-b811-d269bea71f8f", + "id": "1db55c21-1d59-4477-98d9-fe5640fff62c", "protocolProfileBehavior": { "disableBodyPruning": true }, @@ -3322,7 +4151,8 @@ ] } }, - "response": [] + "response": [], + "uid": 
"13872198-1db55c21-1d59-4477-98d9-fe5640fff62c" }, { "name": "Change email Back", @@ -3330,7 +4160,7 @@ { "listen": "test", "script": { - "id": "70415a06-9323-4c41-a7e6-f969182bf66d", + "id": "c848598b-7d28-4806-8ad5-0d7e20a9d950", "exec": [ "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", @@ -3342,7 +4172,7 @@ { "listen": "prerequest", "script": { - "id": "11b8f9ff-9c82-4f2f-9680-feb981b2d7f7", + "id": "3ff5d22f-d710-4ed7-afa4-d115e85ea824", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -3365,11 +4195,21 @@ } } ], - "id": "13149265-d58c-439f-be95-9919e3246bf0", + "id": "e09f649c-7215-4aca-9b50-1b1acb8c7f95", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, "method": "POST", "header": [ { @@ -3403,7 +4243,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-e09f649c-7215-4aca-9b50-1b1acb8c7f95" }, { "name": "SearchMailVerificationToken Back", @@ -3411,7 +4252,7 @@ { "listen": "test", "script": { - "id": "c15e8800-1d01-401e-8931-03b5d8952041", + "id": "77bac1af-c8c4-4989-b04c-1e351c8fd482", "exec": [ "", "function dynamicSort(property) {", @@ -3454,7 +4295,7 @@ { "listen": "prerequest", "script": { - "id": "4500a35d-a34a-419b-a641-a63deb9a35a1", + "id": "b2332f48-9202-415b-baa4-43e43d9ecd55", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -3477,11 +4318,14 @@ } } ], - "id": "52e03cf6-c401-4ce3-8e74-bc1da604af14", + "id": "d4e9595d-f33f-4548-bae5-b852749ad857", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { + "auth": { + "type": "noauth" + }, "method": "GET", "header": [ { @@ -3523,7 +4367,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-d4e9595d-f33f-4548-bae5-b852749ad857" }, { "name": "Verify email token Back", @@ -3531,7 +4376,7 @@ { "listen": "test", "script": { - "id": "8ce19507-6497-462d-b850-32b4bae603b6", + "id": "71747391-89fb-4364-bd0b-aea2c2d8a71e", "exec": [ "pm.test(\"Status code is 200\", function () {", " pm.response.to.have.status(200);", @@ -3543,7 +4388,7 @@ { "listen": "prerequest", "script": { - "id": "913f1dfe-9f4a-4a67-a191-fd505def4101", + "id": "3f45fffa-8639-49d8-be61-203d91dd3e6b", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = pm.collectionVariables.get('X-Forwarded-For');\r", @@ -3566,11 +4411,21 @@ } } ], - "id": "5c96d385-7d37-46ab-93f5-bb96bb14a7a9", + "id": "4c8b0ba7-01f8-4070-9f0c-11c187878ca7", "protocolProfileBehavior": { "disableBodyPruning": true }, "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, "method": "POST", "header": [ { @@ -3604,7 +4459,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-4c8b0ba7-01f8-4070-9f0c-11c187878ca7" }, { "name": "Login", @@ -3612,7 +4468,6 @@ { "listen": "test", "script": { - "id": "9f78ac74-9343-43a2-93b9-08ac1e0e3495", "exec": [ "", "pm.test(\"Status code is 200\", function () {", @@ -3626,13 +4481,13 @@ "});", "" ], - "type": "text/javascript" + "type": "text/javascript", + "id": "d00789ea-c0b8-4df0-bb89-dd64aa184034" } }, { "listen": "prerequest", "script": { - "id": "7cba83e4-7c91-45cf-92bd-583d22882cad", "exec": [ "const {Property} = require('postman-collection');\r", "var xff = 
pm.collectionVariables.get('X-Forwarded-For');\r", @@ -3651,11 +4506,12 @@ " value: xff\r", "});" ], - "type": "text/javascript" + "type": "text/javascript", + "id": "5db56e2d-beea-4c35-ad5c-0821e4827c89" } } ], - "id": "1c3bdecd-846e-482d-aef5-8cf9957af323", + "id": "2814846e-5762-42e8-adef-a47c76acb06c", "protocolProfileBehavior": { "disableBodyPruning": true }, @@ -3695,7 +4551,8 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-2814846e-5762-42e8-adef-a47c76acb06c" }, { "name": "Get Public Key", @@ -3709,11 +4566,11 @@ "});" ], "type": "text/javascript", - "id": "13612403-9af5-4248-b79f-53294102aab5" + "id": "2824bbc2-dcc8-49d3-bbf5-593d32de8651" } } ], - "id": "3f18f71e-dff2-4e6c-887f-530f5b8dd901", + "id": "a29f188e-0ad9-46d8-9282-473d63ab1cbb", "protocolProfileBehavior": { "disableBodyPruning": true }, @@ -3743,24 +4600,15 @@ ] } }, - "response": [] + "response": [], + "uid": "13872198-a29f188e-0ad9-46d8-9282-473d63ab1cbb" } ], - "auth": { - "type": "bearer", - "bearer": [ - { - "key": "token", - "value": "{{token}}", - "type": "string" - } - ] - }, "event": [ { "listen": "prerequest", "script": { - "id": "e843b75c-a27d-4c6f-bd54-60870722d8d8", + "id": "4be90437-8204-4d9d-918d-09768b990a8e", "type": "text/javascript", "exec": [ "const {Property} = require('postman-collection');", @@ -3785,7 +4633,7 @@ { "listen": "test", "script": { - "id": "f1f15ef0-b0ac-4b31-967f-03e8b2e80fb5", + "id": "86278886-cc27-487e-afcd-d3b28158991c", "type": "text/javascript", "exec": [ "" @@ -3795,94 +4643,88 @@ ], "variable": [ { - "id": "976b8d4c-2b5c-4199-bd21-fddf0e9c9204", "key": "X-Forwarded-For", "value": "" }, { - "id": "b0be61d8-3e49-46ec-8f34-9751bae846c4", "key": "X-Forwarded-For-Admin", "value": "" }, { - "id": "2115e633-b732-4edb-aa53-d037fb45d512", "key": "vehicle_id", "value": "" }, { - "id": "5f6e435d-677f-439d-9a83-f6a17ddbd406", "key": "name", "value": "" }, { - "id": "c583957b-eb70-4989-8692-eb9170d1b6ec", "key": "new_name", "value": "" }, { - "id": "ba19d5ef-b2ec-4be1-a48e-891f208f67d4", "key": "email", "value": "" }, { - "id": "d11f3965-a5b0-4bb6-9cce-26f5ab1ece95", "key": "new_email", "value": "" }, { - "id": "74164c23-852f-4d8f-9cbf-d315b653962c", "key": "password", "value": "" }, { - "id": "b8a812fa-9fb3-4906-934e-988999236b46", "key": "phone", "value": "" }, { - "id": "23de9e91-4ef8-4544-959d-82e9333bf74a", "key": "token", "value": "" }, { - "id": "3ca15d6a-41fe-4f2a-9b29-70eaa64a5638", "key": "VIN", "value": "" }, { - "id": "fe426f51-690f-470f-8bc0-a4d8e2987e54", "key": "PIN", "value": "" }, { - "id": "8510b1ab-0fb8-4e2b-8649-7c53717b75e2", "key": "report_id", "value": "" }, { - "id": "04a90206-9df3-4270-ab46-d891ec756ce2", "key": "product_id", "value": "" }, { - "id": "839cfae5-b1f7-4ba4-ad76-bd1b54a4f32e", "key": "order_id", "value": "" }, { - "id": "e9edb208-a992-4df9-936a-bc87d9eefed3", "key": "video_id", "value": "" }, { - "id": "e9f45f5f-2733-4115-882c-23f2715a598e", "key": "adminToken", "value": "" }, { - "id": "83d244c7-b3e5-4292-ab77-d4f297cc08e1", "key": "email_token", "value": "" + }, + { + "key": "OTP", + "value": "" + }, + { + "key": "post_id", + "value": "" + }, + { + "key": "mechanicToken", + "value": "" } ] } diff --git a/services/community/api/controllers/post_controller.go b/services/community/api/controllers/post_controller.go index 2372f2e5..7115881f 100644 --- a/services/community/api/controllers/post_controller.go +++ b/services/community/api/controllers/post_controller.go @@ -87,15 +87,15 @@ func (s *Server) GetPost(w http.ResponseWriter, r 
*http.Request) { limit = 50 } - page_param := r.URL.Query().Get("page") - page := 0 - if page_param != "" { - page, err = strconv.Atoi(page_param) + offset := 0 + offset_param := r.URL.Query().Get("offset") + if offset_param != "" { + offset, err = strconv.Atoi(offset_param) if err != nil { - page = 0 + offset = 0 } } - posts, err := models.FindAllPost(s.Client, page, limit) + posts, err := models.FindAllPost(s.Client, offset, limit) if err != nil { responses.ERROR(w, http.StatusInternalServerError, err) diff --git a/services/community/api/models/post.go b/services/community/api/models/post.go index 3d81537e..a6c11de1 100644 --- a/services/community/api/models/post.go +++ b/services/community/api/models/post.go @@ -104,13 +104,13 @@ func GetPostByID(client *mongo.Client, ID string) (Post, error) { } //FindAllPost return all recent post -func FindAllPost(client *mongo.Client, page int, limit int) ([]interface{}, error) { +func FindAllPost(client *mongo.Client, offset int, limit int) ([]interface{}, error) { post := []Post{} options := options.Find() options.SetSort(bson.D{{"_id", -1}}) options.SetLimit(int64(limit)) - options.SetSkip(int64(page * limit)) + options.SetSkip(int64(offset * limit)) collection := client.Database("crapi").Collection("post") cur, err := collection.Find(context.Background(), bson.D{}, options) if err != nil { diff --git a/services/identity/src/main/resources/application.properties b/services/identity/src/main/resources/application.properties index 9a433948..fb846ec8 100644 --- a/services/identity/src/main/resources/application.properties +++ b/services/identity/src/main/resources/application.properties @@ -15,7 +15,7 @@ spring.jpa.hibernate.ddl-auto=update app.jwksJson=${JWKS} -app.jwtExpiration=${JWT_EXPIRATION} +app.jwtExpiration=${JWT_EXPIRATION:604800000} #Mail Configuration mail.from=${SMTP_FROM} diff --git a/services/workshop/crapi/admin.py b/services/workshop/crapi/admin.py index fd70a4eb..c9848ef2 100644 --- a/services/workshop/crapi/admin.py +++ b/services/workshop/crapi/admin.py @@ -19,7 +19,7 @@ from crapi.mechanic.models import ServiceRequest, Mechanic from crapi.shop.models import Order, Product, AppliedCoupon -from user.models import User, UserDetails, Vehicle +from crapi.user.models import User, UserDetails, Vehicle admin.site.register(Order) admin.site.register(User) diff --git a/services/workshop/crapi/apps.py b/services/workshop/crapi/apps.py index f99a502a..f0534e7b 100644 --- a/services/workshop/crapi/apps.py +++ b/services/workshop/crapi/apps.py @@ -55,7 +55,7 @@ def create_products(): logger.info("Created Product: "+str(product.__dict__)) def create_mechanics(): - from user.models import User, UserDetails + from crapi.user.models import User, UserDetails from crapi.mechanic.models import Mechanic mechanic_details_all = [ { @@ -100,7 +100,7 @@ def create_mechanics(): logger.info("Created User: "+str(user.__dict__)) else: user = uset.first() - + if Mechanic.objects.filter(mechanic_code=mechanic_details['mechanic_code']): logger.info("Mechanic already exists. Skipping: " + mechanic_details['mechanic_code'] @@ -133,7 +133,7 @@ def create_reports(): import random import sys import textwrap - from user.models import User, UserDetails, Vehicle + from crapi.user.models import User, UserDetails, Vehicle from crapi.mechanic.models import Mechanic, ServiceRequest from django.utils import timezone count = ServiceRequest.objects.all().count() @@ -159,14 +159,14 @@ def create_reports(): problem_details=textwrap.dedent("""\ My car {} - {} is having issues. 
Can you give me a call on my mobile {}, - Or send me an email at {} + Or send me an email at {} Thanks, {}. """.format( - vehicle_company.name, + vehicle_company.name, vehicle_model.model, - user.number, - user.email, + user.number, + user.email, user_detail.name) ), status=status, @@ -180,7 +180,7 @@ def create_reports(): def create_orders(): import uuid - from user.models import User, UserDetails + from crapi.user.models import User, UserDetails from crapi.shop.models import Product from crapi.shop.models import Order if Order.objects.all().count() >= 1: @@ -205,9 +205,9 @@ def create_orders(): ) order2.save() logger.info("Created Order:2: "+str(order2.__dict__)) - - - + + + class CRAPIConfig(AppConfig): diff --git a/services/workshop/crapi/mechanic/models.py b/services/workshop/crapi/mechanic/models.py index 4fcc1b38..7d6bf721 100644 --- a/services/workshop/crapi/mechanic/models.py +++ b/services/workshop/crapi/mechanic/models.py @@ -18,7 +18,7 @@ """ from django.db import models -from user.models import User, Vehicle +from crapi.user.models import User, Vehicle from collections import OrderedDict from extended_choices import Choices from django_db_cascade.fields import ForeignKey, OneToOneField diff --git a/services/workshop/crapi/mechanic/serializers.py b/services/workshop/crapi/mechanic/serializers.py index 32643bde..7c666914 100644 --- a/services/workshop/crapi/mechanic/serializers.py +++ b/services/workshop/crapi/mechanic/serializers.py @@ -18,7 +18,7 @@ from rest_framework import serializers from crapi.mechanic.models import Mechanic, ServiceRequest -from user.serializers import UserSerializer, VehicleSerializer +from crapi.user.serializers import UserSerializer, VehicleSerializer class MechanicSerializer(serializers.ModelSerializer): diff --git a/services/workshop/crapi/mechanic/tests.py b/services/workshop/crapi/mechanic/tests.py index b7030257..1c22a92b 100644 --- a/services/workshop/crapi/mechanic/tests.py +++ b/services/workshop/crapi/mechanic/tests.py @@ -150,4 +150,4 @@ def test_bad_request(self): res = self.client.post('/workshop/api/mechanic/signup', self.mechanic, content_type="application/json") - self.assertNotEqual(res.status_code, 200) + self.assertEqual(res.status_code, 400) diff --git a/services/workshop/crapi/mechanic/views.py b/services/workshop/crapi/mechanic/views.py index 35bdc6fa..23c816e7 100644 --- a/services/workshop/crapi/mechanic/views.py +++ b/services/workshop/crapi/mechanic/views.py @@ -25,11 +25,13 @@ from django.db import models from utils.jwt import jwt_auth_required from utils import messages -from user.models import User, Vehicle, UserDetails +from crapi.user.models import User, Vehicle, UserDetails from utils.logging import log_error from .models import Mechanic, ServiceRequest from .serializers import MechanicSerializer, ServiceRequestSerializer, ReceiveReportSerializer, SignUpSerializer - +DEFAULT_LIMIT = 10 +DEFAULT_OFFSET = 0 +MAX_LIMIT = 100 class SignUpView(APIView): """ @@ -204,7 +206,22 @@ def get(self, request, user=None): list of service request object and 200 status if no error message and corresponding status if error """ - service_requests = ServiceRequest.objects.filter(mechanic__user=user) + limit = request.GET.get('limit', str(DEFAULT_LIMIT)) + offset = request.GET.get('offset', str(DEFAULT_OFFSET)) + if not limit.isdigit() or not offset.isdigit(): + return Response( + {'message': messages.INVALID_LIMIT_OR_OFFSET}, + status=status.HTTP_400_BAD_REQUEST + ) + limit = int(limit) + offset = int(offset) + if limit > MAX_LIMIT: + limit 
= 100 + if limit < 0: + limit = DEFAULT_LIMIT + if offset < 0: + offset = DEFAULT_OFFSET + service_requests = ServiceRequest.objects.filter(mechanic__user=user).order_by('id')[offset:offset+limit] serializer = ServiceRequestSerializer(service_requests, many=True) response_data = dict( service_requests=serializer.data diff --git a/services/workshop/crapi/merchant/serializers.py b/services/workshop/crapi/merchant/serializers.py index 1ffc74f5..09074715 100644 --- a/services/workshop/crapi/merchant/serializers.py +++ b/services/workshop/crapi/merchant/serializers.py @@ -23,5 +23,5 @@ class ContactMechanicSerializer(serializers.Serializer): Serializer for Contact Mechanic model. """ mechanic_api = serializers.CharField() - repeat_request_if_failed = serializers.BooleanField() - number_of_repeats = serializers.IntegerField() + repeat_request_if_failed = serializers.BooleanField(required=False) + number_of_repeats = serializers.IntegerField(required=False) diff --git a/services/workshop/crapi/merchant/tests.py b/services/workshop/crapi/merchant/tests.py index 85edc213..7ba8f65e 100644 --- a/services/workshop/crapi/merchant/tests.py +++ b/services/workshop/crapi/merchant/tests.py @@ -22,7 +22,7 @@ from django.test import TestCase, Client from django.utils import timezone from utils import messages -from user.models import User, Vehicle, VehicleModel, VehicleCompany +from crapi.user.models import User, Vehicle, VehicleModel, VehicleCompany class MerchantTestCase(TestCase): @@ -135,7 +135,7 @@ def test_contact_mechanic(self): self.assertIn('Google', res.json()['response_from_mechanic_api']) - def test_bad_request(self): + def test_repeat_missing_request(self): """ deletes repeat_request_if_failed field from contact_mechanic request body should get a bad request response @@ -146,7 +146,7 @@ def test_bad_request(self): self.contact_mechanic_request_body, **self.user_auth_headers, content_type="application/json") - self.assertNotEqual(res.status_code, 200) + self.assertEqual(res.status_code, 200) def test_receive_report_and_get_report(self): """ diff --git a/services/workshop/crapi/merchant/views.py b/services/workshop/crapi/merchant/views.py index 1f9f2a7a..06f516c5 100644 --- a/services/workshop/crapi/merchant/views.py +++ b/services/workshop/crapi/merchant/views.py @@ -21,7 +21,6 @@ from rest_framework import status from rest_framework.response import Response from rest_framework.views import APIView - from crapi.merchant.serializers import ContactMechanicSerializer from utils.jwt import jwt_auth_required from utils import messages @@ -41,7 +40,7 @@ def post(self, request, user=None): :param request: http request for the view method allowed: POST http request should be authorised by the jwt token of the user - mandatory fields: ['mechanic_api', 'repeat_request_if_failed', 'number_of_repeats'] + mandatory fields: ['mechanic_api'] :param user: User object of the requesting user :returns Response object with response_from_mechanic_api and 200 status if no error @@ -53,8 +52,8 @@ def post(self, request, user=None): log_error(request.path, request.data, status.HTTP_400_BAD_REQUEST, serializer.errors) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - repeat_request_if_failed = request_data['repeat_request_if_failed'] - number_of_repeats = request_data['number_of_repeats'] + repeat_request_if_failed = request_data.get('repeat_request_if_failed', False) + number_of_repeats = request_data.get('number_of_repeats', 1) if repeat_request_if_failed and number_of_repeats < 1: return 
Response( {'message': messages.MIN_NO_OF_REPEATS_FAILED}, diff --git a/services/workshop/crapi/migrations/0001_initial.py b/services/workshop/crapi/migrations/0001_initial.py index e78ef092..35fc5d65 100644 --- a/services/workshop/crapi/migrations/0001_initial.py +++ b/services/workshop/crapi/migrations/0001_initial.py @@ -24,16 +24,87 @@ class Migration(migrations.Migration): initial = True dependencies = [ - ('user', '0001_initial'), ] operations = [ + migrations.CreateModel( + name='User', + fields=[ + ('id', models.AutoField(primary_key=True, serialize=False)), + ('created_on', models.DateTimeField()), + ('email', models.CharField(max_length=255, unique=True)), + ('jwt_token', models.CharField(max_length=500, null=True, unique=True)), + ('number', models.CharField(max_length=255, null=True)), + ('password', models.CharField(max_length=255)), + ('role', models.IntegerField(choices=[(2, 1), (0, 0)], default=0)), + ], + options={ + 'db_table': 'user_login', + 'managed': settings.IS_TESTING + }, + ), + migrations.CreateModel( + name='VehicleCompany', + fields=[ + ('id', models.AutoField(primary_key=True, serialize=False)), + ('name', models.CharField(max_length=255)), + ], + options={ + 'db_table': 'vehicle_company', + 'managed': settings.IS_TESTING + }, + ), + migrations.CreateModel( + name='VehicleModel', + fields=[ + ('id', models.AutoField(primary_key=True, serialize=False)), + ('fuel_type', models.BigIntegerField()), + ('model', models.CharField(max_length=255)), + ('vehicle_img', models.CharField(max_length=255, null=True)), + ('vehiclecompany', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='crapi.VehicleCompany')) + ], + options={ + 'db_table': 'vehicle_model', + 'managed': settings.IS_TESTING + }, + ), + migrations.CreateModel( + name='Vehicle', + fields=[ + ('id', models.AutoField(primary_key=True, serialize=False)), + ('pincode', models.CharField(max_length=255, null=True)), + ('vin', models.CharField(max_length=255)), + ('year', models.BigIntegerField(null=True)), + ('vehicle_model', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='crapi.VehicleModel')), + ('status', models.CharField(max_length=255)), + ('location_id', models.BigIntegerField(null=True)), + ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='crapi.User')), + ], + options={ + 'db_table': 'vehicle_details', + 'managed': settings.IS_TESTING + }, + ), + migrations.CreateModel( + name='UserDetails', + fields=[ + ('id', models.AutoField(primary_key=True, serialize=False)), + ('available_credit', models.FloatField()), + ('name', models.CharField(max_length=255, null=True)), + ('status', models.CharField(max_length=255, null=True)), + ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='crapi.User')), + ], + options={ + 'db_table': 'user_details', + 'managed': settings.IS_TESTING + }, + ), migrations.CreateModel( name='Mechanic', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('mechanic_code', models.CharField(max_length=100, unique=True)), - ('user', django_db_cascade.fields.ForeignKey(on_delete=django_db_cascade.deletions.DB_CASCADE, to='user.User')), + ('user', django_db_cascade.fields.ForeignKey(on_delete=django_db_cascade.deletions.DB_CASCADE, to='crapi.User')), ], options={ 'db_table': 'mechanic', @@ -60,7 +131,7 @@ class Migration(migrations.Migration): ('updated_on', models.DateTimeField(null=True)), ('status', models.CharField(choices=[('Pending', 'Pending'), 
('Finished', 'Finished')], default='Pending', max_length=10)), ('mechanic', django_db_cascade.fields.ForeignKey(on_delete=django_db_cascade.deletions.DB_CASCADE, to='crapi.Mechanic')), - ('vehicle', django_db_cascade.fields.ForeignKey(on_delete=django_db_cascade.deletions.DB_CASCADE, to='user.Vehicle')), + ('vehicle', django_db_cascade.fields.ForeignKey(on_delete=django_db_cascade.deletions.DB_CASCADE, to='crapi.Vehicle')), ], options={ 'db_table': 'service_request', @@ -74,7 +145,7 @@ class Migration(migrations.Migration): ('created_on', models.DateTimeField()), ('status', models.CharField(choices=[('delivered', 'delivered'), ('return pending', 'return pending'), ('returned', 'returned')], default='delivered', max_length=20)), ('product', django_db_cascade.fields.ForeignKey(on_delete=django_db_cascade.deletions.DB_CASCADE, to='crapi.Product')), - ('user', django_db_cascade.fields.ForeignKey(on_delete=django_db_cascade.deletions.DB_CASCADE, to='user.User')), + ('user', django_db_cascade.fields.ForeignKey(on_delete=django_db_cascade.deletions.DB_CASCADE, to='crapi.User')), ], options={ 'db_table': 'order', @@ -85,7 +156,7 @@ class Migration(migrations.Migration): fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('coupon_code', models.CharField(max_length=255)), - ('user', django_db_cascade.fields.ForeignKey(on_delete=django_db_cascade.deletions.DB_CASCADE, to='user.User')), + ('user', django_db_cascade.fields.ForeignKey(on_delete=django_db_cascade.deletions.DB_CASCADE, to='crapi.User')), ], options={ 'db_table': 'applied_coupon', diff --git a/services/workshop/crapi/shop/models.py b/services/workshop/crapi/shop/models.py index ca5003e8..043ea4d4 100644 --- a/services/workshop/crapi/shop/models.py +++ b/services/workshop/crapi/shop/models.py @@ -20,7 +20,7 @@ from django.db import models from django.conf import settings -from user.models import User +from crapi.user.models import User from extended_choices import Choices from django_db_cascade.fields import ForeignKey, OneToOneField from django_db_cascade.deletions import DB_CASCADE @@ -42,7 +42,7 @@ class Meta: def __str__(self): return f"{self.name} - {self.price}" - + class Order(models.Model): """ Order Model diff --git a/services/workshop/crapi/shop/serializers.py b/services/workshop/crapi/shop/serializers.py index 4b9c7a6e..74ef6f35 100644 --- a/services/workshop/crapi/shop/serializers.py +++ b/services/workshop/crapi/shop/serializers.py @@ -18,7 +18,7 @@ from rest_framework import serializers from crapi.shop.models import Order, Product, Coupon -from user.serializers import UserSerializer +from crapi.user.serializers import UserSerializer class ProductSerializer(serializers.ModelSerializer): diff --git a/services/workshop/crapi/shop/tests.py b/services/workshop/crapi/shop/tests.py index 18f54eb2..312a8883 100644 --- a/services/workshop/crapi/shop/tests.py +++ b/services/workshop/crapi/shop/tests.py @@ -24,7 +24,7 @@ from django.test import TestCase, Client from django.utils import timezone from utils import messages -from user.models import User, UserDetails +from crapi.user.models import User, UserDetails from crapi.shop.models import Coupon logger = logging.getLogger('ProductTest') diff --git a/services/workshop/crapi/shop/views.py b/services/workshop/crapi/shop/views.py index 08999eb4..9c50e41b 100644 --- a/services/workshop/crapi/shop/views.py +++ b/services/workshop/crapi/shop/views.py @@ -29,11 +29,11 @@ from utils.helper import basic_auth from crapi.shop.serializers 
import OrderSerializer, ProductSerializer, CouponSerializer, ProductQuantitySerializer -from user.serializers import UserSerializer +from crapi.user.serializers import UserSerializer from utils.jwt import jwt_auth_required from utils import messages from crapi.shop.models import Order, Product, AppliedCoupon, Coupon -from user.models import UserDetails +from crapi.user.models import UserDetails from utils.logging import log_error from django.core.exceptions import ObjectDoesNotExist @@ -121,7 +121,7 @@ def get(self, request, order_id=None, user=None): payment_response = requests.post( gateway_endpoint, headers={ - "Authorization": gateway_credential, + "Authorization": gateway_credential, "Content-Type": "application/json" }, json=data, diff --git a/services/workshop/crapi/urls.py b/services/workshop/crapi/urls.py index 48953814..5ffa7cf6 100644 --- a/services/workshop/crapi/urls.py +++ b/services/workshop/crapi/urls.py @@ -22,4 +22,5 @@ path('api/mechanic/', include('crapi.mechanic.urls')), path('api/merchant/', include('crapi.merchant.urls')), path('api/shop/', include('crapi.shop.urls')), + path('api/management/', include('crapi.user.urls')), ] diff --git a/services/workshop/user/__init__.py b/services/workshop/crapi/user/__init__.py similarity index 100% rename from services/workshop/user/__init__.py rename to services/workshop/crapi/user/__init__.py diff --git a/services/workshop/user/models.py b/services/workshop/crapi/user/models.py similarity index 99% rename from services/workshop/user/models.py rename to services/workshop/crapi/user/models.py index a82dfe09..a903b0aa 100644 --- a/services/workshop/user/models.py +++ b/services/workshop/crapi/user/models.py @@ -38,6 +38,7 @@ class User(models.Model): ROLE_CHOICES = Choices( ('USER', 1, 'User'), ('MECH', 2, 'Mechanic'), + ('ADMIN', 3, 'Admin'), dict_class = OrderedDict ) role = models.IntegerField(choices=ROLE_CHOICES, default=ROLE_CHOICES.USER) diff --git a/services/workshop/user/apps.py b/services/workshop/crapi/user/sapps.py similarity index 100% rename from services/workshop/user/apps.py rename to services/workshop/crapi/user/sapps.py diff --git a/services/workshop/user/serializers.py b/services/workshop/crapi/user/serializers.py similarity index 77% rename from services/workshop/user/serializers.py rename to services/workshop/crapi/user/serializers.py index 19f43302..cc9a309d 100644 --- a/services/workshop/user/serializers.py +++ b/services/workshop/crapi/user/serializers.py @@ -17,7 +17,7 @@ """ from rest_framework import serializers -from user.models import User, Vehicle +from crapi.user.models import User, UserDetails, Vehicle class UserSerializer(serializers.ModelSerializer): @@ -33,6 +33,20 @@ class Meta: fields = ('email', 'number') +class UserDetailsSerializer(serializers.ModelSerializer): + """ + Serializer for User Details model + """ + user = UserSerializer() + + class Meta: + """ + Meta class for UserSerializer + """ + model = UserDetails + fields = ('user', 'available_credit') + + class VehicleSerializer(serializers.ModelSerializer): """ Serializer for Vehicle model diff --git a/services/workshop/crapi/user/tests.py b/services/workshop/crapi/user/tests.py new file mode 100644 index 00000000..483f12f5 --- /dev/null +++ b/services/workshop/crapi/user/tests.py @@ -0,0 +1,128 @@ +# +# Licensed under the Apache License, Version 2.0 (the “License”); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an “AS IS” BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +contains all the test cases related to shop management +""" +from unittest.mock import patch + +from django.db import connection +from utils.mock_methods import get_sample_admin_user, get_sample_user_data, get_sample_users, mock_jwt_auth_required + +patch('utils.jwt.jwt_auth_required', mock_jwt_auth_required).start() + +import logging +import bcrypt +import json +from django.test import TestCase, Client +from django.utils import timezone +from utils import messages +from crapi.user.views import DEFAULT_LIMIT +from crapi.user.models import User, UserDetails + +logger = logging.getLogger('UserTest') +MAX_USER_COUNT = 40 + +class UserDetailsTestCase(TestCase): + """ + contains all the test cases related to UserDetails + Attributes: + client: Client object used for testing + user: dummy user object + auth_headers: Auth headers for dummy user + """ + + databases = '__all__' + setup_done = False + + def setUp(self): + self.client = Client() + user_data = get_sample_admin_user() + uset = User.objects.filter(email=user_data['email']) + if not uset.exists(): + user = User.objects.create( + email=user_data['email'], + number=user_data['number'], + password=bcrypt.hashpw(user_data['password'].encode('utf-8'), + bcrypt.gensalt()).decode(), + role=user_data['role'], + created_on=timezone.now()) + user_detail = UserDetails.objects.create(available_credit=100, + name=user_data['name'], + status='ACTIVE', + user=user) + user.save() + user_detail.save() + self.auth_headers = { + 'HTTP_AUTHORIZATION': 'Bearer ' + user_data['email'] + } + + def setup_database(self): + self.users_data = get_sample_users(MAX_USER_COUNT) + for user_data in self.users_data: + uset = User.objects.filter(email=user_data['email']) + if not uset.exists(): + try: + cursor = connection.cursor() + cursor.execute("select nextval('user_login_id_seq')") + result = cursor.fetchone() + user_id = result[0] + except Exception as e: + logger.error("Failed to fetch user_login_id_seq"+str(e)) + user_id = 1 + user_i = User.objects.create( + id = user_id, + email=user_data['email'], + number=user_data['number'], + password=bcrypt.hashpw(user_data['password'].encode('utf-8'), + bcrypt.gensalt()).decode(), + role=user_data['role'], + created_on=timezone.now()) + user_details_i = UserDetails.objects.create(available_credit=100, + name=user_data['name'], + status='ACTIVE', + user=user_i) + user_i.save() + user_details_i.save() + logger.info("Created user with id: "+str(user_id)) + + + def test_get_api_management_users_all(self): + """ + tests the get user details api + :return: None + """ + self.setup_database() + response = self.client.get('/workshop/api/management/users/all', **self.auth_headers) + self.assertEqual(response.status_code, 200) + response_data = json.loads(response.content) + self.assertEqual(len(response_data['users']), DEFAULT_LIMIT) + response = self.client.get('/workshop/api/management/users/all?limit=10&offset=0', **self.auth_headers) + self.assertEqual(response.status_code, 200) + response_data = json.loads(response.content) + self.assertEqual(len(response_data['users']), 10) + response2 = 
self.client.get('/workshop/api/management/users/all?limit=10&offset=10', **self.auth_headers) + self.assertEqual(response2.status_code, 200) + response_data2 = json.loads(response2.content) + self.assertNotEquals(response_data['users'], response_data2['users']) + + + def test_bad_get_api_management_users_all(self): + """ + tests the get user details api + :return: None + """ + response = self.client.get('/workshop/api/management/users/all') + self.assertEqual(response.status_code, 401) + response = self.client.get('/workshop/api/management/users/all?limit=a&offset=-1', **self.auth_headers) + self.assertEqual(response.status_code, 400) + diff --git a/services/workshop/user/migrations/__init__.py b/services/workshop/crapi/user/urls.py similarity index 66% rename from services/workshop/user/migrations/__init__.py rename to services/workshop/crapi/user/urls.py index bfd4bb9d..5ba07973 100644 --- a/services/workshop/user/migrations/__init__.py +++ b/services/workshop/crapi/user/urls.py @@ -12,3 +12,15 @@ # limitations under the License. +""" +shop URL Configuration +The `urlpatterns` list routes URLs to views. +""" +from django.urls import include, re_path + +import crapi.user.views as user_views + +urlpatterns = [ + # Do not change the order of URLs + re_path(r'users/all$', user_views.AdminUserView.as_view()), +] \ No newline at end of file diff --git a/services/workshop/crapi/user/views.py b/services/workshop/crapi/user/views.py new file mode 100644 index 00000000..43c71946 --- /dev/null +++ b/services/workshop/crapi/user/views.py @@ -0,0 +1,79 @@ +# +# Licensed under the Apache License, Version 2.0 (the “License”); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an “AS IS” BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +""" +contains all the views related to Merchant +""" +import logging +import requests +from requests.exceptions import MissingSchema, InvalidURL +from rest_framework import status +from rest_framework.response import Response +from rest_framework.views import APIView +from crapi.user.serializers import UserDetailsSerializer +from crapi.user.models import User, UserDetails +from utils.jwt import jwt_auth_required +from utils import messages +from utils.logging import log_error + +logger = logging.getLogger() +DEFAULT_LIMIT = 30 +DEFAULT_OFFSET = 0 +MAX_LIMIT = 100 + +class AdminUserView(APIView): + """ + View for admin user to fetch user details + """ + + @jwt_auth_required + def get(self, request, user=None): + """ + Admin user view to fetch user details + :param request: http request for the view + method allowed: GET + http request should be authorised by the jwt token of the user + mandatory fields: [] + :returns Response object with + user details and 200 status if no error + message and corresponding status if error + """ + limit = request.GET.get('limit', str(DEFAULT_LIMIT)) + offset = request.GET.get('offset', str(DEFAULT_OFFSET)) + if not limit.isdigit() or not offset.isdigit(): + return Response( + {'message': messages.INVALID_LIMIT_OFFSET}, + status=status.HTTP_400_BAD_REQUEST + ) + limit = int(limit) + offset = int(offset) + if limit > MAX_LIMIT: + limit = MAX_LIMIT + if int(limit) < 0: + limit = DEFAULT_LIMIT + if offset < 0: + offset = DEFAULT_OFFSET + # Sort by id + userdetails = UserDetails.objects.all().order_by('id')[offset:offset+limit] + if not userdetails: + return Response( + {'message': messages.NO_USER_DETAILS}, + status=status.HTTP_404_NOT_FOUND + ) + serializer = UserDetailsSerializer(userdetails, many=True) + response_data = { + "users": serializer.data + } + return Response(response_data, status=status.HTTP_200_OK) + diff --git a/services/workshop/crapi_site/settings.py b/services/workshop/crapi_site/settings.py index e613ee77..73f04479 100644 --- a/services/workshop/crapi_site/settings.py +++ b/services/workshop/crapi_site/settings.py @@ -71,7 +71,7 @@ def get_env_value(env_variable): 'health_check', 'health_check.db', 'crapi.apps.CRAPIConfig', - 'user.apps.UserConfig', + #'user.apps.UserConfig', "django_extensions", ] @@ -154,7 +154,7 @@ def get_env_value(env_variable): 'djongo': { 'level': 'DEBUG', 'handlers': ['console'], - 'propogate': True, + 'propogate': True, }, } diff --git a/services/workshop/requirements.txt b/services/workshop/requirements.txt index 98d3237a..68110067 100644 --- a/services/workshop/requirements.txt +++ b/services/workshop/requirements.txt @@ -15,4 +15,5 @@ PyJWT==2.7.0 pymongo==3.12.3 pyOpenSSL==23.1.1 requests==2.30.0 -Werkzeug==2.0.3 \ No newline at end of file +Werkzeug==2.0.3 +Faker==22.1.0 \ No newline at end of file diff --git a/services/workshop/user/migrations/0001_initial.py b/services/workshop/user/migrations/0001_initial.py deleted file mode 100644 index f0dafa4a..00000000 --- a/services/workshop/user/migrations/0001_initial.py +++ /dev/null @@ -1,102 +0,0 @@ -# -# Licensed under the Apache License, Version 2.0 (the “License”); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an “AS IS” BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - - -# Generated by Django 2.2.13 on 2020-08-31 14:00 - -from django.db import migrations, models -from django.conf import settings -import django.db.models.deletion - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [ - ] - - operations = [ - migrations.CreateModel( - name='User', - fields=[ - ('id', models.AutoField(primary_key=True, serialize=False)), - ('created_on', models.DateTimeField()), - ('email', models.CharField(max_length=255, unique=True)), - ('jwt_token', models.CharField(max_length=500, null=True, unique=True)), - ('number', models.CharField(max_length=255, null=True)), - ('password', models.CharField(max_length=255)), - ('role', models.IntegerField(choices=[(2, 1), (0, 0)], default=0)), - ], - options={ - 'db_table': 'user_login', - 'managed': settings.IS_TESTING - }, - ), - migrations.CreateModel( - name='VehicleCompany', - fields=[ - ('id', models.AutoField(primary_key=True, serialize=False)), - ('name', models.CharField(max_length=255)), - ], - options={ - 'db_table': 'vehicle_company', - 'managed': settings.IS_TESTING - }, - ), - migrations.CreateModel( - name='VehicleModel', - fields=[ - ('id', models.AutoField(primary_key=True, serialize=False)), - ('fuel_type', models.BigIntegerField()), - ('model', models.CharField(max_length=255)), - ('vehicle_img', models.CharField(max_length=255, null=True)), - ('vehiclecompany', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='user.VehicleCompany')) - ], - options={ - 'db_table': 'vehicle_model', - 'managed': settings.IS_TESTING - }, - ), - migrations.CreateModel( - name='Vehicle', - fields=[ - ('id', models.AutoField(primary_key=True, serialize=False)), - ('pincode', models.CharField(max_length=255, null=True)), - ('vin', models.CharField(max_length=255)), - ('year', models.BigIntegerField(null=True)), - ('vehicle_model', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='user.VehicleModel')), - ('status', models.CharField(max_length=255)), - ('location_id', models.BigIntegerField(null=True)), - ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='user.User')), - ], - options={ - 'db_table': 'vehicle_details', - 'managed': settings.IS_TESTING - }, - ), - migrations.CreateModel( - name='UserDetails', - fields=[ - ('id', models.AutoField(primary_key=True, serialize=False)), - ('available_credit', models.FloatField()), - ('name', models.CharField(max_length=255, null=True)), - ('status', models.CharField(max_length=255, null=True)), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='user.User')), - ], - options={ - 'db_table': 'user_details', - 'managed': settings.IS_TESTING - }, - ), - ] diff --git a/services/workshop/utils/jwt.py b/services/workshop/utils/jwt.py index 89410fa4..bdb4fbda 100644 --- a/services/workshop/utils/jwt.py +++ b/services/workshop/utils/jwt.py @@ -20,7 +20,7 @@ from rest_framework.response import Response from django.conf import settings from utils import messages -from user.models import User +from crapi.user.models import User import urllib3 import logging diff --git a/services/workshop/utils/messages.py b/services/workshop/utils/messages.py index b4bbb3be..65f69d77 100644 --- a/services/workshop/utils/messages.py +++ b/services/workshop/utils/messages.py @@ -41,3 +41,5 @@ INVALID_REPORT_ID = "Please enter a valid report_id value." 
REPORT_DOES_NOT_EXIST = "The Report does not exist for given report_id." COULD_NOT_CONNECT = "Could not connect to mechanic api." +INVALID_LIMIT_OFFSET = "Param limit and offset values should be integers." +NO_USER_DETAILS = "No user details found." \ No newline at end of file diff --git a/services/workshop/utils/mock_methods.py b/services/workshop/utils/mock_methods.py index 6ce9caf5..185399b7 100644 --- a/services/workshop/utils/mock_methods.py +++ b/services/workshop/utils/mock_methods.py @@ -17,7 +17,10 @@ from rest_framework import status from rest_framework.response import Response from utils import messages -from user.models import User +from crapi.user.models import User +from faker import Faker +Faker.seed(4321) + """ contains all methods that are common and @@ -51,6 +54,39 @@ def get_sample_user_data(): "password": "password", } +def fake_phone_number(fake: Faker) -> str: + return f'{fake.msisdn()[3:]}' + +def get_sample_users(users_count=100): + """ + gives sample users which can be used for testing + """ + fake = Faker() + users = [] + for i in range(users_count): + users.append({ + "name": fake.name(), + "email": fake.email(), + "number": fake_phone_number(fake), + "password": fake.password(), + "role": fake.random_element(elements=dict(User.ROLE_CHOICES).keys()), + }) + return users + +def get_sample_admin_user(): + """ + gives admin user which can be used for testing + """ + return { + "name": "Admin 1", + "email": "admin1@crapi.com", + "number": "9123456700", + "password": "password1", + "role": User.ROLE_CHOICES.ADMIN, + } + + + def mock_jwt_auth_required(func): """ mock function to validate jwt From 1bfdba980fd546104fadfb90d24ba1f8ce9df879 Mon Sep 17 00:00:00 2001 From: Roshan Piyush Date: Wed, 10 Jan 2024 18:23:00 +0530 Subject: [PATCH 11/46] Order support offset and limit (#228) * Order support offset and limit * Increase default postgres connections --- deploy/docker/docker-compose.yml | 1 + .../helm/templates/postgres/statefulset.yaml | 3 +- deploy/k8s/base/postgres/statefulset.yaml | 3 +- openapi-spec/openapi-spec.json | 19 ++++++++++ .../crAPI.postman_collection.json | 12 +++++- .../src/main/resources/application.properties | 3 ++ services/workshop/crapi/mechanic/views.py | 14 +++---- services/workshop/crapi/shop/views.py | 38 +++++++++++++++---- services/workshop/crapi/user/views.py | 20 +++++----- services/workshop/crapi_site/settings.py | 4 ++ services/workshop/requirements.txt | 2 +- services/workshop/utils/messages.py | 2 +- 12 files changed, 89 insertions(+), 32 deletions(-) diff --git a/deploy/docker/docker-compose.yml b/deploy/docker/docker-compose.yml index 577f93d8..b3eb9c45 100755 --- a/deploy/docker/docker-compose.yml +++ b/deploy/docker/docker-compose.yml @@ -177,6 +177,7 @@ services: postgresdb: container_name: postgresdb image: 'postgres:14' + command: ["postgres", "-c", "max_connections=500"] environment: POSTGRES_USER: admin POSTGRES_PASSWORD: crapisecretpassword diff --git a/deploy/helm/templates/postgres/statefulset.yaml b/deploy/helm/templates/postgres/statefulset.yaml index a2bd5462..46d6602d 100644 --- a/deploy/helm/templates/postgres/statefulset.yaml +++ b/deploy/helm/templates/postgres/statefulset.yaml @@ -23,12 +23,13 @@ spec: - name: {{ .Values.postgresdb.name }} image: {{ .Values.postgresdb.image }}:{{ .Values.postgresdb.version }} imagePullPolicy: {{ .Values.postgresdb.imagePullPolicy }} + args: ["-c", "max_connections=500"] ports: - containerPort: {{ .Values.postgresdb.port }} envFrom: - configMapRef: name: {{ 
.Values.postgresdb.config.name }} - + volumeMounts: - mountPath: /var/lib/postgresql/data name: postgres-data diff --git a/deploy/k8s/base/postgres/statefulset.yaml b/deploy/k8s/base/postgres/statefulset.yaml index 69fcc487..909bae5e 100644 --- a/deploy/k8s/base/postgres/statefulset.yaml +++ b/deploy/k8s/base/postgres/statefulset.yaml @@ -16,13 +16,14 @@ spec: containers: - name: postgres image: postgres:14 + args: ["-c", "max_connections=500"] imagePullPolicy: "IfNotPresent" ports: - containerPort: 5432 envFrom: - configMapRef: name: postgres-config - + volumeMounts: - mountPath: /var/lib/postgresql/data name: postgres-data diff --git a/openapi-spec/openapi-spec.json b/openapi-spec/openapi-spec.json index cb774075..4c28d3b9 100644 --- a/openapi-spec/openapi-spec.json +++ b/openapi-spec/openapi-spec.json @@ -1944,6 +1944,25 @@ "security" : [ { "bearerAuth" : [ ] } ], + "parameters" : [ { + "name" : "limit", + "in" : "query", + "required" : true, + "schema" : { + "type" : "integer", + "format" : "int32", + "example" : 30 + } + }, { + "name" : "offset", + "in" : "query", + "required" : true, + "schema" : { + "type" : "integer", + "format" : "int32", + "example" : 0 + } + } ], "responses" : { "200" : { "content" : { diff --git a/postman_collections/crAPI.postman_collection.json b/postman_collections/crAPI.postman_collection.json index 707898b5..057bf401 100644 --- a/postman_collections/crAPI.postman_collection.json +++ b/postman_collections/crAPI.postman_collection.json @@ -2421,7 +2421,7 @@ } ], "url": { - "raw": "{{url}}/workshop/api/shop/orders/all", + "raw": "{{url}}/workshop/api/shop/orders/all?limit=30&offset=0", "host": [ "{{url}}" ], @@ -2431,6 +2431,16 @@ "shop", "orders", "all" + ], + "query": [ + { + "key": "limit", + "value": "30" + }, + { + "key": "offset", + "value": "0" + } ] } }, diff --git a/services/identity/src/main/resources/application.properties b/services/identity/src/main/resources/application.properties index fb846ec8..352de51b 100644 --- a/services/identity/src/main/resources/application.properties +++ b/services/identity/src/main/resources/application.properties @@ -3,6 +3,9 @@ logging.level.org.springframework.web=DEBUG spring.datasource.url= jdbc:postgresql://${DB_HOST}:${DB_PORT}/${DB_NAME} spring.datasource.username=${DB_USER} spring.datasource.password=${DB_PASSWORD} +spring.datasource.max-active=100 +spring.datasource.max-idle=8 +spring.datasource.min-idle=8 spring.jpa.properties.hibernate.jdbc.lob.non_contextual_creation=true spring.jpa.properties.hibernate.dialect=org.hibernate.dialect.PostgreSQLDialect diff --git a/services/workshop/crapi/mechanic/views.py b/services/workshop/crapi/mechanic/views.py index 23c816e7..782df179 100644 --- a/services/workshop/crapi/mechanic/views.py +++ b/services/workshop/crapi/mechanic/views.py @@ -23,15 +23,13 @@ from rest_framework.response import Response from rest_framework.views import APIView from django.db import models +from crapi_site import settings from utils.jwt import jwt_auth_required from utils import messages from crapi.user.models import User, Vehicle, UserDetails from utils.logging import log_error from .models import Mechanic, ServiceRequest from .serializers import MechanicSerializer, ServiceRequestSerializer, ReceiveReportSerializer, SignUpSerializer -DEFAULT_LIMIT = 10 -DEFAULT_OFFSET = 0 -MAX_LIMIT = 100 class SignUpView(APIView): """ @@ -206,8 +204,8 @@ def get(self, request, user=None): list of service request object and 200 status if no error message and corresponding status if error """ - limit = 
request.GET.get('limit', str(DEFAULT_LIMIT)) - offset = request.GET.get('offset', str(DEFAULT_OFFSET)) + limit = request.GET.get('limit', str(settings.DEFAULT_LIMIT)) + offset = request.GET.get('offset', str(settings.DEFAULT_OFFSET)) if not limit.isdigit() or not offset.isdigit(): return Response( {'message': messages.INVALID_LIMIT_OR_OFFSET}, @@ -215,12 +213,12 @@ def get(self, request, user=None): ) limit = int(limit) offset = int(offset) - if limit > MAX_LIMIT: + if limit > settings.MAX_LIMIT: limit = 100 if limit < 0: - limit = DEFAULT_LIMIT + limit = settings.DEFAULT_LIMIT if offset < 0: - offset = DEFAULT_OFFSET + offset = settings.DEFAULT_OFFSET service_requests = ServiceRequest.objects.filter(mechanic__user=user).order_by('id')[offset:offset+limit] serializer = ServiceRequestSerializer(service_requests, many=True) response_data = dict( diff --git a/services/workshop/crapi/shop/views.py b/services/workshop/crapi/shop/views.py index 9c50e41b..ec79bee3 100644 --- a/services/workshop/crapi/shop/views.py +++ b/services/workshop/crapi/shop/views.py @@ -241,7 +241,22 @@ def get(self, request, user=None): list of order object and 200 status if no error message and corresponding status if error """ - orders = Order.objects.filter(user=user) + limit = request.GET.get('limit', str(settings.DEFAULT_LIMIT)) + offset = request.GET.get('offset', str(settings.DEFAULT_OFFSET)) + if not limit.isdigit() or not offset.isdigit(): + return Response( + {'message': messages.INVALID_LIMIT_OR_OFFSET}, + status=status.HTTP_400_BAD_REQUEST + ) + limit = int(limit) + offset = int(offset) + if limit > settings.MAX_LIMIT: + limit = 100 + if limit < 0: + limit = settings.DEFAULT_LIMIT + if offset < 0: + offset = settings.DEFAULT_OFFSET + orders = Order.objects.filter(user=user).order_by('-id')[offset:offset+limit] serializer = OrderSerializer(orders, many=True) response_data = dict( orders=serializer.data @@ -329,14 +344,21 @@ def post(self, request, user=None): if not serializer.is_valid(): log_error(request.path, request.data, 400, serializer.errors) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - + row = None with connection.cursor() as cursor: - cursor.execute("SELECT coupon_code from applied_coupon WHERE user_id = "\ - + str(user.id)\ - + " AND coupon_code = '"\ - + coupon_request_body['coupon_code']\ - + "'") - row = cursor.fetchall() + try: + cursor.execute("SELECT coupon_code from applied_coupon WHERE user_id = "\ + + str(user.id)\ + + " AND coupon_code = '"\ + + coupon_request_body['coupon_code']\ + + "'") + row = cursor.fetchall() + except Exception as e: + log_error(request.path, request.data, 500, e) + return Response( + {'message': e}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR + ) if row and row != None: return Response( diff --git a/services/workshop/crapi/user/views.py b/services/workshop/crapi/user/views.py index 43c71946..a3c99c52 100644 --- a/services/workshop/crapi/user/views.py +++ b/services/workshop/crapi/user/views.py @@ -23,14 +23,12 @@ from rest_framework.views import APIView from crapi.user.serializers import UserDetailsSerializer from crapi.user.models import User, UserDetails +from crapi_site import settings from utils.jwt import jwt_auth_required from utils import messages from utils.logging import log_error logger = logging.getLogger() -DEFAULT_LIMIT = 30 -DEFAULT_OFFSET = 0 -MAX_LIMIT = 100 class AdminUserView(APIView): """ @@ -49,21 +47,21 @@ def get(self, request, user=None): user details and 200 status if no error message and corresponding status if 
error """ - limit = request.GET.get('limit', str(DEFAULT_LIMIT)) - offset = request.GET.get('offset', str(DEFAULT_OFFSET)) + limit = request.GET.get('limit', str(settings.DEFAULT_LIMIT)) + offset = request.GET.get('offset', str(settings.DEFAULT_OFFSET)) if not limit.isdigit() or not offset.isdigit(): return Response( - {'message': messages.INVALID_LIMIT_OFFSET}, + {'message': messages.INVALID_LIMIT_OR_OFFSET}, status=status.HTTP_400_BAD_REQUEST ) limit = int(limit) offset = int(offset) - if limit > MAX_LIMIT: - limit = MAX_LIMIT - if int(limit) < 0: - limit = DEFAULT_LIMIT + if limit > settings.MAX_LIMIT: + limit = 100 + if limit < 0: + limit = settings.DEFAULT_LIMIT if offset < 0: - offset = DEFAULT_OFFSET + offset = settings.DEFAULT_OFFSET # Sort by id userdetails = UserDetails.objects.all().order_by('id')[offset:offset+limit] if not userdetails: diff --git a/services/workshop/crapi_site/settings.py b/services/workshop/crapi_site/settings.py index 73f04479..c9dfd170 100644 --- a/services/workshop/crapi_site/settings.py +++ b/services/workshop/crapi_site/settings.py @@ -28,6 +28,9 @@ from django.core.exceptions import ImproperlyConfigured +DEFAULT_LIMIT = 10 +DEFAULT_OFFSET = 0 +MAX_LIMIT = 100 def get_env_value(env_variable): try: @@ -173,6 +176,7 @@ def get_env_value(env_variable): 'NAME': 'test_crapi', 'USER': get_env_value('DB_USER'), }, + 'CONN_MAX_AGE': 60, }, 'mongodb': { 'ENGINE': 'djongo', diff --git a/services/workshop/requirements.txt b/services/workshop/requirements.txt index 68110067..99e60605 100644 --- a/services/workshop/requirements.txt +++ b/services/workshop/requirements.txt @@ -10,7 +10,7 @@ django-health-check==3.17.0 djangorestframework==3.14.0 django-sslserver==0.22 djongo==1.3.6 -psycopg2==2.9.6 +psycopg2==2.9.9 PyJWT==2.7.0 pymongo==3.12.3 pyOpenSSL==23.1.1 diff --git a/services/workshop/utils/messages.py b/services/workshop/utils/messages.py index 65f69d77..3b3d83fd 100644 --- a/services/workshop/utils/messages.py +++ b/services/workshop/utils/messages.py @@ -41,5 +41,5 @@ INVALID_REPORT_ID = "Please enter a valid report_id value." REPORT_DOES_NOT_EXIST = "The Report does not exist for given report_id." COULD_NOT_CONNECT = "Could not connect to mechanic api." -INVALID_LIMIT_OFFSET = "Param limit and offset values should be integers." +INVALID_LIMIT_OR_OFFSET = "Param limit and offset values should be integers." NO_USER_DETAILS = "No user details found." 
\ No newline at end of file From dbfe0a18aec73d54e4941e3cd0f555fffa9bcbe1 Mon Sep 17 00:00:00 2001 From: Roshan Piyush Date: Thu, 11 Jan 2024 00:01:40 +0530 Subject: [PATCH 12/46] Upgrade django from 3.x to 4.1.x (#229) * Upgrade django from 3.x to 4.1.x ( * Increase community container memory --- deploy/docker/docker-compose.yml | 6 +++--- services/workshop/crapi/migrations/0001_initial.py | 4 ++-- services/workshop/crapi/user/tests.py | 4 ++-- services/workshop/crapi_site/settings.py | 2 +- services/workshop/requirements.txt | 11 ++++++----- 5 files changed, 14 insertions(+), 13 deletions(-) diff --git a/deploy/docker/docker-compose.yml b/deploy/docker/docker-compose.yml index b3eb9c45..e552f18c 100755 --- a/deploy/docker/docker-compose.yml +++ b/deploy/docker/docker-compose.yml @@ -100,7 +100,7 @@ services: resources: limits: cpus: '0.3' - memory: 128M + memory: 192M crapi-workshop: container_name: crapi-workshop @@ -194,8 +194,8 @@ services: deploy: resources: limits: - cpus: '0.3' - memory: 128M + cpus: '0.5' + memory: 256M mongodb: container_name: mongodb diff --git a/services/workshop/crapi/migrations/0001_initial.py b/services/workshop/crapi/migrations/0001_initial.py index 35fc5d65..b0cbd678 100644 --- a/services/workshop/crapi/migrations/0001_initial.py +++ b/services/workshop/crapi/migrations/0001_initial.py @@ -36,7 +36,7 @@ class Migration(migrations.Migration): ('jwt_token', models.CharField(max_length=500, null=True, unique=True)), ('number', models.CharField(max_length=255, null=True)), ('password', models.CharField(max_length=255)), - ('role', models.IntegerField(choices=[(2, 1), (0, 0)], default=0)), + ('role', models.IntegerField(choices=[(1, 'User'), (2, 'Mechanic'), (3, 'Admin')], default=1)), ], options={ 'db_table': 'user_login', @@ -165,7 +165,7 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Coupon', fields=[ - ('coupon_code', models.CharField(primary_key=True, max_length=255)), + ('coupon_code', models.CharField(max_length=255, primary_key=True, serialize=False)), ('amount', models.CharField(max_length=255)), ], options={ diff --git a/services/workshop/crapi/user/tests.py b/services/workshop/crapi/user/tests.py index 483f12f5..657f4fbd 100644 --- a/services/workshop/crapi/user/tests.py +++ b/services/workshop/crapi/user/tests.py @@ -26,7 +26,7 @@ from django.test import TestCase, Client from django.utils import timezone from utils import messages -from crapi.user.views import DEFAULT_LIMIT +from crapi_site import settings from crapi.user.models import User, UserDetails logger = logging.getLogger('UserTest') @@ -105,7 +105,7 @@ def test_get_api_management_users_all(self): response = self.client.get('/workshop/api/management/users/all', **self.auth_headers) self.assertEqual(response.status_code, 200) response_data = json.loads(response.content) - self.assertEqual(len(response_data['users']), DEFAULT_LIMIT) + self.assertEqual(len(response_data['users']), settings.DEFAULT_LIMIT) response = self.client.get('/workshop/api/management/users/all?limit=10&offset=0', **self.auth_headers) self.assertEqual(response.status_code, 200) response_data = json.loads(response.content) diff --git a/services/workshop/crapi_site/settings.py b/services/workshop/crapi_site/settings.py index c9dfd170..85026849 100644 --- a/services/workshop/crapi_site/settings.py +++ b/services/workshop/crapi_site/settings.py @@ -176,7 +176,7 @@ def get_env_value(env_variable): 'NAME': 'test_crapi', 'USER': get_env_value('DB_USER'), }, - 'CONN_MAX_AGE': 60, + 'CONN_MAX_AGE': 0, }, 
'mongodb': { 'ENGINE': 'djongo', diff --git a/services/workshop/requirements.txt b/services/workshop/requirements.txt index 99e60605..6662beb2 100644 --- a/services/workshop/requirements.txt +++ b/services/workshop/requirements.txt @@ -1,18 +1,19 @@ -bcrypt==3.1.7 -Django~=3.2.19 +bcrypt==4.1.2 +Django~=4.1.13 cryptography==40.0.2 django-cors-headers==4.0.0 django-db-cascade-2==0.3.5 django-environ==0.10.0 django-extended-choices==1.3.3 -django-extensions==3.2.1 +django-extensions==3.2.3 django-health-check==3.17.0 djangorestframework==3.14.0 django-sslserver==0.22 -djongo==1.3.6 +djongo==1.3.6 #max version for django 4.1.13 +sqlparse==0.2.4 #djongo-dependency psycopg2==2.9.9 PyJWT==2.7.0 -pymongo==3.12.3 +pymongo==3.13.0 pyOpenSSL==23.1.1 requests==2.30.0 Werkzeug==2.0.3 From f34fb50f6fb3088436024c13277c10ddb134822f Mon Sep 17 00:00:00 2001 From: Roshan Piyush Date: Sun, 4 Feb 2024 21:27:36 +0530 Subject: [PATCH 13/46] Better seeding of users (#231) * Better seeding of default data --- README.md | 6 + deploy/vagrant/crapi.service | 2 +- openapi-spec/openapi-spec.json | 8 +- .../com/crapi/config/InitialDataConfig.java | 122 +++++++++++++++--- .../java/com/crapi/constant/TestUsers.java | 58 ++++++++- .../java/com/crapi/entity/VehicleDetails.java | 13 +- .../java/com/crapi/entity/VehicleModel.java | 2 +- .../main/java/com/crapi/model/SeedUser.java | 37 +++++- .../repository/VehicleCompanyRepository.java | 24 ++++ .../service/Impl/ProfileServiceImpl.java | 1 + .../java/com/crapi/utils/GenerateVIN.java | 27 ++-- .../com/crapi/utils/VehicleModelData.java | 58 --------- 12 files changed, 259 insertions(+), 99 deletions(-) create mode 100644 services/identity/src/main/java/com/crapi/repository/VehicleCompanyRepository.java delete mode 100644 services/identity/src/main/java/com/crapi/utils/VehicleModelData.java diff --git a/README.md b/README.md index f7ad1eea..9a8c29a9 100644 --- a/README.md +++ b/README.md @@ -66,6 +66,12 @@ You can use prebuilt images generated by our CI workflow. 
docker-compose -f docker-compose.yml --compatibility up -d ``` + - To Stop and Cleanup crAPI + + ``` + docker-compose -f docker-compose.yml --compatibility down –volumes + ``` + Visit [http://localhost:8888](http://localhost:8888) **Note**: All emails are sent to mailhog service by default and can be checked on diff --git a/deploy/vagrant/crapi.service b/deploy/vagrant/crapi.service index 54e5c01d..ab6285cf 100644 --- a/deploy/vagrant/crapi.service +++ b/deploy/vagrant/crapi.service @@ -8,7 +8,7 @@ Type=oneshot RemainAfterExit=true WorkingDirectory=/opt/crapi ExecStart=/bin/bash -c "/usr/local/bin/docker-compose up -d --remove-orphans" -ExecStop=/bin/bash -c "/usr/local/bin/docker-compose down" +ExecStop=/bin/bash -c "/usr/local/bin/docker-compose down –volumes" [Install] WantedBy=multi-user.target diff --git a/openapi-spec/openapi-spec.json b/openapi-spec/openapi-spec.json index 4c28d3b9..5d86dc81 100644 --- a/openapi-spec/openapi-spec.json +++ b/openapi-spec/openapi-spec.json @@ -1125,7 +1125,7 @@ "schema" : { "type" : "string", "format" : "uuid", - "example" : "0be319f0-f0dd-44aa-af0b-af927f3a383f" + "example" : "1929186d-8b67-4163-a208-de52a41f7301" } } ] }, @@ -2730,8 +2730,8 @@ } }, "example" : { - "id" : 23, - "vin" : "0FOPP90TFEE927859", + "id" : 3, + "vin" : "1G1OP124017231334", "owner" : { "email" : "victim.one@example.com", "number" : "4156895423" @@ -3096,7 +3096,7 @@ "example" : { "nickname" : "Hacker", "email" : "hacker@darkweb.com", - "vehicleid" : "abac4018-5a38-466c-ab7f-361908afeab6", + "vehicleid" : "4bae9968-ec7f-4de3-a3a0-ba1b2ab5e5e5", "profile_pic_url" : "", "created_at" : "2021-09-16T01:46:32.432Z" } diff --git a/services/identity/src/main/java/com/crapi/config/InitialDataConfig.java b/services/identity/src/main/java/com/crapi/config/InitialDataConfig.java index b2a4e2c7..31c6441b 100644 --- a/services/identity/src/main/java/com/crapi/config/InitialDataConfig.java +++ b/services/identity/src/main/java/com/crapi/config/InitialDataConfig.java @@ -15,17 +15,23 @@ package com.crapi.config; import com.crapi.constant.TestUsers; +import com.crapi.entity.ProfileVideo; import com.crapi.entity.User; import com.crapi.entity.UserDetails; +import com.crapi.entity.VehicleCompany; import com.crapi.entity.VehicleDetails; +import com.crapi.entity.VehicleLocation; +import com.crapi.entity.VehicleModel; +import com.crapi.enums.EFuelType; import com.crapi.enums.ERole; import com.crapi.model.SeedUser; import com.crapi.repository.*; import com.crapi.service.VehicleService; +import com.crapi.utils.GenerateVIN; import com.crapi.utils.UserData; -import com.crapi.utils.VehicleLocationData; -import com.crapi.utils.VehicleModelData; import java.util.ArrayList; +import java.util.List; +import java.util.Random; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -40,8 +46,16 @@ public class InitialDataConfig { private static final Logger logger = LoggerFactory.getLogger(InitialDataConfig.class); + private static long seed = 0; + + private Random random; + + GenerateVIN generateVIN; + @Autowired VehicleLocationRepository vehicleLocationRepository; + @Autowired VehicleCompanyRepository vehicleCompanyRepository; + @Autowired VehicleModelRepository vehicleModelRepository; @Autowired VehicleDetailsRepository vehicleDetailsRepository; @@ -56,30 +70,57 @@ public class InitialDataConfig { @Autowired PasswordEncoder encoder; - public void addLocation() { - if (CollectionUtils.isEmpty(vehicleLocationRepository.findAll())) { - 
VehicleLocationData vehicleLocationData = new VehicleLocationData(); - vehicleLocationRepository.saveAll(vehicleLocationData.getVehicleLocationData()); - } + public void createModels() { + VehicleCompany vehicleCompany = new VehicleCompany("Hyundai"); + VehicleModel vehicleModel = + new VehicleModel("Creta", EFuelType.DIESEL, vehicleCompany, "images/hyundai-creta.jpg"); + vehicleModelRepository.save(vehicleModel); + + vehicleCompany = new VehicleCompany("Lamborghini"); + vehicleModel = + new VehicleModel( + "Aventador", EFuelType.PETROL, vehicleCompany, "images/lamborghini-aventador.jpg"); + vehicleModel = vehicleModelRepository.save(vehicleModel); + + vehicleCompany = new VehicleCompany("Mercedes-Benz"); + vehicleModel = + new VehicleModel( + "GLA Class", EFuelType.DIESEL, vehicleCompany, "images/mercedesbenz-gla.jpg"); + vehicleModelRepository.save(vehicleModel); + + vehicleCompany = new VehicleCompany("BMW"); + vehicleModel = + new VehicleModel("5 Series", EFuelType.PETROL, vehicleCompany, "images/bmw-5.jpg"); + vehicleModelRepository.save(vehicleModel); + + vehicleCompany = new VehicleCompany("Audi"); + vehicleModel = new VehicleModel("RS7", EFuelType.DIESEL, vehicleCompany, "images/audi-rs7.jpg"); + vehicleModelRepository.save(vehicleModel); + + vehicleCompany = new VehicleCompany("MG Motor"); + vehicleModel = + new VehicleModel( + "Hector Plus", EFuelType.PETROL, vehicleCompany, "images/mgmotor-hectorplus.jpg"); + vehicleModel = vehicleModelRepository.save(vehicleModel); } public void addVehicleModel() { if (CollectionUtils.isEmpty(vehicleModelRepository.findAll())) { - VehicleModelData vehicleModelData = new VehicleModelData(); - vehicleModelRepository.saveAll(vehicleModelData.getModelList()); + createModels(); } } @EventListener public void setup(ApplicationReadyEvent event) { - - addLocation(); + random = new Random(); + random.setSeed(seed); + generateVIN = new GenerateVIN(); addVehicleModel(); addUser(); } public void addUser() { - if (CollectionUtils.isEmpty(userDetailsRepository.findAll())) { + if (CollectionUtils.isEmpty(userDetailsRepository.findAll()) || false) { ArrayList userDetailList = new TestUsers().getUsers(); for (SeedUser userDetails : userDetailList) { boolean user = @@ -88,7 +129,12 @@ public void addUser() { userDetails.getEmail(), userDetails.getPassword(), userDetails.getNumber(), - userDetails.getRole()); + userDetails.getRole(), + userDetails.getCarid(), + userDetails.getVin(), + userDetails.getPincode(), + userDetails.getLatitude(), + userDetails.getLongitude()); if (!user) { logger.error("Fail to create predefined users"); } @@ -96,24 +142,62 @@ public void addUser() { } } + public VehicleDetails createVehicle( + String carId, String vin, String pincode, String latitude, String longitude) { + List modelList = null; + modelList = vehicleModelRepository.findAll(); + if (modelList != null) { + VehicleLocation vehicleLocation = new VehicleLocation(latitude, longitude); + VehicleDetails vehicleDetails = new VehicleDetails(carId, pincode, vin); + VehicleModel vehicleModel = modelList.get(random.nextInt(modelList.size())); + vehicleModel = vehicleModelRepository.findById(vehicleModel.getId()).get(); + vehicleDetails.setVehicleLocation(vehicleLocation); + vehicleDetails = vehicleDetailsRepository.save(vehicleDetails); + vehicleDetails.setModel(vehicleModel); + vehicleDetails = vehicleDetailsRepository.save(vehicleDetails); + logger.debug("Created vehicle for {} successfully", vehicleDetails); + return vehicleDetails; + } + return null; + } + public boolean 
predefineUserData( - String name, String email, String password, String number, ERole role) { + String name, + String email, + String password, + String number, + ERole role, + String carId, + String vin, + String pincode, + String latitude, + String longitude) { UserData userData = new UserData(); VehicleDetails vehicleDetails = null; UserDetails userDetails = null; try { User user = new User(email, number, encoder.encode(password), role); user = userRepository.save(user); + user = userRepository.findById(user.getId()).get(); userDetails = userData.getPredefineUser(name, user); - userDetailsRepository.save(userDetails); - vehicleDetails = vehicleService.createVehicle(); + userDetails = userDetailsRepository.save(userDetails); + vehicleDetails = createVehicle(carId, vin, pincode, latitude, longitude); if (vehicleDetails != null) { vehicleDetails.setOwner(user); vehicleDetailsRepository.save(vehicleDetails); - return true; + } else { + logger.error("Fail to create vehicle for user {}", email); + return false; } - logger.error("Fail to create vehicle for user {}", email); - return false; + // generate random bytes + byte[] videoBytes = new byte[10]; + random.nextBytes(videoBytes); + String videoName = userDetails.getName().replace(" ", "_") + "_video"; + String conversionParam = "-v codec h264"; + ProfileVideo profileVideo = new ProfileVideo(videoName, videoBytes, user); + profileVideo.setConversion_params(conversionParam); + profileVideoRepository.save(profileVideo); + return true; } catch (Exception e) { logger.error("Fail to create user {}, Exception :: {}", email, e); return false; diff --git a/services/identity/src/main/java/com/crapi/constant/TestUsers.java b/services/identity/src/main/java/com/crapi/constant/TestUsers.java index 165184aa..c4236222 100644 --- a/services/identity/src/main/java/com/crapi/constant/TestUsers.java +++ b/services/identity/src/main/java/com/crapi/constant/TestUsers.java @@ -25,15 +25,63 @@ public class TestUsers { public TestUsers() { users.add( new SeedUser( - "Adam", "adam007@example.com", "9876895423", "adam007!123", ERole.ROLE_PREDEFINE)); + "Adam", + "adam007@example.com", + "9876895423", + "adam007!123", + ERole.ROLE_PREDEFINE, + "f89b5f21-7829-45cb-a650-299a61090378", + "7ECOX34KJTV359804", + "123456", + "32.778889", + "-91.919243")); users.add( new SeedUser( - "Pogba", "pogba006@example.com", "9876570006", "pogba006!123", ERole.ROLE_PREDEFINE)); + "Pogba", + "pogba006@example.com", + "9876570006", + "pogba006!123", + ERole.ROLE_PREDEFINE, + "cd515c12-0fc1-48ae-8b61-9230b70a845b", + "8VAUI03PRUQ686911", + "123456", + "31.284788", + "-92.471176")); users.add( new SeedUser( - "Robot", "robot001@example.com", "9876570001", "robot001!123", ERole.ROLE_PREDEFINE)); - users.add(new SeedUser("Test", "test@example.com", "9876540001", "Test!123", ERole.ROLE_USER)); + "Robot", + "robot001@example.com", + "9876570001", + "robot001!123", + ERole.ROLE_PREDEFINE, + "4bae9968-ec7f-4de3-a3a0-ba1b2ab5e5e5", + "0NKPZ09IHOP508673", + "123456", + "37.746880", + "-84.301460")); users.add( - new SeedUser("Admin", "admin@example.com", "9010203040", "Admin!123", ERole.ROLE_ADMIN)); + new SeedUser( + "Test", + "test@example.com", + "9876540001", + "Test!123", + ERole.ROLE_USER, + "1929186d-8b67-4163-a208-de52a41f7301", + "8IGEF39BZUJ159285", + "123456", + "38.206348", + "-84.270172")); + users.add( + new SeedUser( + "Admin", + "admin@example.com", + "9010203040", + "Admin!123", + ERole.ROLE_ADMIN, + "f5c506f5-3af2-4120-926c-64ad8b10ddc2", + "6NBBY70FWUM324316", + "123456", + 
"37.406769", + "-94.705528")); } } diff --git a/services/identity/src/main/java/com/crapi/entity/VehicleDetails.java b/services/identity/src/main/java/com/crapi/entity/VehicleDetails.java index 53eb684f..4665b4e7 100644 --- a/services/identity/src/main/java/com/crapi/entity/VehicleDetails.java +++ b/services/identity/src/main/java/com/crapi/entity/VehicleDetails.java @@ -33,7 +33,7 @@ public class VehicleDetails { private long id; @Column(name = "uuid", updatable = false, nullable = false, unique = true) - private UUID uuid = UUID.randomUUID(); + private UUID uuid; private String pincode; private String vin; @@ -41,7 +41,7 @@ public class VehicleDetails { private EStatus status; @Transient List previousOwners; - @OneToOne(cascade = CascadeType.ALL) + @ManyToOne(cascade = CascadeType.ALL) @JoinColumn(name = "vehicle_model_id") private VehicleModel model; @@ -54,7 +54,16 @@ public class VehicleDetails { private User owner; public VehicleDetails(String pincode, String vin) { + this.uuid = UUID.randomUUID(); + this.pincode = pincode; + this.vin = vin; + this.status = EStatus.ACTIVE; + this.year = LocalDate.now().getYear(); + this.previousOwners = Arrays.asList(); + } + public VehicleDetails(String uuid, String pincode, String vin) { + this.uuid = UUID.fromString(uuid); this.pincode = pincode; this.vin = vin; this.status = EStatus.ACTIVE; diff --git a/services/identity/src/main/java/com/crapi/entity/VehicleModel.java b/services/identity/src/main/java/com/crapi/entity/VehicleModel.java index d9b68dae..b2ca69dd 100644 --- a/services/identity/src/main/java/com/crapi/entity/VehicleModel.java +++ b/services/identity/src/main/java/com/crapi/entity/VehicleModel.java @@ -32,7 +32,7 @@ public class VehicleModel implements Serializable { private EFuelType fuel_type; private String vehicle_img; - @OneToOne(cascade = CascadeType.ALL) + @ManyToOne(cascade = CascadeType.ALL) private VehicleCompany vehiclecompany; public VehicleModel() {} diff --git a/services/identity/src/main/java/com/crapi/model/SeedUser.java b/services/identity/src/main/java/com/crapi/model/SeedUser.java index fe92b721..06362058 100644 --- a/services/identity/src/main/java/com/crapi/model/SeedUser.java +++ b/services/identity/src/main/java/com/crapi/model/SeedUser.java @@ -43,11 +43,46 @@ public class SeedUser { @Size(min = 3, max = 100) private ERole role; - public SeedUser(String name, String email, String number, String password, ERole role) { + @NotBlank + @Size(min = 3, max = 100) + private String carid; + + @NotBlank + @Size(min = 3, max = 100) + private String vin; + + @NotBlank + @Size(min = 3, max = 100) + private String pincode; + + @NotBlank + @Size(min = 3, max = 100) + private String latitude; + + @NotBlank + @Size(min = 3, max = 100) + private String longitude; + + public SeedUser( + String name, + String email, + String number, + String password, + ERole role, + String carid, + String vin, + String pincode, + String latitude, + String longitude) { this.name = name; this.email = email; this.number = number; this.password = password; this.role = role; + this.carid = carid; + this.vin = vin; + this.pincode = pincode; + this.latitude = latitude; + this.longitude = longitude; } } diff --git a/services/identity/src/main/java/com/crapi/repository/VehicleCompanyRepository.java b/services/identity/src/main/java/com/crapi/repository/VehicleCompanyRepository.java new file mode 100644 index 00000000..7b1aefff --- /dev/null +++ b/services/identity/src/main/java/com/crapi/repository/VehicleCompanyRepository.java @@ -0,0 +1,24 @@ +/* + * 
Licensed under the Apache License, Version 2.0 (the “License”); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an “AS IS” BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.crapi.repository; + +import com.crapi.entity.VehicleCompany; +import org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.stereotype.Repository; + +@Repository +public interface VehicleCompanyRepository extends JpaRepository { + VehicleCompany findByName(String name); +} diff --git a/services/identity/src/main/java/com/crapi/service/Impl/ProfileServiceImpl.java b/services/identity/src/main/java/com/crapi/service/Impl/ProfileServiceImpl.java index 62963301..c203711c 100644 --- a/services/identity/src/main/java/com/crapi/service/Impl/ProfileServiceImpl.java +++ b/services/identity/src/main/java/com/crapi/service/Impl/ProfileServiceImpl.java @@ -104,6 +104,7 @@ public ProfileVideo uploadProfileVideo(MultipartFile file, HttpServletRequest re profileVideo.setVideo_name(file.getOriginalFilename()); } else { profileVideo = new ProfileVideo(file.getOriginalFilename(), file.getBytes(), user); + profileVideo.setConversion_params(conversionParam); } profileVideoRepository.save(profileVideo); return profileVideo; diff --git a/services/identity/src/main/java/com/crapi/utils/GenerateVIN.java b/services/identity/src/main/java/com/crapi/utils/GenerateVIN.java index 7af345c0..b3c5a5b8 100644 --- a/services/identity/src/main/java/com/crapi/utils/GenerateVIN.java +++ b/services/identity/src/main/java/com/crapi/utils/GenerateVIN.java @@ -14,12 +14,23 @@ package com.crapi.utils; +import java.util.Random; + public class GenerateVIN { static String charsequence = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"; static String num = "0123456789"; String vin = ""; String pincode = ""; + Random random; + + public GenerateVIN() { + random = new Random(); + } + + public GenerateVIN(long seed) { + random = new Random(seed); + } /** @return random generate pin code for add vehicle */ public String generatePincode() { @@ -34,26 +45,26 @@ public String generateVIN() { } public String getChar(int num) { - String random = ""; - for (int j = 0; j <= num; j++) random += randomCharacter(); - return random; + String randStr = ""; + for (int j = 0; j <= num; j++) randStr += randomCharacter(); + return randStr; } public String getNum(int num) { - String random = ""; - for (int k = 0; k <= num; k++) random += randomNumber(); - return random; + String randNum = ""; + for (int k = 0; k <= num; k++) randNum += randomNumber(); + return randNum; } public String randomCharacter() { int n = charsequence.length(); - int r = (int) (n * Math.random()); + int r = random.nextInt(n); return charsequence.substring(r, r + 1); } public String randomNumber() { int n = num.length(); - int r = (int) (n * Math.random()); + int r = random.nextInt(n); return num.substring(r, r + 1); } } diff --git a/services/identity/src/main/java/com/crapi/utils/VehicleModelData.java b/services/identity/src/main/java/com/crapi/utils/VehicleModelData.java deleted file mode 100644 index a7a9cf04..00000000 --- a/services/identity/src/main/java/com/crapi/utils/VehicleModelData.java +++ 
/dev/null @@ -1,58 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the “License”); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an “AS IS” BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.crapi.utils; - -import com.crapi.entity.VehicleCompany; -import com.crapi.entity.VehicleModel; -import com.crapi.enums.EFuelType; -import java.util.ArrayList; -import java.util.List; - -public class VehicleModelData { - - /** @return List of hard coded Vehicle Model and Company for pre data setup. */ - public List getModelList() { - List vehicleModelList = new ArrayList<>(); - vehicleModelList.add( - new VehicleModel( - "Creta", EFuelType.DIESEL, new VehicleCompany("Hyundai"), "images/hyundai-creta.jpg")); - vehicleModelList.add( - new VehicleModel( - "Aventador", - EFuelType.PETROL, - new VehicleCompany("Lamborghini"), - "images/lamborghini-aventador.jpg")); - vehicleModelList.add( - new VehicleModel( - "GLA Class", - EFuelType.DIESEL, - new VehicleCompany("Mercedes-Benz"), - "images/mercedesbenz-gla.jpg")); - vehicleModelList.add( - new VehicleModel( - "5 Series", EFuelType.PETROL, new VehicleCompany("BMW"), "images/bmw-5.jpg")); - vehicleModelList.add( - new VehicleModel( - "RS7", EFuelType.DIESEL, new VehicleCompany("Audi"), "images/audi-rs7.jpg")); - vehicleModelList.add( - new VehicleModel( - "Hector Plus", - EFuelType.PETROL, - new VehicleCompany("MG Motor"), - "images/mgmotor-hectorplus.jpg")); - - return vehicleModelList; - } -} From d8cdb88ddfeb743d123d6e8a3c44c243fd9639b1 Mon Sep 17 00:00:00 2001 From: Roshan Piyush Date: Tue, 6 Feb 2024 16:32:49 +0530 Subject: [PATCH 14/46] Tests and coverage (#232) * Separate Tests and coverage --- .github/workflows/{ci.yml => pr-build.yml} | 165 +++++++------ .github/workflows/publish.yml | 142 +++++++++++ .gitignore | 1 + deploy/docker/.env | 2 +- services/community/.gitignore | 1 + services/community/Dockerfile | 4 +- services/docker-database.yml | 50 ++++ services/identity/.gitignore | 1 + services/workshop/.gitignore | 3 + services/workshop/Dockerfile | 9 +- services/workshop/core/__init__.py | 0 services/workshop/core/management/__init__.py | 0 .../core/management/commands/seed_database.py | 226 ++++++++++++++++++ services/workshop/crapi/apps.py | 36 +-- services/workshop/crapi/mechanic/models.py | 4 +- services/workshop/crapi/user/models.py | 1 + services/workshop/crapi_site/settings.py | 10 +- services/workshop/requirements.txt | 5 +- services/workshop/runner.sh | 11 +- services/workshop/utils/messages.py | 1 + 20 files changed, 547 insertions(+), 125 deletions(-) rename .github/workflows/{ci.yml => pr-build.yml} (57%) create mode 100644 .github/workflows/publish.yml create mode 100644 services/community/.gitignore create mode 100644 services/docker-database.yml create mode 100644 services/workshop/core/__init__.py create mode 100644 services/workshop/core/management/__init__.py create mode 100644 services/workshop/core/management/commands/seed_database.py diff --git a/.github/workflows/ci.yml b/.github/workflows/pr-build.yml similarity index 57% rename from .github/workflows/ci.yml rename to .github/workflows/pr-build.yml index 
0c4fddbc..c85332fe 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/pr-build.yml @@ -1,4 +1,4 @@ -name: Docker Image CI +name: PR Build on: push: @@ -23,8 +23,6 @@ on: jobs: build-context: runs-on: ubuntu-latest - outputs: - push_image: ${{ (github.event_name == 'push' && (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/develop')) || (github.event_name == 'release' && github.event.action == 'published') }} steps: - name: Dump GitHub context env: @@ -37,14 +35,14 @@ jobs: PLATFORMS: "linux/amd64,linux/arm64" steps: - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Env seen prerun run: env - name: Get branch name id: branch-name - uses: tj-actions/branch-names@v5.2 + uses: tj-actions/branch-names@v8 - name: Current branch name run: | @@ -79,20 +77,13 @@ jobs: # setup Docker build action - name: Set up Docker Buildx id: buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Set up QEMU id: qemu - uses: docker/setup-qemu-action@v2 - - - name: Login to DockerHub - if: needs.build-context.outputs.push_image == 'true' - uses: docker/login-action@v2 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} + uses: docker/setup-qemu-action@v3 - name: Build crapi-identity image - uses: docker/build-push-action@v2 + uses: docker/build-push-action@v3 with: context: ./services/identity tags: crapi/crapi-identity:${{ env.TAG_LATEST }},crapi/crapi-identity:${{ env.TAG_NAME }} @@ -103,7 +94,7 @@ jobs: cache-to: type=gha,mode=max,scope=identity-service - name: Build crapi-workshop image - uses: docker/build-push-action@v2 + uses: docker/build-push-action@v3 with: context: ./services/workshop tags: crapi/crapi-workshop:${{ env.TAG_LATEST }},crapi/crapi-workshop:${{ env.TAG_NAME }} @@ -114,7 +105,7 @@ jobs: cache-to: type=gha,mode=max,scope=workshop-service - name: Build crapi-community image - uses: docker/build-push-action@v2 + uses: docker/build-push-action@v3 with: context: ./services/community tags: crapi/crapi-community:${{ env.TAG_LATEST }},crapi/crapi-community:${{ env.TAG_NAME }} @@ -125,7 +116,7 @@ jobs: cache-to: type=gha,mode=max,scope=community-service - name: Build crapi-web image - uses: docker/build-push-action@v2 + uses: docker/build-push-action@v3 with: context: ./services/web tags: crapi/crapi-web:${{ env.TAG_LATEST }},crapi/crapi-web:${{ env.TAG_NAME }} @@ -136,7 +127,7 @@ jobs: cache-to: type=gha,mode=max,scope=web-service - name: Build gateway-service image - uses: docker/build-push-action@v2 + uses: docker/build-push-action@v3 with: context: ./services/gateway-service tags: crapi/gateway-service:${{ env.TAG_LATEST }},crapi/gateway-service:${{ env.TAG_NAME }} @@ -146,16 +137,8 @@ jobs: cache-from: type=gha,scope=gateway-service cache-to: type=gha,mode=max,scope=gateway-service - - - name: Check Mailhog existence - id: check_mailhog_exists - uses: andstor/file-existence-action@v1 - with: - files: "./services/mailhog" - - name: Build mailhog image - if: steps.check_mailhog_exists.outputs.files_exists == 'true' - uses: docker/build-push-action@v2 + uses: docker/build-push-action@v3 with: context: ./services/mailhog tags: crapi/mailhog:${{ env.TAG_LATEST }},crapi/mailhog:${{ env.TAG_NAME }} @@ -177,7 +160,7 @@ jobs: - name: Install Node uses: actions/setup-node@v3 with: - node-version: 16 + node-version: 20 - name: Install newman run: npm install -g newman @@ -185,68 +168,82 @@ jobs: - name: Run Postman Collection run: (newman run 
"./postman_collections/crAPI.postman_collection.json" -e ./postman_collections/crAPI.postman_environment.json --verbose) || true - - name: Build crapi-identity all platforms and conditionally push to Docker Hub - uses: docker/build-push-action@v2 - with: - context: ./services/identity - tags: crapi/crapi-identity:${{ env.TAG_LATEST }},crapi/crapi-identity:${{ env.TAG_NAME }} - push: ${{ needs.build-context.outputs.push_image }} - platforms: ${{ env.PLATFORMS }} - cache-from: type=gha,scope=identity-service - cache-to: type=gha,mode=max,scope=identity-service + - name: Dump docker logs on failure + if: failure() + uses: jwalton/gh-docker-logs@v2 - - name: Build crapi-workshop all platforms and conditionally push to Docker Hub - uses: docker/build-push-action@v2 - with: - context: ./services/workshop - tags: crapi/crapi-workshop:${{ env.TAG_LATEST }},crapi/crapi-workshop:${{ env.TAG_NAME }} - push: ${{ needs.build-context.outputs.push_image }} - platforms: ${{ env.PLATFORMS }} - cache-from: type=gha,scope=workshop-service - cache-to: type=gha,mode=max,scope=workshop-service + - name: Run crAPI using built images + run: docker-compose -f deploy/docker/docker-compose.yml down --volumes --remove-orphans - - name: Build crapi-community all platforms and conditionally push to Docker Hub - uses: docker/build-push-action@v2 - with: - context: ./services/community - tags: crapi/crapi-community:${{ env.TAG_LATEST }},crapi/crapi-community:${{ env.TAG_NAME }} - push: ${{ needs.build-context.outputs.push_image }} - platforms: ${{ env.PLATFORMS }} - cache-from: type=gha,scope=community-service - cache-to: type=gha,mode=max,scope=community-service - - name: Build crapi-web all platforms and conditionally push to Docker Hub - uses: docker/build-push-action@v2 + tests: + needs: build-context + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup Python + uses: actions/setup-python@v5 with: - context: ./services/web - tags: crapi/crapi-web:${{ env.TAG_LATEST }},crapi/crapi-web:${{ env.TAG_NAME }} - push: ${{ needs.build-context.outputs.push_image }} - platforms: ${{ env.PLATFORMS }} - cache-from: type=gha,scope=web-service - cache-to: type=gha,mode=max,scope=web-service + python-version: '3.10' - - name: Build gateway-service all platforms and conditionally push to Docker Hub - uses: docker/build-push-action@v2 + - name: Setup Java + uses: actions/setup-java@v4 with: - context: ./services/gateway-service - tags: crapi/gateway-service:${{ env.TAG_LATEST }},crapi/gateway-service:${{ env.TAG_NAME }} - push: ${{ needs.build-context.outputs.push_image }} - platforms: ${{ env.PLATFORMS }} - cache-from: type=gha,scope=gateway-service - cache-to: type=gha,mode=max,scope=gateway-service + distribution: 'adopt' + java-version: '11' - - name: Build mailhog all platforms and conditionally push to Docker Hub - if: steps.check_mailhog_exists.outputs.files_exists == 'true' - uses: docker/build-push-action@v2 + - name: Setup Go + uses: actions/setup-go@v5 with: - context: ./services/mailhog - tags: crapi/mailhog:${{ env.TAG_LATEST }},crapi/mailhog:${{ env.TAG_NAME }} - push: ${{ needs.build-context.outputs.push_image }} - platforms: ${{ env.PLATFORMS }} - cache-from: type=gha,scope=mailhog-service - cache-to: type=gha,mode=max,scope=mailhog-service + go-version: '1.21' - - name: Dump docker logs on failure - if: failure() - uses: jwalton/gh-docker-logs@v2 + - name: Start the database + run: docker-compose -f services/docker-database.yml up -d + - name: Run identity tests + 
run: | + cd services/identity + ./gradlew test + + - name: Run community tests + run: | + cd services/community + go test -v ./... + mkdir test-results + go run gotest.tools/gotestsum@latest --format testname --junitfile test-results/unit-tests.xml + + - name: Run workshop tests + run: | + cd services/workshop + pip3 install virtualenv + virtualenv venv + source venv/bin/activate + pip3 install -r requirements.txt + mkdir -p test-results + source .env + IS_TESTING=True python3 manage.py test --no-input --testrunner xmlrunner.extra.djangotestrunner.XMLTestRunner + + - name: Publish Test Results + uses: EnricoMi/publish-unit-test-result-action@v2 + if: always() + with: + files: | + **/test-results/**/*.xml + **/test-results/**/*.json + + - name: Run workshop coverage + run: | + cd services/workshop + source venv/bin/activate + source .env + IS_TESTING=True coverage run ./manage.py test --no-input crapi + coverage report + coverage xml -o coverage.xml + + - name: Publish Coverage for workshop + uses: orgoro/coverage@v3.1 + with: + coverageFile: services/workshop/coverage.xml + token: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 00000000..2f6a266b --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,142 @@ +name: Publish Images + +on: + push: + branches: + - 'develop' + - 'main' + tags: + - 'v*.*.*' + paths: + - 'services/**' + - 'postman_collections/**' + - 'deploy/**' + - '.github/workflows/**' + workflow_dispatch: + +jobs: + build-context: + runs-on: ubuntu-latest + steps: + - name: Dump GitHub context + env: + GITHUB_CONTEXT: ${{ toJson(github) }} + run: echo "$GITHUB_CONTEXT" + build: + needs: build-context + runs-on: ubuntu-latest + env: + PLATFORMS: "linux/amd64,linux/arm64" + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Env seen prerun + run: env + + - name: Get branch name + id: branch-name + uses: tj-actions/branch-names@v8 + + - name: Current branch name + run: | + echo "Current Branch: ${{ steps.branch-name.outputs.current_branch }}" + echo "Target Branch: ${{ steps.branch-name.outputs.base_ref_branch }}" + echo "Tag if exist: ${{ steps.branch-name.outputs.tag }}" + + - name: Running on a tag. + if: steps.branch-name.outputs.is_tag == 'true' + run: | + echo "TAG_NAME=${{ steps.branch-name.outputs.tag }}" >> ${GITHUB_ENV} + echo "TAG_LATEST=latest" >> ${GITHUB_ENV} + + - name: Running on a branch and merge. + if: steps.branch-name.outputs.is_tag != 'true' && github.event_name == 'push' && steps.branch-name.outputs.current_branch != 'main' + run: | + echo "TAG_NAME=${{ steps.branch-name.outputs.current_branch }}" >> ${GITHUB_ENV} + echo "TAG_LATEST=${{ steps.branch-name.outputs.current_branch }}" >> ${GITHUB_ENV} + + - name: Running on main branch. + if: steps.branch-name.outputs.is_tag != 'true' && github.event_name == 'push' && steps.branch-name.outputs.current_branch == 'main' + run: | + echo "TAG_NAME=${{ steps.branch-name.outputs.current_branch }}" >> ${GITHUB_ENV} + echo "TAG_LATEST=latest" >> ${GITHUB_ENV} + + - name: Running on a branch and pull req. 
+ if: steps.branch-name.outputs.is_tag != 'true' && github.event_name != 'push' + run: | + echo "TAG_NAME=${{ steps.branch-name.outputs.base_ref_branch }}" >> ${GITHUB_ENV} + echo "TAG_LATEST=${{ steps.branch-name.outputs.base_ref_branch }}" >> ${GITHUB_ENV} + + # setup Docker build action + - name: Set up Docker Buildx + id: buildx + uses: docker/setup-buildx-action@v3 + - name: Set up QEMU + id: qemu + uses: docker/setup-qemu-action@v3 + + - name: Login to DockerHub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Build crapi-identity all platforms and conditionally push to Docker Hub + uses: docker/build-push-action@v3 + with: + context: ./services/identity + tags: crapi/crapi-identity:${{ env.TAG_LATEST }},crapi/crapi-identity:${{ env.TAG_NAME }} + platforms: ${{ env.PLATFORMS }} + cache-from: type=gha,scope=identity-service + cache-to: type=gha,mode=max,scope=identity-service + + - name: Build crapi-workshop all platforms and conditionally push to Docker Hub + uses: docker/build-push-action@v3 + with: + context: ./services/workshop + tags: crapi/crapi-workshop:${{ env.TAG_LATEST }},crapi/crapi-workshop:${{ env.TAG_NAME }} + platforms: ${{ env.PLATFORMS }} + cache-from: type=gha,scope=workshop-service + cache-to: type=gha,mode=max,scope=workshop-service + + - name: Build crapi-community all platforms and conditionally push to Docker Hub + uses: docker/build-push-action@v3 + with: + context: ./services/community + tags: crapi/crapi-community:${{ env.TAG_LATEST }},crapi/crapi-community:${{ env.TAG_NAME }} + platforms: ${{ env.PLATFORMS }} + cache-from: type=gha,scope=community-service + cache-to: type=gha,mode=max,scope=community-service + + - name: Build crapi-web all platforms and conditionally push to Docker Hub + uses: docker/build-push-action@v3 + with: + context: ./services/web + tags: crapi/crapi-web:${{ env.TAG_LATEST }},crapi/crapi-web:${{ env.TAG_NAME }} + platforms: ${{ env.PLATFORMS }} + cache-from: type=gha,scope=web-service + cache-to: type=gha,mode=max,scope=web-service + + - name: Build gateway-service all platforms and conditionally push to Docker Hub + uses: docker/build-push-action@v3 + with: + context: ./services/gateway-service + tags: crapi/gateway-service:${{ env.TAG_LATEST }},crapi/gateway-service:${{ env.TAG_NAME }} + platforms: ${{ env.PLATFORMS }} + cache-from: type=gha,scope=gateway-service + cache-to: type=gha,mode=max,scope=gateway-service + + - name: Build mailhog all platforms and conditionally push to Docker Hub + uses: docker/build-push-action@v3 + with: + context: ./services/mailhog + tags: crapi/mailhog:${{ env.TAG_LATEST }},crapi/mailhog:${{ env.TAG_NAME }} + platforms: ${{ env.PLATFORMS }} + cache-from: type=gha,scope=mailhog-service + cache-to: type=gha,mode=max,scope=mailhog-service + + - name: Dump docker logs on failure + if: failure() + uses: jwalton/gh-docker-logs@v2 + diff --git a/.gitignore b/.gitignore index 8552476c..dbc12474 100644 --- a/.gitignore +++ b/.gitignore @@ -2,3 +2,4 @@ deploy/vagrant/*.log deploy/vagrant/.vagrant .secrets .vscode/ +*.local diff --git a/deploy/docker/.env b/deploy/docker/.env index d263946e..602468f0 100644 --- a/deploy/docker/.env +++ b/deploy/docker/.env @@ -4,5 +4,5 @@ WORKSHOP_SERVER_PORT=8000 ENABLE_SHELL_INJECTION=false ENABLE_LOG4J=false LISTEN_IP="127.0.0.1" -TLS_ENABLED=false +TLS_ENABLED=true VERSION=latest diff --git a/services/community/.gitignore b/services/community/.gitignore new file mode 100644 index 
00000000..51511d1f --- /dev/null +++ b/services/community/.gitignore @@ -0,0 +1 @@ +test-results/ diff --git a/services/community/Dockerfile b/services/community/Dockerfile index 89b85f8f..d7b6cf8e 100644 --- a/services/community/Dockerfile +++ b/services/community/Dockerfile @@ -12,7 +12,7 @@ # GoLang Build -FROM golang:alpine AS builder +FROM golang:1.21-alpine AS builder ENV GO111MODULE=on \ CGO_ENABLED=0 WORKDIR /build @@ -39,6 +39,6 @@ COPY certs /app/certs COPY health.sh /app/health.sh RUN ls -al /app -ARG SERVER_PORT +ARG SERVER_PORT EXPOSE ${SERVER_PORT} CMD /app/main diff --git a/services/docker-database.yml b/services/docker-database.yml new file mode 100644 index 00000000..8a2cf61e --- /dev/null +++ b/services/docker-database.yml @@ -0,0 +1,50 @@ +# Licensed under the Apache License, Version 2.0 (the “License”); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an “AS IS” BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +services: + + postgresdb: + container_name: postgresdb + image: 'postgres:14' + command: ["postgres", "-c", "max_connections=500"] + environment: + POSTGRES_USER: admin + POSTGRES_PASSWORD: crapisecretpassword + POSTGRES_DB: crapi + ports: + - "127.0.0.1:5432:5432" + healthcheck: + test: [ "CMD-SHELL", "pg_isready" ] + interval: 15s + timeout: 15s + retries: 15 + + mongodb: + container_name: mongodb + image: 'mongo:4.4' + environment: + MONGO_INITDB_ROOT_USERNAME: admin + MONGO_INITDB_ROOT_PASSWORD: crapisecretpassword + ports: + - "127.0.0.1:27017:27017" + healthcheck: + test: echo 'db.runCommand("ping").ok' | mongo mongodb:27017/test --quiet + interval: 15s + timeout: 15s + retries: 15 + start_period: 20s + + + + + + diff --git a/services/identity/.gitignore b/services/identity/.gitignore index 87b17ba0..23090a4e 100644 --- a/services/identity/.gitignore +++ b/services/identity/.gitignore @@ -21,3 +21,4 @@ gradle-app.setting # JDT-specific (Eclipse Java Development Tools) .classpath bin/ +test-results/ diff --git a/services/workshop/.gitignore b/services/workshop/.gitignore index ff7f2033..5fa7cefa 100644 --- a/services/workshop/.gitignore +++ b/services/workshop/.gitignore @@ -1,3 +1,6 @@ __pycache__/ *.pyc *.log +.coverage +*.xml +test-results/ diff --git a/services/workshop/Dockerfile b/services/workshop/Dockerfile index fd29593f..17481fc3 100644 --- a/services/workshop/Dockerfile +++ b/services/workshop/Dockerfile @@ -12,21 +12,20 @@ # limitations under the License. -FROM python:3.8-alpine3.15 as build +FROM python:3.10-alpine as build # Not using alpine based on suggestion # https://pythonspeed.com/articles/alpine-docker-python/ -# Python +# Python ENV PYTHONUNBUFFERED=1 RUN apk add --update --no-cache gettext curl py3-pip postgresql-dev RUN apk add --update --no-cache --virtual .build-deps \ python3-dev openssl-dev \ libffi-dev gcc py3-pip \ python3-dev \ - libressl-dev \ musl-dev \ libffi-dev - + ENV PYTHONDONTWRITEBYTECODE 1 ENV PYTHONUNBUFFERED 1 RUN mkdir /app @@ -34,7 +33,7 @@ WORKDIR /app COPY ./ /app RUN pip install wheel && pip wheel . 
--wheel-dir /app/wheels -FROM python:3.8-alpine3.15 +FROM python:3.10-alpine COPY --from=build /app /app WORKDIR /app RUN apk update && apk add --no-cache postgresql-libs curl diff --git a/services/workshop/core/__init__.py b/services/workshop/core/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/workshop/core/management/__init__.py b/services/workshop/core/management/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/services/workshop/core/management/commands/seed_database.py b/services/workshop/core/management/commands/seed_database.py new file mode 100644 index 00000000..597650c1 --- /dev/null +++ b/services/workshop/core/management/commands/seed_database.py @@ -0,0 +1,226 @@ +# +# Licensed under the Apache License, Version 2.0 (the “License”); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an “AS IS” BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +""" +Configuration for crapi application +""" +import django +import sys +from django.apps import AppConfig +import bcrypt +from django.utils import timezone +from django.db import models +from django.db import connection, transaction +import logging +import traceback +from django.core.management.base import BaseCommand +from django.utils import timezone + +logger = logging.getLogger() + + +def create_products(): + from crapi.shop.models import Product + product_details_all = [ + { + 'name': 'Seat', + 'price': 10, + 'image_url': 'images/seat.svg' + }, + { + 'name': 'Wheel', + 'price': 10, + 'image_url': 'images/wheel.svg' + } + ] + for product_details in product_details_all: + if Product.objects.filter(name=product_details['name']).exists(): + logger.info("Product already exists. 
Skipping: "+ product_details['name']) + continue + product = Product.objects.create( + name=product_details['name'], + price=float(product_details['price']), + image_url=product_details['image_url'] + ) + product.save() + logger.info("Created Product: "+str(product.__dict__)) + +def create_mechanics(): + from crapi.user.models import User, UserDetails + from crapi.mechanic.models import Mechanic + mechanic_details_all = [ + { + 'name': 'Jhon', + 'email': 'jhon@example.com', + 'number': '', + 'password': 'Admin1@#', + 'mechanic_code': 'TRAC_JHN' + }, + { + 'name': 'James', + 'email': 'james@example.com', + 'number': '', + 'password': 'Admin1@#', + 'mechanic_code': 'TRAC_JME' + }, + ] + for mechanic_details in mechanic_details_all: + uset = User.objects.filter(email=mechanic_details['email']) + if not uset.exists(): + try: + cursor = connection.cursor() + cursor.execute("select nextval('user_login_id_seq')") + result = cursor.fetchone() + user_id = result[0] + except Exception as e: + logger.error("Failed to fetch user_login_id_seq"+str(e)) + user_id = 1 + + user = User.objects.create( + id=user_id, + email=mechanic_details['email'], + number=mechanic_details['number'], + password=bcrypt.hashpw( + mechanic_details['password'].encode('utf-8'), + bcrypt.gensalt() + ).decode(), + role=User.ROLE_CHOICES.MECH, + created_on=timezone.now() + ) + user.save() + logger.info("Created User: "+str(user.__dict__)) + else: + user = uset.first() + + if Mechanic.objects.filter(mechanic_code=mechanic_details['mechanic_code']): + logger.info("Mechanic already exists. Skipping: " + + mechanic_details['mechanic_code'] + + " " + mechanic_details['name'] + " " + + mechanic_details['email']) + continue + mechanic = Mechanic.objects.create( + mechanic_code=mechanic_details['mechanic_code'], + user=user + ) + mechanic.save() + try: + cursor = connection.cursor() + cursor.execute("select nextval('user_details_id_seq')") + result = cursor.fetchone() + user_details_id = result[0] + except Exception as e: + logger.error("Failed to fetch user_details_id_seq"+str(e)) + user_details_id = 1 + userdetails = UserDetails.objects.create( + id=user_details_id, + available_credit=0, + name=mechanic_details['name'], + status='ACTIVE', + user=user + ) + userdetails.save() + +def create_reports(): + import random + import sys + import textwrap + from crapi.user.models import User, UserDetails, Vehicle + from crapi.mechanic.models import Mechanic, ServiceRequest + from django.utils import timezone + count = ServiceRequest.objects.all().count() + if (count >= 5): + return + logger.info("Creating Reports") + mechanics = Mechanic.objects.all() + vehicles = Vehicle.objects.all() + for i in range(5): + try: + mechanic = random.choice(mechanics) + vehicle = random.choice(vehicles) + status = random.choice(ServiceRequest.STATUS_CHOICES)[0] + vehicle_model = vehicle.vehicle_model + vehicle_company = vehicle_model.vehiclecompany + user = vehicle.owner + user_detail = UserDetails.objects.filter(user=user).first() + service_request = ServiceRequest.objects.create( + vehicle=vehicle, + mechanic=mechanic, + problem_details=textwrap.dedent("""\ + My car {} - {} is having issues. + Can you give me a call on my mobile {}, + Or send me an email at {} + Thanks, + {}. 
+ """.format( + vehicle_company.name, + vehicle_model.model, + user.number, + user.email, + user_detail.name) + ), + status=status, + created_on=timezone.now() + ) + service_request.save() + logger.info("Created Service Request for User %s: %s", user.email, service_request.__dict__) + except Exception as e: + print(sys.exc_info()[0]) + logger.error("Failed to create report: "+str(e)) + +def create_orders(): + import uuid + from crapi.user.models import User, UserDetails + from crapi.shop.models import Product + from crapi.shop.models import Order + if Order.objects.all().count() >= 1: + return + users = User.objects.filter(role=User.ROLE_CHOICES.PREDEFINED).order_by('id') + for user in users: + product = Product.objects.filter(name='Seat').first() + order = Order.objects.create( + user=user, + product=product, + quantity=2, + created_on=timezone.now(), + transaction_id=uuid.uuid4(), + ) + order.save() + logger.info("Created Order for User %s: %s", user.email, order.__dict__) + + +class Command(BaseCommand): + help = 'Seed the database with initial data.' + + def handle(self, *args, **kwargs): + """ + Pre-populate mechanic model and product model + :return: None + """ + logger.info("Pre Populating Model Data") + try: + create_products() + except Exception as e: + logger.error("Cannot Pre Populate Products: "+str(e)) + try: + create_mechanics() + except Exception as e: + logger.error("Cannot Pre Populate Mechanics: "+str(e)) + try: + create_reports() + except Exception as e: + logger.error("Cannot Pre Populate Reports: "+str(e)) + try: + create_orders() + except Exception as e: + logger.error("Cannot Pre Populate Orders: "+str(e)) diff --git a/services/workshop/crapi/apps.py b/services/workshop/crapi/apps.py index f0534e7b..59b3913f 100644 --- a/services/workshop/crapi/apps.py +++ b/services/workshop/crapi/apps.py @@ -147,8 +147,6 @@ def create_reports(): mechanic = random.choice(mechanics) vehicle = random.choice(vehicles) status = random.choice(ServiceRequest.STATUS_CHOICES)[0] - logger.info(vehicle.__dict__) - logger.info(status) vehicle_model = vehicle.vehicle_model vehicle_company = vehicle_model.vehiclecompany user = vehicle.owner @@ -173,7 +171,7 @@ def create_reports(): created_on=timezone.now() ) service_request.save() - logger.info(service_request.__dict__) + logger.info("Created Service Request for User %s: %s", user.email, service_request.__dict__) except Exception as e: print(sys.exc_info()[0]) logger.error("Failed to create report: "+str(e)) @@ -185,26 +183,18 @@ def create_orders(): from crapi.shop.models import Order if Order.objects.all().count() >= 1: return - user = User.objects.filter(email='test@example.com').first() - product = Product.objects.filter(name='Seat').first() - order1 = Order.objects.create( - user=user, - product=product, - quantity=2, - created_on=timezone.now(), - transaction_id=uuid.uuid4(), - ) - order1.save() - logger.info("Created Order:1: "+str(order1.__dict__)) - order2 = Order.objects.create( - user=user, - product=product, - quantity=2, - created_on=timezone.now(), - transaction_id=uuid.uuid4(), - ) - order2.save() - logger.info("Created Order:2: "+str(order2.__dict__)) + users = User.objects.filter(role=User.ROLE_CHOICES.PREDEFINED).order_by('id') + for user in users: + product = Product.objects.filter(name='Seat').first() + order = Order.objects.create( + user=user, + product=product, + quantity=2, + created_on=timezone.now(), + transaction_id=uuid.uuid4(), + ) + order.save() + logger.info("Created Order for User %s: %s", user.email, 
order.__dict__) diff --git a/services/workshop/crapi/mechanic/models.py b/services/workshop/crapi/mechanic/models.py index 7d6bf721..09b7bf26 100644 --- a/services/workshop/crapi/mechanic/models.py +++ b/services/workshop/crapi/mechanic/models.py @@ -53,8 +53,8 @@ class ServiceRequest(models.Model): updated_on = models.DateTimeField(null=True) STATUS_CHOICES = Choices( - ('PEN', "Pending", "Pending"), - ('FIN', "Finished", "Finished") + ('PEN', "pending", "Pending"), + ('FIN', "finished", "Finished") ) status = models.CharField(max_length=10, choices=STATUS_CHOICES, default=STATUS_CHOICES.PEN) diff --git a/services/workshop/crapi/user/models.py b/services/workshop/crapi/user/models.py index a903b0aa..c248f4c2 100644 --- a/services/workshop/crapi/user/models.py +++ b/services/workshop/crapi/user/models.py @@ -36,6 +36,7 @@ class User(models.Model): password = models.CharField(max_length=255) ROLE_CHOICES = Choices( + ('PREDEFINED', 0, 'Predefined'), ('USER', 1, 'User'), ('MECH', 2, 'Mechanic'), ('ADMIN', 3, 'Admin'), diff --git a/services/workshop/crapi_site/settings.py b/services/workshop/crapi_site/settings.py index 85026849..d81d19fe 100644 --- a/services/workshop/crapi_site/settings.py +++ b/services/workshop/crapi_site/settings.py @@ -73,8 +73,10 @@ def get_env_value(env_variable): 'corsheaders', 'health_check', 'health_check.db', - 'crapi.apps.CRAPIConfig', - #'user.apps.UserConfig', + 'core', + 'crapi', + # 'crapi.apps.CRAPIConfig', + # 'user.apps.UserConfig', "django_extensions", ] @@ -95,6 +97,10 @@ def get_env_value(env_variable): ROOT_URLCONF = 'crapi_site.urls' +TEST_RUNNER = 'xmlrunner.extra.djangotestrunner.XMLTestRunner' + +TEST_OUTPUT_DIR = os.path.join(BASE_DIR, 'test-reports') + TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', diff --git a/services/workshop/requirements.txt b/services/workshop/requirements.txt index 6662beb2..d8b2d67b 100644 --- a/services/workshop/requirements.txt +++ b/services/workshop/requirements.txt @@ -17,4 +17,7 @@ pymongo==3.13.0 pyOpenSSL==23.1.1 requests==2.30.0 Werkzeug==2.0.3 -Faker==22.1.0 \ No newline at end of file +Faker==22.1.0 +gunicorn==21.2.0 +coverage==7.4.1 +unittest-xml-reporting==3.2.0 \ No newline at end of file diff --git a/services/workshop/runner.sh b/services/workshop/runner.sh index 5725079b..27b85160 100755 --- a/services/workshop/runner.sh +++ b/services/workshop/runner.sh @@ -23,8 +23,8 @@ python3 manage.py migrate python3 manage.py check &&\ python3 manage.py health_check -## Uncomment the following line if you wish to run tests -IS_TESTING=True python3 manage.py test --no-input +echo "Seeding the database" +python3 manage.py seed_database echo "Starting Django server" if [ "$TLS_ENABLED" = "true" ] || [ "$TLS_ENABLED" = "1" ]; then @@ -38,9 +38,10 @@ if [ "$TLS_ENABLED" = "true" ] || [ "$TLS_ENABLED" = "1" ]; then fi echo "TLS_CERTIFICATE: $TLS_CERTIFICATE" echo "TLS_KEY: $TLS_KEY" - python3 manage.py runserver_plus --cert-file $TLS_CERTIFICATE --key-file $TLS_KEY --noreload 0.0.0.0:${SERVER_PORT} + # python3 manage.py runserver_plus --cert-file $TLS_CERTIFICATE --key-file $TLS_KEY --noreload 0.0.0.0:${SERVER_PORT} + gunicorn --workers=2 --threads=10 --timeout 60 --bind 0.0.0.0:${SERVER_PORT} --certfile $TLS_CERTIFICATE --keyfile $TLS_KEY --log-level=debug crapi_site.wsgi else echo "TLS is DISABLED" - python3 manage.py runserver 0.0.0.0:${SERVER_PORT} --noreload + # python3 manage.py runserver 0.0.0.0:${SERVER_PORT} --noreload + gunicorn --workers=2 --threads=10 --timeout 60 --bind 
0.0.0.0:${SERVER_PORT} --log-level=debug crapi_site.wsgi fi -exec "$@" diff --git a/services/workshop/utils/messages.py b/services/workshop/utils/messages.py index 3b3d83fd..6393f961 100644 --- a/services/workshop/utils/messages.py +++ b/services/workshop/utils/messages.py @@ -37,6 +37,7 @@ COUPON_NOT_FOUND = "Coupon not found" RESTRICTED = "You are not allowed to access this resource!" INVALID_STATUS = "The value of 'status' has to be 'delivered','return pending' or 'returned'" +INVALID_SERVICE_REQUEST_STATUS = "The value of 'status' has to be 'Pending' or 'Finished'" REPORT_ID_MISSING = "Please enter the report_id value." INVALID_REPORT_ID = "Please enter a valid report_id value." REPORT_DOES_NOT_EXIST = "The Report does not exist for given report_id." From 9ce23936a4ddd3b428123be3403fe079075d2bda Mon Sep 17 00:00:00 2001 From: Roshan Piyush Date: Tue, 6 Feb 2024 19:08:37 +0530 Subject: [PATCH 15/46] Fix pull workflow (#233) * Fix pull workflow * Dev requirements --- .github/workflows/pr-build.yml | 12 +----------- services/workshop/dev-requirements.txt | 2 ++ 2 files changed, 3 insertions(+), 11 deletions(-) create mode 100644 services/workshop/dev-requirements.txt diff --git a/.github/workflows/pr-build.yml b/.github/workflows/pr-build.yml index c85332fe..bbc0baf6 100644 --- a/.github/workflows/pr-build.yml +++ b/.github/workflows/pr-build.yml @@ -1,17 +1,6 @@ name: PR Build on: - push: - branches: - - 'develop' - - 'main' - tags: - - 'v*.*.*' - paths: - - 'services/**' - - 'postman_collections/**' - - 'deploy/**' - - '.github/workflows/**' pull_request: paths: - 'services/**' @@ -221,6 +210,7 @@ jobs: virtualenv venv source venv/bin/activate pip3 install -r requirements.txt + pip3 install -r dev-requirements.txt mkdir -p test-results source .env IS_TESTING=True python3 manage.py test --no-input --testrunner xmlrunner.extra.djangotestrunner.XMLTestRunner diff --git a/services/workshop/dev-requirements.txt b/services/workshop/dev-requirements.txt new file mode 100644 index 00000000..b8540af4 --- /dev/null +++ b/services/workshop/dev-requirements.txt @@ -0,0 +1,2 @@ +coverage==7.4.1 +unittest-xml-reporting==3.2.0 \ No newline at end of file From 45c42b65227e49a8cb3400cc1ba91ed98475a33c Mon Sep 17 00:00:00 2001 From: Roshan Piyush Date: Wed, 7 Feb 2024 21:27:34 +0530 Subject: [PATCH 16/46] Fix publish --- .github/workflows/publish.yml | 22 ++++++++++++---------- services/workshop/runner.sh | 4 ++-- 2 files changed, 14 insertions(+), 12 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 2f6a266b..158a9821 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -82,61 +82,63 @@ jobs: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Build crapi-identity all platforms and conditionally push to Docker Hub + - name: Build crapi-identity all platforms and push to Docker Hub uses: docker/build-push-action@v3 with: context: ./services/identity tags: crapi/crapi-identity:${{ env.TAG_LATEST }},crapi/crapi-identity:${{ env.TAG_NAME }} platforms: ${{ env.PLATFORMS }} + push: true cache-from: type=gha,scope=identity-service cache-to: type=gha,mode=max,scope=identity-service - - name: Build crapi-workshop all platforms and conditionally push to Docker Hub + - name: Build crapi-workshop all platforms and push to Docker Hub uses: docker/build-push-action@v3 with: context: ./services/workshop tags: crapi/crapi-workshop:${{ env.TAG_LATEST }},crapi/crapi-workshop:${{ env.TAG_NAME }} platforms: 
${{ env.PLATFORMS }} + push: true cache-from: type=gha,scope=workshop-service cache-to: type=gha,mode=max,scope=workshop-service - - name: Build crapi-community all platforms and conditionally push to Docker Hub + - name: Build crapi-community all platforms and push to Docker Hub uses: docker/build-push-action@v3 with: context: ./services/community tags: crapi/crapi-community:${{ env.TAG_LATEST }},crapi/crapi-community:${{ env.TAG_NAME }} platforms: ${{ env.PLATFORMS }} + push: true cache-from: type=gha,scope=community-service cache-to: type=gha,mode=max,scope=community-service - - name: Build crapi-web all platforms and conditionally push to Docker Hub + - name: Build crapi-web all platforms and push to Docker Hub uses: docker/build-push-action@v3 with: context: ./services/web tags: crapi/crapi-web:${{ env.TAG_LATEST }},crapi/crapi-web:${{ env.TAG_NAME }} platforms: ${{ env.PLATFORMS }} + push: true cache-from: type=gha,scope=web-service cache-to: type=gha,mode=max,scope=web-service - - name: Build gateway-service all platforms and conditionally push to Docker Hub + - name: Build gateway-service all platforms and push to Docker Hub uses: docker/build-push-action@v3 with: context: ./services/gateway-service tags: crapi/gateway-service:${{ env.TAG_LATEST }},crapi/gateway-service:${{ env.TAG_NAME }} platforms: ${{ env.PLATFORMS }} + push: true cache-from: type=gha,scope=gateway-service cache-to: type=gha,mode=max,scope=gateway-service - - name: Build mailhog all platforms and conditionally push to Docker Hub + - name: Build mailhog all platforms and push to Docker Hub uses: docker/build-push-action@v3 with: context: ./services/mailhog tags: crapi/mailhog:${{ env.TAG_LATEST }},crapi/mailhog:${{ env.TAG_NAME }} platforms: ${{ env.PLATFORMS }} + push: true cache-from: type=gha,scope=mailhog-service cache-to: type=gha,mode=max,scope=mailhog-service - - name: Dump docker logs on failure - if: failure() - uses: jwalton/gh-docker-logs@v2 - diff --git a/services/workshop/runner.sh b/services/workshop/runner.sh index 27b85160..445755e1 100755 --- a/services/workshop/runner.sh +++ b/services/workshop/runner.sh @@ -39,9 +39,9 @@ if [ "$TLS_ENABLED" = "true" ] || [ "$TLS_ENABLED" = "1" ]; then echo "TLS_CERTIFICATE: $TLS_CERTIFICATE" echo "TLS_KEY: $TLS_KEY" # python3 manage.py runserver_plus --cert-file $TLS_CERTIFICATE --key-file $TLS_KEY --noreload 0.0.0.0:${SERVER_PORT} - gunicorn --workers=2 --threads=10 --timeout 60 --bind 0.0.0.0:${SERVER_PORT} --certfile $TLS_CERTIFICATE --keyfile $TLS_KEY --log-level=debug crapi_site.wsgi + gunicorn --workers=1 --threads=20 --timeout 60 --bind 0.0.0.0:${SERVER_PORT} --certfile $TLS_CERTIFICATE --keyfile $TLS_KEY --log-level=debug crapi_site.wsgi else echo "TLS is DISABLED" # python3 manage.py runserver 0.0.0.0:${SERVER_PORT} --noreload - gunicorn --workers=2 --threads=10 --timeout 60 --bind 0.0.0.0:${SERVER_PORT} --log-level=debug crapi_site.wsgi + gunicorn --workers=1 --threads=20 --timeout 60 --bind 0.0.0.0:${SERVER_PORT} --log-level=debug crapi_site.wsgi fi From c393df67357454fe3ec2bbaa2a35cd497c67d49a Mon Sep 17 00:00:00 2001 From: Roshan Piyush Date: Wed, 7 Feb 2024 22:26:16 +0530 Subject: [PATCH 17/46] Fix orders --- services/workshop/core/management/commands/seed_database.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/services/workshop/core/management/commands/seed_database.py b/services/workshop/core/management/commands/seed_database.py index 597650c1..29517ea5 100644 --- 
a/services/workshop/core/management/commands/seed_database.py +++ b/services/workshop/core/management/commands/seed_database.py @@ -185,8 +185,9 @@ def create_orders(): from crapi.shop.models import Order if Order.objects.all().count() >= 1: return - users = User.objects.filter(role=User.ROLE_CHOICES.PREDEFINED).order_by('id') - for user in users: + users = User.objects.all().order_by('id') + users_seed = users[:5] + for user in users_seed: product = Product.objects.filter(name='Seat').first() order = Order.objects.create( user=user, From 17625b6c37d796628a934355b517b3cf0c82ac11 Mon Sep 17 00:00:00 2001 From: Roshan Piyush Date: Thu, 8 Feb 2024 10:25:31 +0530 Subject: [PATCH 18/46] Rename openapi-spec.json to crapi-openapi-spec.json --- openapi-spec/{openapi-spec.json => crapi-openapi-spec.json} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename openapi-spec/{openapi-spec.json => crapi-openapi-spec.json} (99%) diff --git a/openapi-spec/openapi-spec.json b/openapi-spec/crapi-openapi-spec.json similarity index 99% rename from openapi-spec/openapi-spec.json rename to openapi-spec/crapi-openapi-spec.json index 5d86dc81..a33fef2f 100644 --- a/openapi-spec/openapi-spec.json +++ b/openapi-spec/crapi-openapi-spec.json @@ -3408,4 +3408,4 @@ } } } -} \ No newline at end of file +} From 98c8b9e830fe85d1fea1f276cb3b3234a2dd278c Mon Sep 17 00:00:00 2001 From: Roshan Piyush Date: Wed, 14 Feb 2024 10:39:06 +0530 Subject: [PATCH 19/46] Cleanup resource limits --- deploy/helm/values.yaml | 36 ++---------------------------------- 1 file changed, 2 insertions(+), 34 deletions(-) diff --git a/deploy/helm/values.yaml b/deploy/helm/values.yaml index 6735da7c..a5b6c8df 100644 --- a/deploy/helm/values.yaml +++ b/deploy/helm/values.yaml @@ -32,11 +32,6 @@ web: name: crapi-web-configmap labels: app: crapi-web - resources: - limits: - cpu: "500m" - requests: - cpu: 256m deploymentLabels: app: crapi-web podLabels: @@ -79,11 +74,6 @@ identity: keyStorePassword: passw0rd keyPassword: passw0rd keyAlias: identity - resources: - limits: - cpu: "500m" - requests: - cpu: 256m deploymentLabels: app: crapi-identity podLabels: @@ -108,11 +98,6 @@ community: app: crapi-community postgresDbDriver: postgres mongoDbDriver: mongodb - resources: - limits: - cpu: "500m" - requests: - cpu: 256m deploymentLabels: app: crapi-community podLabels: @@ -138,11 +123,6 @@ workshop: postgresDbDriver: postgres mongoDbDriver: mongodb secretKey: crapi - resources: - limits: - cpu: "256m" - requests: - cpu: 256m deploymentLabels: app: crapi-workshop podLabels: @@ -175,13 +155,6 @@ mailhog: labels: app: mailhog mailhogStorage: mongodb - resources: - limits: - cpu: "1" - memory: 1024Mi - requests: - cpu: 50m - memory: 64Mi deploymentLabels: app: mailhog podLabels: @@ -218,7 +191,7 @@ mongodb: app: mongodb resources: requests: - storage: 1Gi + storage: 2Gi serviceSelectorLabels: app: mongodb podLabels: @@ -252,7 +225,7 @@ postgresdb: app: postgresdb resources: requests: - storage: 1Gi + storage: 2Gi serviceSelectorLabels: app: postgresdb podLabels: @@ -275,11 +248,6 @@ apiGatewayService: name: gateway-service labels: app: gateway-service - resources: - limits: - cpu: "100m" - requests: - cpu: 50m deploymentLabels: app: gateway-service podLabels: From 8b0cb5226baebd6522e9a78c5076b26d74939e3b Mon Sep 17 00:00:00 2001 From: Roshan Piyush Date: Sat, 17 Feb 2024 15:13:35 +0530 Subject: [PATCH 20/46] Clean community service --- services/community/go.mod | 15 - services/community/go.sum | 30 +- .../github.com/globalsign/mgo/.gitignore | 2 - 
.../github.com/globalsign/mgo/.travis.yml | 49 - .../github.com/globalsign/mgo/CONTRIBUTING.md | 14 - .../vendor/github.com/globalsign/mgo/LICENSE | 25 - .../vendor/github.com/globalsign/mgo/Makefile | 5 - .../github.com/globalsign/mgo/README.md | 105 - .../vendor/github.com/globalsign/mgo/auth.go | 467 -- .../github.com/globalsign/mgo/bson/LICENSE | 25 - .../github.com/globalsign/mgo/bson/README.md | 12 - .../github.com/globalsign/mgo/bson/bson.go | 836 --- .../globalsign/mgo/bson/compatibility.go | 29 - .../github.com/globalsign/mgo/bson/decimal.go | 312 - .../github.com/globalsign/mgo/bson/decode.go | 1055 --- .../github.com/globalsign/mgo/bson/encode.go | 645 -- .../github.com/globalsign/mgo/bson/json.go | 384 -- .../github.com/globalsign/mgo/bson/stream.go | 90 - .../vendor/github.com/globalsign/mgo/bulk.go | 366 -- .../globalsign/mgo/changestreams.go | 357 -- .../github.com/globalsign/mgo/cluster.go | 704 -- .../github.com/globalsign/mgo/coarse_time.go | 62 - .../vendor/github.com/globalsign/mgo/doc.go | 35 - .../github.com/globalsign/mgo/gridfs.go | 782 --- .../globalsign/mgo/internal/json/LICENSE | 27 - .../globalsign/mgo/internal/json/decode.go | 1685 ----- .../globalsign/mgo/internal/json/encode.go | 1260 ---- .../globalsign/mgo/internal/json/extension.go | 95 - .../globalsign/mgo/internal/json/fold.go | 143 - .../globalsign/mgo/internal/json/indent.go | 141 - .../globalsign/mgo/internal/json/scanner.go | 697 -- .../globalsign/mgo/internal/json/stream.go | 510 -- .../globalsign/mgo/internal/json/tags.go | 44 - .../globalsign/mgo/internal/sasl/sasl.c | 77 - .../globalsign/mgo/internal/sasl/sasl.go | 142 - .../mgo/internal/sasl/sasl_windows.c | 122 - .../mgo/internal/sasl/sasl_windows.go | 142 - .../mgo/internal/sasl/sasl_windows.h | 7 - .../mgo/internal/sasl/sspi_windows.c | 96 - .../mgo/internal/sasl/sspi_windows.h | 70 - .../globalsign/mgo/internal/scram/scram.go | 266 - .../vendor/github.com/globalsign/mgo/log.go | 133 - .../vendor/github.com/globalsign/mgo/queue.go | 91 - .../github.com/globalsign/mgo/raceoff.go | 5 - .../github.com/globalsign/mgo/raceon.go | 5 - .../github.com/globalsign/mgo/saslimpl.go | 11 - .../github.com/globalsign/mgo/saslstub.go | 11 - .../github.com/globalsign/mgo/server.go | 609 -- .../github.com/globalsign/mgo/session.go | 5656 ----------------- .../github.com/globalsign/mgo/socket.go | 756 --- .../vendor/github.com/globalsign/mgo/stats.go | 184 - .../github.com/go-bongo/go-dotaccess/LICENSE | 21 - .../go-bongo/go-dotaccess/README.md | 46 - .../go-bongo/go-dotaccess/dotaccess.go | 91 - .../klauspost/compress/fse/README.md | 156 +- .../klauspost/compress/huff0/README.md | 172 +- .../github.com/oleiade/reflections/.gitignore | 22 - .../oleiade/reflections/.travis.yml | 7 - .../github.com/oleiade/reflections/AUTHORS.md | 8 - .../github.com/oleiade/reflections/LICENSE | 20 - .../github.com/oleiade/reflections/README.md | 227 - .../oleiade/reflections/reflections.go | 284 - services/community/vendor/modules.txt | 29 - 63 files changed, 175 insertions(+), 20299 deletions(-) delete mode 100644 services/community/vendor/github.com/globalsign/mgo/.gitignore delete mode 100644 services/community/vendor/github.com/globalsign/mgo/.travis.yml delete mode 100644 services/community/vendor/github.com/globalsign/mgo/CONTRIBUTING.md delete mode 100644 services/community/vendor/github.com/globalsign/mgo/LICENSE delete mode 100644 services/community/vendor/github.com/globalsign/mgo/Makefile delete mode 100644 services/community/vendor/github.com/globalsign/mgo/README.md 
delete mode 100644 services/community/vendor/github.com/globalsign/mgo/auth.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/bson/LICENSE delete mode 100644 services/community/vendor/github.com/globalsign/mgo/bson/README.md delete mode 100644 services/community/vendor/github.com/globalsign/mgo/bson/bson.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/bson/compatibility.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/bson/decimal.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/bson/decode.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/bson/encode.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/bson/json.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/bson/stream.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/bulk.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/changestreams.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/cluster.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/coarse_time.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/doc.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/gridfs.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/internal/json/LICENSE delete mode 100644 services/community/vendor/github.com/globalsign/mgo/internal/json/decode.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/internal/json/encode.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/internal/json/extension.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/internal/json/fold.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/internal/json/indent.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/internal/json/scanner.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/internal/json/stream.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/internal/json/tags.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/internal/sasl/sasl.c delete mode 100644 services/community/vendor/github.com/globalsign/mgo/internal/sasl/sasl.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/internal/sasl/sasl_windows.c delete mode 100644 services/community/vendor/github.com/globalsign/mgo/internal/sasl/sasl_windows.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/internal/sasl/sasl_windows.h delete mode 100644 services/community/vendor/github.com/globalsign/mgo/internal/sasl/sspi_windows.c delete mode 100644 services/community/vendor/github.com/globalsign/mgo/internal/sasl/sspi_windows.h delete mode 100644 services/community/vendor/github.com/globalsign/mgo/internal/scram/scram.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/log.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/queue.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/raceoff.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/raceon.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/saslimpl.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/saslstub.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/server.go 
delete mode 100644 services/community/vendor/github.com/globalsign/mgo/session.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/socket.go delete mode 100644 services/community/vendor/github.com/globalsign/mgo/stats.go delete mode 100644 services/community/vendor/github.com/go-bongo/go-dotaccess/LICENSE delete mode 100644 services/community/vendor/github.com/go-bongo/go-dotaccess/README.md delete mode 100644 services/community/vendor/github.com/go-bongo/go-dotaccess/dotaccess.go delete mode 100644 services/community/vendor/github.com/oleiade/reflections/.gitignore delete mode 100644 services/community/vendor/github.com/oleiade/reflections/.travis.yml delete mode 100644 services/community/vendor/github.com/oleiade/reflections/AUTHORS.md delete mode 100644 services/community/vendor/github.com/oleiade/reflections/LICENSE delete mode 100644 services/community/vendor/github.com/oleiade/reflections/README.md delete mode 100644 services/community/vendor/github.com/oleiade/reflections/reflections.go diff --git a/services/community/go.mod b/services/community/go.mod index 56461214..5f56e7bd 100644 --- a/services/community/go.mod +++ b/services/community/go.mod @@ -5,27 +5,12 @@ go 1.14 require ( github.com/badoux/checkmail v0.0.0-20200623144435-f9f80cb795fa github.com/dgrijalva/jwt-go v3.2.0+incompatible - github.com/globalsign/mgo v0.0.0-20181015135952-eeefdecb41b8 - github.com/go-bongo/go-dotaccess v0.0.0-20190924013105-74ea4f4ca4eb - github.com/go-stack/stack v1.8.0 - github.com/golang/snappy v0.0.1 github.com/google/uuid v1.1.1 // indirect - github.com/goonode/mogo v0.0.0-20181028112152-10c38e9be609 // indirect - github.com/gorilla/handlers v1.4.2 // indirect github.com/gorilla/mux v1.7.4 github.com/jinzhu/gorm v1.9.14 github.com/joho/godotenv v1.3.0 - github.com/klauspost/compress v1.9.5 github.com/lib/pq v1.7.0 // indirect github.com/lithammer/shortuuid v3.0.0+incompatible - github.com/oleiade/reflections v1.0.0 - github.com/pkg/errors v0.8.1 - github.com/rs/cors v1.7.0 // indirect - github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c - github.com/xdg/stringprep v0.0.0-20180714160509-73f8eece6fdc go.mongodb.org/mongo-driver v1.3.5 golang.org/x/crypto v0.0.0-20200709230013-948cd5f35899 - golang.org/x/sync v0.0.0-20190423024810-112230192c58 - golang.org/x/text v0.3.3 - gopkg.in/go-playground/assert.v1 v1.2.1 // indirect ) diff --git a/services/community/go.sum b/services/community/go.sum index 45ff6829..025c1607 100644 --- a/services/community/go.sum +++ b/services/community/go.sum @@ -4,16 +4,15 @@ github.com/andybalholm/cascadia v1.1.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9Pq github.com/badoux/checkmail v0.0.0-20200623144435-f9f80cb795fa h1:Wd0sN2PB+jhNm+z/eJz9p6XT23H8MVUIQUJs+8DQnXc= github.com/badoux/checkmail v0.0.0-20200623144435-f9f80cb795fa/go.mod h1:XroCOBU5zzZJcLvgwU15I+2xXyCdTWXyR9MGfRhBYy0= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/denisenkom/go-mssqldb v0.0.0-20191124224453-732737034ffd h1:83Wprp6ROGeiHFAP8WJdI2RoxALQYgdllERc3N5N2DM= github.com/denisenkom/go-mssqldb v0.0.0-20191124224453-732737034ffd/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU= -github.com/dgrijalva/jwt-go v1.0.2 h1:KPldsxuKGsS2FPWsNeg9ZO18aCrGKujPoWXn2yo+KQM= github.com/dgrijalva/jwt-go v3.2.0+incompatible 
h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM= github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= +github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5 h1:Yzb9+7DPaBjB8zlTR87/ElzFsnQfuHnVUVqpZZIcV5Y= github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5/go.mod h1:a2zkGnVExMxdzMo3M0Hi/3sEU+cWnZpSni0O6/Yb/P0= -github.com/globalsign/mgo v0.0.0-20181015135952-eeefdecb41b8 h1:DujepqpGd1hyOd7aW59XpK7Qymp8iy83xq74fLr21is= -github.com/globalsign/mgo v0.0.0-20181015135952-eeefdecb41b8/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q= -github.com/go-bongo/go-dotaccess v0.0.0-20190924013105-74ea4f4ca4eb h1:wI1Bi9HWHqeYHEzynJVKO1j4c6bDcujSo3+aFqECbug= -github.com/go-bongo/go-dotaccess v0.0.0-20190924013105-74ea4f4ca4eb/go.mod h1:qN1bnlshxJYF58B+mdviLPf2sYHX99yec7pQVoEPJ2I= +github.com/go-sql-driver/mysql v1.5.0 h1:ozyZYNQW3x3HtqT1jira07DN2PArx2v7/mN66gGcHOs= github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= github.com/go-stack/stack v1.8.0 h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= @@ -41,24 +40,22 @@ github.com/gobuffalo/packd v0.1.0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWe github.com/gobuffalo/packr/v2 v2.0.9/go.mod h1:emmyGweYTm6Kdper+iywB6YK5YzuKchGtJQZ0Odn4pQ= github.com/gobuffalo/packr/v2 v2.2.0/go.mod h1:CaAwI0GPIAv+5wKLtv8Afwl+Cm78K/I/VCm/3ptBN+0= github.com/gobuffalo/syncx v0.0.0-20190224160051-33c29581e754/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw= +github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe h1:lXe2qZdvpiX5WZkZR4hgp4KJVfY3nMkvmwbVkpv1rVY= github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= github.com/golang/snappy v0.0.1 h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4= github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/uuid v1.1.1 h1:Gkbcsh/GbpXz7lPftLA3P6TYMwjCLYm83jiFQZF/3gY= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/goonode/mogo v0.0.0-20181028112152-10c38e9be609 h1:3NRjuD9F0Kdhmr7gcogqlv0IawixhXYL47rAGs6ggsk= -github.com/goonode/mogo v0.0.0-20181028112152-10c38e9be609/go.mod h1:Ek7bF4sZmDxL+4lg6xDKCjg2g5HUIuB1B3lbB8qX/FI= -github.com/gorilla/handlers v1.4.2/go.mod h1:Qkdc/uu4tH4g6mTK6auzZ766c4CA0Ng8+o/OAirnOIQ= github.com/gorilla/mux v1.7.4 h1:VuZ8uybHlWmqV03+zRzdwKL4tUnIp1MAQtp1mIFE1bc= github.com/gorilla/mux v1.7.4/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= -github.com/jbarham/gopgsqldriver v0.0.0-20120227112153-f8287ee9bfe2 h1:gOLSX4BzjbqvSVohlYamLd5V1ayRFNgw8/NeY6Hn74Q= -github.com/jbarham/gopgsqldriver v0.0.0-20120227112153-f8287ee9bfe2/go.mod h1:lQcihSZp1fUp8WcdmZoKHJKLBE5fmimy7Uj7zAZbKPc= github.com/jinzhu/gorm v1.9.14 h1:Kg3ShyTPcM6nzVo148fRrcMO6MNKuqtOUwnzqMgVniM= github.com/jinzhu/gorm v1.9.14/go.mod h1:G3LB3wezTOWM2ITLzPxEXgSkOXAntiLHS7UdBefADcs= github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= +github.com/jinzhu/now v1.0.1 
h1:HjfetcXq097iXP0uoPCdnM4Efp5/9MsM0/M+XOTeR3M= github.com/jinzhu/now v1.0.1/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc= github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= @@ -71,29 +68,25 @@ github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxv github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= -github.com/lib/pq v1.1.1 h1:sJZmqHoEaY7f+NPP8pgLB/WxulyR3fewgCM2qaSlBb4= github.com/lib/pq v1.1.1/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.7.0 h1:h93mCPfUSkaul3Ka/VG8uZdmW1uMHDGxzu0NWHuJmHY= github.com/lib/pq v1.7.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/lithammer/shortuuid v1.0.0 h1:kdcbvjGVEgqeVeDIRtnANOi/F6ftbKrtbxY+cjQmK1Q= github.com/lithammer/shortuuid v3.0.0+incompatible h1:NcD0xWW/MZYXEHa6ITy6kaXN5nwm/V115vj2YXfhS0w= github.com/lithammer/shortuuid v3.0.0+incompatible/go.mod h1:FR74pbAuElzOUuenUHTK2Tciko1/vKuIKS9dSkDrA4w= github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE= github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= +github.com/mattn/go-sqlite3 v1.14.0 h1:mLyGNKR8+Vv9CAU7PphKa2hkEqxxhn8i32J6FPj1/QA= github.com/mattn/go-sqlite3 v1.14.0/go.mod h1:JIl7NbARA7phWnGvh0LKTyg7S9BA+6gx71ShQilpsus= github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= -github.com/oleiade/reflections v1.0.0 h1:0ir4pc6v8/PJ0yw5AEtMddfXpWBXg9cnG7SgSoJuCgY= -github.com/oleiade/reflections v1.0.0/go.mod h1:RbATFBbKYkVdqmSFtx13Bb/tVhR0lgOBXunWTZKeL4w= github.com/pelletier/go-toml v1.4.0/go.mod h1:PN7xzY2wHTK0K9p34ErDQMlFxa51Fk0OUruD3k1mMwo= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik= -github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= github.com/sirupsen/logrus v1.4.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= @@ -102,7 +95,9 @@ github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnIn github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0 
h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4= github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c h1:u40Z8hqBAAQyv+vATcGgV0YCnDjqSL7/q/JyPhhJSPk= github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= @@ -115,7 +110,6 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk golang.org/x/crypto v0.0.0-20190325154230-a5d413f7728c/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= golang.org/x/crypto v0.0.0-20190530122614-20be4c3c3ed5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20191205180655-e7c4368fe9dd h1:GGJVjV8waZKRHrgwvtH66z9ZGVurTD1MT0n1Bb+q4aM= golang.org/x/crypto v0.0.0-20191205180655-e7c4368fe9dd/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200709230013-948cd5f35899 h1:DZhuSZLsGlFL4CmhA8BcRA0mnthyA/nZ00AqCUo7vHg= golang.org/x/crypto v0.0.0-20200709230013-948cd5f35899/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= @@ -147,6 +141,4 @@ golang.org/x/tools v0.0.0-20190531172133-b3315ee88b7d/go.mod h1:/rFqwRUd4F7ZHNgw gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= -gopkg.in/go-playground/assert.v1 v1.2.1 h1:xoYuJVE7KT85PYWrN730RguIQO0ePzVRfFMXadIrXTM= -gopkg.in/go-playground/assert.v1 v1.2.1/go.mod h1:9RXL0bg/zibRAgZUYszZSwO/z8Y/a8bDuhia5mkpMnE= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/services/community/vendor/github.com/globalsign/mgo/.gitignore b/services/community/vendor/github.com/globalsign/mgo/.gitignore deleted file mode 100644 index 9a3120f6..00000000 --- a/services/community/vendor/github.com/globalsign/mgo/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -_harness -.vscode \ No newline at end of file diff --git a/services/community/vendor/github.com/globalsign/mgo/.travis.yml b/services/community/vendor/github.com/globalsign/mgo/.travis.yml deleted file mode 100644 index f1f89e96..00000000 --- a/services/community/vendor/github.com/globalsign/mgo/.travis.yml +++ /dev/null @@ -1,49 +0,0 @@ -language: go - -go_import_path: github.com/globalsign/mgo - -go: - - 1.9.x - - 1.10.x - -env: - global: - - BUCKET=https://s3.eu-west-2.amazonaws.com/globalsign-mgo - - FASTDL=https://fastdl.mongodb.org/linux - matrix: - - MONGODB=x86_64-ubuntu1404-3.0.15 - - MONGODB=x86_64-ubuntu1404-3.2.17 - - MONGODB=x86_64-ubuntu1404-3.4.10 - - MONGODB=x86_64-ubuntu1404-3.6.0 - -install: - - - wget $FASTDL/mongodb-linux-$MONGODB.tgz - - tar xzvf mongodb-linux-$MONGODB.tgz - - export PATH=$PWD/mongodb-linux-$MONGODB/bin:$PATH - - - wget $BUCKET/daemontools.tar.gz - - tar xzvf daemontools.tar.gz - - export PATH=$PWD/daemontools:$PATH - - - go get gopkg.in/check.v1 - - go get gopkg.in/yaml.v2 - - go get gopkg.in/tomb.v2 - - go get golang.org/x/lint/golint - -before_script: - - golint ./... 
| grep -v 'ID' | cat - - go vet github.com/globalsign/mgo/bson github.com/globalsign/mgo/txn github.com/globalsign/mgo - - export NOIPV6=1 - - make startdb - -script: - - (cd bson && go test -check.v) - - go test -check.v -fast - - (cd txn && go test -check.v) - - make stopdb - -git: - depth: 3 - -# vim:sw=4:ts=4:et diff --git a/services/community/vendor/github.com/globalsign/mgo/CONTRIBUTING.md b/services/community/vendor/github.com/globalsign/mgo/CONTRIBUTING.md deleted file mode 100644 index 79539955..00000000 --- a/services/community/vendor/github.com/globalsign/mgo/CONTRIBUTING.md +++ /dev/null @@ -1,14 +0,0 @@ -Contributing -------------------------- - -We really appreciate contributions, but they must meet the following requirements: - -* A PR should have a brief description of the problem/feature being proposed -* Pull requests should target the `development` branch -* Existing tests should pass and any new code should be covered with it's own test(s) (use [travis-ci](https://travis-ci.org)) -* New functions should be [documented](https://blog.golang.org/godoc-documenting-go-code) clearly -* Code should pass `golint`, `go vet` and `go fmt` - -We merge PRs into `development`, which is then tested in a sharded, replicated environment in our datacenter for regressions. Once everyone is happy, we merge to master - this is to maintain a bit of quality control past the usual PR process. - -**Thanks** for helping! diff --git a/services/community/vendor/github.com/globalsign/mgo/LICENSE b/services/community/vendor/github.com/globalsign/mgo/LICENSE deleted file mode 100644 index 770c7672..00000000 --- a/services/community/vendor/github.com/globalsign/mgo/LICENSE +++ /dev/null @@ -1,25 +0,0 @@ -mgo - MongoDB driver for Go - -Copyright (c) 2010-2013 - Gustavo Niemeyer - -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/services/community/vendor/github.com/globalsign/mgo/Makefile b/services/community/vendor/github.com/globalsign/mgo/Makefile deleted file mode 100644 index d1027d45..00000000 --- a/services/community/vendor/github.com/globalsign/mgo/Makefile +++ /dev/null @@ -1,5 +0,0 @@ -startdb: - @harness/setup.sh start - -stopdb: - @harness/setup.sh stop diff --git a/services/community/vendor/github.com/globalsign/mgo/README.md b/services/community/vendor/github.com/globalsign/mgo/README.md deleted file mode 100644 index 76fd0554..00000000 --- a/services/community/vendor/github.com/globalsign/mgo/README.md +++ /dev/null @@ -1,105 +0,0 @@ -[![Build Status](https://travis-ci.org/globalsign/mgo.svg?branch=master)](https://travis-ci.org/globalsign/mgo) [![GoDoc](https://godoc.org/github.com/globalsign/mgo?status.svg)](https://godoc.org/github.com/globalsign/mgo) - -The MongoDB driver for Go -------------------------- - -This fork has had a few improvements by ourselves as well as several PR's merged from the original mgo repo that are currently awaiting review. -Changes are mostly geared towards performance improvements and bug fixes, though a few new features have been added. - -Further PR's (with tests) are welcome, but please maintain backwards compatibility. - -Detailed documentation of the API is available at -[GoDoc](https://godoc.org/github.com/globalsign/mgo). - -A [sub-package](https://godoc.org/github.com/globalsign/mgo/bson) that implements the [BSON](http://bsonspec.org) specification is also included, and may be used independently of the driver. - -## Supported Versions - -`mgo` is known to work well on (and has integration tests against) MongoDB v3.0, 3.2, 3.4 and 3.6. - -MongoDB 4.0 is currently experimental - we would happily accept PRs to help improve support! 
- -## Changes -* Fixes attempting to authenticate before every query ([details](https://github.com/go-mgo/mgo/issues/254)) -* Removes bulk update / delete batch size limitations ([details](https://github.com/go-mgo/mgo/issues/288)) -* Adds native support for `time.Duration` marshalling ([details](https://github.com/go-mgo/mgo/pull/373)) -* Reduce memory footprint / garbage collection pressure by reusing buffers ([details](https://github.com/go-mgo/mgo/pull/229), [more](https://github.com/globalsign/mgo/pull/56)) -* Support majority read concerns ([details](https://github.com/globalsign/mgo/pull/2)) -* Improved connection handling ([details](https://github.com/globalsign/mgo/pull/5)) -* Hides SASL warnings ([details](https://github.com/globalsign/mgo/pull/7)) -* Support for partial indexes ([details](https://github.com/domodwyer/mgo/commit/5efe8eccb028238d93c222828cae4806aeae9f51)) -* Fixes timezone handling ([details](https://github.com/go-mgo/mgo/pull/464)) -* Integration tests run against MongoDB 3.2 & 3.4 releases ([details](https://github.com/globalsign/mgo/pull/4), [more](https://github.com/globalsign/mgo/pull/24), [more](https://github.com/globalsign/mgo/pull/35)) -* Improved multi-document transaction performance ([details](https://github.com/globalsign/mgo/pull/10), [more](https://github.com/globalsign/mgo/pull/11), [more](https://github.com/globalsign/mgo/pull/16)) -* Fixes cursor timeouts ([details](https://jira.mongodb.org/browse/SERVER-24899)) -* Support index hints and timeouts for count queries ([details](https://github.com/globalsign/mgo/pull/17)) -* Don't panic when handling indexed `int64` fields ([details](https://github.com/go-mgo/mgo/issues/475)) -* Supports dropping all indexes on a collection ([details](https://github.com/globalsign/mgo/pull/25)) -* Annotates log entries/profiler output with optional appName on 3.4+ ([details](https://github.com/globalsign/mgo/pull/28)) -* Support for read-only [views](https://docs.mongodb.com/manual/core/views/) in 3.4+ ([details](https://github.com/globalsign/mgo/pull/33)) -* Support for [collations](https://docs.mongodb.com/manual/reference/collation/) in 3.4+ ([details](https://github.com/globalsign/mgo/pull/37), [more](https://github.com/globalsign/mgo/pull/166)) -* Provide BSON constants for convenience/sanity ([details](https://github.com/globalsign/mgo/pull/41)) -* Consistently unmarshal time.Time values as UTC ([details](https://github.com/globalsign/mgo/pull/42)) -* Enforces best practise coding guidelines ([details](https://github.com/globalsign/mgo/pull/44)) -* GetBSON correctly handles structs with both fields and pointers ([details](https://github.com/globalsign/mgo/pull/40)) -* Improved bson.Raw unmarshalling performance ([details](https://github.com/globalsign/mgo/pull/49)) -* Minimise socket connection timeouts due to excessive locking ([details](https://github.com/globalsign/mgo/pull/52)) -* Natively support X509 client authentication ([details](https://github.com/globalsign/mgo/pull/55)) -* Gracefully recover from a temporarily unreachable server ([details](https://github.com/globalsign/mgo/pull/69)) -* Use JSON tags when no explicit BSON are tags set ([details](https://github.com/globalsign/mgo/pull/91)) -* Support [$changeStream](https://docs.mongodb.com/manual/changeStreams/) tailing on 3.6+ ([details](https://github.com/globalsign/mgo/pull/97)) -* Fix deadlock in cluster synchronisation ([details](https://github.com/globalsign/mgo/issues/120)) -* Implement `maxIdleTimeout` for pooled connections 
([details](https://github.com/globalsign/mgo/pull/116)) -* Connection pool waiting improvements ([details](https://github.com/globalsign/mgo/pull/115)) -* Fixes BSON encoding for `$in` and friends ([details](https://github.com/globalsign/mgo/pull/128)) -* Add BSON stream encoders ([details](https://github.com/globalsign/mgo/pull/127)) -* Add integer map key support in the BSON encoder ([details](https://github.com/globalsign/mgo/pull/140)) -* Support aggregation [collations](https://docs.mongodb.com/manual/reference/collation/) ([details](https://github.com/globalsign/mgo/pull/144)) -* Support encoding of inline struct references ([details](https://github.com/globalsign/mgo/pull/146)) -* Improved windows test harness ([details](https://github.com/globalsign/mgo/pull/158)) -* Improved type and nil handling in the BSON codec ([details](https://github.com/globalsign/mgo/pull/147/files), [more](https://github.com/globalsign/mgo/pull/181)) -* Separated network read/write timeouts ([details](https://github.com/globalsign/mgo/pull/161)) -* Expanded dial string configuration options ([details](https://github.com/globalsign/mgo/pull/162)) -* Implement MongoTimestamp ([details](https://github.com/globalsign/mgo/pull/171)) -* Support setting `writeConcern` for `findAndModify` operations ([details](https://github.com/globalsign/mgo/pull/185)) -* Add `ssl` to the dial string options ([details](https://github.com/globalsign/mgo/pull/184)) - - ---- - -### Thanks to -* @aksentyev -* @bachue -* @bozaro -* @BenLubar -* @carldunham -* @carter2000 -* @cedric-cordenier -* @cezarsa -* @DaytonG -* @ddspog -* @drichelson -* @dvic -* @eaglerayp -* @feliixx -* @fmpwizard -* @gazoon -* @gedge -* @gnawux -* @idy -* @jameinel -* @jefferickson -* @johnlawsharrison -* @KJTsanaktsidis -* @larrycinnabar -* @mapete94 -* @maxnoel -* @mcspring -* @Mei-Zhao -* @peterdeka -* @Reenjii -* @roobre -* @smoya -* @steve-gray -* @tbruyelle -* @wgallagher diff --git a/services/community/vendor/github.com/globalsign/mgo/auth.go b/services/community/vendor/github.com/globalsign/mgo/auth.go deleted file mode 100644 index 75d2ebc3..00000000 --- a/services/community/vendor/github.com/globalsign/mgo/auth.go +++ /dev/null @@ -1,467 +0,0 @@ -// mgo - MongoDB driver for Go -// -// Copyright (c) 2010-2012 - Gustavo Niemeyer -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are met: -// -// 1. Redistributions of source code must retain the above copyright notice, this -// list of conditions and the following disclaimer. -// 2. Redistributions in binary form must reproduce the above copyright notice, -// this list of conditions and the following disclaimer in the documentation -// and/or other materials provided with the distribution. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -// DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package mgo - -import ( - "crypto/md5" - "crypto/sha1" - "encoding/hex" - "errors" - "fmt" - "sync" - - "github.com/globalsign/mgo/bson" - "github.com/globalsign/mgo/internal/scram" -) - -type authCmd struct { - Authenticate int - - Nonce string - User string - Key string -} - -type startSaslCmd struct { - StartSASL int `bson:"startSasl"` -} - -type authResult struct { - ErrMsg string - Ok bool -} - -type getNonceCmd struct { - GetNonce int -} - -type getNonceResult struct { - Nonce string - Err string `bson:"$err"` - Code int -} - -type logoutCmd struct { - Logout int -} - -type saslCmd struct { - Start int `bson:"saslStart,omitempty"` - Continue int `bson:"saslContinue,omitempty"` - ConversationId int `bson:"conversationId,omitempty"` - Mechanism string `bson:"mechanism,omitempty"` - Payload []byte -} - -type saslResult struct { - Ok bool `bson:"ok"` - NotOk bool `bson:"code"` // Server <= 2.3.2 returns ok=1 & code>0 on errors (WTF?) - Done bool - - ConversationId int `bson:"conversationId"` - Payload []byte - ErrMsg string -} - -type saslStepper interface { - Step(serverData []byte) (clientData []byte, done bool, err error) - Close() -} - -func (socket *mongoSocket) getNonce() (nonce string, err error) { - socket.Lock() - for socket.cachedNonce == "" && socket.dead == nil { - debugf("Socket %p to %s: waiting for nonce", socket, socket.addr) - socket.gotNonce.Wait() - } - if socket.cachedNonce == "mongos" { - socket.Unlock() - return "", errors.New("Can't authenticate with mongos; see http://j.mp/mongos-auth") - } - debugf("Socket %p to %s: got nonce", socket, socket.addr) - nonce, err = socket.cachedNonce, socket.dead - socket.cachedNonce = "" - socket.Unlock() - if err != nil { - nonce = "" - } - return -} - -func (socket *mongoSocket) resetNonce() { - debugf("Socket %p to %s: requesting a new nonce", socket, socket.addr) - op := &queryOp{} - op.query = &getNonceCmd{GetNonce: 1} - op.collection = "admin.$cmd" - op.limit = -1 - op.replyFunc = func(err error, reply *replyOp, docNum int, docData []byte) { - if err != nil { - socket.kill(errors.New("getNonce: "+err.Error()), true) - return - } - result := &getNonceResult{} - err = bson.Unmarshal(docData, &result) - if err != nil { - socket.kill(errors.New("Failed to unmarshal nonce: "+err.Error()), true) - return - } - debugf("Socket %p to %s: nonce unmarshalled: %#v", socket, socket.addr, result) - if result.Code == 13390 { - // mongos doesn't yet support auth (see http://j.mp/mongos-auth) - result.Nonce = "mongos" - } else if result.Nonce == "" { - var msg string - if result.Err != "" { - msg = fmt.Sprintf("Got an empty nonce: %s (%d)", result.Err, result.Code) - } else { - msg = "Got an empty nonce" - } - socket.kill(errors.New(msg), true) - return - } - socket.Lock() - if socket.cachedNonce != "" { - socket.Unlock() - panic("resetNonce: nonce already cached") - } - socket.cachedNonce = result.Nonce - socket.gotNonce.Signal() - socket.Unlock() - } - err := socket.Query(op) - if err != nil { - 
socket.kill(errors.New("resetNonce: "+err.Error()), true) - } -} - -func (socket *mongoSocket) Login(cred Credential) error { - socket.Lock() - if cred.Mechanism == "" && socket.serverInfo.MaxWireVersion >= 3 { - cred.Mechanism = "SCRAM-SHA-1" - } - for _, sockCred := range socket.creds { - if sockCred == cred { - debugf("Socket %p to %s: login: db=%q user=%q (already logged in)", socket, socket.addr, cred.Source, cred.Username) - socket.Unlock() - return nil - } - } - if socket.dropLogout(cred) { - debugf("Socket %p to %s: login: db=%q user=%q (cached)", socket, socket.addr, cred.Source, cred.Username) - socket.creds = append(socket.creds, cred) - socket.Unlock() - return nil - } - socket.Unlock() - - debugf("Socket %p to %s: login: db=%q user=%q", socket, socket.addr, cred.Source, cred.Username) - - var err error - switch cred.Mechanism { - case "", "MONGODB-CR", "MONGO-CR": // Name changed to MONGODB-CR in SERVER-8501. - err = socket.loginClassic(cred) - case "PLAIN": - err = socket.loginPlain(cred) - case "MONGODB-X509": - err = socket.loginX509(cred) - default: - // Try SASL for everything else, if it is available. - err = socket.loginSASL(cred) - } - - if err != nil { - debugf("Socket %p to %s: login error: %s", socket, socket.addr, err) - } else { - debugf("Socket %p to %s: login successful", socket, socket.addr) - } - return err -} - -func (socket *mongoSocket) loginClassic(cred Credential) error { - // Note that this only works properly because this function is - // synchronous, which means the nonce won't get reset while we're - // using it and any other login requests will block waiting for a - // new nonce provided in the defer call below. - nonce, err := socket.getNonce() - if err != nil { - return err - } - defer socket.resetNonce() - - psum := md5.New() - psum.Write([]byte(cred.Username + ":mongo:" + cred.Password)) - - ksum := md5.New() - ksum.Write([]byte(nonce + cred.Username)) - ksum.Write([]byte(hex.EncodeToString(psum.Sum(nil)))) - - key := hex.EncodeToString(ksum.Sum(nil)) - - cmd := authCmd{Authenticate: 1, User: cred.Username, Nonce: nonce, Key: key} - res := authResult{} - return socket.loginRun(cred.Source, &cmd, &res, func() error { - if !res.Ok { - return errors.New(res.ErrMsg) - } - socket.Lock() - socket.dropAuth(cred.Source) - socket.creds = append(socket.creds, cred) - socket.Unlock() - return nil - }) -} - -type authX509Cmd struct { - Authenticate int - User string - Mechanism string -} - -func (socket *mongoSocket) loginX509(cred Credential) error { - cmd := authX509Cmd{Authenticate: 1, User: cred.Username, Mechanism: "MONGODB-X509"} - res := authResult{} - return socket.loginRun(cred.Source, &cmd, &res, func() error { - if !res.Ok { - return errors.New(res.ErrMsg) - } - socket.Lock() - socket.dropAuth(cred.Source) - socket.creds = append(socket.creds, cred) - socket.Unlock() - return nil - }) -} - -func (socket *mongoSocket) loginPlain(cred Credential) error { - cmd := saslCmd{Start: 1, Mechanism: "PLAIN", Payload: []byte("\x00" + cred.Username + "\x00" + cred.Password)} - res := authResult{} - return socket.loginRun(cred.Source, &cmd, &res, func() error { - if !res.Ok { - return errors.New(res.ErrMsg) - } - socket.Lock() - socket.dropAuth(cred.Source) - socket.creds = append(socket.creds, cred) - socket.Unlock() - return nil - }) -} - -func (socket *mongoSocket) loginSASL(cred Credential) error { - var sasl saslStepper - var err error - if cred.Mechanism == "SCRAM-SHA-1" { - // SCRAM is handled without external libraries. 
- sasl = saslNewScram(cred) - } else if len(cred.ServiceHost) > 0 { - sasl, err = saslNew(cred, cred.ServiceHost) - } else { - sasl, err = saslNew(cred, socket.Server().Addr) - } - if err != nil { - return err - } - defer sasl.Close() - - // The goal of this logic is to carry a locked socket until the - // local SASL step confirms the auth is valid; the socket needs to be - // locked so that concurrent action doesn't leave the socket in an - // auth state that doesn't reflect the operations that took place. - // As a simple case, imagine inverting login=>logout to logout=>login. - // - // The logic below works because the lock func isn't called concurrently. - locked := false - lock := func(b bool) { - if locked != b { - locked = b - if b { - socket.Lock() - } else { - socket.Unlock() - } - } - } - - lock(true) - defer lock(false) - - start := 1 - cmd := saslCmd{} - res := saslResult{} - for { - payload, done, err := sasl.Step(res.Payload) - if err != nil { - return err - } - if done && res.Done { - socket.dropAuth(cred.Source) - socket.creds = append(socket.creds, cred) - break - } - lock(false) - - cmd = saslCmd{ - Start: start, - Continue: 1 - start, - ConversationId: res.ConversationId, - Mechanism: cred.Mechanism, - Payload: payload, - } - start = 0 - err = socket.loginRun(cred.Source, &cmd, &res, func() error { - // See the comment on lock for why this is necessary. - lock(true) - if !res.Ok || res.NotOk { - return fmt.Errorf("server returned error on SASL authentication step: %s", res.ErrMsg) - } - return nil - }) - if err != nil { - return err - } - if done && res.Done { - socket.dropAuth(cred.Source) - socket.creds = append(socket.creds, cred) - break - } - } - - return nil -} - -func saslNewScram(cred Credential) *saslScram { - credsum := md5.New() - credsum.Write([]byte(cred.Username + ":mongo:" + cred.Password)) - client := scram.NewClient(sha1.New, cred.Username, hex.EncodeToString(credsum.Sum(nil))) - return &saslScram{cred: cred, client: client} -} - -type saslScram struct { - cred Credential - client *scram.Client -} - -func (s *saslScram) Close() {} - -func (s *saslScram) Step(serverData []byte) (clientData []byte, done bool, err error) { - more := s.client.Step(serverData) - return s.client.Out(), !more, s.client.Err() -} - -func (socket *mongoSocket) loginRun(db string, query, result interface{}, f func() error) error { - var mutex sync.Mutex - var replyErr error - mutex.Lock() - - op := queryOp{} - op.query = query - op.collection = db + ".$cmd" - op.limit = -1 - op.replyFunc = func(err error, reply *replyOp, docNum int, docData []byte) { - defer mutex.Unlock() - - if err != nil { - replyErr = err - return - } - - err = bson.Unmarshal(docData, result) - if err != nil { - replyErr = err - } else { - // Must handle this within the read loop for the socket, so - // that concurrent login requests are properly ordered. - replyErr = f() - } - } - - err := socket.Query(&op) - if err != nil { - return err - } - mutex.Lock() // Wait. - return replyErr -} - -func (socket *mongoSocket) Logout(db string) { - socket.Lock() - cred, found := socket.dropAuth(db) - if found { - debugf("Socket %p to %s: logout: db=%q (flagged)", socket, socket.addr, db) - socket.logout = append(socket.logout, cred) - } - socket.Unlock() -} - -func (socket *mongoSocket) LogoutAll() { - socket.Lock() - if l := len(socket.creds); l > 0 { - debugf("Socket %p to %s: logout all (flagged %d)", socket, socket.addr, l) - socket.logout = append(socket.logout, socket.creds...) 
- socket.creds = socket.creds[0:0] - } - socket.Unlock() -} - -func (socket *mongoSocket) flushLogout() (ops []interface{}) { - socket.Lock() - if l := len(socket.logout); l > 0 { - debugf("Socket %p to %s: logout all (flushing %d)", socket, socket.addr, l) - for i := 0; i != l; i++ { - op := queryOp{} - op.query = &logoutCmd{1} - op.collection = socket.logout[i].Source + ".$cmd" - op.limit = -1 - ops = append(ops, &op) - } - socket.logout = socket.logout[0:0] - } - socket.Unlock() - return -} - -func (socket *mongoSocket) dropAuth(db string) (cred Credential, found bool) { - for i, sockCred := range socket.creds { - if sockCred.Source == db { - copy(socket.creds[i:], socket.creds[i+1:]) - socket.creds = socket.creds[:len(socket.creds)-1] - return sockCred, true - } - } - return cred, false -} - -func (socket *mongoSocket) dropLogout(cred Credential) (found bool) { - for i, sockCred := range socket.logout { - if sockCred == cred { - copy(socket.logout[i:], socket.logout[i+1:]) - socket.logout = socket.logout[:len(socket.logout)-1] - return true - } - } - return false -} diff --git a/services/community/vendor/github.com/globalsign/mgo/bson/LICENSE b/services/community/vendor/github.com/globalsign/mgo/bson/LICENSE deleted file mode 100644 index 89032601..00000000 --- a/services/community/vendor/github.com/globalsign/mgo/bson/LICENSE +++ /dev/null @@ -1,25 +0,0 @@ -BSON library for Go - -Copyright (c) 2010-2012 - Gustavo Niemeyer - -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/services/community/vendor/github.com/globalsign/mgo/bson/README.md b/services/community/vendor/github.com/globalsign/mgo/bson/README.md deleted file mode 100644 index 5c5819e6..00000000 --- a/services/community/vendor/github.com/globalsign/mgo/bson/README.md +++ /dev/null @@ -1,12 +0,0 @@ -[![GoDoc](https://godoc.org/github.com/globalsign/mgo/bson?status.svg)](https://godoc.org/github.com/globalsign/mgo/bson) - -An Implementation of BSON for Go --------------------------------- - -Package bson is an implementation of the [BSON specification](http://bsonspec.org) for Go. - -While the BSON package implements the BSON spec as faithfully as possible, there -is some MongoDB specific behaviour (such as map keys `$in`, `$all`, etc) in the -`bson` package. 
The priority is for backwards compatibility for the `mgo` -driver, though fixes for obviously buggy behaviour is welcome (and features, etc -behind feature flags). diff --git a/services/community/vendor/github.com/globalsign/mgo/bson/bson.go b/services/community/vendor/github.com/globalsign/mgo/bson/bson.go deleted file mode 100644 index eb87ef62..00000000 --- a/services/community/vendor/github.com/globalsign/mgo/bson/bson.go +++ /dev/null @@ -1,836 +0,0 @@ -// BSON library for Go -// -// Copyright (c) 2010-2012 - Gustavo Niemeyer -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are met: -// -// 1. Redistributions of source code must retain the above copyright notice, this -// list of conditions and the following disclaimer. -// 2. Redistributions in binary form must reproduce the above copyright notice, -// this list of conditions and the following disclaimer in the documentation -// and/or other materials provided with the distribution. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Package bson is an implementation of the BSON specification for Go: -// -// http://bsonspec.org -// -// It was created as part of the mgo MongoDB driver for Go, but is standalone -// and may be used on its own without the driver. -package bson - -import ( - "bytes" - "crypto/md5" - "crypto/rand" - "encoding/binary" - "encoding/hex" - "encoding/json" - "errors" - "fmt" - "io" - "math" - "os" - "reflect" - "runtime" - "strings" - "sync" - "sync/atomic" - "time" -) - -//go:generate go run bson_corpus_spec_test_generator.go - -// -------------------------------------------------------------------------- -// The public API. - -// Element types constants from BSON specification. 
-const ( - ElementFloat64 byte = 0x01 - ElementString byte = 0x02 - ElementDocument byte = 0x03 - ElementArray byte = 0x04 - ElementBinary byte = 0x05 - Element06 byte = 0x06 - ElementObjectId byte = 0x07 - ElementBool byte = 0x08 - ElementDatetime byte = 0x09 - ElementNil byte = 0x0A - ElementRegEx byte = 0x0B - ElementDBPointer byte = 0x0C - ElementJavaScriptWithoutScope byte = 0x0D - ElementSymbol byte = 0x0E - ElementJavaScriptWithScope byte = 0x0F - ElementInt32 byte = 0x10 - ElementTimestamp byte = 0x11 - ElementInt64 byte = 0x12 - ElementDecimal128 byte = 0x13 - ElementMinKey byte = 0xFF - ElementMaxKey byte = 0x7F - - BinaryGeneric byte = 0x00 - BinaryFunction byte = 0x01 - BinaryBinaryOld byte = 0x02 - BinaryUUIDOld byte = 0x03 - BinaryUUID byte = 0x04 - BinaryMD5 byte = 0x05 - BinaryUserDefined byte = 0x80 -) - -// Getter interface: a value implementing the bson.Getter interface will have its GetBSON -// method called when the given value has to be marshalled, and the result -// of this method will be marshaled in place of the actual object. -// -// If GetBSON returns return a non-nil error, the marshalling procedure -// will stop and error out with the provided value. -type Getter interface { - GetBSON() (interface{}, error) -} - -// Setter interface: a value implementing the bson.Setter interface will receive the BSON -// value via the SetBSON method during unmarshaling, and the object -// itself will not be changed as usual. -// -// If setting the value works, the method should return nil or alternatively -// bson.ErrSetZero to set the respective field to its zero value (nil for -// pointer types). If SetBSON returns a value of type bson.TypeError, the -// BSON value will be omitted from a map or slice being decoded and the -// unmarshalling will continue. If it returns any other non-nil error, the -// unmarshalling procedure will stop and error out with the provided value. -// -// This interface is generally useful in pointer receivers, since the method -// will want to change the receiver. A type field that implements the Setter -// interface doesn't have to be a pointer, though. -// -// Unlike the usual behavior, unmarshalling onto a value that implements a -// Setter interface will NOT reset the value to its zero state. This allows -// the value to decide by itself how to be unmarshalled. -// -// For example: -// -// type MyString string -// -// func (s *MyString) SetBSON(raw bson.Raw) error { -// return raw.Unmarshal(s) -// } -// -type Setter interface { - SetBSON(raw Raw) error -} - -// ErrSetZero may be returned from a SetBSON method to have the value set to -// its respective zero value. When used in pointer values, this will set the -// field to nil rather than to the pre-allocated value. -var ErrSetZero = errors.New("set to zero") - -// M is a convenient alias for a map[string]interface{} map, useful for -// dealing with BSON in a native way. For instance: -// -// bson.M{"a": 1, "b": true} -// -// There's no special handling for this type in addition to what's done anyway -// for an equivalent map type. Elements in the map will be dumped in an -// undefined ordered. See also the bson.D type for an ordered alternative. -type M map[string]interface{} - -// D represents a BSON document containing ordered elements. For example: -// -// bson.D{{"a", 1}, {"b", true}} -// -// In some situations, such as when creating indexes for MongoDB, the order in -// which the elements are defined is important. 
If the order is not important, -// using a map is generally more comfortable. See bson.M and bson.RawD. -type D []DocElem - -// DocElem is an element of the bson.D document representation. -type DocElem struct { - Name string - Value interface{} -} - -// Map returns a map out of the ordered element name/value pairs in d. -func (d D) Map() (m M) { - m = make(M, len(d)) - for _, item := range d { - m[item.Name] = item.Value - } - return m -} - -// The Raw type represents raw unprocessed BSON documents and elements. -// Kind is the kind of element as defined per the BSON specification, and -// Data is the raw unprocessed data for the respective element. -// Using this type it is possible to unmarshal or marshal values partially. -// -// Relevant documentation: -// -// http://bsonspec.org/#/specification -// -type Raw struct { - Kind byte - Data []byte -} - -// RawD represents a BSON document containing raw unprocessed elements. -// This low-level representation may be useful when lazily processing -// documents of uncertain content, or when manipulating the raw content -// documents in general. -type RawD []RawDocElem - -// RawDocElem elements of RawD type. -type RawDocElem struct { - Name string - Value Raw -} - -// ObjectId is a unique ID identifying a BSON value. It must be exactly 12 bytes -// long. MongoDB objects by default have such a property set in their "_id" -// property. -// -// http://www.mongodb.org/display/DOCS/Object+Ids -type ObjectId string - -// ObjectIdHex returns an ObjectId from the provided hex representation. -// Calling this function with an invalid hex representation will -// cause a runtime panic. See the IsObjectIdHex function. -func ObjectIdHex(s string) ObjectId { - d, err := hex.DecodeString(s) - if err != nil || len(d) != 12 { - panic(fmt.Sprintf("invalid input to ObjectIdHex: %q", s)) - } - return ObjectId(d) -} - -// IsObjectIdHex returns whether s is a valid hex representation of -// an ObjectId. See the ObjectIdHex function. -func IsObjectIdHex(s string) bool { - if len(s) != 24 { - return false - } - _, err := hex.DecodeString(s) - return err == nil -} - -// objectIdCounter is atomically incremented when generating a new ObjectId -// using NewObjectId() function. It's used as a counter part of an id. -var objectIdCounter = readRandomUint32() - -// readRandomUint32 returns a random objectIdCounter. -func readRandomUint32() uint32 { - var b [4]byte - _, err := io.ReadFull(rand.Reader, b[:]) - if err != nil { - panic(fmt.Errorf("cannot read random object id: %v", err)) - } - return uint32((uint32(b[0]) << 0) | (uint32(b[1]) << 8) | (uint32(b[2]) << 16) | (uint32(b[3]) << 24)) -} - -// machineId stores machine id generated once and used in subsequent calls -// to NewObjectId function. -var machineId = readMachineId() -var processId = os.Getpid() - -// readMachineId generates and returns a machine id. -// If this function fails to get the hostname it will cause a runtime error. -func readMachineId() []byte { - var sum [3]byte - id := sum[:] - hostname, err1 := os.Hostname() - if err1 != nil { - _, err2 := io.ReadFull(rand.Reader, id) - if err2 != nil { - panic(fmt.Errorf("cannot get hostname: %v; %v", err1, err2)) - } - return id - } - hw := md5.New() - hw.Write([]byte(hostname)) - copy(id, hw.Sum(nil)) - return id -} - -// NewObjectId returns a new unique ObjectId. 
-func NewObjectId() ObjectId { - var b [12]byte - // Timestamp, 4 bytes, big endian - binary.BigEndian.PutUint32(b[:], uint32(time.Now().Unix())) - // Machine, first 3 bytes of md5(hostname) - b[4] = machineId[0] - b[5] = machineId[1] - b[6] = machineId[2] - // Pid, 2 bytes, specs don't specify endianness, but we use big endian. - b[7] = byte(processId >> 8) - b[8] = byte(processId) - // Increment, 3 bytes, big endian - i := atomic.AddUint32(&objectIdCounter, 1) - b[9] = byte(i >> 16) - b[10] = byte(i >> 8) - b[11] = byte(i) - return ObjectId(b[:]) -} - -// NewObjectIdWithTime returns a dummy ObjectId with the timestamp part filled -// with the provided number of seconds from epoch UTC, and all other parts -// filled with zeroes. It's not safe to insert a document with an id generated -// by this method, it is useful only for queries to find documents with ids -// generated before or after the specified timestamp. -func NewObjectIdWithTime(t time.Time) ObjectId { - var b [12]byte - binary.BigEndian.PutUint32(b[:4], uint32(t.Unix())) - return ObjectId(string(b[:])) -} - -// String returns a hex string representation of the id. -// Example: ObjectIdHex("4d88e15b60f486e428412dc9"). -func (id ObjectId) String() string { - return fmt.Sprintf(`ObjectIdHex("%x")`, string(id)) -} - -// Hex returns a hex representation of the ObjectId. -func (id ObjectId) Hex() string { - return hex.EncodeToString([]byte(id)) -} - -// MarshalJSON turns a bson.ObjectId into a json.Marshaller. -func (id ObjectId) MarshalJSON() ([]byte, error) { - return []byte(fmt.Sprintf(`"%x"`, string(id))), nil -} - -var nullBytes = []byte("null") - -// UnmarshalJSON turns *bson.ObjectId into a json.Unmarshaller. -func (id *ObjectId) UnmarshalJSON(data []byte) error { - if len(data) > 0 && (data[0] == '{' || data[0] == 'O') { - var v struct { - Id json.RawMessage `json:"$oid"` - Func struct { - Id json.RawMessage - } `json:"$oidFunc"` - } - err := jdec(data, &v) - if err == nil { - if len(v.Id) > 0 { - data = []byte(v.Id) - } else { - data = []byte(v.Func.Id) - } - } - } - if len(data) == 2 && data[0] == '"' && data[1] == '"' || bytes.Equal(data, nullBytes) { - *id = "" - return nil - } - if len(data) != 26 || data[0] != '"' || data[25] != '"' { - return fmt.Errorf("invalid ObjectId in JSON: %s", string(data)) - } - var buf [12]byte - _, err := hex.Decode(buf[:], data[1:25]) - if err != nil { - return fmt.Errorf("invalid ObjectId in JSON: %s (%s)", string(data), err) - } - *id = ObjectId(string(buf[:])) - return nil -} - -// MarshalText turns bson.ObjectId into an encoding.TextMarshaler. -func (id ObjectId) MarshalText() ([]byte, error) { - return []byte(fmt.Sprintf("%x", string(id))), nil -} - -// UnmarshalText turns *bson.ObjectId into an encoding.TextUnmarshaler. -func (id *ObjectId) UnmarshalText(data []byte) error { - if len(data) == 1 && data[0] == ' ' || len(data) == 0 { - *id = "" - return nil - } - if len(data) != 24 { - return fmt.Errorf("invalid ObjectId: %s", data) - } - var buf [12]byte - _, err := hex.Decode(buf[:], data[:]) - if err != nil { - return fmt.Errorf("invalid ObjectId: %s (%s)", data, err) - } - *id = ObjectId(string(buf[:])) - return nil -} - -// Valid returns true if id is valid. A valid id must contain exactly 12 bytes. -func (id ObjectId) Valid() bool { - return len(id) == 12 -} - -// byteSlice returns byte slice of id from start to end. -// Calling this function with an invalid id will cause a runtime panic. 
-func (id ObjectId) byteSlice(start, end int) []byte { - if len(id) != 12 { - panic(fmt.Sprintf("invalid ObjectId: %q", string(id))) - } - return []byte(string(id)[start:end]) -} - -// Time returns the timestamp part of the id. -// It's a runtime error to call this method with an invalid id. -func (id ObjectId) Time() time.Time { - // First 4 bytes of ObjectId is 32-bit big-endian seconds from epoch. - secs := int64(binary.BigEndian.Uint32(id.byteSlice(0, 4))) - return time.Unix(secs, 0) -} - -// Machine returns the 3-byte machine id part of the id. -// It's a runtime error to call this method with an invalid id. -func (id ObjectId) Machine() []byte { - return id.byteSlice(4, 7) -} - -// Pid returns the process id part of the id. -// It's a runtime error to call this method with an invalid id. -func (id ObjectId) Pid() uint16 { - return binary.BigEndian.Uint16(id.byteSlice(7, 9)) -} - -// Counter returns the incrementing value part of the id. -// It's a runtime error to call this method with an invalid id. -func (id ObjectId) Counter() int32 { - b := id.byteSlice(9, 12) - // Counter is stored as big-endian 3-byte value - return int32(uint32(b[0])<<16 | uint32(b[1])<<8 | uint32(b[2])) -} - -// The Symbol type is similar to a string and is used in languages with a -// distinct symbol type. -type Symbol string - -// Now returns the current time with millisecond precision. MongoDB stores -// timestamps with the same precision, so a Time returned from this method -// will not change after a roundtrip to the database. That's the only reason -// why this function exists. Using the time.Now function also works fine -// otherwise. -func Now() time.Time { - return time.Unix(0, time.Now().UnixNano()/1e6*1e6) -} - -// MongoTimestamp is a special internal type used by MongoDB that for some -// strange reason has its own datatype defined in BSON. -type MongoTimestamp int64 - -// Time returns the time part of ts which is stored with second precision. -func (ts MongoTimestamp) Time() time.Time { - return time.Unix(int64(uint64(ts)>>32), 0) -} - -// Counter returns the counter part of ts. -func (ts MongoTimestamp) Counter() uint32 { - return uint32(ts) -} - -// NewMongoTimestamp creates a timestamp using the given -// date `t` (with second precision) and counter `c` (unique for `t`). -// -// Returns an error if time `t` is not between 1970-01-01T00:00:00Z -// and 2106-02-07T06:28:15Z (inclusive). -// -// Note that two MongoTimestamps should never have the same (time, counter) combination: -// the caller must ensure the counter `c` is increased if creating multiple MongoTimestamp -// values for the same time `t` (ignoring fractions of seconds). -func NewMongoTimestamp(t time.Time, c uint32) (MongoTimestamp, error) { - u := t.Unix() - if u < 0 || u > math.MaxUint32 { - return -1, errors.New("invalid value for time") - } - - i := int64(u<<32 | int64(c)) - - return MongoTimestamp(i), nil -} - -type orderKey int64 - -// MaxKey is a special value that compares higher than all other possible BSON -// values in a MongoDB database. -var MaxKey = orderKey(1<<63 - 1) - -// MinKey is a special value that compares lower than all other possible BSON -// values in a MongoDB database. -var MinKey = orderKey(-1 << 63) - -type undefined struct{} - -// Undefined represents the undefined BSON value. -var Undefined undefined - -// Binary is a representation for non-standard binary values. Any kind should -// work, but the following are known as of this writing: -// -// 0x00 - Generic. 
This is decoded as []byte(data), not Binary{0x00, data}. -// 0x01 - Function (!?) -// 0x02 - Obsolete generic. -// 0x03 - UUID -// 0x05 - MD5 -// 0x80 - User defined. -// -type Binary struct { - Kind byte - Data []byte -} - -// RegEx represents a regular expression. The Options field may contain -// individual characters defining the way in which the pattern should be -// applied, and must be sorted. Valid options as of this writing are 'i' for -// case insensitive matching, 'm' for multi-line matching, 'x' for verbose -// mode, 'l' to make \w, \W, and similar be locale-dependent, 's' for dot-all -// mode (a '.' matches everything), and 'u' to make \w, \W, and similar match -// unicode. The value of the Options parameter is not verified before being -// marshaled into the BSON format. -type RegEx struct { - Pattern string - Options string -} - -// JavaScript is a type that holds JavaScript code. If Scope is non-nil, it -// will be marshaled as a mapping from identifiers to values that may be -// used when evaluating the provided Code. -type JavaScript struct { - Code string - Scope interface{} -} - -// DBPointer refers to a document id in a namespace. -// -// This type is deprecated in the BSON specification and should not be used -// except for backwards compatibility with ancient applications. -type DBPointer struct { - Namespace string - Id ObjectId -} - -const initialBufferSize = 64 - -func handleErr(err *error) { - if r := recover(); r != nil { - if _, ok := r.(runtime.Error); ok { - panic(r) - } else if _, ok := r.(externalPanic); ok { - panic(r) - } else if s, ok := r.(string); ok { - *err = errors.New(s) - } else if e, ok := r.(error); ok { - *err = e - } else { - panic(r) - } - } -} - -// Marshal serializes the in value, which may be a map or a struct value. -// In the case of struct values, only exported fields will be serialized, -// and the order of serialized fields will match that of the struct itself. -// The lowercased field name is used as the key for each exported field, -// but this behavior may be changed using the respective field tag. -// The tag may also contain flags to tweak the marshalling behavior for -// the field. The tag formats accepted are: -// -// "[][,[,]]" -// -// `(...) bson:"[][,[,]]" (...)` -// -// The following flags are currently supported: -// -// omitempty Only include the field if it's not set to the zero -// value for the type or to empty slices or maps. -// -// minsize Marshal an int64 value as an int32, if that's feasible -// while preserving the numeric value. -// -// inline Inline the field, which must be a struct or a map, -// causing all of its fields or keys to be processed as if -// they were part of the outer struct. For maps, keys must -// not conflict with the bson keys of other struct fields. -// -// Some examples: -// -// type T struct { -// A bool -// B int "myb" -// C string "myc,omitempty" -// D string `bson:",omitempty" json:"jsonkey"` -// E int64 ",minsize" -// F int64 "myf,omitempty,minsize" -// } -// -func Marshal(in interface{}) (out []byte, err error) { - return MarshalBuffer(in, make([]byte, 0, initialBufferSize)) -} - -// MarshalBuffer behaves the same way as Marshal, except that instead of -// allocating a new byte slice it tries to use the received byte slice and -// only allocates more memory if necessary to fit the marshaled value. 
-func MarshalBuffer(in interface{}, buf []byte) (out []byte, err error) { - defer handleErr(&err) - e := &encoder{buf} - e.addDoc(reflect.ValueOf(in)) - return e.out, nil -} - -// Unmarshal deserializes data from in into the out value. The out value -// must be a map, a pointer to a struct, or a pointer to a bson.D value. -// In the case of struct values, only exported fields will be deserialized. -// The lowercased field name is used as the key for each exported field, -// but this behavior may be changed using the respective field tag. -// The tag may also contain flags to tweak the marshalling behavior for -// the field. The tag formats accepted are: -// -// "[][,[,]]" -// -// `(...) bson:"[][,[,]]" (...)` -// -// The following flags are currently supported during unmarshal (see the -// Marshal method for other flags): -// -// inline Inline the field, which must be a struct or a map. -// Inlined structs are handled as if its fields were part -// of the outer struct. An inlined map causes keys that do -// not match any other struct field to be inserted in the -// map rather than being discarded as usual. -// -// The target field or element types of out may not necessarily match -// the BSON values of the provided data. The following conversions are -// made automatically: -// -// - Numeric types are converted if at least the integer part of the -// value would be preserved correctly -// - Bools are converted to numeric types as 1 or 0 -// - Numeric types are converted to bools as true if not 0 or false otherwise -// - Binary and string BSON data is converted to a string, array or byte slice -// -// If the value would not fit the type and cannot be converted, it's -// silently skipped. -// -// Pointer values are initialized when necessary. -func Unmarshal(in []byte, out interface{}) (err error) { - if raw, ok := out.(*Raw); ok { - raw.Kind = 3 - raw.Data = in - return nil - } - defer handleErr(&err) - v := reflect.ValueOf(out) - switch v.Kind() { - case reflect.Ptr: - fallthrough - case reflect.Map: - d := newDecoder(in) - d.readDocTo(v) - if d.i < len(d.in) { - return errors.New("document is corrupted") - } - case reflect.Struct: - return errors.New("unmarshal can't deal with struct values. Use a pointer") - default: - return errors.New("unmarshal needs a map or a pointer to a struct") - } - return nil -} - -// Unmarshal deserializes raw into the out value. If the out value type -// is not compatible with raw, a *bson.TypeError is returned. -// -// See the Unmarshal function documentation for more details on the -// unmarshalling process. -func (raw Raw) Unmarshal(out interface{}) (err error) { - defer handleErr(&err) - v := reflect.ValueOf(out) - switch v.Kind() { - case reflect.Ptr: - v = v.Elem() - fallthrough - case reflect.Map: - d := newDecoder(raw.Data) - good := d.readElemTo(v, raw.Kind) - if !good { - return &TypeError{v.Type(), raw.Kind} - } - case reflect.Struct: - return errors.New("raw Unmarshal can't deal with struct values. 
Use a pointer") - default: - return errors.New("raw Unmarshal needs a map or a valid pointer") - } - return nil -} - -// TypeError store details for type error occuring -// during unmarshaling -type TypeError struct { - Type reflect.Type - Kind byte -} - -func (e *TypeError) Error() string { - return fmt.Sprintf("BSON kind 0x%02x isn't compatible with type %s", e.Kind, e.Type.String()) -} - -// -------------------------------------------------------------------------- -// Maintain a mapping of keys to structure field indexes - -type structInfo struct { - FieldsMap map[string]fieldInfo - FieldsList []fieldInfo - InlineMap int - Zero reflect.Value -} - -type fieldInfo struct { - Key string - Num int - OmitEmpty bool - MinSize bool - Inline []int -} - -var structMap = make(map[reflect.Type]*structInfo) -var structMapMutex sync.RWMutex - -type externalPanic string - -func (e externalPanic) String() string { - return string(e) -} - -func getStructInfo(st reflect.Type) (*structInfo, error) { - structMapMutex.RLock() - sinfo, found := structMap[st] - structMapMutex.RUnlock() - if found { - return sinfo, nil - } - n := st.NumField() - fieldsMap := make(map[string]fieldInfo) - fieldsList := make([]fieldInfo, 0, n) - inlineMap := -1 - for i := 0; i != n; i++ { - field := st.Field(i) - if field.PkgPath != "" && !field.Anonymous { - continue // Private field - } - - info := fieldInfo{Num: i} - - tag := field.Tag.Get("bson") - - // Fall-back to JSON struct tag, if feature flag is set. - if tag == "" && useJSONTagFallback { - tag = field.Tag.Get("json") - } - - // If there's no bson/json tag available. - if tag == "" { - // If there's no tag, and also no tag: value splits (i.e. no colon) - // then assume the entire tag is the value - if strings.Index(string(field.Tag), ":") < 0 { - tag = string(field.Tag) - } - } - - if tag == "-" { - continue - } - - inline := false - fields := strings.Split(tag, ",") - if len(fields) > 1 { - for _, flag := range fields[1:] { - switch flag { - case "omitempty": - info.OmitEmpty = true - case "minsize": - info.MinSize = true - case "inline": - inline = true - default: - msg := fmt.Sprintf("Unsupported flag %q in tag %q of type %s", flag, tag, st) - panic(externalPanic(msg)) - } - } - tag = fields[0] - } - - if inline { - switch field.Type.Kind() { - case reflect.Map: - if inlineMap >= 0 { - return nil, errors.New("Multiple ,inline maps in struct " + st.String()) - } - if field.Type.Key() != reflect.TypeOf("") { - return nil, errors.New("Option ,inline needs a map with string keys in struct " + st.String()) - } - inlineMap = info.Num - case reflect.Ptr: - // allow only pointer to struct - if kind := field.Type.Elem().Kind(); kind != reflect.Struct { - return nil, errors.New("Option ,inline allows a pointer only to a struct, was given pointer to " + kind.String()) - } - - field.Type = field.Type.Elem() - fallthrough - case reflect.Struct: - sinfo, err := getStructInfo(field.Type) - if err != nil { - return nil, err - } - for _, finfo := range sinfo.FieldsList { - if _, found := fieldsMap[finfo.Key]; found { - msg := "Duplicated key '" + finfo.Key + "' in struct " + st.String() - return nil, errors.New(msg) - } - if finfo.Inline == nil { - finfo.Inline = []int{i, finfo.Num} - } else { - finfo.Inline = append([]int{i}, finfo.Inline...) 
- } - fieldsMap[finfo.Key] = finfo - fieldsList = append(fieldsList, finfo) - } - default: - panic("Option ,inline needs a struct value or a pointer to a struct or map field") - } - continue - } - - if tag != "" { - info.Key = tag - } else { - info.Key = strings.ToLower(field.Name) - } - - if _, found = fieldsMap[info.Key]; found { - msg := "Duplicated key '" + info.Key + "' in struct " + st.String() - return nil, errors.New(msg) - } - - fieldsList = append(fieldsList, info) - fieldsMap[info.Key] = info - } - sinfo = &structInfo{ - fieldsMap, - fieldsList, - inlineMap, - reflect.New(st).Elem(), - } - structMapMutex.Lock() - structMap[st] = sinfo - structMapMutex.Unlock() - return sinfo, nil -} diff --git a/services/community/vendor/github.com/globalsign/mgo/bson/compatibility.go b/services/community/vendor/github.com/globalsign/mgo/bson/compatibility.go deleted file mode 100644 index 66efd465..00000000 --- a/services/community/vendor/github.com/globalsign/mgo/bson/compatibility.go +++ /dev/null @@ -1,29 +0,0 @@ -package bson - -// Current state of the JSON tag fallback option. -var useJSONTagFallback = false -var useRespectNilValues = false - -// SetJSONTagFallback enables or disables the JSON-tag fallback for structure tagging. When this is enabled, structures -// without BSON tags on a field will fall-back to using the JSON tag (if present). -func SetJSONTagFallback(state bool) { - useJSONTagFallback = state -} - -// JSONTagFallbackState returns the current status of the JSON tag fallback compatability option. See SetJSONTagFallback -// for more information. -func JSONTagFallbackState() bool { - return useJSONTagFallback -} - -// SetRespectNilValues enables or disables serializing nil slices or maps to `null` values. -// In other words it enables `encoding/json` compatible behaviour. -func SetRespectNilValues(state bool) { - useRespectNilValues = state -} - -// RespectNilValuesState returns the current status of the JSON nil slices and maps fallback compatibility option. -// See SetRespectNilValues for more information. -func RespectNilValuesState() bool { - return useRespectNilValues -} diff --git a/services/community/vendor/github.com/globalsign/mgo/bson/decimal.go b/services/community/vendor/github.com/globalsign/mgo/bson/decimal.go deleted file mode 100644 index 672ba182..00000000 --- a/services/community/vendor/github.com/globalsign/mgo/bson/decimal.go +++ /dev/null @@ -1,312 +0,0 @@ -// BSON library for Go -// -// Copyright (c) 2010-2012 - Gustavo Niemeyer -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are met: -// -// 1. Redistributions of source code must retain the above copyright notice, this -// list of conditions and the following disclaimer. -// 2. Redistributions in binary form must reproduce the above copyright notice, -// this list of conditions and the following disclaimer in the documentation -// and/or other materials provided with the distribution. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -// DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package bson - -import ( - "fmt" - "strconv" - "strings" -) - -// Decimal128 holds decimal128 BSON values. -type Decimal128 struct { - h, l uint64 -} - -func (d Decimal128) String() string { - var pos int // positive sign - var e int // exponent - var h, l uint64 // significand high/low - - if d.h>>63&1 == 0 { - pos = 1 - } - - switch d.h >> 58 & (1<<5 - 1) { - case 0x1F: - return "NaN" - case 0x1E: - return "-Inf"[pos:] - } - - l = d.l - if d.h>>61&3 == 3 { - // Bits: 1*sign 2*ignored 14*exponent 111*significand. - // Implicit 0b100 prefix in significand. - e = int(d.h>>47&(1<<14-1)) - 6176 - //h = 4<<47 | d.h&(1<<47-1) - // Spec says all of these values are out of range. - h, l = 0, 0 - } else { - // Bits: 1*sign 14*exponent 113*significand - e = int(d.h>>49&(1<<14-1)) - 6176 - h = d.h & (1<<49 - 1) - } - - // Would be handled by the logic below, but that's trivial and common. - if h == 0 && l == 0 && e == 0 { - return "-0"[pos:] - } - - var repr [48]byte // Loop 5 times over 9 digits plus dot, negative sign, and leading zero. - var last = len(repr) - var i = len(repr) - var dot = len(repr) + e - var rem uint32 -Loop: - for d9 := 0; d9 < 5; d9++ { - h, l, rem = divmod(h, l, 1e9) - for d1 := 0; d1 < 9; d1++ { - // Handle "-0.0", "0.00123400", "-1.00E-6", "1.050E+3", etc. - if i < len(repr) && (dot == i || l == 0 && h == 0 && rem > 0 && rem < 10 && (dot < i-6 || e > 0)) { - e += len(repr) - i - i-- - repr[i] = '.' - last = i - 1 - dot = len(repr) // Unmark. - } - c := '0' + byte(rem%10) - rem /= 10 - i-- - repr[i] = c - // Handle "0E+3", "1E+3", etc. - if l == 0 && h == 0 && rem == 0 && i == len(repr)-1 && (dot < i-5 || e > 0) { - last = i - break Loop - } - if c != '0' { - last = i - } - // Break early. Works without it, but why. 
- if dot > i && l == 0 && h == 0 && rem == 0 { - break Loop - } - } - } - repr[last-1] = '-' - last-- - - if e > 0 { - return string(repr[last+pos:]) + "E+" + strconv.Itoa(e) - } - if e < 0 { - return string(repr[last+pos:]) + "E" + strconv.Itoa(e) - } - return string(repr[last+pos:]) -} - -func divmod(h, l uint64, div uint32) (qh, ql uint64, rem uint32) { - div64 := uint64(div) - a := h >> 32 - aq := a / div64 - ar := a % div64 - b := ar<<32 + h&(1<<32-1) - bq := b / div64 - br := b % div64 - c := br<<32 + l>>32 - cq := c / div64 - cr := c % div64 - d := cr<<32 + l&(1<<32-1) - dq := d / div64 - dr := d % div64 - return (aq<<32 | bq), (cq<<32 | dq), uint32(dr) -} - -var dNaN = Decimal128{0x1F << 58, 0} -var dPosInf = Decimal128{0x1E << 58, 0} -var dNegInf = Decimal128{0x3E << 58, 0} - -func dErr(s string) (Decimal128, error) { - return dNaN, fmt.Errorf("cannot parse %q as a decimal128", s) -} - -// ParseDecimal128 parse a string and return the corresponding value as -// a decimal128 -func ParseDecimal128(s string) (Decimal128, error) { - orig := s - if s == "" { - return dErr(orig) - } - neg := s[0] == '-' - if neg || s[0] == '+' { - s = s[1:] - } - - if (len(s) == 3 || len(s) == 8) && (s[0] == 'N' || s[0] == 'n' || s[0] == 'I' || s[0] == 'i') { - if s == "NaN" || s == "nan" || strings.EqualFold(s, "nan") { - return dNaN, nil - } - if s == "Inf" || s == "inf" || strings.EqualFold(s, "inf") || strings.EqualFold(s, "infinity") { - if neg { - return dNegInf, nil - } - return dPosInf, nil - } - return dErr(orig) - } - - var h, l uint64 - var e int - - var add, ovr uint32 - var mul uint32 = 1 - var dot = -1 - var digits = 0 - var i = 0 - for i < len(s) { - c := s[i] - if mul == 1e9 { - h, l, ovr = muladd(h, l, mul, add) - mul, add = 1, 0 - if ovr > 0 || h&((1<<15-1)<<49) > 0 { - return dErr(orig) - } - } - if c >= '0' && c <= '9' { - i++ - if c > '0' || digits > 0 { - digits++ - } - if digits > 34 { - if c == '0' { - // Exact rounding. - e++ - continue - } - return dErr(orig) - } - mul *= 10 - add *= 10 - add += uint32(c - '0') - continue - } - if c == '.' { - i++ - if dot >= 0 || i == 1 && len(s) == 1 { - return dErr(orig) - } - if i == len(s) { - break - } - if s[i] < '0' || s[i] > '9' || e > 0 { - return dErr(orig) - } - dot = i - continue - } - break - } - if i == 0 { - return dErr(orig) - } - if mul > 1 { - h, l, ovr = muladd(h, l, mul, add) - if ovr > 0 || h&((1<<15-1)<<49) > 0 { - return dErr(orig) - } - } - if dot >= 0 { - e += dot - i - } - if i+1 < len(s) && (s[i] == 'E' || s[i] == 'e') { - i++ - eneg := s[i] == '-' - if eneg || s[i] == '+' { - i++ - if i == len(s) { - return dErr(orig) - } - } - n := 0 - for i < len(s) && n < 1e4 { - c := s[i] - i++ - if c < '0' || c > '9' { - return dErr(orig) - } - n *= 10 - n += int(c - '0') - } - if eneg { - n = -n - } - e += n - for e < -6176 { - // Subnormal. - var div uint32 = 1 - for div < 1e9 && e < -6176 { - div *= 10 - e++ - } - var rem uint32 - h, l, rem = divmod(h, l, div) - if rem > 0 { - return dErr(orig) - } - } - for e > 6111 { - // Clamped. 
- var mul uint32 = 1 - for mul < 1e9 && e > 6111 { - mul *= 10 - e-- - } - h, l, ovr = muladd(h, l, mul, 0) - if ovr > 0 || h&((1<<15-1)<<49) > 0 { - return dErr(orig) - } - } - if e < -6176 || e > 6111 { - return dErr(orig) - } - } - - if i < len(s) { - return dErr(orig) - } - - h |= uint64(e+6176) & uint64(1<<14-1) << 49 - if neg { - h |= 1 << 63 - } - return Decimal128{h, l}, nil -} - -func muladd(h, l uint64, mul uint32, add uint32) (resh, resl uint64, overflow uint32) { - mul64 := uint64(mul) - a := mul64 * (l & (1<<32 - 1)) - b := a>>32 + mul64*(l>>32) - c := b>>32 + mul64*(h&(1<<32-1)) - d := c>>32 + mul64*(h>>32) - - a = a&(1<<32-1) + uint64(add) - b = b&(1<<32-1) + a>>32 - c = c&(1<<32-1) + b>>32 - d = d&(1<<32-1) + c>>32 - - return (d<<32 | c&(1<<32-1)), (b<<32 | a&(1<<32-1)), uint32(d >> 32) -} diff --git a/services/community/vendor/github.com/globalsign/mgo/bson/decode.go b/services/community/vendor/github.com/globalsign/mgo/bson/decode.go deleted file mode 100644 index 658856ad..00000000 --- a/services/community/vendor/github.com/globalsign/mgo/bson/decode.go +++ /dev/null @@ -1,1055 +0,0 @@ -// BSON library for Go -// -// Copyright (c) 2010-2012 - Gustavo Niemeyer -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are met: -// -// 1. Redistributions of source code must retain the above copyright notice, this -// list of conditions and the following disclaimer. -// 2. Redistributions in binary form must reproduce the above copyright notice, -// this list of conditions and the following disclaimer in the documentation -// and/or other materials provided with the distribution. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// gobson - BSON library for Go. - -package bson - -import ( - "errors" - "fmt" - "io" - "math" - "net/url" - "reflect" - "strconv" - "sync" - "time" -) - -type decoder struct { - in []byte - i int - docType reflect.Type -} - -var typeM = reflect.TypeOf(M{}) - -func newDecoder(in []byte) *decoder { - return &decoder{in, 0, typeM} -} - -// -------------------------------------------------------------------------- -// Some helper functions. - -func corrupted() { - panic("Document is corrupted") -} - -// -------------------------------------------------------------------------- -// Unmarshaling of documents. 
- -const ( - setterUnknown = iota - setterNone - setterType - setterAddr -) - -var setterStyles map[reflect.Type]int -var setterIface reflect.Type -var setterMutex sync.RWMutex - -func init() { - var iface Setter - setterIface = reflect.TypeOf(&iface).Elem() - setterStyles = make(map[reflect.Type]int) -} - -func setterStyle(outt reflect.Type) int { - setterMutex.RLock() - style := setterStyles[outt] - setterMutex.RUnlock() - if style != setterUnknown { - return style - } - - setterMutex.Lock() - defer setterMutex.Unlock() - if outt.Implements(setterIface) { - style = setterType - } else if reflect.PtrTo(outt).Implements(setterIface) { - style = setterAddr - } else { - style = setterNone - } - setterStyles[outt] = style - return style -} - -func getSetter(outt reflect.Type, out reflect.Value) Setter { - style := setterStyle(outt) - if style == setterNone { - return nil - } - if style == setterAddr { - if !out.CanAddr() { - return nil - } - out = out.Addr() - } else if outt.Kind() == reflect.Ptr && out.IsNil() { - out.Set(reflect.New(outt.Elem())) - } - return out.Interface().(Setter) -} - -func clearMap(m reflect.Value) { - var none reflect.Value - for _, k := range m.MapKeys() { - m.SetMapIndex(k, none) - } -} - -func (d *decoder) readDocTo(out reflect.Value) { - var elemType reflect.Type - outt := out.Type() - outk := outt.Kind() - - for { - if outk == reflect.Ptr && out.IsNil() { - out.Set(reflect.New(outt.Elem())) - } - if setter := getSetter(outt, out); setter != nil { - raw := d.readRaw(ElementDocument) - err := setter.SetBSON(raw) - if _, ok := err.(*TypeError); err != nil && !ok { - panic(err) - } - return - } - if outk == reflect.Ptr { - out = out.Elem() - outt = out.Type() - outk = out.Kind() - continue - } - break - } - - var fieldsMap map[string]fieldInfo - var inlineMap reflect.Value - if outt == typeRaw { - out.Set(reflect.ValueOf(d.readRaw(ElementDocument))) - return - } - - origout := out - if outk == reflect.Interface { - if d.docType.Kind() == reflect.Map { - mv := reflect.MakeMap(d.docType) - out.Set(mv) - out = mv - } else { - dv := reflect.New(d.docType).Elem() - out.Set(dv) - out = dv - } - outt = out.Type() - outk = outt.Kind() - } - - docType := d.docType - keyType := typeString - convertKey := false - switch outk { - case reflect.Map: - keyType = outt.Key() - if keyType != typeString { - convertKey = true - } - elemType = outt.Elem() - if elemType == typeIface { - d.docType = outt - } - if out.IsNil() { - out.Set(reflect.MakeMap(out.Type())) - } else if out.Len() > 0 { - clearMap(out) - } - case reflect.Struct: - sinfo, err := getStructInfo(out.Type()) - if err != nil { - panic(err) - } - fieldsMap = sinfo.FieldsMap - out.Set(sinfo.Zero) - if sinfo.InlineMap != -1 { - inlineMap = out.Field(sinfo.InlineMap) - if !inlineMap.IsNil() && inlineMap.Len() > 0 { - clearMap(inlineMap) - } - elemType = inlineMap.Type().Elem() - if elemType == typeIface { - d.docType = inlineMap.Type() - } - } - case reflect.Slice: - switch outt.Elem() { - case typeDocElem: - origout.Set(d.readDocElems(outt)) - return - case typeRawDocElem: - origout.Set(d.readRawDocElems(outt)) - return - } - fallthrough - default: - panic("Unsupported document type for unmarshalling: " + out.Type().String()) - } - - end := int(d.readInt32()) - end += d.i - 4 - if end <= d.i || end > len(d.in) || d.in[end-1] != '\x00' { - corrupted() - } - for d.in[d.i] != '\x00' { - kind := d.readByte() - name := d.readCStr() - if d.i >= end { - corrupted() - } - - switch outk { - case reflect.Map: - e := 
reflect.New(elemType).Elem() - if d.readElemTo(e, kind) { - k := reflect.ValueOf(name) - if convertKey { - mapKeyType := out.Type().Key() - mapKeyKind := mapKeyType.Kind() - - switch mapKeyKind { - case reflect.Int: - fallthrough - case reflect.Int8: - fallthrough - case reflect.Int16: - fallthrough - case reflect.Int32: - fallthrough - case reflect.Int64: - fallthrough - case reflect.Uint: - fallthrough - case reflect.Uint8: - fallthrough - case reflect.Uint16: - fallthrough - case reflect.Uint32: - fallthrough - case reflect.Uint64: - fallthrough - case reflect.Float32: - fallthrough - case reflect.Float64: - parsed := d.parseMapKeyAsFloat(k, mapKeyKind) - k = reflect.ValueOf(parsed) - case reflect.String: - mapKeyType = keyType - default: - panic("BSON map must have string or decimal keys. Got: " + outt.String()) - } - - k = k.Convert(mapKeyType) - } - out.SetMapIndex(k, e) - } - case reflect.Struct: - if info, ok := fieldsMap[name]; ok { - if info.Inline == nil { - d.readElemTo(out.Field(info.Num), kind) - } else { - d.readElemTo(out.FieldByIndex(info.Inline), kind) - } - } else if inlineMap.IsValid() { - if inlineMap.IsNil() { - inlineMap.Set(reflect.MakeMap(inlineMap.Type())) - } - e := reflect.New(elemType).Elem() - if d.readElemTo(e, kind) { - inlineMap.SetMapIndex(reflect.ValueOf(name), e) - } - } else { - d.dropElem(kind) - } - case reflect.Slice: - } - - if d.i >= end { - corrupted() - } - } - d.i++ // '\x00' - if d.i != end { - corrupted() - } - d.docType = docType -} - -func (decoder) parseMapKeyAsFloat(k reflect.Value, mapKeyKind reflect.Kind) float64 { - parsed, err := strconv.ParseFloat(k.String(), 64) - if err != nil { - panic("Map key is defined to be a decimal type (" + mapKeyKind.String() + ") but got error " + - err.Error()) - } - - return parsed -} - -func (d *decoder) readArrayDocTo(out reflect.Value) { - end := int(d.readInt32()) - end += d.i - 4 - if end <= d.i || end > len(d.in) || d.in[end-1] != '\x00' { - corrupted() - } - i := 0 - l := out.Len() - for d.in[d.i] != '\x00' { - if i >= l { - panic("Length mismatch on array field") - } - kind := d.readByte() - for d.i < end && d.in[d.i] != '\x00' { - d.i++ - } - if d.i >= end { - corrupted() - } - d.i++ - d.readElemTo(out.Index(i), kind) - if d.i >= end { - corrupted() - } - i++ - } - if i != l { - panic("Length mismatch on array field") - } - d.i++ // '\x00' - if d.i != end { - corrupted() - } -} - -func (d *decoder) readSliceDoc(t reflect.Type) interface{} { - tmp := make([]reflect.Value, 0, 8) - elemType := t.Elem() - if elemType == typeRawDocElem { - d.dropElem(ElementArray) - return reflect.Zero(t).Interface() - } - if elemType == typeRaw { - return d.readSliceOfRaw() - } - - end := int(d.readInt32()) - end += d.i - 4 - if end <= d.i || end > len(d.in) || d.in[end-1] != '\x00' { - corrupted() - } - for d.in[d.i] != '\x00' { - kind := d.readByte() - for d.i < end && d.in[d.i] != '\x00' { - d.i++ - } - if d.i >= end { - corrupted() - } - d.i++ - e := reflect.New(elemType).Elem() - if d.readElemTo(e, kind) { - tmp = append(tmp, e) - } - if d.i >= end { - corrupted() - } - } - d.i++ // '\x00' - if d.i != end { - corrupted() - } - - n := len(tmp) - slice := reflect.MakeSlice(t, n, n) - for i := 0; i != n; i++ { - slice.Index(i).Set(tmp[i]) - } - return slice.Interface() -} - -func BSONElementSize(kind byte, offset int, buffer []byte) (int, error) { - switch kind { - case ElementFloat64: // Float64 - return 8, nil - case ElementJavaScriptWithoutScope: // JavaScript without scope - fallthrough - case ElementSymbol: 
// Symbol - fallthrough - case ElementString: // UTF-8 string - size, err := getSize(offset, buffer) - if err != nil { - return 0, err - } - if size < 1 { - return 0, errors.New("String size can't be less then one byte") - } - size += 4 - if offset+size > len(buffer) { - return 0, io.ErrUnexpectedEOF - } - if buffer[offset+size-1] != 0 { - return 0, errors.New("Invalid string: non zero-terminated") - } - return size, nil - case ElementArray: // Array - fallthrough - case ElementDocument: // Document - size, err := getSize(offset, buffer) - if err != nil { - return 0, err - } - if size < 5 { - return 0, errors.New("Declared document size is too small") - } - return size, nil - case ElementBinary: // Binary - size, err := getSize(offset, buffer) - if err != nil { - return 0, err - } - if size < 0 { - return 0, errors.New("Binary data size can't be negative") - } - return size + 5, nil - case Element06: // Undefined (obsolete, but still seen in the wild) - return 0, nil - case ElementObjectId: // ObjectId - return 12, nil - case ElementBool: // Bool - return 1, nil - case ElementDatetime: // Timestamp - return 8, nil - case ElementNil: // Nil - return 0, nil - case ElementRegEx: // RegEx - end := offset - for i := 0; i < 2; i++ { - for end < len(buffer) && buffer[end] != '\x00' { - end++ - } - end++ - } - if end > len(buffer) { - return 0, io.ErrUnexpectedEOF - } - return end - offset, nil - case ElementDBPointer: // DBPointer - size, err := getSize(offset, buffer) - if err != nil { - return 0, err - } - if size < 1 { - return 0, errors.New("String size can't be less then one byte") - } - return size + 12 + 4, nil - case ElementJavaScriptWithScope: // JavaScript with scope - size, err := getSize(offset, buffer) - if err != nil { - return 0, err - } - if size < 4+5+5 { - return 0, errors.New("Declared document element is too small") - } - return size, nil - case ElementInt32: // Int32 - return 4, nil - case ElementTimestamp: // Mongo-specific timestamp - return 8, nil - case ElementInt64: // Int64 - return 8, nil - case ElementDecimal128: // Decimal128 - return 16, nil - case ElementMaxKey: // Max key - return 0, nil - case ElementMinKey: // Min key - return 0, nil - default: - return 0, errors.New(fmt.Sprintf("Unknown element kind (0x%02X)", kind)) - } -} - -func (d *decoder) readRaw(kind byte) Raw { - size, err := BSONElementSize(kind, d.i, d.in) - if err != nil { - corrupted() - } - if d.i+size > len(d.in) { - corrupted() - } - d.i += size - return Raw{ - Kind: kind, - Data: d.in[d.i-size : d.i], - } -} - -func (d *decoder) readSliceOfRaw() interface{} { - tmp := make([]Raw, 0, 8) - end := int(d.readInt32()) - end += d.i - 4 - if end <= d.i || end > len(d.in) || d.in[end-1] != '\x00' { - corrupted() - } - for d.in[d.i] != '\x00' { - kind := d.readByte() - for d.i < end && d.in[d.i] != '\x00' { - d.i++ - } - if d.i >= end { - corrupted() - } - d.i++ - e := d.readRaw(kind) - tmp = append(tmp, e) - if d.i >= end { - corrupted() - } - } - d.i++ // '\x00' - if d.i != end { - corrupted() - } - return tmp -} - -var typeSlice = reflect.TypeOf([]interface{}{}) -var typeIface = typeSlice.Elem() - -func (d *decoder) readDocElems(typ reflect.Type) reflect.Value { - docType := d.docType - d.docType = typ - slice := make([]DocElem, 0, 8) - d.readDocWith(func(kind byte, name string) { - e := DocElem{Name: name} - v := reflect.ValueOf(&e.Value) - if d.readElemTo(v.Elem(), kind) { - slice = append(slice, e) - } - }) - slicev := reflect.New(typ).Elem() - slicev.Set(reflect.ValueOf(slice)) - d.docType = 
docType - return slicev -} - -func (d *decoder) readRawDocElems(typ reflect.Type) reflect.Value { - docType := d.docType - d.docType = typ - slice := make([]RawDocElem, 0, 8) - d.readDocWith(func(kind byte, name string) { - e := RawDocElem{Name: name, Value: d.readRaw(kind)} - slice = append(slice, e) - }) - slicev := reflect.New(typ).Elem() - slicev.Set(reflect.ValueOf(slice)) - d.docType = docType - return slicev -} - -func (d *decoder) readDocWith(f func(kind byte, name string)) { - end := int(d.readInt32()) - end += d.i - 4 - if end <= d.i || end > len(d.in) || d.in[end-1] != '\x00' { - corrupted() - } - for d.in[d.i] != '\x00' { - kind := d.readByte() - name := d.readCStr() - if d.i >= end { - corrupted() - } - f(kind, name) - if d.i >= end { - corrupted() - } - } - d.i++ // '\x00' - if d.i != end { - corrupted() - } -} - -// -------------------------------------------------------------------------- -// Unmarshaling of individual elements within a document. -func (d *decoder) dropElem(kind byte) { - size, err := BSONElementSize(kind, d.i, d.in) - if err != nil { - corrupted() - } - if d.i+size > len(d.in) { - corrupted() - } - d.i += size -} - -// Attempt to decode an element from the document and put it into out. -// If the types are not compatible, the returned ok value will be -// false and out will be unchanged. -func (d *decoder) readElemTo(out reflect.Value, kind byte) (good bool) { - outt := out.Type() - - if outt == typeRaw { - out.Set(reflect.ValueOf(d.readRaw(kind))) - return true - } - - if outt == typeRawPtr { - raw := d.readRaw(kind) - out.Set(reflect.ValueOf(&raw)) - return true - } - - if kind == ElementDocument { - // Delegate unmarshaling of documents. - outt := out.Type() - outk := out.Kind() - switch outk { - case reflect.Interface, reflect.Ptr, reflect.Struct, reflect.Map: - d.readDocTo(out) - return true - } - if setterStyle(outt) != setterNone { - d.readDocTo(out) - return true - } - if outk == reflect.Slice { - switch outt.Elem() { - case typeDocElem: - out.Set(d.readDocElems(outt)) - case typeRawDocElem: - out.Set(d.readRawDocElems(outt)) - default: - d.dropElem(kind) - } - return true - } - d.dropElem(kind) - return true - } - - if setter := getSetter(outt, out); setter != nil { - err := setter.SetBSON(d.readRaw(kind)) - if err == ErrSetZero { - out.Set(reflect.Zero(outt)) - return true - } - if err == nil { - return true - } - if _, ok := err.(*TypeError); !ok { - panic(err) - } - return false - } - - var in interface{} - - switch kind { - case ElementFloat64: - in = d.readFloat64() - case ElementString: - in = d.readStr() - case ElementDocument: - panic("Can't happen. Handled above.") - case ElementArray: - outt := out.Type() - if setterStyle(outt) != setterNone { - // Skip the value so its data is handed to the setter below. - d.dropElem(kind) - break - } - for outt.Kind() == reflect.Ptr { - outt = outt.Elem() - } - switch outt.Kind() { - case reflect.Array: - d.readArrayDocTo(out) - return true - case reflect.Slice: - in = d.readSliceDoc(outt) - default: - in = d.readSliceDoc(typeSlice) - } - case ElementBinary: - b := d.readBinary() - if b.Kind == BinaryGeneric || b.Kind == BinaryBinaryOld { - in = b.Data - } else { - in = b - } - case Element06: // Undefined (obsolete, but still seen in the wild) - in = Undefined - case ElementObjectId: - in = ObjectId(d.readBytes(12)) - case ElementBool: - in = d.readBool() - case ElementDatetime: // Timestamp - // MongoDB handles timestamps as milliseconds. 
- i := d.readInt64() - if i == -62135596800000 { - in = time.Time{} // In UTC for convenience. - } else { - in = time.Unix(i/1e3, i%1e3*1e6).UTC() - } - case ElementNil: - in = nil - case ElementRegEx: - in = d.readRegEx() - case ElementDBPointer: - in = DBPointer{Namespace: d.readStr(), Id: ObjectId(d.readBytes(12))} - case ElementJavaScriptWithoutScope: - in = JavaScript{Code: d.readStr()} - case ElementSymbol: - in = Symbol(d.readStr()) - case ElementJavaScriptWithScope: - start := d.i - l := int(d.readInt32()) - js := JavaScript{d.readStr(), make(M)} - d.readDocTo(reflect.ValueOf(js.Scope)) - if d.i != start+l { - corrupted() - } - in = js - case ElementInt32: - in = int(d.readInt32()) - case ElementTimestamp: // Mongo-specific timestamp - in = MongoTimestamp(d.readInt64()) - case ElementInt64: - switch out.Type() { - case typeTimeDuration: - in = time.Duration(time.Duration(d.readInt64()) * time.Millisecond) - default: - in = d.readInt64() - } - case ElementDecimal128: - in = Decimal128{ - l: uint64(d.readInt64()), - h: uint64(d.readInt64()), - } - case ElementMaxKey: - in = MaxKey - case ElementMinKey: - in = MinKey - default: - panic(fmt.Sprintf("Unknown element kind (0x%02X)", kind)) - } - - if in == nil { - out.Set(reflect.Zero(outt)) - return true - } - - outk := outt.Kind() - - // Dereference and initialize pointer if necessary. - first := true - for outk == reflect.Ptr { - if !out.IsNil() { - out = out.Elem() - } else { - elem := reflect.New(outt.Elem()) - if first { - // Only set if value is compatible. - first = false - defer func(out, elem reflect.Value) { - if good { - out.Set(elem) - } - }(out, elem) - } else { - out.Set(elem) - } - out = elem - } - outt = out.Type() - outk = outt.Kind() - } - - inv := reflect.ValueOf(in) - if outt == inv.Type() { - out.Set(inv) - return true - } - - switch outk { - case reflect.Interface: - out.Set(inv) - return true - case reflect.String: - switch inv.Kind() { - case reflect.String: - out.SetString(inv.String()) - return true - case reflect.Slice: - if b, ok := in.([]byte); ok { - out.SetString(string(b)) - return true - } - case reflect.Int, reflect.Int64: - if outt == typeJSONNumber { - out.SetString(strconv.FormatInt(inv.Int(), 10)) - return true - } - case reflect.Float64: - if outt == typeJSONNumber { - out.SetString(strconv.FormatFloat(inv.Float(), 'f', -1, 64)) - return true - } - } - case reflect.Slice, reflect.Array: - // Remember, array (0x04) slices are built with the correct - // element type. If we are here, must be a cross BSON kind - // conversion (e.g. 0x05 unmarshalling on string). 
- if outt.Elem().Kind() != reflect.Uint8 { - break - } - switch inv.Kind() { - case reflect.String: - slice := []byte(inv.String()) - out.Set(reflect.ValueOf(slice)) - return true - case reflect.Slice: - switch outt.Kind() { - case reflect.Array: - reflect.Copy(out, inv) - case reflect.Slice: - out.SetBytes(inv.Bytes()) - } - return true - } - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - switch inv.Kind() { - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - out.SetInt(inv.Int()) - return true - case reflect.Float32, reflect.Float64: - out.SetInt(int64(inv.Float())) - return true - case reflect.Bool: - if inv.Bool() { - out.SetInt(1) - } else { - out.SetInt(0) - } - return true - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - panic("can't happen: no uint types in BSON (!?)") - } - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - switch inv.Kind() { - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - out.SetUint(uint64(inv.Int())) - return true - case reflect.Float32, reflect.Float64: - out.SetUint(uint64(inv.Float())) - return true - case reflect.Bool: - if inv.Bool() { - out.SetUint(1) - } else { - out.SetUint(0) - } - return true - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - panic("Can't happen. No uint types in BSON.") - } - case reflect.Float32, reflect.Float64: - switch inv.Kind() { - case reflect.Float32, reflect.Float64: - out.SetFloat(inv.Float()) - return true - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - out.SetFloat(float64(inv.Int())) - return true - case reflect.Bool: - if inv.Bool() { - out.SetFloat(1) - } else { - out.SetFloat(0) - } - return true - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - panic("Can't happen. No uint types in BSON?") - } - case reflect.Bool: - switch inv.Kind() { - case reflect.Bool: - out.SetBool(inv.Bool()) - return true - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - out.SetBool(inv.Int() != 0) - return true - case reflect.Float32, reflect.Float64: - out.SetBool(inv.Float() != 0) - return true - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - panic("Can't happen. No uint types in BSON?") - } - case reflect.Struct: - if outt == typeURL && inv.Kind() == reflect.String { - u, err := url.Parse(inv.String()) - if err != nil { - panic(err) - } - out.Set(reflect.ValueOf(u).Elem()) - return true - } - if outt == typeBinary { - if b, ok := in.([]byte); ok { - out.Set(reflect.ValueOf(Binary{Data: b})) - return true - } - } - } - - return false -} - -// -------------------------------------------------------------------------- -// Parsers of basic types. - -func (d *decoder) readRegEx() RegEx { - re := RegEx{} - re.Pattern = d.readCStr() - re.Options = d.readCStr() - return re -} - -func (d *decoder) readBinary() Binary { - l := d.readInt32() - b := Binary{} - b.Kind = d.readByte() - if b.Kind == BinaryBinaryOld && l > 4 { - // Weird obsolete format with redundant length. 
- rl := d.readInt32() - if rl != l-4 { - corrupted() - } - l = rl - } - b.Data = d.readBytes(l) - return b -} - -func (d *decoder) readStr() string { - l := d.readInt32() - b := d.readBytes(l - 1) - if d.readByte() != '\x00' { - corrupted() - } - return string(b) -} - -func (d *decoder) readCStr() string { - start := d.i - end := start - l := len(d.in) - for ; end != l; end++ { - if d.in[end] == '\x00' { - break - } - } - d.i = end + 1 - if d.i > l { - corrupted() - } - return string(d.in[start:end]) -} - -func (d *decoder) readBool() bool { - b := d.readByte() - if b == 0 { - return false - } - if b == 1 { - return true - } - panic(fmt.Sprintf("encoded boolean must be 1 or 0, found %d", b)) -} - -func (d *decoder) readFloat64() float64 { - return math.Float64frombits(uint64(d.readInt64())) -} - -func (d *decoder) readInt32() int32 { - b := d.readBytes(4) - return int32((uint32(b[0]) << 0) | - (uint32(b[1]) << 8) | - (uint32(b[2]) << 16) | - (uint32(b[3]) << 24)) -} - -func getSize(offset int, b []byte) (int, error) { - if offset+4 > len(b) { - return 0, io.ErrUnexpectedEOF - } - return int((uint32(b[offset]) << 0) | - (uint32(b[offset+1]) << 8) | - (uint32(b[offset+2]) << 16) | - (uint32(b[offset+3]) << 24)), nil -} - -func (d *decoder) readInt64() int64 { - b := d.readBytes(8) - return int64((uint64(b[0]) << 0) | - (uint64(b[1]) << 8) | - (uint64(b[2]) << 16) | - (uint64(b[3]) << 24) | - (uint64(b[4]) << 32) | - (uint64(b[5]) << 40) | - (uint64(b[6]) << 48) | - (uint64(b[7]) << 56)) -} - -func (d *decoder) readByte() byte { - i := d.i - d.i++ - if d.i > len(d.in) { - corrupted() - } - return d.in[i] -} - -func (d *decoder) readBytes(length int32) []byte { - if length < 0 { - corrupted() - } - start := d.i - d.i += int(length) - if d.i < start || d.i > len(d.in) { - corrupted() - } - return d.in[start : start+int(length)] -} diff --git a/services/community/vendor/github.com/globalsign/mgo/bson/encode.go b/services/community/vendor/github.com/globalsign/mgo/bson/encode.go deleted file mode 100644 index d0c6b2a8..00000000 --- a/services/community/vendor/github.com/globalsign/mgo/bson/encode.go +++ /dev/null @@ -1,645 +0,0 @@ -// BSON library for Go -// -// Copyright (c) 2010-2012 - Gustavo Niemeyer -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are met: -// -// 1. Redistributions of source code must retain the above copyright notice, this -// list of conditions and the following disclaimer. -// 2. Redistributions in binary form must reproduce the above copyright notice, -// this list of conditions and the following disclaimer in the documentation -// and/or other materials provided with the distribution. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -// DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// gobson - BSON library for Go. - -package bson - -import ( - "encoding/json" - "fmt" - "math" - "net/url" - "reflect" - "sort" - "strconv" - "sync" - "time" -) - -// -------------------------------------------------------------------------- -// Some internal infrastructure. - -var ( - typeBinary = reflect.TypeOf(Binary{}) - typeObjectId = reflect.TypeOf(ObjectId("")) - typeDBPointer = reflect.TypeOf(DBPointer{"", ObjectId("")}) - typeSymbol = reflect.TypeOf(Symbol("")) - typeMongoTimestamp = reflect.TypeOf(MongoTimestamp(0)) - typeOrderKey = reflect.TypeOf(MinKey) - typeDocElem = reflect.TypeOf(DocElem{}) - typeRawDocElem = reflect.TypeOf(RawDocElem{}) - typeRaw = reflect.TypeOf(Raw{}) - typeRawPtr = reflect.PtrTo(reflect.TypeOf(Raw{})) - typeURL = reflect.TypeOf(url.URL{}) - typeTime = reflect.TypeOf(time.Time{}) - typeString = reflect.TypeOf("") - typeJSONNumber = reflect.TypeOf(json.Number("")) - typeTimeDuration = reflect.TypeOf(time.Duration(0)) -) - -var ( - // spec for []uint8 or []byte encoding - arrayOps = map[string]bool{ - "$in": true, - "$nin": true, - "$all": true, - } -) - -const itoaCacheSize = 32 - -const ( - getterUnknown = iota - getterNone - getterTypeVal - getterTypePtr - getterAddr -) - -var itoaCache []string - -var getterStyles map[reflect.Type]int -var getterIface reflect.Type -var getterMutex sync.RWMutex - -func init() { - itoaCache = make([]string, itoaCacheSize) - for i := 0; i != itoaCacheSize; i++ { - itoaCache[i] = strconv.Itoa(i) - } - var iface Getter - getterIface = reflect.TypeOf(&iface).Elem() - getterStyles = make(map[reflect.Type]int) -} - -func itoa(i int) string { - if i < itoaCacheSize { - return itoaCache[i] - } - return strconv.Itoa(i) -} - -func getterStyle(outt reflect.Type) int { - getterMutex.RLock() - style := getterStyles[outt] - getterMutex.RUnlock() - if style != getterUnknown { - return style - } - - getterMutex.Lock() - defer getterMutex.Unlock() - if outt.Implements(getterIface) { - vt := outt - for vt.Kind() == reflect.Ptr { - vt = vt.Elem() - } - if vt.Implements(getterIface) { - style = getterTypeVal - } else { - style = getterTypePtr - } - } else if reflect.PtrTo(outt).Implements(getterIface) { - style = getterAddr - } else { - style = getterNone - } - getterStyles[outt] = style - return style -} - -func getGetter(outt reflect.Type, out reflect.Value) Getter { - style := getterStyle(outt) - if style == getterNone { - return nil - } - if style == getterAddr { - if !out.CanAddr() { - return nil - } - return out.Addr().Interface().(Getter) - } - if style == getterTypeVal && out.Kind() == reflect.Ptr && out.IsNil() { - return nil - } - return out.Interface().(Getter) -} - -// -------------------------------------------------------------------------- -// Marshaling of the document value itself. 
- -type encoder struct { - out []byte -} - -func (e *encoder) addDoc(v reflect.Value) { - for { - if vi, ok := v.Interface().(Getter); ok { - getv, err := vi.GetBSON() - if err != nil { - panic(err) - } - v = reflect.ValueOf(getv) - continue - } - if v.Kind() == reflect.Ptr { - v = v.Elem() - continue - } - break - } - - if v.Type() == typeRaw { - raw := v.Interface().(Raw) - if raw.Kind != 0x03 && raw.Kind != 0x00 { - panic("Attempted to marshal Raw kind " + strconv.Itoa(int(raw.Kind)) + " as a document") - } - if len(raw.Data) == 0 { - panic("Attempted to marshal empty Raw document") - } - e.addBytes(raw.Data...) - return - } - - start := e.reserveInt32() - - switch v.Kind() { - case reflect.Map: - e.addMap(v) - case reflect.Struct: - e.addStruct(v) - case reflect.Array, reflect.Slice: - e.addSlice(v) - default: - panic("Can't marshal " + v.Type().String() + " as a BSON document") - } - - e.addBytes(0) - e.setInt32(start, int32(len(e.out)-start)) -} - -func (e *encoder) addMap(v reflect.Value) { - for _, k := range v.MapKeys() { - e.addElem(fmt.Sprint(k), v.MapIndex(k), false) - } -} - -func (e *encoder) addStruct(v reflect.Value) { - sinfo, err := getStructInfo(v.Type()) - if err != nil { - panic(err) - } - var value reflect.Value - if sinfo.InlineMap >= 0 { - m := v.Field(sinfo.InlineMap) - if m.Len() > 0 { - for _, k := range m.MapKeys() { - ks := k.String() - if _, found := sinfo.FieldsMap[ks]; found { - panic(fmt.Sprintf("Can't have key %q in inlined map; conflicts with struct field", ks)) - } - e.addElem(ks, m.MapIndex(k), false) - } - } - } - for _, info := range sinfo.FieldsList { - if info.Inline == nil { - value = v.Field(info.Num) - } else { - // as pointers to struct are allowed here, - // there is no guarantee that pointer won't be nil. 
- // - // It is expected allowed behaviour - // so info.Inline MAY consist index to a nil pointer - // and that is why we safely call v.FieldByIndex and just continue on panic - field, errField := safeFieldByIndex(v, info.Inline) - if errField != nil { - continue - } - - value = field - } - if info.OmitEmpty && isZero(value) { - continue - } - if useRespectNilValues && - (value.Kind() == reflect.Slice || value.Kind() == reflect.Map) && - value.IsNil() { - e.addElem(info.Key, reflect.ValueOf(nil), info.MinSize) - continue - } - e.addElem(info.Key, value, info.MinSize) - } -} - -func safeFieldByIndex(v reflect.Value, index []int) (result reflect.Value, err error) { - defer func() { - if recovered := recover(); recovered != nil { - switch r := recovered.(type) { - case string: - err = fmt.Errorf("%s", r) - case error: - err = r - } - } - }() - - result = v.FieldByIndex(index) - return -} - -func isZero(v reflect.Value) bool { - switch v.Kind() { - case reflect.String: - return len(v.String()) == 0 - case reflect.Ptr, reflect.Interface: - return v.IsNil() - case reflect.Slice: - return v.Len() == 0 - case reflect.Map: - return v.Len() == 0 - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return v.Int() == 0 - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - return v.Uint() == 0 - case reflect.Float32, reflect.Float64: - return v.Float() == 0 - case reflect.Bool: - return !v.Bool() - case reflect.Struct: - vt := v.Type() - if vt == typeTime { - return v.Interface().(time.Time).IsZero() - } - for i := 0; i < v.NumField(); i++ { - if vt.Field(i).PkgPath != "" && !vt.Field(i).Anonymous { - continue // Private field - } - if !isZero(v.Field(i)) { - return false - } - } - return true - } - return false -} - -func (e *encoder) addSlice(v reflect.Value) { - vi := v.Interface() - if d, ok := vi.(D); ok { - for _, elem := range d { - e.addElem(elem.Name, reflect.ValueOf(elem.Value), false) - } - return - } - if d, ok := vi.(RawD); ok { - for _, elem := range d { - e.addElem(elem.Name, reflect.ValueOf(elem.Value), false) - } - return - } - l := v.Len() - et := v.Type().Elem() - if et == typeDocElem { - for i := 0; i < l; i++ { - elem := v.Index(i).Interface().(DocElem) - e.addElem(elem.Name, reflect.ValueOf(elem.Value), false) - } - return - } - if et == typeRawDocElem { - for i := 0; i < l; i++ { - elem := v.Index(i).Interface().(RawDocElem) - e.addElem(elem.Name, reflect.ValueOf(elem.Value), false) - } - return - } - for i := 0; i < l; i++ { - e.addElem(itoa(i), v.Index(i), false) - } -} - -// -------------------------------------------------------------------------- -// Marshaling of elements in a document. - -func (e *encoder) addElemName(kind byte, name string) { - e.addBytes(kind) - e.addBytes([]byte(name)...) 
- e.addBytes(0) -} - -func (e *encoder) addElem(name string, v reflect.Value, minSize bool) { - - if !v.IsValid() { - e.addElemName(0x0A, name) - return - } - - if getter := getGetter(v.Type(), v); getter != nil { - getv, err := getter.GetBSON() - if err != nil { - panic(err) - } - e.addElem(name, reflect.ValueOf(getv), minSize) - return - } - - switch v.Kind() { - - case reflect.Interface: - e.addElem(name, v.Elem(), minSize) - - case reflect.Ptr: - e.addElem(name, v.Elem(), minSize) - - case reflect.String: - s := v.String() - switch v.Type() { - case typeObjectId: - if len(s) != 12 { - panic("ObjectIDs must be exactly 12 bytes long (got " + - strconv.Itoa(len(s)) + ")") - } - e.addElemName(0x07, name) - e.addBytes([]byte(s)...) - case typeSymbol: - e.addElemName(0x0E, name) - e.addStr(s) - case typeJSONNumber: - n := v.Interface().(json.Number) - if i, err := n.Int64(); err == nil { - e.addElemName(0x12, name) - e.addInt64(i) - } else if f, err := n.Float64(); err == nil { - e.addElemName(0x01, name) - e.addFloat64(f) - } else { - panic("failed to convert json.Number to a number: " + s) - } - default: - e.addElemName(0x02, name) - e.addStr(s) - } - - case reflect.Float32, reflect.Float64: - e.addElemName(0x01, name) - e.addFloat64(v.Float()) - - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - u := v.Uint() - if int64(u) < 0 { - panic("BSON has no uint64 type, and value is too large to fit correctly in an int64") - } else if u <= math.MaxInt32 && (minSize || v.Kind() <= reflect.Uint32) { - e.addElemName(0x10, name) - e.addInt32(int32(u)) - } else { - e.addElemName(0x12, name) - e.addInt64(int64(u)) - } - - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - switch v.Type() { - case typeMongoTimestamp: - e.addElemName(0x11, name) - e.addInt64(v.Int()) - - case typeOrderKey: - if v.Int() == int64(MaxKey) { - e.addElemName(0x7F, name) - } else { - e.addElemName(0xFF, name) - } - case typeTimeDuration: - // Stored as int64 - e.addElemName(0x12, name) - - e.addInt64(int64(v.Int() / 1e6)) - default: - i := v.Int() - if (minSize || v.Type().Kind() != reflect.Int64) && i >= math.MinInt32 && i <= math.MaxInt32 { - // It fits into an int32, encode as such. 
- e.addElemName(0x10, name) - e.addInt32(int32(i)) - } else { - e.addElemName(0x12, name) - e.addInt64(i) - } - } - - case reflect.Bool: - e.addElemName(0x08, name) - if v.Bool() { - e.addBytes(1) - } else { - e.addBytes(0) - } - - case reflect.Map: - e.addElemName(0x03, name) - e.addDoc(v) - - case reflect.Slice: - vt := v.Type() - et := vt.Elem() - if et.Kind() == reflect.Uint8 { - if arrayOps[name] { - e.addElemName(0x04, name) - e.addDoc(v) - } else { - e.addElemName(0x05, name) - e.addBinary(0x00, v.Bytes()) - } - } else if et == typeDocElem || et == typeRawDocElem { - e.addElemName(0x03, name) - e.addDoc(v) - } else { - e.addElemName(0x04, name) - e.addDoc(v) - } - - case reflect.Array: - et := v.Type().Elem() - if et.Kind() == reflect.Uint8 { - if arrayOps[name] { - e.addElemName(0x04, name) - e.addDoc(v) - } else { - e.addElemName(0x05, name) - if v.CanAddr() { - e.addBinary(0x00, v.Slice(0, v.Len()).Interface().([]byte)) - } else { - n := v.Len() - e.addInt32(int32(n)) - e.addBytes(0x00) - for i := 0; i < n; i++ { - el := v.Index(i) - e.addBytes(byte(el.Uint())) - } - } - } - } else { - e.addElemName(0x04, name) - e.addDoc(v) - } - - case reflect.Struct: - switch s := v.Interface().(type) { - - case Raw: - kind := s.Kind - if kind == 0x00 { - kind = 0x03 - } - if len(s.Data) == 0 && kind != 0x06 && kind != 0x0A && kind != 0xFF && kind != 0x7F { - panic("Attempted to marshal empty Raw document") - } - e.addElemName(kind, name) - e.addBytes(s.Data...) - - case Binary: - e.addElemName(0x05, name) - e.addBinary(s.Kind, s.Data) - - case Decimal128: - e.addElemName(0x13, name) - e.addInt64(int64(s.l)) - e.addInt64(int64(s.h)) - - case DBPointer: - e.addElemName(0x0C, name) - e.addStr(s.Namespace) - if len(s.Id) != 12 { - panic("ObjectIDs must be exactly 12 bytes long (got " + - strconv.Itoa(len(s.Id)) + ")") - } - e.addBytes([]byte(s.Id)...) - - case RegEx: - e.addElemName(0x0B, name) - e.addCStr(s.Pattern) - options := runes(s.Options) - sort.Sort(options) - e.addCStr(string(options)) - - case JavaScript: - if s.Scope == nil { - e.addElemName(0x0D, name) - e.addStr(s.Code) - } else { - e.addElemName(0x0F, name) - start := e.reserveInt32() - e.addStr(s.Code) - e.addDoc(reflect.ValueOf(s.Scope)) - e.setInt32(start, int32(len(e.out)-start)) - } - - case time.Time: - // MongoDB handles timestamps as milliseconds. - e.addElemName(0x09, name) - e.addInt64(s.Unix()*1000 + int64(s.Nanosecond()/1e6)) - - case url.URL: - e.addElemName(0x02, name) - e.addStr(s.String()) - - case undefined: - e.addElemName(0x06, name) - - default: - e.addElemName(0x03, name) - e.addDoc(v) - } - - default: - panic("Can't marshal " + v.Type().String() + " in a BSON document") - } -} - -// ------------- -// Helper method for sorting regex options -type runes []rune - -func (a runes) Len() int { return len(a) } -func (a runes) Swap(i, j int) { a[i], a[j] = a[j], a[i] } -func (a runes) Less(i, j int) bool { return a[i] < a[j] } - -// -------------------------------------------------------------------------- -// Marshaling of base types. - -func (e *encoder) addBinary(subtype byte, v []byte) { - if subtype == 0x02 { - // Wonder how that brilliant idea came to life. Obsolete, luckily. - e.addInt32(int32(len(v) + 4)) - e.addBytes(subtype) - e.addInt32(int32(len(v))) - } else { - e.addInt32(int32(len(v))) - e.addBytes(subtype) - } - e.addBytes(v...) -} - -func (e *encoder) addStr(v string) { - e.addInt32(int32(len(v) + 1)) - e.addCStr(v) -} - -func (e *encoder) addCStr(v string) { - e.addBytes([]byte(v)...) 
- e.addBytes(0) -} - -func (e *encoder) reserveInt32() (pos int) { - pos = len(e.out) - e.addBytes(0, 0, 0, 0) - return pos -} - -func (e *encoder) setInt32(pos int, v int32) { - e.out[pos+0] = byte(v) - e.out[pos+1] = byte(v >> 8) - e.out[pos+2] = byte(v >> 16) - e.out[pos+3] = byte(v >> 24) -} - -func (e *encoder) addInt32(v int32) { - u := uint32(v) - e.addBytes(byte(u), byte(u>>8), byte(u>>16), byte(u>>24)) -} - -func (e *encoder) addInt64(v int64) { - u := uint64(v) - e.addBytes(byte(u), byte(u>>8), byte(u>>16), byte(u>>24), - byte(u>>32), byte(u>>40), byte(u>>48), byte(u>>56)) -} - -func (e *encoder) addFloat64(v float64) { - e.addInt64(int64(math.Float64bits(v))) -} - -func (e *encoder) addBytes(v ...byte) { - e.out = append(e.out, v...) -} diff --git a/services/community/vendor/github.com/globalsign/mgo/bson/json.go b/services/community/vendor/github.com/globalsign/mgo/bson/json.go deleted file mode 100644 index 045c7130..00000000 --- a/services/community/vendor/github.com/globalsign/mgo/bson/json.go +++ /dev/null @@ -1,384 +0,0 @@ -package bson - -import ( - "bytes" - "encoding/base64" - "fmt" - "strconv" - "strings" - "time" - - "github.com/globalsign/mgo/internal/json" -) - -// UnmarshalJSON unmarshals a JSON value that may hold non-standard -// syntax as defined in BSON's extended JSON specification. -func UnmarshalJSON(data []byte, value interface{}) error { - d := json.NewDecoder(bytes.NewBuffer(data)) - d.Extend(&jsonExt) - return d.Decode(value) -} - -// MarshalJSON marshals a JSON value that may hold non-standard -// syntax as defined in BSON's extended JSON specification. -func MarshalJSON(value interface{}) ([]byte, error) { - var buf bytes.Buffer - e := json.NewEncoder(&buf) - e.Extend(&jsonExt) - err := e.Encode(value) - if err != nil { - return nil, err - } - return buf.Bytes(), nil -} - -// jdec is used internally by the JSON decoding functions -// so they may unmarshal functions without getting into endless -// recursion due to keyed objects. 
-func jdec(data []byte, value interface{}) error { - d := json.NewDecoder(bytes.NewBuffer(data)) - d.Extend(&funcExt) - return d.Decode(value) -} - -var jsonExt json.Extension -var funcExt json.Extension - -// TODO -// - Shell regular expressions ("/regexp/opts") - -func init() { - jsonExt.DecodeUnquotedKeys(true) - jsonExt.DecodeTrailingCommas(true) - - funcExt.DecodeFunc("BinData", "$binaryFunc", "$type", "$binary") - jsonExt.DecodeKeyed("$binary", jdecBinary) - jsonExt.DecodeKeyed("$binaryFunc", jdecBinary) - jsonExt.EncodeType([]byte(nil), jencBinarySlice) - jsonExt.EncodeType(Binary{}, jencBinaryType) - - funcExt.DecodeFunc("ISODate", "$dateFunc", "S") - funcExt.DecodeFunc("new Date", "$dateFunc", "S") - jsonExt.DecodeKeyed("$date", jdecDate) - jsonExt.DecodeKeyed("$dateFunc", jdecDate) - jsonExt.EncodeType(time.Time{}, jencDate) - - funcExt.DecodeFunc("Timestamp", "$timestamp", "t", "i") - jsonExt.DecodeKeyed("$timestamp", jdecTimestamp) - jsonExt.EncodeType(MongoTimestamp(0), jencTimestamp) - - funcExt.DecodeConst("undefined", Undefined) - - jsonExt.DecodeKeyed("$regex", jdecRegEx) - jsonExt.EncodeType(RegEx{}, jencRegEx) - - funcExt.DecodeFunc("ObjectId", "$oidFunc", "Id") - jsonExt.DecodeKeyed("$oid", jdecObjectId) - jsonExt.DecodeKeyed("$oidFunc", jdecObjectId) - jsonExt.EncodeType(ObjectId(""), jencObjectId) - - funcExt.DecodeFunc("DBRef", "$dbrefFunc", "$ref", "$id") - jsonExt.DecodeKeyed("$dbrefFunc", jdecDBRef) - - funcExt.DecodeFunc("NumberLong", "$numberLongFunc", "N") - jsonExt.DecodeKeyed("$numberLong", jdecNumberLong) - jsonExt.DecodeKeyed("$numberLongFunc", jdecNumberLong) - jsonExt.EncodeType(int64(0), jencNumberLong) - jsonExt.EncodeType(int(0), jencInt) - - funcExt.DecodeConst("MinKey", MinKey) - funcExt.DecodeConst("MaxKey", MaxKey) - jsonExt.DecodeKeyed("$minKey", jdecMinKey) - jsonExt.DecodeKeyed("$maxKey", jdecMaxKey) - jsonExt.EncodeType(orderKey(0), jencMinMaxKey) - - jsonExt.DecodeKeyed("$undefined", jdecUndefined) - jsonExt.EncodeType(Undefined, jencUndefined) - - jsonExt.Extend(&funcExt) -} - -func fbytes(format string, args ...interface{}) []byte { - var buf bytes.Buffer - fmt.Fprintf(&buf, format, args...) 
- return buf.Bytes() -} - -func jdecBinary(data []byte) (interface{}, error) { - var v struct { - Binary []byte `json:"$binary"` - Type string `json:"$type"` - Func struct { - Binary []byte `json:"$binary"` - Type int64 `json:"$type"` - } `json:"$binaryFunc"` - } - err := jdec(data, &v) - if err != nil { - return nil, err - } - - var binData []byte - var binKind int64 - if v.Type == "" && v.Binary == nil { - binData = v.Func.Binary - binKind = v.Func.Type - } else if v.Type == "" { - return v.Binary, nil - } else { - binData = v.Binary - binKind, err = strconv.ParseInt(v.Type, 0, 64) - if err != nil { - binKind = -1 - } - } - - if binKind == 0 { - return binData, nil - } - if binKind < 0 || binKind > 255 { - return nil, fmt.Errorf("invalid type in binary object: %s", data) - } - - return Binary{Kind: byte(binKind), Data: binData}, nil -} - -func jencBinarySlice(v interface{}) ([]byte, error) { - in := v.([]byte) - out := make([]byte, base64.StdEncoding.EncodedLen(len(in))) - base64.StdEncoding.Encode(out, in) - return fbytes(`{"$binary":"%s","$type":"0x0"}`, out), nil -} - -func jencBinaryType(v interface{}) ([]byte, error) { - in := v.(Binary) - out := make([]byte, base64.StdEncoding.EncodedLen(len(in.Data))) - base64.StdEncoding.Encode(out, in.Data) - return fbytes(`{"$binary":"%s","$type":"0x%x"}`, out, in.Kind), nil -} - -const jdateFormat = "2006-01-02T15:04:05.999Z07:00" - -func jdecDate(data []byte) (interface{}, error) { - var v struct { - S string `json:"$date"` - Func struct { - S string - } `json:"$dateFunc"` - } - _ = jdec(data, &v) - if v.S == "" { - v.S = v.Func.S - } - if v.S != "" { - var errs []string - for _, format := range []string{jdateFormat, "2006-01-02"} { - t, err := time.Parse(format, v.S) - if err == nil { - return t, nil - } - errs = append(errs, err.Error()) - } - return nil, fmt.Errorf("cannot parse date: %q [%s]", v.S, strings.Join(errs, ", ")) - } - - var vn struct { - Date struct { - N int64 `json:"$numberLong,string"` - } `json:"$date"` - Func struct { - S int64 - } `json:"$dateFunc"` - } - err := jdec(data, &vn) - if err != nil { - return nil, fmt.Errorf("cannot parse date: %q", data) - } - n := vn.Date.N - if n == 0 { - n = vn.Func.S - } - return time.Unix(n/1000, n%1000*1e6).UTC(), nil -} - -func jencDate(v interface{}) ([]byte, error) { - t := v.(time.Time) - return fbytes(`{"$date":%q}`, t.Format(jdateFormat)), nil -} - -func jdecTimestamp(data []byte) (interface{}, error) { - var v struct { - Func struct { - T int32 `json:"t"` - I int32 `json:"i"` - } `json:"$timestamp"` - } - err := jdec(data, &v) - if err != nil { - return nil, err - } - return MongoTimestamp(uint64(v.Func.T)<<32 | uint64(uint32(v.Func.I))), nil -} - -func jencTimestamp(v interface{}) ([]byte, error) { - ts := uint64(v.(MongoTimestamp)) - return fbytes(`{"$timestamp":{"t":%d,"i":%d}}`, ts>>32, uint32(ts)), nil -} - -func jdecRegEx(data []byte) (interface{}, error) { - var v struct { - Regex string `json:"$regex"` - Options string `json:"$options"` - } - err := jdec(data, &v) - if err != nil { - return nil, err - } - return RegEx{v.Regex, v.Options}, nil -} - -func jencRegEx(v interface{}) ([]byte, error) { - re := v.(RegEx) - type regex struct { - Regex string `json:"$regex"` - Options string `json:"$options"` - } - return json.Marshal(regex{re.Pattern, re.Options}) -} - -func jdecObjectId(data []byte) (interface{}, error) { - var v struct { - Id string `json:"$oid"` - Func struct { - Id string - } `json:"$oidFunc"` - } - err := jdec(data, &v) - if err != nil { - return nil, err - 
} - if v.Id == "" { - v.Id = v.Func.Id - } - return ObjectIdHex(v.Id), nil -} - -func jencObjectId(v interface{}) ([]byte, error) { - return fbytes(`{"$oid":"%s"}`, v.(ObjectId).Hex()), nil -} - -func jdecDBRef(data []byte) (interface{}, error) { - // TODO Support unmarshaling $ref and $id into the input value. - var v struct { - Obj map[string]interface{} `json:"$dbrefFunc"` - } - // TODO Fix this. Must not be required. - v.Obj = make(map[string]interface{}) - err := jdec(data, &v) - if err != nil { - return nil, err - } - return v.Obj, nil -} - -func jdecNumberLong(data []byte) (interface{}, error) { - var v struct { - N int64 `json:"$numberLong,string"` - Func struct { - N int64 `json:",string"` - } `json:"$numberLongFunc"` - } - var vn struct { - N int64 `json:"$numberLong"` - Func struct { - N int64 - } `json:"$numberLongFunc"` - } - err := jdec(data, &v) - if err != nil { - err = jdec(data, &vn) - v.N = vn.N - v.Func.N = vn.Func.N - } - if err != nil { - return nil, err - } - if v.N != 0 { - return v.N, nil - } - return v.Func.N, nil -} - -func jencNumberLong(v interface{}) ([]byte, error) { - n := v.(int64) - f := `{"$numberLong":"%d"}` - if n <= 1<<53 { - f = `{"$numberLong":%d}` - } - return fbytes(f, n), nil -} - -func jencInt(v interface{}) ([]byte, error) { - n := v.(int) - f := `{"$numberLong":"%d"}` - if int64(n) <= 1<<53 { - f = `%d` - } - return fbytes(f, n), nil -} - -func jdecMinKey(data []byte) (interface{}, error) { - var v struct { - N int64 `json:"$minKey"` - } - err := jdec(data, &v) - if err != nil { - return nil, err - } - if v.N != 1 { - return nil, fmt.Errorf("invalid $minKey object: %s", data) - } - return MinKey, nil -} - -func jdecMaxKey(data []byte) (interface{}, error) { - var v struct { - N int64 `json:"$maxKey"` - } - err := jdec(data, &v) - if err != nil { - return nil, err - } - if v.N != 1 { - return nil, fmt.Errorf("invalid $maxKey object: %s", data) - } - return MaxKey, nil -} - -func jencMinMaxKey(v interface{}) ([]byte, error) { - switch v.(orderKey) { - case MinKey: - return []byte(`{"$minKey":1}`), nil - case MaxKey: - return []byte(`{"$maxKey":1}`), nil - } - panic(fmt.Sprintf("invalid $minKey/$maxKey value: %d", v)) -} - -func jdecUndefined(data []byte) (interface{}, error) { - var v struct { - B bool `json:"$undefined"` - } - err := jdec(data, &v) - if err != nil { - return nil, err - } - if !v.B { - return nil, fmt.Errorf("invalid $undefined object: %s", data) - } - return Undefined, nil -} - -func jencUndefined(v interface{}) ([]byte, error) { - return []byte(`{"$undefined":true}`), nil -} diff --git a/services/community/vendor/github.com/globalsign/mgo/bson/stream.go b/services/community/vendor/github.com/globalsign/mgo/bson/stream.go deleted file mode 100644 index 46652845..00000000 --- a/services/community/vendor/github.com/globalsign/mgo/bson/stream.go +++ /dev/null @@ -1,90 +0,0 @@ -package bson - -import ( - "bytes" - "encoding/binary" - "fmt" - "io" -) - -const ( - // MinDocumentSize is the size of the smallest possible valid BSON document: - // an int32 size header + 0x00 (end of document). - MinDocumentSize = 5 - - // MaxDocumentSize is the largest possible size for a BSON document allowed by MongoDB, - // that is, 16 MiB (see https://docs.mongodb.com/manual/reference/limits/). - MaxDocumentSize = 16777216 -) - -// ErrInvalidDocumentSize is an error returned when a BSON document's header -// contains a size smaller than MinDocumentSize or greater than MaxDocumentSize. 
-type ErrInvalidDocumentSize struct { - DocumentSize int32 -} - -func (e ErrInvalidDocumentSize) Error() string { - return fmt.Sprintf("invalid document size %d", e.DocumentSize) -} - -// A Decoder reads and decodes BSON values from an input stream. -type Decoder struct { - source io.Reader -} - -// NewDecoder returns a new Decoder that reads from source. -// It does not add any extra buffering, and may not read data from source beyond the BSON values requested. -func NewDecoder(source io.Reader) *Decoder { - return &Decoder{source: source} -} - -// Decode reads the next BSON-encoded value from its input and stores it in the value pointed to by v. -// See the documentation for Unmarshal for details about the conversion of BSON into a Go value. -func (dec *Decoder) Decode(v interface{}) (err error) { - // BSON documents start with their size as a *signed* int32. - var docSize int32 - if err = binary.Read(dec.source, binary.LittleEndian, &docSize); err != nil { - return - } - - if docSize < MinDocumentSize || docSize > MaxDocumentSize { - return ErrInvalidDocumentSize{DocumentSize: docSize} - } - - docBuffer := bytes.NewBuffer(make([]byte, 0, docSize)) - if err = binary.Write(docBuffer, binary.LittleEndian, docSize); err != nil { - return - } - - // docSize is the *full* document's size (including the 4-byte size header, - // which has already been read). - if _, err = io.CopyN(docBuffer, dec.source, int64(docSize-4)); err != nil { - return - } - - // Let Unmarshal handle the rest. - defer handleErr(&err) - return Unmarshal(docBuffer.Bytes(), v) -} - -// An Encoder encodes and writes BSON values to an output stream. -type Encoder struct { - target io.Writer -} - -// NewEncoder returns a new Encoder that writes to target. -func NewEncoder(target io.Writer) *Encoder { - return &Encoder{target: target} -} - -// Encode encodes v to BSON, and if successful writes it to the Encoder's output stream. -// See the documentation for Marshal for details about the conversion of Go values to BSON. -func (enc *Encoder) Encode(v interface{}) error { - data, err := Marshal(v) - if err != nil { - return err - } - - _, err = enc.target.Write(data) - return err -} diff --git a/services/community/vendor/github.com/globalsign/mgo/bulk.go b/services/community/vendor/github.com/globalsign/mgo/bulk.go deleted file mode 100644 index c234fcce..00000000 --- a/services/community/vendor/github.com/globalsign/mgo/bulk.go +++ /dev/null @@ -1,366 +0,0 @@ -package mgo - -import ( - "bytes" - "sort" - "sync" - - "github.com/globalsign/mgo/bson" -) - -// Bulk represents an operation that can be prepared with several -// orthogonal changes before being delivered to the server. -// -// MongoDB servers older than version 2.6 do not have proper support for bulk -// operations, so the driver attempts to map its API as much as possible into -// the functionality that works. In particular, in those releases updates and -// removals are sent individually, and inserts are sent in bulk but have -// suboptimal error reporting compared to more recent versions of the server. -// See the documentation of BulkErrorCase for details on that. 
-// -// Relevant documentation: -// -// http://blog.mongodb.org/post/84922794768/mongodbs-new-bulk-api -// -type Bulk struct { - c *Collection - opcount int - actions []bulkAction - ordered bool -} - -type bulkOp int - -const ( - bulkInsert bulkOp = iota + 1 - bulkUpdate - bulkUpdateAll - bulkRemove -) - -type bulkAction struct { - op bulkOp - docs []interface{} - idxs []int -} - -type bulkUpdateOp []interface{} -type bulkDeleteOp []interface{} - -// BulkResult holds the results for a bulk operation. -type BulkResult struct { - Matched int - Modified int // Available only for MongoDB 2.6+ - - // Be conservative while we understand exactly how to report these - // results in a useful and convenient way, and also how to emulate - // them with prior servers. - private bool -} - -// BulkError holds an error returned from running a Bulk operation. -// Individual errors may be obtained and inspected via the Cases method. -type BulkError struct { - ecases []BulkErrorCase -} - -func (e *BulkError) Error() string { - if len(e.ecases) == 0 { - return "invalid BulkError instance: no errors" - } - if len(e.ecases) == 1 { - return e.ecases[0].Err.Error() - } - msgs := make([]string, 0, len(e.ecases)) - seen := make(map[string]bool) - for _, ecase := range e.ecases { - msg := ecase.Err.Error() - if !seen[msg] { - seen[msg] = true - msgs = append(msgs, msg) - } - } - if len(msgs) == 1 { - return msgs[0] - } - var buf bytes.Buffer - buf.WriteString("multiple errors in bulk operation:\n") - for _, msg := range msgs { - buf.WriteString(" - ") - buf.WriteString(msg) - buf.WriteByte('\n') - } - return buf.String() -} - -type bulkErrorCases []BulkErrorCase - -func (slice bulkErrorCases) Len() int { return len(slice) } -func (slice bulkErrorCases) Less(i, j int) bool { return slice[i].Index < slice[j].Index } -func (slice bulkErrorCases) Swap(i, j int) { slice[i], slice[j] = slice[j], slice[i] } - -// BulkErrorCase holds an individual error found while attempting a single change -// within a bulk operation, and the position in which it was enqueued. -// -// MongoDB servers older than version 2.6 do not have proper support for bulk -// operations, so the driver attempts to map its API as much as possible into -// the functionality that works. In particular, only the last error is reported -// for bulk inserts and without any positional information, so the Index -// field is set to -1 in these cases. -type BulkErrorCase struct { - Index int // Position of operation that failed, or -1 if unknown. - Err error -} - -// Cases returns all individual errors found while attempting the requested changes. -// -// See the documentation of BulkErrorCase for limitations in older MongoDB releases. -func (e *BulkError) Cases() []BulkErrorCase { - return e.ecases -} - -var actionPool = sync.Pool{ - New: func() interface{} { - return &bulkAction{ - docs: make([]interface{}, 0), - idxs: make([]int, 0), - } - }, -} - -// Bulk returns a value to prepare the execution of a bulk operation. -func (c *Collection) Bulk() *Bulk { - return &Bulk{c: c, ordered: true} -} - -// Unordered puts the bulk operation in unordered mode. -// -// In unordered mode the indvidual operations may be sent -// out of order, which means latter operations may proceed -// even if prior ones have failed. 
-func (b *Bulk) Unordered() { - b.ordered = false -} - -func (b *Bulk) action(op bulkOp, opcount int) *bulkAction { - var action *bulkAction - if len(b.actions) > 0 && b.actions[len(b.actions)-1].op == op { - action = &b.actions[len(b.actions)-1] - } else if !b.ordered { - for i := range b.actions { - if b.actions[i].op == op { - action = &b.actions[i] - break - } - } - } - if action == nil { - a := actionPool.Get().(*bulkAction) - a.op = op - b.actions = append(b.actions, *a) - action = &b.actions[len(b.actions)-1] - } - for i := 0; i < opcount; i++ { - action.idxs = append(action.idxs, b.opcount) - b.opcount++ - } - return action -} - -// Insert queues up the provided documents for insertion. -func (b *Bulk) Insert(docs ...interface{}) { - action := b.action(bulkInsert, len(docs)) - action.docs = append(action.docs, docs...) -} - -// Remove queues up the provided selectors for removing matching documents. -// Each selector will remove only a single matching document. -func (b *Bulk) Remove(selectors ...interface{}) { - action := b.action(bulkRemove, len(selectors)) - for _, selector := range selectors { - if selector == nil { - selector = bson.D{} - } - action.docs = append(action.docs, &deleteOp{ - Collection: b.c.FullName, - Selector: selector, - Flags: 1, - Limit: 1, - }) - } -} - -// RemoveAll queues up the provided selectors for removing all matching documents. -// Each selector will remove all matching documents. -func (b *Bulk) RemoveAll(selectors ...interface{}) { - action := b.action(bulkRemove, len(selectors)) - for _, selector := range selectors { - if selector == nil { - selector = bson.D{} - } - action.docs = append(action.docs, &deleteOp{ - Collection: b.c.FullName, - Selector: selector, - Flags: 0, - Limit: 0, - }) - } -} - -// Update queues up the provided pairs of updating instructions. -// The first element of each pair selects which documents must be -// updated, and the second element defines how to update it. -// Each pair matches exactly one document for updating at most. -func (b *Bulk) Update(pairs ...interface{}) { - if len(pairs)%2 != 0 { - panic("Bulk.Update requires an even number of parameters") - } - action := b.action(bulkUpdate, len(pairs)/2) - for i := 0; i < len(pairs); i += 2 { - selector := pairs[i] - if selector == nil { - selector = bson.D{} - } - action.docs = append(action.docs, &updateOp{ - Collection: b.c.FullName, - Selector: selector, - Update: pairs[i+1], - }) - } -} - -// UpdateAll queues up the provided pairs of updating instructions. -// The first element of each pair selects which documents must be -// updated, and the second element defines how to update it. -// Each pair updates all documents matching the selector. -func (b *Bulk) UpdateAll(pairs ...interface{}) { - if len(pairs)%2 != 0 { - panic("Bulk.UpdateAll requires an even number of parameters") - } - action := b.action(bulkUpdate, len(pairs)/2) - for i := 0; i < len(pairs); i += 2 { - selector := pairs[i] - if selector == nil { - selector = bson.D{} - } - action.docs = append(action.docs, &updateOp{ - Collection: b.c.FullName, - Selector: selector, - Update: pairs[i+1], - Flags: 2, - Multi: true, - }) - } -} - -// Upsert queues up the provided pairs of upserting instructions. -// The first element of each pair selects which documents must be -// updated, and the second element defines how to update it. -// Each pair matches exactly one document for updating at most. 
-func (b *Bulk) Upsert(pairs ...interface{}) { - if len(pairs)%2 != 0 { - panic("Bulk.Update requires an even number of parameters") - } - action := b.action(bulkUpdate, len(pairs)/2) - for i := 0; i < len(pairs); i += 2 { - selector := pairs[i] - if selector == nil { - selector = bson.D{} - } - action.docs = append(action.docs, &updateOp{ - Collection: b.c.FullName, - Selector: selector, - Update: pairs[i+1], - Flags: 1, - Upsert: true, - }) - } -} - -// Run runs all the operations queued up. -// -// If an error is reported on an unordered bulk operation, the error value may -// be an aggregation of all issues observed. As an exception to that, Insert -// operations running on MongoDB versions prior to 2.6 will report the last -// error only due to a limitation in the wire protocol. -func (b *Bulk) Run() (*BulkResult, error) { - var result BulkResult - var berr BulkError - var failed bool - for i := range b.actions { - action := &b.actions[i] - var ok bool - switch action.op { - case bulkInsert: - ok = b.runInsert(action, &result, &berr) - case bulkUpdate: - ok = b.runUpdate(action, &result, &berr) - case bulkRemove: - ok = b.runRemove(action, &result, &berr) - default: - panic("unknown bulk operation") - } - action.idxs = action.idxs[0:0] - action.docs = action.docs[0:0] - actionPool.Put(action) - if !ok { - failed = true - if b.ordered { - break - } - } - } - if failed { - sort.Sort(bulkErrorCases(berr.ecases)) - return nil, &berr - } - return &result, nil -} - -func (b *Bulk) runInsert(action *bulkAction, result *BulkResult, berr *BulkError) bool { - op := &insertOp{b.c.FullName, action.docs, 0} - if !b.ordered { - op.flags = 1 // ContinueOnError - } - lerr, err := b.c.writeOp(op, b.ordered) - return b.checkSuccess(action, berr, lerr, err) -} - -func (b *Bulk) runUpdate(action *bulkAction, result *BulkResult, berr *BulkError) bool { - lerr, err := b.c.writeOp(bulkUpdateOp(action.docs), b.ordered) - if lerr != nil { - result.Matched += lerr.N - result.Modified += lerr.modified - } - return b.checkSuccess(action, berr, lerr, err) -} - -func (b *Bulk) runRemove(action *bulkAction, result *BulkResult, berr *BulkError) bool { - lerr, err := b.c.writeOp(bulkDeleteOp(action.docs), b.ordered) - if lerr != nil { - result.Matched += lerr.N - result.Modified += lerr.modified - } - return b.checkSuccess(action, berr, lerr, err) -} - -func (b *Bulk) checkSuccess(action *bulkAction, berr *BulkError, lerr *LastError, err error) bool { - if lerr != nil && len(lerr.ecases) > 0 { - for i := 0; i < len(lerr.ecases); i++ { - // Map back from the local error index into the visible one. 
- ecase := lerr.ecases[i] - idx := ecase.Index - if idx >= 0 { - idx = action.idxs[idx] - } - berr.ecases = append(berr.ecases, BulkErrorCase{idx, ecase.Err}) - } - return false - } else if err != nil { - for i := 0; i < len(action.idxs); i++ { - berr.ecases = append(berr.ecases, BulkErrorCase{action.idxs[i], err}) - } - return false - } - return true -} diff --git a/services/community/vendor/github.com/globalsign/mgo/changestreams.go b/services/community/vendor/github.com/globalsign/mgo/changestreams.go deleted file mode 100644 index 5c2279c6..00000000 --- a/services/community/vendor/github.com/globalsign/mgo/changestreams.go +++ /dev/null @@ -1,357 +0,0 @@ -package mgo - -import ( - "errors" - "fmt" - "reflect" - "sync" - "time" - - "github.com/globalsign/mgo/bson" -) - -type FullDocument string - -const ( - Default = "default" - UpdateLookup = "updateLookup" -) - -type ChangeStream struct { - iter *Iter - isClosed bool - options ChangeStreamOptions - pipeline interface{} - resumeToken *bson.Raw - collection *Collection - readPreference *ReadPreference - err error - m sync.Mutex - sessionCopied bool -} - -type ChangeStreamOptions struct { - - // FullDocument controls the amount of data that the server will return when - // returning a changes document. - FullDocument FullDocument - - // ResumeAfter specifies the logical starting point for the new change stream. - ResumeAfter *bson.Raw - - // MaxAwaitTimeMS specifies the maximum amount of time for the server to wait - // on new documents to satisfy a change stream query. - MaxAwaitTimeMS time.Duration - - // BatchSize specifies the number of documents to return per batch. - BatchSize int - - // Collation specifies the way the server should collate returned data. - //TODO Collation *Collation -} - -var errMissingResumeToken = errors.New("resume token missing from result") - -// Watch constructs a new ChangeStream capable of receiving continuing data -// from the database. -func (coll *Collection) Watch(pipeline interface{}, - options ChangeStreamOptions) (*ChangeStream, error) { - - if pipeline == nil { - pipeline = []bson.M{} - } - - csPipe := constructChangeStreamPipeline(pipeline, options) - pipe := coll.Pipe(&csPipe) - if options.MaxAwaitTimeMS > 0 { - pipe.SetMaxTime(options.MaxAwaitTimeMS) - } - if options.BatchSize > 0 { - pipe.Batch(options.BatchSize) - } - pIter := pipe.Iter() - - // check that there was no issue creating the iterator. - // this will fail immediately with an error from the server if running against - // a standalone. - if err := pIter.Err(); err != nil { - return nil, err - } - - pIter.isChangeStream = true - return &ChangeStream{ - iter: pIter, - collection: coll, - resumeToken: nil, - options: options, - pipeline: pipeline, - }, nil -} - -// Next retrieves the next document from the change stream, blocking if necessary. -// Next returns true if a document was successfully unmarshalled into result, -// and false if an error occured. When Next returns false, the Err method should -// be called to check what error occurred during iteration. If there were no events -// available (ErrNotFound), the Err method returns nil so the user can retry the invocaton. 
-// -// For example: -// -// pipeline := []bson.M{} -// -// changeStream := collection.Watch(pipeline, ChangeStreamOptions{}) -// for changeStream.Next(&changeDoc) { -// fmt.Printf("Change: %v\n", changeDoc) -// } -// -// if err := changeStream.Close(); err != nil { -// return err -// } -// -// If the pipeline used removes the _id field from the result, Next will error -// because the _id field is needed to resume iteration when an error occurs. -// -func (changeStream *ChangeStream) Next(result interface{}) bool { - // the err field is being constantly overwritten and we don't want the user to - // attempt to read it at this point so we lock. - changeStream.m.Lock() - - defer changeStream.m.Unlock() - - // if we are in a state of error, then don't continue. - if changeStream.err != nil { - return false - } - - if changeStream.isClosed { - changeStream.err = fmt.Errorf("illegal use of a closed ChangeStream") - return false - } - - var err error - - // attempt to fetch the change stream result. - err = changeStream.fetchResultSet(result) - if err == nil { - return true - } - - // if we get no results we return false with no errors so the user can call Next - // again, resuming is not needed as the iterator is simply timed out as no events happened. - // The user will call Timeout in order to understand if this was the case. - if err == ErrNotFound { - return false - } - - // check if the error is resumable - if !isResumableError(err) { - // error is not resumable, give up and return it to the user. - changeStream.err = err - return false - } - - // try to resume. - err = changeStream.resume() - if err != nil { - // we've not been able to successfully resume and should only try once, - // so we give up. - changeStream.err = err - return false - } - - // we've successfully resumed the changestream. - // try to fetch the next result. - err = changeStream.fetchResultSet(result) - if err != nil { - changeStream.err = err - return false - } - - return true -} - -// Err returns nil if no errors happened during iteration, or the actual -// error otherwise. -func (changeStream *ChangeStream) Err() error { - changeStream.m.Lock() - defer changeStream.m.Unlock() - return changeStream.err -} - -// Close kills the server cursor used by the iterator, if any, and returns -// nil if no errors happened during iteration, or the actual error otherwise. -func (changeStream *ChangeStream) Close() error { - changeStream.m.Lock() - defer changeStream.m.Unlock() - changeStream.isClosed = true - err := changeStream.iter.Close() - if err != nil { - changeStream.err = err - } - if changeStream.sessionCopied { - changeStream.iter.session.Close() - changeStream.sessionCopied = false - } - return err -} - -// ResumeToken returns a copy of the current resume token held by the change stream. -// This token should be treated as an opaque token that can be provided to instantiate -// a new change stream. -func (changeStream *ChangeStream) ResumeToken() *bson.Raw { - changeStream.m.Lock() - defer changeStream.m.Unlock() - if changeStream.resumeToken == nil { - return nil - } - var tokenCopy = *changeStream.resumeToken - return &tokenCopy -} - -// Timeout returns true if the last call of Next returned false because of an iterator timeout. -func (changeStream *ChangeStream) Timeout() bool { - return changeStream.iter.Timeout() -} - -func constructChangeStreamPipeline(pipeline interface{}, - options ChangeStreamOptions) interface{} { - pipelinev := reflect.ValueOf(pipeline) - - // ensure that the pipeline passed in is a slice. 
- if pipelinev.Kind() != reflect.Slice { - panic("pipeline argument must be a slice") - } - - // construct the options to be used by the change notification - // pipeline stage. - changeStreamStageOptions := bson.M{} - - if options.FullDocument != "" { - changeStreamStageOptions["fullDocument"] = options.FullDocument - } - if options.ResumeAfter != nil { - changeStreamStageOptions["resumeAfter"] = options.ResumeAfter - } - - changeStreamStage := bson.M{"$changeStream": changeStreamStageOptions} - - pipeOfInterfaces := make([]interface{}, pipelinev.Len()+1) - - // insert the change notification pipeline stage at the beginning of the - // aggregation. - pipeOfInterfaces[0] = changeStreamStage - - // convert the passed in slice to a slice of interfaces. - for i := 0; i < pipelinev.Len(); i++ { - pipeOfInterfaces[1+i] = pipelinev.Index(i).Addr().Interface() - } - var pipelineAsInterface interface{} = pipeOfInterfaces - return pipelineAsInterface -} - -func (changeStream *ChangeStream) resume() error { - // copy the information for the new socket. - - // Thanks to Copy() future uses will acquire a new socket against the newly selected DB. - newSession := changeStream.iter.session.Copy() - - // fetch the cursor from the iterator and use it to run a killCursors - // on the connection. - cursorId := changeStream.iter.op.cursorId - err := runKillCursorsOnSession(newSession, cursorId) - if err != nil { - return err - } - - // change out the old connection to the database with the new connection. - if changeStream.sessionCopied { - changeStream.collection.Database.Session.Close() - } - changeStream.collection.Database.Session = newSession - changeStream.sessionCopied = true - - opts := changeStream.options - if changeStream.resumeToken != nil { - opts.ResumeAfter = changeStream.resumeToken - } - // make a new pipeline containing the resume token. - changeStreamPipeline := constructChangeStreamPipeline(changeStream.pipeline, opts) - - // generate the new iterator with the new connection. - newPipe := changeStream.collection.Pipe(changeStreamPipeline) - changeStream.iter = newPipe.Iter() - if err := changeStream.iter.Err(); err != nil { - return err - } - changeStream.iter.isChangeStream = true - return nil -} - -// fetchResumeToken unmarshals the _id field from the document, setting an error -// on the changeStream if it is unable to. -func (changeStream *ChangeStream) fetchResumeToken(rawResult *bson.Raw) error { - changeStreamResult := struct { - ResumeToken *bson.Raw `bson:"_id,omitempty"` - }{} - - err := rawResult.Unmarshal(&changeStreamResult) - if err != nil { - return err - } - - if changeStreamResult.ResumeToken == nil { - return errMissingResumeToken - } - - changeStream.resumeToken = changeStreamResult.ResumeToken - return nil -} - -func (changeStream *ChangeStream) fetchResultSet(result interface{}) error { - rawResult := bson.Raw{} - - // fetch the next set of documents from the cursor. - gotNext := changeStream.iter.Next(&rawResult) - err := changeStream.iter.Err() - if err != nil { - return err - } - - if !gotNext && err == nil { - // If the iter.Err() method returns nil despite us not getting a next batch, - // it is becuase iter.Err() silences this case. - return ErrNotFound - } - - // grab the resumeToken from the results - if err := changeStream.fetchResumeToken(&rawResult); err != nil { - return err - } - - // put the raw results into the data structure the user provided. 
- if err := rawResult.Unmarshal(result); err != nil { - return err - } - return nil -} - -func isResumableError(err error) bool { - _, isQueryError := err.(*QueryError) - // if it is not a database error OR it is a database error, - // but the error is a notMaster error - //and is not a missingResumeToken error (caused by the user provided pipeline) - return (!isQueryError || isNotMasterError(err)) && (err != errMissingResumeToken) -} - -func runKillCursorsOnSession(session *Session, cursorId int64) error { - socket, err := session.acquireSocket(true) - if err != nil { - return err - } - err = socket.Query(&killCursorsOp{[]int64{cursorId}}) - if err != nil { - return err - } - socket.Release() - - return nil -} diff --git a/services/community/vendor/github.com/globalsign/mgo/cluster.go b/services/community/vendor/github.com/globalsign/mgo/cluster.go deleted file mode 100644 index ff431cac..00000000 --- a/services/community/vendor/github.com/globalsign/mgo/cluster.go +++ /dev/null @@ -1,704 +0,0 @@ -// mgo - MongoDB driver for Go -// -// Copyright (c) 2010-2012 - Gustavo Niemeyer -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are met: -// -// 1. Redistributions of source code must retain the above copyright notice, this -// list of conditions and the following disclaimer. -// 2. Redistributions in binary form must reproduce the above copyright notice, -// this list of conditions and the following disclaimer in the documentation -// and/or other materials provided with the distribution. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package mgo - -import ( - "errors" - "fmt" - "net" - "runtime" - "strconv" - "strings" - "sync" - "time" - - "github.com/globalsign/mgo/bson" -) - -// --------------------------------------------------------------------------- -// Mongo cluster encapsulation. -// -// A cluster enables the communication with one or more servers participating -// in a mongo cluster. This works with individual servers, a replica set, -// a replica pair, one or multiple mongos routers, etc. 
- -type mongoCluster struct { - sync.RWMutex - serverSynced sync.Cond - userSeeds []string - dynaSeeds []string - servers mongoServers - masters mongoServers - references int - syncing bool - syncCount uint - cachedIndex map[string]bool - sync chan bool - dial dialer - dialInfo *DialInfo -} - -func newCluster(userSeeds []string, info *DialInfo) *mongoCluster { - cluster := &mongoCluster{ - userSeeds: userSeeds, - references: 1, - dial: dialer{info.Dial, info.DialServer}, - dialInfo: info, - } - cluster.serverSynced.L = cluster.RWMutex.RLocker() - cluster.sync = make(chan bool, 1) - stats.cluster(+1) - go cluster.syncServersLoop() - return cluster -} - -// Acquire increases the reference count for the cluster. -func (cluster *mongoCluster) Acquire() { - cluster.Lock() - cluster.references++ - debugf("Cluster %p acquired (refs=%d)", cluster, cluster.references) - cluster.Unlock() -} - -// Release decreases the reference count for the cluster. Once -// it reaches zero, all servers will be closed. -func (cluster *mongoCluster) Release() { - cluster.Lock() - if cluster.references == 0 { - panic("cluster.Release() with references == 0") - } - cluster.references-- - debugf("Cluster %p released (refs=%d)", cluster, cluster.references) - if cluster.references == 0 { - for _, server := range cluster.servers.Slice() { - server.Close() - } - // Wake up the sync loop so it can die. - cluster.syncServers() - stats.cluster(-1) - } - cluster.Unlock() -} - -func (cluster *mongoCluster) LiveServers() (servers []string) { - cluster.RLock() - for _, serv := range cluster.servers.Slice() { - servers = append(servers, serv.Addr) - } - cluster.RUnlock() - return servers -} - -func (cluster *mongoCluster) removeServer(server *mongoServer) { - cluster.Lock() - cluster.masters.Remove(server) - other := cluster.servers.Remove(server) - cluster.Unlock() - if other != nil { - other.CloseIdle() - log("Removed server ", server.Addr, " from cluster.") - } - server.CloseIdle() -} - -type isMasterResult struct { - IsMaster bool - Secondary bool - Primary string - Hosts []string - Passives []string - Tags bson.D - Msg string - SetName string `bson:"setName"` - MaxWireVersion int `bson:"maxWireVersion"` -} - -func (cluster *mongoCluster) isMaster(socket *mongoSocket, result *isMasterResult) error { - // Monotonic let's it talk to a slave and still hold the socket. - session := newSession(Monotonic, cluster, cluster.dialInfo) - session.setSocket(socket) - - var cmd = bson.D{{Name: "isMaster", Value: 1}} - - // Send client metadata to the server to identify this socket if this is - // the first isMaster call only. 
- // - // isMaster commands issued after the initial connection handshake MUST NOT contain handshake arguments - // https://github.com/mongodb/specifications/blob/master/source/mongodb-handshake/handshake.rst#connection-handshake - // - socket.sendMeta.Do(func() { - var meta = bson.M{ - "driver": bson.M{ - "name": "mgo", - "version": "globalsign", - }, - "os": bson.M{ - "type": runtime.GOOS, - "architecture": runtime.GOARCH, - }, - } - - // Include the application name if set - if cluster.dialInfo.AppName != "" { - meta["application"] = bson.M{"name": cluster.dialInfo.AppName} - } - - cmd = append(cmd, bson.DocElem{ - Name: "client", - Value: meta, - }) - }) - - err := session.runOnSocket(socket, cmd, result) - session.Close() - return err -} - -type possibleTimeout interface { - Timeout() bool -} - -func (cluster *mongoCluster) syncServer(server *mongoServer) (info *mongoServerInfo, hosts []string, err error) { - addr := server.Addr - log("SYNC Processing ", addr, "...") - - // Retry a few times to avoid knocking a server down for a hiccup. - var result isMasterResult - var tryerr error - for retry := 0; ; retry++ { - if retry == 3 || retry == 1 && cluster.dialInfo.FailFast { - return nil, nil, tryerr - } - if retry > 0 { - // Don't abuse the server needlessly if there's something actually wrong. - if err, ok := tryerr.(possibleTimeout); ok && err.Timeout() { - // Give a chance for waiters to timeout as well. - cluster.serverSynced.Broadcast() - } - time.Sleep(syncShortDelay) - } - - // Don't ever hit the pool limit for syncing - config := cluster.dialInfo.Copy() - config.PoolLimit = 0 - - socket, _, err := server.AcquireSocket(config) - if err != nil { - tryerr = err - logf("SYNC Failed to get socket to %s: %v", addr, err) - continue - } - err = cluster.isMaster(socket, &result) - - // Restore the correct dial config before returning it to the pool - socket.dialInfo = cluster.dialInfo - socket.Release() - - if err != nil { - tryerr = err - logf("SYNC Command 'ismaster' to %s failed: %v", addr, err) - continue - } - debugf("SYNC Result of 'ismaster' from %s: %#v", addr, result) - break - } - - if cluster.dialInfo.ReplicaSetName != "" && result.SetName != cluster.dialInfo.ReplicaSetName { - logf("SYNC Server %s is not a member of replica set %q", addr, cluster.dialInfo.ReplicaSetName) - return nil, nil, fmt.Errorf("server %s is not a member of replica set %q", addr, cluster.dialInfo.ReplicaSetName) - } - - if result.IsMaster { - debugf("SYNC %s is a master.", addr) - if !server.info.Master { - // Made an incorrect assumption above, so fix stats. - stats.conn(-1, false) - stats.conn(+1, true) - } - } else if result.Secondary { - debugf("SYNC %s is a slave.", addr) - } else if cluster.dialInfo.Direct { - logf("SYNC %s in unknown state. Pretending it's a slave due to direct connection.", addr) - } else { - logf("SYNC %s is neither a master nor a slave.", addr) - // Let stats track it as whatever was known before. - return nil, nil, errors.New(addr + " is not a master nor slave") - } - - info = &mongoServerInfo{ - Master: result.IsMaster, - Mongos: result.Msg == "isdbgrid", - Tags: result.Tags, - SetName: result.SetName, - MaxWireVersion: result.MaxWireVersion, - } - - hosts = make([]string, 0, 1+len(result.Hosts)+len(result.Passives)) - if result.Primary != "" { - // First in the list to speed up master discovery. - hosts = append(hosts, result.Primary) - } - hosts = append(hosts, result.Hosts...) - hosts = append(hosts, result.Passives...) 
- - debugf("SYNC %s knows about the following peers: %#v", addr, hosts) - return info, hosts, nil -} - -type syncKind bool - -const ( - completeSync syncKind = true - partialSync syncKind = false -) - -func (cluster *mongoCluster) addServer(server *mongoServer, info *mongoServerInfo, syncKind syncKind) { - cluster.Lock() - current := cluster.servers.Search(server.ResolvedAddr) - if current == nil { - if syncKind == partialSync { - cluster.Unlock() - server.Close() - log("SYNC Discarding unknown server ", server.Addr, " due to partial sync.") - return - } - cluster.servers.Add(server) - if info.Master { - cluster.masters.Add(server) - log("SYNC Adding ", server.Addr, " to cluster as a master.") - } else { - log("SYNC Adding ", server.Addr, " to cluster as a slave.") - } - } else { - if server != current { - panic("addServer attempting to add duplicated server") - } - if server.Info().Master != info.Master { - if info.Master { - log("SYNC Server ", server.Addr, " is now a master.") - cluster.masters.Add(server) - } else { - log("SYNC Server ", server.Addr, " is now a slave.") - cluster.masters.Remove(server) - } - } - } - server.SetInfo(info) - debugf("SYNC Broadcasting availability of server %s", server.Addr) - cluster.serverSynced.Broadcast() - cluster.Unlock() -} - -func (cluster *mongoCluster) getKnownAddrs() []string { - cluster.RLock() - max := len(cluster.userSeeds) + len(cluster.dynaSeeds) + cluster.servers.Len() - seen := make(map[string]bool, max) - known := make([]string, 0, max) - - add := func(addr string) { - if _, found := seen[addr]; !found { - seen[addr] = true - known = append(known, addr) - } - } - - for _, addr := range cluster.userSeeds { - add(addr) - } - for _, addr := range cluster.dynaSeeds { - add(addr) - } - for _, serv := range cluster.servers.Slice() { - add(serv.Addr) - } - cluster.RUnlock() - - return known -} - -// syncServers injects a value into the cluster.sync channel to force -// an iteration of the syncServersLoop function. -func (cluster *mongoCluster) syncServers() { - select { - case cluster.sync <- true: - default: - } -} - -// How long to wait for a checkup of the cluster topology if nothing -// else kicks a synchronization before that. -const syncServersDelay = 30 * time.Second -const syncShortDelay = 500 * time.Millisecond - -// syncServersLoop loops while the cluster is alive to keep its idea of -// the server topology up-to-date. It must be called just once from -// newCluster. The loop iterates once syncServersDelay has passed, or -// if somebody injects a value into the cluster.sync channel to force a -// synchronization. A loop iteration will contact all servers in -// parallel, ask them about known peers and their own role within the -// cluster, and then attempt to do the same with all the peers -// retrieved. -func (cluster *mongoCluster) syncServersLoop() { - for { - debugf("SYNC Cluster %p is starting a sync loop iteration.", cluster) - - cluster.Lock() - if cluster.references == 0 { - cluster.Unlock() - break - } - cluster.references++ // Keep alive while syncing. - direct := cluster.dialInfo.Direct - cluster.Unlock() - - cluster.syncServersIteration(direct) - - // We just synchronized, so consume any outstanding requests. - select { - case <-cluster.sync: - default: - } - - cluster.Release() - - // Hold off before allowing another sync. No point in - // burning CPU looking for down servers. 
- if !cluster.dialInfo.FailFast { - time.Sleep(syncShortDelay) - } - - cluster.Lock() - if cluster.references == 0 { - cluster.Unlock() - break - } - cluster.syncCount++ - // Poke all waiters so they have a chance to timeout or - // restart syncing if they wish to. - cluster.serverSynced.Broadcast() - // Check if we have to restart immediately either way. - restart := !direct && cluster.masters.Empty() || cluster.servers.Empty() - cluster.Unlock() - - if restart { - log("SYNC No masters found. Will synchronize again.") - time.Sleep(syncShortDelay) - continue - } - - debugf("SYNC Cluster %p waiting for next requested or scheduled sync.", cluster) - - // Hold off until somebody explicitly requests a synchronization - // or it's time to check for a cluster topology change again. - select { - case <-cluster.sync: - case <-time.After(syncServersDelay): - } - } - debugf("SYNC Cluster %p is stopping its sync loop.", cluster) -} - -func (cluster *mongoCluster) server(addr string, tcpaddr *net.TCPAddr) *mongoServer { - cluster.RLock() - server := cluster.servers.Search(tcpaddr.String()) - cluster.RUnlock() - if server != nil { - return server - } - return newServer(addr, tcpaddr, cluster.sync, cluster.dial, cluster.dialInfo) -} - -func resolveAddr(addr string) (*net.TCPAddr, error) { - // Simple cases that do not need actual resolution. Works with IPv4 and v6. - if host, port, err := net.SplitHostPort(addr); err == nil { - if port, _ := strconv.Atoi(port); port > 0 { - zone := "" - if i := strings.LastIndex(host, "%"); i >= 0 { - zone = host[i+1:] - host = host[:i] - } - ip := net.ParseIP(host) - if ip != nil { - return &net.TCPAddr{IP: ip, Port: port, Zone: zone}, nil - } - } - } - - // Attempt to resolve IPv4 and v6 concurrently. - addrChan := make(chan *net.TCPAddr, 2) - for _, network := range []string{"udp4", "udp6"} { - network := network - go func() { - // The unfortunate UDP dialing hack allows having a timeout on address resolution. - conn, err := net.DialTimeout(network, addr, 10*time.Second) - if err != nil { - addrChan <- nil - } else { - addrChan <- (*net.TCPAddr)(conn.RemoteAddr().(*net.UDPAddr)) - conn.Close() - } - }() - } - - // Wait for the result of IPv4 and v6 resolution. Use IPv4 if available. - tcpaddr := <-addrChan - if tcpaddr == nil || len(tcpaddr.IP) != 4 { - var timeout <-chan time.Time - if tcpaddr != nil { - // Don't wait too long if an IPv6 address is known. - timeout = time.After(50 * time.Millisecond) - } - select { - case <-timeout: - case tcpaddr2 := <-addrChan: - if tcpaddr == nil || tcpaddr2 != nil { - // It's an IPv4 address or the only known address. Use it. 
- tcpaddr = tcpaddr2 - } - } - } - - if tcpaddr == nil { - log("SYNC Failed to resolve server address: ", addr) - return nil, errors.New("failed to resolve server address: " + addr) - } - if tcpaddr.String() != addr { - debug("SYNC Address ", addr, " resolved as ", tcpaddr.String()) - } - return tcpaddr, nil -} - -type pendingAdd struct { - server *mongoServer - info *mongoServerInfo -} - -func (cluster *mongoCluster) syncServersIteration(direct bool) { - log("SYNC Starting full topology synchronization...") - - var wg sync.WaitGroup - var m sync.Mutex - notYetAdded := make(map[string]pendingAdd) - addIfFound := make(map[string]bool) - seen := make(map[string]bool) - syncKind := partialSync - - var spawnSync func(addr string, byMaster bool) - spawnSync = func(addr string, byMaster bool) { - wg.Add(1) - go func() { - defer wg.Done() - - tcpaddr, err := resolveAddr(addr) - if err != nil { - log("SYNC Failed to start sync of ", addr, ": ", err.Error()) - return - } - resolvedAddr := tcpaddr.String() - - m.Lock() - if byMaster { - if pending, ok := notYetAdded[resolvedAddr]; ok { - delete(notYetAdded, resolvedAddr) - m.Unlock() - cluster.addServer(pending.server, pending.info, completeSync) - return - } - addIfFound[resolvedAddr] = true - } - if seen[resolvedAddr] { - m.Unlock() - return - } - seen[resolvedAddr] = true - m.Unlock() - - server := cluster.server(addr, tcpaddr) - info, hosts, err := cluster.syncServer(server) - if err != nil { - cluster.removeServer(server) - return - } - - m.Lock() - add := direct || info.Master || addIfFound[resolvedAddr] - if add { - syncKind = completeSync - } else { - notYetAdded[resolvedAddr] = pendingAdd{server, info} - } - m.Unlock() - if add { - cluster.addServer(server, info, completeSync) - } - if !direct { - for _, addr := range hosts { - spawnSync(addr, info.Master) - } - } - }() - } - - knownAddrs := cluster.getKnownAddrs() - for _, addr := range knownAddrs { - spawnSync(addr, false) - } - wg.Wait() - - if syncKind == completeSync { - logf("SYNC Synchronization was complete (got data from primary).") - for _, pending := range notYetAdded { - cluster.removeServer(pending.server) - } - } else { - logf("SYNC Synchronization was partial (cannot talk to primary).") - for _, pending := range notYetAdded { - cluster.addServer(pending.server, pending.info, partialSync) - } - } - - cluster.Lock() - mastersLen := cluster.masters.Len() - logf("SYNC Synchronization completed: %d master(s) and %d slave(s) alive.", mastersLen, cluster.servers.Len()-mastersLen) - - // Update dynamic seeds, but only if we have any good servers. Otherwise, - // leave them alone for better chances of a successful sync in the future. - if syncKind == completeSync { - dynaSeeds := make([]string, cluster.servers.Len()) - for i, server := range cluster.servers.Slice() { - dynaSeeds[i] = server.Addr - } - cluster.dynaSeeds = dynaSeeds - debugf("SYNC New dynamic seeds: %#v\n", dynaSeeds) - } - cluster.Unlock() -} - -// AcquireSocketWithPoolTimeout returns a socket to a server in the cluster. If slaveOk is -// true, it will attempt to return a socket to a slave server. If it is -// false, the socket will necessarily be to a master server. 
-func (cluster *mongoCluster) AcquireSocketWithPoolTimeout(mode Mode, slaveOk bool, syncTimeout time.Duration, serverTags []bson.D, info *DialInfo) (s *mongoSocket, err error) { - var started time.Time - var syncCount uint - for { - cluster.RLock() - for { - mastersLen := cluster.masters.Len() - slavesLen := cluster.servers.Len() - mastersLen - debugf("Cluster has %d known masters and %d known slaves.", mastersLen, slavesLen) - if mastersLen > 0 && !(slaveOk && mode == Secondary) || slavesLen > 0 && slaveOk { - break - } - if mastersLen > 0 && mode == Secondary && cluster.masters.HasMongos() { - break - } - if started.IsZero() { - // Initialize after fast path above. - started = time.Now() - syncCount = cluster.syncCount - } else if syncTimeout != 0 && started.Before(time.Now().Add(-syncTimeout)) || cluster.dialInfo.FailFast && cluster.syncCount != syncCount { - cluster.RUnlock() - return nil, errors.New("no reachable servers") - } - log("Waiting for servers to synchronize...") - cluster.syncServers() - - // Remember: this will release and reacquire the lock. - cluster.serverSynced.Wait() - } - - var server *mongoServer - if slaveOk { - server = cluster.servers.BestFit(mode, serverTags) - } else { - server = cluster.masters.BestFit(mode, nil) - } - cluster.RUnlock() - - if server == nil { - // Must have failed the requested tags. Sleep to avoid spinning. - time.Sleep(1e8) - continue - } - - s, abended, err := server.AcquireSocketWithBlocking(info) - if err == errPoolTimeout { - // No need to remove servers from the topology if acquiring a socket fails for this reason. - return nil, err - } - if err != nil { - cluster.removeServer(server) - cluster.syncServers() - continue - } - if abended && !slaveOk { - var result isMasterResult - err := cluster.isMaster(s, &result) - if err != nil || !result.IsMaster { - logf("Cannot confirm server %s as master (%v)", server.Addr, err) - s.Release() - cluster.syncServers() - time.Sleep(100 * time.Millisecond) - continue - } else { - // We've managed to successfully reconnect to the master, we are no longer abnormaly ended - server.Lock() - server.abended = false - server.Unlock() - } - } - return s, nil - } -} - -func (cluster *mongoCluster) CacheIndex(cacheKey string, exists bool) { - cluster.Lock() - if cluster.cachedIndex == nil { - cluster.cachedIndex = make(map[string]bool) - } - if exists { - cluster.cachedIndex[cacheKey] = true - } else { - delete(cluster.cachedIndex, cacheKey) - } - cluster.Unlock() -} - -func (cluster *mongoCluster) HasCachedIndex(cacheKey string) (result bool) { - cluster.RLock() - if cluster.cachedIndex != nil { - result = cluster.cachedIndex[cacheKey] - } - cluster.RUnlock() - return -} - -func (cluster *mongoCluster) ResetIndexCache() { - cluster.Lock() - cluster.cachedIndex = make(map[string]bool) - cluster.Unlock() -} diff --git a/services/community/vendor/github.com/globalsign/mgo/coarse_time.go b/services/community/vendor/github.com/globalsign/mgo/coarse_time.go deleted file mode 100644 index e54dd17c..00000000 --- a/services/community/vendor/github.com/globalsign/mgo/coarse_time.go +++ /dev/null @@ -1,62 +0,0 @@ -package mgo - -import ( - "sync" - "sync/atomic" - "time" -) - -// coarseTimeProvider provides a periodically updated (approximate) time value to -// amortise the cost of frequent calls to time.Now. -// -// A read throughput increase of ~6% was measured when using coarseTimeProvider with the -// high-precision event timer (HPET) on FreeBSD 11.1 and Go 1.10.1 after merging -// #116. 
-// -// Calling Now returns a time.Time that is updated at the configured interval, -// however due to scheduling the value may be marginally older than expected. -// -// coarseTimeProvider is safe for concurrent use. -type coarseTimeProvider struct { - once sync.Once - stop chan struct{} - last atomic.Value -} - -// Now returns the most recently acquired time.Time value. -func (t *coarseTimeProvider) Now() time.Time { - return t.last.Load().(time.Time) -} - -// Close stops the periodic update of t. -// -// Any subsequent calls to Now will return the same value forever. -func (t *coarseTimeProvider) Close() { - t.once.Do(func() { - close(t.stop) - }) -} - -// newcoarseTimeProvider returns a coarseTimeProvider configured to update at granularity. -func newcoarseTimeProvider(granularity time.Duration) *coarseTimeProvider { - t := &coarseTimeProvider{ - stop: make(chan struct{}), - } - - t.last.Store(time.Now()) - - go func() { - ticker := time.NewTicker(granularity) - for { - select { - case <-t.stop: - ticker.Stop() - return - case <-ticker.C: - t.last.Store(time.Now()) - } - } - }() - - return t -} diff --git a/services/community/vendor/github.com/globalsign/mgo/doc.go b/services/community/vendor/github.com/globalsign/mgo/doc.go deleted file mode 100644 index f3f373bf..00000000 --- a/services/community/vendor/github.com/globalsign/mgo/doc.go +++ /dev/null @@ -1,35 +0,0 @@ -// Package mgo (pronounced as "mango") offers a rich MongoDB driver for Go. -// -// Detailed documentation of the API is available at GoDoc: -// -// https://godoc.org/github.com/globalsign/mgo -// -// Usage of the driver revolves around the concept of sessions. To -// get started, obtain a session using the Dial function: -// -// session, err := mgo.Dial(url) -// -// This will establish one or more connections with the cluster of -// servers defined by the url parameter. From then on, the cluster -// may be queried with multiple consistency rules (see SetMode) and -// documents retrieved with statements such as: -// -// c := session.DB(database).C(collection) -// err := c.Find(query).One(&result) -// -// New sessions are typically created by calling session.Copy on the -// initial session obtained at dial time. These new sessions will share -// the same cluster information and connection pool, and may be easily -// handed into other methods and functions for organizing logic. -// Every session created must have its Close method called at the end -// of its life time, so its resources may be put back in the pool or -// collected, depending on the case. -// -// There is a sub-package that provides support for BSON, which can be -// used by itself as well: -// -// https://godoc.org/github.com/globalsign/mgo/bson -// -// For more details, see the documentation for the types and methods. -// -package mgo diff --git a/services/community/vendor/github.com/globalsign/mgo/gridfs.go b/services/community/vendor/github.com/globalsign/mgo/gridfs.go deleted file mode 100644 index 0954b166..00000000 --- a/services/community/vendor/github.com/globalsign/mgo/gridfs.go +++ /dev/null @@ -1,782 +0,0 @@ -// mgo - MongoDB driver for Go -// -// Copyright (c) 2010-2012 - Gustavo Niemeyer -// -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are met: -// -// 1. Redistributions of source code must retain the above copyright notice, this -// list of conditions and the following disclaimer. -// 2. 
Redistributions in binary form must reproduce the above copyright notice, -// this list of conditions and the following disclaimer in the documentation -// and/or other materials provided with the distribution. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package mgo - -import ( - "crypto/md5" - "encoding/hex" - "errors" - "hash" - "io" - "os" - "sync" - "time" - - "github.com/globalsign/mgo/bson" -) - -// GridFS stores files in two collections: -// -// - chunks stores the binary chunks. For details, see the chunks Collection. -// - files stores the file’s metadata. For details, see the files Collection. -// -// GridFS places the collections in a common bucket by prefixing each with the bucket name. -// By default, GridFS uses two collections with a bucket named fs: -// -// - fs.files -// - fs.chunks -// -// You can choose a different bucket name, as well as create multiple buckets in a single database. -// The full collection name, which includes the bucket name, is subject to the namespace length limit. -// -// Relevant documentation: -// -// https://docs.mongodb.com/manual/core/gridfs/ -// https://docs.mongodb.com/manual/core/gridfs/#gridfs-chunks-collection -// https://docs.mongodb.com/manual/core/gridfs/#gridfs-files-collection -// -type GridFS struct { - Files *Collection - Chunks *Collection -} - -type gfsFileMode int - -const ( - gfsClosed gfsFileMode = 0 - gfsReading gfsFileMode = 1 - gfsWriting gfsFileMode = 2 -) - -// GridFile document in files collection -type GridFile struct { - m sync.Mutex - c sync.Cond - gfs *GridFS - mode gfsFileMode - err error - - chunk int - offset int64 - - wpending int - wbuf []byte - wsum hash.Hash - - rbuf []byte - rcache *gfsCachedChunk - - doc gfsFile -} - -type gfsFile struct { - Id interface{} `bson:"_id"` - ChunkSize int `bson:"chunkSize"` - UploadDate time.Time `bson:"uploadDate"` - Length int64 `bson:",minsize"` - MD5 string - Filename string `bson:",omitempty"` - ContentType string `bson:"contentType,omitempty"` - Metadata *bson.Raw `bson:",omitempty"` -} - -type gfsChunk struct { - Id interface{} `bson:"_id"` - FilesId interface{} `bson:"files_id"` - N int - Data []byte -} - -type gfsCachedChunk struct { - wait sync.Mutex - n int - data []byte - err error -} - -func newGridFS(db *Database, prefix string) *GridFS { - return &GridFS{db.C(prefix + ".files"), db.C(prefix + ".chunks")} -} - -func (gfs *GridFS) newFile() *GridFile { - file := &GridFile{gfs: gfs} - file.c.L = &file.m - //runtime.SetFinalizer(file, finalizeFile) - return file -} - -func finalizeFile(file *GridFile) { - file.Close() -} - -// Create creates a new file with the provided name in the GridFS. 
If the file -// name already exists, a new version will be inserted with an up-to-date -// uploadDate that will cause it to be atomically visible to the Open and -// OpenId methods. If the file name is not important, an empty name may be -// provided and the file Id used instead. -// -// It's important to Close files whether they are being written to -// or read from, and to check the err result to ensure the operation -// completed successfully. -// -// A simple example inserting a new file: -// -// func check(err error) { -// if err != nil { -// panic(err.String()) -// } -// } -// file, err := db.GridFS("fs").Create("myfile.txt") -// check(err) -// n, err := file.Write([]byte("Hello world!")) -// check(err) -// err = file.Close() -// check(err) -// fmt.Printf("%d bytes written\n", n) -// -// The io.Writer interface is implemented by *GridFile and may be used to -// help on the file creation. For example: -// -// file, err := db.GridFS("fs").Create("myfile.txt") -// check(err) -// messages, err := os.Open("/var/log/messages") -// check(err) -// defer messages.Close() -// err = io.Copy(file, messages) -// check(err) -// err = file.Close() -// check(err) -// -func (gfs *GridFS) Create(name string) (file *GridFile, err error) { - file = gfs.newFile() - file.mode = gfsWriting - file.wsum = md5.New() - file.doc = gfsFile{Id: bson.NewObjectId(), ChunkSize: 255 * 1024, Filename: name} - return -} - -// OpenId returns the file with the provided id, for reading. -// If the file isn't found, err will be set to mgo.ErrNotFound. -// -// It's important to Close files whether they are being written to -// or read from, and to check the err result to ensure the operation -// completed successfully. -// -// The following example will print the first 8192 bytes from the file: -// -// func check(err error) { -// if err != nil { -// panic(err.String()) -// } -// } -// file, err := db.GridFS("fs").OpenId(objid) -// check(err) -// b := make([]byte, 8192) -// n, err := file.Read(b) -// check(err) -// fmt.Println(string(b)) -// check(err) -// err = file.Close() -// check(err) -// fmt.Printf("%d bytes read\n", n) -// -// The io.Reader interface is implemented by *GridFile and may be used to -// deal with it. As an example, the following snippet will dump the whole -// file into the standard output: -// -// file, err := db.GridFS("fs").OpenId(objid) -// check(err) -// err = io.Copy(os.Stdout, file) -// check(err) -// err = file.Close() -// check(err) -// -func (gfs *GridFS) OpenId(id interface{}) (file *GridFile, err error) { - var doc gfsFile - err = gfs.Files.Find(bson.M{"_id": id}).One(&doc) - if err != nil { - return - } - file = gfs.newFile() - file.mode = gfsReading - file.doc = doc - return -} - -// Open returns the most recently uploaded file with the provided -// name, for reading. If the file isn't found, err will be set -// to mgo.ErrNotFound. -// -// It's important to Close files whether they are being written to -// or read from, and to check the err result to ensure the operation -// completed successfully. -// -// The following example will print the first 8192 bytes from the file: -// -// file, err := db.GridFS("fs").Open("myfile.txt") -// check(err) -// b := make([]byte, 8192) -// n, err := file.Read(b) -// check(err) -// fmt.Println(string(b)) -// check(err) -// err = file.Close() -// check(err) -// fmt.Printf("%d bytes read\n", n) -// -// The io.Reader interface is implemented by *GridFile and may be used to -// deal with it. 
As an example, the following snippet will dump the whole -// file into the standard output: -// -// file, err := db.GridFS("fs").Open("myfile.txt") -// check(err) -// err = io.Copy(os.Stdout, file) -// check(err) -// err = file.Close() -// check(err) -// -func (gfs *GridFS) Open(name string) (file *GridFile, err error) { - var doc gfsFile - err = gfs.Files.Find(bson.M{"filename": name}).Sort("-uploadDate").One(&doc) - if err != nil { - return - } - file = gfs.newFile() - file.mode = gfsReading - file.doc = doc - return -} - -// OpenNext opens the next file from iter for reading, sets *file to it, -// and returns true on the success case. If no more documents are available -// on iter or an error occurred, *file is set to nil and the result is false. -// Errors will be available via iter.Err(). -// -// The iter parameter must be an iterator on the GridFS files collection. -// Using the GridFS.Find method is an easy way to obtain such an iterator, -// but any iterator on the collection will work. -// -// If the provided *file is non-nil, OpenNext will close it before attempting -// to iterate to the next element. This means that in a loop one only -// has to worry about closing files when breaking out of the loop early -// (break, return, or panic). -// -// For example: -// -// gfs := db.GridFS("fs") -// query := gfs.Find(nil).Sort("filename") -// iter := query.Iter() -// var f *mgo.GridFile -// for gfs.OpenNext(iter, &f) { -// fmt.Printf("Filename: %s\n", f.Name()) -// } -// if iter.Close() != nil { -// panic(iter.Close()) -// } -// -func (gfs *GridFS) OpenNext(iter *Iter, file **GridFile) bool { - if *file != nil { - // Ignoring the error here shouldn't be a big deal - // as we're reading the file and the loop iteration - // for this file is finished. - _ = (*file).Close() - } - var doc gfsFile - if !iter.Next(&doc) { - *file = nil - return false - } - f := gfs.newFile() - f.mode = gfsReading - f.doc = doc - *file = f - return true -} - -// Find runs query on GridFS's files collection and returns -// the resulting Query. -// -// This logic: -// -// gfs := db.GridFS("fs") -// iter := gfs.Find(nil).Iter() -// -// Is equivalent to: -// -// files := db.C("fs" + ".files") -// iter := files.Find(nil).Iter() -// -func (gfs *GridFS) Find(query interface{}) *Query { - return gfs.Files.Find(query) -} - -// RemoveId deletes the file with the provided id from the GridFS. -func (gfs *GridFS) RemoveId(id interface{}) error { - err := gfs.Files.Remove(bson.M{"_id": id}) - if err != nil { - return err - } - _, err = gfs.Chunks.RemoveAll(bson.D{{Name: "files_id", Value: id}}) - return err -} - -type gfsDocId struct { - Id interface{} `bson:"_id"` -} - -// Remove deletes all files with the provided name from the GridFS. -func (gfs *GridFS) Remove(name string) (err error) { - iter := gfs.Files.Find(bson.M{"filename": name}).Select(bson.M{"_id": 1}).Iter() - var doc gfsDocId - for iter.Next(&doc) { - if e := gfs.RemoveId(doc.Id); e != nil { - err = e - } - } - if err == nil { - err = iter.Close() - } - return err -} - -func (file *GridFile) assertMode(mode gfsFileMode) { - switch file.mode { - case mode: - return - case gfsWriting: - panic("GridFile is open for writing") - case gfsReading: - panic("GridFile is open for reading") - case gfsClosed: - panic("GridFile is closed") - default: - panic("internal error: missing GridFile mode") - } -} - -// SetChunkSize sets size of saved chunks. 
Once the file is written to, it -// will be split in blocks of that size and each block saved into an -// independent chunk document. The default chunk size is 255kb. -// -// It is a runtime error to call this function once the file has started -// being written to. -func (file *GridFile) SetChunkSize(bytes int) { - file.assertMode(gfsWriting) - debugf("GridFile %p: setting chunk size to %d", file, bytes) - file.m.Lock() - file.doc.ChunkSize = bytes - file.m.Unlock() -} - -// Id returns the current file Id. -func (file *GridFile) Id() interface{} { - return file.doc.Id -} - -// SetId changes the current file Id. -// -// It is a runtime error to call this function once the file has started -// being written to, or when the file is not open for writing. -func (file *GridFile) SetId(id interface{}) { - file.assertMode(gfsWriting) - file.m.Lock() - file.doc.Id = id - file.m.Unlock() -} - -// Name returns the optional file name. An empty string will be returned -// in case it is unset. -func (file *GridFile) Name() string { - return file.doc.Filename -} - -// SetName changes the optional file name. An empty string may be used to -// unset it. -// -// It is a runtime error to call this function when the file is not open -// for writing. -func (file *GridFile) SetName(name string) { - file.assertMode(gfsWriting) - file.m.Lock() - file.doc.Filename = name - file.m.Unlock() -} - -// ContentType returns the optional file content type. An empty string will be -// returned in case it is unset. -func (file *GridFile) ContentType() string { - return file.doc.ContentType -} - -// SetContentType changes the optional file content type. An empty string may be -// used to unset it. -// -// It is a runtime error to call this function when the file is not open -// for writing. -func (file *GridFile) SetContentType(ctype string) { - file.assertMode(gfsWriting) - file.m.Lock() - file.doc.ContentType = ctype - file.m.Unlock() -} - -// GetMeta unmarshals the optional "metadata" field associated with the -// file into the result parameter. The meaning of keys under that field -// is user-defined. For example: -// -// result := struct{ INode int }{} -// err = file.GetMeta(&result) -// if err != nil { -// panic(err.String()) -// } -// fmt.Printf("inode: %d\n", result.INode) -// -func (file *GridFile) GetMeta(result interface{}) (err error) { - file.m.Lock() - if file.doc.Metadata != nil { - err = bson.Unmarshal(file.doc.Metadata.Data, result) - } - file.m.Unlock() - return -} - -// SetMeta changes the optional "metadata" field associated with the -// file. The meaning of keys under that field is user-defined. -// For example: -// -// file.SetMeta(bson.M{"inode": inode}) -// -// It is a runtime error to call this function when the file is not open -// for writing. -func (file *GridFile) SetMeta(metadata interface{}) { - file.assertMode(gfsWriting) - data, err := bson.Marshal(metadata) - file.m.Lock() - if err != nil && file.err == nil { - file.err = err - } else { - file.doc.Metadata = &bson.Raw{Data: data} - } - file.m.Unlock() -} - -// Size returns the file size in bytes. -func (file *GridFile) Size() (bytes int64) { - file.m.Lock() - bytes = file.doc.Length - file.m.Unlock() - return -} - -// MD5 returns the file MD5 as a hex-encoded string. -func (file *GridFile) MD5() (md5 string) { - return file.doc.MD5 -} - -// UploadDate returns the file upload time. -func (file *GridFile) UploadDate() time.Time { - return file.doc.UploadDate -} - -// SetUploadDate changes the file upload time. 
-// -// It is a runtime error to call this function when the file is not open -// for writing. -func (file *GridFile) SetUploadDate(t time.Time) { - file.assertMode(gfsWriting) - file.m.Lock() - file.doc.UploadDate = t - file.m.Unlock() -} - -// Close flushes any pending changes in case the file is being written -// to, waits for any background operations to finish, and closes the file. -// -// It's important to Close files whether they are being written to -// or read from, and to check the err result to ensure the operation -// completed successfully. -func (file *GridFile) Close() (err error) { - file.m.Lock() - defer file.m.Unlock() - if file.mode == gfsWriting { - if len(file.wbuf) > 0 && file.err == nil { - file.insertChunk(file.wbuf) - file.wbuf = file.wbuf[0:0] - } - file.completeWrite() - } else if file.mode == gfsReading && file.rcache != nil { - file.rcache.wait.Lock() - file.rcache = nil - } - file.mode = gfsClosed - debugf("GridFile %p: closed", file) - return file.err -} - -func (file *GridFile) completeWrite() { - for file.wpending > 0 { - debugf("GridFile %p: waiting for %d pending chunks to complete file write", file, file.wpending) - file.c.Wait() - } - if file.err == nil { - hexsum := hex.EncodeToString(file.wsum.Sum(nil)) - if file.doc.UploadDate.IsZero() { - file.doc.UploadDate = bson.Now() - } - file.doc.MD5 = hexsum - file.err = file.gfs.Files.Insert(file.doc) - } - if file.err != nil { - file.gfs.Chunks.RemoveAll(bson.D{{Name: "files_id", Value: file.doc.Id}}) - } - if file.err == nil { - index := Index{ - Key: []string{"files_id", "n"}, - Unique: true, - } - file.err = file.gfs.Chunks.EnsureIndex(index) - } -} - -// Abort cancels an in-progress write, preventing the file from being -// automically created and ensuring previously written chunks are -// removed when the file is closed. -// -// It is a runtime error to call Abort when the file was not opened -// for writing. -func (file *GridFile) Abort() { - if file.mode != gfsWriting { - panic("file.Abort must be called on file opened for writing") - } - file.err = errors.New("write aborted") -} - -// Write writes the provided data to the file and returns the -// number of bytes written and an error in case something -// wrong happened. -// -// The file will internally cache the data so that all but the last -// chunk sent to the database have the size defined by SetChunkSize. -// This also means that errors may be deferred until a future call -// to Write or Close. -// -// The parameters and behavior of this function turn the file -// into an io.Writer. -func (file *GridFile) Write(data []byte) (n int, err error) { - file.assertMode(gfsWriting) - file.m.Lock() - debugf("GridFile %p: writing %d bytes", file, len(data)) - defer file.m.Unlock() - - if file.err != nil { - return 0, file.err - } - - n = len(data) - file.doc.Length += int64(n) - chunkSize := file.doc.ChunkSize - - if len(file.wbuf)+len(data) < chunkSize { - file.wbuf = append(file.wbuf, data...) - return - } - - // First, flush file.wbuf complementing with data. - if len(file.wbuf) > 0 { - missing := chunkSize - len(file.wbuf) - if missing > len(data) { - missing = len(data) - } - file.wbuf = append(file.wbuf, data[:missing]...) - data = data[missing:] - file.insertChunk(file.wbuf) - file.wbuf = file.wbuf[0:0] - } - - // Then, flush all chunks from data without copying. 
- for len(data) > chunkSize { - size := chunkSize - if size > len(data) { - size = len(data) - } - file.insertChunk(data[:size]) - data = data[size:] - } - - // And append the rest for a future call. - file.wbuf = append(file.wbuf, data...) - - return n, file.err -} - -func (file *GridFile) insertChunk(data []byte) { - n := file.chunk - file.chunk++ - debugf("GridFile %p: adding to checksum: %q", file, string(data)) - file.wsum.Write(data) - - for file.doc.ChunkSize*file.wpending >= 1024*1024 { - // Hold on.. we got a MB pending. - file.c.Wait() - if file.err != nil { - return - } - } - - file.wpending++ - - debugf("GridFile %p: inserting chunk %d with %d bytes", file, n, len(data)) - - // We may not own the memory of data, so rather than - // simply copying it, we'll marshal the document ahead of time. - data, err := bson.Marshal(gfsChunk{bson.NewObjectId(), file.doc.Id, n, data}) - if err != nil { - file.err = err - return - } - - go func() { - err := file.gfs.Chunks.Insert(bson.Raw{Data: data}) - file.m.Lock() - file.wpending-- - if err != nil && file.err == nil { - file.err = err - } - file.c.Broadcast() - file.m.Unlock() - }() -} - -// Seek sets the offset for the next Read or Write on file to -// offset, interpreted according to whence: 0 means relative to -// the origin of the file, 1 means relative to the current offset, -// and 2 means relative to the end. It returns the new offset and -// an error, if any. -func (file *GridFile) Seek(offset int64, whence int) (pos int64, err error) { - file.m.Lock() - debugf("GridFile %p: seeking for %s (whence=%d)", file, offset, whence) - defer file.m.Unlock() - switch whence { - case os.SEEK_SET: - case os.SEEK_CUR: - offset += file.offset - case os.SEEK_END: - offset += file.doc.Length - default: - panic("unsupported whence value") - } - if offset > file.doc.Length { - return file.offset, errors.New("seek past end of file") - } - if offset == file.doc.Length { - // If we're seeking to the end of the file, - // no need to read anything. This enables - // a client to find the size of the file using only the - // io.ReadSeeker interface with low overhead. - file.offset = offset - return file.offset, nil - } - chunk := int(offset / int64(file.doc.ChunkSize)) - if chunk+1 == file.chunk && offset >= file.offset { - file.rbuf = file.rbuf[int(offset-file.offset):] - file.offset = offset - return file.offset, nil - } - file.offset = offset - file.chunk = chunk - file.rbuf = nil - file.rbuf, err = file.getChunk() - if err == nil { - file.rbuf = file.rbuf[int(file.offset-int64(chunk)*int64(file.doc.ChunkSize)):] - } - return file.offset, err -} - -// Read reads into b the next available data from the file and -// returns the number of bytes written and an error in case -// something wrong happened. At the end of the file, n will -// be zero and err will be set to io.EOF. -// -// The parameters and behavior of this function turn the file -// into an io.Reader. 
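Seek and Read together give *GridFile the full io.ReadSeeker interface. Purely as an illustrative sketch (not from the vendored source; it assumes a MongoDB server reachable on localhost, a database named "test", and an existing GridFS file "myfile.txt" larger than 1 KB):

    package main

    import (
        "io"
        "os"

        "github.com/globalsign/mgo"
    )

    func main() {
        session, err := mgo.Dial("localhost")
        if err != nil {
            panic(err)
        }
        defer session.Close()

        file, err := session.DB("test").GridFS("fs").Open("myfile.txt")
        if err != nil {
            panic(err)
        }

        // Skip the first kilobyte, then stream the remainder to stdout.
        if _, err := file.Seek(1024, os.SEEK_SET); err != nil {
            panic(err)
        }
        if _, err := io.Copy(os.Stdout, file); err != nil {
            panic(err)
        }
        if err := file.Close(); err != nil {
            panic(err)
        }
    }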
-func (file *GridFile) Read(b []byte) (n int, err error) { - file.assertMode(gfsReading) - file.m.Lock() - debugf("GridFile %p: reading at offset %d into buffer of length %d", file, file.offset, len(b)) - defer file.m.Unlock() - if file.offset == file.doc.Length { - return 0, io.EOF - } - for err == nil { - i := copy(b, file.rbuf) - n += i - file.offset += int64(i) - file.rbuf = file.rbuf[i:] - if i == len(b) || file.offset == file.doc.Length { - break - } - b = b[i:] - file.rbuf, err = file.getChunk() - } - return n, err -} - -func (file *GridFile) getChunk() (data []byte, err error) { - cache := file.rcache - file.rcache = nil - if cache != nil && cache.n == file.chunk { - debugf("GridFile %p: Getting chunk %d from cache", file, file.chunk) - cache.wait.Lock() - data, err = cache.data, cache.err - } else { - debugf("GridFile %p: Fetching chunk %d", file, file.chunk) - var doc gfsChunk - err = file.gfs.Chunks.Find(bson.D{{Name: "files_id", Value: file.doc.Id}, {Name: "n", Value: file.chunk}}).One(&doc) - data = doc.Data - } - file.chunk++ - if int64(file.chunk)*int64(file.doc.ChunkSize) < file.doc.Length { - // Read the next one in background. - cache = &gfsCachedChunk{n: file.chunk} - cache.wait.Lock() - debugf("GridFile %p: Scheduling chunk %d for background caching", file, file.chunk) - // Clone the session to avoid having it closed in between. - chunks := file.gfs.Chunks - session := chunks.Database.Session.Clone() - go func(id interface{}, n int) { - defer session.Close() - chunks = chunks.With(session) - var doc gfsChunk - cache.err = chunks.Find(bson.D{{Name: "files_id", Value: id}, {Name: "n", Value: n}}).One(&doc) - cache.data = doc.Data - cache.wait.Unlock() - }(file.doc.Id, file.chunk) - file.rcache = cache - } - debugf("Returning err: %#v", err) - return -} diff --git a/services/community/vendor/github.com/globalsign/mgo/internal/json/LICENSE b/services/community/vendor/github.com/globalsign/mgo/internal/json/LICENSE deleted file mode 100644 index 74487567..00000000 --- a/services/community/vendor/github.com/globalsign/mgo/internal/json/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2012 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/services/community/vendor/github.com/globalsign/mgo/internal/json/decode.go b/services/community/vendor/github.com/globalsign/mgo/internal/json/decode.go deleted file mode 100644 index d5ca1f9a..00000000 --- a/services/community/vendor/github.com/globalsign/mgo/internal/json/decode.go +++ /dev/null @@ -1,1685 +0,0 @@ -// Copyright 2010 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Represents JSON data structure using native Go types: booleans, floats, -// strings, arrays, and maps. - -package json - -import ( - "bytes" - "encoding" - "encoding/base64" - "errors" - "fmt" - "reflect" - "runtime" - "strconv" - "unicode" - "unicode/utf16" - "unicode/utf8" -) - -// Unmarshal parses the JSON-encoded data and stores the result -// in the value pointed to by v. -// -// Unmarshal uses the inverse of the encodings that -// Marshal uses, allocating maps, slices, and pointers as necessary, -// with the following additional rules: -// -// To unmarshal JSON into a pointer, Unmarshal first handles the case of -// the JSON being the JSON literal null. In that case, Unmarshal sets -// the pointer to nil. Otherwise, Unmarshal unmarshals the JSON into -// the value pointed at by the pointer. If the pointer is nil, Unmarshal -// allocates a new value for it to point to. -// -// To unmarshal JSON into a struct, Unmarshal matches incoming object -// keys to the keys used by Marshal (either the struct field name or its tag), -// preferring an exact match but also accepting a case-insensitive match. -// Unmarshal will only set exported fields of the struct. -// -// To unmarshal JSON into an interface value, -// Unmarshal stores one of these in the interface value: -// -// bool, for JSON booleans -// float64, for JSON numbers -// string, for JSON strings -// []interface{}, for JSON arrays -// map[string]interface{}, for JSON objects -// nil for JSON null -// -// To unmarshal a JSON array into a slice, Unmarshal resets the slice length -// to zero and then appends each element to the slice. -// As a special case, to unmarshal an empty JSON array into a slice, -// Unmarshal replaces the slice with a new empty slice. -// -// To unmarshal a JSON array into a Go array, Unmarshal decodes -// JSON array elements into corresponding Go array elements. -// If the Go array is smaller than the JSON array, -// the additional JSON array elements are discarded. -// If the JSON array is smaller than the Go array, -// the additional Go array elements are set to zero values. -// -// To unmarshal a JSON object into a map, Unmarshal first establishes a map to -// use, If the map is nil, Unmarshal allocates a new map. Otherwise Unmarshal -// reuses the existing map, keeping existing entries. Unmarshal then stores key- -// value pairs from the JSON object into the map. The map's key type must -// either be a string or implement encoding.TextUnmarshaler. 
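To make the map-key rule concrete: a key type of any non-string kind can still receive JSON object keys as long as its pointer implements encoding.TextUnmarshaler. The sketch below uses the standard library encoding/json (this vendored fork behaves the same way for this case); the Point type and the input document are invented for illustration:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // Point is a non-string map key; *Point implements encoding.TextUnmarshaler,
    // so JSON object keys such as "1,2" can be decoded into it.
    type Point struct{ X, Y int }

    func (p *Point) UnmarshalText(text []byte) error {
        _, err := fmt.Sscanf(string(text), "%d,%d", &p.X, &p.Y)
        return err
    }

    func main() {
        var grid map[Point]string
        data := []byte(`{"1,2": "tree", "3,4": "rock"}`)
        if err := json.Unmarshal(data, &grid); err != nil {
            panic(err)
        }
        fmt.Println(grid[Point{1, 2}], grid[Point{3, 4}]) // tree rock
    }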
-// -// If a JSON value is not appropriate for a given target type, -// or if a JSON number overflows the target type, Unmarshal -// skips that field and completes the unmarshaling as best it can. -// If no more serious errors are encountered, Unmarshal returns -// an UnmarshalTypeError describing the earliest such error. -// -// The JSON null value unmarshals into an interface, map, pointer, or slice -// by setting that Go value to nil. Because null is often used in JSON to mean -// ``not present,'' unmarshaling a JSON null into any other Go type has no effect -// on the value and produces no error. -// -// When unmarshaling quoted strings, invalid UTF-8 or -// invalid UTF-16 surrogate pairs are not treated as an error. -// Instead, they are replaced by the Unicode replacement -// character U+FFFD. -// -func Unmarshal(data []byte, v interface{}) error { - // Check for well-formedness. - // Avoids filling out half a data structure - // before discovering a JSON syntax error. - var d decodeState - err := checkValid(data, &d.scan) - if err != nil { - return err - } - - d.init(data) - return d.unmarshal(v) -} - -// Unmarshaler is the interface implemented by types -// that can unmarshal a JSON description of themselves. -// The input can be assumed to be a valid encoding of -// a JSON value. UnmarshalJSON must copy the JSON data -// if it wishes to retain the data after returning. -type Unmarshaler interface { - UnmarshalJSON([]byte) error -} - -// An UnmarshalTypeError describes a JSON value that was -// not appropriate for a value of a specific Go type. -type UnmarshalTypeError struct { - Value string // description of JSON value - "bool", "array", "number -5" - Type reflect.Type // type of Go value it could not be assigned to - Offset int64 // error occurred after reading Offset bytes -} - -func (e *UnmarshalTypeError) Error() string { - return "json: cannot unmarshal " + e.Value + " into Go value of type " + e.Type.String() -} - -// An UnmarshalFieldError describes a JSON object key that -// led to an unexported (and therefore unwritable) struct field. -// (No longer used; kept for compatibility.) -type UnmarshalFieldError struct { - Key string - Type reflect.Type - Field reflect.StructField -} - -func (e *UnmarshalFieldError) Error() string { - return "json: cannot unmarshal object key " + strconv.Quote(e.Key) + " into unexported field " + e.Field.Name + " of type " + e.Type.String() -} - -// An InvalidUnmarshalError describes an invalid argument passed to Unmarshal. -// (The argument to Unmarshal must be a non-nil pointer.) -type InvalidUnmarshalError struct { - Type reflect.Type -} - -func (e *InvalidUnmarshalError) Error() string { - if e.Type == nil { - return "json: Unmarshal(nil)" - } - - if e.Type.Kind() != reflect.Ptr { - return "json: Unmarshal(non-pointer " + e.Type.String() + ")" - } - return "json: Unmarshal(nil " + e.Type.String() + ")" -} - -func (d *decodeState) unmarshal(v interface{}) (err error) { - defer func() { - if r := recover(); r != nil { - if _, ok := r.(runtime.Error); ok { - panic(r) - } - err = r.(error) - } - }() - - rv := reflect.ValueOf(v) - if rv.Kind() != reflect.Ptr || rv.IsNil() { - return &InvalidUnmarshalError{reflect.TypeOf(v)} - } - - d.scan.reset() - // We decode rv not rv.Elem because the Unmarshaler interface - // test must be applied at the top level of the value. - d.value(rv) - return d.savedError -} - -// A Number represents a JSON number literal. -type Number string - -// String returns the literal text of the number. 
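Number is most useful together with a decoder's UseNumber option, which keeps numeric literals as Number instead of converting them to float64. A short sketch using the standard library encoding/json, which exposes the same Number and UseNumber API; the value 9007199254740993 (2^53+1) is chosen because it cannot be represented exactly as a float64:

    package main

    import (
        "bytes"
        "encoding/json"
        "fmt"
    )

    func main() {
        dec := json.NewDecoder(bytes.NewReader([]byte(`{"id": 9007199254740993}`)))
        dec.UseNumber() // decode numbers into json.Number rather than float64

        var doc map[string]interface{}
        if err := dec.Decode(&doc); err != nil {
            panic(err)
        }

        id, err := doc["id"].(json.Number).Int64()
        if err != nil {
            panic(err)
        }
        fmt.Println(id) // prints 9007199254740993 with no rounding
    }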
-func (n Number) String() string { return string(n) } - -// Float64 returns the number as a float64. -func (n Number) Float64() (float64, error) { - return strconv.ParseFloat(string(n), 64) -} - -// Int64 returns the number as an int64. -func (n Number) Int64() (int64, error) { - return strconv.ParseInt(string(n), 10, 64) -} - -// isValidNumber reports whether s is a valid JSON number literal. -func isValidNumber(s string) bool { - // This function implements the JSON numbers grammar. - // See https://tools.ietf.org/html/rfc7159#section-6 - // and http://json.org/number.gif - - if s == "" { - return false - } - - // Optional - - if s[0] == '-' { - s = s[1:] - if s == "" { - return false - } - } - - // Digits - switch { - default: - return false - - case s[0] == '0': - s = s[1:] - - case '1' <= s[0] && s[0] <= '9': - s = s[1:] - for len(s) > 0 && '0' <= s[0] && s[0] <= '9' { - s = s[1:] - } - } - - // . followed by 1 or more digits. - if len(s) >= 2 && s[0] == '.' && '0' <= s[1] && s[1] <= '9' { - s = s[2:] - for len(s) > 0 && '0' <= s[0] && s[0] <= '9' { - s = s[1:] - } - } - - // e or E followed by an optional - or + and - // 1 or more digits. - if len(s) >= 2 && (s[0] == 'e' || s[0] == 'E') { - s = s[1:] - if s[0] == '+' || s[0] == '-' { - s = s[1:] - if s == "" { - return false - } - } - for len(s) > 0 && '0' <= s[0] && s[0] <= '9' { - s = s[1:] - } - } - - // Make sure we are at the end. - return s == "" -} - -// decodeState represents the state while decoding a JSON value. -type decodeState struct { - data []byte - off int // read offset in data - scan scanner - nextscan scanner // for calls to nextValue - savedError error - useNumber bool - ext Extension -} - -// errPhase is used for errors that should not happen unless -// there is a bug in the JSON decoder or something is editing -// the data slice while the decoder executes. -var errPhase = errors.New("JSON decoder out of sync - data changing underfoot?") - -func (d *decodeState) init(data []byte) *decodeState { - d.data = data - d.off = 0 - d.savedError = nil - return d -} - -// error aborts the decoding by panicking with err. -func (d *decodeState) error(err error) { - panic(err) -} - -// saveError saves the first err it is called with, -// for reporting at the end of the unmarshal. -func (d *decodeState) saveError(err error) { - if d.savedError == nil { - d.savedError = err - } -} - -// next cuts off and returns the next full JSON value in d.data[d.off:]. -// The next value is known to be an object or array, not a literal. -func (d *decodeState) next() []byte { - c := d.data[d.off] - item, rest, err := nextValue(d.data[d.off:], &d.nextscan) - if err != nil { - d.error(err) - } - d.off = len(d.data) - len(rest) - - // Our scanner has seen the opening brace/bracket - // and thinks we're still in the middle of the object. - // invent a closing brace/bracket to get it out. - if c == '{' { - d.scan.step(&d.scan, '}') - } else if c == '[' { - d.scan.step(&d.scan, ']') - } else { - // Was inside a function name. Get out of it. - d.scan.step(&d.scan, '(') - d.scan.step(&d.scan, ')') - } - - return item -} - -// scanWhile processes bytes in d.data[d.off:] until it -// receives a scan code not equal to op. -// It updates d.off and returns the new scan code. 
-func (d *decodeState) scanWhile(op int) int { - var newOp int - for { - if d.off >= len(d.data) { - newOp = d.scan.eof() - d.off = len(d.data) + 1 // mark processed EOF with len+1 - } else { - c := d.data[d.off] - d.off++ - newOp = d.scan.step(&d.scan, c) - } - if newOp != op { - break - } - } - return newOp -} - -// value decodes a JSON value from d.data[d.off:] into the value. -// it updates d.off to point past the decoded value. -func (d *decodeState) value(v reflect.Value) { - if !v.IsValid() { - _, rest, err := nextValue(d.data[d.off:], &d.nextscan) - if err != nil { - d.error(err) - } - d.off = len(d.data) - len(rest) - - // d.scan thinks we're still at the beginning of the item. - // Feed in an empty string - the shortest, simplest value - - // so that it knows we got to the end of the value. - if d.scan.redo { - // rewind. - d.scan.redo = false - d.scan.step = stateBeginValue - } - d.scan.step(&d.scan, '"') - d.scan.step(&d.scan, '"') - - n := len(d.scan.parseState) - if n > 0 && d.scan.parseState[n-1] == parseObjectKey { - // d.scan thinks we just read an object key; finish the object - d.scan.step(&d.scan, ':') - d.scan.step(&d.scan, '"') - d.scan.step(&d.scan, '"') - d.scan.step(&d.scan, '}') - } - - return - } - - switch op := d.scanWhile(scanSkipSpace); op { - default: - d.error(errPhase) - - case scanBeginArray: - d.array(v) - - case scanBeginObject: - d.object(v) - - case scanBeginLiteral: - d.literal(v) - - case scanBeginName: - d.name(v) - } -} - -type unquotedValue struct{} - -// valueQuoted is like value but decodes a -// quoted string literal or literal null into an interface value. -// If it finds anything other than a quoted string literal or null, -// valueQuoted returns unquotedValue{}. -func (d *decodeState) valueQuoted() interface{} { - switch op := d.scanWhile(scanSkipSpace); op { - default: - d.error(errPhase) - - case scanBeginArray: - d.array(reflect.Value{}) - - case scanBeginObject: - d.object(reflect.Value{}) - - case scanBeginName: - switch v := d.nameInterface().(type) { - case nil, string: - return v - } - - case scanBeginLiteral: - switch v := d.literalInterface().(type) { - case nil, string: - return v - } - } - return unquotedValue{} -} - -// indirect walks down v allocating pointers as needed, -// until it gets to a non-pointer. -// if it encounters an Unmarshaler, indirect stops and returns that. -// if decodingNull is true, indirect stops at the last pointer so it can be set to nil. -func (d *decodeState) indirect(v reflect.Value, decodingNull bool) (Unmarshaler, encoding.TextUnmarshaler, reflect.Value) { - // If v is a named type and is addressable, - // start with its address, so that if the type has pointer methods, - // we find them. - if v.Kind() != reflect.Ptr && v.Type().Name() != "" && v.CanAddr() { - v = v.Addr() - } - for { - // Load value from interface, but only if the result will be - // usefully addressable. 
- if v.Kind() == reflect.Interface && !v.IsNil() { - e := v.Elem() - if e.Kind() == reflect.Ptr && !e.IsNil() && (!decodingNull || e.Elem().Kind() == reflect.Ptr) { - v = e - continue - } - } - - if v.Kind() != reflect.Ptr { - break - } - - if v.Elem().Kind() != reflect.Ptr && decodingNull && v.CanSet() { - break - } - if v.IsNil() { - v.Set(reflect.New(v.Type().Elem())) - } - if v.Type().NumMethod() > 0 { - if u, ok := v.Interface().(Unmarshaler); ok { - return u, nil, v - } - if u, ok := v.Interface().(encoding.TextUnmarshaler); ok { - return nil, u, v - } - } - v = v.Elem() - } - return nil, nil, v -} - -// array consumes an array from d.data[d.off-1:], decoding into the value v. -// the first byte of the array ('[') has been read already. -func (d *decodeState) array(v reflect.Value) { - // Check for unmarshaler. - u, ut, pv := d.indirect(v, false) - if u != nil { - d.off-- - err := u.UnmarshalJSON(d.next()) - if err != nil { - d.error(err) - } - return - } - if ut != nil { - d.saveError(&UnmarshalTypeError{"array", v.Type(), int64(d.off)}) - d.off-- - d.next() - return - } - - v = pv - - // Check type of target. - switch v.Kind() { - case reflect.Interface: - if v.NumMethod() == 0 { - // Decoding into nil interface? Switch to non-reflect code. - v.Set(reflect.ValueOf(d.arrayInterface())) - return - } - // Otherwise it's invalid. - fallthrough - default: - d.saveError(&UnmarshalTypeError{"array", v.Type(), int64(d.off)}) - d.off-- - d.next() - return - case reflect.Array: - case reflect.Slice: - break - } - - i := 0 - for { - // Look ahead for ] - can only happen on first iteration. - op := d.scanWhile(scanSkipSpace) - if op == scanEndArray { - break - } - - // Back up so d.value can have the byte we just read. - d.off-- - d.scan.undo(op) - - // Get element of array, growing if necessary. - if v.Kind() == reflect.Slice { - // Grow slice if necessary - if i >= v.Cap() { - newcap := v.Cap() + v.Cap()/2 - if newcap < 4 { - newcap = 4 - } - newv := reflect.MakeSlice(v.Type(), v.Len(), newcap) - reflect.Copy(newv, v) - v.Set(newv) - } - if i >= v.Len() { - v.SetLen(i + 1) - } - } - - if i < v.Len() { - // Decode into element. - d.value(v.Index(i)) - } else { - // Ran out of fixed array: skip. - d.value(reflect.Value{}) - } - i++ - - // Next token must be , or ]. - op = d.scanWhile(scanSkipSpace) - if op == scanEndArray { - break - } - if op != scanArrayValue { - d.error(errPhase) - } - } - - if i < v.Len() { - if v.Kind() == reflect.Array { - // Array. Zero the rest. - z := reflect.Zero(v.Type().Elem()) - for ; i < v.Len(); i++ { - v.Index(i).Set(z) - } - } else { - v.SetLen(i) - } - } - if i == 0 && v.Kind() == reflect.Slice { - v.Set(reflect.MakeSlice(v.Type(), 0, 0)) - } -} - -var nullLiteral = []byte("null") -var textUnmarshalerType = reflect.TypeOf(new(encoding.TextUnmarshaler)).Elem() - -// object consumes an object from d.data[d.off-1:], decoding into the value v. -// the first byte ('{') of the object has been read already. -func (d *decodeState) object(v reflect.Value) { - // Check for unmarshaler. - u, ut, pv := d.indirect(v, false) - if d.storeKeyed(pv) { - return - } - if u != nil { - d.off-- - err := u.UnmarshalJSON(d.next()) - if err != nil { - d.error(err) - } - return - } - if ut != nil { - d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)}) - d.off-- - d.next() // skip over { } in input - return - } - v = pv - - // Decoding into nil interface? Switch to non-reflect code. 
- if v.Kind() == reflect.Interface && v.NumMethod() == 0 { - v.Set(reflect.ValueOf(d.objectInterface())) - return - } - - // Check type of target: - // struct or - // map[string]T or map[encoding.TextUnmarshaler]T - switch v.Kind() { - case reflect.Map: - // Map key must either have string kind or be an encoding.TextUnmarshaler. - t := v.Type() - if t.Key().Kind() != reflect.String && - !reflect.PtrTo(t.Key()).Implements(textUnmarshalerType) { - d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)}) - d.off-- - d.next() // skip over { } in input - return - } - if v.IsNil() { - v.Set(reflect.MakeMap(t)) - } - case reflect.Struct: - - default: - d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)}) - d.off-- - d.next() // skip over { } in input - return - } - - var mapElem reflect.Value - - empty := true - for { - // Read opening " of string key or closing }. - op := d.scanWhile(scanSkipSpace) - if op == scanEndObject { - if !empty && !d.ext.trailingCommas { - d.syntaxError("beginning of object key string") - } - break - } - empty = false - if op == scanBeginName { - if !d.ext.unquotedKeys { - d.syntaxError("beginning of object key string") - } - } else if op != scanBeginLiteral { - d.error(errPhase) - } - unquotedKey := op == scanBeginName - - // Read key. - start := d.off - 1 - op = d.scanWhile(scanContinue) - item := d.data[start : d.off-1] - var key []byte - if unquotedKey { - key = item - // TODO Fix code below to quote item when necessary. - } else { - var ok bool - key, ok = unquoteBytes(item) - if !ok { - d.error(errPhase) - } - } - - // Figure out field corresponding to key. - var subv reflect.Value - destring := false // whether the value is wrapped in a string to be decoded first - - if v.Kind() == reflect.Map { - elemType := v.Type().Elem() - if !mapElem.IsValid() { - mapElem = reflect.New(elemType).Elem() - } else { - mapElem.Set(reflect.Zero(elemType)) - } - subv = mapElem - } else { - var f *field - fields := cachedTypeFields(v.Type()) - for i := range fields { - ff := &fields[i] - if bytes.Equal(ff.nameBytes, key) { - f = ff - break - } - if f == nil && ff.equalFold(ff.nameBytes, key) { - f = ff - } - } - if f != nil { - subv = v - destring = f.quoted - for _, i := range f.index { - if subv.Kind() == reflect.Ptr { - if subv.IsNil() { - subv.Set(reflect.New(subv.Type().Elem())) - } - subv = subv.Elem() - } - subv = subv.Field(i) - } - } - } - - // Read : before value. - if op == scanSkipSpace { - op = d.scanWhile(scanSkipSpace) - } - if op != scanObjectKey { - d.error(errPhase) - } - - // Read value. - if destring { - switch qv := d.valueQuoted().(type) { - case nil: - d.literalStore(nullLiteral, subv, false) - case string: - d.literalStore([]byte(qv), subv, true) - default: - d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal unquoted value into %v", subv.Type())) - } - } else { - d.value(subv) - } - - // Write value back to map; - // if using struct, subv points into struct already. - if v.Kind() == reflect.Map { - kt := v.Type().Key() - var kv reflect.Value - switch { - case kt.Kind() == reflect.String: - kv = reflect.ValueOf(key).Convert(v.Type().Key()) - case reflect.PtrTo(kt).Implements(textUnmarshalerType): - kv = reflect.New(v.Type().Key()) - d.literalStore(item, kv, true) - kv = kv.Elem() - default: - panic("json: Unexpected key type") // should never occur - } - v.SetMapIndex(kv, subv) - } - - // Next token must be , or }. 
- op = d.scanWhile(scanSkipSpace) - if op == scanEndObject { - break - } - if op != scanObjectValue { - d.error(errPhase) - } - } -} - -// isNull returns whether there's a null literal at the provided offset. -func (d *decodeState) isNull(off int) bool { - if off+4 >= len(d.data) || d.data[off] != 'n' || d.data[off+1] != 'u' || d.data[off+2] != 'l' || d.data[off+3] != 'l' { - return false - } - d.nextscan.reset() - for i, c := range d.data[off:] { - if i > 4 { - return false - } - switch d.nextscan.step(&d.nextscan, c) { - case scanContinue, scanBeginName: - continue - } - break - } - return true -} - -// name consumes a const or function from d.data[d.off-1:], decoding into the value v. -// the first byte of the function name has been read already. -func (d *decodeState) name(v reflect.Value) { - if d.isNull(d.off - 1) { - d.literal(v) - return - } - - // Check for unmarshaler. - u, ut, pv := d.indirect(v, false) - if d.storeKeyed(pv) { - return - } - if u != nil { - d.off-- - err := u.UnmarshalJSON(d.next()) - if err != nil { - d.error(err) - } - return - } - if ut != nil { - d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)}) - d.off-- - d.next() // skip over function in input - return - } - v = pv - - // Decoding into nil interface? Switch to non-reflect code. - if v.Kind() == reflect.Interface && v.NumMethod() == 0 { - out := d.nameInterface() - if out == nil { - v.Set(reflect.Zero(v.Type())) - } else { - v.Set(reflect.ValueOf(out)) - } - return - } - - nameStart := d.off - 1 - - op := d.scanWhile(scanContinue) - - name := d.data[nameStart : d.off-1] - if op != scanParam { - // Back up so the byte just read is consumed next. - d.off-- - d.scan.undo(op) - if l, ok := d.convertLiteral(name); ok { - d.storeValue(v, l) - return - } - d.error(&SyntaxError{fmt.Sprintf("json: unknown constant %q", name), int64(d.off)}) - } - - funcName := string(name) - funcData := d.ext.funcs[funcName] - if funcData.key == "" { - d.error(fmt.Errorf("json: unknown function %q", funcName)) - } - - // Check type of target: - // struct or - // map[string]T or map[encoding.TextUnmarshaler]T - switch v.Kind() { - case reflect.Map: - // Map key must either have string kind or be an encoding.TextUnmarshaler. - t := v.Type() - if t.Key().Kind() != reflect.String && - !reflect.PtrTo(t.Key()).Implements(textUnmarshalerType) { - d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)}) - d.off-- - d.next() // skip over { } in input - return - } - if v.IsNil() { - v.Set(reflect.MakeMap(t)) - } - case reflect.Struct: - - default: - d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)}) - d.off-- - d.next() // skip over { } in input - return - } - - // TODO Fix case of func field as map. - //topv := v - - // Figure out field corresponding to function. - key := []byte(funcData.key) - if v.Kind() == reflect.Map { - elemType := v.Type().Elem() - v = reflect.New(elemType).Elem() - } else { - var f *field - fields := cachedTypeFields(v.Type()) - for i := range fields { - ff := &fields[i] - if bytes.Equal(ff.nameBytes, key) { - f = ff - break - } - if f == nil && ff.equalFold(ff.nameBytes, key) { - f = ff - } - } - if f != nil { - for _, i := range f.index { - if v.Kind() == reflect.Ptr { - if v.IsNil() { - v.Set(reflect.New(v.Type().Elem())) - } - v = v.Elem() - } - v = v.Field(i) - } - if v.Kind() == reflect.Ptr { - if v.IsNil() { - v.Set(reflect.New(v.Type().Elem())) - } - v = v.Elem() - } - } - } - - // Check for unmarshaler on func field itself. 
- u, _, _ = d.indirect(v, false) - if u != nil { - d.off = nameStart - err := u.UnmarshalJSON(d.next()) - if err != nil { - d.error(err) - } - return - } - - var mapElem reflect.Value - - // Parse function arguments. - for i := 0; ; i++ { - // closing ) - can only happen on first iteration. - op := d.scanWhile(scanSkipSpace) - if op == scanEndParams { - break - } - - // Back up so d.value can have the byte we just read. - d.off-- - d.scan.undo(op) - - if i >= len(funcData.args) { - d.error(fmt.Errorf("json: too many arguments for function %s", funcName)) - } - key := []byte(funcData.args[i]) - - // Figure out field corresponding to key. - var subv reflect.Value - destring := false // whether the value is wrapped in a string to be decoded first - - if v.Kind() == reflect.Map { - elemType := v.Type().Elem() - if !mapElem.IsValid() { - mapElem = reflect.New(elemType).Elem() - } else { - mapElem.Set(reflect.Zero(elemType)) - } - subv = mapElem - } else { - var f *field - fields := cachedTypeFields(v.Type()) - for i := range fields { - ff := &fields[i] - if bytes.Equal(ff.nameBytes, key) { - f = ff - break - } - if f == nil && ff.equalFold(ff.nameBytes, key) { - f = ff - } - } - if f != nil { - subv = v - destring = f.quoted - for _, i := range f.index { - if subv.Kind() == reflect.Ptr { - if subv.IsNil() { - subv.Set(reflect.New(subv.Type().Elem())) - } - subv = subv.Elem() - } - subv = subv.Field(i) - } - } - } - - // Read value. - if destring { - switch qv := d.valueQuoted().(type) { - case nil: - d.literalStore(nullLiteral, subv, false) - case string: - d.literalStore([]byte(qv), subv, true) - default: - d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal unquoted value into %v", subv.Type())) - } - } else { - d.value(subv) - } - - // Write value back to map; - // if using struct, subv points into struct already. - if v.Kind() == reflect.Map { - kt := v.Type().Key() - var kv reflect.Value - switch { - case kt.Kind() == reflect.String: - kv = reflect.ValueOf(key).Convert(v.Type().Key()) - case reflect.PtrTo(kt).Implements(textUnmarshalerType): - kv = reflect.New(v.Type().Key()) - d.literalStore(key, kv, true) - kv = kv.Elem() - default: - panic("json: Unexpected key type") // should never occur - } - v.SetMapIndex(kv, subv) - } - - // Next token must be , or ). - op = d.scanWhile(scanSkipSpace) - if op == scanEndParams { - break - } - if op != scanParam { - d.error(errPhase) - } - } -} - -// keyed attempts to decode an object or function using a keyed doc extension, -// and returns the value and true on success, or nil and false otherwise. -func (d *decodeState) keyed() (interface{}, bool) { - if len(d.ext.keyed) == 0 { - return nil, false - } - - unquote := false - - // Look-ahead first key to check for a keyed document extension. 
- d.nextscan.reset() - var start, end int - for i, c := range d.data[d.off-1:] { - switch op := d.nextscan.step(&d.nextscan, c); op { - case scanSkipSpace, scanContinue, scanBeginObject: - continue - case scanBeginLiteral, scanBeginName: - unquote = op == scanBeginLiteral - start = i - continue - } - end = i - break - } - - name := bytes.Trim(d.data[d.off-1+start:d.off-1+end], " \n\t") - - var key []byte - var ok bool - if unquote { - key, ok = unquoteBytes(name) - if !ok { - d.error(errPhase) - } - } else { - funcData, ok := d.ext.funcs[string(name)] - if !ok { - return nil, false - } - key = []byte(funcData.key) - } - - decode, ok := d.ext.keyed[string(key)] - if !ok { - return nil, false - } - - d.off-- - out, err := decode(d.next()) - if err != nil { - d.error(err) - } - return out, true -} - -func (d *decodeState) storeKeyed(v reflect.Value) bool { - keyed, ok := d.keyed() - if !ok { - return false - } - d.storeValue(v, keyed) - return true -} - -var ( - trueBytes = []byte("true") - falseBytes = []byte("false") - nullBytes = []byte("null") -) - -func (d *decodeState) storeValue(v reflect.Value, from interface{}) { - switch from { - case nil: - d.literalStore(nullBytes, v, false) - return - case true: - d.literalStore(trueBytes, v, false) - return - case false: - d.literalStore(falseBytes, v, false) - return - } - fromv := reflect.ValueOf(from) - for fromv.Kind() == reflect.Ptr && !fromv.IsNil() { - fromv = fromv.Elem() - } - fromt := fromv.Type() - for v.Kind() == reflect.Ptr && !v.IsNil() { - v = v.Elem() - } - vt := v.Type() - if fromt.AssignableTo(vt) { - v.Set(fromv) - } else if fromt.ConvertibleTo(vt) { - v.Set(fromv.Convert(vt)) - } else { - d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)}) - } -} - -func (d *decodeState) convertLiteral(name []byte) (interface{}, bool) { - if len(name) == 0 { - return nil, false - } - switch name[0] { - case 't': - if bytes.Equal(name, trueBytes) { - return true, true - } - case 'f': - if bytes.Equal(name, falseBytes) { - return false, true - } - case 'n': - if bytes.Equal(name, nullBytes) { - return nil, true - } - } - if l, ok := d.ext.consts[string(name)]; ok { - return l, true - } - return nil, false -} - -// literal consumes a literal from d.data[d.off-1:], decoding into the value v. -// The first byte of the literal has been read already -// (that's how the caller knows it's a literal). -func (d *decodeState) literal(v reflect.Value) { - // All bytes inside literal return scanContinue op code. - start := d.off - 1 - op := d.scanWhile(scanContinue) - - // Scan read one byte too far; back up. - d.off-- - d.scan.undo(op) - - d.literalStore(d.data[start:d.off], v, false) -} - -// convertNumber converts the number literal s to a float64 or a Number -// depending on the setting of d.useNumber. -func (d *decodeState) convertNumber(s string) (interface{}, error) { - if d.useNumber { - return Number(s), nil - } - f, err := strconv.ParseFloat(s, 64) - if err != nil { - return nil, &UnmarshalTypeError{"number " + s, reflect.TypeOf(0.0), int64(d.off)} - } - return f, nil -} - -var numberType = reflect.TypeOf(Number("")) - -// literalStore decodes a literal stored in item into v. -// -// fromQuoted indicates whether this literal came from unwrapping a -// string from the ",string" struct tag option. this is used only to -// produce more helpful error messages. -func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool) { - // Check for unmarshaler. 
- if len(item) == 0 { - //Empty string given - d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type())) - return - } - wantptr := item[0] == 'n' // null - u, ut, pv := d.indirect(v, wantptr) - if u != nil { - err := u.UnmarshalJSON(item) - if err != nil { - d.error(err) - } - return - } - if ut != nil { - if item[0] != '"' { - if fromQuoted { - d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type())) - } else { - d.saveError(&UnmarshalTypeError{"string", v.Type(), int64(d.off)}) - } - return - } - s, ok := unquoteBytes(item) - if !ok { - if fromQuoted { - d.error(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type())) - } else { - d.error(errPhase) - } - } - err := ut.UnmarshalText(s) - if err != nil { - d.error(err) - } - return - } - - v = pv - - switch c := item[0]; c { - case 'n': // null - switch v.Kind() { - case reflect.Interface, reflect.Ptr, reflect.Map, reflect.Slice: - v.Set(reflect.Zero(v.Type())) - // otherwise, ignore null for primitives/string - } - case 't', 'f': // true, false - value := c == 't' - switch v.Kind() { - default: - if fromQuoted { - d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type())) - } else { - d.saveError(&UnmarshalTypeError{"bool", v.Type(), int64(d.off)}) - } - case reflect.Bool: - v.SetBool(value) - case reflect.Interface: - if v.NumMethod() == 0 { - v.Set(reflect.ValueOf(value)) - } else { - d.saveError(&UnmarshalTypeError{"bool", v.Type(), int64(d.off)}) - } - } - - case '"': // string - s, ok := unquoteBytes(item) - if !ok { - if fromQuoted { - d.error(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type())) - } else { - d.error(errPhase) - } - } - switch v.Kind() { - default: - d.saveError(&UnmarshalTypeError{"string", v.Type(), int64(d.off)}) - case reflect.Slice: - if v.Type().Elem().Kind() != reflect.Uint8 { - d.saveError(&UnmarshalTypeError{"string", v.Type(), int64(d.off)}) - break - } - b := make([]byte, base64.StdEncoding.DecodedLen(len(s))) - n, err := base64.StdEncoding.Decode(b, s) - if err != nil { - d.saveError(err) - break - } - v.SetBytes(b[:n]) - case reflect.String: - v.SetString(string(s)) - case reflect.Interface: - if v.NumMethod() == 0 { - v.Set(reflect.ValueOf(string(s))) - } else { - d.saveError(&UnmarshalTypeError{"string", v.Type(), int64(d.off)}) - } - } - - default: // number - if c != '-' && (c < '0' || c > '9') { - if fromQuoted { - d.error(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type())) - } else { - d.error(errPhase) - } - } - s := string(item) - switch v.Kind() { - default: - if v.Kind() == reflect.String && v.Type() == numberType { - v.SetString(s) - if !isValidNumber(s) { - d.error(fmt.Errorf("json: invalid number literal, trying to unmarshal %q into Number", item)) - } - break - } - if fromQuoted { - d.error(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type())) - } else { - d.error(&UnmarshalTypeError{"number", v.Type(), int64(d.off)}) - } - case reflect.Interface: - n, err := d.convertNumber(s) - if err != nil { - d.saveError(err) - break - } - if v.NumMethod() != 0 { - d.saveError(&UnmarshalTypeError{"number", v.Type(), int64(d.off)}) - break - } - v.Set(reflect.ValueOf(n)) - - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, 
reflect.Int64: - n, err := strconv.ParseInt(s, 10, 64) - if err != nil || v.OverflowInt(n) { - d.saveError(&UnmarshalTypeError{"number " + s, v.Type(), int64(d.off)}) - break - } - v.SetInt(n) - - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - n, err := strconv.ParseUint(s, 10, 64) - if err != nil || v.OverflowUint(n) { - d.saveError(&UnmarshalTypeError{"number " + s, v.Type(), int64(d.off)}) - break - } - v.SetUint(n) - - case reflect.Float32, reflect.Float64: - n, err := strconv.ParseFloat(s, v.Type().Bits()) - if err != nil || v.OverflowFloat(n) { - d.saveError(&UnmarshalTypeError{"number " + s, v.Type(), int64(d.off)}) - break - } - v.SetFloat(n) - } - } -} - -// The xxxInterface routines build up a value to be stored -// in an empty interface. They are not strictly necessary, -// but they avoid the weight of reflection in this common case. - -// valueInterface is like value but returns interface{} -func (d *decodeState) valueInterface() interface{} { - switch d.scanWhile(scanSkipSpace) { - default: - d.error(errPhase) - panic("unreachable") - case scanBeginArray: - return d.arrayInterface() - case scanBeginObject: - return d.objectInterface() - case scanBeginLiteral: - return d.literalInterface() - case scanBeginName: - return d.nameInterface() - } -} - -func (d *decodeState) syntaxError(expected string) { - msg := fmt.Sprintf("invalid character '%c' looking for %s", d.data[d.off-1], expected) - d.error(&SyntaxError{msg, int64(d.off)}) -} - -// arrayInterface is like array but returns []interface{}. -func (d *decodeState) arrayInterface() []interface{} { - var v = make([]interface{}, 0) - for { - // Look ahead for ] - can only happen on first iteration. - op := d.scanWhile(scanSkipSpace) - if op == scanEndArray { - if len(v) > 0 && !d.ext.trailingCommas { - d.syntaxError("beginning of value") - } - break - } - - // Back up so d.value can have the byte we just read. - d.off-- - d.scan.undo(op) - - v = append(v, d.valueInterface()) - - // Next token must be , or ]. - op = d.scanWhile(scanSkipSpace) - if op == scanEndArray { - break - } - if op != scanArrayValue { - d.error(errPhase) - } - } - return v -} - -// objectInterface is like object but returns map[string]interface{}. -func (d *decodeState) objectInterface() interface{} { - v, ok := d.keyed() - if ok { - return v - } - - m := make(map[string]interface{}) - for { - // Read opening " of string key or closing }. - op := d.scanWhile(scanSkipSpace) - if op == scanEndObject { - if len(m) > 0 && !d.ext.trailingCommas { - d.syntaxError("beginning of object key string") - } - break - } - if op == scanBeginName { - if !d.ext.unquotedKeys { - d.syntaxError("beginning of object key string") - } - } else if op != scanBeginLiteral { - d.error(errPhase) - } - unquotedKey := op == scanBeginName - - // Read string key. - start := d.off - 1 - op = d.scanWhile(scanContinue) - item := d.data[start : d.off-1] - var key string - if unquotedKey { - key = string(item) - } else { - var ok bool - key, ok = unquote(item) - if !ok { - d.error(errPhase) - } - } - - // Read : before value. - if op == scanSkipSpace { - op = d.scanWhile(scanSkipSpace) - } - if op != scanObjectKey { - d.error(errPhase) - } - - // Read value. - m[key] = d.valueInterface() - - // Next token must be , or }. - op = d.scanWhile(scanSkipSpace) - if op == scanEndObject { - break - } - if op != scanObjectValue { - d.error(errPhase) - } - } - return m -} - -// literalInterface is like literal but returns an interface value. 
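The valueInterface family (arrayInterface, objectInterface, literalInterface) is what gives untyped decoding its familiar shape: objects become map[string]interface{}, arrays []interface{}, numbers float64 (unless UseNumber is set), strings string, booleans bool, and null becomes nil. A brief sketch with the standard library encoding/json showing those concrete types; the sample document is invented:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    func main() {
        var v interface{}
        data := []byte(`{"name": "crapi", "ports": [80, 443], "tls": true, "notes": null}`)
        if err := json.Unmarshal(data, &v); err != nil {
            panic(err)
        }

        obj := v.(map[string]interface{})
        fmt.Printf("%T %T %T %v\n", obj["name"], obj["ports"], obj["tls"], obj["notes"])
        // string []interface {} bool <nil>
        fmt.Printf("%T\n", obj["ports"].([]interface{})[0])
        // float64
    }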
-func (d *decodeState) literalInterface() interface{} { - // All bytes inside literal return scanContinue op code. - start := d.off - 1 - op := d.scanWhile(scanContinue) - - // Scan read one byte too far; back up. - d.off-- - d.scan.undo(op) - item := d.data[start:d.off] - - switch c := item[0]; c { - case 'n': // null - return nil - - case 't', 'f': // true, false - return c == 't' - - case '"': // string - s, ok := unquote(item) - if !ok { - d.error(errPhase) - } - return s - - default: // number - if c != '-' && (c < '0' || c > '9') { - d.error(errPhase) - } - n, err := d.convertNumber(string(item)) - if err != nil { - d.saveError(err) - } - return n - } -} - -// nameInterface is like function but returns map[string]interface{}. -func (d *decodeState) nameInterface() interface{} { - v, ok := d.keyed() - if ok { - return v - } - - nameStart := d.off - 1 - - op := d.scanWhile(scanContinue) - - name := d.data[nameStart : d.off-1] - if op != scanParam { - // Back up so the byte just read is consumed next. - d.off-- - d.scan.undo(op) - if l, ok := d.convertLiteral(name); ok { - return l - } - d.error(&SyntaxError{fmt.Sprintf("json: unknown constant %q", name), int64(d.off)}) - } - - funcName := string(name) - funcData := d.ext.funcs[funcName] - if funcData.key == "" { - d.error(fmt.Errorf("json: unknown function %q", funcName)) - } - - m := make(map[string]interface{}) - for i := 0; ; i++ { - // Look ahead for ) - can only happen on first iteration. - op := d.scanWhile(scanSkipSpace) - if op == scanEndParams { - break - } - - // Back up so d.value can have the byte we just read. - d.off-- - d.scan.undo(op) - - if i >= len(funcData.args) { - d.error(fmt.Errorf("json: too many arguments for function %s", funcName)) - } - m[funcData.args[i]] = d.valueInterface() - - // Next token must be , or ). - op = d.scanWhile(scanSkipSpace) - if op == scanEndParams { - break - } - if op != scanParam { - d.error(errPhase) - } - } - return map[string]interface{}{funcData.key: m} -} - -// getu4 decodes \uXXXX from the beginning of s, returning the hex value, -// or it returns -1. -func getu4(s []byte) rune { - if len(s) < 6 || s[0] != '\\' || s[1] != 'u' { - return -1 - } - r, err := strconv.ParseUint(string(s[2:6]), 16, 64) - if err != nil { - return -1 - } - return rune(r) -} - -// unquote converts a quoted JSON string literal s into an actual string t. -// The rules are different than for Go, so cannot use strconv.Unquote. -func unquote(s []byte) (t string, ok bool) { - s, ok = unquoteBytes(s) - t = string(s) - return -} - -func unquoteBytes(s []byte) (t []byte, ok bool) { - if len(s) < 2 || s[0] != '"' || s[len(s)-1] != '"' { - return - } - s = s[1 : len(s)-1] - - // Check for unusual characters. If there are none, - // then no unquoting is needed, so return a slice of the - // original bytes. - r := 0 - for r < len(s) { - c := s[r] - if c == '\\' || c == '"' || c < ' ' { - break - } - if c < utf8.RuneSelf { - r++ - continue - } - rr, size := utf8.DecodeRune(s[r:]) - if rr == utf8.RuneError && size == 1 { - break - } - r += size - } - if r == len(s) { - return s, true - } - - b := make([]byte, len(s)+2*utf8.UTFMax) - w := copy(b, s[0:r]) - for r < len(s) { - // Out of room? Can only happen if s is full of - // malformed UTF-8 and we're replacing each - // byte with RuneError. 
- if w >= len(b)-2*utf8.UTFMax { - nb := make([]byte, (len(b)+utf8.UTFMax)*2) - copy(nb, b[0:w]) - b = nb - } - switch c := s[r]; { - case c == '\\': - r++ - if r >= len(s) { - return - } - switch s[r] { - default: - return - case '"', '\\', '/', '\'': - b[w] = s[r] - r++ - w++ - case 'b': - b[w] = '\b' - r++ - w++ - case 'f': - b[w] = '\f' - r++ - w++ - case 'n': - b[w] = '\n' - r++ - w++ - case 'r': - b[w] = '\r' - r++ - w++ - case 't': - b[w] = '\t' - r++ - w++ - case 'u': - r-- - rr := getu4(s[r:]) - if rr < 0 { - return - } - r += 6 - if utf16.IsSurrogate(rr) { - rr1 := getu4(s[r:]) - if dec := utf16.DecodeRune(rr, rr1); dec != unicode.ReplacementChar { - // A valid pair; consume. - r += 6 - w += utf8.EncodeRune(b[w:], dec) - break - } - // Invalid surrogate; fall back to replacement rune. - rr = unicode.ReplacementChar - } - w += utf8.EncodeRune(b[w:], rr) - } - - // Quote, control characters are invalid. - case c == '"', c < ' ': - return - - // ASCII - case c < utf8.RuneSelf: - b[w] = c - r++ - w++ - - // Coerce to well-formed UTF-8. - default: - rr, size := utf8.DecodeRune(s[r:]) - r += size - w += utf8.EncodeRune(b[w:], rr) - } - } - return b[0:w], true -} diff --git a/services/community/vendor/github.com/globalsign/mgo/internal/json/encode.go b/services/community/vendor/github.com/globalsign/mgo/internal/json/encode.go deleted file mode 100644 index e4b8f864..00000000 --- a/services/community/vendor/github.com/globalsign/mgo/internal/json/encode.go +++ /dev/null @@ -1,1260 +0,0 @@ -// Copyright 2010 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package json implements encoding and decoding of JSON as defined in -// RFC 4627. The mapping between JSON and Go values is described -// in the documentation for the Marshal and Unmarshal functions. -// -// See "JSON and Go" for an introduction to this package: -// https://golang.org/doc/articles/json_and_go.html -package json - -import ( - "bytes" - "encoding" - "encoding/base64" - "fmt" - "math" - "reflect" - "runtime" - "sort" - "strconv" - "strings" - "sync" - "unicode" - "unicode/utf8" -) - -// Marshal returns the JSON encoding of v. -// -// Marshal traverses the value v recursively. -// If an encountered value implements the Marshaler interface -// and is not a nil pointer, Marshal calls its MarshalJSON method -// to produce JSON. If no MarshalJSON method is present but the -// value implements encoding.TextMarshaler instead, Marshal calls -// its MarshalText method. -// The nil pointer exception is not strictly necessary -// but mimics a similar, necessary exception in the behavior of -// UnmarshalJSON. -// -// Otherwise, Marshal uses the following type-dependent default encodings: -// -// Boolean values encode as JSON booleans. -// -// Floating point, integer, and Number values encode as JSON numbers. -// -// String values encode as JSON strings coerced to valid UTF-8, -// replacing invalid bytes with the Unicode replacement rune. -// The angle brackets "<" and ">" are escaped to "\u003c" and "\u003e" -// to keep some browsers from misinterpreting JSON output as HTML. -// Ampersand "&" is also escaped to "\u0026" for the same reason. -// This escaping can be disabled using an Encoder with DisableHTMLEscaping. -// -// Array and slice values encode as JSON arrays, except that -// []byte encodes as a base64-encoded string, and a nil slice -// encodes as the null JSON value. -// -// Struct values encode as JSON objects. 
Each exported struct field -// becomes a member of the object unless -// - the field's tag is "-", or -// - the field is empty and its tag specifies the "omitempty" option. -// The empty values are false, 0, any -// nil pointer or interface value, and any array, slice, map, or string of -// length zero. The object's default key string is the struct field name -// but can be specified in the struct field's tag value. The "json" key in -// the struct field's tag value is the key name, followed by an optional comma -// and options. Examples: -// -// // Field is ignored by this package. -// Field int `json:"-"` -// -// // Field appears in JSON as key "myName". -// Field int `json:"myName"` -// -// // Field appears in JSON as key "myName" and -// // the field is omitted from the object if its value is empty, -// // as defined above. -// Field int `json:"myName,omitempty"` -// -// // Field appears in JSON as key "Field" (the default), but -// // the field is skipped if empty. -// // Note the leading comma. -// Field int `json:",omitempty"` -// -// The "string" option signals that a field is stored as JSON inside a -// JSON-encoded string. It applies only to fields of string, floating point, -// integer, or boolean types. This extra level of encoding is sometimes used -// when communicating with JavaScript programs: -// -// Int64String int64 `json:",string"` -// -// The key name will be used if it's a non-empty string consisting of -// only Unicode letters, digits, dollar signs, percent signs, hyphens, -// underscores and slashes. -// -// Anonymous struct fields are usually marshaled as if their inner exported fields -// were fields in the outer struct, subject to the usual Go visibility rules amended -// as described in the next paragraph. -// An anonymous struct field with a name given in its JSON tag is treated as -// having that name, rather than being anonymous. -// An anonymous struct field of interface type is treated the same as having -// that type as its name, rather than being anonymous. -// -// The Go visibility rules for struct fields are amended for JSON when -// deciding which field to marshal or unmarshal. If there are -// multiple fields at the same level, and that level is the least -// nested (and would therefore be the nesting level selected by the -// usual Go rules), the following extra rules apply: -// -// 1) Of those fields, if any are JSON-tagged, only tagged fields are considered, -// even if there are multiple untagged fields that would otherwise conflict. -// 2) If there is exactly one field (tagged or not according to the first rule), that is selected. -// 3) Otherwise there are multiple fields, and all are ignored; no error occurs. -// -// Handling of anonymous struct fields is new in Go 1.1. -// Prior to Go 1.1, anonymous struct fields were ignored. To force ignoring of -// an anonymous struct field in both current and earlier versions, give the field -// a JSON tag of "-". -// -// Map values encode as JSON objects. The map's key type must either be a string -// or implement encoding.TextMarshaler. The map keys are used as JSON object -// keys, subject to the UTF-8 coercion described for string values above. -// -// Pointer values encode as the value pointed to. -// A nil pointer encodes as the null JSON value. -// -// Interface values encode as the value contained in the interface. -// A nil interface value encodes as the null JSON value. -// -// Channel, complex, and function values cannot be encoded in JSON. 
-// Attempting to encode such a value causes Marshal to return -// an UnsupportedTypeError. -// -// JSON cannot represent cyclic data structures and Marshal does not -// handle them. Passing cyclic structures to Marshal will result in -// an infinite recursion. -// -func Marshal(v interface{}) ([]byte, error) { - e := &encodeState{} - err := e.marshal(v, encOpts{escapeHTML: true}) - if err != nil { - return nil, err - } - return e.Bytes(), nil -} - -// MarshalIndent is like Marshal but applies Indent to format the output. -func MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) { - b, err := Marshal(v) - if err != nil { - return nil, err - } - var buf bytes.Buffer - err = Indent(&buf, b, prefix, indent) - if err != nil { - return nil, err - } - return buf.Bytes(), nil -} - -// HTMLEscape appends to dst the JSON-encoded src with <, >, &, U+2028 and U+2029 -// characters inside string literals changed to \u003c, \u003e, \u0026, \u2028, \u2029 -// so that the JSON will be safe to embed inside HTML