diff --git a/.env b/.env deleted file mode 100644 index 2faa615..0000000 --- a/.env +++ /dev/null @@ -1,3 +0,0 @@ -POSTGRES_DB=test -POSTGRES_USER=test -POSTGRES_PASSWORD=test \ No newline at end of file diff --git a/README.md b/README.md index e0dabab..d1d35dc 100644 --- a/README.md +++ b/README.md @@ -103,6 +103,7 @@ psql> SELECT aws_commons.create_s3_uri( psql> \echo :s3_uri (test-bucket,animals.csv,us-east-1) +-- Used to override the credentials specified in docker-compose psql> SELECT aws_commons.create_aws_credentials( '', '', @@ -136,7 +137,7 @@ psql> select * from animals; ``` You can also call the function as: -``` +```postgresql psql> SELECT aws_s3.table_import_from_s3( 'animals', '', @@ -145,13 +146,16 @@ psql> SELECT aws_s3.table_import_from_s3( 'test-bucket', 'animals.csv', 'us-east-1' - ), + ) +); +``` +Add the following argument to override the credentials: +```postgresql aws_commons.create_aws_credentials( '', '', '' ) -); ``` #### Using the function table_import_from_s3 with all the parameters @@ -192,10 +196,7 @@ psql> SELECT aws_s3.table_import_from_s3( '(FORMAT CSV, DELIMITER '','', HEADER true)', 'test-bucket', 'animals.csv', - 'us-east-1', - '', - '', - '' + 'us-east-1' ); table_import_from_s3 @@ -214,33 +215,20 @@ psql> select * from animals; (4 rows) ``` -If you use localstack, you can set `endpoint_url` to point to the localstack s3 endpoint: -``` +If you use localstack, you can set `endpoint_url` to override the localstack s3 endpoint set in docker-compose: +```postgresql psql> SET aws_s3.endpoint_url TO 'http://localstack:4566'; ``` -You can also set the AWS credentials: -``` +Similarly, you can override the AWS credentials: +```postgresql psql> SET aws_s3.access_key_id TO 'dummy'; psql> SET aws_s3.secret_key TO 'dummy'; psql> SET aws_s3.session_token TO 'dummy'; ``` -and then omit them from the function calls.
- -For example: -``` -psql> SELECT aws_s3.table_import_from_s3( - 'animals', - '', - '(FORMAT CSV, DELIMITER '','', HEADER true)', - 'test-bucket', - 'animals.csv', - 'us-east-1' -); -``` You can pass them also as optional parameters. For example: -``` +```postgresql psql> SELECT aws_s3.table_import_from_s3( 'animals', '', @@ -249,6 +237,9 @@ psql> SELECT aws_s3.table_import_from_s3( 'animals.csv', 'us-east-1', - endpoint_url := 'http://localstack:4566' + endpoint_url := 'http://localstack:4566', + my_access_id := 'dummy', + my_secret_key := 'dummy', + session_token := 'dummy' ); ``` @@ -294,24 +285,28 @@ psql> SELECT * FROM aws_s3.query_export_to_s3( 'test-bucket', 'animals2.csv', 'us-east-1' - ), - aws_commons.create_aws_credentials( + ), + options := 'FORMAT CSV, DELIMITER '','', HEADER true' +); +``` +Note that credentials provided in the docker-compose environment can be overridden by adding the following argument to `aws_s3.query_export_to_s3`: +```postgresql +psql> SELECT * FROM aws_s3.query_export_to_s3( + ..., + credentials := aws_commons.create_aws_credentials( '', '', '' - ), - options := 'FORMAT CSV, DELIMITER '','', HEADER true' + ) ); ``` -If you set the AWS credentials: +Similarly, they can be overridden using `SET`. ``` psql> SET aws_s3.aws_s3.access_key_id TO 'dummy'; psql> SET aws_s3.aws_s3.secret_key TO 'dummy'; psql> SET aws_s3.session_token TO 'dummy'; ``` -You can omit the credentials.
- -##### Example #### Using the function table_import_from_s3 with all the parameters @@ -359,7 +354,7 @@ psql> SELECT * FROM aws_s3.query_export_to_s3( 5 | 1 | 47 ``` -If you set the AWS credentials: +If you set your own AWS credentials: ``` psql> SET aws_s3.aws_s3.access_key_id TO 'dummy'; psql> SET aws_s3.aws_s3.secret_key TO 'dummy'; @@ -388,11 +383,11 @@ $ psql -h localhost -p 15432 -U test test Initialize the extensions: ``` -psql> CREATE EXTENSION plpythonu; +psql> CREATE EXTENSION plpython3u; psql> CREATE EXTENSION aws_s3; ``` -Set the endpoint url and the aws keys to use s3 (in localstack you can set the aws creds to any non-empty string): +If you want to, you can override the environment variables set in docker-compose (Note: in localstack you can set the aws creds to any non-empty string): ``` psql> SET aws_s3.endpoint_url TO 'http://localstack:4566'; psql> SET aws_s3.aws_access_key_id TO 'dummy'; diff --git a/docker-compose.yml b/docker-compose.yml index fe22435..39a5d76 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -6,7 +6,14 @@ services: dockerfile: mock-servers/postgres/Dockerfile ports: - "15432:5432" - env_file: .env + environment: + AWS_ACCESS_KEY_ID: localstack + AWS_SECRET_ACCESS_KEY: localstack + AWS_DEFAULT_REGION: us-east-1 + AWS_ENDPOINT_URL_S3: http://localstack:4566 + POSTGRES_DB: test + POSTGRES_USER: test + POSTGRES_PASSWORD: test volumes: - $PWD/mock-servers/postgres/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d - $PWD/mock-servers/postgres/data:/var/lib/postgresql/data @@ -16,16 +23,17 @@ services: ports: - "4566:4566" - "9090:9090" - env_file: - - .env environment: SERVICES: "s3" DEBUG: "true" DATA_DIR: mock-servers/localstack/data PORT_WEB_UI: 9090 AWS_ACCESS_KEY_ID: localstack AWS_SECRET_ACCESS_KEY: localstack AWS_DEFAULT_REGION: us-east-1 + POSTGRES_DB: test + POSTGRES_USER: test + POSTGRES_PASSWORD: test volumes: - $PWD/mock-servers/localstack/samples:/tmp/samples - -
$PWD/mock-servers/localstack/docker-entrypoint-initaws.d:/docker-entrypoint-initaws.d + - $PWD/mock-servers/localstack/docker-entrypoint-initaws.d:/docker-entrypoint-initaws.d \ No newline at end of file