service: airtable-backups-demo
frameworkVersion: "=2.30.3"
# Validation warnings will become errors by default in serverless 3.0; turn this on preemptively https://www.serverless.com/framework/docs/configuration-validation/
configValidationMode: error
plugins:
- serverless-plugin-epsagon # Must be first, see https://github.com/epsagon/serverless-plugin-epsagon
- serverless-bundle # See https://github.com/AnomalyInnovations/serverless-bundle
- serverless-offline # See https://github.com/dherault/serverless-offline
- '@unly/serverless-env-copy-plugin' # See https://github.com/UnlyEd/serverless-env-copy-plugin
- serverless-dotenv-plugin # See https://www.npmjs.com/package/serverless-dotenv-plugin
custom:
epsagon:
token: '' # TODO Set your Epsagon token - Won't be applied if not provided
appName: ${self:service}-${self:custom.environment}
environment: ${env:NODE_ENV, 'development'} # Defaults to "development" if not provided (see package.json scripts)
envs:
    development: # Necessary for running SLS scripts locally, but shouldn't be used to deploy anything
profile:
memorySize:
staging:
      profile: sandbox # TODO Replace with your own serverless profile, or provide AWS credentials directly if you don't use profiles
memorySize: 256
production:
      profile: sandbox # TODO Replace with your own serverless profile, or provide AWS credentials directly if you don't use profiles
memorySize: 256
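#    preprod: # XXX Hypothetical example: more environments can be added the same way (the key must match the NODE_ENV value you deploy with)
#      profile: sandbox
#      memorySize: 256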
bucket: ${self:service}-${self:custom.environment}
serverless-offline:
port: 3000
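    # XXX A minimal local run, assuming the plugin's standard CLI: "NODE_ENV=development npx serverless offline"
    # serves the HTTP endpoints on http://localhost:3000 (the exact path prefix depends on the serverless-offline version and stage)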
provider:
name: aws
runtime: nodejs14.x
versionFunctions: false
logRetentionInDays: 60
timeout: 30
memorySize: ${self:custom.envs.${self:provider.stage}.memorySize, '128'}
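  # XXX Nested variable resolution: the inner ${self:provider.stage} resolves first, so this reads
  # custom.envs.<stage>.memorySize and falls back to 128 MB when the stage (e.g. "development") leaves it empty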
  stage: ${self:custom.environment} # XXX The stage is derived directly from the environment; they are one and the same
region: ${opt:region, 'eu-west-1'} # Ireland by default, change as you need
environment:
NODE_ENV: ${self:custom.environment}
SERVICE: ${self:service}
S3_BUCKET: ${self:custom.bucket}
profile: ${self:custom.envs.${self:provider.stage}.profile, ''}
logs:
    restApi: true # Enable logs for other services, such as API Gateway - See https://serverless.com/blog/framework-release-v142/
apiGateway:
binaryMediaTypes:
      - '*/*' # Allow all binary response types to be returned - See https://serverless.com/blog/framework-release-v142/
iam:
role:
statements:
- Effect: Allow
Action:
- s3:PutObject
- s3:PutObjectAcl
Resource: "arn:aws:s3:::${self:custom.bucket}/*"
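        # XXX A minimal sketch of the call this statement permits, assuming the aws-sdk v2 client bundled with the
        # nodejs14.x runtime and that the handler receives the schedule input as its event (names are illustrative):
        #   new AWS.S3().putObject({ Bucket: process.env.S3_BUCKET, Key: `${event.S3_DIRECTORY}backup.json`, Body: data, StorageClass: event.STORAGE_CLASS }).promise()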
deploymentBucket:
serverSideEncryption: AES256
blockPublicAccess: true
stackTags:
env: ${self:custom.environment}
stage: ${self:provider.stage}
region: ${self:provider.region}
service: ${self:service}
runtime: ${self:provider.runtime}
functions:
status:
handler: src/functions/status.handler
events:
- http:
path: status
method: get
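      # XXX Quick smoke test once deployed, assuming the default API Gateway URL shape (placeholders depend on your deployment):
      #   curl https://<api-id>.execute-api.<region>.amazonaws.com/<stage>/status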
airtableBackups: # The same lambda is used to configure all backups (each backup is a distinct "scheduled event", AKA "cron")
handler: src/functions/makeAirtableBackup.handler
events:
- schedule:
description: "Airtable backups for the 'Airtable backups boilerplate' base (demo)"
          rate: rate(5 minutes) # TODO Set your own rate: https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/ScheduledEvents.html
enabled: true
input:
AIRTABLE_BASE: "app7nfLmoVHva1Vdv" # TODO Set your own base id
AIRTABLE_TABLES: "Video tracker;Staff directory;Agencies;Agency contacts;Scenes;Shots;Locations;Props and equipment" # TODO Set your table names
            S3_DIRECTORY: "airtableBackupsBoilerplate/" # TODO Set the S3 sub-directory you want the backups stored in
            STORAGE_CLASS: 'STANDARD_IA' # Set the storage class to use, among these values: "STANDARD"|"REDUCED_REDUNDANCY"|"STANDARD_IA"|"ONEZONE_IA"|"INTELLIGENT_TIERING"|"GLACIER"|"DEEP_ARCHIVE" - See https://aws.amazon.com/en/s3/storage-classes/
# XXX You can configure another scheduled backup just by creating another "event"
# - schedule:
# description: "Another backup for another Airtable base"
#          rate: rate(5 minutes) # TODO Set your own rate: https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/ScheduledEvents.html
# enabled: true
# input:
# AIRTABLE_BASE: "app7nfLmoVHva1Vdv" # TODO Set your own base id
# AIRTABLE_TABLES: "Video tracker;Staff directory;Agencies;Agency contacts;Scenes;Shots;Locations;Props and equipment" # TODO Set your table names
#            S3_DIRECTORY: "airtableBackupsBoilerplate/" # TODO Set the S3 sub-directory you want the backups stored in
#            STORAGE_CLASS: 'STANDARD_IA' # Set the storage class to use, among these values: "STANDARD"|"REDUCED_REDUNDANCY"|"STANDARD_IA"|"ONEZONE_IA"|"INTELLIGENT_TIERING"|"GLACIER"|"DEEP_ARCHIVE" - See https://aws.amazon.com/en/s3/storage-classes/
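# XXX Deploying, a sketch assuming the NODE_ENV-driven scripts referenced above:
#   "NODE_ENV=staging npx serverless deploy" resolves the stage to "staging" and picks the matching profile and memorySize from custom.envs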