Merged
1 change: 1 addition & 0 deletions src/containers/ftp/Gemfile
@@ -7,6 +7,7 @@ gem "aws-sdk-cloudwatch", "~> 1"
gem "aws-sdk-ec2", "~> 1"
gem "aws-sdk-ecs", "~> 1"
gem "aws-sdk-s3", "~> 1"
gem "aws-sdk-states", "~> 1"
gem "net-ftp"
gem "net-sftp"

64 changes: 25 additions & 39 deletions src/containers/ftp/ftp.rb
@@ -12,6 +12,8 @@
# STATE_MACHINE_ARTIFACT_BUCKET_NAME
# STATE_MACHINE_ARTIFACT_OBJECT_KEY
# STATE_MACHINE_TASK_JSON
# STATE_MACHINE_TASK_TOKEN
# STATE_MACHINE_TASK_TYPE
# Set elsewhere
# FTP_LISTEN_PORT
# PUBLIC_IP
@@ -22,6 +24,7 @@
require "rubygems"
require "bundler/setup"
require "aws-sdk-cloudwatch"
require "aws-sdk-states"
require "net/sftp"
require "net/ftp"
require "logger"
@@ -42,6 +45,10 @@
}))

begin
task_result = {
Task: ENV["STATE_MACHINE_TASK_TYPE"]
}

# Count the transfers in CloudWatch Metrics
recorder =
Recorder.new(
@@ -85,41 +92,23 @@
}
used_mode = ftp_files.upload_file(uri, file, ftp_options)

if used_mode
logger.debug(JSON.dump({
msg: "Copying state machine results file",
bucket_name: bucket,
object_key: RESULT_KEY
}))
s3.put_object(
bucket: bucket,
key: RESULT_KEY,
body: JSON.dump({
# All properties listed here will be included in the task result for
# this task.
Mode: used_mode
})
)
end
task_result["Mode"] = used_mode if used_mode
elsif uri.scheme == "sftp"
sftp_files = SftpFiles.new(logger, recorder)
sftp_files.upload_file(uri, file, md5: md5, timeout: timeout)

logger.debug(JSON.dump({
msg: "Copying state machine results file",
bucket_name: bucket,
object_key: RESULT_KEY
}))
s3.put_object(
bucket: bucket,
key: RESULT_KEY,
body: JSON.dump({
# All properties listed here will be included in the task result for
# this task.
# Foo: "bar"
})
)
end

task_result["URL"] = task["URL"]

now = Time.now
task_result["Time"] = now.getutc.iso8601
task_result["Timestamp"] = now.to_i

puts JSON.dump({msg: "Task output", output: task_result})
sf.send_task_success({
task_token: ENV["STATE_MACHINE_TASK_TOKEN"],
output: task_result.to_json
})
rescue => e
puts e.class.name
puts e.message
@@ -130,14 +119,11 @@
bucket_name: bucket,
object_key: RESULT_KEY
}))
s3.put_object(
bucket: bucket,
key: RESULT_KEY,
body: JSON.dump({
Error: e.class.name,
ErrorMessage: e.message
})
)
sf.send_task_failure({
[Review comment from the Member Author: Compare to the old way, which caused Lambda executions to fail in order to force the FTP issue to be treated as a task failure. Now we do that directly.] (See the sketch after this diff.)

task_token: ENV["STATE_MACHINE_TASK_TOKEN"],
error: e.class.name,
cause: e.message
})
end

# Count the transfers in CloudWatch Metrics
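To illustrate the pattern the author's comment describes (reporting the transfer result straight back to Step Functions via a task token instead of forcing the worker's execution to fail), here is a minimal standalone sketch using the aws-sdk-states gem added above. It assumes a task token supplied through STATE_MACHINE_TASK_TOKEN by a .waitForTaskToken state, as in the diff; the do_transfer method and the Status/Mode output fields are hypothetical placeholders, not code from this PR.

require "aws-sdk-states"
require "json"

# Hypothetical stand-in for the FTP/SFTP upload the real container performs.
def do_transfer
  "ftp"
end

sf = Aws::States::Client.new

begin
  mode = do_transfer
  # The output must be a JSON string; Step Functions hands it to the state
  # machine as this task's result.
  sf.send_task_success(
    task_token: ENV["STATE_MACHINE_TASK_TOKEN"],
    output: JSON.dump({Status: "DONE", Mode: mode})
  )
rescue => e
  # Reporting the failure directly marks the callback task as failed without
  # relying on the worker process itself exiting with an error.
  sf.send_task_failure(
    task_token: ENV["STATE_MACHINE_TASK_TOKEN"],
    error: e.class.name,
    cause: e.message
  )
end

Because error and cause are surfaced to the state machine as the failed task's Error and Cause, a Catch rule can route FTP problems the same way it handles any other task failure.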
1 change: 1 addition & 0 deletions src/containers/transcode/Gemfile
@@ -4,5 +4,6 @@ source "https://rubygems.org"

gem "aws-sdk-cloudwatch", "~> 1"
gem "aws-sdk-s3", "~> 1"
gem "aws-sdk-states", "~> 1"
gem "aws-sdk-sts", "~> 1"
gem "nokogiri"