
Commit

Fix field name
bhearsum committed Sep 25, 2020
1 parent 3e04390 commit 1cbe599
Showing 1 changed file with 12 additions and 12 deletions.
24 changes: 12 additions & 12 deletions src/auslib/db.py
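
The substance of the commit: every history-upload log call in src/auslib/db.py passed the blob name to logging under the extra key "release"; this change renames that key to "file". In Python's logging module, keys in the extra dict become attributes on the emitted LogRecord, so the rename changes the field name that a structured formatter or log pipeline sees. A minimal sketch of that mechanism (the format string and example blob name below are illustrative, not from the repository):

    import logging

    # Keys in "extra" become attributes on the LogRecord; the %(file)s
    # placeholder reads the attribute back out, the same way a structured
    # (e.g. JSON) handler would emit it as a named field.
    logging.basicConfig(level=logging.INFO, format="%(levelname)s %(message)s file=%(file)s")

    bname = "some-identifier/1-1601017200000-someone.json"  # made-up example value
    logging.info("Beginning GCS upload", extra={"file": bname})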
@@ -836,34 +836,34 @@ def forInsert(self, insertedKeys, columns, changed_by, trans):
for data_version, ts, data in ((None, timestamp - 1, ""), (columns.get("data_version"), timestamp, json.dumps(columns[self.data_column]))):
bname = "{}/{}-{}-{}.json".format(identifier, data_version, ts, changed_by)
start = time.time()
logging.info("Beginning GCS upload", extra={"release": bname})
logging.info("Beginning GCS upload", extra={"file": bname})
bucket = self._getBucket(identifier)(use_gcloud_aio=False)
blob = bucket.blob(bname)
blob.upload_from_string(data, content_type="application/json")
duration = time.time() - start
logging.info("Completed GCS upload", extra={"release": bname, "duration": duration})
logging.info("Completed GCS upload", extra={"file": bname, "duration": duration})

def forDelete(self, rowData, changed_by, trans):
identifier = "-".join([rowData.get(i) for i in self.identifier_columns])
bname = "{}/{}-{}-{}.json".format(identifier, rowData.get("data_version"), getMillisecondTimestamp(), changed_by)
start = time.time()
logging.info("Beginning GCS upload", extra={"release": bname})
logging.info("Beginning GCS upload", extra={"file": bname})
bucket = self._getBucket(identifier)(use_gcloud_aio=False)
blob = bucket.blob(bname)
blob.upload_from_string("", content_type="application/json")
duration = time.time() - start
logging.info("Completed GCS upload", extra={"release": bname, "duration": duration})
logging.info("Completed GCS upload", extra={"file": bname, "duration": duration})

def forUpdate(self, rowData, changed_by, trans):
identifier = "-".join([rowData.get(i) for i in self.identifier_columns])
bname = "{}/{}-{}-{}.json".format(identifier, rowData.get("data_version"), getMillisecondTimestamp(), changed_by)
start = time.time()
logging.info("Beginning GCS upload", extra={"release": bname})
logging.info("Beginning GCS upload", extra={"file": bname})
bucket = self._getBucket(identifier)(use_gcloud_aio=False)
blob = bucket.blob(bname)
blob.upload_from_string(json.dumps(rowData[self.data_column]), content_type="application/json")
duration = time.time() - start
logging.info("Completed GCS upload", extra={"release": bname, "duration": duration})
logging.info("Completed GCS upload", extra={"file": bname, "duration": duration})

def getChange(self, change_id=None, column_values=None, data_version=None, transaction=None):
if not set(self.identifier_columns).issubset(column_values.keys()) or not data_version:
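
The three synchronous methods above share one upload-and-time pattern. A condensed standalone sketch of it (assumptions: the google-cloud-storage Blob API as used above; the helper name and its standalone form are mine, not the repository's):

    import logging
    import time

    from google.cloud import storage  # assumption: the google-cloud-storage package


    def upload_history(bucket: storage.Bucket, bname: str, payload: str) -> None:
        # Log before and after the upload, carrying the blob name under the
        # renamed "file" key and timing the round trip.
        start = time.time()
        logging.info("Beginning GCS upload", extra={"file": bname})
        blob = bucket.blob(bname)
        blob.upload_from_string(payload, content_type="application/json")
        duration = time.time() - start
        logging.info("Completed GCS upload", extra={"file": bname, "duration": duration})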
@@ -896,7 +896,7 @@ async def forInsert(self, insertedKeys, columns, changed_by, trans):
for data_version, ts, data in ((None, timestamp - 1, ""), (columns.get("data_version"), timestamp, json.dumps(columns[self.data_column]))):
bname = "{}/{}-{}-{}.json".format(identifier, data_version, ts, changed_by)
start = time.time()
logging.info("Beginning GCS upload", extra={"release": bname})
logging.info("Beginning GCS upload", extra={"file": bname})
# Using a separate session for each request is not ideal, but it's
# the only thing that seems to work. Ideally, we'd share one session
# for the entire application, but we can't for two reasons:
@@ -909,31 +909,31 @@ async def forInsert(self, insertedKeys, columns, changed_by, trans):
blob = bucket.new_blob(bname)
await blob.upload(data, session=session)
duration = time.time() - start
logging.info("Completed GCS upload", extra={"release": bname, "duration": duration})
logging.info("Completed GCS upload", extra={"file": bname, "duration": duration})

async def forDelete(self, rowData, changed_by, trans):
identifier = "-".join([rowData.get(i) for i in self.identifier_columns])
bname = "{}/{}-{}-{}.json".format(identifier, rowData.get("data_version"), getMillisecondTimestamp(), changed_by)
start = time.time()
logging.info("Beginning GCS upload", extra={"release": bname})
logging.info("Beginning GCS upload", extra={"file": bname})
async with ClientSession() as session:
bucket = self._getBucket(identifier)(session=session)
blob = bucket.new_blob(bname)
await blob.upload("", session=session)
duration = time.time() - start
logging.info("Completed GCS upload", extra={"release": bname, "duration": duration})
logging.info("Completed GCS upload", extra={"file": bname, "duration": duration})

async def forUpdate(self, rowData, changed_by, trans):
identifier = "-".join([rowData.get(i) for i in self.identifier_columns])
bname = "{}/{}-{}-{}.json".format(identifier, rowData.get("data_version"), getMillisecondTimestamp(), changed_by)
start = time.time()
logging.info("Beginning GCS upload", extra={"release": bname})
logging.info("Beginning GCS upload", extra={"file": bname})
async with ClientSession() as session:
bucket = self._getBucket(identifier)(session=session)
blob = bucket.new_blob(bname)
await blob.upload(json.dumps(rowData[self.data_column]), session=session)
duration = time.time() - start
logging.info("Completed GCS upload", extra={"release": bname, "duration": duration})
logging.info("Completed GCS upload", extra={"file": bname, "duration": duration})


class HistoryTable(AUSTable):
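For the async variants, the in-code comment above records the design choice: open a fresh aiohttp ClientSession per request rather than sharing one application-wide. A minimal standalone sketch of that pattern (assumptions: the gcloud-aio-storage Bucket/Blob API as used in the diff; the bucket name and helper are illustrative):

    import logging
    import time

    from aiohttp import ClientSession
    from gcloud.aio.storage import Bucket, Storage  # assumption: the gcloud-aio-storage package


    async def upload_history(bucket_name: str, bname: str, payload: str) -> None:
        start = time.time()
        logging.info("Beginning GCS upload", extra={"file": bname})
        # A fresh session per upload, per the comment in the diff above.
        async with ClientSession() as session:
            bucket = Bucket(Storage(session=session), bucket_name)
            blob = bucket.new_blob(bname)
            await blob.upload(payload, session=session)
        duration = time.time() - start
        logging.info("Completed GCS upload", extra={"file": bname, "duration": duration})

Run it with, e.g., asyncio.run(upload_history("history-bucket", "some-id/1-1601017200000-someone.json", "{}")); credential discovery is left to the library's defaults.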
