Skip to content

Commit

Permalink
Merge pull request #13 from onna/cache-access-token
Browse files Browse the repository at this point in the history
Cache service account tokens.
  • Loading branch information
j-durbin authored Jul 14, 2023
2 parents 384515c + b4e0b3d commit 9484b36
Show file tree
Hide file tree
Showing 4 changed files with 27 additions and 34 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.rst
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
6.0.9 (unreleased)
------------------
- Cache access tokens when using pod RBAC.

6.0.8 (unreleased)
------------------
- Support arbitrary args to iterate bucket page method, e.g. to include deleted versions.
Expand Down
2 changes: 1 addition & 1 deletion VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
6.0.8
6.0.9
54 changes: 21 additions & 33 deletions guillotina_gcloudstorage/storage.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
# -*- coding: utf-8 -*-
from async_lru import alru_cache
from datetime import datetime
from datetime import timedelta
from guillotina import configure
Expand Down Expand Up @@ -39,6 +40,7 @@
import json
import logging
import os
import time


class IGCloudFileStorageManager(IExternalFileStorageManager):
Expand Down Expand Up @@ -202,11 +204,7 @@ async def start(self, dm):
"Content-Length": str(call_size),
}
)
async with util.session.post(
init_url,
headers=headers,
data=metadata,
) as call:
async with util.session.post(init_url, headers=headers, data=metadata) as call:
if call.status != 200:
text = await call.text()
raise GoogleCloudException(f"{call.status}: {text}")
Expand Down Expand Up @@ -331,10 +329,7 @@ async def exists(self):
OBJECT_BASE_URL, await util.get_bucket_name(), quote_plus(file.uri)
)

async with util.session.get(
url,
headers=await self.get_headers(),
) as api_resp:
async with util.session.get(url, headers=await self.get_headers()) as api_resp:
return api_resp.status == 200

@backoff.on_exception(backoff.expo, RETRIABLE_EXCEPTIONS, max_tries=4)
Expand All @@ -359,10 +354,7 @@ async def copy(self, to_storage_manager, to_dm):

headers = await self.get_headers()
headers.update({"Content-Type": "application/json"})
async with util.session.post(
url,
headers=headers,
) as resp:
async with util.session.post(url, headers=headers) as resp:
if resp.status == 404:
text = await resp.text()
reason = (
Expand Down Expand Up @@ -400,7 +392,19 @@ def __init__(self, **kw):
super(GCloudFileField, self).__init__(schema=self.schema, **kw)


# Configuration Utility
@alru_cache(maxsize=2)
async def _get_access_token(_):
    """Fetch a service-account access token from the GCE metadata server.

    The unused positional argument is a time-bucket value supplied by
    ``get_access_token``; it serves purely as the ``alru_cache`` key so the
    token is re-fetched at most once per bucket window.

    Returns the bare access-token string from the metadata response.
    """
    url = "{}instance/service-accounts/{}/token".format(METADATA_URL, SERVICE_ACCOUNT)

    # Request an access token from the metadata server.
    # BUGFIX: the session itself must be entered as a context manager (or
    # explicitly closed) — previously only the request was, so every cache
    # miss leaked an aiohttp.ClientSession and its connector.
    async with aiohttp.ClientSession() as session:
        async with session.get(url, headers=METADATA_HEADERS) as resp:
            # NOTE(review): assert is stripped under `python -O`; consider
            # raising an explicit exception instead — kept as-is to avoid
            # changing the exception type callers may rely on.
            assert resp.status == 200
            data = await resp.json()
            return data["access_token"]


async def get_access_token():
    """Return a (cached) service-account access token.

    Epoch time is divided into 300-second buckets and the bucket id is
    passed as the cache key to ``_get_access_token``, so a fresh token is
    requested from the metadata server at most once per ~5 minutes.
    """
    cache_bucket = round(time.time() / 300)
    return await _get_access_token(cache_bucket)


class GCloudBlobStore(object):
Expand Down Expand Up @@ -441,15 +445,7 @@ def session(self):
async def get_access_token(self):
# If not using json service credentials, get the access token based on pod rbac
if not self._credentials:
url = "{}instance/service-accounts/{}/token".format(
METADATA_URL, SERVICE_ACCOUNT
)

# Request an access token from the metadata server.
async with self.session.get(url, headers=METADATA_HEADERS) as resp:
assert resp.status == 200
data = await resp.json()
access_token = data["access_token"]
access_token = await get_access_token()
else:
access_token = self._credentials.get_access_token().access_token
self._creation_access_token = datetime.now()
Expand Down Expand Up @@ -582,11 +578,7 @@ async def iterate_bucket_page(self, page_token=None, prefix=None, **params):
if access_token:
headers = {"AUTHORIZATION": f"Bearer {access_token}"}

async with self.session.get(
url,
headers=headers,
params=params,
) as resp:
async with self.session.get(url, headers=headers, params=params) as resp:
assert resp.status == 200
data = await resp.json()
return data
Expand All @@ -597,11 +589,7 @@ async def generate_download_signed_url(
client = self.get_client()
bucket = google.cloud.storage.Bucket(client, name=await self.get_bucket_name())
blob = bucket.blob(key)
request_args = {
"version": "v4",
"expiration": expiration,
"method": "GET",
}
request_args = {"version": "v4", "expiration": expiration, "method": "GET"}
if credentials:
request_args["credentials"] = credentials
return blob.generate_signed_url(**request_args)
1 change: 1 addition & 0 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
"gcloud",
"ujson",
"backoff",
"async-lru",
],
extras_require={"test": test_reqs},
tests_require=test_reqs,
Expand Down

0 comments on commit 9484b36

Please sign in to comment.