-
Notifications
You must be signed in to change notification settings - Fork 97
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
FLAG-1263: Data mart api client #4966
Draft
wri7tno
wants to merge
4
commits into
develop
Choose a base branch
from
feat/FLAG-1263
base: develop
Could not load branches
Branch not found: {{ refName }}
Loading
Could not load tags
Nothing to show
Loading
Are you sure you want to change the base?
Some commits from the old base branch may be removed from the timeline,
and old review comments may become outdated.
Draft
Changes from all commits
Commits
File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,117 @@ | ||
// eslint-disable-next-line no-unused-vars | ||
import { NextApiRequest, NextApiResponse } from 'next'; | ||
import { | ||
createRequestByGeostoryId, | ||
getDataByGeostoreId, | ||
getDataFromLink, | ||
} from 'services/datamart'; | ||
import { GFW_DATA_API, GFW_STAGING_DATA_API } from 'utils/apis'; | ||
|
||
// types | ||
/** | ||
* @typedef {object} DataLinkObject | ||
* @property {string} link - The URL to POST the content. | ||
*/ | ||
|
||
/** | ||
* @typedef {object} GetResponseObject | ||
* @property {string} status - status. | ||
* @property {DataLinkObject} data - data link object. | ||
*/ | ||
|
||
/** | ||
* @typedef {object} NotFoundObject | ||
* @property {string} status - status. | ||
* @property {string} message - message. | ||
*/ | ||
// END types | ||
|
||
// Feature environment flag set at build time (e.g. 'staging').
const ENVIRONMENT = process.env.NEXT_PUBLIC_FEATURE_ENV;

// Base URL of the GFW Data API; staging builds target the staging host.
export const DATA_API_URL =
  ENVIRONMENT === 'staging' ? GFW_STAGING_DATA_API : GFW_DATA_API;
|
||
/**
 * GET handler: fetches data mart results for a dataset and geostore.
 *
 * Query parameters:
 *   - slug: dataset path segments (catch-all route param)
 *   - geostore_id: geostore identifier
 *   - canopy_cover: canopy threshold filter
 *
 * @param {NextApiRequest} req
 * @param {NextApiResponse} res
 */
const fetchDataByDatasetAndGeostore = async (req, res) => {
  const { query } = req;
  // TODO: add more parameters to the query like, global, adm9, adm1, etc etc etc
  const { slug, geostore_id, canopy_cover } = query;

  // Normalize to an array of strings: Next.js can hand us a string instead
  // of an array, and a tampered request could otherwise cause type
  // confusion downstream (CodeQL: type confusion through parameter tampering).
  const slugs = Array.isArray(slug) ? slug : [slug].filter(Boolean);

  if (slugs.length === 0) {
    res.status(400).send();
    return;
  }

  if (slugs.length === 1) {
    const dataByGeostore = await getDataByGeostoreId({
      dataset: slugs[0],
      geostoreId: geostore_id,
      canopy: canopy_cover,
    });

    res.status(200).send(dataByGeostore);
    return;
  }

  const url = `${DATA_API_URL}/${slugs.join('/')}`;
  try {
    const dataByUrl = await getDataFromLink({ url });

    res.send(dataByUrl);
  } catch (error) {
    // Fall back to 500 when the upstream error carries no HTTP status;
    // res.status(undefined) would itself throw.
    const status = error.response?.status ?? 500;
    res.status(status).send({
      status,
      message: error?.message,
    });
  }
};
|
||
/**
 * POST handler: submits a request to create data mart results for a
 * dataset and geostore.
 *
 * @param {NextApiRequest} req
 * @param {NextApiResponse} res
 */
const postData = async (req, res) => {
  const { query } = req;
  // TODO: add more parameters to the query like, global, adm9, adm1, etc etc etc
  const { slug, geostore_id, canopy_cover } = query;

  // Normalize to an array of strings: Next.js can hand us a string instead
  // of an array, and a tampered request could otherwise cause type
  // confusion downstream (CodeQL: type confusion through parameter tampering).
  const slugs = Array.isArray(slug) ? slug : [slug].filter(Boolean);

  if (slugs.length === 0) {
    res.status(400).send();
    return;
  }

  try {
    const submitted = await createRequestByGeostoryId({
      dataset: slugs[0],
      geostoreId: geostore_id,
      canopy: canopy_cover,
    });

    res.status(201).send(submitted);
  } catch (error) {
    // Fall back to 500 when the upstream error carries no HTTP status;
    // res.status(undefined) would itself throw.
    const status = error.response?.status ?? 500;
    res.status(status).send({
      status,
      message: error?.message,
    });
  }
};
|
||
/**
 * API route entry point: dispatches to the GET or POST handler by method.
 *
 * @param {NextApiRequest} req
 * @param {NextApiResponse} res
 */
export default async (req, res) => {
  switch (req.method) {
    case 'POST':
      await postData(req, res);
      break;
    case 'GET':
      await fetchDataByDatasetAndGeostore(req, res);
      break;
    default:
      // Signal method-not-allowed via the status code; the original
      // res.send(405) replied HTTP 200 with "405" as the body.
      res.status(405).send();
  }
};
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,170 @@ | ||
import qs from 'qs'; | ||
import { dataRequest } from 'utils/request'; | ||
import { GFW_DATA_API, GFW_STAGING_DATA_API } from 'utils/apis'; | ||
|
||
// Feature environment flag set at build time (e.g. 'staging').
const ENVIRONMENT = process.env.NEXT_PUBLIC_FEATURE_ENV;
// Base URL of the GFW Data API; staging builds target the staging host.
const DATA_API_URL =
  ENVIRONMENT === 'staging' ? GFW_STAGING_DATA_API : GFW_DATA_API;
|
||
/** | ||
* @typedef {object} DataLinkObject | ||
* @property {string} link - The URL to POST the content. | ||
*/ | ||
|
||
/** | ||
* @typedef {object} GetResponseObject | ||
* @property {string} status - status. | ||
* @property {DataLinkObject} data - data link object. | ||
*/ | ||
|
||
/** | ||
* @typedef {object} NotFoundObject | ||
* @property {string} status - status. | ||
* @property {string} message - message. | ||
*/ | ||
|
||
/**
 * Fetches existing data mart results for a dataset and geostore.
 *
 * @param {Object} request - request
 * @param {string} request.dataset - dataset.
 * @param {string} request.geostoreId - a geostore id.
 * @param {number} request.canopy - canopy filter.
 * @returns {Promise<GetResponseObject | NotFoundObject>} the response body,
 *   or a not-found object when the resource has not been computed yet.
 * @throws rethrows any non-404 request error.
 */
const getDataByGeostoreId = async ({ dataset, geostoreId, canopy }) => {
  const params = qs.stringify({
    geostore_id: geostoreId,
    canopy_cover: canopy,
  });
  const requestUrl = `/v0/land/${dataset}/?${params}`;

  try {
    const response = await dataRequest.get(requestUrl);
    return response.data;
  } catch (error) {
    // 404 is an expected outcome (data not computed yet); surface it as a
    // plain object so callers can decide to create the resource.
    if (error.response?.status === 404) {
      return {
        status: error.response.status,
        message: error.response.statusText,
      };
    }
    // Previously non-404 errors were swallowed, leaving `response`
    // undefined so `response.data` threw a TypeError; rethrow instead so
    // callers see the real failure.
    throw error;
  }
};
|
||
/**
 * Submits (POSTs) a data-mart computation request for a dataset/geostore.
 *
 * @param {Object} request - request
 * @param {string} request.dataset - dataset.
 * @param {string} request.geostoreId - a geostore id.
 * @param {number} request.canopy - canopy filter.
 * @returns {Promise<GetResponseObject>} response.
 */
const createRequestByGeostoryId = async ({ dataset, geostoreId, canopy }) => {
  const payload = {
    geostore_id: geostoreId,
    canopy_cover: canopy,
  };

  return dataRequest.post(`/v0/land/${dataset}`, payload);
};
|
||
/**
 * Fetches a data-mart resource from its link, stripping the API base URL
 * so the request goes through the shared dataRequest client.
 *
 * @param {Object} request - request
 * @param {string} request.url - url
 * @returns {Promise<GetResponseObject>} response.
 */
const getDataFromLink = async ({ url }) => {
  const relativeUrl = url.replace(DATA_API_URL, '');
  return dataRequest.get(relativeUrl);
};
|
||
|
||
// Resolve after `ms` milliseconds.
const wait = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

/**
 * Calls `fn(params)` and retries while the response reports a 'pending'
 * status or the call throws, waiting `interval` ms between attempts.
 *
 * @param {Function} fn - async function to invoke.
 * @param {Object} params - argument forwarded to `fn`.
 * @param {number} retries - remaining attempts before giving up.
 * @param {number} interval - delay between attempts, in milliseconds.
 * @param {string} finalErr - rejection value when retries are exhausted.
 * @returns {Promise<*>} the first non-pending result of `fn`.
 */
const retryRequest = async (
  fn,
  params,
  retries = 3,
  interval = 1000,
  finalErr = 'Retry failed'
) => {
  try {
    const res = await fn(params);

    if (res.data?.status === 'pending') {
      // Honor the retry budget here too: previously a persistently
      // 'pending' response recursed forever with no termination.
      if (retries <= 0) {
        return Promise.reject(finalErr);
      }
      await wait(interval);
      return retryRequest(fn, params, retries - 1, interval, finalErr);
    }

    return res;
  } catch (err) {
    if (retries <= 0) {
      return Promise.reject(finalErr);
    }
    await wait(interval);
    return retryRequest(fn, params, retries - 1, interval, finalErr);
  }
};
|
||
/**
 * High-level client: ensures data-mart results exist for a dataset and
 * geostore, then resolves with the fetched resource.
 *
 * If results already exist, their link is polled until ready; otherwise a
 * computation request is POSTed first and the returned link is polled.
 *
 * @param {Object} request
 * @param {string} request.dataset - dataset
 * @param {string} request.geostoreId - geostore id
 * @param {boolean} request.isGlobal - whether the query is global or not (currently unused)
 * @param {string} request.adm0 - adm0 (currently unused)
 * @param {string} request.adm1 - adm1 (currently unused)
 * @param {string} request.adm2 - adm2 (currently unused)
 * @param {boolean} request.isAnalyis - is analysis (currently unused; NOTE(review): looks like a typo for "isAnalysis" — confirm with callers before renaming)
 * @param {number} request.threshold - canopy threshold
 * @param {boolean} request.isDownload - whether the query is a download (currently unused)
 * @param {number} request.retries - manages retries (only for recursion; currently unused)
 * @returns {Promise<GetResponseObject>} the resolved data-mart resource.
 */
export const fetchDataMart = async ({
  dataset,
  geostoreId,
  isGlobal,
  adm0,
  adm1,
  adm2,
  isAnalyis,
  threshold,
  isDownload,
  retries,
}) => {
  const response = await getDataByGeostoreId({
    dataset,
    geostoreId,
    canopy: threshold,
  });

  if (response.status !== 404) {
    // The resource already exists: poll its link until it is ready.
    // Previously the result was only logged and the caller got undefined.
    return retryRequest(getDataFromLink, { url: response.link });
  }

  // Not found: ask the back end to compute the data, then poll the link it
  // returns. TODO: honor the `retry-after` response header for the delay.
  const submitted = await createRequestByGeostoryId({
    dataset,
    geostoreId,
    canopy: threshold,
  });

  return retryRequest(getDataFromLink, { url: submitted.data.link });
};
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
Check failure
Code scanning / CodeQL
Type confusion through parameter tampering Critical
Copilot Autofix AI 11 days ago
To fix the problem, we need to ensure that the
slugs
parameter is always treated as an array of strings. We can do this by checking the type ofslugs
and converting it to an array if it is not already one. This will prevent type confusion attacks and ensure that the code behaves as expected. We will modify the
fetchDataByDatasetAndGeostore
andpostData
functions to include type checks and conversions for theslugs
parameter. Specifically, we will:slugs
is an array. If not, convert it to an array containing the single value.slugs
is an array.