
Ops Logs: #933

Merged · 3 commits · Jan 19, 2024
4 changes: 3 additions & 1 deletion src/components/collection/DataPreview/index.tsx
@@ -53,7 +53,9 @@ export function DataPreview({ collectionName }: Props) {
// TODO (typing) we need to fix typing
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
const journal = useMemo(() => journalsData?.journals?.[0], [journalsData]);
const journalData = useJournalData(journal?.name, 20, collectionName);
const journalData = useJournalData(journal?.name, collectionName, {
desiredCount: 20,
});

// There is a brief delay between when the data preview card is rendered and the two journal-related
// hooks are called, which resulted in `isLoading` being a false negative. If the journal client is
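In short, the hook's positional document count moves into a trailing settings object. A before/after sketch of the call-site change above:

// Before: the count was passed positionally, between journal name and collection name.
// const journalData = useJournalData(journal?.name, 20, collectionName);

// After: collectionName moves up and tuning knobs live in a settings object.
const journalData = useJournalData(journal?.name, collectionName, {
    desiredCount: 20,
});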
104 changes: 61 additions & 43 deletions src/components/shared/Entity/Details/Ops/index.tsx
@@ -1,90 +1,110 @@
import { Box, Button, LinearProgress, Stack } from '@mui/material';
import { Box, Button, Stack } from '@mui/material';
import KeyValueList from 'components/shared/KeyValueList';
import UnderDev from 'components/shared/UnderDev';
import LogsTable from 'components/tables/Logs';
import { useJournalData } from 'hooks/journals/useJournalData';
import useJournalNameForLogs from 'hooks/journals/useJournalNameForLogs';
import useGlobalSearchParams, {
GlobalSearchParams,
} from 'hooks/searchParams/useGlobalSearchParams';
import { useEffect, useState } from 'react';
import { useEffect, useMemo, useState } from 'react';
import { OpsLogFlowDocument } from 'types';
import { MEGABYTE } from 'utils/dataPlane-utils';

const docsRequested = 25;
const maxBytes = Math.round(MEGABYTE / 25);

function Ops() {
const [loading] = useState(false);
const [fetchingMore, setFetchingMore] = useState(false);
const [olderFinished, setOlderFinished] = useState(false);
const [lastParsed, setLastParsed] = useState<number>(0);
const [docs, setDocs] = useState<OpsLogFlowDocument[]>([]);

const catalogName = useGlobalSearchParams(GlobalSearchParams.CATALOG_NAME);
const [name, collectionName] = useJournalNameForLogs(catalogName);

// TODO (typing)
// need to handle typing
const journalData = useJournalData(name, docsRequested, collectionName);
const documents = (journalData.data?.documents ??
[]) as OpsLogFlowDocument[];
const { data, loading, refresh } = useJournalData(name, collectionName, {
maxBytes,
});

const documents = useMemo(
() => (data?.documents ?? []) as OpsLogFlowDocument[],
[data?.documents]
);

useEffect(() => {
console.log('Ops:journalData:effect', journalData);
// Get the metadata out of the response
const meta = data?.meta;

// Wait until loading is complete
if (journalData.loading) {
return;
}
// Figure out what the last document offset is
const parsedEnd = meta?.docsMetaResponse.offset
? parseInt(meta.docsMetaResponse.offset, 10)
: null;

// If we have documents add them to the list
if (journalData.data?.documents) {
// This is where we need to populate a list of docs we maintain
// journalData.data.documents.forEach((doc) => {});
// Since journalData is read kinda async we need to wait to
// update documents until we know the metadata changed
if (parsedEnd !== lastParsed) {
if (documents.length > 0) {
const newDocs = [...documents, ...docs];
setDocs(newDocs);
setFetchingMore(false);
}
}
}, [data?.meta, docs, documents, lastParsed]);

useEffect(() => {
// Get the metadata out of the response
const meta = journalData.data?.meta;
const meta = data?.meta;

// Figure out what the last document offset is
const parsedEnd = meta?.docsMetaResponse.offset
? parseInt(meta.docsMetaResponse.offset, 10)
: null;

// Keep track of where we last read data from so we can keep stepping backwards through the file
setLastParsed(parsedEnd ?? 0);

if (
journalData.data?.documents &&
journalData.data.documents.length > 0 &&
parsedEnd === 0
) {
// If we have hit 0 then we know we hit the start of the data and nothing older is available
if (parsedEnd === 0) {
setOlderFinished(true);
}

// We only care about the data changing here
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [journalData]);

console.log('Ops:journalData:data:meta', {
documents,
olderFinished,
});
}, [data?.meta]);

return (
<Box>
<UnderDev />
<Box>
<KeyValueList
sectionTitle="Debugging Values"
data={[
{ title: 'Documents', val: docs.length },
{ title: 'Last Byte Parsed', val: lastParsed },
]}
/>

<Stack spacing={2} direction="row">
<Button
disabled={olderFinished}
onClick={() =>
journalData.refresh({
disabled={loading || fetchingMore || olderFinished}
onClick={() => {
setFetchingMore(true);
refresh({
offset: 0,
endOffset: lastParsed,
})
}
});
}}
>
Load Older (wip - might blow up)
Load Older
</Button>

<Button onClick={() => journalData.refresh()}>
Load Newer (wip - just full refresh right now)
<Button
disabled={loading || fetchingMore}
onClick={() => {
setFetchingMore(true);
refresh();
}}
>
Load Newer
</Button>
</Stack>

@@ -101,11 +121,9 @@ function Ops() {
}
/>*/}

{journalData.loading ? <LinearProgress /> : null}

<LogsTable
documents={documents}
loading={loading}
documents={docs}
loading={fetchingMore || loading}
fetchNewer={() => {
console.log('fetch latest logs');

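The two buttons above encode the paging contract. A condensed sketch of it — the handler names are illustrative, but the calls come straight from the diff:

// "Load Older" re-reads from byte 0 up to the last offset we already parsed,
// stepping one window further back through the journal on each click.
const loadOlder = () => {
    setFetchingMore(true);
    refresh({ offset: 0, endOffset: lastParsed });
};

// "Load Newer" passes no offsets, i.e. a full refresh from the journal tail.
const loadNewer = () => {
    setFetchingMore(true);
    refresh();
};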
2 changes: 1 addition & 1 deletion src/components/tables/Logs/index.tsx
@@ -61,7 +61,7 @@ function LogsTable({ documents, fetchNewer, fetchOlder, loading }: Props) {
stayScrolled();

return (
<TableContainer component={Box} maxHeight={250} ref={tableScroller}>
<TableContainer component={Box} maxHeight={500} ref={tableScroller}>
<Table
aria-label={intl.formatMessage({
id: 'entityTable.title',
82 changes: 47 additions & 35 deletions src/hooks/journals/useJournalData.ts
@@ -12,6 +12,7 @@ import { useCounter } from 'react-use';
import useSWR from 'swr';
import {
dataPlaneFetcher_list,
MAX_DOCUMENT_SIZE,
shouldRefreshToken,
} from 'utils/dataPlane-utils';

@@ -153,7 +154,7 @@ async function loadDocuments({
offsets?: LoadDocumentsOffsets;
journalName?: string;
client?: JournalClient;
documentCount: number;
documentCount?: number;
maxBytes: number;
}) {
if (!client || !journalName) {
@@ -164,6 +165,7 @@
tooManyBytes: false,
};
}

const metaInfo = (
await client.read({
metadataOnly: true,
@@ -190,14 +192,13 @@
let documents: JournalRecord[] = [];
let attempt = 0;

while (
documents.length < documentCount &&
start > 0 &&
head - start < maxBytes
) {
attempt += 1;
start = Math.max(0, start - INCREMENT * attempt);

// TODO (gross)
// This is bad and I feel bad. The function uses references to vars up above.
// It was done so we could quickly add the ability to read based only on data size.
// Future work is needed to fully break this hook up into the standalone pieces that are needed.
// More than likely we can have a hook for "readingByDoc" and one for "readingByByte" and have those
// share common functions
const attemptToRead = async () => {
const stream = (
await client.read({
journal: journalName,
Expand All @@ -206,14 +207,6 @@ async function loadDocuments({
})
).unwrap();

// console.log('loadDocuments : ', {
// metaInfo,
// metadataResponse,
// stream,
// startingOffset,
// head,
// });

// Split the stream so we can read it twice
const teedDocumentsStream = stream.tee();

@@ -226,16 +219,28 @@
const docsMetaGenerator = streamAsyncIterator(teedDocumentsStream[1]);
docsMetaResponse = (await docsMetaGenerator.next()).value;

console.log('docsMetaResponse = ', docsMetaResponse);

// TODO: Instead of inefficiently re-reading until we get the desired row count,
// we should accumulate documents and shift `head` backwards using `ProtocolReadResponse.offset`
documents = allDocs
return allDocs
.filter(isJournalRecord)
.filter(
(record) => !(record._meta as unknown as { ack: boolean }).ack
)
.slice(documentCount * -1);
);
};

if (!documentCount) {
start = Math.max(0, start - maxBytes);
documents = await attemptToRead();
} else {
while (
documents.length < documentCount &&
start > 0 &&
head - start < maxBytes
) {
attempt += 1;
start = Math.max(0, start - INCREMENT * attempt);
documents = (await attemptToRead()).slice(documentCount * -1);
}
}

return {
Expand All @@ -244,7 +249,7 @@ async function loadDocuments({
metadataResponse,
docsMetaResponse,
},
tooFewDocuments: start <= 0,
tooFewDocuments: documentCount ? start <= 0 : false,
tooManyBytes: head - start >= maxBytes,
};
}
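
For readers unfamiliar with the `stream.tee()` trick used in `attemptToRead`, here is a self-contained sketch of reading one Web ReadableStream twice. The hook's own helpers (`parseJournalDocuments`, `streamAsyncIterator`) are replaced with plain reader loops, so this illustrates the technique rather than the hook's actual code:

// tee() splits one ReadableStream into two independent branches, letting us
// consume the same bytes twice without re-fetching them.
async function readTwice(stream: ReadableStream<Uint8Array>) {
    const [docsBranch, metaBranch] = stream.tee();

    // Branch 1: drain fully (stands in for parseJournalDocuments).
    const chunks: Uint8Array[] = [];
    const docsReader = docsBranch.getReader();
    for (;;) {
        const { done, value } = await docsReader.read();
        if (done) break;
        chunks.push(value);
    }

    // Branch 2: peek at the first response only (stands in for pulling
    // docsMetaResponse off the async iterator).
    const { value: firstResponse } = await metaBranch.getReader().read();

    return { chunks, firstResponse };
}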
@@ -259,12 +264,16 @@ function isJournalRecord(val: any): val is JournalRecord {
return val?._meta?.uuid;
}

interface UseJournalDataSettings {
// If you want a specific number of docs we'll keep making calls until we get that many.
// Otherwise we just return whatever came back from the single call you made.
desiredCount?: number;
maxBytes?: number;
}
const useJournalData = (
journalName?: string,
desiredCount: number = 50,
collectionName?: string,
// 16mb, which is the max document size, ensuring we'll always get at least 1 doc if it exists
maxBytes: number = 16 * 10 ** 6
settings?: UseJournalDataSettings
) => {
const failures = useRef(0);

@@ -297,41 +306,43 @@
useEffect(() => {
void (async () => {
if (
(refreshing && !loading) ||
(failures.current < 2 &&
journalName &&
journalClient &&
!loading &&
!data) ||
refreshing
!data)
) {
try {
setLoading(true);
const docs = await loadDocuments({
journalName,
client: journalClient,
documentCount: desiredCount,
maxBytes,
documentCount: settings?.desiredCount,
maxBytes: settings?.maxBytes ?? MAX_DOCUMENT_SIZE,
offsets,
});
setData(docs);
} catch (e: unknown) {
failures.current += 1;
setError(e);
} finally {
setLoading(false);
// Make sure to set refreshing back first
// Otherwise the effect fires again with loading=false|refreshing=true and loads more data
setRefreshing(false);
setLoading(false);
}
}
})();
}, [
data,
desiredCount,
journalClient,
journalName,
loading,
maxBytes,
refreshing,
offsets,
refreshing,
settings?.desiredCount,
settings?.maxBytes,
]);

return useMemo(
@@ -340,11 +351,12 @@
error,
loading,
refresh: (newOffset?: LoadDocumentsOffsets) => {
failures.current = 0;

if (newOffset) {
setOffsets(newOffset);
}

failures.current = 0;
setRefreshing(true);
},
}),
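Putting the new surface together, a usage sketch of the reworked hook — the surrounding variables are hypothetical, but the parameter names and defaults match the diff:

// Byte mode (Ops): a single read of at most maxBytes from the journal tail.
const opsLogs = useJournalData(name, collectionName, {
    maxBytes: Math.round(MEGABYTE / 25),
});

// Document-count mode (DataPreview): keep widening the read window until
// roughly 20 documents have been parsed.
const preview = useJournalData(journal?.name, collectionName, {
    desiredCount: 20,
});

// No settings: one read capped at MAX_DOCUMENT_SIZE (16 MB), which guarantees
// at least one max-size document fits in the window if any exists.
const everything = useJournalData(name, collectionName);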
5 changes: 5 additions & 0 deletions src/utils/dataPlane-utils.ts
@@ -65,3 +65,8 @@ export async function dataPlaneFetcher_list(
return Promise.reject(error);
}
}

export const MEGABYTE = 1 * 10 ** 6;

// 16 MB, which is the max document size, ensuring we'll always get at least 1 doc if it exists
export const MAX_DOCUMENT_SIZE = 16 * MEGABYTE;