Merge pull request opendatahub-io#2334 from lucferbux/rhoaieng-549
Fix performance issues in Model Serving Global
openshift-merge-bot[bot] authored Jan 19, 2024
2 parents 06674c3 + 2522c0f commit 2dd7f08
Showing 8 changed files with 156 additions and 16 deletions.
@@ -27,6 +27,7 @@ type HandlersProps = {
projectEnableModelMesh?: boolean;
servingRuntimes?: ServingRuntimeKind[];
inferenceServices?: InferenceServiceKind[];
delayInferenceServices?: boolean;
};

const initIntercepts = ({
@@ -35,6 +36,7 @@ const initIntercepts = ({
projectEnableModelMesh,
servingRuntimes = [mockServingRuntimeK8sResource({})],
inferenceServices = [mockInferenceServiceK8sResource({})],
delayInferenceServices,
}: HandlersProps) => {
cy.intercept(
'/api/dsc/status',
@@ -78,13 +80,30 @@
},
mockK8sResourceList(servingRuntimes),
);
cy.intercept(
{
method: 'GET',
pathname: '/api/k8s/apis/serving.kserve.io/v1alpha1/servingruntimes',
},
mockK8sResourceList(servingRuntimes),
);
cy.intercept(
{
method: 'GET',
pathname: '/api/k8s/apis/serving.kserve.io/v1beta1/namespaces/modelServing/inferenceservices',
},
mockK8sResourceList(inferenceServices),
);
cy.intercept(
{
method: 'GET',
pathname: '/api/k8s/apis/serving.kserve.io/v1beta1/inferenceservices',
},
{
delay: delayInferenceServices ? 1000 : 0,
body: mockK8sResourceList(inferenceServices),
},
);
cy.intercept(
{
method: 'POST',
@@ -205,6 +224,17 @@ describe('Model Serving Global', () => {
inferenceServiceModal.findSubmitButton().should('be.disabled');
});

it('All projects loading', () => {
initIntercepts({ delayInferenceServices: true, servingRuntimes: [], inferenceServices: [] });

// Visit the all-projects view (no project name passed here)
modelServingGlobal.visit();

modelServingGlobal.shouldWaitAndCancel();

modelServingGlobal.shouldBeEmpty();
});

it('Empty State No Project Selected', () => {
initIntercepts({ inferenceServices: [] });

11 changes: 11 additions & 0 deletions frontend/src/__tests__/cypress/cypress/pages/modelServing.ts
@@ -24,6 +24,17 @@ class ModelServingGlobal {
return this;
}

shouldWaitAndCancel() {
cy.findAllByText(
'Retrieving model data from all projects in the cluster. This can take a few minutes.',
);
this.findCancelButton().click();
}

findCancelButton() {
return cy.findByRole('button', { name: 'Cancel' });
}

findDeployModelButton() {
return cy.findByRole('button', { name: 'Deploy model' });
}
16 changes: 10 additions & 6 deletions frontend/src/pages/ApplicationsPage.tsx
@@ -32,6 +32,7 @@ type ApplicationsPageProps = {
headerAction?: React.ReactNode;
headerContent?: React.ReactNode;
provideChildrenPadding?: boolean;
loadingContent?: React.ReactNode;
};

const ApplicationsPage: React.FC<ApplicationsPageProps> = ({
@@ -48,6 +49,7 @@ const ApplicationsPage: React.FC<ApplicationsPageProps> = ({
headerAction,
headerContent,
provideChildrenPadding,
loadingContent,
}) => {
const renderHeader = () => (
<PageSection variant={PageSectionVariants.light}>
@@ -88,12 +90,14 @@ const ApplicationsPage: React.FC<ApplicationsPageProps> = ({

if (!loaded) {
return (
<PageSection isFilled>
<EmptyState variant={EmptyStateVariant.lg} data-id="loading-empty-state">
<Spinner size="xl" />
<EmptyStateHeader titleText="Loading" headingLevel="h1" />
</EmptyState>
</PageSection>
loadingContent || (
<PageSection isFilled>
<EmptyState variant={EmptyStateVariant.lg} data-id="loading-empty-state">
<Spinner size="xl" />
<EmptyStateHeader titleText="Loading" headingLevel="h1" />
</EmptyState>
</PageSection>
)
);
}

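For context, a minimal sketch of how a page can override the default spinner through the new loadingContent prop. The consumer component below is hypothetical; apart from loadingContent, the props shown (title, description, loaded, empty) are assumed to already exist on ApplicationsPage and are not introduced by this change.

import * as React from 'react';
import ApplicationsPage from '~/pages/ApplicationsPage';

// Hypothetical consumer: only the loadingContent prop comes from this change.
const ExamplePage: React.FC<{ loaded: boolean; itemCount: number }> = ({ loaded, itemCount }) => (
  <ApplicationsPage
    title="Example"
    description="Overrides the default spinner while data loads."
    loaded={loaded}
    empty={itemCount === 0}
    loadingContent={<div>Custom loading state, e.g. with a Cancel action</div>}
  >
    <div>Rendered once loaded is true</div>
  </ApplicationsPage>
);

export default ExamplePage;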
8 changes: 7 additions & 1 deletion frontend/src/pages/modelServing/ModelServingContext.tsx
@@ -36,6 +36,8 @@ type ModelServingContextType = {
servingRuntimes: ContextResourceData<ServingRuntimeKind>;
inferenceServices: ContextResourceData<InferenceServiceKind>;
project: ProjectKind | null;
preferredProject: ProjectKind | null;
projects: ProjectKind[] | null;
};

type ModelServingContextProviderProps = {
@@ -52,6 +54,8 @@ export const ModelServingContext = React.createContext<ModelServingContextType>(
servingRuntimes: DEFAULT_CONTEXT_DATA,
inferenceServices: DEFAULT_CONTEXT_DATA,
project: null,
preferredProject: null,
projects: null,
});

const ModelServingContextProvider = conditionalArea<ModelServingContextProviderProps>(
@@ -60,7 +64,7 @@ const ModelServingContextProvider = conditionalArea<ModelServingContextProviderProps>(
)(({ children, namespace }) => {
const { dashboardNamespace } = useDashboardNamespace();
const navigate = useNavigate();
const { projects } = React.useContext(ProjectsContext);
const { projects, preferredProject } = React.useContext(ProjectsContext);
const project = projects.find(byName(namespace)) ?? null;
useSyncPreferredProject(project);
const servingRuntimeTemplates = useContextResourceData<TemplateKind>(
@@ -144,6 +148,8 @@ const ModelServingContextProvider = conditionalArea<ModelServingContextProviderProps>(
dataConnections,
refreshAllData,
project,
preferredProject,
projects,
}}
>
{children}
@@ -1,19 +1,25 @@
import React from 'react';
import { useNavigate } from 'react-router';
import ApplicationsPage from '~/pages/ApplicationsPage';
import { ModelServingContext } from '~/pages/modelServing/ModelServingContext';
import useServingPlatformStatuses from '~/pages/modelServing/useServingPlatformStatuses';
import { getProjectModelServingPlatform } from '~/pages/modelServing/screens/projects/utils';
import EmptyModelServing from './EmptyModelServing';
import InferenceServiceListView from './InferenceServiceListView';
import ModelServingProjectSelection from './ModelServingProjectSelection';
import ModelServingLoading from './ModelServingLoading';

const ModelServingGlobal: React.FC = () => {
const {
servingRuntimes: { data: servingRuntimes, loaded: servingRuntimesLoaded },
inferenceServices: { data: inferenceServices, loaded: inferenceServicesLoaded },
project: currentProject,
preferredProject,
projects,
} = React.useContext(ModelServingContext);

const navigate = useNavigate();

const servingPlatformStatuses = useServingPlatformStatuses();
const { error: notInstalledError } = getProjectModelServingPlatform(
currentProject,
@@ -34,6 +40,20 @@ const ModelServingGlobal: React.FC = () => {
/>
}
provideChildrenPadding
loadingContent={
currentProject ? undefined : (
<ModelServingLoading
title="Loading"
description="Retrieving model data from all projects in the cluster. This can take a few minutes."
onCancel={() => {
const redirectProject = preferredProject ?? projects?.[0];
if (redirectProject) {
navigate(`/modelServing/${redirectProject?.metadata.name}`);
}
}}
/>
)
}
>
<InferenceServiceListView
inferenceServices={inferenceServices}
@@ -0,0 +1,41 @@
import {
Button,
EmptyState,
EmptyStateActions,
EmptyStateBody,
EmptyStateFooter,
EmptyStateHeader,
EmptyStateVariant,
PageSection,
Spinner,
} from '@patternfly/react-core';
import React from 'react';

type ModelServingLoadingProps = {
title: string;
description: string;
onCancel: () => void;
};

const ModelServingLoading: React.FC<ModelServingLoadingProps> = ({
title,
description,
onCancel,
}) => (
<PageSection isFilled>
<EmptyState variant={EmptyStateVariant.lg} data-id="loading-empty-state">
<Spinner size="xl" />
<EmptyStateHeader titleText={title} headingLevel="h1" />
<EmptyStateBody>{description}</EmptyStateBody>
<EmptyStateFooter>
<EmptyStateActions>
<Button variant="primary" onClick={onCancel}>
Cancel
</Button>
</EmptyStateActions>
</EmptyStateFooter>
</EmptyState>
</PageSection>
);

export default ModelServingLoading;
24 changes: 20 additions & 4 deletions frontend/src/pages/modelServing/useInferenceServices.ts
@@ -1,20 +1,36 @@
import * as React from 'react';
import { getInferenceServiceContext } from '~/api';
import { InferenceServiceKind } from '~/k8sTypes';
import { getInferenceServiceContext, listInferenceService, useAccessReview } from '~/api';
import { AccessReviewResourceAttributes, InferenceServiceKind } from '~/k8sTypes';
import useFetchState, { FetchState, NotReadyError } from '~/utilities/useFetchState';
import useModelServingEnabled from '~/pages/modelServing/useModelServingEnabled';
import { LABEL_SELECTOR_DASHBOARD_RESOURCE } from '~/const';

const accessReviewResource: AccessReviewResourceAttributes = {
group: 'serving.kserve.io',
resource: 'inferenceservices',
verb: 'list',
};

const useInferenceServices = (namespace?: string): FetchState<InferenceServiceKind[]> => {
const modelServingEnabled = useModelServingEnabled();

const [allowCreate, rbacLoaded] = useAccessReview({
...accessReviewResource,
});

const getServingInferences = React.useCallback(() => {
if (!modelServingEnabled) {
return Promise.reject(new NotReadyError('Model serving is not enabled'));
}

return getInferenceServiceContext(namespace, LABEL_SELECTOR_DASHBOARD_RESOURCE);
}, [namespace, modelServingEnabled]);
if (!rbacLoaded) {
return Promise.reject(new NotReadyError('Fetch is not ready'));
}

const getInferenceServices = allowCreate ? listInferenceService : getInferenceServiceContext;

return getInferenceServices(namespace, LABEL_SELECTOR_DASHBOARD_RESOURCE);
}, [namespace, modelServingEnabled, rbacLoaded, allowCreate]);

return useFetchState<InferenceServiceKind[]>(getServingInferences, [], {
initialPromisePurity: true,
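As a usage sketch, a component could consume the updated hook as follows. The component is hypothetical, the default export and import path are assumed, and the tuple shape [data, loaded, loadError] is assumed from the FetchState type imported above.

import * as React from 'react';
import useInferenceServices from '~/pages/modelServing/useInferenceServices';

// Hypothetical consumer; assumes FetchState destructures as [data, loaded, loadError].
const InferenceServiceCount: React.FC<{ namespace?: string }> = ({ namespace }) => {
  const [inferenceServices, loaded, loadError] = useInferenceServices(namespace);

  if (loadError) {
    return <div>Failed to load inference services: {loadError.message}</div>;
  }
  if (!loaded) {
    return <div>Loading…</div>;
  }
  // With the change above, users allowed to list the resource get a cluster-wide
  // list; everyone else falls back to the dashboard-scoped context endpoint.
  return <div>{inferenceServices.length} inference services found</div>;
};

export default InferenceServiceCount;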
22 changes: 17 additions & 5 deletions frontend/src/pages/modelServing/useServingRuntimes.ts
@@ -1,32 +1,44 @@
import * as React from 'react';
import { getServingRuntimeContext } from '~/api';
import { ServingRuntimeKind } from '~/k8sTypes';
import { getServingRuntimeContext, listServingRuntimes, useAccessReview } from '~/api';
import { AccessReviewResourceAttributes, ServingRuntimeKind } from '~/k8sTypes';
import useModelServingEnabled from '~/pages/modelServing/useModelServingEnabled';
import useFetchState, { FetchState, NotReadyError } from '~/utilities/useFetchState';
import { LABEL_SELECTOR_DASHBOARD_RESOURCE } from '~/const';

const accessReviewResource: AccessReviewResourceAttributes = {
group: 'serving.kserve.io',
resource: 'servingruntimes',
verb: 'list',
};

const useServingRuntimes = (
namespace?: string,
notReady?: boolean,
): FetchState<ServingRuntimeKind[]> => {
const modelServingEnabled = useModelServingEnabled();

const [allowCreate, rbacLoaded] = useAccessReview({
...accessReviewResource,
});

const getServingRuntimes = React.useCallback(() => {
if (!modelServingEnabled) {
return Promise.reject(new NotReadyError('Model serving is not enabled'));
}

if (notReady) {
if (notReady || !rbacLoaded) {
return Promise.reject(new NotReadyError('Fetch is not ready'));
}

return getServingRuntimeContext(namespace, LABEL_SELECTOR_DASHBOARD_RESOURCE).catch((e) => {
const getServingRuntimes = allowCreate ? listServingRuntimes : getServingRuntimeContext;

return getServingRuntimes(namespace, LABEL_SELECTOR_DASHBOARD_RESOURCE).catch((e) => {
if (e.statusObject?.code === 404) {
throw new Error('Model serving is not properly configured.');
}
throw e;
});
}, [namespace, modelServingEnabled, notReady]);
}, [namespace, modelServingEnabled, notReady, rbacLoaded, allowCreate]);

return useFetchState<ServingRuntimeKind[]>(getServingRuntimes, [], {
initialPromisePurity: true,
