Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Removed the name saas in the file names and package names #5124

Merged
merged 2 commits into from
Oct 28, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
package org.opensearch.dataprepper.plugins.source.saas.jira;
package org.opensearch.dataprepper.plugins.source.jira;

import org.opensearch.dataprepper.model.buffer.Buffer;
import org.opensearch.dataprepper.model.event.Event;
import org.opensearch.dataprepper.model.record.Record;
import org.opensearch.dataprepper.plugins.source.source_crawler.base.CrawlerClient;
import org.opensearch.dataprepper.plugins.source.source_crawler.base.SaasSourceConfig;
import org.opensearch.dataprepper.plugins.source.source_crawler.base.CrawlerSourceConfig;
import org.opensearch.dataprepper.plugins.source.source_crawler.coordination.state.SaasWorkerProgressState;
import org.opensearch.dataprepper.plugins.source.source_crawler.model.ItemInfo;
import org.slf4j.Logger;
Expand Down Expand Up @@ -39,7 +39,7 @@ public void setLastPollTime(Instant lastPollTime) {
}

@Override
public void executePartition(SaasWorkerProgressState state, Buffer<Record<Event>> buffer, SaasSourceConfig configuration) {
public void executePartition(SaasWorkerProgressState state, Buffer<Record<Event>> buffer, CrawlerSourceConfig configuration) {
log.info("Logic for executing the partitions");
}
}
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package org.opensearch.dataprepper.plugins.source.saas.jira;
package org.opensearch.dataprepper.plugins.source.jira;


import org.opensearch.dataprepper.metrics.PluginMetrics;
Expand All @@ -11,9 +11,9 @@
import org.opensearch.dataprepper.model.plugin.PluginFactory;
import org.opensearch.dataprepper.model.record.Record;
import org.opensearch.dataprepper.model.source.Source;
import org.opensearch.dataprepper.plugins.source.source_crawler.SaasCrawlerApplicationContextMarker;
import org.opensearch.dataprepper.plugins.source.source_crawler.CrawlerApplicationContextMarker;
import org.opensearch.dataprepper.plugins.source.source_crawler.base.Crawler;
import org.opensearch.dataprepper.plugins.source.source_crawler.base.SaasPluginExecutorServiceProvider;
import org.opensearch.dataprepper.plugins.source.source_crawler.base.PluginExecutorServiceProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand All @@ -24,34 +24,34 @@

@DataPrepperPlugin(name = "jira",
pluginType = Source.class,
packagesToScan = {SaasCrawlerApplicationContextMarker.class, JiraSource.class}
packagesToScan = {CrawlerApplicationContextMarker.class, JiraSource.class}
)
public class JiraSource implements Source<Record<Event>> {

private static final Logger log = LoggerFactory.getLogger(JiraSource.class);
private static final Logger log = LoggerFactory.getLogger(JiraSource.class);


@DataPrepperPluginConstructor
public JiraSource(final PluginMetrics pluginMetrics,
final PluginFactory pluginFactory,
final AcknowledgementSetManager acknowledgementSetManager,
Crawler crawler,
SaasPluginExecutorServiceProvider executorServiceProvider) {
log.info("Create Jira Source Connector");
}
@DataPrepperPluginConstructor
public JiraSource(final PluginMetrics pluginMetrics,
final PluginFactory pluginFactory,
final AcknowledgementSetManager acknowledgementSetManager,
Crawler crawler,
PluginExecutorServiceProvider executorServiceProvider) {
log.info("Create Jira Source Connector");
}

public void start(Buffer<Record<Event>> buffer) {
log.info("Starting Jira Source Plugin... ");
}
public void start(Buffer<Record<Event>> buffer) {
log.info("Starting Jira Source Plugin... ");
}

@Override
public void stop() {
@Override
public void stop() {

}
}

@Override
public ByteDecoder getDecoder() {
return Source.super.getDecoder();
}
@Override
public ByteDecoder getDecoder() {
return Source.super.getDecoder();
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -3,5 +3,5 @@
/**
 * Marker interface to indicate the base package to scan for dependency injection
*/
public interface SaasCrawlerApplicationContextMarker {
public interface CrawlerApplicationContextMarker {
}
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ public Instant crawl(Instant lastPollTime,
return updatedPollTime;
}

public void executePartition(SaasWorkerProgressState state, Buffer<Record<Event>> buffer, SaasSourceConfig sourceConfig) {
public void executePartition(SaasWorkerProgressState state, Buffer<Record<Event>> buffer, CrawlerSourceConfig sourceConfig) {
client.executePartition(state, buffer, sourceConfig);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ public interface CrawlerClient {

/**
* This will be the main API called by crawler. This method assumes that {@link
* SaasSourceConfig} is available as a member to {@link CrawlerClient}, as a result of
* CrawlerSourceConfig} is available as a member to {@link CrawlerClient}, as a result of
* which, other scanning properties will also be available to this method
*
* @return returns an {@link Iterator} of {@link ItemInfo}
Expand All @@ -40,5 +40,5 @@ public interface CrawlerClient {
* @param buffer pipeline buffer to write the results into
* @param sourceConfig pipeline configuration from the yaml
*/
void executePartition(SaasWorkerProgressState state, Buffer<Record<Event>> buffer, SaasSourceConfig sourceConfig);
void executePartition(SaasWorkerProgressState state, Buffer<Record<Event>> buffer, CrawlerSourceConfig sourceConfig);
}
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
/**
 * Marker interface for all the SaaS connector configurations
*/
public interface SaasSourceConfig {
public interface CrawlerSourceConfig {

int DEFAULT_NUMBER_OF_WORKERS = 1;
}
Original file line number Diff line number Diff line change
Expand Up @@ -29,30 +29,30 @@
* JiraConnector connector entry point.
*/

public abstract class SaasSourcePlugin implements Source<Record<Event>>, UsesEnhancedSourceCoordination {
public abstract class CrawlerSourcePlugin implements Source<Record<Event>>, UsesEnhancedSourceCoordination {


private static final Logger log = LoggerFactory.getLogger(SaasSourcePlugin.class);
private static final Logger log = LoggerFactory.getLogger(CrawlerSourcePlugin.class);
private final PluginMetrics pluginMetrics;
private final PluginFactory pluginFactory;

private final AcknowledgementSetManager acknowledgementSetManager;

private final ExecutorService executorService;
private final SaasSourceConfig sourceConfig;
private final CrawlerSourceConfig sourceConfig;
private final Crawler crawler;
private final String sourcePluginName;
private EnhancedSourceCoordinator coordinator;
private Buffer<Record<Event>> buffer;


public SaasSourcePlugin(final String sourcePluginName,
final PluginMetrics pluginMetrics,
final SaasSourceConfig sourceConfig,
final PluginFactory pluginFactory,
final AcknowledgementSetManager acknowledgementSetManager,
final Crawler crawler,
final SaasPluginExecutorServiceProvider executorServiceProvider) {
public CrawlerSourcePlugin(final String sourcePluginName,
final PluginMetrics pluginMetrics,
final CrawlerSourceConfig sourceConfig,
final PluginFactory pluginFactory,
final AcknowledgementSetManager acknowledgementSetManager,
final Crawler crawler,
final PluginExecutorServiceProvider executorServiceProvider) {
log.debug("Creating {} Source Plugin", sourcePluginName);
this.sourcePluginName = sourcePluginName;
this.pluginMetrics = pluginMetrics;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,19 +10,19 @@
import java.util.concurrent.TimeUnit;

@Named
public class SaasPluginExecutorServiceProvider {
private static final Logger log = LoggerFactory.getLogger(SaasPluginExecutorServiceProvider.class);
public class PluginExecutorServiceProvider {
private static final Logger log = LoggerFactory.getLogger(PluginExecutorServiceProvider.class);
private static final int DEFAULT_THREAD_COUNT = 50;
private final ExecutorService executorService;

public SaasPluginExecutorServiceProvider() {
public PluginExecutorServiceProvider() {
executorService = Executors.newFixedThreadPool(DEFAULT_THREAD_COUNT);
}

/**
* Constructor for testing
*/
public SaasPluginExecutorServiceProvider(ExecutorService testExecutorService) {
public PluginExecutorServiceProvider(ExecutorService testExecutorService) {
executorService = testExecutorService;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
import org.opensearch.dataprepper.model.source.coordinator.enhanced.EnhancedSourceCoordinator;
import org.opensearch.dataprepper.model.source.coordinator.enhanced.EnhancedSourcePartition;
import org.opensearch.dataprepper.plugins.source.source_crawler.base.Crawler;
import org.opensearch.dataprepper.plugins.source.source_crawler.base.SaasSourcePlugin;
import org.opensearch.dataprepper.plugins.source.source_crawler.base.CrawlerSourcePlugin;
import org.opensearch.dataprepper.plugins.source.source_crawler.coordination.partition.LeaderPartition;
import org.opensearch.dataprepper.plugins.source.source_crawler.coordination.state.LeaderProgressState;
import org.slf4j.Logger;
Expand All @@ -29,14 +29,14 @@ public class LeaderScheduler implements Runnable {
private static final Duration DEFAULT_LEASE_INTERVAL = Duration.ofMinutes(1);

private final EnhancedSourceCoordinator coordinator;
private final SaasSourcePlugin sourcePlugin;
private final CrawlerSourcePlugin sourcePlugin;
private final Crawler crawler;
@Setter
private Duration leaseInterval;
private LeaderPartition leaderPartition;

public LeaderScheduler(EnhancedSourceCoordinator coordinator,
SaasSourcePlugin sourcePlugin,
CrawlerSourcePlugin sourcePlugin,
Crawler crawler) {
this.coordinator = coordinator;
this.leaseInterval = DEFAULT_LEASE_INTERVAL;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
import org.opensearch.dataprepper.model.source.coordinator.enhanced.EnhancedSourceCoordinator;
import org.opensearch.dataprepper.model.source.coordinator.enhanced.EnhancedSourcePartition;
import org.opensearch.dataprepper.plugins.source.source_crawler.base.Crawler;
import org.opensearch.dataprepper.plugins.source.source_crawler.base.SaasSourceConfig;
import org.opensearch.dataprepper.plugins.source.source_crawler.base.CrawlerSourceConfig;
import org.opensearch.dataprepper.plugins.source.source_crawler.coordination.partition.SaasSourcePartition;
import org.opensearch.dataprepper.plugins.source.source_crawler.coordination.state.SaasWorkerProgressState;
import org.slf4j.Logger;
Expand All @@ -26,14 +26,14 @@ public class WorkerScheduler implements Runnable {
private static final Duration DEFAULT_SLEEP_DURATION_MILLIS = Duration.ofMillis(10000);

private final EnhancedSourceCoordinator sourceCoordinator;
private final SaasSourceConfig sourceConfig;
private final CrawlerSourceConfig sourceConfig;
private final Crawler crawler;
private final Buffer<Record<Event>> buffer;


public WorkerScheduler(Buffer<Record<Event>> buffer,
EnhancedSourceCoordinator sourceCoordinator,
SaasSourceConfig sourceConfig,
CrawlerSourceConfig sourceConfig,
Crawler crawler) {
this.sourceCoordinator = sourceCoordinator;
this.sourceConfig = sourceConfig;
Expand Down Expand Up @@ -75,7 +75,7 @@ public void run() {
log.warn("SourceItemWorker Scheduler is interrupted, looks like shutdown has triggered");
}

private void processPartition(EnhancedSourcePartition partition, Buffer<Record<Event>> buffer, SaasSourceConfig sourceConfig) {
private void processPartition(EnhancedSourcePartition partition, Buffer<Record<Event>> buffer, CrawlerSourceConfig sourceConfig) {
// Implement your source extraction logic here
// Update the partition state or commit the partition as needed
// Commit the partition to mark it as processed
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
import static org.mockito.internal.verification.VerificationModeFactory.times;

@ExtendWith(MockitoExtension.class)
public class SaasSourcePluginTest {
public class CrawlerSourcePluginTest {
@Mock
private PluginMetrics pluginMetrics;

Expand All @@ -42,13 +42,13 @@ public class SaasSourcePluginTest {
private Crawler crawler;

@Mock
private SaasPluginExecutorServiceProvider executorServiceProvider;
private PluginExecutorServiceProvider executorServiceProvider;

@Mock
private ExecutorService executorService;

@Mock
private SaasSourceConfig sourceConfig;
private CrawlerSourceConfig sourceConfig;

@Mock
private Buffer<Record<Event>> buffer;
Expand All @@ -60,12 +60,12 @@ public class SaasSourcePluginTest {
@Mock
private EnhancedSourceCoordinator sourceCoordinator;

private testSaasSourcePlugin saasSourcePlugin;
private testCrawlerSourcePlugin saasSourcePlugin;

@BeforeEach
void setUp() {
when(executorServiceProvider.get()).thenReturn(executorService);
saasSourcePlugin = new testSaasSourcePlugin(pluginMetrics, sourceConfig, pluginFactory, acknowledgementSetManager, crawler, executorServiceProvider);
saasSourcePlugin = new testCrawlerSourcePlugin(pluginMetrics, sourceConfig, pluginFactory, acknowledgementSetManager, crawler, executorServiceProvider);
}

@Test
Expand Down Expand Up @@ -98,7 +98,7 @@ void testExecutorServiceSchedulersSubmitted() {
saasSourcePlugin.setEnhancedSourceCoordinator(sourceCoordinator);
saasSourcePlugin.start(buffer);
verify(executorService, times(1)).submit(any(LeaderScheduler.class));
verify(executorService, times(SaasSourceConfig.DEFAULT_NUMBER_OF_WORKERS))
verify(executorService, times(CrawlerSourceConfig.DEFAULT_NUMBER_OF_WORKERS))
.submit(any(Thread.class));
}

Expand All @@ -123,13 +123,13 @@ void testGetDecoder() {
}

@Nested
public class testSaasSourcePlugin extends SaasSourcePlugin {
public testSaasSourcePlugin(final PluginMetrics pluginMetrics,
final SaasSourceConfig sourceConfig,
final PluginFactory pluginFactory,
final AcknowledgementSetManager acknowledgementSetManager,
final Crawler crawler,
final SaasPluginExecutorServiceProvider executorServiceProvider) {
public class testCrawlerSourcePlugin extends CrawlerSourcePlugin {
public testCrawlerSourcePlugin(final PluginMetrics pluginMetrics,
final CrawlerSourceConfig sourceConfig,
final PluginFactory pluginFactory,
final AcknowledgementSetManager acknowledgementSetManager,
final Crawler crawler,
final PluginExecutorServiceProvider executorServiceProvider) {
super("TestcasePlugin", pluginMetrics, sourceConfig, pluginFactory, acknowledgementSetManager, crawler, executorServiceProvider);
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@
@ExtendWith(MockitoExtension.class)
public class CrawlerTest {
@Mock
private SaasSourceConfig sourceConfig;
private CrawlerSourceConfig sourceConfig;

@Mock
private EnhancedSourceCoordinator coordinator;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,18 +18,18 @@
import static org.mockito.Mockito.when;

@ExtendWith(MockitoExtension.class)
public class SaasPluginExecutorServiceProviderTest {
public class PluginExecutorServiceProviderTest {

private SaasPluginExecutorServiceProvider provider;
private PluginExecutorServiceProvider provider;
private ExecutorService executorService;

private SaasPluginExecutorServiceProvider provider2;
private PluginExecutorServiceProvider provider2;
@Mock
private ExecutorService mockExecutorService;

@BeforeEach
void setUp() {
provider = new SaasPluginExecutorServiceProvider();
provider = new PluginExecutorServiceProvider();
executorService = provider.get();
}

Expand All @@ -53,7 +53,7 @@ void testTerminateExecutor() {

@Test
void terminateExecutorInterruptionTest() throws InterruptedException {
provider2 = new SaasPluginExecutorServiceProvider(mockExecutorService);
provider2 = new PluginExecutorServiceProvider(mockExecutorService);
when(mockExecutorService.awaitTermination(anyLong(), any(TimeUnit.class))).thenThrow(new InterruptedException());
AtomicBoolean wasInterrupted = new AtomicBoolean(false);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
import org.mockito.junit.jupiter.MockitoExtension;
import org.opensearch.dataprepper.model.source.coordinator.enhanced.EnhancedSourceCoordinator;
import org.opensearch.dataprepper.plugins.source.source_crawler.base.Crawler;
import org.opensearch.dataprepper.plugins.source.source_crawler.base.SaasSourcePlugin;
import org.opensearch.dataprepper.plugins.source.source_crawler.base.CrawlerSourcePlugin;
import org.opensearch.dataprepper.plugins.source.source_crawler.coordination.partition.LeaderPartition;

import java.time.Duration;
Expand All @@ -33,7 +33,7 @@ public class LeaderSchedulerTest {
@Mock
private EnhancedSourceCoordinator coordinator;
@Mock
private SaasSourcePlugin saasSourcePlugin;
private CrawlerSourcePlugin saasSourcePlugin;
@Mock
private Crawler crawler;

Expand Down
Loading
Loading