Remove unnecessary exception throws, redundant lambda code and javadoc (#22321)

* Remove unnecessary exception throws, redundant lambda code and javadoc

* Fix java checkstyle issues
Suresh Srinivas 2025-07-12 17:26:00 -07:00 committed by GitHub
parent 7561289208
commit 2c1ccca730
78 changed files with 185 additions and 440 deletions
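
Three patterns repeat throughout the diff. A minimal sketch of each, using hypothetical names rather than code from the patch:

import java.util.List;

class CleanupExamples {
  interface Service { void start(); }

  // 1. Unnecessary exception throws: nothing in the body can throw the
  //    declared checked exception, so the clause (and, once unused, the
  //    corresponding import) is deleted.
  void close(Service s) /* throws IOException -- removed */ {
    s.start();
  }

  // 2. Redundant lambda code: a one-statement block lambda becomes an
  //    expression lambda or a method reference.
  void run(List<Service> services) {
    services.forEach(s -> { s.start(); }); // before
    services.forEach(Service::start);      // after
  }

  /**
   * 3. Redundant javadoc: tags that only restate a name add no information.
   * @param services the services   <-- the kind of line this commit removes
   */
  void stop(List<Service> services) {}
}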

View File

@ -25,7 +25,6 @@ import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.MultivaluedMap;
import jakarta.ws.rs.ext.MessageBodyReader;
import jakarta.ws.rs.ext.Provider;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringReader;
import java.lang.annotation.Annotation;
@ -49,7 +48,7 @@ public class JsonPatchMessageBodyReader implements MessageBodyReader<JsonPatch>
MediaType mediaType,
MultivaluedMap<String, String> httpHeaders,
InputStream entityStream)
throws IOException, WebApplicationException {
throws WebApplicationException {
try {
// Use Jackson to read the JSON content first to avoid JsonStructure deserialization issues
ObjectMapper mapper = new ObjectMapper();
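
Dropping throws clauses like this is safe because Java lets an overriding method declare fewer checked exceptions than the method it overrides; the contract stays intact for callers typed against the interface. A minimal sketch of the rule, with hypothetical types:

import java.io.IOException;

interface Source {
  String read() throws IOException;
}

class ConstantSource implements Source {
  @Override
  public String read() { // narrower throws clause: legal, since the body throws nothing checked
    return "constant";
  }
}

Once no remaining method mentions IOException, its import becomes unused, which is presumably what the checkstyle fixes in this commit clean up.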

View File

@ -42,7 +42,6 @@ import org.openmetadata.service.util.FullyQualifiedName;
import org.openmetadata.service.util.OpenMetadataConnectionBuilder;
import org.quartz.JobExecutionContext;
import org.quartz.SchedulerException;
import org.quartz.UnableToInterruptJobException;
@Getter
@Slf4j
@ -124,7 +123,6 @@ public class AbstractNativeApplication implements NativeApplication {
/**
* Validate the configuration of the application. This method is called before the application is
* triggered.
* @param config
*/
protected void validateConfig(Map<String, Object> config) {
LOG.warn("validateConfig is not implemented for this application. Skipping validation.");
@ -334,7 +332,7 @@ public class AbstractNativeApplication implements NativeApplication {
}
@Override
public void interrupt() throws UnableToInterruptJobException {
public void interrupt() {
LOG.info("Interrupting the job for app: {}", this.app.getName());
stop();
}

View File

@ -12,11 +12,6 @@ import org.openmetadata.service.security.Authorizer;
public interface McpServerProvider {
/**
* Initialize and register the MCP server with the application.
*
* @param environment Dropwizard environment
* @param authorizer Security authorizer
* @param limits Request limits
* @param config Application configuration
*/
void initializeMcpServer(
Environment environment,

View File

@ -13,7 +13,7 @@ public class AlertPublisher extends AbstractEventConsumer {
}
@Override
public void sendAlert(UUID receiverId, ChangeEvent event) throws EventPublisherException {
public void sendAlert(UUID receiverId, ChangeEvent event) {
if (destinationMap.containsKey(receiverId)) {
Destination<ChangeEvent> destination = destinationMap.get(receiverId);
if (Boolean.TRUE.equals(destination.getEnabled())) {

View File

@ -104,7 +104,7 @@ public class GenericPublisher implements Destination<ChangeEvent> {
}
}
private void sendActionsToTargets(ChangeEvent event) throws Exception {
private void sendActionsToTargets(ChangeEvent event) {
List<Invocation.Builder> targets =
getTargetsForWebhookAlert(
webhook, subscriptionDestination.getCategory(), WEBHOOK, client, event);

View File

@ -15,7 +15,6 @@ import es.org.elasticsearch.common.unit.ByteSizeUnit;
import es.org.elasticsearch.common.unit.ByteSizeValue;
import es.org.elasticsearch.core.TimeValue;
import es.org.elasticsearch.xcontent.XContentType;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
@ -216,7 +215,7 @@ public class ElasticSearchBulkSink implements BulkSink {
}
}
private void addEntity(EntityInterface entity, String indexName) throws Exception {
private void addEntity(EntityInterface entity, String indexName) {
// Build the search index document using the proper transformation
String entityType = Entity.getEntityTypeFromObject(entity);
Object searchIndexDoc = Entity.buildSearchIndex(entityType, entity).buildSearchIndexDoc();
@ -230,8 +229,7 @@ public class ElasticSearchBulkSink implements BulkSink {
bulkProcessor.add(updateRequest);
}
private void addTimeSeriesEntity(EntityTimeSeriesInterface entity, String indexName)
throws Exception {
private void addTimeSeriesEntity(EntityTimeSeriesInterface entity, String indexName) {
String json = JsonUtils.pojoToJson(entity);
String docId = entity.getId().toString();
@ -263,7 +261,7 @@ public class ElasticSearchBulkSink implements BulkSink {
}
@Override
public void close() throws IOException {
public void close() {
try {
// Flush any pending requests
bulkProcessor.flush();

View File

@ -500,7 +500,7 @@ public class ElasticSearchIndexSink implements BulkSink, Closeable {
}
@Override
public void close() throws IOException {
public void close() {
client.close();
}

View File

@ -2,7 +2,6 @@ package org.openmetadata.service.apps.bundles.searchIndex;
import static org.openmetadata.service.workflows.searchIndex.ReindexingUtil.ENTITY_TYPE_KEY;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
@ -216,7 +215,7 @@ public class OpenSearchBulkSink implements BulkSink {
}
}
private void addEntity(EntityInterface entity, String indexName) throws Exception {
private void addEntity(EntityInterface entity, String indexName) {
// Build the search index document using the proper transformation
String entityType = Entity.getEntityTypeFromObject(entity);
Object searchIndexDoc = Entity.buildSearchIndex(entityType, entity).buildSearchIndexDoc();
@ -230,8 +229,7 @@ public class OpenSearchBulkSink implements BulkSink {
bulkProcessor.add(updateRequest);
}
private void addTimeSeriesEntity(EntityTimeSeriesInterface entity, String indexName)
throws Exception {
private void addTimeSeriesEntity(EntityTimeSeriesInterface entity, String indexName) {
String json = JsonUtils.pojoToJson(entity);
String docId = entity.getId().toString();
@ -263,7 +261,7 @@ public class OpenSearchBulkSink implements BulkSink {
}
@Override
public void close() throws IOException {
public void close() {
try {
// Flush any pending requests
bulkProcessor.flush();

View File

@ -452,7 +452,7 @@ public class OpenSearchIndexSink implements BulkSink, Closeable {
}
@Override
public void close() throws IOException {
public void close() {
client.close();
}

View File

@ -529,11 +529,6 @@ public class SearchIndexApp extends AbstractNativeApplication {
/**
* Shuts down an executor service gracefully.
*
* @param executor The executor service to shut down.
* @param name The name of the executor for logging.
* @param timeout The timeout duration.
* @param unit The time unit of the timeout.
*/
private void shutdownExecutor(
ExecutorService executor, String name, long timeout, TimeUnit unit) {

View File

@ -182,7 +182,7 @@ public class RedisCacheBundle implements ConfiguredBundle<OpenMetadataApplicatio
// Start warmup asynchronously after a short delay to allow application to fully
// start
CompletableFuture.delayedExecutor(5, java.util.concurrent.TimeUnit.SECONDS)
.execute(() -> warmupService.startWarmup());
.execute(warmupService::startWarmup);
}
@Override
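
One nuance of rewriting () -> warmupService.startWarmup() as warmupService::startWarmup, sketched below with hypothetical names: a bound method reference evaluates its receiver when the reference is created, while the lambda defers that until the Runnable runs. The rewrite preserves behavior assuming warmupService is initialized and not reassigned before the delayed task runs.

interface Service { void start(); }

class Capture {
  public static void main(String[] args) {
    Service service = null;
    Runnable viaLambda = () -> service.start(); // receiver read only when run
    try {
      Runnable viaRef = service::start;         // receiver captured here: NPE immediately
    } catch (NullPointerException e) {
      System.out.println("bound method reference evaluated its receiver eagerly");
    }
  }
}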

View File

@ -25,8 +25,7 @@ public abstract class AbstractEventPublisher implements EventPublisher {
@Override
public void onEvent(
EventPubSub.ChangeEventHolder changeEventHolder, long sequence, boolean endOfBatch)
throws Exception {
EventPubSub.ChangeEventHolder changeEventHolder, long sequence, boolean endOfBatch) {
// Ignore events that don't match the webhook event filters
ChangeEvent changeEvent = changeEventHolder.getEvent();

View File

@ -56,8 +56,6 @@ public class EntityLifecycleEventDispatcher {
/**
* Register a new lifecycle event handler.
* Handlers are automatically sorted by priority after registration.
*
* @param handler The handler to register
*/
public synchronized void registerHandler(EntityLifecycleEventHandler handler) {
if (handler == null) {
@ -86,9 +84,6 @@ public class EntityLifecycleEventDispatcher {
/**
* Unregister a lifecycle event handler by name.
*
* @param handlerName The name of the handler to remove
* @return true if handler was found and removed
*/
public synchronized boolean unregisterHandler(String handlerName) {
boolean removed = handlers.removeIf(h -> h.getHandlerName().equals(handlerName));
@ -109,9 +104,6 @@ public class EntityLifecycleEventDispatcher {
/**
* Dispatch entity created event to all applicable handlers.
*
* @param entity The entity that was created
* @param subjectContext The security context
*/
public void onEntityCreated(EntityInterface entity, SubjectContext subjectContext) {
if (entity == null) return;
@ -126,10 +118,6 @@ public class EntityLifecycleEventDispatcher {
/**
* Dispatch entity updated event to all applicable handlers.
*
* @param entity The entity that was updated
* @param changeDescription Description of changes
* @param subjectContext The security context
*/
public void onEntityUpdated(
EntityInterface entity, ChangeDescription changeDescription, SubjectContext subjectContext) {
@ -146,9 +134,6 @@ public class EntityLifecycleEventDispatcher {
/**
* Dispatch entity updated event to all applicable handlers.
*
* @param entityReference The reference for the entity that was updated
* @param subjectContext The security context
*/
public void onEntityUpdated(EntityReference entityReference, SubjectContext subjectContext) {
if (entityReference == null) return;
@ -163,9 +148,6 @@ public class EntityLifecycleEventDispatcher {
/**
* Dispatch entity deleted event to all applicable handlers.
*
* @param entity The entity that was deleted
* @param subjectContext The security context
*/
public void onEntityDeleted(EntityInterface entity, SubjectContext subjectContext) {
if (entity == null) return;
@ -180,10 +162,6 @@ public class EntityLifecycleEventDispatcher {
/**
* Dispatch entity soft deleted or restored event to all applicable handlers.
*
* @param entity The entity that was soft deleted or restored
* @param isDeleted true if soft deleted, false if restored
* @param subjectContext The security context
*/
public void onEntitySoftDeletedOrRestored(
EntityInterface entity, boolean isDeleted, SubjectContext subjectContext) {

View File

@ -27,9 +27,6 @@ public interface EntityLifecycleEventHandler {
/**
* Called after an entity is successfully created.
*
* @param entity The entity that was created
* @param subjectContext The security context of the user who created the entity
*/
default void onEntityCreated(EntityInterface entity, SubjectContext subjectContext) {
// Default empty implementation
@ -37,10 +34,6 @@ public interface EntityLifecycleEventHandler {
/**
* Called after an entity is successfully updated.
*
* @param entity The updated entity
* @param changeDescription Description of changes made to the entity
* @param subjectContext The security context of the user who updated the entity
*/
default void onEntityUpdated(
EntityInterface entity, ChangeDescription changeDescription, SubjectContext subjectContext) {
@ -49,9 +42,6 @@ public interface EntityLifecycleEventHandler {
/**
* Called after an entity is successfully updated.
*
* @param entityRef The updated entity reference
* @param subjectContext The security context of the user who updated the entity
*/
default void onEntityUpdated(EntityReference entityRef, SubjectContext subjectContext) {
// Default empty implementation
@ -59,9 +49,6 @@ public interface EntityLifecycleEventHandler {
/**
* Called after an entity is successfully deleted.
*
* @param entity The entity that was deleted
* @param subjectContext The security context of the user who deleted the entity
*/
default void onEntityDeleted(EntityInterface entity, SubjectContext subjectContext) {
// Default empty implementation
@ -69,10 +56,6 @@ public interface EntityLifecycleEventHandler {
/**
* Called after an entity is soft deleted or restored.
*
* @param entity The entity that was soft deleted or restored
* @param isDeleted true if soft deleted, false if restored
* @param subjectContext The security context of the user who performed the action
*/
default void onEntitySoftDeletedOrRestored(
EntityInterface entity, boolean isDeleted, SubjectContext subjectContext) {

View File

@ -169,9 +169,6 @@ public class SearchIndexHandler implements EntityLifecycleEventHandler {
/**
* Handle bulk entity creation for better performance.
* This method can be used by the dispatcher for optimized bulk operations.
*
* @param entities List of entities to index
* @param subjectContext Security context
*/
public void onEntitiesCreated(List<EntityInterface> entities, SubjectContext subjectContext) {
if (entities == null || entities.isEmpty()) {

View File

@ -37,12 +37,11 @@ public class ConstraintViolationExceptionMapper
List<String> errorMessages =
constraintViolations.stream()
.map(
constraintViolation -> {
return "query param "
+ getLeafNodeName(constraintViolation.getPropertyPath())
+ " "
+ constraintViolation.getMessage();
})
constraintViolation ->
"query param "
+ getLeafNodeName(constraintViolation.getPropertyPath())
+ " "
+ constraintViolation.getMessage())
.toList();
return Response.status(Response.Status.BAD_REQUEST)
.entity(

View File

@ -225,11 +225,10 @@ public class SlackMessageDecorator implements MessageDecorator<SlackMessage> {
// desc about the event
List<String> thread_messages = outgoingMessage.getMessages();
thread_messages.forEach(
(message) -> {
blocks.add(
Blocks.section(
section -> section.text(BlockCompositions.markdownText("> " + message))));
});
(message) ->
blocks.add(
Blocks.section(
section -> section.text(BlockCompositions.markdownText("> " + message)))));
// Divider
blocks.add(Blocks.divider());
@ -312,11 +311,10 @@ public class SlackMessageDecorator implements MessageDecorator<SlackMessage> {
// desc about the event
List<String> thread_messages = outgoingMessage.getMessages();
thread_messages.forEach(
(message) -> {
blocks.add(
Blocks.section(
section -> section.text(BlockCompositions.markdownText("> " + message))));
});
(message) ->
blocks.add(
Blocks.section(
section -> section.text(BlockCompositions.markdownText("> " + message)))));
// Divider
blocks.add(Blocks.divider());

View File

@ -432,9 +432,8 @@ public class WorkflowHandler {
.processDefinitionKey(getTriggerWorkflowId(workflowName))
.list()
.forEach(
instance -> {
runtimeService.deleteProcessInstance(
instance.getId(), "Terminating all instances due to user request.");
});
instance ->
runtimeService.deleteProcessInstance(
instance.getId(), "Terminating all instances due to user request."));
}
}

View File

@ -45,7 +45,7 @@ public class ChangeSummarizer<T extends EntityInterface> {
change ->
Optional.ofNullable(currentSummary)
.map(summary -> summary.get(change.getName()))
.map(c -> c.getChangedAt())
.map(ChangeSummary::getChangedAt)
.orElse(0L)
.compareTo(changedAt)
< 0)
@ -98,8 +98,6 @@ public class ChangeSummarizer<T extends EntityInterface> {
/**
* Given a list of fields that were deleted, process the fields and return the set of keys to delete
* @param fieldsDeleted list of fields that were deleted
* @return set of keys to delete
*/
public Set<String> processDeleted(List<FieldChange> fieldsDeleted) {
Set<String> keysToDelete = new HashSet<>();
@ -144,10 +142,9 @@ public class ChangeSummarizer<T extends EntityInterface> {
.map(map -> (Map<String, Object>) map)
.map(map -> (String) map.get("name"))
.forEach(
name -> {
keysToDelete.add(
FullyQualifiedName.build(fieldChange.getName(), name, nestedField));
});
name ->
keysToDelete.add(
FullyQualifiedName.build(fieldChange.getName(), name, nestedField)));
} catch (JsonParsingException e) {
LOG.warn("Error processing deleted fields", e);
}

View File

@ -96,9 +96,6 @@ public final class ColumnUtil {
/**
* Finds a column by its Fully Qualified Name (FQN).
* @param columns List of columns in the table.
* @param columnFqn The fully qualified name of the column.
* @return The matched column if found, otherwise null.
*/
public static Column findColumn(List<Column> columns, String columnFqn) {
for (Column column : columns) {
@ -119,9 +116,6 @@ public final class ColumnUtil {
* Adds or updates a column within the given column list.
* If the column exists, it updates its values.
* If the column does not exist, it adds it to the list.
*
* @param columns The list of columns where the column should be added/updated.
* @param newColumn The new column to add or update.
*/
public static void addOrUpdateColumn(List<Column> columns, Column newColumn) {
Optional<Column> existingColumn =

View File

@ -349,7 +349,7 @@ public class DirectoryRepository extends EntityRepository<Directory> {
recordList,
entity.getDataProducts() != null
? entity.getDataProducts().stream()
.map(ref -> ref.getFullyQualifiedName())
.map(EntityReference::getFullyQualifiedName)
.collect(Collectors.joining(";"))
: "");
addOwners(recordList, entity.getExperts());

View File

@ -10,9 +10,7 @@ import static org.openmetadata.service.util.jdbi.JdbiUtils.getOffset;
import jakarta.json.JsonObject;
import jakarta.json.JsonPatch;
import jakarta.ws.rs.core.Response;
import java.beans.IntrospectionException;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@ -339,8 +337,7 @@ public abstract class EntityTimeSeriesRepository<T extends EntityTimeSeriesInter
return resultList;
}
public RestUtil.PatchResponse<T> patch(UUID id, JsonPatch patch, String user)
throws IntrospectionException, InvocationTargetException, IllegalAccessException {
public RestUtil.PatchResponse<T> patch(UUID id, JsonPatch patch, String user) {
String originalJson = timeSeriesDao.getById(id);
if (originalJson == null) {
throw new EntityNotFoundException(String.format("Entity with id %s not found", id));

View File

@ -182,7 +182,7 @@ public class FileRepository extends EntityRepository<File> {
@Override
public CsvImportResult importFromCsv(
String name, String csv, boolean dryRun, String user, boolean recursive) throws IOException {
String name, String csv, boolean dryRun, String user, boolean recursive) {
// For files, we need the directory context for import
throw new UnsupportedOperationException(
"File import requires directory context. Use directory import instead.");

View File

@ -234,7 +234,7 @@ public class SpreadsheetRepository extends EntityRepository<Spreadsheet> {
@Override
public CsvImportResult importFromCsv(
String name, String csv, boolean dryRun, String user, boolean recursive) throws IOException {
String name, String csv, boolean dryRun, String user, boolean recursive) {
// For spreadsheets, we need the directory context for import
throw new UnsupportedOperationException(
"Spreadsheet import requires directory context. Use directory import instead.");
@ -367,7 +367,7 @@ public class SpreadsheetRepository extends EntityRepository<Spreadsheet> {
recordList,
entity.getDataProducts() != null
? entity.getDataProducts().stream()
.map(ref -> ref.getFullyQualifiedName())
.map(EntityReference::getFullyQualifiedName)
.collect(Collectors.joining(";"))
: "");
addOwners(recordList, entity.getExperts());

View File

@ -998,9 +998,6 @@ public class TableRepository extends EntityRepository<Table> {
/**
* Export columns for a table, handling nested column structure
*
* @param table The table whose columns are being exported
* @param csvFile The CSV file to add column records to
*/
public void exportColumnsRecursively(Table table, CsvFile csvFile) {
if (table.getColumns() != null && !table.getColumns().isEmpty()) {

View File

@ -7,10 +7,8 @@ import static org.openmetadata.service.Entity.getEntityReferenceByName;
import jakarta.json.JsonPatch;
import jakarta.ws.rs.core.Response;
import java.beans.BeanInfo;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
@ -95,8 +93,7 @@ public class TestCaseResolutionStatusRepository
}
public RestUtil.PatchResponse<TestCaseResolutionStatus> patch(
UUID id, JsonPatch patch, String user)
throws IntrospectionException, InvocationTargetException, IllegalAccessException {
UUID id, JsonPatch patch, String user) {
String originalJson = timeSeriesDao.getById(id);
if (originalJson == null) {
throw new EntityNotFoundException(String.format("Entity with id %s not found", id));

View File

@ -248,13 +248,12 @@ public class TestCaseResultRepository extends EntityTimeSeriesRepository<TestCas
s.setStatus(testCase.getTestCaseStatus());
s.setTimestamp(testCase.getTestCaseResult().getTimestamp());
},
() -> {
resultSummaries.add(
new ResultSummary()
.withTestCaseName(testCase.getFullyQualifiedName())
.withStatus(testCase.getTestCaseStatus())
.withTimestamp(testCase.getTestCaseResult().getTimestamp()));
});
() ->
resultSummaries.add(
new ResultSummary()
.withTestCaseName(testCase.getFullyQualifiedName())
.withStatus(testCase.getTestCaseStatus())
.withTimestamp(testCase.getTestCaseResult().getTimestamp())));
} else {
testSuite.setTestCaseResultSummary(
List.of(

View File

@ -172,7 +172,7 @@ public class WorksheetRepository extends EntityRepository<Worksheet> {
@Override
public CsvImportResult importFromCsv(
String name, String csv, boolean dryRun, String user, boolean recursive) throws IOException {
String name, String csv, boolean dryRun, String user, boolean recursive) {
// For worksheets, we need the spreadsheet context for import
throw new UnsupportedOperationException(
"Worksheet import requires spreadsheet context. Use spreadsheet import instead.");

View File

@ -27,7 +27,7 @@ public class GenericBackgroundWorker implements Managed {
}
@Override
public void start() throws Exception {
public void start() {
LOG.info("Starting background job worker");
Thread workerThread = new Thread(this::runWorker, "background-job-worker");
workerThread.setDaemon(true);
@ -35,7 +35,7 @@ public class GenericBackgroundWorker implements Managed {
}
@Override
public void stop() throws Exception {
public void stop() {
running = false;
}

View File

@ -175,7 +175,6 @@ public class MigrationWorkflow {
/*
* Parse a version string into an array of integers
* @param version The version string to parse
* Follows the format major.minor.patch, patch can contain -extension
*/
private static int[] parseVersion(String version) {

View File

@ -137,7 +137,7 @@ public class MetricsExample {
// Example: Using gauges for current state
metrics.registerGauge(
"example.active_connections",
() -> getActiveConnectionCount(),
this::getActiveConnectionCount,
"Number of active connections");
// Example: Distribution summary for response sizes

View File

@ -9,7 +9,6 @@ import jakarta.ws.rs.container.ContainerRequestFilter;
import jakarta.ws.rs.container.ContainerResponseContext;
import jakarta.ws.rs.container.ContainerResponseFilter;
import jakarta.ws.rs.ext.Provider;
import java.io.IOException;
import lombok.extern.slf4j.Slf4j;
/**
@ -33,7 +32,7 @@ public class MetricsRequestFilter implements ContainerRequestFilter, ContainerRe
}
@Override
public void filter(ContainerRequestContext requestContext) throws IOException {
public void filter(ContainerRequestContext requestContext) {
// Start timing the request
Timer.Sample sample = metrics.startHttpRequestTimer();
requestContext.setProperty(TIMER_SAMPLE_PROPERTY, sample);
@ -46,8 +45,7 @@ public class MetricsRequestFilter implements ContainerRequestFilter, ContainerRe
@Override
public void filter(
ContainerRequestContext requestContext, ContainerResponseContext responseContext)
throws IOException {
ContainerRequestContext requestContext, ContainerResponseContext responseContext) {
try {
// Get timing information
Timer.Sample sample = (Timer.Sample) requestContext.getProperty(TIMER_SAMPLE_PROPERTY);

View File

@ -629,8 +629,7 @@ public class DataContractResource extends EntityResource<DataContract, DataContr
@Context SecurityContext securityContext,
@Parameter(description = "Id of the data contract", schema = @Schema(type = "UUID"))
@PathParam("id")
UUID id)
throws Exception {
UUID id) {
DataContract dataContract = repository.get(uriInfo, id, Fields.EMPTY_FIELDS);
OperationContext operationContext =
new OperationContext(Entity.DATA_CONTRACT, MetadataOperation.VIEW_BASIC);
@ -670,9 +669,8 @@ public class DataContractResource extends EntityResource<DataContract, DataContr
UUID id,
@Parameter(description = "Id of the data contract result", schema = @Schema(type = "UUID"))
@PathParam("resultId")
UUID resultId)
throws Exception {
DataContract dataContract = repository.get(uriInfo, id, Fields.EMPTY_FIELDS);
UUID resultId) {
repository.get(uriInfo, id, Fields.EMPTY_FIELDS);
OperationContext operationContext =
new OperationContext(Entity.DATA_CONTRACT, MetadataOperation.VIEW_BASIC);
ResourceContext<DataContract> resourceContext =
@ -747,8 +745,7 @@ public class DataContractResource extends EntityResource<DataContract, DataContr
description = "Timestamp of the result to delete",
schema = @Schema(type = "number"))
@PathParam("timestamp")
Long timestamp)
throws Exception {
Long timestamp) {
DataContract dataContract = repository.get(uriInfo, id, Fields.EMPTY_FIELDS);
OperationContext operationContext =
new OperationContext(Entity.DATA_CONTRACT, MetadataOperation.DELETE);

View File

@ -80,10 +80,6 @@ public class SearchSettingsHandler {
* Merges default search settings with incoming search settings.
* Certain fields like aggregations and highlightFields are kept from defaults,
* while user-configurable settings like termBoosts and fieldValueBoosts are taken from incoming settings if present.
*
* @param defaultSearchSettings The default search settings
* @param incomingSearchSettings The incoming search settings to merge
* @return The merged search settings
*/
public SearchSettings mergeSearchSettings(
SearchSettings defaultSearchSettings, SearchSettings incomingSearchSettings) {
@ -184,9 +180,6 @@ public class SearchSettingsHandler {
/**
* Merges asset type configurations from default settings into incoming settings
* ensuring all required asset types are present.
*
* @param defaultSearchSettings The default search settings
* @param incomingSearchSettings The incoming search settings
*/
private void mergeAssetTypeConfigurations(
SearchSettings defaultSearchSettings, SearchSettings incomingSearchSettings) {

View File

@ -641,7 +641,7 @@ public class TeamResource extends EntityResource<Team, TeamRepository> {
schema = @Schema(implementation = CSVExportResponse.class)))
})
public Response exportCsvAsync(
@Context SecurityContext securityContext, @PathParam("name") String name) throws IOException {
@Context SecurityContext securityContext, @PathParam("name") String name) {
return exportCsvInternalAsync(securityContext, name, false);
}

View File

@ -24,9 +24,6 @@ public class SearchAggregation {
* 1. dimensions: the list of dimensions
* 2. metrics: the list of metrics
* 3. keys: the list of keys to traverse the aggregation tree
*
* @param aggregationMapList the list of aggregations
* @return the metadata
*/
public DataQualityReportMetadata getAggregationMetadata() {
DataQualityReportMetadata metadata = new DataQualityReportMetadata();

View File

@ -439,10 +439,6 @@ public interface SearchClient {
/**
* Get a list of data stream names that match the given prefix.
*
* @param prefix The prefix to match data stream names against
* @return List of data stream names that match the prefix
* @throws IOException if there is an error communicating with the search engine
*/
default List<String> getDataStreams(String prefix) throws IOException {
throw new CustomExceptionMessage(
@ -451,9 +447,6 @@ public interface SearchClient {
/**
* Delete data streams that match the given name or pattern.
*
* @param dataStreamName The name or pattern of data streams to delete
* @throws IOException if there is an error communicating with the search engine
*/
default void deleteDataStream(String dataStreamName) throws IOException {
throw new CustomExceptionMessage(
@ -462,9 +455,6 @@ public interface SearchClient {
/**
* Delete an Index Lifecycle Management (ILM) policy.
*
* @param policyName The name of the ILM policy to delete
* @throws IOException if there is an error communicating with the search engine
*/
default void deleteILMPolicy(String policyName) throws IOException {
throw new CustomExceptionMessage(
@ -473,9 +463,6 @@ public interface SearchClient {
/**
* Delete an index template.
*
* @param templateName The name of the index template to delete
* @throws IOException if there is an error communicating with the search engine
*/
default void deleteIndexTemplate(String templateName) throws IOException {
throw new CustomExceptionMessage(
@ -484,9 +471,6 @@ public interface SearchClient {
/**
* Delete a component template.
*
* @param componentTemplateName The name of the component template to delete
* @throws IOException if there is an error communicating with the search engine
*/
default void deleteComponentTemplate(String componentTemplateName) throws IOException {
throw new CustomExceptionMessage(
@ -495,9 +479,6 @@ public interface SearchClient {
/**
* Detach an ILM policy from indexes matching the given pattern.
*
* @param indexPattern The pattern of indexes to detach the ILM policy from
* @throws IOException if there is an error communicating with the search engine
*/
default void dettachIlmPolicyFromIndexes(String indexPattern) throws IOException {
throw new CustomExceptionMessage(
@ -507,9 +488,6 @@ public interface SearchClient {
/**
* Removes ILM policy from a component template while preserving all other settings.
* This is only implemented for Elasticsearch as OpenSearch handles ILM differently.
*
* @param componentTemplateName The name of the component template to update
* @throws IOException if there is an error communicating with the search engine
*/
default void removeILMFromComponentTemplate(String componentTemplateName) throws IOException {
// Default implementation does nothing as this is only needed for Elasticsearch

View File

@ -327,25 +327,24 @@ public final class SearchIndexUtils {
List<TagLabel> tagList, TagAndTierSources tagAndTierSources) {
Optional.ofNullable(tagList)
.ifPresent(
tags -> {
tags.forEach(
tag -> {
String tagSource = tag.getLabelType().value();
if (tag.getTagFQN().startsWith("Tier.")) {
tagAndTierSources
.getTierSources()
.put(
tagSource,
tagAndTierSources.getTierSources().getOrDefault(tagSource, 0) + 1);
} else {
tagAndTierSources
.getTagSources()
.put(
tagSource,
tagAndTierSources.getTagSources().getOrDefault(tagSource, 0) + 1);
}
});
});
tags ->
tags.forEach(
tag -> {
String tagSource = tag.getLabelType().value();
if (tag.getTagFQN().startsWith("Tier.")) {
tagAndTierSources
.getTierSources()
.put(
tagSource,
tagAndTierSources.getTierSources().getOrDefault(tagSource, 0) + 1);
} else {
tagAndTierSources
.getTagSources()
.put(
tagSource,
tagAndTierSources.getTagSources().getOrDefault(tagSource, 0) + 1);
}
}));
}
private static void processEntityTagSources(

View File

@ -19,10 +19,10 @@ import org.openmetadata.service.Entity;
* Interface for creating search source builders for different entity types.
* This interface provides a common contract for both ElasticSearch and OpenSearch implementations.
*
* @param <S> The SearchSourceBuilder type (different for ElasticSearch and OpenSearch)
* @param <Q> The QueryBuilder type
* @param <H> The HighlightBuilder type
* @param <F> The FunctionScoreQueryBuilder type
* <S> The SearchSourceBuilder type (different for ElasticSearch and OpenSearch)
* <Q> The QueryBuilder type
* <H> The HighlightBuilder type
* <F> The FunctionScoreQueryBuilder type
*/
public interface SearchSourceBuilderFactory<S, Q, H, F> {
@ -64,54 +64,42 @@ public interface SearchSourceBuilderFactory<S, Q, H, F> {
/**
* Get the appropriate search source builder based on the index name.
*
* @param index the index name
* @param q the search query
* @param from the starting offset
* @param size the number of results to return
* @return a search source builder configured for the specific entity type
*/
default S getSearchSourceBuilder(String index, String q, int from, int size) {
return getSearchSourceBuilder(index, q, from, size, false);
default S getSearchSourceBuilder(String indexName, String searchQuery, int fromOffset, int size) {
return getSearchSourceBuilder(indexName, searchQuery, fromOffset, size, false);
}
/**
* Get the appropriate search source builder based on the index name.
*
* @param index the index name
* @param q the search query
* @param from the starting offset
* @param size the number of results to return
* @param explain whether to include explanation of the search results
* @return a search source builder configured for the specific entity type
*/
default S getSearchSourceBuilder(String index, String q, int from, int size, boolean explain) {
String indexName = Entity.getSearchRepository().getIndexNameWithoutAlias(index);
default S getSearchSourceBuilder(
String indexName, String searchQuery, int fromOffset, int size, boolean includeExplain) {
indexName = Entity.getSearchRepository().getIndexNameWithoutAlias(indexName);
if (isTimeSeriesIndex(indexName)) {
return buildTimeSeriesSearchBuilder(indexName, q, from, size);
return buildTimeSeriesSearchBuilder(indexName, searchQuery, fromOffset, size);
}
if (isServiceIndex(indexName)) {
return buildServiceSearchBuilder(q, from, size);
return buildServiceSearchBuilder(searchQuery, fromOffset, size);
}
if (isDataQualityIndex(indexName)) {
return buildDataQualitySearchBuilder(indexName, q, from, size);
return buildDataQualitySearchBuilder(indexName, searchQuery, fromOffset, size);
}
if (isDataAssetIndex(indexName)) {
return buildDataAssetSearchBuilder(indexName, q, from, size, explain);
return buildDataAssetSearchBuilder(indexName, searchQuery, fromOffset, size, includeExplain);
}
if (indexName.equals("all") || indexName.equals("dataAsset")) {
return buildCommonSearchBuilder(q, from, size);
return buildCommonSearchBuilder(searchQuery, fromOffset, size);
}
return switch (indexName) {
case "user_search_index", "user", "team_search_index", "team" -> buildUserOrTeamSearchBuilder(
q, from, size);
default -> buildAggregateSearchBuilder(q, from, size);
searchQuery, fromOffset, size);
default -> buildAggregateSearchBuilder(searchQuery, fromOffset, size);
};
}
@ -214,31 +202,18 @@ public interface SearchSourceBuilderFactory<S, Q, H, F> {
/**
* Build a search query builder with the specified fields and weights.
*
* @param query the search query
* @param fields map of field names to their boost weights
* @return a query string query builder
*/
Q buildSearchQueryBuilder(String query, Map<String, Float> fields);
/**
* Build highlights for the specified fields.
*
* @param fields list of field names to highlight
* @return a highlight builder
*/
H buildHighlights(List<String> fields);
/**
* Create a search source builder with the specified query builder, highlights, and pagination.
*
* @param queryBuilder the query builder
* @param highlightBuilder the highlight builder
* @param from the starting offset
* @param size the number of results to return
* @return a search source builder
*/
S searchBuilder(Q queryBuilder, H highlightBuilder, int from, int size);
S searchBuilder(Q queryBuilder, H highlightBuilder, int fromOffset, int size);
S addAggregationsToNLQQuery(S searchSourceBuilder, String indexName);
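
The parameter renames in this interface (q to searchQuery, from to fromOffset, explain to includeExplain) are source- and binary-compatible: Java binds arguments by position, not by name, and an implementing class may use different parameter names entirely. A hypothetical sketch:

interface Factory<S> {
  S searchBuilder(String searchQuery, int fromOffset, int size);
}

class PlainFactory implements Factory<String> {
  @Override // parameter names need not match the interface declaration
  public String searchBuilder(String q, int from, int size) {
    return q + ":" + from + "-" + size;
  }
}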

View File

@ -7,8 +7,6 @@ import org.openmetadata.service.Entity;
public class SearchUtil {
/**
* Check if the index is a data asset index
* @param indexName name of the index to check
* @return true if index is a data asset, false otherwise
*/
public static boolean isDataAssetIndex(String indexName) {
return switch (indexName) {

View File

@ -852,8 +852,7 @@ public class ElasticSearchClient implements SearchClient {
}
@Override
public Response searchWithNLQ(SearchRequest request, SubjectContext subjectContext)
throws IOException {
public Response searchWithNLQ(SearchRequest request, SubjectContext subjectContext) {
LOG.info("Searching with NLQ: {}", request.getQuery());
if (nlqService != null) {
try {
@ -1457,8 +1456,7 @@ public class ElasticSearchClient implements SearchClient {
}
@Override
public void createEntities(String indexName, List<Map<String, String>> docsAndIds)
throws IOException {
public void createEntities(String indexName, List<Map<String, String>> docsAndIds) {
if (isClientAvailable) {
BulkRequest bulkRequest = new BulkRequest();
for (Map<String, String> docAndId : docsAndIds) {

View File

@ -84,13 +84,13 @@ public class ElasticSearchSourceBuilderFactory
@Override
public SearchSourceBuilder searchBuilder(
QueryBuilder query, HighlightBuilder highlightBuilder, int from, int size) {
QueryBuilder query, HighlightBuilder highlightBuilder, int fromOffset, int size) {
SearchSourceBuilder builder = new SearchSourceBuilder();
builder.query(query);
if (highlightBuilder != null) {
builder.highlighter(highlightBuilder);
}
builder.from(from);
builder.from(fromOffset);
builder.size(size);
return builder;
}

View File

@ -1,6 +1,5 @@
package org.openmetadata.service.search.nlq;
import java.io.IOException;
import lombok.extern.slf4j.Slf4j;
import org.openmetadata.schema.api.search.NLQConfiguration;
import org.openmetadata.schema.search.SearchRequest;
@ -21,8 +20,7 @@ public class NoOpNLQService implements NLQService {
}
@Override
public String transformNaturalLanguageQuery(SearchRequest request, String additionalContext)
throws IOException {
public String transformNaturalLanguageQuery(SearchRequest request, String additionalContext) {
return null;
}

View File

@ -509,8 +509,7 @@ public class OpenSearchClient implements SearchClient {
}
@Override
public Response searchWithNLQ(SearchRequest request, SubjectContext subjectContext)
throws IOException {
public Response searchWithNLQ(SearchRequest request, SubjectContext subjectContext) {
LOG.info("Searching with NLQ: {}", request.getQuery());
if (nlqService != null) {
@ -1611,8 +1610,7 @@ public class OpenSearchClient implements SearchClient {
}
@Override
public void createEntities(String indexName, List<Map<String, String>> docsAndIds)
throws IOException {
public void createEntities(String indexName, List<Map<String, String>> docsAndIds) {
if (isClientAvailable) {
BulkRequest bulkRequest = new BulkRequest();
for (Map<String, String> docAndId : docsAndIds) {
@ -2525,7 +2523,7 @@ public class OpenSearchClient implements SearchClient {
}
@Override
public List<String> getDataStreams(String prefix) throws IOException {
public List<String> getDataStreams(String prefix) {
try {
GetDataStreamRequest request = new GetDataStreamRequest(prefix + "*");
GetDataStreamResponse response =

View File

@ -83,13 +83,13 @@ public class OpenSearchSourceBuilderFactory
@Override
public SearchSourceBuilder searchBuilder(
QueryBuilder query, HighlightBuilder highlightBuilder, int from, int size) {
QueryBuilder query, HighlightBuilder highlightBuilder, int fromOffset, int size) {
SearchSourceBuilder builder = new SearchSourceBuilder();
builder.query(query);
if (highlightBuilder != null) {
builder.highlighter(highlightBuilder);
}
builder.from(from);
builder.from(fromOffset);
builder.size(size);
return builder;
}

View File

@ -150,11 +150,6 @@ public class LdapAuthenticator implements AuthenticatorHandler {
/**
* Check if the user exists in database by userName, if user exist, reassign roles for user according to it's ldap
* group else, create a new user and assign roles according to it's ldap group
*
* @param userDn userDn from LDAP
* @param email Email of the User
* @return user info
* @author Eric Wen@2023-07-16 17:06:43
*/
private User checkAndCreateUser(String userDn, String email, String userName) throws IOException {
// Check if the user exists in OM Database
@ -303,10 +298,6 @@ public class LdapAuthenticator implements AuthenticatorHandler {
/**
* Getting user's roles according to the mapping between ldap groups and roles
*
* @param user user object
* @param reAssign flag to decide whether to reassign roles
* @author Eric Wen@2023-07-16 17:23:57
*/
private void getRoleForLdap(String userDn, User user, Boolean reAssign)
throws JsonProcessingException {

View File

@ -16,7 +16,6 @@ import jakarta.ws.rs.container.ContainerRequestContext;
import jakarta.ws.rs.container.ContainerRequestFilter;
import jakarta.ws.rs.core.SecurityContext;
import jakarta.ws.rs.ext.Provider;
import java.io.IOException;
import lombok.extern.slf4j.Slf4j;
import org.openmetadata.service.security.policyevaluator.SubjectContext;
@ -30,7 +29,7 @@ import org.openmetadata.service.security.policyevaluator.SubjectContext;
public class UserActivityFilter implements ContainerRequestFilter {
@Override
public void filter(ContainerRequestContext requestContext) throws IOException {
public void filter(ContainerRequestContext requestContext) {
// Only track authenticated users
SecurityContext securityContext = requestContext.getSecurityContext();
if (securityContext == null || securityContext.getUserPrincipal() == null) {

View File

@ -59,19 +59,11 @@ public class UserActivityTracker {
private final ScheduledExecutorService scheduler =
Executors.newSingleThreadScheduledExecutor(
r -> {
return Thread.ofPlatform()
.name("UserActivityTracker-Scheduler")
.daemon(true)
.unstarted(r);
});
r -> Thread.ofPlatform().name("UserActivityTracker-Scheduler").daemon(true).unstarted(r));
private final ScheduledExecutorService virtualThreadExecutor =
Executors.newScheduledThreadPool(
0,
r -> {
return Thread.ofVirtual().name("UserActivityTracker-VirtualThread-", 0).unstarted(r);
});
0, r -> Thread.ofVirtual().name("UserActivityTracker-VirtualThread-", 0).unstarted(r));
private volatile UserRepository userRepository;
@ -191,9 +183,7 @@ public class UserActivityTracker {
// Convert to a simple map of userName -> lastActivityTime
userActivityMap = new HashMap<>();
localActivityCache.forEach(
(userName, activity) -> {
userActivityMap.put(userName, activity.lastActivityTime);
});
(userName, activity) -> userActivityMap.put(userName, activity.lastActivityTime));
localActivityCache.clear();
} finally {
cacheLock.writeLock().unlock();
@ -241,10 +231,10 @@ public class UserActivityTracker {
cacheLock.writeLock().lock();
try {
userActivityMap.forEach(
(userName, lastActivityTime) -> {
localActivityCache.putIfAbsent(
userName, new UserActivity(userName, lastActivityTime, System.currentTimeMillis()));
});
(userName, lastActivityTime) ->
localActivityCache.putIfAbsent(
userName,
new UserActivity(userName, lastActivityTime, System.currentTimeMillis())));
} finally {
cacheLock.writeLock().unlock();
}
@ -310,9 +300,7 @@ public class UserActivityTracker {
// Convert to a simple map of userName -> lastActivityTime
userActivityMap = new HashMap<>();
localActivityCache.forEach(
(userName, activity) -> {
userActivityMap.put(userName, activity.lastActivityTime);
});
(userName, activity) -> userActivityMap.put(userName, activity.lastActivityTime));
localActivityCache.clear();
} finally {
cacheLock.writeLock().unlock();

View File

@ -18,13 +18,11 @@ import static org.openmetadata.service.security.AuthenticationCodeFlowHandler.SE
import static org.openmetadata.service.util.UserUtil.getRoleListFromUser;
import com.onelogin.saml2.Auth;
import jakarta.servlet.ServletException;
import jakarta.servlet.annotation.WebServlet;
import jakarta.servlet.http.Cookie;
import jakarta.servlet.http.HttpServlet;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@ -63,8 +61,7 @@ public class SamlAssertionConsumerServlet extends HttpServlet {
}
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
protected void doPost(HttpServletRequest request, HttpServletResponse response) {
try {
// Convert Jakarta servlet types to javax servlet types using Apache Felix wrappers
javax.servlet.http.HttpServletRequest wrappedRequest = new HttpServletRequestWrapper(request);

View File

@ -24,7 +24,6 @@ import jakarta.servlet.http.HttpServlet;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import jakarta.servlet.http.HttpSession;
import java.io.IOException;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.Date;
@ -61,8 +60,7 @@ public class SamlLogoutServlet extends HttpServlet {
@Override
protected void doGet(
final HttpServletRequest httpServletRequest, final HttpServletResponse httpServletResponse)
throws IOException {
final HttpServletRequest httpServletRequest, final HttpServletResponse httpServletResponse) {
try {
LOG.debug("Performing application logout");
HttpSession session = httpServletRequest.getSession(false);

View File

@ -244,9 +244,6 @@ public class EntityRelationshipCleanup {
/**
* Deletes orphaned relationships from the database
*
* @param orphanedRelationships List of orphaned relationships to delete
* @return Number of relationships successfully deleted
*/
private int deleteOrphanedRelationships(List<OrphanedRelationship> orphanedRelationships) {
LOG.info("Deleting {} orphaned relationships", orphanedRelationships.size());

View File

@ -799,10 +799,6 @@ public final class EntityUtil {
* Gets the value of a field from an entity using reflection.
* This method checks if the entity supports the given field and returns its value.
* If the field is not supported, returns null.
*
* @param entity The entity to get the field value from
* @param fieldName The name of the field to get (corresponds to getter method name without 'get' prefix)
* @return The value of the field, or null if field is not supported by the entity
*/
public static Object getEntityField(EntityInterface entity, String fieldName) {
if (entity == null || fieldName == null || fieldName.isEmpty()) {

View File

@ -215,9 +215,6 @@ public class FullyQualifiedName {
* - Full hierarchy: "service", "service.database", "service.database.schema", "service.database.schema.table"
* - Individual parts: "service", "database", "schema", "table"
* - Bottom-up combinations: "database.schema.table", "schema.table", "table"
*
* @param fqn The fully qualified name to generate parts from
* @return Set of all possible FQN parts
*/
public static Set<String> getAllParts(String fqn) {
var parts = split(fqn);
@ -239,9 +236,6 @@ public class FullyQualifiedName {
* Generates hierarchical FQN parts from root to the full FQN.
* For example, given FQN "service.database.schema.table", this method generates:
* ["service", "service.database", "service.database.schema", "service.database.schema.table"]
*
* @param fqn The fully qualified name to generate hierarchy from
* @return List of hierarchical FQN parts from root to full FQN
*/
public static List<String> getHierarchicalParts(String fqn) {
var parts = split(fqn);
@ -254,9 +248,6 @@ public class FullyQualifiedName {
* Gets all ancestor FQNs for a given FQN.
* For example, given FQN "service.database.schema.table", this method returns:
* ["service.database.schema", "service.database", "service"]
*
* @param fqn The fully qualified name to get ancestors from
* @return List of ancestor FQNs (excluding the input FQN itself)
*/
public static List<String> getAncestors(String fqn) {
var parts = split(fqn);

View File

@ -85,10 +85,6 @@ public class ReflectionUtil {
/**
* Creates a class instance from a fully qualified class name
*
* @param className The fully qualified class name
* @return Class object
* @throws ClassNotFoundException If class cannot be found
*/
public static Class<?> createClass(String className) throws ClassNotFoundException {
try {

View File

@ -28,7 +28,6 @@ import jakarta.ws.rs.core.UriInfo;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.time.LocalDateTime;
@ -103,7 +102,7 @@ public final class RestUtil {
return getHref(uriInfo, collectionPath, id.toString());
}
public static int compareDates(String date1, String date2) throws ParseException {
public static int compareDates(String date1, String date2) {
return LocalDateTime.parse(date1, DATE_FORMAT)
.compareTo(LocalDateTime.parse(date2, DATE_FORMAT));
}

View File

@ -15,9 +15,6 @@ public class DatabaseAuthenticationProviderFactory {
/**
* Get auth provider based on the given jdbc url.
*
* @param jdbcURL the jdbc url.
* @return instance of {@link DatabaseAuthenticationProvider}.
*/
public static Optional<DatabaseAuthenticationProvider> get(String jdbcURL) {
Map<String, String> queryParams = parseQueryParams(jdbcURL);

View File

@ -91,8 +91,7 @@ class DataRetentionAppTest extends OpenMetadataApplicationTest {
@Test
@Execution(ExecutionMode.CONCURRENT)
void test_dataRetention_withOrphanedRelationshipsCleanup_shouldExecuteSuccessfully()
throws Exception {
void test_dataRetention_withOrphanedRelationshipsCleanup_shouldExecuteSuccessfully() {
// Test that the enhanced DataRetention executes the orphaned relationships cleanup
// Create a test configuration

View File

@ -160,9 +160,7 @@ public class CacheDisabledCompatibilityTest extends OpenMetadataApplicationTest
// These operations should not throw exceptions when cache is disabled
assertDoesNotThrow(
() -> {
RelationshipCache.put(entityId, testData);
},
() -> RelationshipCache.put(entityId, testData),
"put() should handle disabled cache gracefully");
assertDoesNotThrow(
@ -173,15 +171,10 @@ public class CacheDisabledCompatibilityTest extends OpenMetadataApplicationTest
"get() should handle disabled cache gracefully");
assertDoesNotThrow(
() -> {
RelationshipCache.evict(entityId);
},
"evict() should handle disabled cache gracefully");
() -> RelationshipCache.evict(entityId), "evict() should handle disabled cache gracefully");
assertDoesNotThrow(
() -> {
RelationshipCache.bumpTag("test-tag", 1);
},
() -> RelationshipCache.bumpTag("test-tag", 1),
"bumpTag() should handle disabled cache gracefully");
assertDoesNotThrow(
@ -254,9 +247,7 @@ public class CacheDisabledCompatibilityTest extends OpenMetadataApplicationTest
// Test insert
assertDoesNotThrow(
() -> {
entityRelationshipDAO.insert(fromId, toId, fromEntity, toEntity, relation, jsonData);
},
() -> entityRelationshipDAO.insert(fromId, toId, fromEntity, toEntity, relation, jsonData),
"Insert should work without cache");
// Test read
@ -300,9 +291,7 @@ public class CacheDisabledCompatibilityTest extends OpenMetadataApplicationTest
// Test bulk insert
assertDoesNotThrow(
() -> {
entityRelationshipDAO.bulkInsertTo(relationships);
},
() -> entityRelationshipDAO.bulkInsertTo(relationships),
"Bulk insert should work without cache");
// Test bulk remove
@ -310,9 +299,9 @@ public class CacheDisabledCompatibilityTest extends OpenMetadataApplicationTest
relationships.stream().map(CollectionDAO.EntityRelationshipObject::getToId).toList();
assertDoesNotThrow(
() -> {
entityRelationshipDAO.bulkRemoveTo(testTable.getId(), toIds, Entity.TABLE, Entity.TAG, 2);
},
() ->
entityRelationshipDAO.bulkRemoveTo(
testTable.getId(), toIds, Entity.TABLE, Entity.TAG, 2),
"Bulk remove should work without cache");
LOG.info("Bulk operations without cache test passed");

View File

@ -294,7 +294,7 @@ class CacheWarmupIntegrationTest extends CachedOpenMetadataApplicationResourceTe
@Test
@Order(2)
@DisplayName("Test cache warmup improves query performance")
public void testCacheWarmupImprovesQueryPerformance() throws Exception {
public void testCacheWarmupImprovesQueryPerformance() {
assertTrue(isCacheAvailable(), "Cache should be available for performance testing");
clearCache();
@ -316,7 +316,7 @@ class CacheWarmupIntegrationTest extends CachedOpenMetadataApplicationResourceTe
@Test
@DisplayName("Test cache warmup populates relationship data")
public void testCacheWarmupPopulatesRelationshipData() throws Exception {
public void testCacheWarmupPopulatesRelationshipData() {
assertTrue(isCacheAvailable(), "Cache should be available for relationship testing");
clearCache();
Map<String, Long> initialStats = getCacheStats();
@ -341,7 +341,7 @@ class CacheWarmupIntegrationTest extends CachedOpenMetadataApplicationResourceTe
@Test
@DisplayName("Test cache warmup populates tag data")
public void testCacheWarmupPopulatesTagData() throws Exception {
public void testCacheWarmupPopulatesTagData() {
assertTrue(isCacheAvailable(), "Cache should be available for tag testing");
clearCache();
Map<String, List<TagLabel>> tagResults = new HashMap<>();
@ -453,7 +453,7 @@ class CacheWarmupIntegrationTest extends CachedOpenMetadataApplicationResourceTe
@Test
@DisplayName("Test cache warmup handles large dataset efficiently")
public void testCacheWarmupHandlesLargeDatasetEfficiently() throws Exception {
public void testCacheWarmupHandlesLargeDatasetEfficiently() {
assertTrue(isCacheAvailable(), "Cache should be available for large dataset testing");
clearCache();
@ -492,7 +492,7 @@ class CacheWarmupIntegrationTest extends CachedOpenMetadataApplicationResourceTe
@Test
@DisplayName("Test cache warmup integration with application lifecycle")
public void testCacheWarmupIntegrationWithApplicationLifecycle() throws Exception {
public void testCacheWarmupIntegrationWithApplicationLifecycle() {
assertTrue(isCacheAvailable(), "Cache should be available for lifecycle testing");
assertTrue(RelationshipCache.isAvailable(), "Cache should be initialized");
assertFalse(testTables.isEmpty(), "Test entities should be created");
@ -520,7 +520,7 @@ class CacheWarmupIntegrationTest extends CachedOpenMetadataApplicationResourceTe
@Test
@DisplayName("Test cache warmup with mixed entity types")
public void testCacheWarmupWithMixedEntityTypes() throws Exception {
public void testCacheWarmupWithMixedEntityTypes() {
assertTrue(isCacheAvailable(), "Cache should be available for mixed entity testing");
clearCache();
entityRelationshipDAO.findTo(

View File

@ -279,7 +279,7 @@ class CacheWarmupServiceTest extends CachedOpenMetadataApplicationResourceTest {
@Test
@Order(3)
@DisplayName("Test lazy cache initialization and validation")
void testLazyCacheInitialization() throws Exception {
void testLazyCacheInitialization() {
assertTrue(isCacheAvailable(), "Cache should be available for lazy loading testing");
CompletableFuture<Void> initFuture = lazyCacheService.initializeLazyCache();
@ -293,16 +293,13 @@ class CacheWarmupServiceTest extends CachedOpenMetadataApplicationResourceTest {
// Test that cache connectivity works
assertDoesNotThrow(
() -> {
lazyCacheService.testCacheConnectivity();
},
"Cache connectivity test should pass");
() -> lazyCacheService.testCacheConnectivity(), "Cache connectivity test should pass");
}
@Test
@Order(4)
@DisplayName("Test cache statistics and monitoring")
void testCacheStatisticsAndMonitoring() throws Exception {
void testCacheStatisticsAndMonitoring() {
LazyCacheService.CacheStats initialStats = lazyCacheService.getCacheStats();
assertEquals(0, initialStats.cacheHits, "Initial cache hits should be 0");
assertEquals(0, initialStats.cacheMisses, "Initial cache misses should be 0");
@ -329,7 +326,7 @@ class CacheWarmupServiceTest extends CachedOpenMetadataApplicationResourceTest {
@Test
@Order(5)
@DisplayName("Test lazy cache population")
void testLazyCachePopulation() throws Exception {
void testLazyCachePopulation() {
if (!isCacheAvailable()) {
LOG.warn("Cache not available, skipping lazy cache population test");
return;
@ -352,7 +349,7 @@ class CacheWarmupServiceTest extends CachedOpenMetadataApplicationResourceTest {
@Test
@Order(6)
@DisplayName("Test lazy loading with real entity queries")
void testLazyLoadingWithRealQueries() throws Exception {
void testLazyLoadingWithRealQueries() {
if (!isCacheAvailable()) {
LOG.warn("Cache not available, skipping real query test");
return;
@ -401,8 +398,7 @@ class CacheWarmupServiceTest extends CachedOpenMetadataApplicationResourceTest {
Thread.sleep(50);
assertDoesNotThrow(
() -> lifecycleService.shutdown(),
"Shutdown during initialization should not throw exceptions");
lifecycleService::shutdown, "Shutdown during initialization should not throw exceptions");
try {
initFuture.get(5, TimeUnit.SECONDS);
@ -416,7 +412,7 @@ class CacheWarmupServiceTest extends CachedOpenMetadataApplicationResourceTest {
@Test
@Order(8)
@DisplayName("Test lazy cache error handling and resilience")
void testLazyCacheErrorHandlingAndResilience() throws Exception {
void testLazyCacheErrorHandlingAndResilience() {
CacheConfiguration minimalConfig = new CacheConfiguration();
minimalConfig.setEnabled(true);
minimalConfig.setWarmupEnabled(true);
@@ -444,7 +440,7 @@ class CacheWarmupServiceTest extends CachedOpenMetadataApplicationResourceTest {
   @Test
   @Order(9)
   @DisplayName("Test simple background prefetching")
-  void testSimpleBackgroundPrefetching() throws Exception {
+  void testSimpleBackgroundPrefetching() {
     if (!isCacheAvailable()) {
       LOG.warn("Cache not available, skipping prefetching test");
       return;
@@ -483,7 +479,7 @@ class CacheWarmupServiceTest extends CachedOpenMetadataApplicationResourceTest {
   @Test
   @Order(10)
   @DisplayName("Test lazy cache thread configuration")
-  void testLazyCacheThreadConfiguration() throws Exception {
+  void testLazyCacheThreadConfiguration() {
     // Test single thread configuration
     CacheConfiguration singleThreadConfig = new CacheConfiguration();
     singleThreadConfig.setEnabled(true);
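// A hedged sketch of the `assertDoesNotThrow` change above: a no-argument call
// wrapped in a lambda can be passed as a method reference instead, since both
// satisfy JUnit's Executable interface. Names here are illustrative.
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;

class ShutdownDemo {
  void shutdown() {} // stand-in for the real lifecycle shutdown

  void verifyQuietShutdown() {
    ShutdownDemo service = new ShutdownDemo();
    assertDoesNotThrow(service::shutdown, "Shutdown should not throw exceptions");
  }
}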


@@ -42,7 +42,7 @@ class RateLimiterComparisonTest {
     LOG.info("Testing Guava RateLimiter (version: 33.4.8-jre, marked @Beta)");
     RateLimiter rateLimiter = RateLimiter.create(TEST_RATE);

-    RateLimiterTestResult result = performRateLimiterTest("Guava", () -> rateLimiter.acquire());
+    RateLimiterTestResult result = performRateLimiterTest("Guava", rateLimiter::acquire);

     validateRateLimiterResult(result, "Guava RateLimiter");
@@ -156,7 +156,7 @@ class RateLimiterComparisonTest {
     // Test Guava RateLimiter under concurrency
     RateLimiter guavaLimiter = RateLimiter.create(TEST_RATE);
     testConcurrentRateLimiter(
-        "Guava", executor, threadCount, operationsPerThread, () -> guavaLimiter.acquire());
+        "Guava", executor, threadCount, operationsPerThread, guavaLimiter::acquire);

     // Test Resilience4j RateLimiter under concurrency
     RateLimiterConfig config =
@@ -172,7 +172,7 @@ class RateLimiterComparisonTest {
         executor,
         threadCount,
         operationsPerThread,
-        () -> resilience4jLimiter.acquirePermission());
+        resilience4jLimiter::acquirePermission);

     // Test production Resilience4j configuration under concurrency
     RateLimiterConfig prodConfig =
@@ -188,7 +188,7 @@ class RateLimiterComparisonTest {
         executor,
         threadCount,
         operationsPerThread,
-        () -> prodLimiter.acquirePermission());
+        prodLimiter::acquirePermission);

     executor.shutdown();
     assertTrue(
@@ -373,9 +373,7 @@ class RateLimiterComparisonTest {
         actualRate <= TEST_RATE * 1.2, name + " should respect rate limits under concurrent load");
   }

-  private RateLimiterTestResult performRateLimiterTest(String name, Runnable acquireOperation)
-      throws Exception {
+  private RateLimiterTestResult performRateLimiterTest(String name, Runnable acquireOperation) {
     LOG.info("Starting rate limiter test for: {}", name);

     long startTime = System.currentTimeMillis();
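// For reference, a self-contained sketch of the Guava cleanup above:
// `() -> rateLimiter.acquire()` and `rateLimiter::acquire` are interchangeable
// wherever a Runnable is expected; the double returned by acquire() is simply
// discarded in both forms. The rate value is illustrative.
import com.google.common.util.concurrent.RateLimiter;

class AcquireDemo {
  public static void main(String[] args) {
    RateLimiter rateLimiter = RateLimiter.create(100.0);
    Runnable viaLambda = () -> rateLimiter.acquire(); // redundant wrapper
    Runnable viaReference = rateLimiter::acquire; // equivalent, terser
    viaLambda.run();
    viaReference.run();
  }
}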


@@ -52,7 +52,7 @@ class RateLimiterProductionReadinessTest {
     LOG.info("⚠️ Alternative: Guava RateLimiter - Works but marked @Beta (use with caution)");
   }

-  private void testGuavaRateLimiterProduction(double targetRate, int operations) throws Exception {
+  private void testGuavaRateLimiterProduction(double targetRate, int operations) {
     LOG.info("\n--- Testing Guava RateLimiter (v33.4.8-jre) ---");
     LOG.info("Status: ⚠️ Marked @Beta - Use with caution in production");
@@ -80,8 +80,7 @@ class RateLimiterProductionReadinessTest {
         duration >= (operations - 1) * 1000 / targetRate * 0.5, "Should take reasonable time");
   }

-  private void testResilience4jRateLimiterProduction(double targetRate, int operations)
-      throws Exception {
+  private void testResilience4jRateLimiterProduction(double targetRate, int operations) {
     LOG.info("\n--- Testing Resilience4j RateLimiter (v2.2.0) ---");
     LOG.info("Status: ✅ Production Ready - Stable and well-maintained");
@@ -144,7 +143,7 @@ class RateLimiterProductionReadinessTest {
     LOG.info("All rate limiters successfully controlled database load during warmup");
   }

-  private void simulateCacheWarmupWithGuava(double rate, int queries) throws Exception {
+  private void simulateCacheWarmupWithGuava(double rate, int queries) {
     LOG.info("\n--- Cache Warmup with Guava RateLimiter ---");

     RateLimiter rateLimiter = RateLimiter.create(rate);
@@ -165,7 +164,7 @@ class RateLimiterProductionReadinessTest {
         rate);
   }

-  private void simulateCacheWarmupWithResilience4j(double rate, int queries) throws Exception {
+  private void simulateCacheWarmupWithResilience4j(double rate, int queries) {
     LOG.info("\n--- Cache Warmup with Resilience4j RateLimiter ---");

     RateLimiterConfig config =
@@ -227,7 +226,7 @@ class RateLimiterProductionReadinessTest {
           io.github.resilience4j.ratelimiter.RateLimiter rateLimiter =
               io.github.resilience4j.ratelimiter.RateLimiter.of("stability-test", config);
-          return () -> rateLimiter.acquirePermission();
+          return rateLimiter::acquirePermission;
         },
         threadCount,
         operationsPerThread,
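// A sketch (with assumed configuration values) of the Resilience4j change just
// above: returning the method reference directly instead of wrapping it in a
// lambda. The boolean returned by acquirePermission() is ignored by Runnable.
import io.github.resilience4j.ratelimiter.RateLimiter;
import io.github.resilience4j.ratelimiter.RateLimiterConfig;

import java.time.Duration;

class PermissionDemo {
  static Runnable permissionAcquirer() {
    RateLimiterConfig config =
        RateLimiterConfig.custom()
            .limitForPeriod(10)
            .limitRefreshPeriod(Duration.ofSeconds(1))
            .timeoutDuration(Duration.ofMillis(100))
            .build();
    RateLimiter limiter = RateLimiter.of("demo", config);
    return limiter::acquirePermission; // method reference in place of () -> ...
  }
}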


@@ -72,7 +72,7 @@ public class MicrometerBundleTest {
   }

   @Test
-  public void testPrometheusEndpoint() throws Exception {
+  public void testPrometheusEndpoint() {
     // Initialize and run bundle
     bundle.initialize(bootstrap);
     when(config.getClusterName()).thenReturn("test-cluster");


@@ -23,7 +23,7 @@ class RequestLatencyTrackingSimpleTest {
   }

   @Test
-  void testRequestLatencyTracking() throws InterruptedException {
+  void testRequestLatencyTracking() {
     String endpoint = "/api/v1/test";
     RequestLatencyContext.startRequest(endpoint);
     simulateWork(500);


@@ -2583,11 +2583,7 @@ public abstract class EntityResourceTest<T extends EntityInterface, K extends Cr
               connectLatch.countDown();
               messageLatch.countDown();
             })
-        .on(
-            Socket.EVENT_DISCONNECT,
-            args -> {
-              LOG.info("Disconnected from Socket.IO server");
-            });
+        .on(Socket.EVENT_DISCONNECT, args -> LOG.info("Disconnected from Socket.IO server"));

     socket.connect();
     if (!connectLatch.await(10, TimeUnit.SECONDS)) {
@@ -4463,9 +4459,7 @@ public abstract class EntityResourceTest<T extends EntityInterface, K extends Cr
             })
         .on(
             Socket.EVENT_DISCONNECT,
-            args -> {
-              System.out.println("Disconnected from Socket.IO server");
-            });
+            args -> System.out.println("Disconnected from Socket.IO server"));

     socket.connect();
     if (!connectLatch.await(10, TimeUnit.SECONDS)) {
@@ -4537,9 +4531,7 @@ public abstract class EntityResourceTest<T extends EntityInterface, K extends Cr
             })
         .on(
             Socket.EVENT_DISCONNECT,
-            args -> {
-              System.out.println("Disconnected from Socket.IO server");
-            });
+            args -> System.out.println("Disconnected from Socket.IO server"));

     socket.connect();
     if (!connectLatch.await(10, TimeUnit.SECONDS)) {
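// A hedged sketch of the Socket.IO handler cleanup in the three hunks above:
// when a listener body is a single statement, the braces can be dropped. The
// event name matches the tests; the connection setup here is illustrative.
import io.socket.client.IO;
import io.socket.client.Socket;

class SocketDemo {
  static Socket connect(String uri) throws java.net.URISyntaxException {
    Socket socket = IO.socket(uri);
    socket.on(
        Socket.EVENT_DISCONNECT,
        args -> System.out.println("Disconnected from Socket.IO server"));
    socket.connect();
    return socket;
  }
}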


@@ -48,10 +48,6 @@ public class ConfigurationReaderTest {
   @Test
   public void missingConfig() {
     ConfigurationReader reader = new ConfigurationReader();
-    assertThrows(
-        IOException.class,
-        () -> {
-          reader.readConfigFromResource("missing");
-        });
+    assertThrows(IOException.class, () -> reader.readConfigFromResource("missing"));
   }
 }
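// A runnable sketch of the assertThrows one-liner above. The reader method here
// is a hypothetical stand-in that always fails, mirroring the missing-resource
// case in the real test.
import static org.junit.jupiter.api.Assertions.assertThrows;

import java.io.IOException;
import org.junit.jupiter.api.Test;

class MissingConfigDemo {
  private String readConfigFromResource(String name) throws IOException {
    throw new IOException("resource not found: " + name); // stand-in
  }

  @Test
  void missingConfig() {
    assertThrows(IOException.class, () -> readConfigFromResource("missing"));
  }
}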


@@ -1320,8 +1320,7 @@ class ColumnResourceTest extends OpenMetadataApplicationTest {
       EntityInterface parentEntity,
       String columnName,
       List<String> expectedFields,
-      String entityType)
-      throws IOException {
+      String entityType) {
     String entityLink =
         String.format("<#E::%s::%s>", entityType, parentEntity.getFullyQualifiedName());

@@ -86,7 +86,7 @@ class DataContractResourceTest extends OpenMetadataApplicationTest {
   private static String testDatabaseSchemaFQN = null;

   @AfterEach
-  void cleanup() throws IOException {
+  void cleanup() {
     for (DataContract contract : createdContracts) {
       try {
         deleteDataContract(contract.getId());


@@ -1262,7 +1262,7 @@ public class TableResourceTest extends EntityResourceTest<Table, CreateTable> {
   }

   @Test
-  void create_profilerWrongTimestamp(TestInfo testInfo) throws IOException, ParseException {
+  void create_profilerWrongTimestamp(TestInfo testInfo) throws IOException {
     Table table = createEntity(createRequest(testInfo), ADMIN_AUTH_HEADERS);
     Long correctTimestamp = 1725525388000L;
     Long wrongTimestamp = 1725525388L;


@@ -473,7 +473,7 @@ public class TestCaseResourceTest extends EntityResourceTest<TestCase, CreateTes
   }

   @Test
-  void post_testWithInvalidEntityTestSuite_4xx(TestInfo test) throws IOException {
+  void post_testWithInvalidEntityTestSuite_4xx(TestInfo test) {
     CreateTestCase create = createRequest(test);
     TestSuiteResourceTest testSuiteResourceTest = new TestSuiteResourceTest();
@@ -983,13 +983,13 @@ public class TestCaseResourceTest extends EntityResourceTest<TestCase, CreateTes
     allEntities
         .getData()
         .forEach(
-            tc -> {
-              assertTrue(
-                  tc.getOwners().stream().anyMatch(owner -> owner.getId().equals(user2Ref.getId())),
-                  String.format(
-                      "Test case %s does not contain the expected owner %s",
-                      tc.getName(), user2Ref.getName()));
-            });
+            tc ->
+                assertTrue(
+                    tc.getOwners().stream()
+                        .anyMatch(owner -> owner.getId().equals(user2Ref.getId())),
+                    String.format(
+                        "Test case %s does not contain the expected owner %s",
+                        tc.getName(), user2Ref.getName())));

     queryParams.put("owner", team.getName());
     allEntities = listEntitiesFromSearch(queryParams, testCasesNum, 0, ADMIN_AUTH_HEADERS);
@@ -3802,7 +3802,7 @@ public class TestCaseResourceTest extends EntityResourceTest<TestCase, CreateTes
   }

   @Test
-  void test_testCaseInvalidEntityLinkTest(TestInfo testInfo) throws IOException {
+  void test_testCaseInvalidEntityLinkTest(TestInfo testInfo) {
     // Invalid entity link as not parsable by antlr parser
     String entityLink = "<#E::table::special!@#$%^&*()_+[]{}|;:\\'\",./?>";
     CreateTestCase create = createRequest(testInfo);
@@ -3847,7 +3847,7 @@ public class TestCaseResourceTest extends EntityResourceTest<TestCase, CreateTes
   }

   @Test
-  void test_columnTestCaseValidation(TestInfo testInfo) throws IOException {
+  void test_columnTestCaseValidation(TestInfo testInfo) {
     CreateTestCase create = createRequest(testInfo);
     String invalidFieldNameLink =
         "<#E::table::" + TEST_TABLE1.getFullyQualifiedName() + "::invalidField::columnName>";


@@ -2113,11 +2113,7 @@ public class GlossaryTermResourceTest extends EntityResourceTest<GlossaryTerm, C
               connectLatch.countDown();
               messageLatch.countDown();
             })
-        .on(
-            Socket.EVENT_DISCONNECT,
-            args -> {
-              LOG.info("Disconnected from Socket.IO server");
-            });
+        .on(Socket.EVENT_DISCONNECT, args -> LOG.info("Disconnected from Socket.IO server"));

     socket.connect();
     if (!connectLatch.await(10, TimeUnit.SECONDS)) {


@@ -475,8 +475,7 @@ public class LineageResourceTest extends OpenMetadataApplicationTest {
   @Order(6)
   @Test
-  void get_dataQualityLineage(TestInfo test)
-      throws IOException, URISyntaxException, ParseException {
+  void get_dataQualityLineage(TestInfo test) throws IOException, ParseException {
     TestSuiteResourceTest testSuiteResourceTest = new TestSuiteResourceTest();
     TestCaseResourceTest testCaseResourceTest = new TestCaseResourceTest();
     TestDefinitionResourceTest testDefinitionResourceTest = new TestDefinitionResourceTest();


@@ -178,8 +178,7 @@ public class MetricResourceTest extends EntityResourceTest<Metric, CreateMetric>
   }

   @Override
-  public void assertFieldChange(String fieldName, Object expected, Object actual)
-      throws IOException {
+  public void assertFieldChange(String fieldName, Object expected, Object actual) {
     if (expected != null && actual != null) {
       switch (fieldName) {
         case "relatedMetrics" -> TestUtils.assertEntityReferences(


@@ -23,7 +23,6 @@ import static org.openmetadata.service.util.TestUtils.ADMIN_AUTH_HEADERS;
 import jakarta.ws.rs.client.WebTarget;
 import jakarta.ws.rs.core.Response;
 import java.io.IOException;
-import java.net.URISyntaxException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -59,7 +58,7 @@ public class SearchResourceTest extends OpenMetadataApplicationTest {
   private TopicResourceTest topicResourceTest;

   @BeforeAll
-  public void setup(TestInfo test) throws IOException, URISyntaxException {
+  public void setup(TestInfo test) {
     tableResourceTest = new TableResourceTest();
     topicResourceTest = new TopicResourceTest();
@@ -72,8 +71,7 @@ public class SearchResourceTest extends OpenMetadataApplicationTest {
   }

   @Test
-  public void testLongTableNameWithManyColumnsDoesNotCauseClauseExplosion()
-      throws IOException, InterruptedException {
+  public void testLongTableNameWithManyColumnsDoesNotCauseClauseExplosion() throws IOException {
     String longTableName = "int_snowplow_experiment_evaluation_detailed_analytics_processing";
     List<Column> manyColumns = createManyTableColumns();


@@ -2145,7 +2145,7 @@ public class UserResourceTest extends EntityResourceTest<User, CreateUser> {
   }

   @Test
-  void test_botActivityNotTracked() throws HttpResponseException, InterruptedException {
+  void test_botActivityNotTracked() throws HttpResponseException {
     // Test that bot activity is not tracked
     // We'll use a simple approach: update a bot user's activity time directly
     // and verify it doesn't show in online users (assuming the query filters out bots)


@@ -444,17 +444,10 @@ class ElasticSearchRBACConditionEvaluatorTest {
         count.incrementAndGet();
         countBoolQueries(node.get("bool"), count);
       } else {
-        node.fields()
-            .forEachRemaining(
-                entry -> {
-                  countBoolQueries(entry.getValue(), count);
-                });
+        node.fields().forEachRemaining(entry -> countBoolQueries(entry.getValue(), count));
       }
     } else if (node.isArray()) {
-      node.forEach(
-          element -> {
-            countBoolQueries(element, count);
-          });
+      node.forEach(element -> countBoolQueries(element, count));
     }
   }
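// The counting helper after the cleanup above, reconstructed for context: the
// single-statement forEach bodies no longer need braces. Types come from
// Jackson's JsonNode API; the surrounding bool-detection condition is assumed.
import com.fasterxml.jackson.databind.JsonNode;
import java.util.concurrent.atomic.AtomicInteger;

class BoolQueryCounter {
  static void countBoolQueries(JsonNode node, AtomicInteger count) {
    if (node.isObject()) {
      if (node.has("bool")) { // assumed condition for detecting a bool query
        count.incrementAndGet();
        countBoolQueries(node.get("bool"), count);
      } else {
        node.fields().forEachRemaining(entry -> countBoolQueries(entry.getValue(), count));
      }
    } else if (node.isArray()) {
      node.forEach(element -> countBoolQueries(element, count));
    }
  }
}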


@@ -43,7 +43,7 @@ class MultiUrlJwkProviderTest {
   }

   @Test
-  void testLocalJwkProviderReturnsKeyForMatchingKid() throws Exception {
+  void testLocalJwkProviderReturnsKeyForMatchingKid() {
     // Setup mock JWTTokenGenerator
     JWKSKey jwksKey = mock(JWKSKey.class);
     when(jwksKey.getKid()).thenReturn(LOCAL_KID);


@@ -5,12 +5,10 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.mockito.Mockito.when;
 import static org.openmetadata.common.utils.CommonUtil.listOf;

-import com.github.fge.jsonpatch.JsonPatchException;
 import jakarta.json.Json;
 import jakarta.json.JsonArray;
 import jakarta.json.JsonPatch;
 import jakarta.json.JsonReader;
-import java.io.IOException;
 import java.io.StringReader;
 import java.util.Set;
 import java.util.UUID;
@@ -87,7 +85,7 @@ class JsonPatchUtilsTest {
   }

   @Test
-  void testAddClassificationTag() throws JsonPatchException, IOException {
+  void testAddClassificationTag() {
     // Create a patch to add a new Classification tag
     String patchString =
         "[\n"
@@ -118,7 +116,7 @@ class JsonPatchUtilsTest {
   }

   @Test
-  void testRemoveGlossaryTag() throws JsonPatchException, IOException {
+  void testRemoveGlossaryTag() {
     // Create a patch to remove the Glossary tag
     String patchString =
         "[\n"
@@ -143,7 +141,7 @@ class JsonPatchUtilsTest {
   }

   @Test
-  void testAddClassificationAndRemoveGlossaryTag() throws IOException {
+  void testAddClassificationAndRemoveGlossaryTag() {
     // Create a patch to add a Classification tag and remove a Glossary tag
     String patchString =
         "[\n"
@@ -179,7 +177,7 @@ class JsonPatchUtilsTest {
   }

   @Test
-  void testReplaceTierTag() throws JsonPatchException, IOException {
+  void testReplaceTierTag() {
     // Create a patch to replace the Tier tag
     String patchString =
         "[\n"
@@ -205,7 +203,7 @@ class JsonPatchUtilsTest {
   }

   @Test
-  void testModifyNonTagField() throws JsonPatchException, IOException {
+  void testModifyNonTagField() {
     // Create a patch to modify a non-tag field (e.g., description)
     String patchString =
         "[\n"
@@ -231,7 +229,7 @@ class JsonPatchUtilsTest {
   }

   @Test
-  void testAddClassificationTagAtColumn() throws Exception {
+  void testAddClassificationTagAtColumn() {
     // Create a patch to add a new Classification tag
     String patchString =
         "[\n"
@@ -280,7 +278,7 @@ class JsonPatchUtilsTest {
   }

   @Test
-  void testAddCertificationTag() throws JsonPatchException, IOException {
+  void testAddCertificationTag() {
     // Create a patch to add a new Certification tag
     long currentTime = System.currentTimeMillis();
     String patchString = getPatchString(currentTime, " \"op\": \"add\",\n");
@@ -300,7 +298,7 @@ class JsonPatchUtilsTest {
   }

   @Test
-  void testReplaceCertificationTag() throws JsonPatchException, IOException {
+  void testReplaceCertificationTag() {
     // Create a patch to replace the Certification tag
     long currentTime = System.currentTimeMillis();
     String patchString = getPatchString(currentTime, " \"op\": \"replace\",\n");
@@ -320,7 +318,7 @@ class JsonPatchUtilsTest {
   }

   @Test
-  void testRemoveCertificationTag() throws JsonPatchException, IOException {
+  void testRemoveCertificationTag() {
     // Create a patch to remove the Certification tag
     String patchString =
         """