Skip to content

Commit

Permalink
Merge branch 'release-1.161.0'
Browse files Browse the repository at this point in the history
  • Loading branch information
pablomuri committed Aug 19, 2024
2 parents 2bf225c + cfe6afa commit f3cce32
Show file tree
Hide file tree
Showing 53 changed files with 1,160 additions and 1,190 deletions.
2 changes: 1 addition & 1 deletion .hooks/pre-commit
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ LC_ALL=C

local_branch="$(git rev-parse --abbrev-ref HEAD)"

valid_branch_regex="^(docs|feature|fix|test|release|improvement|hotfix|chore)\/[a-z0-9._-]+$"
valid_branch_regex="^(docs|feature|fix|test|release|improvement|hotfix|chore)[\/-][a-z0-9._-]+$"

message="The branch name is wrong. Branch names in this project must adhere to this contract: $valid_branch_regex. You should rename your branch to a valid name and try again."

Expand Down
16 changes: 16 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,4 +1,20 @@
# Changelog
## v1.161.0 (08/08/2024)

### Bug Fixes:
- [#5711](https://github.com/telstra/open-kilda/pull/5711) Add a delta to compare all the metric rates (Issue: [#5638](https://github.com/telstra/open-kilda/issues/5638))
- [#5716](https://github.com/telstra/open-kilda/pull/5716) Add split kafka message support for flow validation process (Issue: [#5718](https://github.com/telstra/open-kilda/issues/5718))

### Improvements:
- [#5705](https://github.com/telstra/open-kilda/pull/5705) [TEST]: Regular Flow: New interaction approach: Func-tests: Eliminating flowHelper(v1/v2) usage [**tests**]
- [#5709](https://github.com/telstra/open-kilda/pull/5709) Update git hook
- [#5713](https://github.com/telstra/open-kilda/pull/5713) [TEST]: Regular Flow: Performance tests: Eliminating flowHelper(v1/v2) usage [**tests**]
- [#5722](https://github.com/telstra/open-kilda/pull/5722) [TEST]: Issue 5699: Flaky test: Flow with protected path(reroute) [**tests**]


For the complete list of changes, check out [the commit log](https://github.com/telstra/open-kilda/compare/v1.160.0...v1.161.0).

---

## v1.160.0 (01/08/2024)

Expand Down
3 changes: 3 additions & 0 deletions src-java/floodlight-service/floodlight-api/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,12 @@ dependencies {
implementation 'com.fasterxml.jackson.core:jackson-databind'
implementation 'com.google.guava:guava'
implementation 'org.apache.commons:commons-lang3'
implementation 'org.apache.commons:commons-collections4'
implementation 'org.slf4j:slf4j-api'

testImplementation 'org.junit.jupiter:junit-jupiter-api'
testImplementation 'org.junit.jupiter:junit-jupiter-engine'
testImplementation 'org.mockito:mockito-junit-jupiter'

compileOnly 'org.projectlombok:lombok'
annotationProcessor 'org.projectlombok:lombok'
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
/* Copyright 2024 Telstra Open Source
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.openkilda.floodlight.api.response;

import org.openkilda.messaging.MessageContext;
import org.openkilda.messaging.MessageData;

import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NonNull;
import lombok.ToString;
import org.apache.commons.collections4.CollectionUtils;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

@Getter
@ToString(callSuper = true)
@EqualsAndHashCode(callSuper = true)
public class ChunkedSpeakerDataResponse extends SpeakerDataResponse {

    // Unique id of this chunk; either the bare correlation id (single message)
    // or "<index> : <correlationId>" when the payload was split into several chunks.
    @JsonProperty("message_id")
    private String messageId;

    // Total number of chunks produced for the original payload.
    @JsonProperty("total_messages")
    private int totalMessages;

    public ChunkedSpeakerDataResponse(@JsonProperty("data") MessageData data,
                                      @JsonProperty("message_context") @NonNull MessageContext messageContext,
                                      @JsonProperty("total_messages") int totalMessages) {
        super(messageContext, data);
        this.messageId = messageContext.getCorrelationId();
        this.totalMessages = totalMessages;
    }

    ChunkedSpeakerDataResponse(MessageData payload, MessageContext context, int chunkCount, int chunkIndex) {
        this(payload, context, chunkCount);
        // Prefix the correlation id with the chunk index so each chunk id is unique.
        this.messageId = String.join(" : ", String.valueOf(chunkIndex),
                context.getCorrelationId());
    }

    /**
     * Builds a numbered {@link ChunkedSpeakerDataResponse} per element of the given collection.
     * A null or empty collection yields a single response with null data and a zero chunk count.
     */
    public static List<ChunkedSpeakerDataResponse> createChunkedList(
            Collection<? extends MessageData> dataCollection, MessageContext messageContext) {

        if (CollectionUtils.isEmpty(dataCollection)) {
            return Collections.singletonList(new ChunkedSpeakerDataResponse(null, messageContext, 0));
        }

        int chunkCount = dataCollection.size();
        List<ChunkedSpeakerDataResponse> chunks = new ArrayList<>(chunkCount);
        int index = 0;
        for (MessageData payload : dataCollection) {
            chunks.add(new ChunkedSpeakerDataResponse(payload, messageContext, chunkCount, index));
            index++;
        }
        return chunks;
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@
/* Copyright 2024 Telstra Open Source
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.openkilda.floodlight.api.response;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.mockito.Mockito.mock;

import org.openkilda.messaging.MessageContext;
import org.openkilda.messaging.MessageData;

import org.junit.jupiter.api.Test;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class ChunkedSpeakerDataResponseTest {

    /**
     * A null collection must be rejected: the empty/null branch builds a response with
     * null data, and the non-null guard on the data field raises a NullPointerException.
     */
    @Test
    void testCreateChunkedListWithNullCollection() {
        MessageContext context = new MessageContext("Correlation Id");

        NullPointerException thrown = assertThrows(NullPointerException.class, () ->
                ChunkedSpeakerDataResponse.createChunkedList(null, context),
                "Expected createChunkedList() to throw, but it didn't"
        );

        assertEquals("data is marked non-null but is null", thrown.getMessage(),
                "Exception message should match");
    }

    /**
     * An empty collection goes through the same branch as null input and must also
     * end in a NullPointerException from the non-null guard on the data field.
     */
    @Test
    void testCreateChunkedListWithEmptyCollection() {
        MessageContext context = new MessageContext("Correlation Id");

        NullPointerException thrown = assertThrows(NullPointerException.class, () ->
                ChunkedSpeakerDataResponse.createChunkedList(Collections.emptyList(), context),
                "Expected createChunkedList() to throw, but it didn't"
        );

        assertEquals("data is marked non-null but is null", thrown.getMessage(),
                "Exception message should match");
    }

    /**
     * A single-element collection must produce exactly one response whose message id
     * carries chunk index 0 and whose total count is 1.
     */
    @Test
    void testCreateChunkedListWithOneItem() {
        MessageContext context = new MessageContext("Correlation Id");
        MessageData payload = mock(MessageData.class);

        List<ChunkedSpeakerDataResponse> result
                = ChunkedSpeakerDataResponse.createChunkedList(Collections.singletonList(payload), context);

        assertEquals(1, result.size(), "Result list should contain one element");
        ChunkedSpeakerDataResponse chunk = result.get(0);
        assertSame(payload, chunk.getData(), "Data should be the same as the input");
        assertEquals(1, chunk.getTotalMessages(), "Total messages should be 1");
        assertEquals("0 : Correlation Id", chunk.getMessageId(),
                "Message ID should be '0 : Correlation Id'");
    }

    /**
     * Every element of a multi-element collection must get its own response, each with
     * the full chunk count and a message id carrying its position in the collection.
     */
    @Test
    void testCreateChunkedListWithMultipleItems() {
        MessageContext context = new MessageContext("mockCorrelationId");
        MessageData firstPayload = mock(MessageData.class);
        MessageData secondPayload = mock(MessageData.class);

        List<MessageData> payloads = new ArrayList<>();
        payloads.add(firstPayload);
        payloads.add(secondPayload);

        List<ChunkedSpeakerDataResponse> result
                = ChunkedSpeakerDataResponse.createChunkedList(payloads, context);

        assertEquals(2, result.size(), "Result list should contain two elements");

        ChunkedSpeakerDataResponse firstChunk = result.get(0);
        assertSame(firstPayload, firstChunk.getData(), "First response data should be mockData1");
        assertEquals(2, firstChunk.getTotalMessages(), "Total messages should be 2");
        assertEquals("0 : mockCorrelationId", firstChunk.getMessageId(),
                "First message ID should be '0 : mockCorrelationId'");

        ChunkedSpeakerDataResponse secondChunk = result.get(1);
        assertSame(secondPayload, secondChunk.getData(), "Second response data should be mockData2");
        assertEquals(2, secondChunk.getTotalMessages(), "Total messages should be 2");
        assertEquals("1 : mockCorrelationId", secondChunk.getMessageId(),
                "Second message ID should be '1 : mockCorrelationId'");
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -283,22 +283,22 @@ private void doDeleteSwitchRules(final CommandMessage message) {
}

private void doDumpRulesRequest(CommandMessage message) {
processDumpRulesRequest(((DumpRulesRequest) message.getData()).getSwitchId(),
dumpRulesRequest(((DumpRulesRequest) message.getData()).getSwitchId(),
buildSenderToNorthbound(message), message);
}

private void doDumpRulesForSwitchManagerRequest(CommandMessage message) {
processDumpRulesRequest(((DumpRulesForSwitchManagerRequest) message.getData()).getSwitchId(),
dumpRulesRequest(((DumpRulesForSwitchManagerRequest) message.getData()).getSwitchId(),
buildSenderToSwitchManager(message), message);
}

private void doDumpRulesForFlowHsRequest(CommandMessage message) {
processDumpRulesRequest(((DumpRulesForFlowHsRequest) message.getData()).getSwitchId(),
dumpRulesRequest(((DumpRulesForFlowHsRequest) message.getData()).getSwitchId(),
buildSenderToFlowHs(message), message);
}

private void processDumpRulesRequest(SwitchId switchId, java.util.function.Consumer<MessageData> sender,
CommandMessage commandMessage) {
private void dumpRulesRequest(SwitchId switchId, java.util.function.Consumer<MessageData> sender,
CommandMessage commandMessage) {
try {
logger.debug("Loading installed rules for switch {}", switchId);
List<OFFlowStatsEntry> flowEntries =
Expand Down Expand Up @@ -508,22 +508,22 @@ private void doDumpPortDescriptionRequest(CommandMessage message) {

private void doDumpMetersRequest(CommandMessage message) {
DumpMetersRequest request = (DumpMetersRequest) message.getData();
dumpMeters(request.getSwitchId(), buildSenderToNorthbound(message), message);
dumpMetersRequest(request.getSwitchId(), buildSenderToNorthbound(message), message);
}

private void doDumpMetersForSwitchManagerRequest(CommandMessage message) {
DumpMetersForSwitchManagerRequest request = (DumpMetersForSwitchManagerRequest) message.getData();
dumpMeters(request.getSwitchId(), buildSenderToSwitchManager(message), message);
dumpMetersRequest(request.getSwitchId(), buildSenderToSwitchManager(message), message);
}

private void doDumpMetersForFlowHsRequest(CommandMessage message) {
DumpMetersForFlowHsRequest request = (DumpMetersForFlowHsRequest) message.getData();
dumpMeters(request.getSwitchId(), buildSenderToFlowHs(message), message);
dumpMetersRequest(request.getSwitchId(), buildSenderToFlowHs(message), message);
}

private void dumpMeters(SwitchId switchId,
java.util.function.Consumer<MessageData> sender,
CommandMessage message) {
private void dumpMetersRequest(SwitchId switchId,
java.util.function.Consumer<MessageData> sender,
CommandMessage message) {
try {
logger.debug("Get all meters for switch {}", switchId);
ISwitchManager switchManager = context.getSwitchManager();
Expand Down Expand Up @@ -627,6 +627,32 @@ private java.util.function.Consumer<MessageData> buildSenderToSwitchManager(Mess
};
}

/**
 * Builds a consumer that publishes response payloads to the speaker->flow-hs kafka topic.
 * Payloads implementing {@code Chunkable} are split using the configured batch size and
 * sent as tracked chunked messages; anything else (and a split that yields no chunks) is
 * sent as a single {@code SpeakerDataResponse}.
 */
private java.util.function.Consumer<MessageData> buildSenderToFlowHs(Message message) {
    IKafkaProducerService producer = getKafkaProducer();
    return payload -> {
        MessageContext responseContext = new MessageContext(message);
        if (!(payload instanceof Chunkable<?>)) {
            // Non-chunkable payloads always travel as one message.
            sendSpeakerDataResponse(producer, message, responseContext, payload);
            return;
        }
        int batchSize = context.getKafkaChannel().getConfig().getMessagesBatchSize();
        List<? extends InfoData> batches = ((Chunkable<?>) payload).split(batchSize);
        if (batches.isEmpty()) {
            // Splitting produced nothing to chunk — fall back to a single response.
            sendSpeakerDataResponse(producer, message, responseContext, payload);
        } else {
            producer.sendChunkedSpeakerDataAndTrack(
                    context.getKafkaSpeakerFlowHsTopic(), responseContext, batches);
        }
    };
}

/**
 * Wraps the given data in a single (non-chunked) {@code SpeakerDataResponse} and publishes it,
 * tracked, to the speaker->flow-hs kafka topic, keyed by the request's correlation id.
 * NOTE(review): the target topic is fixed to the flow-hs topic despite the generic method
 * name — confirm this helper is only used on the flow-hs response path.
 */
private void sendSpeakerDataResponse(IKafkaProducerService producerService, Message message,
MessageContext messageContext, MessageData data) {
SpeakerDataResponse result = new SpeakerDataResponse(messageContext, data);
producerService.sendMessageAndTrack(context.getKafkaSpeakerFlowHsTopic(),
message.getCorrelationId(), result);
}

private java.util.function.Consumer<MessageData> buildSenderToNorthbound(Message message) {
return buildSenderToTopic(context.getKafkaNorthboundTopic(),
message.getCorrelationId(), message.getTimestamp());
Expand All @@ -648,16 +674,6 @@ private java.util.function.Consumer<MessageData> buildSenderToTopic(String kafka
};
}

/**
 * Builds a consumer that publishes each response payload to the speaker->flow-hs kafka topic
 * as a single tracked {@code SpeakerDataResponse}, keyed by the request's correlation id.
 * NOTE(review): this is the removed side of the diff — the replacement version adds
 * {@code Chunkable} splitting; this variant never chunks.
 */
private java.util.function.Consumer<MessageData> buildSenderToFlowHs(Message message) {
IKafkaProducerService producerService = getKafkaProducer();
return data -> {
MessageContext messageContext = new MessageContext(message);
SpeakerDataResponse result = new SpeakerDataResponse(messageContext, data);
producerService.sendMessageAndTrack(context.getKafkaSpeakerFlowHsTopic(),
message.getCorrelationId(), result);
};
}

private void doModifyMeterRequest(CommandMessage message) {
MeterModifyCommandRequest request = (MeterModifyCommandRequest) message.getData();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
import org.openkilda.floodlight.service.IService;
import org.openkilda.messaging.AbstractMessage;
import org.openkilda.messaging.Message;
import org.openkilda.messaging.MessageContext;
import org.openkilda.messaging.info.InfoData;

import java.util.Collection;
Expand All @@ -31,6 +32,9 @@ public interface IKafkaProducerService extends IService {

void sendChunkedMessageAndTrack(String topic, String key, Collection<? extends InfoData> data);

void sendChunkedSpeakerDataAndTrack(String topic, MessageContext messageContext,
Collection<? extends InfoData> data);

void sendMessageAndTrackWithZk(String topic, Message message);

void sendMessageAndTrackWithZk(String topic, String key, Message message);
Expand Down
Loading

0 comments on commit f3cce32

Please sign in to comment.