author     Joel Fischer <joeljfischer@gmail.com>    2017-09-07 15:13:51 -0400
committer  Joel Fischer <joeljfischer@gmail.com>    2017-09-07 15:13:51 -0400
commit     b29a226cb4511d4c3a5943d4c6d14935eae4473e (patch)
tree       a78a570807c78b0867d61915913082c2b5ac78d5
parent     8417139960efec24e81995c02337d37bb49ab095 (diff)
download   sdl_ios-b29a226cb4511d4c3a5943d4c6d14935eae4473e.tar.gz
RPCs properly test equality
* Fixed streaming media manager bugs in determining formats to start with
* Fixed broken tests
-rw-r--r--  SmartDeviceLink/SDLRPCStruct.m                                                 16
-rw-r--r--  SmartDeviceLink/SDLStreamingMediaLifecycleManager.m                             4
-rw-r--r--  SmartDeviceLinkTests/RPCSpecs/StructSpecs/SDLVideoStreamingCapabilitySpec.m     4
-rw-r--r--  SmartDeviceLinkTests/SDLStreamingMediaLifecycleManagerSpec.m                  346
4 files changed, 194 insertions(+), 176 deletions(-)
diff --git a/SmartDeviceLink/SDLRPCStruct.m b/SmartDeviceLink/SDLRPCStruct.m
index 01b514f24..b6a588823 100644
--- a/SmartDeviceLink/SDLRPCStruct.m
+++ b/SmartDeviceLink/SDLRPCStruct.m
@@ -82,6 +82,22 @@ NS_ASSUME_NONNULL_BEGIN
return newStruct;
}
+- (BOOL)isEqualToRPC:(SDLRPCStruct *)rpc {
+ return [rpc->store isEqualToDictionary:self->store];
+}
+
+- (BOOL)isEqual:(id)object {
+ if (self == object) {
+ return YES;
+ }
+
+ if (![object isMemberOfClass:self.class]) {
+ return NO;
+ }
+
+ return [self isEqualToRPC:(SDLRPCStruct *)object];
+}
+
@end
NS_ASSUME_NONNULL_END
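
The hunk above gives every SDLRPCStruct value-based equality: -isEqual: first checks pointer identity, then requires the exact same class via isMemberOfClass:, and finally defers to -isEqualToRPC:, which compares the underlying store dictionaries. A minimal usage sketch, assuming the SmartDeviceLink umbrella header; the codec/protocol constant names are assumptions, not shown in this diff:

#import <SmartDeviceLink/SmartDeviceLink.h>

// Two structs built from identical payloads now compare equal, because
// -isEqual: falls through to -isEqualToRPC:, which compares the backing
// store dictionaries rather than object identity.
SDLVideoStreamingFormat *a = [[SDLVideoStreamingFormat alloc] init];
a.codec = SDLVideoStreamingCodecH264;        // assumed constant name
a.protocol = SDLVideoStreamingProtocolRAW;   // assumed constant name

SDLVideoStreamingFormat *b = [[SDLVideoStreamingFormat alloc] init];
b.codec = SDLVideoStreamingCodecH264;
b.protocol = SDLVideoStreamingProtocolRAW;

BOOL sameValue = [a isEqual:b];               // YES: same class, equal stores
BOOL differentClass = [a isEqual:@"H264"];    // NO: isMemberOfClass: check fails

Note that the hunk overrides -isEqual: without a matching -hash override; equal objects should normally also return equal hashes if these structs end up as NSSet members or NSDictionary keys.
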
diff --git a/SmartDeviceLink/SDLStreamingMediaLifecycleManager.m b/SmartDeviceLink/SDLStreamingMediaLifecycleManager.m
index 5f7c2ebbf..5a5501402 100644
--- a/SmartDeviceLink/SDLStreamingMediaLifecycleManager.m
+++ b/SmartDeviceLink/SDLStreamingMediaLifecycleManager.m
@@ -665,8 +665,10 @@ typedef void(^SDLVideoCapabilityResponse)(SDLVideoStreamingCapability *_Nullable
*/
- (void)sdl_sendVideoStartService {
while (self.preferredFormatIndex < self.preferredFormats.count) {
- if (![self.supportedFormats containsObject:self.preferredFormats[self.preferredResolutionIndex]]) {
+ if (![self.supportedFormats containsObject:self.preferredFormats[self.preferredFormatIndex]]) {
self.preferredFormatIndex++;
+ } else {
+ break;
}
}
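
Two fixes land in this loop: the supported-format check previously indexed preferredFormats with preferredResolutionIndex (the wrong counter), and the loop never terminated once a supported format was found. With the corrected index and the added break, the manager walks the preferred list until it reaches the first format the module also supports. A standalone sketch of the same search, using illustrative names rather than the SDK's actual properties:

#import <Foundation/Foundation.h>

// Advance past preferred formats the head unit does not support and stop at
// the first mutually supported one. containsObject: relies on -isEqual:,
// which is why the SDLRPCStruct equality change above matters here.
static NSUInteger sdl_firstSupportedFormatIndex(NSArray *preferredFormats,
                                                NSArray *supportedFormats,
                                                NSUInteger startIndex) {
    NSUInteger index = startIndex;
    while (index < preferredFormats.count) {
        if (![supportedFormats containsObject:preferredFormats[index]]) {
            index++;   // not supported; try the next preferred format
        } else {
            break;     // first supported format found; keep this index
        }
    }
    return index;      // equals preferredFormats.count if nothing matched
}
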
diff --git a/SmartDeviceLinkTests/RPCSpecs/StructSpecs/SDLVideoStreamingCapabilitySpec.m b/SmartDeviceLinkTests/RPCSpecs/StructSpecs/SDLVideoStreamingCapabilitySpec.m
index 193c5dda6..ce42ce9d2 100644
--- a/SmartDeviceLinkTests/RPCSpecs/StructSpecs/SDLVideoStreamingCapabilitySpec.m
+++ b/SmartDeviceLinkTests/RPCSpecs/StructSpecs/SDLVideoStreamingCapabilitySpec.m
@@ -60,12 +60,12 @@ describe(@"Initialization tests", ^{
expect(testStruct.supportedFormats).to(beNil());
});
- it(@"Should initialize correctly with initWithVideoStreaming:(SDLImageResolution *)preferredResolution (NSNumber *)maxBitrate (NSArray<SDLVideoStreamingFormat *> *)suportedFormats", ^ {
+ it(@"Should initialize correctly with initWithVideoStreaming:maxBitrate:suportedFormats", ^ {
SDLImageResolution* resolution = [[SDLImageResolution alloc] init];
resolution.resolutionWidth = @600;
resolution.resolutionHeight = @500;
- NSNumber *maxBitrate = @100;
+ int32_t maxBitrate = 100;
NSNumber *hapticDataSupported = @YES;
SDLVideoStreamingFormat *format1 = [[SDLVideoStreamingFormat alloc] init];
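
The spec's example name is trimmed to a selector-style form, and maxBitrate changes from a boxed NSNumber (@100) to a plain int32_t (100), matching an initializer that takes a scalar bitrate. Illustrative only: the convenience initializer's exact selector is not shown in this hunk, so the sketch below sets the equivalent properties directly, with property names assumed from the surrounding spec:

// The RPC struct stores maxBitrate as an NSNumber, so a scalar argument is
// boxed at the boundary; the test now passes the scalar form.
SDLImageResolution *resolution = [[SDLImageResolution alloc] init];
resolution.resolutionWidth = @600;
resolution.resolutionHeight = @500;

int32_t maxBitrate = 100;

SDLVideoStreamingCapability *capability = [[SDLVideoStreamingCapability alloc] init];
capability.preferredResolution = resolution;   // assumed property name
capability.maxBitrate = @(maxBitrate);         // boxed when stored on the struct
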
diff --git a/SmartDeviceLinkTests/SDLStreamingMediaLifecycleManagerSpec.m b/SmartDeviceLinkTests/SDLStreamingMediaLifecycleManagerSpec.m
index 63288319b..e5a47527f 100644
--- a/SmartDeviceLinkTests/SDLStreamingMediaLifecycleManagerSpec.m
+++ b/SmartDeviceLinkTests/SDLStreamingMediaLifecycleManagerSpec.m
@@ -34,7 +34,7 @@
QuickSpecBegin(SDLStreamingMediaLifecycleManagerSpec)
-fdescribe(@"the streaming media manager", ^{
+describe(@"the streaming media manager", ^{
__block SDLStreamingMediaLifecycleManager *streamingLifecycleManager = nil;
__block SDLStreamingMediaConfiguration *testConfiguration = [SDLStreamingMediaConfiguration insecureConfiguration];
__block SDLFakeStreamingManagerDataSource *testDataSource = [[SDLFakeStreamingManagerDataSource alloc] init];
@@ -47,7 +47,7 @@ fdescribe(@"the streaming media manager", ^{
SDLRPCNotificationNotification *notification = [[SDLRPCNotificationNotification alloc] initWithName:SDLDidChangeHMIStatusNotification object:self rpcNotification:hmiStatus];
[[NSNotificationCenter defaultCenter] postNotification:notification];
- [NSThread sleepForTimeInterval:0.1];
+ [NSThread sleepForTimeInterval:0.3];
};
beforeEach(^{
@@ -218,7 +218,7 @@ fdescribe(@"the streaming media manager", ^{
[testConnectionManager respondToLastRequestWithResponse:response];
});
- fit(@"should have correct data from the data source", ^{
+ it(@"should have correct data from the data source", ^{
// Correct formats should be retrieved from the data source
expect(streamingLifecycleManager.preferredResolutions).to(haveCount(1));
expect(streamingLifecycleManager.preferredResolutions.firstObject.resolutionWidth).to(equal(resolution.resolutionWidth));
@@ -228,195 +228,195 @@ fdescribe(@"the streaming media manager", ^{
expect(streamingLifecycleManager.preferredFormats.firstObject.codec).to(equal(testDataSource.extraFormat.codec));
expect(streamingLifecycleManager.preferredFormats.firstObject.protocol).to(equal(testDataSource.extraFormat.protocol));
});
- });
- });
-
- describe(@"if the app state is active", ^{
- __block id streamStub = nil;
-
- beforeEach(^{
- streamStub = OCMPartialMock(streamingLifecycleManager);
-
- OCMStub([streamStub isStreamingSupported]).andReturn(YES);
- [streamingLifecycleManager.appStateMachine setToState:SDLAppStateActive fromOldState:nil callEnterTransition:NO];
- });
-
- describe(@"and both streams are open", ^{
- beforeEach(^{
- [streamingLifecycleManager.audioStreamStateMachine setToState:SDLAudioStreamStateReady fromOldState:nil callEnterTransition:NO];
- [streamingLifecycleManager.videoStreamStateMachine setToState:SDLVideoStreamStateReady fromOldState:nil callEnterTransition:NO];
- });
-
- describe(@"and the hmi state is limited", ^{
+ describe(@"if the app state is active", ^{
+ __block id streamStub = nil;
+
beforeEach(^{
- streamingLifecycleManager.hmiLevel = SDLHMILevelLimited;
+ streamStub = OCMPartialMock(streamingLifecycleManager);
+
+ OCMStub([streamStub isStreamingSupported]).andReturn(YES);
+
+ [streamingLifecycleManager.appStateMachine setToState:SDLAppStateActive fromOldState:nil callEnterTransition:NO];
});
-
- describe(@"and the hmi state changes to", ^{
- context(@"none", ^{
- beforeEach(^{
- sendNotificationForHMILevel(SDLHMILevelNone);
- });
-
- it(@"should close only the video stream", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateShuttingDown));
- });
+
+ describe(@"and both streams are open", ^{
+ beforeEach(^{
+ [streamingLifecycleManager.audioStreamStateMachine setToState:SDLAudioStreamStateReady fromOldState:nil callEnterTransition:NO];
+ [streamingLifecycleManager.videoStreamStateMachine setToState:SDLVideoStreamStateReady fromOldState:nil callEnterTransition:NO];
});
-
- context(@"background", ^{
+
+ describe(@"and the hmi state is limited", ^{
beforeEach(^{
- sendNotificationForHMILevel(SDLHMILevelBackground);
- });
-
- it(@"should close only the video stream", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateShuttingDown));
+ streamingLifecycleManager.hmiLevel = SDLHMILevelLimited;
});
- });
-
- context(@"limited", ^{
- beforeEach(^{
- sendNotificationForHMILevel(SDLHMILevelLimited);
+
+ describe(@"and the hmi state changes to", ^{
+ context(@"none", ^{
+ beforeEach(^{
+ sendNotificationForHMILevel(SDLHMILevelNone);
+ });
+
+ it(@"should close only the video stream", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateShuttingDown));
+ });
+ });
+
+ context(@"background", ^{
+ beforeEach(^{
+ sendNotificationForHMILevel(SDLHMILevelBackground);
+ });
+
+ it(@"should close only the video stream", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateShuttingDown));
+ });
+ });
+
+ context(@"limited", ^{
+ beforeEach(^{
+ sendNotificationForHMILevel(SDLHMILevelLimited);
+ });
+
+ it(@"should not close either stream", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateReady));
+ });
+ });
+
+ context(@"full", ^{
+ beforeEach(^{
+ sendNotificationForHMILevel(SDLHMILevelFull);
+ });
+
+ it(@"should not close either stream", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateReady));
+ });
+ });
});
-
- it(@"should not close either stream", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateReady));
+
+ describe(@"and the app state changes to", ^{
+ context(@"inactive", ^{
+ beforeEach(^{
+ [streamingLifecycleManager.appStateMachine setToState:SDLAppStateInactive fromOldState:nil callEnterTransition:YES];
+ });
+
+ it(@"should flag to restart the video stream", ^{
+ expect(@(streamingLifecycleManager.shouldRestartVideoStream)).to(equal(@YES));
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateReady));
+ });
+ });
});
});
-
- context(@"full", ^{
+
+ describe(@"and the hmi state is full", ^{
beforeEach(^{
- sendNotificationForHMILevel(SDLHMILevelFull);
+ streamingLifecycleManager.hmiLevel = SDLHMILevelFull;
});
-
- it(@"should not close either stream", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateReady));
+
+ context(@"and hmi state changes to none", ^{
+ beforeEach(^{
+ sendNotificationForHMILevel(SDLHMILevelNone);
+ });
+
+ it(@"should close only the video stream", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateShuttingDown));
+ });
});
- });
- });
-
- describe(@"and the app state changes to", ^{
- context(@"inactive", ^{
- beforeEach(^{
- [streamingLifecycleManager.appStateMachine setToState:SDLAppStateInactive fromOldState:nil callEnterTransition:YES];
+
+ context(@"and hmi state changes to background", ^{
+ beforeEach(^{
+ sendNotificationForHMILevel(SDLHMILevelBackground);
+ });
+
+ it(@"should close only the video stream", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateShuttingDown));
+ });
});
-
- it(@"should flag to restart the video stream", ^{
- expect(@(streamingLifecycleManager.shouldRestartVideoStream)).to(equal(@YES));
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateReady));
+
+ context(@"and hmi state changes to limited", ^{
+ beforeEach(^{
+ sendNotificationForHMILevel(SDLHMILevelLimited);
+ });
+
+ it(@"should not close either stream", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateReady));
+ });
});
- });
- });
- });
-
- describe(@"and the hmi state is full", ^{
- beforeEach(^{
- streamingLifecycleManager.hmiLevel = SDLHMILevelFull;
- });
-
- context(@"and hmi state changes to none", ^{
- beforeEach(^{
- sendNotificationForHMILevel(SDLHMILevelNone);
- });
-
- it(@"should close only the video stream", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateShuttingDown));
- });
- });
-
- context(@"and hmi state changes to background", ^{
- beforeEach(^{
- sendNotificationForHMILevel(SDLHMILevelBackground);
- });
-
- it(@"should close only the video stream", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateShuttingDown));
- });
- });
-
- context(@"and hmi state changes to limited", ^{
- beforeEach(^{
- sendNotificationForHMILevel(SDLHMILevelLimited);
- });
-
- it(@"should not close either stream", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateReady));
- });
- });
- context(@"and hmi state changes to full", ^{
- beforeEach(^{
- sendNotificationForHMILevel(SDLHMILevelFull);
- });
-
- it(@"should not close either stream", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateReady));
- });
- });
- });
- });
-
- describe(@"and both streams are closed", ^{
- beforeEach(^{
- [streamingLifecycleManager.audioStreamStateMachine setToState:SDLAudioStreamStateStopped fromOldState:nil callEnterTransition:NO];
- [streamingLifecycleManager.videoStreamStateMachine setToState:SDLVideoStreamStateStopped fromOldState:nil callEnterTransition:NO];
- });
-
- describe(@"and the hmi state is none", ^{
- beforeEach(^{
- streamingLifecycleManager.hmiLevel = SDLHMILevelNone;
- });
-
- context(@"and hmi state changes to none", ^{
- beforeEach(^{
- sendNotificationForHMILevel(SDLHMILevelNone);
- });
-
- it(@"should only start the audio stream", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateStarting));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateStopped));
- });
- });
-
- context(@"and hmi state changes to background", ^{
- beforeEach(^{
- sendNotificationForHMILevel(SDLHMILevelBackground);
- });
-
- it(@"should only start the audio stream", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateStarting));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateStopped));
- });
- });
-
- context(@"and hmi state changes to limited", ^{
- beforeEach(^{
- sendNotificationForHMILevel(SDLHMILevelLimited);
- });
-
- it(@"should start both streams", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateStarting));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateStarting));
+ context(@"and hmi state changes to full", ^{
+ beforeEach(^{
+ sendNotificationForHMILevel(SDLHMILevelFull);
+ });
+
+ it(@"should not close either stream", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateReady));
+ });
+ });
});
});
-
- context(@"and hmi state changes to full", ^{
+
+ describe(@"and both streams are closed", ^{
beforeEach(^{
- sendNotificationForHMILevel(SDLHMILevelFull);
+ [streamingLifecycleManager.audioStreamStateMachine setToState:SDLAudioStreamStateStopped fromOldState:nil callEnterTransition:NO];
+ [streamingLifecycleManager.videoStreamStateMachine setToState:SDLVideoStreamStateStopped fromOldState:nil callEnterTransition:NO];
});
-
- it(@"should start both streams", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateStarting));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateStarting));
+
+ describe(@"and the hmi state is none", ^{
+ beforeEach(^{
+ streamingLifecycleManager.hmiLevel = SDLHMILevelNone;
+ });
+
+ context(@"and hmi state changes to none", ^{
+ beforeEach(^{
+ sendNotificationForHMILevel(SDLHMILevelNone);
+ });
+
+ it(@"should only start the audio stream", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateStarting));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateStopped));
+ });
+ });
+
+ context(@"and hmi state changes to background", ^{
+ beforeEach(^{
+ sendNotificationForHMILevel(SDLHMILevelBackground);
+ });
+
+ it(@"should only start the audio stream", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateStarting));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateStopped));
+ });
+ });
+
+ context(@"and hmi state changes to limited", ^{
+ beforeEach(^{
+ sendNotificationForHMILevel(SDLHMILevelLimited);
+ });
+
+ it(@"should start both streams", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateStarting));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateStarting));
+ });
+ });
+
+ context(@"and hmi state changes to full", ^{
+ beforeEach(^{
+ sendNotificationForHMILevel(SDLHMILevelFull);
+ });
+
+ it(@"should start both streams", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateStarting));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateStarting));
+ });
+ });
});
});
});
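
Most of this spec diff is re-indentation, but two behavioral changes matter: the fdescribe/fit focus markers are removed, and the post-notification sleep grows from 0.1 s to 0.3 s so the HMI-level notification has time to drive the state machines before the expectations run. In Quick, any f-prefixed group or example focuses the run and everything unfocused is silently skipped, which is why a stray fdescribe left in a committed spec disables the rest of the suite. A tiny illustration of the focusing behavior:

// While any fdescribe/fit exists, Quick runs only the focused examples.
fdescribe(@"the streaming media manager", ^{   // only this group executes
    it(@"runs", ^{ /* ... */ });
});

describe(@"some other spec", ^{                // silently skipped until the focus is removed
    it(@"does not run while a focus exists", ^{ /* ... */ });
});
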