source: orbit/iOS/Orbit/Orbit/SignalConverter.m @ 7945ecb

Last change on this file since 7945ecb was 7945ecb, checked in by Jonathon Horsman <jonathon@…>, 7 years ago

Change volume detection method

  • Property mode set to 100644
File size: 8.6 KB
//
//  SignalConverter.m
//  orbit
//
//  Created by Jonathon Horsman on 11/07/2013.
//  Copyright (c) 2013 Puzzlebox Productions, LLC. All rights reserved.
//

#import "SignalConverter.h"

#define AUDIO_FILE_NAME @"throttle_hover_ios.wav" // @"iOS_noflip.wav"
#define POOR_SIGNAL_KEY @"poorSignal"
#define ATTENTION_KEY @"eSenseAttention"
#define MEDITATION_KEY @"eSenseMeditation"

#define CHANNEL_A 1

// Converts signals received from the EEG headset to the audio played to fly the helicopter.
//
@implementation SignalConverter {
    // These two arrays hold the values of the thrust which should be applied to the helicopter
    // (by way of volume level through the headphones) at each level of attention and meditation.
    // This could be calculated on the fly, but because it happens ~20 times per second, the
    // better option is to store all the values in arrays and recalculate when the sliders are changed.
    float attentionPower[101];
    float meditationPower[101];

    float signalStrength;                // normalised signal quality, 0..1 (see setSignalStrength:)
    int attentionLevel, meditationLevel; // the latest eSense readings from the headset
    int yaw, throttle, pitch;
}

@synthesize attentionThreshold, meditationThreshold, running, testing;

- (id) init
{
    self = [super init];
    if (self)
    {
        [[TGAccessoryManager sharedTGAccessoryManager] setupManagerWithInterval:0.05];
        [[TGAccessoryManager sharedTGAccessoryManager] setDelegate:self];
        audioPlayer = [[AudioGenerator alloc] init];

        // defaults
        throttle = 80;
        yaw = 78;
        pitch = 31;

        // initialise the audio session - this should only be done once
        AudioSessionInitialize(NULL, NULL, NULL, NULL);
        AudioSessionSetActive(YES);
    }
    return self;
}

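// Called when either threshold slider changes: store the new thresholds and rebuild the power lookup tables.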
- (void) setValuesForAttention:(float) attention meditation:(float) meditation
{
    attentionThreshold = attention;
    meditationThreshold = meditation;
    [self calculatePowerValues];
}

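// Shut everything down: stop the accessory stream if a headset is attached, silence the audio, notify the delegate and deactivate the audio session.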
- (void) appStopped
{
    running = NO;
    if ([TGAccessoryManager sharedTGAccessoryManager].accessory != nil) {
        [[TGAccessoryManager sharedTGAccessoryManager] stopStream];
    }
    [audioPlayer stop];
    if (_delegate != nil) {
        [_delegate appStopped];
    }
    AudioSessionSetActive(NO);
}

#pragma mark - TGAccessoryDelegate methods

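// TGAccessoryDelegate callback carrying the latest ThinkGear readings; it may arrive off the main thread, so hop to the main thread before touching shared state.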
- (void)dataReceived:(NSDictionary *)data {
    [self performSelectorOnMainThread:@selector(updatedSignalReceived:)
                           withObject:data
                        waitUntilDone:NO];
}

// Updated signal received from the EEG headset.
- (void) updatedSignalReceived:(id) data
{
    [self setValuesFromData:data];
    // notify listening delegates of updated values so the UI can be updated
    if (_delegate != nil)
    {
        [_delegate updatedValuesForSignal: signalStrength
                                attention: (float)attentionLevel / 100
                               meditation: (float)meditationLevel / 100
                                    power: [self currentPowerLevel]];
    }
    [self playAudio];
}

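// Pull the three readings of interest out of the ThinkGear data dictionary. A missing key leaves the previous reading in place.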
- (void) setValuesFromData:(id) data
{
    [self setSignalStrength:(NSNumber *)[data valueForKey:POOR_SIGNAL_KEY]];
    [self setAttentionLevel:(NSNumber *)[data valueForKey:ATTENTION_KEY]];
    [self setMeditationLevel:(NSNumber *)[data valueForKey:MEDITATION_KEY]];
}

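// poorSignal is the headset's 0-200 contact-quality metric (0 = clean contact), so invert and normalise it into a 0..1 strength.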
- (void) setSignalStrength:(NSNumber *) value
{
    if (value != nil) {
        signalStrength = (200.0 - [value intValue]) / 200.0;
    }
}

- (void) setAttentionLevel:(NSNumber *) value
{
    if (value != nil) {
        attentionLevel = [value intValue];
    }
}

- (void) setMeditationLevel:(NSNumber *) value
{
    if (value != nil) {
        meditationLevel = [value intValue];
    }
}

// The headset was switched on. Nothing to start here - the data stream is started from startProcessing.
- (void)accessoryDidConnect:(EAAccessory *)accessory {
}

// The headset was switched off (or the Bluetooth signal was dropped).
- (void)accessoryDidDisconnect {
    if (_delegate != nil)
    {
        [_delegate notifyHeadsetDisconnect];
    }
}

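// YES when a ThinkGear accessory is currently paired and visible to the accessory manager.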
- (BOOL) isBluetoothReady
{
    return [[TGAccessoryManager sharedTGAccessoryManager] accessory] != nil;
}

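// Reads the current hardware output volume and reports whether it is at maximum - the flight commands are carried as audio, so anything quieter weakens the control signal.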
- (BOOL) isVolumeMax
{
    Float32 volume;
    UInt32 dataSize;
    AudioSessionGetPropertySize(kAudioSessionProperty_CurrentHardwareOutputVolume, &dataSize);
    AudioSessionGetProperty(kAudioSessionProperty_CurrentHardwareOutputVolume, &dataSize, &volume);
    NSLog(@"Volume is %f", volume);
    return 1.0 == volume;
}

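// YES when the current audio output route is the wired headphone jack (i.e. something is plugged into it).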
- (BOOL) isAudioJackPlugged
{
    UInt32 routeSize;

    // oddly, omitting this call caused an error.
    AudioSessionGetPropertySize(kAudioSessionProperty_AudioRouteDescription, &routeSize);
    CFDictionaryRef desc; // this is the dictionary that will contain the route description

    // make the call to get the audio route description and populate the desc dictionary
    AudioSessionGetProperty(kAudioSessionProperty_AudioRouteDescription, &routeSize, &desc);

    // the dictionary contains 2 keys, for input and output. Get the output array
    CFArrayRef outputs = CFDictionaryGetValue(desc, kAudioSession_AudioRouteKey_Outputs);

    // the output array contains 1 element - a dictionary
    CFDictionaryRef dict = CFArrayGetValueAtIndex(outputs, 0);

    // get the output description from the dictionary
    CFStringRef output = CFDictionaryGetValue(dict, kAudioSession_AudioRouteKey_Type);

    /**
     These are the possible output types:
     kAudioSessionOutputRoute_LineOut
     kAudioSessionOutputRoute_Headphones
     kAudioSessionOutputRoute_BluetoothHFP
     kAudioSessionOutputRoute_BluetoothA2DP
     kAudioSessionOutputRoute_BuiltInReceiver
     kAudioSessionOutputRoute_BuiltInSpeaker
     kAudioSessionOutputRoute_USBAudio
     kAudioSessionOutputRoute_HDMI
     kAudioSessionOutputRoute_AirPlay
     */

    NSLog(@"Output: %@ is %@", output, kAudioSessionOutputRoute_Headphones);
    return CFStringCompare(output, kAudioSessionOutputRoute_Headphones, 0) == kCFCompareEqualTo;
}

#pragma mark start / stop methods

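// Start streaming data from the headset if one is connected and we are not in test mode; returns YES if the stream was started.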
- (BOOL) startProcessing
{
    if (testing) return NO;
    EAAccessory *accessory = [[TGAccessoryManager sharedTGAccessoryManager] accessory];
    if (accessory != nil) {
        running = YES;
        if (_delegate != nil) {
            [_delegate notifyDeviceConnected: accessory.name];
        }
        [[TGAccessoryManager sharedTGAccessoryManager] startStream];
    }
    return running;
}

- (void) stopProcessing
{
    if (running) [self appStopped];
}

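// Play the control tone with the current throttle/yaw/pitch so the user can test the audio output; only allowed when not already flying or testing.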
- (void) playTestSound
{
    if (!running && !testing) {
        testing = YES;
        [audioPlayer playWithThrottle:throttle yaw:yaw pitch:pitch];
    }
}

- (void) stopTestSound
{
    if (testing) {
        testing = NO;
        [audioPlayer stop];
    }
}

#pragma mark internal processing methods

// Calculate the total power level to output through the headphones, a value between zero and 1.
// This is the attention power and meditation power added together, capped at 1.
- (float) currentPowerLevel
{
    float powerLevel = attentionPower[attentionLevel] + meditationPower[meditationLevel];
    if (powerLevel > 1) {
        return 1.0;
    }
    return powerLevel;
}

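// Update the flight-control values that get encoded into the audio signal.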
- (void) setYaw:(int)y throttle:(int)t pitch:(int)p
{
    yaw = y;
    throttle = t;
    pitch = p;
}

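// Play the control tone while the combined power level is above zero, otherwise stop the audio.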
- (void) playAudio
{
    if ([self currentPowerLevel] > 0) {
        [audioPlayer playWithThrottle:throttle yaw:yaw pitch:pitch];
    } else {
        [audioPlayer stop];
    }
}

#pragma mark - display calculation and update methods

// when the user adjusts one of the 2 threshold sliders we recalculate the power values at each of the 101 possible levels
- (void) calculatePowerValues
{
    for (int i = 0; i < 101; i++) {
        attentionPower[i] = [self calculatePowerAt:(float)i/100 withThreshold:attentionThreshold];
    }
    for (int i = 0; i < 101; i++) {
        meditationPower[i] = [self calculatePowerAt:(float)i/100 withThreshold:meditationThreshold];
    }
}

// Convert a value (attention or meditation) into the corresponding power output (which is the volume level)
// given that the threshold is set at the specified value.
// The threshold is the value set by the user as the minimum value (attention or meditation) to be reached
// before the helicopter is given the signal to fly.
// If the value is below the threshold, the returned power will be zero (threshold not yet met).
// Otherwise the returned power value will be between 0 and 1.
- (float) calculatePowerAt:(float)value withThreshold:(float)threshold
{
    if (value < threshold) { // threshold not met
        return 0;
    }
    // e.g. if the threshold is 0.55 and the current value is 0.7:
    // there is 0.45 of the range above the threshold, and the value is 0.15 past the threshold (0.7 - 0.55), which gives 0.33 (0.15 / 0.45)
    return (value - threshold) / (1 - threshold);
}

@end