1 /***
2 This file is part of PulseAudio.
3
4 Copyright 2004-2006 Lennart Poettering
5 Copyright 2006 Pierre Ossman <ossman@cendio.se> for Cendio AB
6
7 PulseAudio is free software; you can redistribute it and/or modify
8 it under the terms of the GNU Lesser General Public License as published
9 by the Free Software Foundation; either version 2.1 of the License,
10 or (at your option) any later version.
11
12 PulseAudio is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU Lesser General Public License
18 along with PulseAudio; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
20 USA.
21 ***/
22
23 #ifdef HAVE_CONFIG_H
24 #include <config.h>
25 #endif
26
27 #include <stdio.h>
28 #include <stdlib.h>
29
30 #include <pulse/format.h>
31 #include <pulse/utf8.h>
32 #include <pulse/xmalloc.h>
33 #include <pulse/timeval.h>
34 #include <pulse/util.h>
35 #include <pulse/rtclock.h>
36 #include <pulse/internal.h>
37
38 #include <pulsecore/core-util.h>
39 #include <pulsecore/source-output.h>
40 #include <pulsecore/namereg.h>
41 #include <pulsecore/core-subscribe.h>
42 #include <pulsecore/log.h>
43 #include <pulsecore/sample-util.h>
44 #include <pulsecore/flist.h>
45
46 #include "source.h"
47
48 #define ABSOLUTE_MIN_LATENCY (500)
49 #define ABSOLUTE_MAX_LATENCY (10*PA_USEC_PER_SEC)
50 #define DEFAULT_FIXED_LATENCY (250*PA_USEC_PER_MSEC)
51
52 PA_DEFINE_PUBLIC_CLASS(pa_source, pa_msgobject);
53
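/* A queued hardware volume change: the volume to write and the time at which
 * to apply it. Entries are kept in a per-source linked list (see
 * pa_source_volume_change_push()/_flush() declared below). */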
54 struct pa_source_volume_change {
55 pa_usec_t at;
56 pa_cvolume hw_volume;
57
58 PA_LLIST_FIELDS(pa_source_volume_change);
59 };
60
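/* Message payload used when switching the active port: the requested port and
 * a slot for the handler's return value. */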
61 struct source_message_set_port {
62 pa_device_port *port;
63 int ret;
64 };
65
66 static void source_free(pa_object *o);
67
68 static void pa_source_volume_change_push(pa_source *s);
69 static void pa_source_volume_change_flush(pa_source *s);
70
71 pa_source_new_data* pa_source_new_data_init(pa_source_new_data *data) {
72 pa_assert(data);
73
74 pa_zero(*data);
75 data->proplist = pa_proplist_new();
76
77 return data;
78 }
79
80 void pa_source_new_data_set_name(pa_source_new_data *data, const char *name) {
81 pa_assert(data);
82
83 pa_xfree(data->name);
84 data->name = pa_xstrdup(name);
85 }
86
87 void pa_source_new_data_set_sample_spec(pa_source_new_data *data, const pa_sample_spec *spec) {
88 pa_assert(data);
89
90 if ((data->sample_spec_is_set = !!spec))
91 data->sample_spec = *spec;
92 }
93
94 void pa_source_new_data_set_channel_map(pa_source_new_data *data, const pa_channel_map *map) {
95 pa_assert(data);
96
97 if ((data->channel_map_is_set = !!map))
98 data->channel_map = *map;
99 }
100
101 void pa_source_new_data_set_volume(pa_source_new_data *data, const pa_cvolume *volume) {
102 pa_assert(data);
103
104 if ((data->volume_is_set = !!volume))
105 data->volume = *volume;
106 }
107
108 void pa_source_new_data_set_muted(pa_source_new_data *data, pa_bool_t mute) {
109 pa_assert(data);
110
111 data->muted_is_set = TRUE;
112 data->muted = !!mute;
113 }
114
115 void pa_source_new_data_set_port(pa_source_new_data *data, const char *port) {
116 pa_assert(data);
117
118 pa_xfree(data->active_port);
119 data->active_port = pa_xstrdup(port);
120 }
121
122 void pa_source_new_data_done(pa_source_new_data *data) {
123 pa_assert(data);
124
125 pa_proplist_free(data->proplist);
126
127 if (data->ports) {
128 pa_device_port *p;
129
130 while ((p = pa_hashmap_steal_first(data->ports)))
131 pa_device_port_free(p);
132
133 pa_hashmap_free(data->ports, NULL, NULL);
134 }
135
136 pa_xfree(data->name);
137 pa_xfree(data->active_port);
138 }
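
/* Illustrative sketch (editor's addition, not part of the original file):
 * roughly how a source implementor drives the pa_source_new_data /
 * pa_source_new() / pa_source_put() API below. The identifiers core, ss, map,
 * thread_mq and rtpoll are assumed to be set up elsewhere in the calling
 * module.
 *
 *   pa_source_new_data data;
 *   pa_source *s;
 *
 *   pa_source_new_data_init(&data);
 *   pa_source_new_data_set_name(&data, "example_source");
 *   pa_source_new_data_set_sample_spec(&data, &ss);
 *   pa_source_new_data_set_channel_map(&data, &map);
 *   s = pa_source_new(core, &data, PA_SOURCE_HARDWARE|PA_SOURCE_LATENCY);
 *   pa_source_new_data_done(&data);
 *
 *   if (s) {
 *       pa_source_set_asyncmsgq(s, thread_mq.inq);
 *       pa_source_set_rtpoll(s, rtpoll);
 *       pa_source_put(s);
 *   }
 */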
139
140 /* Called from main context */
141 static void reset_callbacks(pa_source *s) {
142 pa_assert(s);
143
144 s->set_state = NULL;
145 s->get_volume = NULL;
146 s->set_volume = NULL;
147 s->write_volume = NULL;
148 s->get_mute = NULL;
149 s->set_mute = NULL;
150 s->update_requested_latency = NULL;
151 s->set_port = NULL;
152 s->get_formats = NULL;
153 }
154
155 /* Called from main context */
156 pa_source* pa_source_new(
157 pa_core *core,
158 pa_source_new_data *data,
159 pa_source_flags_t flags) {
160
161 pa_source *s;
162 const char *name;
163 char st[PA_SAMPLE_SPEC_SNPRINT_MAX], cm[PA_CHANNEL_MAP_SNPRINT_MAX];
164 char *pt;
165
166 pa_assert(core);
167 pa_assert(data);
168 pa_assert(data->name);
169 pa_assert_ctl_context();
170
171 s = pa_msgobject_new(pa_source);
172
173 if (!(name = pa_namereg_register(core, data->name, PA_NAMEREG_SOURCE, s, data->namereg_fail))) {
174 pa_log_debug("Failed to register name %s.", data->name);
175 pa_xfree(s);
176 return NULL;
177 }
178
179 pa_source_new_data_set_name(data, name);
180
181 if (pa_hook_fire(&core->hooks[PA_CORE_HOOK_SOURCE_NEW], data) < 0) {
182 pa_xfree(s);
183 pa_namereg_unregister(core, name);
184 return NULL;
185 }
186
187 /* FIXME, need to free s here on failure */
188
189 pa_return_null_if_fail(!data->driver || pa_utf8_valid(data->driver));
190 pa_return_null_if_fail(data->name && pa_utf8_valid(data->name) && data->name[0]);
191
192 pa_return_null_if_fail(data->sample_spec_is_set && pa_sample_spec_valid(&data->sample_spec));
193
194 if (!data->channel_map_is_set)
195 pa_return_null_if_fail(pa_channel_map_init_auto(&data->channel_map, data->sample_spec.channels, PA_CHANNEL_MAP_DEFAULT));
196
197 pa_return_null_if_fail(pa_channel_map_valid(&data->channel_map));
198 pa_return_null_if_fail(data->channel_map.channels == data->sample_spec.channels);
199
200 /* FIXME: There should probably be a general function for checking whether
201 * the source volume is allowed to be set, like there is for source outputs. */
202 pa_assert(!data->volume_is_set || !(flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
203
204 if (!data->volume_is_set) {
205 pa_cvolume_reset(&data->volume, data->sample_spec.channels);
206 data->save_volume = FALSE;
207 }
208
209 pa_return_null_if_fail(pa_cvolume_valid(&data->volume));
210 pa_return_null_if_fail(pa_cvolume_compatible(&data->volume, &data->sample_spec));
211
212 if (!data->muted_is_set)
213 data->muted = FALSE;
214
215 if (data->card)
216 pa_proplist_update(data->proplist, PA_UPDATE_MERGE, data->card->proplist);
217
218 pa_device_init_description(data->proplist);
219 pa_device_init_icon(data->proplist, FALSE);
220 pa_device_init_intended_roles(data->proplist);
221
222 if (pa_hook_fire(&core->hooks[PA_CORE_HOOK_SOURCE_FIXATE], data) < 0) {
223 pa_xfree(s);
224 pa_namereg_unregister(core, name);
225 return NULL;
226 }
227
228 s->parent.parent.free = source_free;
229 s->parent.process_msg = pa_source_process_msg;
230
231 s->core = core;
232 s->state = PA_SOURCE_INIT;
233 s->flags = flags;
234 s->priority = 0;
235 s->suspend_cause = 0;
236 s->name = pa_xstrdup(name);
237 s->proplist = pa_proplist_copy(data->proplist);
238 s->driver = pa_xstrdup(pa_path_get_filename(data->driver));
239 s->module = data->module;
240 s->card = data->card;
241
242 s->priority = pa_device_init_priority(s->proplist);
243
244 s->sample_spec = data->sample_spec;
245 s->channel_map = data->channel_map;
246
247 s->outputs = pa_idxset_new(NULL, NULL);
248 s->n_corked = 0;
249 s->monitor_of = NULL;
250 s->output_from_master = NULL;
251
252 s->reference_volume = s->real_volume = data->volume;
253 pa_cvolume_reset(&s->soft_volume, s->sample_spec.channels);
254 s->base_volume = PA_VOLUME_NORM;
255 s->n_volume_steps = PA_VOLUME_NORM+1;
256 s->muted = data->muted;
257 s->refresh_volume = s->refresh_muted = FALSE;
258
259 reset_callbacks(s);
260 s->userdata = NULL;
261
262 s->asyncmsgq = NULL;
263
264 /* As a minor optimization we just steal the list instead of
265 * copying it here */
266 s->ports = data->ports;
267 data->ports = NULL;
268
269 s->active_port = NULL;
270 s->save_port = FALSE;
271
272 if (data->active_port && s->ports)
273 if ((s->active_port = pa_hashmap_get(s->ports, data->active_port)))
274 s->save_port = data->save_port;
275
276 if (!s->active_port && s->ports) {
277 void *state;
278 pa_device_port *p;
279
280 PA_HASHMAP_FOREACH(p, s->ports, state)
281 if (!s->active_port || p->priority > s->active_port->priority)
282 s->active_port = p;
283 }
284
285 s->save_volume = data->save_volume;
286 s->save_muted = data->save_muted;
287
288 pa_silence_memchunk_get(
289 &core->silence_cache,
290 core->mempool,
291 &s->silence,
292 &s->sample_spec,
293 0);
294
295 s->thread_info.rtpoll = NULL;
296 s->thread_info.outputs = pa_hashmap_new(pa_idxset_trivial_hash_func, pa_idxset_trivial_compare_func);
297 s->thread_info.soft_volume = s->soft_volume;
298 s->thread_info.soft_muted = s->muted;
299 s->thread_info.state = s->state;
300 s->thread_info.max_rewind = 0;
301 s->thread_info.requested_latency_valid = FALSE;
302 s->thread_info.requested_latency = 0;
303 s->thread_info.min_latency = ABSOLUTE_MIN_LATENCY;
304 s->thread_info.max_latency = ABSOLUTE_MAX_LATENCY;
305 s->thread_info.fixed_latency = flags & PA_SOURCE_DYNAMIC_LATENCY ? 0 : DEFAULT_FIXED_LATENCY;
306
307 PA_LLIST_HEAD_INIT(pa_source_volume_change, s->thread_info.volume_changes);
308 s->thread_info.volume_changes_tail = NULL;
309 pa_sw_cvolume_multiply(&s->thread_info.current_hw_volume, &s->soft_volume, &s->real_volume);
310 s->thread_info.volume_change_safety_margin = core->sync_volume_safety_margin_usec;
311 s->thread_info.volume_change_extra_delay = core->sync_volume_extra_delay_usec;
312
313 /* FIXME: This should probably be moved to pa_source_put() */
314 pa_assert_se(pa_idxset_put(core->sources, s, &s->index) >= 0);
315
316 if (s->card)
317 pa_assert_se(pa_idxset_put(s->card->sources, s, NULL) >= 0);
318
319 pt = pa_proplist_to_string_sep(s->proplist, "\n ");
320 pa_log_info("Created source %u \"%s\" with sample spec %s and channel map %s\n %s",
321 s->index,
322 s->name,
323 pa_sample_spec_snprint(st, sizeof(st), &s->sample_spec),
324 pa_channel_map_snprint(cm, sizeof(cm), &s->channel_map),
325 pt);
326 pa_xfree(pt);
327
328 return s;
329 }
330
331 /* Called from main context */
332 static int source_set_state(pa_source *s, pa_source_state_t state) {
333 int ret;
334 pa_bool_t suspend_change;
335 pa_source_state_t original_state;
336
337 pa_assert(s);
338 pa_assert_ctl_context();
339
340 if (s->state == state)
341 return 0;
342
343 original_state = s->state;
344
345 suspend_change =
346 (original_state == PA_SOURCE_SUSPENDED && PA_SOURCE_IS_OPENED(state)) ||
347 (PA_SOURCE_IS_OPENED(original_state) && state == PA_SOURCE_SUSPENDED);
348
349 if (s->set_state)
350 if ((ret = s->set_state(s, state)) < 0)
351 return ret;
352
353 if (s->asyncmsgq)
354 if ((ret = pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_STATE, PA_UINT_TO_PTR(state), 0, NULL)) < 0) {
355
356 if (s->set_state)
357 s->set_state(s, original_state);
358
359 return ret;
360 }
361
362 s->state = state;
363
364 if (state != PA_SOURCE_UNLINKED) { /* if we enter UNLINKED state pa_source_unlink() will fire the appropriate events */
365 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_STATE_CHANGED], s);
366 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
367 }
368
369 if (suspend_change) {
370 pa_source_output *o;
371 uint32_t idx;
372
373 /* We're suspending or resuming, tell everyone about it */
374
375 PA_IDXSET_FOREACH(o, s->outputs, idx)
376 if (s->state == PA_SOURCE_SUSPENDED &&
377 (o->flags & PA_SOURCE_OUTPUT_KILL_ON_SUSPEND))
378 pa_source_output_kill(o);
379 else if (o->suspend)
380 o->suspend(o, state == PA_SOURCE_SUSPENDED);
381 }
382
383 return 0;
384 }
385
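/* The setters below install the implementor's volume and mute callbacks and
 * adjust the corresponding PA_SOURCE_* flags; see the notes in pa_source_put()
 * about which flags may be changed between _new() and _put(). */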
386 void pa_source_set_get_volume_callback(pa_source *s, pa_source_cb_t cb) {
387 pa_assert(s);
388
389 s->get_volume = cb;
390 }
391
392 void pa_source_set_set_volume_callback(pa_source *s, pa_source_cb_t cb) {
393 pa_source_flags_t flags;
394
395 pa_assert(s);
396 pa_assert(!s->write_volume || cb);
397
398 s->set_volume = cb;
399
400 /* Save the current flags so we can tell if they've changed */
401 flags = s->flags;
402
403 if (cb) {
404 /* The source implementor is responsible for setting decibel volume support */
405 s->flags |= PA_SOURCE_HW_VOLUME_CTRL;
406 } else {
407 s->flags &= ~PA_SOURCE_HW_VOLUME_CTRL;
408 /* See note below in pa_source_put() about volume sharing and decibel volumes */
409 pa_source_enable_decibel_volume(s, !(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
410 }
411
412 /* If the flags have changed after init, let any clients know via a change event */
413 if (s->state != PA_SOURCE_INIT && flags != s->flags)
414 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
415 }
416
417 void pa_source_set_write_volume_callback(pa_source *s, pa_source_cb_t cb) {
418 pa_source_flags_t flags;
419
420 pa_assert(s);
421 pa_assert(!cb || s->set_volume);
422
423 s->write_volume = cb;
424
425 /* Save the current flags so we can tell if they've changed */
426 flags = s->flags;
427
428 if (cb)
429 s->flags |= PA_SOURCE_SYNC_VOLUME;
430 else
431 s->flags &= ~PA_SOURCE_SYNC_VOLUME;
432
433 /* If the flags have changed after init, let any clients know via a change event */
434 if (s->state != PA_SOURCE_INIT && flags != s->flags)
435 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
436 }
437
438 void pa_source_set_get_mute_callback(pa_source *s, pa_source_cb_t cb) {
439 pa_assert(s);
440
441 s->get_mute = cb;
442 }
443
444 void pa_source_set_set_mute_callback(pa_source *s, pa_source_cb_t cb) {
445 pa_source_flags_t flags;
446
447 pa_assert(s);
448
449 s->set_mute = cb;
450
451 /* Save the current flags so we can tell if they've changed */
452 flags = s->flags;
453
454 if (cb)
455 s->flags |= PA_SOURCE_HW_MUTE_CTRL;
456 else
457 s->flags &= ~PA_SOURCE_HW_MUTE_CTRL;
458
459 /* If the flags have changed after init, let any clients know via a change event */
460 if (s->state != PA_SOURCE_INIT && flags != s->flags)
461 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
462 }
463
464 static void enable_flat_volume(pa_source *s, pa_bool_t enable) {
465 pa_source_flags_t flags;
466
467 pa_assert(s);
468
469 /* Always follow the overall user preference here */
470 enable = enable && s->core->flat_volumes;
471
472 /* Save the current flags so we can tell if they've changed */
473 flags = s->flags;
474
475 if (enable)
476 s->flags |= PA_SOURCE_FLAT_VOLUME;
477 else
478 s->flags &= ~PA_SOURCE_FLAT_VOLUME;
479
480 /* If the flags have changed after init, let any clients know via a change event */
481 if (s->state != PA_SOURCE_INIT && flags != s->flags)
482 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
483 }
484
485 void pa_source_enable_decibel_volume(pa_source *s, pa_bool_t enable) {
486 pa_source_flags_t flags;
487
488 pa_assert(s);
489
490 /* Save the current flags so we can tell if they've changed */
491 flags = s->flags;
492
493 if (enable) {
494 s->flags |= PA_SOURCE_DECIBEL_VOLUME;
495 enable_flat_volume(s, TRUE);
496 } else {
497 s->flags &= ~PA_SOURCE_DECIBEL_VOLUME;
498 enable_flat_volume(s, FALSE);
499 }
500
501 /* If the flags have changed after init, let any clients know via a change event */
502 if (s->state != PA_SOURCE_INIT && flags != s->flags)
503 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
504 }
505
506 /* Called from main context */
507 void pa_source_put(pa_source *s) {
508 pa_source_assert_ref(s);
509 pa_assert_ctl_context();
510
511 pa_assert(s->state == PA_SOURCE_INIT);
512 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) || s->output_from_master);
513
514 /* The following fields must be initialized properly when calling _put() */
515 pa_assert(s->asyncmsgq);
516 pa_assert(s->thread_info.min_latency <= s->thread_info.max_latency);
517
518 /* Generally, flags should be initialized via pa_source_new(). As a
519 * special exception we allow some volume related flags to be set
520 * between _new() and _put() by the callback setter functions above.
521 *
522 * Thus we implement a couple of safeguards here which ensure the above
523 * setters were used (or at least the implementor made manual changes
524 * in a compatible way).
525 *
526 * Note: All of these flags set here can change over the life time
527 * of the source. */
528 pa_assert(!(s->flags & PA_SOURCE_HW_VOLUME_CTRL) || s->set_volume);
529 pa_assert(!(s->flags & PA_SOURCE_SYNC_VOLUME) || s->write_volume);
530 pa_assert(!(s->flags & PA_SOURCE_HW_MUTE_CTRL) || s->set_mute);
531
532 /* XXX: Currently decibel volume is disabled for all sources that use volume
533 * sharing. When the master source supports decibel volume, it would be good
534 * to have the flag also in the filter source, but currently we don't do that
535 * so that the flags of the filter source never change when it's moved from
536 * a master source to another. One solution for this problem would be to
537 * remove user-visible volume altogether from filter sources when volume
538 * sharing is used, but the current approach was easier to implement... */
539 /* We always support decibel volumes in software, otherwise we leave it to
540 * the source implementor to set this flag as needed.
541 *
542 * Note: This flag can also change over the life time of the source. */
543 if (!(s->flags & PA_SOURCE_HW_VOLUME_CTRL) && !(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
544 pa_source_enable_decibel_volume(s, TRUE);
545
546 /* If the source implementor supports dB volumes by itself, we should always
547 * try to enable flat volumes too */
548 if ((s->flags & PA_SOURCE_DECIBEL_VOLUME))
549 enable_flat_volume(s, TRUE);
550
551 if (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) {
552 pa_source *root_source = s->output_from_master->source;
553
554 while (root_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)
555 root_source = root_source->output_from_master->source;
556
557 s->reference_volume = root_source->reference_volume;
558 pa_cvolume_remap(&s->reference_volume, &root_source->channel_map, &s->channel_map);
559
560 s->real_volume = root_source->real_volume;
561 pa_cvolume_remap(&s->real_volume, &root_source->channel_map, &s->channel_map);
562 } else
563 /* We assume that if the source implementor changed the default
564 * volume, the change was made in real_volume, because that is the
565 * usual place for such changes. */
566 s->reference_volume = s->real_volume;
567
568 s->thread_info.soft_volume = s->soft_volume;
569 s->thread_info.soft_muted = s->muted;
570 pa_sw_cvolume_multiply(&s->thread_info.current_hw_volume, &s->soft_volume, &s->real_volume);
571
572 pa_assert((s->flags & PA_SOURCE_HW_VOLUME_CTRL)
573 || (s->base_volume == PA_VOLUME_NORM
574 && ((s->flags & PA_SOURCE_DECIBEL_VOLUME || (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)))));
575 pa_assert(!(s->flags & PA_SOURCE_DECIBEL_VOLUME) || s->n_volume_steps == PA_VOLUME_NORM+1);
576 pa_assert(!(s->flags & PA_SOURCE_DYNAMIC_LATENCY) == (s->thread_info.fixed_latency != 0));
577
578 pa_assert_se(source_set_state(s, PA_SOURCE_IDLE) == 0);
579
580 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_NEW, s->index);
581 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PUT], s);
582 }
583
584 /* Called from main context */
585 void pa_source_unlink(pa_source *s) {
586 pa_bool_t linked;
587 pa_source_output *o, *j = NULL;
588
589 pa_assert(s);
590 pa_assert_ctl_context();
591
592 /* See pa_sink_unlink() for a couple of comments on how this function
593 * works. */
594
595 linked = PA_SOURCE_IS_LINKED(s->state);
596
597 if (linked)
598 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_UNLINK], s);
599
600 if (s->state != PA_SOURCE_UNLINKED)
601 pa_namereg_unregister(s->core, s->name);
602 pa_idxset_remove_by_data(s->core->sources, s, NULL);
603
604 if (s->card)
605 pa_idxset_remove_by_data(s->card->sources, s, NULL);
606
607 while ((o = pa_idxset_first(s->outputs, NULL))) {
608 pa_assert(o != j);
609 pa_source_output_kill(o);
610 j = o;
611 }
612
613 if (linked)
614 source_set_state(s, PA_SOURCE_UNLINKED);
615 else
616 s->state = PA_SOURCE_UNLINKED;
617
618 reset_callbacks(s);
619
620 if (linked) {
621 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_REMOVE, s->index);
622 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_UNLINK_POST], s);
623 }
624 }
625
626 /* Called from main context */
627 static void source_free(pa_object *o) {
628 pa_source_output *so;
629 pa_source *s = PA_SOURCE(o);
630
631 pa_assert(s);
632 pa_assert_ctl_context();
633 pa_assert(pa_source_refcnt(s) == 0);
634
635 if (PA_SOURCE_IS_LINKED(s->state))
636 pa_source_unlink(s);
637
638 pa_log_info("Freeing source %u \"%s\"", s->index, s->name);
639
640 pa_idxset_free(s->outputs, NULL, NULL);
641
642 while ((so = pa_hashmap_steal_first(s->thread_info.outputs)))
643 pa_source_output_unref(so);
644
645 pa_hashmap_free(s->thread_info.outputs, NULL, NULL);
646
647 if (s->silence.memblock)
648 pa_memblock_unref(s->silence.memblock);
649
650 pa_xfree(s->name);
651 pa_xfree(s->driver);
652
653 if (s->proplist)
654 pa_proplist_free(s->proplist);
655
656 if (s->ports) {
657 pa_device_port *p;
658
659 while ((p = pa_hashmap_steal_first(s->ports)))
660 pa_device_port_free(p);
661
662 pa_hashmap_free(s->ports, NULL, NULL);
663 }
664
665 pa_xfree(s);
666 }
667
668 /* Called from main context, and not while the IO thread is active, please */
669 void pa_source_set_asyncmsgq(pa_source *s, pa_asyncmsgq *q) {
670 pa_source_assert_ref(s);
671 pa_assert_ctl_context();
672
673 s->asyncmsgq = q;
674 }
675
676 /* Called from main context, and not while the IO thread is active, please */
677 void pa_source_update_flags(pa_source *s, pa_source_flags_t mask, pa_source_flags_t value) {
678 pa_source_assert_ref(s);
679 pa_assert_ctl_context();
680
681 if (mask == 0)
682 return;
683
684 /* For now, allow only a minimal set of flags to be changed. */
685 pa_assert((mask & ~(PA_SOURCE_DYNAMIC_LATENCY|PA_SOURCE_LATENCY)) == 0);
686
687 s->flags = (s->flags & ~mask) | (value & mask);
688 }
689
690 /* Called from IO context, or before _put() from main context */
691 void pa_source_set_rtpoll(pa_source *s, pa_rtpoll *p) {
692 pa_source_assert_ref(s);
693 pa_source_assert_io_context(s);
694
695 s->thread_info.rtpoll = p;
696 }
697
698 /* Called from main context */
699 int pa_source_update_status(pa_source*s) {
700 pa_source_assert_ref(s);
701 pa_assert_ctl_context();
702 pa_assert(PA_SOURCE_IS_LINKED(s->state));
703
704 if (s->state == PA_SOURCE_SUSPENDED)
705 return 0;
706
707 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
708 }
709
710 /* Called from main context */
711 int pa_source_suspend(pa_source *s, pa_bool_t suspend, pa_suspend_cause_t cause) {
712 pa_source_assert_ref(s);
713 pa_assert_ctl_context();
714 pa_assert(PA_SOURCE_IS_LINKED(s->state));
715 pa_assert(cause != 0);
716
717 if (s->monitor_of && cause != PA_SUSPEND_PASSTHROUGH)
718 return -PA_ERR_NOTSUPPORTED;
719
720 if (suspend)
721 s->suspend_cause |= cause;
722 else
723 s->suspend_cause &= ~cause;
724
725 if ((pa_source_get_state(s) == PA_SOURCE_SUSPENDED) == !!s->suspend_cause)
726 return 0;
727
728 pa_log_debug("Suspend cause of source %s is 0x%04x, %s", s->name, s->suspend_cause, s->suspend_cause ? "suspending" : "resuming");
729
730 if (s->suspend_cause)
731 return source_set_state(s, PA_SOURCE_SUSPENDED);
732 else
733 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
734 }
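
/* Illustrative sketch (editor's addition): a module that wants to suspend a
 * source while it reconfigures the device, and resume it afterwards, would
 * typically do something like:
 *
 *   pa_source_suspend(s, TRUE, PA_SUSPEND_INTERNAL);
 *   ... reconfigure the device ...
 *   pa_source_suspend(s, FALSE, PA_SUSPEND_INTERNAL);
 *
 * PA_SUSPEND_INTERNAL is one of the pa_suspend_cause_t bits; the cause bits
 * are accumulated in s->suspend_cause as shown above. */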
735
736 /* Called from main context */
737 int pa_source_sync_suspend(pa_source *s) {
738 pa_sink_state_t state;
739
740 pa_source_assert_ref(s);
741 pa_assert_ctl_context();
742 pa_assert(PA_SOURCE_IS_LINKED(s->state));
743 pa_assert(s->monitor_of);
744
745 state = pa_sink_get_state(s->monitor_of);
746
747 if (state == PA_SINK_SUSPENDED)
748 return source_set_state(s, PA_SOURCE_SUSPENDED);
749
750 pa_assert(PA_SINK_IS_OPENED(state));
751
752 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
753 }
754
755 /* Called from main context */
756 pa_queue *pa_source_move_all_start(pa_source *s, pa_queue *q) {
757 pa_source_output *o, *n;
758 uint32_t idx;
759
760 pa_source_assert_ref(s);
761 pa_assert_ctl_context();
762 pa_assert(PA_SOURCE_IS_LINKED(s->state));
763
764 if (!q)
765 q = pa_queue_new();
766
767 for (o = PA_SOURCE_OUTPUT(pa_idxset_first(s->outputs, &idx)); o; o = n) {
768 n = PA_SOURCE_OUTPUT(pa_idxset_next(s->outputs, &idx));
769
770 pa_source_output_ref(o);
771
772 if (pa_source_output_start_move(o) >= 0)
773 pa_queue_push(q, o);
774 else
775 pa_source_output_unref(o);
776 }
777
778 return q;
779 }
780
781 /* Called from main context */
782 void pa_source_move_all_finish(pa_source *s, pa_queue *q, pa_bool_t save) {
783 pa_source_output *o;
784
785 pa_source_assert_ref(s);
786 pa_assert_ctl_context();
787 pa_assert(PA_SOURCE_IS_LINKED(s->state));
788 pa_assert(q);
789
790 while ((o = PA_SOURCE_OUTPUT(pa_queue_pop(q)))) {
791 if (pa_source_output_finish_move(o, s, save) < 0)
792 pa_source_output_fail_move(o);
793
794 pa_source_output_unref(o);
795 }
796
797 pa_queue_free(q, NULL, NULL);
798 }
799
800 /* Called from main context */
801 void pa_source_move_all_fail(pa_queue *q) {
802 pa_source_output *o;
803
804 pa_assert_ctl_context();
805 pa_assert(q);
806
807 while ((o = PA_SOURCE_OUTPUT(pa_queue_pop(q)))) {
808 pa_source_output_fail_move(o);
809 pa_source_output_unref(o);
810 }
811
812 pa_queue_free(q, NULL, NULL);
813 }
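
/* Illustrative sketch (editor's addition): the three _move_all_ helpers above
 * are meant to be used together when every output has to leave a source
 * temporarily, for example:
 *
 *   pa_queue *q = pa_source_move_all_start(s, NULL);
 *   ... tear the source down, or pick a destination ...
 *   pa_source_move_all_finish(new_source, q, FALSE);
 *
 * or, if no suitable destination exists, pa_source_move_all_fail(q). Here
 * new_source is a placeholder for whatever destination the caller chose. */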
814
815 /* Called from IO thread context */
816 void pa_source_process_rewind(pa_source *s, size_t nbytes) {
817 pa_source_output *o;
818 void *state = NULL;
819
820 pa_source_assert_ref(s);
821 pa_source_assert_io_context(s);
822 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
823
824 if (nbytes <= 0)
825 return;
826
827 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
828 return;
829
830 pa_log_debug("Processing rewind...");
831
832 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
833 pa_source_output_assert_ref(o);
834 pa_source_output_process_rewind(o, nbytes);
835 }
836 }
837
838 /* Called from IO thread context */
839 void pa_source_post(pa_source*s, const pa_memchunk *chunk) {
840 pa_source_output *o;
841 void *state = NULL;
842
843 pa_source_assert_ref(s);
844 pa_source_assert_io_context(s);
845 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
846 pa_assert(chunk);
847
848 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
849 return;
850
851 if (s->thread_info.soft_muted || !pa_cvolume_is_norm(&s->thread_info.soft_volume)) {
852 pa_memchunk vchunk = *chunk;
853
854 pa_memblock_ref(vchunk.memblock);
855 pa_memchunk_make_writable(&vchunk, 0);
856
857 if (s->thread_info.soft_muted || pa_cvolume_is_muted(&s->thread_info.soft_volume))
858 pa_silence_memchunk(&vchunk, &s->sample_spec);
859 else
860 pa_volume_memchunk(&vchunk, &s->sample_spec, &s->thread_info.soft_volume);
861
862 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL))) {
863 pa_source_output_assert_ref(o);
864
865 if (!o->thread_info.direct_on_input)
866 pa_source_output_push(o, &vchunk);
867 }
868
869 pa_memblock_unref(vchunk.memblock);
870 } else {
871
872 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL))) {
873 pa_source_output_assert_ref(o);
874
875 if (!o->thread_info.direct_on_input)
876 pa_source_output_push(o, chunk);
877 }
878 }
879 }
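
/* Illustrative sketch (editor's addition): an IO-thread capture loop normally
 * hands data to the core through pa_source_post(). Assuming size bytes of
 * captured audio are available (size is a placeholder here):
 *
 *   pa_memchunk chunk;
 *   chunk.memblock = pa_memblock_new(s->core->mempool, size);
 *   chunk.index = 0;
 *   chunk.length = size;
 *   ... copy the captured samples into the memblock ...
 *   pa_source_post(s, &chunk);
 *   pa_memblock_unref(chunk.memblock);
 *
 * pa_source_post() takes whatever references it needs, so the caller can drop
 * its own reference afterwards. */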
880
881 /* Called from IO thread context */
882 void pa_source_post_direct(pa_source*s, pa_source_output *o, const pa_memchunk *chunk) {
883 pa_source_assert_ref(s);
884 pa_source_assert_io_context(s);
885 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
886 pa_source_output_assert_ref(o);
887 pa_assert(o->thread_info.direct_on_input);
888 pa_assert(chunk);
889
890 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
891 return;
892
893 if (s->thread_info.soft_muted || !pa_cvolume_is_norm(&s->thread_info.soft_volume)) {
894 pa_memchunk vchunk = *chunk;
895
896 pa_memblock_ref(vchunk.memblock);
897 pa_memchunk_make_writable(&vchunk, 0);
898
899 if (s->thread_info.soft_muted || pa_cvolume_is_muted(&s->thread_info.soft_volume))
900 pa_silence_memchunk(&vchunk, &s->sample_spec);
901 else
902 pa_volume_memchunk(&vchunk, &s->sample_spec, &s->thread_info.soft_volume);
903
904 pa_source_output_push(o, &vchunk);
905
906 pa_memblock_unref(vchunk.memblock);
907 } else
908 pa_source_output_push(o, chunk);
909 }
910
911 /* Called from main thread */
912 pa_usec_t pa_source_get_latency(pa_source *s) {
913 pa_usec_t usec;
914
915 pa_source_assert_ref(s);
916 pa_assert_ctl_context();
917 pa_assert(PA_SOURCE_IS_LINKED(s->state));
918
919 if (s->state == PA_SOURCE_SUSPENDED)
920 return 0;
921
922 if (!(s->flags & PA_SOURCE_LATENCY))
923 return 0;
924
925 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_LATENCY, &usec, 0, NULL) == 0);
926
927 return usec;
928 }
929
930 /* Called from IO thread */
931 pa_usec_t pa_source_get_latency_within_thread(pa_source *s) {
932 pa_usec_t usec = 0;
933 pa_msgobject *o;
934
935 pa_source_assert_ref(s);
936 pa_source_assert_io_context(s);
937 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
938
939 /* The returned value is supposed to be in the time domain of the sound card! */
940
941 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
942 return 0;
943
944 if (!(s->flags & PA_SOURCE_LATENCY))
945 return 0;
946
947 o = PA_MSGOBJECT(s);
948
949 /* FIXME: We probably should make this a proper vtable callback instead of going through process_msg() */
950
951 if (o->process_msg(o, PA_SOURCE_MESSAGE_GET_LATENCY, &usec, 0, NULL) < 0)
952 return -1;
953
954 return usec;
955 }
956
957 /* Called from the main thread (and also from the IO thread while the main
958 * thread is waiting).
959 *
960 * When a source uses volume sharing, it never has the PA_SOURCE_FLAT_VOLUME flag
961 * set. Instead, flat volume mode is detected by checking whether the root source
962 * has the flag set. */
963 pa_bool_t pa_source_flat_volume_enabled(pa_source *s) {
964 pa_source_assert_ref(s);
965
966 while (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)
967 s = s->output_from_master->source;
968
969 return (s->flags & PA_SOURCE_FLAT_VOLUME);
970 }
971
972 /* Called from main context */
973 pa_bool_t pa_source_is_passthrough(pa_source *s) {
974
975 pa_source_assert_ref(s);
976
977 /* NB Currently only monitor sources support passthrough mode */
978 return (s->monitor_of && pa_sink_is_passthrough(s->monitor_of));
979 }
980
981 /* Called from main context. */
982 static void compute_reference_ratio(pa_source_output *o) {
983 unsigned c = 0;
984 pa_cvolume remapped;
985
986 pa_assert(o);
987 pa_assert(pa_source_flat_volume_enabled(o->source));
988
989 /*
990 * Calculates the reference ratio from the source's reference
991 * volume. This basically calculates:
992 *
993 * o->reference_ratio = o->volume / o->source->reference_volume
994 */
995
996 remapped = o->source->reference_volume;
997 pa_cvolume_remap(&remapped, &o->source->channel_map, &o->channel_map);
998
999 o->reference_ratio.channels = o->sample_spec.channels;
1000
1001 for (c = 0; c < o->sample_spec.channels; c++) {
1002
1003 /* We don't update when the source volume is 0 anyway */
1004 if (remapped.values[c] <= PA_VOLUME_MUTED)
1005 continue;
1006
1007 /* Don't update the reference ratio unless necessary */
1008 if (pa_sw_volume_multiply(
1009 o->reference_ratio.values[c],
1010 remapped.values[c]) == o->volume.values[c])
1011 continue;
1012
1013 o->reference_ratio.values[c] = pa_sw_volume_divide(
1014 o->volume.values[c],
1015 remapped.values[c]);
1016 }
1017 }
1018
1019 /* Called from main context. Only called for the root source in volume sharing
1020 * cases, except for internal recursive calls. */
1021 static void compute_reference_ratios(pa_source *s) {
1022 uint32_t idx;
1023 pa_source_output *o;
1024
1025 pa_source_assert_ref(s);
1026 pa_assert_ctl_context();
1027 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1028 pa_assert(pa_source_flat_volume_enabled(s));
1029
1030 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1031 compute_reference_ratio(o);
1032
1033 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1034 compute_reference_ratios(o->destination_source);
1035 }
1036 }
1037
1038 /* Called from main context. Only called for the root source in volume sharing
1039 * cases, except for internal recursive calls. */
1040 static void compute_real_ratios(pa_source *s) {
1041 pa_source_output *o;
1042 uint32_t idx;
1043
1044 pa_source_assert_ref(s);
1045 pa_assert_ctl_context();
1046 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1047 pa_assert(pa_source_flat_volume_enabled(s));
1048
1049 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1050 unsigned c;
1051 pa_cvolume remapped;
1052
1053 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1054 /* The origin source uses volume sharing, so this output's real ratio
1055 * is handled as a special case - the real ratio must be 0 dB, and
1056 * as a result o->soft_volume must equal o->volume_factor. */
1057 pa_cvolume_reset(&o->real_ratio, o->real_ratio.channels);
1058 o->soft_volume = o->volume_factor;
1059
1060 compute_real_ratios(o->destination_source);
1061
1062 continue;
1063 }
1064
1065 /*
1066 * This basically calculates:
1067 *
1068 * o->real_ratio := o->volume / s->real_volume
1069 * o->soft_volume := o->real_ratio * o->volume_factor
1070 */
1071
1072 remapped = s->real_volume;
1073 pa_cvolume_remap(&remapped, &s->channel_map, &o->channel_map);
1074
1075 o->real_ratio.channels = o->sample_spec.channels;
1076 o->soft_volume.channels = o->sample_spec.channels;
1077
1078 for (c = 0; c < o->sample_spec.channels; c++) {
1079
1080 if (remapped.values[c] <= PA_VOLUME_MUTED) {
1081 /* We leave o->real_ratio untouched */
1082 o->soft_volume.values[c] = PA_VOLUME_MUTED;
1083 continue;
1084 }
1085
1086 /* Don't lose accuracy unless necessary */
1087 if (pa_sw_volume_multiply(
1088 o->real_ratio.values[c],
1089 remapped.values[c]) != o->volume.values[c])
1090
1091 o->real_ratio.values[c] = pa_sw_volume_divide(
1092 o->volume.values[c],
1093 remapped.values[c]);
1094
1095 o->soft_volume.values[c] = pa_sw_volume_multiply(
1096 o->real_ratio.values[c],
1097 o->volume_factor.values[c]);
1098 }
1099
1100 /* We don't copy the soft_volume to the thread_info data
1101 * here. That must be done by the caller */
1102 }
1103 }
1104
1105 static pa_cvolume *cvolume_remap_minimal_impact(
1106 pa_cvolume *v,
1107 const pa_cvolume *template,
1108 const pa_channel_map *from,
1109 const pa_channel_map *to) {
1110
1111 pa_cvolume t;
1112
1113 pa_assert(v);
1114 pa_assert(template);
1115 pa_assert(from);
1116 pa_assert(to);
1117 pa_assert(pa_cvolume_compatible_with_channel_map(v, from));
1118 pa_assert(pa_cvolume_compatible_with_channel_map(template, to));
1119
1120 /* Much like pa_cvolume_remap(), but tries to minimize impact when
1121 * mapping from source output to source volumes:
1122 *
1123 * If template is a possible remapping from v it is used instead
1124 * of remapping anew.
1125 *
1126 * If the channel maps don't match we set an all-channel volume on
1127 * the source to ensure that changing a volume on one stream has no
1128 * effect that cannot be compensated for in another stream that
1129 * does not have the same channel map as the source. */
1130
1131 if (pa_channel_map_equal(from, to))
1132 return v;
1133
1134 t = *template;
1135 if (pa_cvolume_equal(pa_cvolume_remap(&t, to, from), v)) {
1136 *v = *template;
1137 return v;
1138 }
1139
1140 pa_cvolume_set(v, to->channels, pa_cvolume_max(v));
1141 return v;
1142 }
1143
1144 /* Called from main thread. Only called for the root source in volume sharing
1145 * cases, except for internal recursive calls. */
1146 static void get_maximum_output_volume(pa_source *s, pa_cvolume *max_volume, const pa_channel_map *channel_map) {
1147 pa_source_output *o;
1148 uint32_t idx;
1149
1150 pa_source_assert_ref(s);
1151 pa_assert(max_volume);
1152 pa_assert(channel_map);
1153 pa_assert(pa_source_flat_volume_enabled(s));
1154
1155 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1156 pa_cvolume remapped;
1157
1158 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1159 get_maximum_output_volume(o->destination_source, max_volume, channel_map);
1160
1161 /* Ignore this output. The origin source uses volume sharing, so this
1162 * output's volume will be set to be equal to the root source's real
1163 * volume. Obviously this output's current volume must not then
1164 * affect what the root source's real volume will be. */
1165 continue;
1166 }
1167
1168 remapped = o->volume;
1169 cvolume_remap_minimal_impact(&remapped, max_volume, &o->channel_map, channel_map);
1170 pa_cvolume_merge(max_volume, max_volume, &remapped);
1171 }
1172 }
1173
1174 /* Called from main thread. Only called for the root source in volume sharing
1175 * cases, except for internal recursive calls. */
1176 static pa_bool_t has_outputs(pa_source *s) {
1177 pa_source_output *o;
1178 uint32_t idx;
1179
1180 pa_source_assert_ref(s);
1181
1182 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1183 if (!o->destination_source || !(o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) || has_outputs(o->destination_source))
1184 return TRUE;
1185 }
1186
1187 return FALSE;
1188 }
1189
1190 /* Called from main thread. Only called for the root source in volume sharing
1191 * cases, except for internal recursive calls. */
1192 static void update_real_volume(pa_source *s, const pa_cvolume *new_volume, pa_channel_map *channel_map) {
1193 pa_source_output *o;
1194 uint32_t idx;
1195
1196 pa_source_assert_ref(s);
1197 pa_assert(new_volume);
1198 pa_assert(channel_map);
1199
1200 s->real_volume = *new_volume;
1201 pa_cvolume_remap(&s->real_volume, channel_map, &s->channel_map);
1202
1203 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1204 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1205 if (pa_source_flat_volume_enabled(s)) {
1206 pa_cvolume old_volume = o->volume;
1207
1208 /* Follow the root source's real volume. */
1209 o->volume = *new_volume;
1210 pa_cvolume_remap(&o->volume, channel_map, &o->channel_map);
1211 compute_reference_ratio(o);
1212
1213 /* The volume changed, let's tell people so */
1214 if (!pa_cvolume_equal(&old_volume, &o->volume)) {
1215 if (o->volume_changed)
1216 o->volume_changed(o);
1217
1218 pa_subscription_post(o->core, PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT|PA_SUBSCRIPTION_EVENT_CHANGE, o->index);
1219 }
1220 }
1221
1222 update_real_volume(o->destination_source, new_volume, channel_map);
1223 }
1224 }
1225 }
1226
1227 /* Called from main thread. Only called for the root source in shared volume
1228 * cases. */
1229 static void compute_real_volume(pa_source *s) {
1230 pa_source_assert_ref(s);
1231 pa_assert_ctl_context();
1232 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1233 pa_assert(pa_source_flat_volume_enabled(s));
1234 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1235
1236 /* This determines the maximum volume of all streams and sets
1237 * s->real_volume accordingly. */
1238
1239 if (!has_outputs(s)) {
1240 /* In the special case that we have no source outputs we leave the
1241 * volume unmodified. */
1242 update_real_volume(s, &s->reference_volume, &s->channel_map);
1243 return;
1244 }
1245
1246 pa_cvolume_mute(&s->real_volume, s->channel_map.channels);
1247
1248 /* First let's determine the new maximum volume of all outputs
1249 * connected to this source */
1250 get_maximum_output_volume(s, &s->real_volume, &s->channel_map);
1251 update_real_volume(s, &s->real_volume, &s->channel_map);
1252
1253 /* Then, let's update the real ratios/soft volumes of all outputs
1254 * connected to this source */
1255 compute_real_ratios(s);
1256 }
1257
1258 /* Called from main thread. Only called for the root source in shared volume
1259 * cases, except for internal recursive calls. */
1260 static void propagate_reference_volume(pa_source *s) {
1261 pa_source_output *o;
1262 uint32_t idx;
1263
1264 pa_source_assert_ref(s);
1265 pa_assert_ctl_context();
1266 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1267 pa_assert(pa_source_flat_volume_enabled(s));
1268
1269 /* This is called whenever the source volume changes for a reason
1270 * other than a source output volume change. We need to fix up the
1271 * source output volumes accordingly. */
1272
1273 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1274 pa_cvolume old_volume;
1275
1276 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1277 propagate_reference_volume(o->destination_source);
1278
1279 /* Since the origin source uses volume sharing, this output's volume
1280 * needs to be updated to match the root source's real volume, but
1281 * that will be done later in update_shared_real_volume(). */
1282 continue;
1283 }
1284
1285 old_volume = o->volume;
1286
1287 /* This basically calculates:
1288 *
1289 * o->volume := o->reference_volume * o->reference_ratio */
1290
1291 o->volume = s->reference_volume;
1292 pa_cvolume_remap(&o->volume, &s->channel_map, &o->channel_map);
1293 pa_sw_cvolume_multiply(&o->volume, &o->volume, &o->reference_ratio);
1294
1295 /* The volume changed, let's tell people so */
1296 if (!pa_cvolume_equal(&old_volume, &o->volume)) {
1297
1298 if (o->volume_changed)
1299 o->volume_changed(o);
1300
1301 pa_subscription_post(o->core, PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT|PA_SUBSCRIPTION_EVENT_CHANGE, o->index);
1302 }
1303 }
1304 }
1305
1306 /* Called from main thread. Only called for the root source in volume sharing
1307 * cases, except for internal recursive calls. The return value indicates
1308 * whether any reference volume actually changed. */
1309 static pa_bool_t update_reference_volume(pa_source *s, const pa_cvolume *v, const pa_channel_map *channel_map, pa_bool_t save) {
1310 pa_cvolume volume;
1311 pa_bool_t reference_volume_changed;
1312 pa_source_output *o;
1313 uint32_t idx;
1314
1315 pa_source_assert_ref(s);
1316 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1317 pa_assert(v);
1318 pa_assert(channel_map);
1319 pa_assert(pa_cvolume_valid(v));
1320
1321 volume = *v;
1322 pa_cvolume_remap(&volume, channel_map, &s->channel_map);
1323
1324 reference_volume_changed = !pa_cvolume_equal(&volume, &s->reference_volume);
1325 s->reference_volume = volume;
1326
1327 s->save_volume = (!reference_volume_changed && s->save_volume) || save;
1328
1329 if (reference_volume_changed)
1330 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1331 else if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1332 /* If the root source's volume doesn't change, then there can't be any
1333 * changes in the other sources in the source tree either.
1334 *
1335 * It's probably theoretically possible that even if the root source's
1336 * volume changes slightly, some filter source doesn't change its volume
1337 * due to rounding errors. If that happens, we still want to propagate
1338 * the changed root source volume to the sources connected to the
1339 * intermediate source that didn't change its volume. This theoretical
1340 * possibility is the reason why we have that !(s->flags &
1341 * PA_SOURCE_SHARE_VOLUME_WITH_MASTER) condition. Probably nobody would
1342 * notice even if we always returned FALSE here when
1343 * reference_volume_changed is FALSE. */
1344 return FALSE;
1345
1346 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1347 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1348 update_reference_volume(o->destination_source, v, channel_map, FALSE);
1349 }
1350
1351 return TRUE;
1352 }
1353
1354 /* Called from main thread */
1355 void pa_source_set_volume(
1356 pa_source *s,
1357 const pa_cvolume *volume,
1358 pa_bool_t send_msg,
1359 pa_bool_t save) {
1360
1361 pa_cvolume new_reference_volume;
1362 pa_source *root_source = s;
1363
1364 pa_source_assert_ref(s);
1365 pa_assert_ctl_context();
1366 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1367 pa_assert(!volume || pa_cvolume_valid(volume));
1368 pa_assert(volume || pa_source_flat_volume_enabled(s));
1369 pa_assert(!volume || volume->channels == 1 || pa_cvolume_compatible(volume, &s->sample_spec));
1370
1371 /* make sure we don't change the volume when a PASSTHROUGH output is connected */
1372 if (pa_source_is_passthrough(s)) {
1373 /* FIXME: Need to notify client that volume control is disabled */
1374 pa_log_warn("Cannot change volume, Source is monitor of a PASSTHROUGH sink");
1375 return;
1376 }
1377
1378 /* In case of volume sharing, the volume is set for the root source first,
1379 * from which it's then propagated to the sharing sources. */
1380 while (root_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)
1381 root_source = root_source->output_from_master->source;
1382
1383 /* As a special exception we accept mono volumes on all sources --
1384 * even on those with more complex channel maps */
1385
1386 if (volume) {
1387 if (pa_cvolume_compatible(volume, &s->sample_spec))
1388 new_reference_volume = *volume;
1389 else {
1390 new_reference_volume = s->reference_volume;
1391 pa_cvolume_scale(&new_reference_volume, pa_cvolume_max(volume));
1392 }
1393
1394 pa_cvolume_remap(&new_reference_volume, &s->channel_map, &root_source->channel_map);
1395 }
1396
1397 /* If volume is NULL we synchronize the source's real and reference
1398 * volumes with the stream volumes. If it is not NULL we update
1399 * the reference_volume with it. */
1400
1401 if (volume) {
1402 if (update_reference_volume(root_source, &new_reference_volume, &root_source->channel_map, save)) {
1403 if (pa_source_flat_volume_enabled(root_source)) {
1404 /* OK, propagate this volume change back to the outputs */
1405 propagate_reference_volume(root_source);
1406
1407 /* And now recalculate the real volume */
1408 compute_real_volume(root_source);
1409 } else
1410 update_real_volume(root_source, &root_source->reference_volume, &root_source->channel_map);
1411 }
1412
1413 } else {
1414 pa_assert(pa_source_flat_volume_enabled(root_source));
1415
1416 /* Ok, let's determine the new real volume */
1417 compute_real_volume(root_source);
1418
1419 /* Let's 'push' the reference volume if necessary */
1420 pa_cvolume_merge(&new_reference_volume, &s->reference_volume, &root_source->real_volume);
1421 update_reference_volume(root_source, &new_reference_volume, &root_source->channel_map, save);
1422
1423 /* Now that the reference volume is updated, we can update the streams'
1424 * reference ratios. */
1425 compute_reference_ratios(root_source);
1426 }
1427
1428 if (root_source->set_volume) {
1429 /* If we have a function set_volume(), then we do not apply a
1430 * soft volume by default. However, set_volume() is free to
1431 * apply one to root_source->soft_volume */
1432
1433 pa_cvolume_reset(&root_source->soft_volume, root_source->sample_spec.channels);
1434 if (!(root_source->flags & PA_SOURCE_SYNC_VOLUME))
1435 root_source->set_volume(root_source);
1436
1437 } else
1438 /* If we have no function set_volume(), then the soft volume
1439 * becomes the real volume */
1440 root_source->soft_volume = root_source->real_volume;
1441
1442 /* This tells the source that soft volume and/or real volume changed */
1443 if (send_msg)
1444 pa_assert_se(pa_asyncmsgq_send(root_source->asyncmsgq, PA_MSGOBJECT(root_source), PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL) == 0);
1445 }
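
/* Illustrative sketch (editor's addition): callers outside this file, e.g. a
 * protocol handler, change the reference volume roughly like this; v and its
 * channel setup are the caller's responsibility:
 *
 *   pa_cvolume v;
 *   pa_cvolume_set(&v, s->sample_spec.channels, PA_VOLUME_NORM / 2);
 *   pa_source_set_volume(s, &v, TRUE, TRUE);
 *
 * Passing send_msg=TRUE forwards the new soft/real volume to the IO thread,
 * and save=TRUE marks the volume for persistent storage (see s->save_volume
 * above). */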
1446
1447 /* Called from the io thread if sync volume is used, otherwise from the main thread.
1448 * Only to be called by the source implementor */
1449 void pa_source_set_soft_volume(pa_source *s, const pa_cvolume *volume) {
1450
1451 pa_source_assert_ref(s);
1452 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1453
1454 if (s->flags & PA_SOURCE_SYNC_VOLUME)
1455 pa_source_assert_io_context(s);
1456 else
1457 pa_assert_ctl_context();
1458
1459 if (!volume)
1460 pa_cvolume_reset(&s->soft_volume, s->sample_spec.channels);
1461 else
1462 s->soft_volume = *volume;
1463
1464 if (PA_SOURCE_IS_LINKED(s->state) && !(s->flags & PA_SOURCE_SYNC_VOLUME))
1465 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_VOLUME, NULL, 0, NULL) == 0);
1466 else
1467 s->thread_info.soft_volume = s->soft_volume;
1468 }
1469
1470 /* Called from the main thread. Only called for the root source in volume sharing
1471 * cases, except for internal recursive calls. */
1472 static void propagate_real_volume(pa_source *s, const pa_cvolume *old_real_volume) {
1473 pa_source_output *o;
1474 uint32_t idx;
1475
1476 pa_source_assert_ref(s);
1477 pa_assert(old_real_volume);
1478 pa_assert_ctl_context();
1479 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1480
1481 /* This is called when the hardware's real volume changes due to
1482 * some external event. We copy the real volume into our
1483 * reference volume and then rebuild the stream volumes based on
1484 * o->real_ratio which should stay fixed. */
1485
1486 if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1487 if (pa_cvolume_equal(old_real_volume, &s->real_volume))
1488 return;
1489
1490 /* 1. Make the real volume the reference volume */
1491 update_reference_volume(s, &s->real_volume, &s->channel_map, TRUE);
1492 }
1493
1494 if (pa_source_flat_volume_enabled(s)) {
1495
1496 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1497 pa_cvolume old_volume = o->volume;
1498
1499 /* 2. Since the source's reference and real volumes are equal
1500 * now our ratios should be too. */
1501 o->reference_ratio = o->real_ratio;
1502
1503 /* 3. Recalculate the new stream reference volume based on the
1504 * reference ratio and the source's reference volume.
1505 *
1506 * This basically calculates:
1507 *
1508 * o->volume = s->reference_volume * o->reference_ratio
1509 *
1510 * This is identical to propagate_reference_volume() */
1511 o->volume = s->reference_volume;
1512 pa_cvolume_remap(&o->volume, &s->channel_map, &o->channel_map);
1513 pa_sw_cvolume_multiply(&o->volume, &o->volume, &o->reference_ratio);
1514
1515 /* Notify if something changed */
1516 if (!pa_cvolume_equal(&old_volume, &o->volume)) {
1517
1518 if (o->volume_changed)
1519 o->volume_changed(o);
1520
1521 pa_subscription_post(o->core, PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT|PA_SUBSCRIPTION_EVENT_CHANGE, o->index);
1522 }
1523
1524 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1525 propagate_real_volume(o->destination_source, old_real_volume);
1526 }
1527 }
1528
1529 /* Something got changed in the hardware. It probably makes sense
1530 * to save changed hw settings given that hw volume changes not
1531 * triggered by PA are almost certainly done by the user. */
1532 if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1533 s->save_volume = TRUE;
1534 }
1535
1536 /* Called from io thread */
1537 void pa_source_update_volume_and_mute(pa_source *s) {
1538 pa_assert(s);
1539 pa_source_assert_io_context(s);
1540
1541 pa_asyncmsgq_post(pa_thread_mq_get()->outq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_UPDATE_VOLUME_AND_MUTE, NULL, 0, NULL, NULL);
1542 }
1543
1544 /* Called from main thread */
1545 const pa_cvolume *pa_source_get_volume(pa_source *s, pa_bool_t force_refresh) {
1546 pa_source_assert_ref(s);
1547 pa_assert_ctl_context();
1548 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1549
1550 if (s->refresh_volume || force_refresh) {
1551 struct pa_cvolume old_real_volume;
1552
1553 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1554
1555 old_real_volume = s->real_volume;
1556
1557 if (!(s->flags & PA_SOURCE_SYNC_VOLUME) && s->get_volume)
1558 s->get_volume(s);
1559
1560 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_VOLUME, NULL, 0, NULL) == 0);
1561
1562 update_real_volume(s, &s->real_volume, &s->channel_map);
1563 propagate_real_volume(s, &old_real_volume);
1564 }
1565
1566 return &s->reference_volume;
1567 }
1568
1569 /* Called from main thread. In volume sharing cases, only the root source may
1570 * call this. */
1571 void pa_source_volume_changed(pa_source *s, const pa_cvolume *new_real_volume) {
1572 pa_cvolume old_real_volume;
1573
1574 pa_source_assert_ref(s);
1575 pa_assert_ctl_context();
1576 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1577 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1578
1579 /* The source implementor may call this if the volume changed to make sure everyone is notified */
1580
1581 old_real_volume = s->real_volume;
1582 update_real_volume(s, new_real_volume, &s->channel_map);
1583 propagate_real_volume(s, &old_real_volume);
1584 }
1585
1586 /* Called from main thread */
1587 void pa_source_set_mute(pa_source *s, pa_bool_t mute, pa_bool_t save) {
1588 pa_bool_t old_muted;
1589
1590 pa_source_assert_ref(s);
1591 pa_assert_ctl_context();
1592 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1593
1594 old_muted = s->muted;
1595 s->muted = mute;
1596 s->save_muted = (old_muted == s->muted && s->save_muted) || save;
1597
1598 if (!(s->flags & PA_SOURCE_SYNC_VOLUME) && s->set_mute)
1599 s->set_mute(s);
1600
1601 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MUTE, NULL, 0, NULL) == 0);
1602
1603 if (old_muted != s->muted)
1604 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1605 }
1606
1607 /* Called from main thread */
1608 pa_bool_t pa_source_get_mute(pa_source *s, pa_bool_t force_refresh) {
1609
1610 pa_source_assert_ref(s);
1611 pa_assert_ctl_context();
1612 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1613
1614 if (s->refresh_muted || force_refresh) {
1615 pa_bool_t old_muted = s->muted;
1616
1617 if (!(s->flags & PA_SOURCE_SYNC_VOLUME) && s->get_mute)
1618 s->get_mute(s);
1619
1620 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_MUTE, NULL, 0, NULL) == 0);
1621
1622 if (old_muted != s->muted) {
1623 s->save_muted = TRUE;
1624
1625 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1626
1627 /* Make sure the soft mute status stays in sync */
1628 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MUTE, NULL, 0, NULL) == 0);
1629 }
1630 }
1631
1632 return s->muted;
1633 }
1634
1635 /* Called from main thread */
1636 void pa_source_mute_changed(pa_source *s, pa_bool_t new_muted) {
1637 pa_source_assert_ref(s);
1638 pa_assert_ctl_context();
1639 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1640
1641 /* The source implementor may call this if the mute state changed to make sure everyone is notified */
1642
1643 if (s->muted == new_muted)
1644 return;
1645
1646 s->muted = new_muted;
1647 s->save_muted = TRUE;
1648
1649 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1650 }
1651
1652 /* Called from main thread */
1653 pa_bool_t pa_source_update_proplist(pa_source *s, pa_update_mode_t mode, pa_proplist *p) {
1654 pa_source_assert_ref(s);
1655 pa_assert_ctl_context();
1656
1657 if (p)
1658 pa_proplist_update(s->proplist, mode, p);
1659
1660 if (PA_SOURCE_IS_LINKED(s->state)) {
1661 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PROPLIST_CHANGED], s);
1662 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1663 }
1664
1665 return TRUE;
1666 }
1667
1668 /* Called from main thread */
1669 /* FIXME -- this should be dropped and be merged into pa_source_update_proplist() */
1670 void pa_source_set_description(pa_source *s, const char *description) {
1671 const char *old;
1672 pa_source_assert_ref(s);
1673 pa_assert_ctl_context();
1674
1675 if (!description && !pa_proplist_contains(s->proplist, PA_PROP_DEVICE_DESCRIPTION))
1676 return;
1677
1678 old = pa_proplist_gets(s->proplist, PA_PROP_DEVICE_DESCRIPTION);
1679
1680 if (old && description && pa_streq(old, description))
1681 return;
1682
1683 if (description)
1684 pa_proplist_sets(s->proplist, PA_PROP_DEVICE_DESCRIPTION, description);
1685 else
1686 pa_proplist_unset(s->proplist, PA_PROP_DEVICE_DESCRIPTION);
1687
1688 if (PA_SOURCE_IS_LINKED(s->state)) {
1689 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1690 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PROPLIST_CHANGED], s);
1691 }
1692 }
1693
1694 /* Called from main thread */
1695 unsigned pa_source_linked_by(pa_source *s) {
1696 pa_source_assert_ref(s);
1697 pa_assert_ctl_context();
1698 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1699
1700 return pa_idxset_size(s->outputs);
1701 }
1702
1703 /* Called from main thread */
1704 unsigned pa_source_used_by(pa_source *s) {
1705 unsigned ret;
1706
1707 pa_source_assert_ref(s);
1708 pa_assert_ctl_context();
1709 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1710
1711 ret = pa_idxset_size(s->outputs);
1712 pa_assert(ret >= s->n_corked);
1713
1714 return ret - s->n_corked;
1715 }
1716
1717 /* Called from main thread */
1718 unsigned pa_source_check_suspend(pa_source *s) {
1719 unsigned ret;
1720 pa_source_output *o;
1721 uint32_t idx;
1722
1723 pa_source_assert_ref(s);
1724 pa_assert_ctl_context();
1725
1726 if (!PA_SOURCE_IS_LINKED(s->state))
1727 return 0;
1728
1729 ret = 0;
1730
1731 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1732 pa_source_output_state_t st;
1733
1734 st = pa_source_output_get_state(o);
1735
1736 /* We do not assert here. It is perfectly valid for a source output to
1737 * be in the INIT state (i.e. created, marked done but not yet put)
1738 * and we should not care if it's unlinked as it won't contribute
1739                  * towards our busy status.
1740 */
1741 if (!PA_SOURCE_OUTPUT_IS_LINKED(st))
1742 continue;
1743
1744 if (st == PA_SOURCE_OUTPUT_CORKED)
1745 continue;
1746
1747 if (o->flags & PA_SOURCE_OUTPUT_DONT_INHIBIT_AUTO_SUSPEND)
1748 continue;
1749
1750 ret ++;
1751 }
1752
1753 return ret;
1754 }
1755
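/* Hypothetical usage sketch (kept disabled, not from the original source):
 * the kind of decision an auto-suspend module could make based on the count
 * returned by pa_source_check_suspend() above; PA_SUSPEND_IDLE is the cause
 * such a module would typically pass. */
#if 0
static void example_maybe_suspend(pa_source *s) {
    /* 0 means no linked, uncorked output is left that inhibits auto-suspend */
    if (pa_source_check_suspend(s) == 0)
        pa_source_suspend(s, TRUE, PA_SUSPEND_IDLE);
}
#endif
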
1756 /* Called from the IO thread */
1757 static void sync_output_volumes_within_thread(pa_source *s) {
1758 pa_source_output *o;
1759 void *state = NULL;
1760
1761 pa_source_assert_ref(s);
1762 pa_source_assert_io_context(s);
1763
1764 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
1765 if (pa_cvolume_equal(&o->thread_info.soft_volume, &o->soft_volume))
1766 continue;
1767
1768 o->thread_info.soft_volume = o->soft_volume;
1769 //pa_source_output_request_rewind(o, 0, TRUE, FALSE, FALSE);
1770 }
1771 }
1772
1773 /* Called from the IO thread. Only called for the root source in volume sharing
1774 * cases, except for internal recursive calls. */
1775 static void set_shared_volume_within_thread(pa_source *s) {
1776 pa_source_output *o;
1777 void *state = NULL;
1778
1779 pa_source_assert_ref(s);
1780
1781 PA_MSGOBJECT(s)->process_msg(PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_VOLUME_SYNCED, NULL, 0, NULL);
1782
1783 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
1784 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1785 set_shared_volume_within_thread(o->destination_source);
1786 }
1787 }
1788
1789 /* Called from IO thread, except when it is not */
1790 int pa_source_process_msg(pa_msgobject *object, int code, void *userdata, int64_t offset, pa_memchunk *chunk) {
1791 pa_source *s = PA_SOURCE(object);
1792 pa_source_assert_ref(s);
1793
1794 switch ((pa_source_message_t) code) {
1795
1796 case PA_SOURCE_MESSAGE_ADD_OUTPUT: {
1797 pa_source_output *o = PA_SOURCE_OUTPUT(userdata);
1798
1799 pa_hashmap_put(s->thread_info.outputs, PA_UINT32_TO_PTR(o->index), pa_source_output_ref(o));
1800
1801 if (o->direct_on_input) {
1802 o->thread_info.direct_on_input = o->direct_on_input;
1803 pa_hashmap_put(o->thread_info.direct_on_input->thread_info.direct_outputs, PA_UINT32_TO_PTR(o->index), o);
1804 }
1805
1806 pa_assert(!o->thread_info.attached);
1807 o->thread_info.attached = TRUE;
1808
1809 if (o->attach)
1810 o->attach(o);
1811
1812 pa_source_output_set_state_within_thread(o, o->state);
1813
1814 if (o->thread_info.requested_source_latency != (pa_usec_t) -1)
1815 pa_source_output_set_requested_latency_within_thread(o, o->thread_info.requested_source_latency);
1816
1817 pa_source_output_update_max_rewind(o, s->thread_info.max_rewind);
1818
1819 /* We don't just invalidate the requested latency here,
1820 * because if we are in a move we might need to fix up the
1821 * requested latency. */
1822 pa_source_output_set_requested_latency_within_thread(o, o->thread_info.requested_source_latency);
1823
1824 /* In flat volume mode we need to update the volume as
1825 * well */
1826 return object->process_msg(object, PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL);
1827 }
1828
1829 case PA_SOURCE_MESSAGE_REMOVE_OUTPUT: {
1830 pa_source_output *o = PA_SOURCE_OUTPUT(userdata);
1831
1832 pa_source_output_set_state_within_thread(o, o->state);
1833
1834 if (o->detach)
1835 o->detach(o);
1836
1837 pa_assert(o->thread_info.attached);
1838 o->thread_info.attached = FALSE;
1839
1840 if (o->thread_info.direct_on_input) {
1841 pa_hashmap_remove(o->thread_info.direct_on_input->thread_info.direct_outputs, PA_UINT32_TO_PTR(o->index));
1842 o->thread_info.direct_on_input = NULL;
1843 }
1844
1845 if (pa_hashmap_remove(s->thread_info.outputs, PA_UINT32_TO_PTR(o->index)))
1846 pa_source_output_unref(o);
1847
1848 pa_source_invalidate_requested_latency(s, TRUE);
1849
1850 /* In flat volume mode we need to update the volume as
1851 * well */
1852 return object->process_msg(object, PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL);
1853 }
1854
1855 case PA_SOURCE_MESSAGE_SET_SHARED_VOLUME: {
1856 pa_source *root_source = s;
1857
1858 while (root_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)
1859 root_source = root_source->output_from_master->source;
1860
1861 set_shared_volume_within_thread(root_source);
1862 return 0;
1863 }
1864
1865 case PA_SOURCE_MESSAGE_SET_VOLUME_SYNCED:
1866
1867 if (s->flags & PA_SOURCE_SYNC_VOLUME) {
1868 s->set_volume(s);
1869 pa_source_volume_change_push(s);
1870 }
1871 /* Fall through ... */
1872
1873 case PA_SOURCE_MESSAGE_SET_VOLUME:
1874
1875 if (!pa_cvolume_equal(&s->thread_info.soft_volume, &s->soft_volume)) {
1876 s->thread_info.soft_volume = s->soft_volume;
1877 }
1878
1879 /* Fall through ... */
1880
1881 case PA_SOURCE_MESSAGE_SYNC_VOLUMES:
1882 sync_output_volumes_within_thread(s);
1883 return 0;
1884
1885 case PA_SOURCE_MESSAGE_GET_VOLUME:
1886
1887 if ((s->flags & PA_SOURCE_SYNC_VOLUME) && s->get_volume) {
1888 s->get_volume(s);
1889 pa_source_volume_change_flush(s);
1890 pa_sw_cvolume_divide(&s->thread_info.current_hw_volume, &s->real_volume, &s->soft_volume);
1891 }
1892
1893             /* In case the source implementor reset the SW volume. */
1894 if (!pa_cvolume_equal(&s->thread_info.soft_volume, &s->soft_volume)) {
1895 s->thread_info.soft_volume = s->soft_volume;
1896 }
1897
1898 return 0;
1899
1900 case PA_SOURCE_MESSAGE_SET_MUTE:
1901
1902 if (s->thread_info.soft_muted != s->muted) {
1903 s->thread_info.soft_muted = s->muted;
1904 }
1905
1906 if (s->flags & PA_SOURCE_SYNC_VOLUME && s->set_mute)
1907 s->set_mute(s);
1908
1909 return 0;
1910
1911 case PA_SOURCE_MESSAGE_GET_MUTE:
1912
1913 if (s->flags & PA_SOURCE_SYNC_VOLUME && s->get_mute)
1914 s->get_mute(s);
1915
1916 return 0;
1917
1918 case PA_SOURCE_MESSAGE_SET_STATE: {
1919
1920 pa_bool_t suspend_change =
1921 (s->thread_info.state == PA_SOURCE_SUSPENDED && PA_SOURCE_IS_OPENED(PA_PTR_TO_UINT(userdata))) ||
1922 (PA_SOURCE_IS_OPENED(s->thread_info.state) && PA_PTR_TO_UINT(userdata) == PA_SOURCE_SUSPENDED);
1923
1924 s->thread_info.state = PA_PTR_TO_UINT(userdata);
1925
1926 if (suspend_change) {
1927 pa_source_output *o;
1928 void *state = NULL;
1929
1930 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL)))
1931 if (o->suspend_within_thread)
1932 o->suspend_within_thread(o, s->thread_info.state == PA_SOURCE_SUSPENDED);
1933 }
1934
1935 return 0;
1936 }
1937
1938 case PA_SOURCE_MESSAGE_DETACH:
1939
1940 /* Detach all streams */
1941 pa_source_detach_within_thread(s);
1942 return 0;
1943
1944 case PA_SOURCE_MESSAGE_ATTACH:
1945
1946 /* Reattach all streams */
1947 pa_source_attach_within_thread(s);
1948 return 0;
1949
1950 case PA_SOURCE_MESSAGE_GET_REQUESTED_LATENCY: {
1951
1952 pa_usec_t *usec = userdata;
1953 *usec = pa_source_get_requested_latency_within_thread(s);
1954
1955             /* Yes, that's right: the IO thread will see -1 when no
1956              * explicit requested latency is configured; the main
1957              * thread will see max_latency */
1958 if (*usec == (pa_usec_t) -1)
1959 *usec = s->thread_info.max_latency;
1960
1961 return 0;
1962 }
1963
1964 case PA_SOURCE_MESSAGE_SET_LATENCY_RANGE: {
1965 pa_usec_t *r = userdata;
1966
1967 pa_source_set_latency_range_within_thread(s, r[0], r[1]);
1968
1969 return 0;
1970 }
1971
1972 case PA_SOURCE_MESSAGE_GET_LATENCY_RANGE: {
1973 pa_usec_t *r = userdata;
1974
1975 r[0] = s->thread_info.min_latency;
1976 r[1] = s->thread_info.max_latency;
1977
1978 return 0;
1979 }
1980
1981 case PA_SOURCE_MESSAGE_GET_FIXED_LATENCY:
1982
1983 *((pa_usec_t*) userdata) = s->thread_info.fixed_latency;
1984 return 0;
1985
1986 case PA_SOURCE_MESSAGE_SET_FIXED_LATENCY:
1987
1988 pa_source_set_fixed_latency_within_thread(s, (pa_usec_t) offset);
1989 return 0;
1990
1991 case PA_SOURCE_MESSAGE_GET_MAX_REWIND:
1992
1993 *((size_t*) userdata) = s->thread_info.max_rewind;
1994 return 0;
1995
1996 case PA_SOURCE_MESSAGE_SET_MAX_REWIND:
1997
1998 pa_source_set_max_rewind_within_thread(s, (size_t) offset);
1999 return 0;
2000
2001 case PA_SOURCE_MESSAGE_GET_LATENCY:
2002
2003 if (s->monitor_of) {
2004 *((pa_usec_t*) userdata) = 0;
2005 return 0;
2006 }
2007
2008             /* Implementors need to override this implementation! */
2009 return -1;
2010
2011 case PA_SOURCE_MESSAGE_SET_PORT:
2012
2013 pa_assert(userdata);
2014 if (s->set_port) {
2015 struct source_message_set_port *msg_data = userdata;
2016 msg_data->ret = s->set_port(s, msg_data->port);
2017 }
2018 return 0;
2019
2020 case PA_SOURCE_MESSAGE_UPDATE_VOLUME_AND_MUTE:
2021             /* This message is sent from the IO thread and handled in the main thread. */
2022 pa_assert_ctl_context();
2023
2024 pa_source_get_volume(s, TRUE);
2025 pa_source_get_mute(s, TRUE);
2026 return 0;
2027
2028 case PA_SOURCE_MESSAGE_MAX:
2029 ;
2030 }
2031
2032 return -1;
2033 }
2034
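/* Hypothetical sketch (kept disabled, not from the original source): how a
 * source implementation typically layers its own message handler on top of
 * the generic pa_source_process_msg() above, falling back to it for anything
 * it does not handle itself. EXAMPLE_MESSAGE_FOO and the function name are
 * made up; implementation-defined codes conventionally start at
 * PA_SOURCE_MESSAGE_MAX. */
#if 0
enum {
    EXAMPLE_MESSAGE_FOO = PA_SOURCE_MESSAGE_MAX
};

static int example_process_msg(pa_msgobject *o, int code, void *userdata, int64_t offset, pa_memchunk *chunk) {
    pa_source *s = PA_SOURCE(o);

    switch (code) {

        case EXAMPLE_MESSAGE_FOO:
            /* driver-specific handling would go here */
            return 0;
    }

    /* everything else is handled by the generic implementation */
    return pa_source_process_msg(o, code, userdata, offset, chunk);
}
#endif
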
2035 /* Called from main thread */
2036 int pa_source_suspend_all(pa_core *c, pa_bool_t suspend, pa_suspend_cause_t cause) {
2037 pa_source *source;
2038 uint32_t idx;
2039 int ret = 0;
2040
2041 pa_core_assert_ref(c);
2042 pa_assert_ctl_context();
2043 pa_assert(cause != 0);
2044
2045 for (source = PA_SOURCE(pa_idxset_first(c->sources, &idx)); source; source = PA_SOURCE(pa_idxset_next(c->sources, &idx))) {
2046 int r;
2047
2048 if (source->monitor_of)
2049 continue;
2050
2051 if ((r = pa_source_suspend(source, suspend, cause)) < 0)
2052 ret = r;
2053 }
2054
2055 return ret;
2056 }
2057
2058 /* Called from main thread */
2059 void pa_source_detach(pa_source *s) {
2060 pa_source_assert_ref(s);
2061 pa_assert_ctl_context();
2062 pa_assert(PA_SOURCE_IS_LINKED(s->state));
2063
2064 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_DETACH, NULL, 0, NULL) == 0);
2065 }
2066
2067 /* Called from main thread */
2068 void pa_source_attach(pa_source *s) {
2069 pa_source_assert_ref(s);
2070 pa_assert_ctl_context();
2071 pa_assert(PA_SOURCE_IS_LINKED(s->state));
2072
2073 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_ATTACH, NULL, 0, NULL) == 0);
2074 }
2075
2076 /* Called from IO thread */
2077 void pa_source_detach_within_thread(pa_source *s) {
2078 pa_source_output *o;
2079 void *state = NULL;
2080
2081 pa_source_assert_ref(s);
2082 pa_source_assert_io_context(s);
2083 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
2084
2085 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2086 if (o->detach)
2087 o->detach(o);
2088 }
2089
2090 /* Called from IO thread */
2091 void pa_source_attach_within_thread(pa_source *s) {
2092 pa_source_output *o;
2093 void *state = NULL;
2094
2095 pa_source_assert_ref(s);
2096 pa_source_assert_io_context(s);
2097 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
2098
2099 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2100 if (o->attach)
2101 o->attach(o);
2102 }
2103
2104 /* Called from IO thread */
2105 pa_usec_t pa_source_get_requested_latency_within_thread(pa_source *s) {
2106 pa_usec_t result = (pa_usec_t) -1;
2107 pa_source_output *o;
2108 void *state = NULL;
2109
2110 pa_source_assert_ref(s);
2111 pa_source_assert_io_context(s);
2112
2113 if (!(s->flags & PA_SOURCE_DYNAMIC_LATENCY))
2114 return PA_CLAMP(s->thread_info.fixed_latency, s->thread_info.min_latency, s->thread_info.max_latency);
2115
2116 if (s->thread_info.requested_latency_valid)
2117 return s->thread_info.requested_latency;
2118
2119 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2120 if (o->thread_info.requested_source_latency != (pa_usec_t) -1 &&
2121 (result == (pa_usec_t) -1 || result > o->thread_info.requested_source_latency))
2122 result = o->thread_info.requested_source_latency;
2123
2124 if (result != (pa_usec_t) -1)
2125 result = PA_CLAMP(result, s->thread_info.min_latency, s->thread_info.max_latency);
2126
2127 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2128 /* Only cache this if we are fully set up */
2129 s->thread_info.requested_latency = result;
2130 s->thread_info.requested_latency_valid = TRUE;
2131 }
2132
2133 return result;
2134 }
2135
2136 /* Called from main thread */
2137 pa_usec_t pa_source_get_requested_latency(pa_source *s) {
2138 pa_usec_t usec = 0;
2139
2140 pa_source_assert_ref(s);
2141 pa_assert_ctl_context();
2142 pa_assert(PA_SOURCE_IS_LINKED(s->state));
2143
2144 if (s->state == PA_SOURCE_SUSPENDED)
2145 return 0;
2146
2147 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_REQUESTED_LATENCY, &usec, 0, NULL) == 0);
2148
2149 return usec;
2150 }
2151
2152 /* Called from IO thread */
2153 void pa_source_set_max_rewind_within_thread(pa_source *s, size_t max_rewind) {
2154 pa_source_output *o;
2155 void *state = NULL;
2156
2157 pa_source_assert_ref(s);
2158 pa_source_assert_io_context(s);
2159
2160 if (max_rewind == s->thread_info.max_rewind)
2161 return;
2162
2163 s->thread_info.max_rewind = max_rewind;
2164
2165 if (PA_SOURCE_IS_LINKED(s->thread_info.state))
2166 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2167 pa_source_output_update_max_rewind(o, s->thread_info.max_rewind);
2168 }
2169
2170 /* Called from main thread */
2171 void pa_source_set_max_rewind(pa_source *s, size_t max_rewind) {
2172 pa_source_assert_ref(s);
2173 pa_assert_ctl_context();
2174
2175 if (PA_SOURCE_IS_LINKED(s->state))
2176 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MAX_REWIND, NULL, max_rewind, NULL) == 0);
2177 else
2178 pa_source_set_max_rewind_within_thread(s, max_rewind);
2179 }
2180
2181 /* Called from IO thread */
2182 void pa_source_invalidate_requested_latency(pa_source *s, pa_bool_t dynamic) {
2183 pa_source_output *o;
2184 void *state = NULL;
2185
2186 pa_source_assert_ref(s);
2187 pa_source_assert_io_context(s);
2188
2189 if ((s->flags & PA_SOURCE_DYNAMIC_LATENCY))
2190 s->thread_info.requested_latency_valid = FALSE;
2191 else if (dynamic)
2192 return;
2193
2194 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2195
2196 if (s->update_requested_latency)
2197 s->update_requested_latency(s);
2198
2199 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL)))
2200 if (o->update_source_requested_latency)
2201 o->update_source_requested_latency(o);
2202 }
2203
2204 if (s->monitor_of)
2205 pa_sink_invalidate_requested_latency(s->monitor_of, dynamic);
2206 }
2207
2208 /* Called from main thread */
2209 void pa_source_set_latency_range(pa_source *s, pa_usec_t min_latency, pa_usec_t max_latency) {
2210 pa_source_assert_ref(s);
2211 pa_assert_ctl_context();
2212
2213 /* min_latency == 0: no limit
2214 * min_latency anything else: specified limit
2215 *
2216 * Similar for max_latency */
2217
2218 if (min_latency < ABSOLUTE_MIN_LATENCY)
2219 min_latency = ABSOLUTE_MIN_LATENCY;
2220
2221 if (max_latency <= 0 ||
2222 max_latency > ABSOLUTE_MAX_LATENCY)
2223 max_latency = ABSOLUTE_MAX_LATENCY;
2224
2225 pa_assert(min_latency <= max_latency);
2226
2227 /* Hmm, let's see if someone forgot to set PA_SOURCE_DYNAMIC_LATENCY here... */
2228 pa_assert((min_latency == ABSOLUTE_MIN_LATENCY &&
2229 max_latency == ABSOLUTE_MAX_LATENCY) ||
2230 (s->flags & PA_SOURCE_DYNAMIC_LATENCY));
2231
2232 if (PA_SOURCE_IS_LINKED(s->state)) {
2233 pa_usec_t r[2];
2234
2235 r[0] = min_latency;
2236 r[1] = max_latency;
2237
2238 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_LATENCY_RANGE, r, 0, NULL) == 0);
2239 } else
2240 pa_source_set_latency_range_within_thread(s, min_latency, max_latency);
2241 }
2242
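/* Hypothetical sketch (kept disabled, not from the original source): a driver
 * that sets PA_SOURCE_DYNAMIC_LATENCY would typically declare its supported
 * range once, before pa_source_put(); the bounds below are purely
 * illustrative. */
#if 0
static void example_declare_latency_range(pa_source *s) {
    pa_source_set_latency_range(s, 1 * PA_USEC_PER_MSEC, 2 * PA_USEC_PER_SEC);
}
#endif
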
2243 /* Called from main thread */
2244 void pa_source_get_latency_range(pa_source *s, pa_usec_t *min_latency, pa_usec_t *max_latency) {
2245 pa_source_assert_ref(s);
2246 pa_assert_ctl_context();
2247 pa_assert(min_latency);
2248 pa_assert(max_latency);
2249
2250 if (PA_SOURCE_IS_LINKED(s->state)) {
2251 pa_usec_t r[2] = { 0, 0 };
2252
2253 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_LATENCY_RANGE, r, 0, NULL) == 0);
2254
2255 *min_latency = r[0];
2256 *max_latency = r[1];
2257 } else {
2258 *min_latency = s->thread_info.min_latency;
2259 *max_latency = s->thread_info.max_latency;
2260 }
2261 }
2262
2263 /* Called from IO thread, and from main thread before pa_source_put() is called */
2264 void pa_source_set_latency_range_within_thread(pa_source *s, pa_usec_t min_latency, pa_usec_t max_latency) {
2265 pa_source_assert_ref(s);
2266 pa_source_assert_io_context(s);
2267
2268 pa_assert(min_latency >= ABSOLUTE_MIN_LATENCY);
2269 pa_assert(max_latency <= ABSOLUTE_MAX_LATENCY);
2270 pa_assert(min_latency <= max_latency);
2271
2272 /* Hmm, let's see if someone forgot to set PA_SOURCE_DYNAMIC_LATENCY here... */
2273 pa_assert((min_latency == ABSOLUTE_MIN_LATENCY &&
2274 max_latency == ABSOLUTE_MAX_LATENCY) ||
2275 (s->flags & PA_SOURCE_DYNAMIC_LATENCY) ||
2276 s->monitor_of);
2277
2278 if (s->thread_info.min_latency == min_latency &&
2279 s->thread_info.max_latency == max_latency)
2280 return;
2281
2282 s->thread_info.min_latency = min_latency;
2283 s->thread_info.max_latency = max_latency;
2284
2285 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2286 pa_source_output *o;
2287 void *state = NULL;
2288
2289 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2290 if (o->update_source_latency_range)
2291 o->update_source_latency_range(o);
2292 }
2293
2294 pa_source_invalidate_requested_latency(s, FALSE);
2295 }
2296
2297 /* Called from main thread, before the source is put */
2298 void pa_source_set_fixed_latency(pa_source *s, pa_usec_t latency) {
2299 pa_source_assert_ref(s);
2300 pa_assert_ctl_context();
2301
2302 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY) {
2303 pa_assert(latency == 0);
2304 return;
2305 }
2306
2307 if (latency < ABSOLUTE_MIN_LATENCY)
2308 latency = ABSOLUTE_MIN_LATENCY;
2309
2310 if (latency > ABSOLUTE_MAX_LATENCY)
2311 latency = ABSOLUTE_MAX_LATENCY;
2312
2313 if (PA_SOURCE_IS_LINKED(s->state))
2314 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_FIXED_LATENCY, NULL, (int64_t) latency, NULL) == 0);
2315 else
2316 s->thread_info.fixed_latency = latency;
2317 }
2318
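/* Hypothetical sketch (kept disabled, not from the original source): a driver
 * without PA_SOURCE_DYNAMIC_LATENCY usually derives its fixed latency from
 * its buffer size; buffer_bytes and the function name are made up. */
#if 0
static void example_declare_fixed_latency(pa_source *s, size_t buffer_bytes) {
    pa_source_set_fixed_latency(s, pa_bytes_to_usec(buffer_bytes, &s->sample_spec));
}
#endif
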
2319 /* Called from main thread */
2320 pa_usec_t pa_source_get_fixed_latency(pa_source *s) {
2321 pa_usec_t latency;
2322
2323 pa_source_assert_ref(s);
2324 pa_assert_ctl_context();
2325
2326 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY)
2327 return 0;
2328
2329 if (PA_SOURCE_IS_LINKED(s->state))
2330 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_FIXED_LATENCY, &latency, 0, NULL) == 0);
2331 else
2332 latency = s->thread_info.fixed_latency;
2333
2334 return latency;
2335 }
2336
2337 /* Called from IO thread */
2338 void pa_source_set_fixed_latency_within_thread(pa_source *s, pa_usec_t latency) {
2339 pa_source_assert_ref(s);
2340 pa_source_assert_io_context(s);
2341
2342 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY) {
2343 pa_assert(latency == 0);
2344 return;
2345 }
2346
2347 pa_assert(latency >= ABSOLUTE_MIN_LATENCY);
2348 pa_assert(latency <= ABSOLUTE_MAX_LATENCY);
2349
2350 if (s->thread_info.fixed_latency == latency)
2351 return;
2352
2353 s->thread_info.fixed_latency = latency;
2354
2355 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2356 pa_source_output *o;
2357 void *state = NULL;
2358
2359 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2360 if (o->update_source_fixed_latency)
2361 o->update_source_fixed_latency(o);
2362 }
2363
2364 pa_source_invalidate_requested_latency(s, FALSE);
2365 }
2366
2367 /* Called from main thread */
2368 size_t pa_source_get_max_rewind(pa_source *s) {
2369 size_t r;
2370 pa_assert_ctl_context();
2371 pa_source_assert_ref(s);
2372
2373 if (!PA_SOURCE_IS_LINKED(s->state))
2374 return s->thread_info.max_rewind;
2375
2376 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_MAX_REWIND, &r, 0, NULL) == 0);
2377
2378 return r;
2379 }
2380
2381 /* Called from main context */
2382 int pa_source_set_port(pa_source *s, const char *name, pa_bool_t save) {
2383 pa_device_port *port;
2384 int ret;
2385
2386 pa_source_assert_ref(s);
2387 pa_assert_ctl_context();
2388
2389 if (!s->set_port) {
2390 pa_log_debug("set_port() operation not implemented for source %u \"%s\"", s->index, s->name);
2391 return -PA_ERR_NOTIMPLEMENTED;
2392 }
2393
2394 if (!s->ports)
2395 return -PA_ERR_NOENTITY;
2396
2397 if (!(port = pa_hashmap_get(s->ports, name)))
2398 return -PA_ERR_NOENTITY;
2399
2400 if (s->active_port == port) {
2401 s->save_port = s->save_port || save;
2402 return 0;
2403 }
2404
2405 if (s->flags & PA_SOURCE_SYNC_VOLUME) {
2406 struct source_message_set_port msg = { .port = port, .ret = 0 };
2407 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_PORT, &msg, 0, NULL) == 0);
2408 ret = msg.ret;
2409 }
2410 else
2411 ret = s->set_port(s, port);
2412
2413 if (ret < 0)
2414 return -PA_ERR_NOENTITY;
2415
2416 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
2417
2418 pa_log_info("Changed port of source %u \"%s\" to %s", s->index, s->name, port->name);
2419
2420 s->active_port = port;
2421 s->save_port = save;
2422
2423 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PORT_CHANGED], s);
2424
2425 return 0;
2426 }
2427
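/* Hypothetical usage sketch (kept disabled, not from the original source):
 * switching the active port from the main thread with pa_source_set_port();
 * the port name is made up and must match a key in s->ports. */
#if 0
static int example_switch_to_mic(pa_source *s) {
    /* save=TRUE marks the choice as user-saved so restore modules can persist it */
    return pa_source_set_port(s, "analog-input-microphone", TRUE);
}
#endif
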
2428 PA_STATIC_FLIST_DECLARE(pa_source_volume_change, 0, pa_xfree);
2429
2430 /* Called from the IO thread. */
2431 static pa_source_volume_change *pa_source_volume_change_new(pa_source *s) {
2432 pa_source_volume_change *c;
2433 if (!(c = pa_flist_pop(PA_STATIC_FLIST_GET(pa_source_volume_change))))
2434 c = pa_xnew(pa_source_volume_change, 1);
2435
2436 PA_LLIST_INIT(pa_source_volume_change, c);
2437 c->at = 0;
2438 pa_cvolume_reset(&c->hw_volume, s->sample_spec.channels);
2439 return c;
2440 }
2441
2442 /* Called from the IO thread. */
2443 static void pa_source_volume_change_free(pa_source_volume_change *c) {
2444 pa_assert(c);
2445 if (pa_flist_push(PA_STATIC_FLIST_GET(pa_source_volume_change), c) < 0)
2446 pa_xfree(c);
2447 }
2448
2449 /* Called from the IO thread. */
2450 void pa_source_volume_change_push(pa_source *s) {
2451 pa_source_volume_change *c = NULL;
2452 pa_source_volume_change *nc = NULL;
2453 uint32_t safety_margin = s->thread_info.volume_change_safety_margin;
2454
2455 const char *direction = NULL;
2456
2457 pa_assert(s);
2458 nc = pa_source_volume_change_new(s);
2459
2460     /* NOTE: There are already more different volumes in pa_source than I can remember.
2461      * Adding one more volume for HW would get rid of this, but I am trying
2462      * to survive with the ones we already have. */
2463 pa_sw_cvolume_divide(&nc->hw_volume, &s->real_volume, &s->soft_volume);
2464
2465 if (!s->thread_info.volume_changes && pa_cvolume_equal(&nc->hw_volume, &s->thread_info.current_hw_volume)) {
2466 pa_log_debug("Volume not changing");
2467 pa_source_volume_change_free(nc);
2468 return;
2469 }
2470
2471 nc->at = pa_source_get_latency_within_thread(s);
2472 nc->at += pa_rtclock_now() + s->thread_info.volume_change_extra_delay;
2473
2474 if (s->thread_info.volume_changes_tail) {
2475 for (c = s->thread_info.volume_changes_tail; c; c = c->prev) {
2476             /* If the volume is going up, let's do it a bit late. If it is going
2477              * down, let's do it a bit early. */
2478 if (pa_cvolume_avg(&nc->hw_volume) > pa_cvolume_avg(&c->hw_volume)) {
2479 if (nc->at + safety_margin > c->at) {
2480 nc->at += safety_margin;
2481 direction = "up";
2482 break;
2483 }
2484 }
2485 else if (nc->at - safety_margin > c->at) {
2486 nc->at -= safety_margin;
2487 direction = "down";
2488 break;
2489 }
2490 }
2491 }
2492
2493 if (c == NULL) {
2494 if (pa_cvolume_avg(&nc->hw_volume) > pa_cvolume_avg(&s->thread_info.current_hw_volume)) {
2495 nc->at += safety_margin;
2496 direction = "up";
2497 } else {
2498 nc->at -= safety_margin;
2499 direction = "down";
2500 }
2501 PA_LLIST_PREPEND(pa_source_volume_change, s->thread_info.volume_changes, nc);
2502 }
2503 else {
2504 PA_LLIST_INSERT_AFTER(pa_source_volume_change, s->thread_info.volume_changes, c, nc);
2505 }
2506
2507 pa_log_debug("Volume going %s to %d at %llu", direction, pa_cvolume_avg(&nc->hw_volume), (long long unsigned) nc->at);
2508
2509 /* We can ignore volume events that came earlier but should happen later than this. */
2510 PA_LLIST_FOREACH(c, nc->next) {
2511 pa_log_debug("Volume change to %d at %llu was dropped", pa_cvolume_avg(&c->hw_volume), (long long unsigned) c->at);
2512 pa_source_volume_change_free(c);
2513 }
2514 nc->next = NULL;
2515 s->thread_info.volume_changes_tail = nc;
2516 }
2517
2518 /* Called from the IO thread. */
2519 static void pa_source_volume_change_flush(pa_source *s) {
2520 pa_source_volume_change *c = s->thread_info.volume_changes;
2521 pa_assert(s);
2522 s->thread_info.volume_changes = NULL;
2523 s->thread_info.volume_changes_tail = NULL;
2524 while (c) {
2525 pa_source_volume_change *next = c->next;
2526 pa_source_volume_change_free(c);
2527 c = next;
2528 }
2529 }
2530
2531 /* Called from the IO thread. */
2532 pa_bool_t pa_source_volume_change_apply(pa_source *s, pa_usec_t *usec_to_next) {
2533 pa_usec_t now = pa_rtclock_now();
2534 pa_bool_t ret = FALSE;
2535
2536 pa_assert(s);
2537
2538 if (!PA_SOURCE_IS_LINKED(s->state)) {
2539 if (usec_to_next)
2540 *usec_to_next = 0;
2541 return ret;
2542 }
2543
2544 pa_assert(s->write_volume);
2545
2546 while (s->thread_info.volume_changes && now >= s->thread_info.volume_changes->at) {
2547 pa_source_volume_change *c = s->thread_info.volume_changes;
2548 PA_LLIST_REMOVE(pa_source_volume_change, s->thread_info.volume_changes, c);
2549 pa_log_debug("Volume change to %d at %llu was written %llu usec late",
2550 pa_cvolume_avg(&c->hw_volume), (long long unsigned) c->at, (long long unsigned) (now - c->at));
2551 ret = TRUE;
2552 s->thread_info.current_hw_volume = c->hw_volume;
2553 pa_source_volume_change_free(c);
2554 }
2555
2556 if (s->write_volume && ret)
2557 s->write_volume(s);
2558
2559 if (s->thread_info.volume_changes) {
2560 if (usec_to_next)
2561 *usec_to_next = s->thread_info.volume_changes->at - now;
2562 if (pa_log_ratelimit(PA_LOG_DEBUG))
2563 pa_log_debug("Next volume change in %lld usec", (long long) (s->thread_info.volume_changes->at - now));
2564 }
2565 else {
2566 if (usec_to_next)
2567 *usec_to_next = 0;
2568 s->thread_info.volume_changes_tail = NULL;
2569 }
2570 return ret;
2571 }
2572
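/* Hypothetical sketch (kept disabled, not from the original source): how a
 * driver's IO loop with deferred hardware volume writes might drive the queue
 * via pa_source_volume_change_apply() above; the rtpoll timer handling is
 * only hinted at and the function name is made up. */
#if 0
static void example_apply_deferred_volumes(pa_source *s, pa_usec_t *next_wakeup) {
    pa_usec_t usec_to_next = 0;

    pa_source_volume_change_apply(s, &usec_to_next);

    /* a non-zero value means a change is still queued; wake up again when it
     * becomes due (0 means the queue is empty) */
    if (usec_to_next > 0)
        *next_wakeup = pa_rtclock_now() + usec_to_next;
}
#endif
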
2573
2574 /* Called from the main thread */
2575 /* Gets the list of formats supported by the source. The members and idxset must
2576 * be freed by the caller. */
2577 pa_idxset* pa_source_get_formats(pa_source *s) {
2578 pa_idxset *ret;
2579
2580 pa_assert(s);
2581
2582 if (s->get_formats) {
2583 /* Source supports format query, all is good */
2584 ret = s->get_formats(s);
2585 } else {
2586 /* Source doesn't support format query, so assume it does PCM */
2587 pa_format_info *f = pa_format_info_new();
2588 f->encoding = PA_ENCODING_PCM;
2589
2590 ret = pa_idxset_new(NULL, NULL);
2591 pa_idxset_put(ret, f, NULL);
2592 }
2593
2594 return ret;
2595 }
2596
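/* Hypothetical usage sketch (kept disabled, not from the original source): as
 * the comment above notes, the caller of pa_source_get_formats() owns the
 * returned idxset and its members and must free both; the freeing call
 * mirrors the one used further below in this file. */
#if 0
static void example_dump_formats(pa_source *s) {
    pa_idxset *formats = pa_source_get_formats(s);
    pa_format_info *f;
    uint32_t idx;

    PA_IDXSET_FOREACH(f, formats, idx)
        pa_log_debug("Source %s supports encoding %d", s->name, (int) f->encoding);

    pa_idxset_free(formats, (pa_free2_cb_t) pa_format_info_free2, NULL);
}
#endif
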
2597 /* Called from the main thread */
2598 /* Checks if the source can accept this format */
2599 pa_bool_t pa_source_check_format(pa_source *s, pa_format_info *f)
2600 {
2601 pa_idxset *formats = NULL;
2602 pa_bool_t ret = FALSE;
2603
2604 pa_assert(s);
2605 pa_assert(f);
2606
2607 formats = pa_source_get_formats(s);
2608
2609 if (formats) {
2610 pa_format_info *finfo_device;
2611 uint32_t i;
2612
2613 PA_IDXSET_FOREACH(finfo_device, formats, i) {
2614 if (pa_format_info_is_compatible(finfo_device, f)) {
2615 ret = TRUE;
2616 break;
2617 }
2618 }
2619
2620 pa_idxset_free(formats, (pa_free2_cb_t) pa_format_info_free2, NULL);
2621 }
2622
2623 return ret;
2624 }
2625
2626 /* Called from the main thread */
2627 /* Calculates the intersection between formats supported by the source and
2628 * in_formats, and returns these, in the order of the source's formats. */
2629 pa_idxset* pa_source_check_formats(pa_source *s, pa_idxset *in_formats) {
2630 pa_idxset *out_formats = pa_idxset_new(NULL, NULL), *source_formats = NULL;
2631 pa_format_info *f_source, *f_in;
2632 uint32_t i, j;
2633
2634 pa_assert(s);
2635
2636 if (!in_formats || pa_idxset_isempty(in_formats))
2637 goto done;
2638
2639 source_formats = pa_source_get_formats(s);
2640
2641 PA_IDXSET_FOREACH(f_source, source_formats, i) {
2642 PA_IDXSET_FOREACH(f_in, in_formats, j) {
2643 if (pa_format_info_is_compatible(f_source, f_in))
2644 pa_idxset_put(out_formats, pa_format_info_copy(f_in), NULL);
2645 }
2646 }
2647
2648 done:
2649 if (source_formats)
2650 pa_idxset_free(source_formats, (pa_free2_cb_t) pa_format_info_free2, NULL);
2651
2652 return out_formats;
2653 }