1 /***
2 This file is part of PulseAudio.
3
4 Copyright 2004-2006 Lennart Poettering
5 Copyright 2006 Pierre Ossman <ossman@cendio.se> for Cendio AB
6
7 PulseAudio is free software; you can redistribute it and/or modify
8 it under the terms of the GNU Lesser General Public License as published
9 by the Free Software Foundation; either version 2.1 of the License,
10 or (at your option) any later version.
11
12 PulseAudio is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU Lesser General Public License
18 along with PulseAudio; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
20 USA.
21 ***/
22
23 #ifdef HAVE_CONFIG_H
24 #include <config.h>
25 #endif
26
27 #include <stdio.h>
28 #include <stdlib.h>
29
30 #include <pulse/format.h>
31 #include <pulse/utf8.h>
32 #include <pulse/xmalloc.h>
33 #include <pulse/timeval.h>
34 #include <pulse/util.h>
35 #include <pulse/rtclock.h>
36 #include <pulse/internal.h>
37
38 #include <pulsecore/core-util.h>
39 #include <pulsecore/source-output.h>
40 #include <pulsecore/namereg.h>
41 #include <pulsecore/core-subscribe.h>
42 #include <pulsecore/log.h>
43 #include <pulsecore/sample-util.h>
44 #include <pulsecore/flist.h>
45
46 #include "source.h"
47
48 #define ABSOLUTE_MIN_LATENCY (500)
49 #define ABSOLUTE_MAX_LATENCY (10*PA_USEC_PER_SEC)
50 #define DEFAULT_FIXED_LATENCY (250*PA_USEC_PER_MSEC)
51
52 PA_DEFINE_PUBLIC_CLASS(pa_source, pa_msgobject);
53
54 struct pa_source_volume_change {
55 pa_usec_t at;
56 pa_cvolume hw_volume;
57
58 PA_LLIST_FIELDS(pa_source_volume_change);
59 };
60
61 struct source_message_set_port {
62 pa_device_port *port;
63 int ret;
64 };
65
66 static void source_free(pa_object *o);
67
68 static void pa_source_volume_change_push(pa_source *s);
69 static void pa_source_volume_change_flush(pa_source *s);
70
71 pa_source_new_data* pa_source_new_data_init(pa_source_new_data *data) {
72 pa_assert(data);
73
74 pa_zero(*data);
75 data->proplist = pa_proplist_new();
76
77 return data;
78 }
79
80 void pa_source_new_data_set_name(pa_source_new_data *data, const char *name) {
81 pa_assert(data);
82
83 pa_xfree(data->name);
84 data->name = pa_xstrdup(name);
85 }
86
87 void pa_source_new_data_set_sample_spec(pa_source_new_data *data, const pa_sample_spec *spec) {
88 pa_assert(data);
89
90 if ((data->sample_spec_is_set = !!spec))
91 data->sample_spec = *spec;
92 }
93
94 void pa_source_new_data_set_channel_map(pa_source_new_data *data, const pa_channel_map *map) {
95 pa_assert(data);
96
97 if ((data->channel_map_is_set = !!map))
98 data->channel_map = *map;
99 }
100
101 void pa_source_new_data_set_volume(pa_source_new_data *data, const pa_cvolume *volume) {
102 pa_assert(data);
103
104 if ((data->volume_is_set = !!volume))
105 data->volume = *volume;
106 }
107
108 void pa_source_new_data_set_muted(pa_source_new_data *data, pa_bool_t mute) {
109 pa_assert(data);
110
111 data->muted_is_set = TRUE;
112 data->muted = !!mute;
113 }
114
115 void pa_source_new_data_set_port(pa_source_new_data *data, const char *port) {
116 pa_assert(data);
117
118 pa_xfree(data->active_port);
119 data->active_port = pa_xstrdup(port);
120 }
121
122 void pa_source_new_data_done(pa_source_new_data *data) {
123 pa_assert(data);
124
125 pa_proplist_free(data->proplist);
126
127 if (data->ports) {
128 pa_device_port *p;
129
130 while ((p = pa_hashmap_steal_first(data->ports)))
131 pa_device_port_free(p);
132
133 pa_hashmap_free(data->ports, NULL, NULL);
134 }
135
136 pa_xfree(data->name);
137 pa_xfree(data->active_port);
138 }
139
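/* Illustrative sketch (not part of this file): roughly how a source
 * implementor, e.g. a driver module, is expected to combine the
 * pa_source_new_data helpers above with pa_source_new(), the callback
 * setters and pa_source_put() further below. Names such as my_module,
 * my_userdata, my_process_msg, my_thread_mq and my_rtpoll are hypothetical.
 *
 *     pa_source_new_data data;
 *     pa_source *s;
 *     pa_sample_spec ss = { .format = PA_SAMPLE_S16LE, .rate = 44100, .channels = 2 };
 *
 *     pa_source_new_data_init(&data);
 *     data.driver = __FILE__;
 *     data.module = my_module;
 *     pa_source_new_data_set_name(&data, "my_source");
 *     pa_source_new_data_set_sample_spec(&data, &ss);
 *     pa_proplist_sets(data.proplist, PA_PROP_DEVICE_DESCRIPTION, "My example source");
 *
 *     s = pa_source_new(my_module->core, &data, PA_SOURCE_LATENCY);
 *     pa_source_new_data_done(&data);
 *
 *     if (!s)
 *         goto fail;
 *
 *     s->userdata = my_userdata;
 *     s->parent.process_msg = my_process_msg;
 *     pa_source_set_asyncmsgq(s, my_thread_mq.inq);
 *     pa_source_set_rtpoll(s, my_rtpoll);
 *
 *     pa_source_put(s);
 */
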
140 /* Called from main context */
141 static void reset_callbacks(pa_source *s) {
142 pa_assert(s);
143
144 s->set_state = NULL;
145 s->get_volume = NULL;
146 s->set_volume = NULL;
147 s->get_mute = NULL;
148 s->set_mute = NULL;
149 s->update_requested_latency = NULL;
150 s->set_port = NULL;
151 s->get_formats = NULL;
152 }
153
154 /* Called from main context */
155 pa_source* pa_source_new(
156 pa_core *core,
157 pa_source_new_data *data,
158 pa_source_flags_t flags) {
159
160 pa_source *s;
161 const char *name;
162 char st[PA_SAMPLE_SPEC_SNPRINT_MAX], cm[PA_CHANNEL_MAP_SNPRINT_MAX];
163 char *pt;
164
165 pa_assert(core);
166 pa_assert(data);
167 pa_assert(data->name);
168 pa_assert_ctl_context();
169
170 s = pa_msgobject_new(pa_source);
171
172 if (!(name = pa_namereg_register(core, data->name, PA_NAMEREG_SOURCE, s, data->namereg_fail))) {
173 pa_log_debug("Failed to register name %s.", data->name);
174 pa_xfree(s);
175 return NULL;
176 }
177
178 pa_source_new_data_set_name(data, name);
179
180 if (pa_hook_fire(&core->hooks[PA_CORE_HOOK_SOURCE_NEW], data) < 0) {
181 pa_xfree(s);
182 pa_namereg_unregister(core, name);
183 return NULL;
184 }
185
186 /* FIXME, need to free s here on failure */
187
188 pa_return_null_if_fail(!data->driver || pa_utf8_valid(data->driver));
189 pa_return_null_if_fail(data->name && pa_utf8_valid(data->name) && data->name[0]);
190
191 pa_return_null_if_fail(data->sample_spec_is_set && pa_sample_spec_valid(&data->sample_spec));
192
193 if (!data->channel_map_is_set)
194 pa_return_null_if_fail(pa_channel_map_init_auto(&data->channel_map, data->sample_spec.channels, PA_CHANNEL_MAP_DEFAULT));
195
196 pa_return_null_if_fail(pa_channel_map_valid(&data->channel_map));
197 pa_return_null_if_fail(data->channel_map.channels == data->sample_spec.channels);
198
199 /* FIXME: There should probably be a general function for checking whether
200 * the source volume is allowed to be set, like there is for source outputs. */
201 pa_assert(!data->volume_is_set || !(flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
202
203 if (!data->volume_is_set) {
204 pa_cvolume_reset(&data->volume, data->sample_spec.channels);
205 data->save_volume = FALSE;
206 }
207
208 pa_return_null_if_fail(pa_cvolume_valid(&data->volume));
209 pa_return_null_if_fail(pa_cvolume_compatible(&data->volume, &data->sample_spec));
210
211 if (!data->muted_is_set)
212 data->muted = FALSE;
213
214 if (data->card)
215 pa_proplist_update(data->proplist, PA_UPDATE_MERGE, data->card->proplist);
216
217 pa_device_init_description(data->proplist);
218 pa_device_init_icon(data->proplist, FALSE);
219 pa_device_init_intended_roles(data->proplist);
220
221 if (pa_hook_fire(&core->hooks[PA_CORE_HOOK_SOURCE_FIXATE], data) < 0) {
222 pa_xfree(s);
223 pa_namereg_unregister(core, name);
224 return NULL;
225 }
226
227 s->parent.parent.free = source_free;
228 s->parent.process_msg = pa_source_process_msg;
229
230 s->core = core;
231 s->state = PA_SOURCE_INIT;
232 s->flags = flags;
233 s->priority = 0;
234 s->suspend_cause = 0;
235 s->name = pa_xstrdup(name);
236 s->proplist = pa_proplist_copy(data->proplist);
237 s->driver = pa_xstrdup(pa_path_get_filename(data->driver));
238 s->module = data->module;
239 s->card = data->card;
240
241 s->priority = pa_device_init_priority(s->proplist);
242
243 s->sample_spec = data->sample_spec;
244 s->channel_map = data->channel_map;
245
246 s->outputs = pa_idxset_new(NULL, NULL);
247 s->n_corked = 0;
248 s->monitor_of = NULL;
249 s->output_from_master = NULL;
250
251 s->reference_volume = s->real_volume = data->volume;
252 pa_cvolume_reset(&s->soft_volume, s->sample_spec.channels);
253 s->base_volume = PA_VOLUME_NORM;
254 s->n_volume_steps = PA_VOLUME_NORM+1;
255 s->muted = data->muted;
256 s->refresh_volume = s->refresh_muted = FALSE;
257
258 reset_callbacks(s);
259 s->userdata = NULL;
260
261 s->asyncmsgq = NULL;
262
263 /* As a minor optimization we just steal the list instead of
264 * copying it here */
265 s->ports = data->ports;
266 data->ports = NULL;
267
268 s->active_port = NULL;
269 s->save_port = FALSE;
270
271 if (data->active_port && s->ports)
272 if ((s->active_port = pa_hashmap_get(s->ports, data->active_port)))
273 s->save_port = data->save_port;
274
275 if (!s->active_port && s->ports) {
276 void *state;
277 pa_device_port *p;
278
279 PA_HASHMAP_FOREACH(p, s->ports, state)
280 if (!s->active_port || p->priority > s->active_port->priority)
281 s->active_port = p;
282 }
283
284 s->save_volume = data->save_volume;
285 s->save_muted = data->save_muted;
286
287 pa_silence_memchunk_get(
288 &core->silence_cache,
289 core->mempool,
290 &s->silence,
291 &s->sample_spec,
292 0);
293
294 s->thread_info.rtpoll = NULL;
295 s->thread_info.outputs = pa_hashmap_new(pa_idxset_trivial_hash_func, pa_idxset_trivial_compare_func);
296 s->thread_info.soft_volume = s->soft_volume;
297 s->thread_info.soft_muted = s->muted;
298 s->thread_info.state = s->state;
299 s->thread_info.max_rewind = 0;
300 s->thread_info.requested_latency_valid = FALSE;
301 s->thread_info.requested_latency = 0;
302 s->thread_info.min_latency = ABSOLUTE_MIN_LATENCY;
303 s->thread_info.max_latency = ABSOLUTE_MAX_LATENCY;
304 s->thread_info.fixed_latency = flags & PA_SOURCE_DYNAMIC_LATENCY ? 0 : DEFAULT_FIXED_LATENCY;
305
306 PA_LLIST_HEAD_INIT(pa_source_volume_change, s->thread_info.volume_changes);
307 s->thread_info.volume_changes_tail = NULL;
308 pa_sw_cvolume_multiply(&s->thread_info.current_hw_volume, &s->soft_volume, &s->real_volume);
309 s->thread_info.volume_change_safety_margin = core->sync_volume_safety_margin_usec;
310 s->thread_info.volume_change_extra_delay = core->sync_volume_extra_delay_usec;
311
312 /* FIXME: This should probably be moved to pa_source_put() */
313 pa_assert_se(pa_idxset_put(core->sources, s, &s->index) >= 0);
314
315 if (s->card)
316 pa_assert_se(pa_idxset_put(s->card->sources, s, NULL) >= 0);
317
318 pt = pa_proplist_to_string_sep(s->proplist, "\n ");
319 pa_log_info("Created source %u \"%s\" with sample spec %s and channel map %s\n %s",
320 s->index,
321 s->name,
322 pa_sample_spec_snprint(st, sizeof(st), &s->sample_spec),
323 pa_channel_map_snprint(cm, sizeof(cm), &s->channel_map),
324 pt);
325 pa_xfree(pt);
326
327 return s;
328 }
329
330 /* Called from main context */
331 static int source_set_state(pa_source *s, pa_source_state_t state) {
332 int ret;
333 pa_bool_t suspend_change;
334 pa_source_state_t original_state;
335
336 pa_assert(s);
337 pa_assert_ctl_context();
338
339 if (s->state == state)
340 return 0;
341
342 original_state = s->state;
343
344 suspend_change =
345 (original_state == PA_SOURCE_SUSPENDED && PA_SOURCE_IS_OPENED(state)) ||
346 (PA_SOURCE_IS_OPENED(original_state) && state == PA_SOURCE_SUSPENDED);
347
348 if (s->set_state)
349 if ((ret = s->set_state(s, state)) < 0)
350 return ret;
351
352 if (s->asyncmsgq)
353 if ((ret = pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_STATE, PA_UINT_TO_PTR(state), 0, NULL)) < 0) {
354
355 if (s->set_state)
356 s->set_state(s, original_state);
357
358 return ret;
359 }
360
361 s->state = state;
362
363 if (state != PA_SOURCE_UNLINKED) { /* if we enter UNLINKED state pa_source_unlink() will fire the appropriate events */
364 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_STATE_CHANGED], s);
365 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
366 }
367
368 if (suspend_change) {
369 pa_source_output *o;
370 uint32_t idx;
371
372 /* We're suspending or resuming, tell everyone about it */
373
374 PA_IDXSET_FOREACH(o, s->outputs, idx)
375 if (s->state == PA_SOURCE_SUSPENDED &&
376 (o->flags & PA_SOURCE_OUTPUT_KILL_ON_SUSPEND))
377 pa_source_output_kill(o);
378 else if (o->suspend)
379 o->suspend(o, state == PA_SOURCE_SUSPENDED);
380 }
381
382 return 0;
383 }
384
385 void pa_source_set_get_volume_callback(pa_source *s, pa_source_cb_t cb) {
386 pa_assert(s);
387
388 s->get_volume = cb;
389 }
390
391 void pa_source_set_set_volume_callback(pa_source *s, pa_source_cb_t cb) {
392 pa_source_flags_t flags;
393
394 pa_assert(s);
395 pa_assert(!s->write_volume || cb);
396
397 s->set_volume = cb;
398
399 /* Save the current flags so we can tell if they've changed */
400 flags = s->flags;
401
402 if (cb) {
403 /* The source implementor is responsible for setting decibel volume support */
404 s->flags |= PA_SOURCE_HW_VOLUME_CTRL;
405 } else {
406 s->flags &= ~PA_SOURCE_HW_VOLUME_CTRL;
407 /* See note below in pa_source_put() about volume sharing and decibel volumes */
408 pa_source_enable_decibel_volume(s, !(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
409 }
410
411 /* If the flags have changed after init, let any clients know via a change event */
412 if (s->state != PA_SOURCE_INIT && flags != s->flags)
413 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
414 }
415
416 void pa_source_set_write_volume_callback(pa_source *s, pa_source_cb_t cb) {
417 pa_source_flags_t flags;
418
419 pa_assert(s);
420 pa_assert(!cb || s->set_volume);
421
422 s->write_volume = cb;
423
424 /* Save the current flags so we can tell if they've changed */
425 flags = s->flags;
426
427 if (cb)
428 s->flags |= PA_SOURCE_SYNC_VOLUME;
429 else
430 s->flags &= ~PA_SOURCE_SYNC_VOLUME;
431
432 /* If the flags have changed after init, let any clients know via a change event */
433 if (s->state != PA_SOURCE_INIT && flags != s->flags)
434 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
435 }
436
437 void pa_source_set_get_mute_callback(pa_source *s, pa_source_cb_t cb) {
438 pa_assert(s);
439
440 s->get_mute = cb;
441 }
442
443 void pa_source_set_set_mute_callback(pa_source *s, pa_source_cb_t cb) {
444 pa_source_flags_t flags;
445
446 pa_assert(s);
447
448 s->set_mute = cb;
449
450 /* Save the current flags so we can tell if they've changed */
451 flags = s->flags;
452
453 if (cb)
454 s->flags |= PA_SOURCE_HW_MUTE_CTRL;
455 else
456 s->flags &= ~PA_SOURCE_HW_MUTE_CTRL;
457
458 /* If the flags have changed after init, let any clients know via a change event */
459 if (s->state != PA_SOURCE_INIT && flags != s->flags)
460 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
461 }
462
463 static void enable_flat_volume(pa_source *s, pa_bool_t enable) {
464 pa_source_flags_t flags;
465
466 pa_assert(s);
467
468 /* Always follow the overall user preference here */
469 enable = enable && s->core->flat_volumes;
470
471 /* Save the current flags so we can tell if they've changed */
472 flags = s->flags;
473
474 if (enable)
475 s->flags |= PA_SOURCE_FLAT_VOLUME;
476 else
477 s->flags &= ~PA_SOURCE_FLAT_VOLUME;
478
479 /* If the flags have changed after init, let any clients know via a change event */
480 if (s->state != PA_SOURCE_INIT && flags != s->flags)
481 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
482 }
483
484 void pa_source_enable_decibel_volume(pa_source *s, pa_bool_t enable) {
485 pa_source_flags_t flags;
486
487 pa_assert(s);
488
489 /* Save the current flags so we can tell if they've changed */
490 flags = s->flags;
491
492 if (enable) {
493 s->flags |= PA_SOURCE_DECIBEL_VOLUME;
494 enable_flat_volume(s, TRUE);
495 } else {
496 s->flags &= ~PA_SOURCE_DECIBEL_VOLUME;
497 enable_flat_volume(s, FALSE);
498 }
499
500 /* If the flags have changed after init, let any clients know via a change event */
501 if (s->state != PA_SOURCE_INIT && flags != s->flags)
502 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
503 }
504
505 /* Called from main context */
506 void pa_source_put(pa_source *s) {
507 pa_source_assert_ref(s);
508 pa_assert_ctl_context();
509
510 pa_assert(s->state == PA_SOURCE_INIT);
511 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) || s->output_from_master);
512
513 /* The following fields must be initialized properly when calling _put() */
514 pa_assert(s->asyncmsgq);
515 pa_assert(s->thread_info.min_latency <= s->thread_info.max_latency);
516
517 /* Generally, flags should be initialized via pa_source_new(). As a
518 * special exception we allow some volume related flags to be set
519 * between _new() and _put() by the callback setter functions above.
520 *
521 * Thus we implement a couple of safeguards here which ensure the above
522 * setters were used (or at least the implementor made manual changes
523 * in a compatible way).
524 *
525 * Note: All of the flags set here can change over the lifetime
526 * of the source. */
527 pa_assert(!(s->flags & PA_SOURCE_HW_VOLUME_CTRL) || s->set_volume);
528 pa_assert(!(s->flags & PA_SOURCE_SYNC_VOLUME) || s->write_volume);
529 pa_assert(!(s->flags & PA_SOURCE_HW_MUTE_CTRL) || s->set_mute);
530
531 /* XXX: Currently decibel volume is disabled for all sources that use volume
532 * sharing. When the master source supports decibel volume, it would be good
533 * to have the flag also in the filter source, but currently we don't do that
534 * so that the flags of the filter source never change when it's moved from
535 * a master source to another. One solution for this problem would be to
536 * remove user-visible volume altogether from filter sources when volume
537 * sharing is used, but the current approach was easier to implement... */
538 /* We always support decibel volumes in software, otherwise we leave it to
539 * the source implementor to set this flag as needed.
540 *
541 * Note: This flag can also change over the lifetime of the source. */
542 if (!(s->flags & PA_SOURCE_HW_VOLUME_CTRL) && !(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
543 pa_source_enable_decibel_volume(s, TRUE);
544
545 /* If the source implementor supports dB volumes by itself, we should always
546 * try to enable flat volumes too */
547 if ((s->flags & PA_SOURCE_DECIBEL_VOLUME))
548 enable_flat_volume(s, TRUE);
549
550 if (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) {
551 pa_source *root_source = s->output_from_master->source;
552
553 while (root_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)
554 root_source = root_source->output_from_master->source;
555
556 s->reference_volume = root_source->reference_volume;
557 pa_cvolume_remap(&s->reference_volume, &root_source->channel_map, &s->channel_map);
558
559 s->real_volume = root_source->real_volume;
560 pa_cvolume_remap(&s->real_volume, &root_source->channel_map, &s->channel_map);
561 } else
562 /* We assume that if the source implementor changed the default
563 * volume he did so in real_volume, because that is the usual
564 * place where he is supposed to place his changes. */
565 s->reference_volume = s->real_volume;
566
567 s->thread_info.soft_volume = s->soft_volume;
568 s->thread_info.soft_muted = s->muted;
569 pa_sw_cvolume_multiply(&s->thread_info.current_hw_volume, &s->soft_volume, &s->real_volume);
570
571 pa_assert((s->flags & PA_SOURCE_HW_VOLUME_CTRL)
572 || (s->base_volume == PA_VOLUME_NORM
573 && ((s->flags & PA_SOURCE_DECIBEL_VOLUME || (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)))));
574 pa_assert(!(s->flags & PA_SOURCE_DECIBEL_VOLUME) || s->n_volume_steps == PA_VOLUME_NORM+1);
575 pa_assert(!(s->flags & PA_SOURCE_DYNAMIC_LATENCY) == (s->thread_info.fixed_latency != 0));
576
577 pa_assert_se(source_set_state(s, PA_SOURCE_IDLE) == 0);
578
579 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_NEW, s->index);
580 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PUT], s);
581 }
582
583 /* Called from main context */
584 void pa_source_unlink(pa_source *s) {
585 pa_bool_t linked;
586 pa_source_output *o, *j = NULL;
587
588 pa_assert(s);
589 pa_assert_ctl_context();
590
591 /* See pa_sink_unlink() for a couple of comments on how this function
592 * works. */
593
594 linked = PA_SOURCE_IS_LINKED(s->state);
595
596 if (linked)
597 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_UNLINK], s);
598
599 if (s->state != PA_SOURCE_UNLINKED)
600 pa_namereg_unregister(s->core, s->name);
601 pa_idxset_remove_by_data(s->core->sources, s, NULL);
602
603 if (s->card)
604 pa_idxset_remove_by_data(s->card->sources, s, NULL);
605
606 while ((o = pa_idxset_first(s->outputs, NULL))) {
607 pa_assert(o != j);
608 pa_source_output_kill(o);
609 j = o;
610 }
611
612 if (linked)
613 source_set_state(s, PA_SOURCE_UNLINKED);
614 else
615 s->state = PA_SOURCE_UNLINKED;
616
617 reset_callbacks(s);
618
619 if (linked) {
620 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_REMOVE, s->index);
621 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_UNLINK_POST], s);
622 }
623 }
624
625 /* Called from main context */
626 static void source_free(pa_object *o) {
627 pa_source_output *so;
628 pa_source *s = PA_SOURCE(o);
629
630 pa_assert(s);
631 pa_assert_ctl_context();
632 pa_assert(pa_source_refcnt(s) == 0);
633
634 if (PA_SOURCE_IS_LINKED(s->state))
635 pa_source_unlink(s);
636
637 pa_log_info("Freeing source %u \"%s\"", s->index, s->name);
638
639 pa_idxset_free(s->outputs, NULL, NULL);
640
641 while ((so = pa_hashmap_steal_first(s->thread_info.outputs)))
642 pa_source_output_unref(so);
643
644 pa_hashmap_free(s->thread_info.outputs, NULL, NULL);
645
646 if (s->silence.memblock)
647 pa_memblock_unref(s->silence.memblock);
648
649 pa_xfree(s->name);
650 pa_xfree(s->driver);
651
652 if (s->proplist)
653 pa_proplist_free(s->proplist);
654
655 if (s->ports) {
656 pa_device_port *p;
657
658 while ((p = pa_hashmap_steal_first(s->ports)))
659 pa_device_port_free(p);
660
661 pa_hashmap_free(s->ports, NULL, NULL);
662 }
663
664 pa_xfree(s);
665 }
666
667 /* Called from main context, and not while the IO thread is active, please */
668 void pa_source_set_asyncmsgq(pa_source *s, pa_asyncmsgq *q) {
669 pa_source_assert_ref(s);
670 pa_assert_ctl_context();
671
672 s->asyncmsgq = q;
673 }
674
675 /* Called from main context, and not while the IO thread is active, please */
676 void pa_source_update_flags(pa_source *s, pa_source_flags_t mask, pa_source_flags_t value) {
677 pa_source_assert_ref(s);
678 pa_assert_ctl_context();
679
680 if (mask == 0)
681 return;
682
683 /* For now, allow only a minimal set of flags to be changed. */
684 pa_assert((mask & ~(PA_SOURCE_DYNAMIC_LATENCY|PA_SOURCE_LATENCY)) == 0);
685
686 s->flags = (s->flags & ~mask) | (value & mask);
687 }
688
689 /* Called from IO context, or before _put() from main context */
690 void pa_source_set_rtpoll(pa_source *s, pa_rtpoll *p) {
691 pa_source_assert_ref(s);
692 pa_source_assert_io_context(s);
693
694 s->thread_info.rtpoll = p;
695 }
696
697 /* Called from main context */
698 int pa_source_update_status(pa_source*s) {
699 pa_source_assert_ref(s);
700 pa_assert_ctl_context();
701 pa_assert(PA_SOURCE_IS_LINKED(s->state));
702
703 if (s->state == PA_SOURCE_SUSPENDED)
704 return 0;
705
706 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
707 }
708
709 /* Called from main context */
710 int pa_source_suspend(pa_source *s, pa_bool_t suspend, pa_suspend_cause_t cause) {
711 pa_source_assert_ref(s);
712 pa_assert_ctl_context();
713 pa_assert(PA_SOURCE_IS_LINKED(s->state));
714 pa_assert(cause != 0);
715
716 if (s->monitor_of && cause != PA_SUSPEND_PASSTHROUGH)
717 return -PA_ERR_NOTSUPPORTED;
718
719 if (suspend)
720 s->suspend_cause |= cause;
721 else
722 s->suspend_cause &= ~cause;
723
724 if ((pa_source_get_state(s) == PA_SOURCE_SUSPENDED) == !!s->suspend_cause)
725 return 0;
726
727 pa_log_debug("Suspend cause of source %s is 0x%04x, %s", s->name, s->suspend_cause, s->suspend_cause ? "suspending" : "resuming");
728
729 if (s->suspend_cause)
730 return source_set_state(s, PA_SOURCE_SUSPENDED);
731 else
732 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
733 }
734
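/* Illustrative sketch (not part of this file): suspend causes form a bit
 * mask, so a source only resumes once every cause that suspended it has been
 * cleared again. A hypothetical caller might do:
 *
 *     pa_source_suspend(s, TRUE, PA_SUSPEND_USER);   // suspend on user request
 *     ...
 *     pa_source_suspend(s, FALSE, PA_SUSPEND_USER);  // resume, unless e.g. PA_SUSPEND_IDLE is still set
 */
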
735 /* Called from main context */
736 int pa_source_sync_suspend(pa_source *s) {
737 pa_sink_state_t state;
738
739 pa_source_assert_ref(s);
740 pa_assert_ctl_context();
741 pa_assert(PA_SOURCE_IS_LINKED(s->state));
742 pa_assert(s->monitor_of);
743
744 state = pa_sink_get_state(s->monitor_of);
745
746 if (state == PA_SINK_SUSPENDED)
747 return source_set_state(s, PA_SOURCE_SUSPENDED);
748
749 pa_assert(PA_SINK_IS_OPENED(state));
750
751 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
752 }
753
754 /* Called from main context */
755 pa_queue *pa_source_move_all_start(pa_source *s, pa_queue *q) {
756 pa_source_output *o, *n;
757 uint32_t idx;
758
759 pa_source_assert_ref(s);
760 pa_assert_ctl_context();
761 pa_assert(PA_SOURCE_IS_LINKED(s->state));
762
763 if (!q)
764 q = pa_queue_new();
765
766 for (o = PA_SOURCE_OUTPUT(pa_idxset_first(s->outputs, &idx)); o; o = n) {
767 n = PA_SOURCE_OUTPUT(pa_idxset_next(s->outputs, &idx));
768
769 pa_source_output_ref(o);
770
771 if (pa_source_output_start_move(o) >= 0)
772 pa_queue_push(q, o);
773 else
774 pa_source_output_unref(o);
775 }
776
777 return q;
778 }
779
780 /* Called from main context */
781 void pa_source_move_all_finish(pa_source *s, pa_queue *q, pa_bool_t save) {
782 pa_source_output *o;
783
784 pa_source_assert_ref(s);
785 pa_assert_ctl_context();
786 pa_assert(PA_SOURCE_IS_LINKED(s->state));
787 pa_assert(q);
788
789 while ((o = PA_SOURCE_OUTPUT(pa_queue_pop(q)))) {
790 if (pa_source_output_finish_move(o, s, save) < 0)
791 pa_source_output_fail_move(o);
792
793 pa_source_output_unref(o);
794 }
795
796 pa_queue_free(q, NULL, NULL);
797 }
798
799 /* Called from main context */
800 void pa_source_move_all_fail(pa_queue *q) {
801 pa_source_output *o;
802
803 pa_assert_ctl_context();
804 pa_assert(q);
805
806 while ((o = PA_SOURCE_OUTPUT(pa_queue_pop(q)))) {
807 pa_source_output_fail_move(o);
808 pa_source_output_unref(o);
809 }
810
811 pa_queue_free(q, NULL, NULL);
812 }
813
814 /* Called from IO thread context */
815 void pa_source_process_rewind(pa_source *s, size_t nbytes) {
816 pa_source_output *o;
817 void *state = NULL;
818
819 pa_source_assert_ref(s);
820 pa_source_assert_io_context(s);
821 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
822
823 if (nbytes <= 0)
824 return;
825
826 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
827 return;
828
829 pa_log_debug("Processing rewind...");
830
831 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
832 pa_source_output_assert_ref(o);
833 pa_source_output_process_rewind(o, nbytes);
834 }
835 }
836
837 /* Called from IO thread context */
838 void pa_source_post(pa_source*s, const pa_memchunk *chunk) {
839 pa_source_output *o;
840 void *state = NULL;
841
842 pa_source_assert_ref(s);
843 pa_source_assert_io_context(s);
844 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
845 pa_assert(chunk);
846
847 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
848 return;
849
850 if (s->thread_info.soft_muted || !pa_cvolume_is_norm(&s->thread_info.soft_volume)) {
851 pa_memchunk vchunk = *chunk;
852
853 pa_memblock_ref(vchunk.memblock);
854 pa_memchunk_make_writable(&vchunk, 0);
855
856 if (s->thread_info.soft_muted || pa_cvolume_is_muted(&s->thread_info.soft_volume))
857 pa_silence_memchunk(&vchunk, &s->sample_spec);
858 else
859 pa_volume_memchunk(&vchunk, &s->sample_spec, &s->thread_info.soft_volume);
860
861 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL))) {
862 pa_source_output_assert_ref(o);
863
864 if (!o->thread_info.direct_on_input)
865 pa_source_output_push(o, &vchunk);
866 }
867
868 pa_memblock_unref(vchunk.memblock);
869 } else {
870
871 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL))) {
872 pa_source_output_assert_ref(o);
873
874 if (!o->thread_info.direct_on_input)
875 pa_source_output_push(o, chunk);
876 }
877 }
878 }
879
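/* Illustrative sketch (not part of this file): in its IO thread, a capture
 * module typically fills a pa_memchunk with freshly recorded audio and hands
 * it to all connected outputs via pa_source_post(). The helper my_read() and
 * the fixed request size are hypothetical:
 *
 *     pa_memchunk chunk;
 *     void *p;
 *
 *     chunk.length = pa_frame_align(4096, &s->sample_spec);
 *     chunk.index = 0;
 *     chunk.memblock = pa_memblock_new(s->core->mempool, chunk.length);
 *
 *     p = pa_memblock_acquire(chunk.memblock);
 *     my_read(p, chunk.length);                 // fill with captured samples
 *     pa_memblock_release(chunk.memblock);
 *
 *     pa_source_post(s, &chunk);
 *     pa_memblock_unref(chunk.memblock);
 */
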
880 /* Called from IO thread context */
881 void pa_source_post_direct(pa_source*s, pa_source_output *o, const pa_memchunk *chunk) {
882 pa_source_assert_ref(s);
883 pa_source_assert_io_context(s);
884 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
885 pa_source_output_assert_ref(o);
886 pa_assert(o->thread_info.direct_on_input);
887 pa_assert(chunk);
888
889 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
890 return;
891
892 if (s->thread_info.soft_muted || !pa_cvolume_is_norm(&s->thread_info.soft_volume)) {
893 pa_memchunk vchunk = *chunk;
894
895 pa_memblock_ref(vchunk.memblock);
896 pa_memchunk_make_writable(&vchunk, 0);
897
898 if (s->thread_info.soft_muted || pa_cvolume_is_muted(&s->thread_info.soft_volume))
899 pa_silence_memchunk(&vchunk, &s->sample_spec);
900 else
901 pa_volume_memchunk(&vchunk, &s->sample_spec, &s->thread_info.soft_volume);
902
903 pa_source_output_push(o, &vchunk);
904
905 pa_memblock_unref(vchunk.memblock);
906 } else
907 pa_source_output_push(o, chunk);
908 }
909
910 /* Called from main thread */
911 pa_usec_t pa_source_get_latency(pa_source *s) {
912 pa_usec_t usec;
913
914 pa_source_assert_ref(s);
915 pa_assert_ctl_context();
916 pa_assert(PA_SOURCE_IS_LINKED(s->state));
917
918 if (s->state == PA_SOURCE_SUSPENDED)
919 return 0;
920
921 if (!(s->flags & PA_SOURCE_LATENCY))
922 return 0;
923
924 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_LATENCY, &usec, 0, NULL) == 0);
925
926 return usec;
927 }
928
929 /* Called from IO thread */
930 pa_usec_t pa_source_get_latency_within_thread(pa_source *s) {
931 pa_usec_t usec = 0;
932 pa_msgobject *o;
933
934 pa_source_assert_ref(s);
935 pa_source_assert_io_context(s);
936 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
937
938 /* The returned value is supposed to be in the time domain of the sound card! */
939
940 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
941 return 0;
942
943 if (!(s->flags & PA_SOURCE_LATENCY))
944 return 0;
945
946 o = PA_MSGOBJECT(s);
947
948 /* FIXME: We probably should make this a proper vtable callback instead of going through process_msg() */
949
950 if (o->process_msg(o, PA_SOURCE_MESSAGE_GET_LATENCY, &usec, 0, NULL) < 0)
951 return -1;
952
953 return usec;
954 }
955
956 /* Called from the main thread (and also from the IO thread while the main
957 * thread is waiting).
958 *
959 * When a source uses volume sharing, it never has the PA_SOURCE_FLAT_VOLUME flag
960 * set. Instead, flat volume mode is detected by checking whether the root source
961 * has the flag set. */
962 pa_bool_t pa_source_flat_volume_enabled(pa_source *s) {
963 pa_source_assert_ref(s);
964
965 while (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)
966 s = s->output_from_master->source;
967
968 return (s->flags & PA_SOURCE_FLAT_VOLUME);
969 }
970
971 /* Called from main context */
972 pa_bool_t pa_source_is_passthrough(pa_source *s) {
973
974 pa_source_assert_ref(s);
975
976 /* NB Currently only monitor sources support passthrough mode */
977 return (s->monitor_of && pa_sink_is_passthrough(s->monitor_of));
978 }
979
980 /* Called from main context. */
981 static void compute_reference_ratio(pa_source_output *o) {
982 unsigned c = 0;
983 pa_cvolume remapped;
984
985 pa_assert(o);
986 pa_assert(pa_source_flat_volume_enabled(o->source));
987
988 /*
989 * Calculates the reference ratio from the source's reference
990 * volume. This basically calculates:
991 *
992 * o->reference_ratio = o->volume / o->source->reference_volume
993 */
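/* Added note (informal illustration): the division is done per channel with
 * pa_sw_volume_divide(), i.e. on the linear scale behind the cubic volume
 * mapping, which amounts to subtracting decibel values. For example, if the
 * source's reference volume sits at PA_VOLUME_NORM (0 dB) and this output's
 * volume at about -6 dB, the resulting reference ratio is about -6 dB too. */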
994
995 remapped = o->source->reference_volume;
996 pa_cvolume_remap(&remapped, &o->source->channel_map, &o->channel_map);
997
998 o->reference_ratio.channels = o->sample_spec.channels;
999
1000 for (c = 0; c < o->sample_spec.channels; c++) {
1001
1002 /* We don't update when the source volume is 0 anyway */
1003 if (remapped.values[c] <= PA_VOLUME_MUTED)
1004 continue;
1005
1006 /* Don't update the reference ratio unless necessary */
1007 if (pa_sw_volume_multiply(
1008 o->reference_ratio.values[c],
1009 remapped.values[c]) == o->volume.values[c])
1010 continue;
1011
1012 o->reference_ratio.values[c] = pa_sw_volume_divide(
1013 o->volume.values[c],
1014 remapped.values[c]);
1015 }
1016 }
1017
1018 /* Called from main context. Only called for the root source in volume sharing
1019 * cases, except for internal recursive calls. */
1020 static void compute_reference_ratios(pa_source *s) {
1021 uint32_t idx;
1022 pa_source_output *o;
1023
1024 pa_source_assert_ref(s);
1025 pa_assert_ctl_context();
1026 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1027 pa_assert(pa_source_flat_volume_enabled(s));
1028
1029 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1030 compute_reference_ratio(o);
1031
1032 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1033 compute_reference_ratios(o->destination_source);
1034 }
1035 }
1036
1037 /* Called from main context. Only called for the root source in volume sharing
1038 * cases, except for internal recursive calls. */
1039 static void compute_real_ratios(pa_source *s) {
1040 pa_source_output *o;
1041 uint32_t idx;
1042
1043 pa_source_assert_ref(s);
1044 pa_assert_ctl_context();
1045 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1046 pa_assert(pa_source_flat_volume_enabled(s));
1047
1048 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1049 unsigned c;
1050 pa_cvolume remapped;
1051
1052 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1053 /* The origin source uses volume sharing, so this output's real ratio
1054 * is handled as a special case - the real ratio must be 0 dB, and
1055 * as a result o->soft_volume must equal o->volume_factor. */
1056 pa_cvolume_reset(&o->real_ratio, o->real_ratio.channels);
1057 o->soft_volume = o->volume_factor;
1058
1059 compute_real_ratios(o->destination_source);
1060
1061 continue;
1062 }
1063
1064 /*
1065 * This basically calculates:
1066 *
1067 * o->real_ratio := o->volume / s->real_volume
1068 * o->soft_volume := o->real_ratio * o->volume_factor
1069 */
1070
1071 remapped = s->real_volume;
1072 pa_cvolume_remap(&remapped, &s->channel_map, &o->channel_map);
1073
1074 o->real_ratio.channels = o->sample_spec.channels;
1075 o->soft_volume.channels = o->sample_spec.channels;
1076
1077 for (c = 0; c < o->sample_spec.channels; c++) {
1078
1079 if (remapped.values[c] <= PA_VOLUME_MUTED) {
1080 /* We leave o->real_ratio untouched */
1081 o->soft_volume.values[c] = PA_VOLUME_MUTED;
1082 continue;
1083 }
1084
1085 /* Don't lose accuracy unless necessary */
1086 if (pa_sw_volume_multiply(
1087 o->real_ratio.values[c],
1088 remapped.values[c]) != o->volume.values[c])
1089
1090 o->real_ratio.values[c] = pa_sw_volume_divide(
1091 o->volume.values[c],
1092 remapped.values[c]);
1093
1094 o->soft_volume.values[c] = pa_sw_volume_multiply(
1095 o->real_ratio.values[c],
1096 o->volume_factor.values[c]);
1097 }
1098
1099 /* We don't copy the soft_volume to the thread_info data
1100 * here. That must be done by the caller */
1101 }
1102 }
1103
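/* Summary note (added for orientation, informal): reference_volume is the
 * volume users see and set, real_volume is what actually gets configured on
 * the device (with flat volumes it tracks the loudest connected output), and
 * per output
 *
 *     reference_ratio = output volume / source reference_volume
 *     real_ratio      = output volume / source real_volume
 *     soft_volume     = real_ratio * volume_factor
 *
 * where soft_volume is the attenuation that still has to be applied in
 * software on top of whatever the hardware volume already does. */
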
1104 static pa_cvolume *cvolume_remap_minimal_impact(
1105 pa_cvolume *v,
1106 const pa_cvolume *template,
1107 const pa_channel_map *from,
1108 const pa_channel_map *to) {
1109
1110 pa_cvolume t;
1111
1112 pa_assert(v);
1113 pa_assert(template);
1114 pa_assert(from);
1115 pa_assert(to);
1116 pa_assert(pa_cvolume_compatible_with_channel_map(v, from));
1117 pa_assert(pa_cvolume_compatible_with_channel_map(template, to));
1118
1119 /* Much like pa_cvolume_remap(), but tries to minimize impact when
1120 * mapping from source output to source volumes:
1121 *
1122 * If template is a possible remapping from v it is used instead
1123 * of remapping anew.
1124 *
1125 * If the channel maps don't match we set an all-channel volume on
1126 * the source to ensure that changing a volume on one stream has no
1127 * effect that cannot be compensated for in another stream that
1128 * does not have the same channel map as the source. */
1129
1130 if (pa_channel_map_equal(from, to))
1131 return v;
1132
1133 t = *template;
1134 if (pa_cvolume_equal(pa_cvolume_remap(&t, to, from), v)) {
1135 *v = *template;
1136 return v;
1137 }
1138
1139 pa_cvolume_set(v, to->channels, pa_cvolume_max(v));
1140 return v;
1141 }
1142
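/* Informal example of the "minimal impact" rule above (the numbers are only
 * an illustration): if a stereo source output with both channels at half of
 * PA_VOLUME_NORM is mapped onto a 5.1 source and the template does not
 * already describe such a remapping, all six source channels end up at that
 * same half-of-normal value, i.e. pa_cvolume_max(v), rather than at some
 * arbitrary upmix. */
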
1143 /* Called from main thread. Only called for the root source in volume sharing
1144 * cases, except for internal recursive calls. */
1145 static void get_maximum_output_volume(pa_source *s, pa_cvolume *max_volume, const pa_channel_map *channel_map) {
1146 pa_source_output *o;
1147 uint32_t idx;
1148
1149 pa_source_assert_ref(s);
1150 pa_assert(max_volume);
1151 pa_assert(channel_map);
1152 pa_assert(pa_source_flat_volume_enabled(s));
1153
1154 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1155 pa_cvolume remapped;
1156
1157 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1158 get_maximum_output_volume(o->destination_source, max_volume, channel_map);
1159
1160 /* Ignore this output. The origin source uses volume sharing, so this
1161 * output's volume will be set to be equal to the root source's real
1162 * volume. Obviously this output's current volume must not then
1163 * affect what the root source's real volume will be. */
1164 continue;
1165 }
1166
1167 remapped = o->volume;
1168 cvolume_remap_minimal_impact(&remapped, max_volume, &o->channel_map, channel_map);
1169 pa_cvolume_merge(max_volume, max_volume, &remapped);
1170 }
1171 }
1172
1173 /* Called from main thread. Only called for the root source in volume sharing
1174 * cases, except for internal recursive calls. */
1175 static pa_bool_t has_outputs(pa_source *s) {
1176 pa_source_output *o;
1177 uint32_t idx;
1178
1179 pa_source_assert_ref(s);
1180
1181 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1182 if (!o->destination_source || !(o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) || has_outputs(o->destination_source))
1183 return TRUE;
1184 }
1185
1186 return FALSE;
1187 }
1188
1189 /* Called from main thread. Only called for the root source in volume sharing
1190 * cases, except for internal recursive calls. */
1191 static void update_real_volume(pa_source *s, const pa_cvolume *new_volume, pa_channel_map *channel_map) {
1192 pa_source_output *o;
1193 uint32_t idx;
1194
1195 pa_source_assert_ref(s);
1196 pa_assert(new_volume);
1197 pa_assert(channel_map);
1198
1199 s->real_volume = *new_volume;
1200 pa_cvolume_remap(&s->real_volume, channel_map, &s->channel_map);
1201
1202 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1203 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1204 if (pa_source_flat_volume_enabled(s)) {
1205 pa_cvolume old_volume = o->volume;
1206
1207 /* Follow the root source's real volume. */
1208 o->volume = *new_volume;
1209 pa_cvolume_remap(&o->volume, channel_map, &o->channel_map);
1210 compute_reference_ratio(o);
1211
1212 /* The volume changed, let's tell people so */
1213 if (!pa_cvolume_equal(&old_volume, &o->volume)) {
1214 if (o->volume_changed)
1215 o->volume_changed(o);
1216
1217 pa_subscription_post(o->core, PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT|PA_SUBSCRIPTION_EVENT_CHANGE, o->index);
1218 }
1219 }
1220
1221 update_real_volume(o->destination_source, new_volume, channel_map);
1222 }
1223 }
1224 }
1225
1226 /* Called from main thread. Only called for the root source in shared volume
1227 * cases. */
1228 static void compute_real_volume(pa_source *s) {
1229 pa_source_assert_ref(s);
1230 pa_assert_ctl_context();
1231 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1232 pa_assert(pa_source_flat_volume_enabled(s));
1233 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1234
1235 /* This determines the maximum volume of all streams and sets
1236 * s->real_volume accordingly. */
1237
1238 if (!has_outputs(s)) {
1239 /* In the special case that we have no source outputs we leave the
1240 * volume unmodified. */
1241 update_real_volume(s, &s->reference_volume, &s->channel_map);
1242 return;
1243 }
1244
1245 pa_cvolume_mute(&s->real_volume, s->channel_map.channels);
1246
1247 /* First let's determine the new maximum volume of all outputs
1248 * connected to this source */
1249 get_maximum_output_volume(s, &s->real_volume, &s->channel_map);
1250 update_real_volume(s, &s->real_volume, &s->channel_map);
1251
1252 /* Then, let's update the real ratios/soft volumes of all outputs
1253 * connected to this source */
1254 compute_real_ratios(s);
1255 }
1256
1257 /* Called from main thread. Only called for the root source in shared volume
1258 * cases, except for internal recursive calls. */
1259 static void propagate_reference_volume(pa_source *s) {
1260 pa_source_output *o;
1261 uint32_t idx;
1262
1263 pa_source_assert_ref(s);
1264 pa_assert_ctl_context();
1265 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1266 pa_assert(pa_source_flat_volume_enabled(s));
1267
1268 /* This is called whenever the source volume changes in a way that is not
1269 * caused by a source output volume change. We need to fix up the
1270 * source output volumes accordingly */
1271
1272 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1273 pa_cvolume old_volume;
1274
1275 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1276 propagate_reference_volume(o->destination_source);
1277
1278 /* Since the origin source uses volume sharing, this output's volume
1279 * needs to be updated to match the root source's real volume, but
1280 * that will be done later in update_shared_real_volume(). */
1281 continue;
1282 }
1283
1284 old_volume = o->volume;
1285
1286 /* This basically calculates:
1287 *
1288 * o->volume := o->reference_volume * o->reference_ratio */
1289
1290 o->volume = s->reference_volume;
1291 pa_cvolume_remap(&o->volume, &s->channel_map, &o->channel_map);
1292 pa_sw_cvolume_multiply(&o->volume, &o->volume, &o->reference_ratio);
1293
1294 /* The volume changed, let's tell people so */
1295 if (!pa_cvolume_equal(&old_volume, &o->volume)) {
1296
1297 if (o->volume_changed)
1298 o->volume_changed(o);
1299
1300 pa_subscription_post(o->core, PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT|PA_SUBSCRIPTION_EVENT_CHANGE, o->index);
1301 }
1302 }
1303 }
1304
1305 /* Called from main thread. Only called for the root source in volume sharing
1306 * cases, except for internal recursive calls. The return value indicates
1307 * whether any reference volume actually changed. */
1308 static pa_bool_t update_reference_volume(pa_source *s, const pa_cvolume *v, const pa_channel_map *channel_map, pa_bool_t save) {
1309 pa_cvolume volume;
1310 pa_bool_t reference_volume_changed;
1311 pa_source_output *o;
1312 uint32_t idx;
1313
1314 pa_source_assert_ref(s);
1315 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1316 pa_assert(v);
1317 pa_assert(channel_map);
1318 pa_assert(pa_cvolume_valid(v));
1319
1320 volume = *v;
1321 pa_cvolume_remap(&volume, channel_map, &s->channel_map);
1322
1323 reference_volume_changed = !pa_cvolume_equal(&volume, &s->reference_volume);
1324 s->reference_volume = volume;
1325
1326 s->save_volume = (!reference_volume_changed && s->save_volume) || save;
1327
1328 if (reference_volume_changed)
1329 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1330 else if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1331 /* If the root source's volume doesn't change, then there can't be any
1332 * changes in the other sources in the source tree either.
1333 *
1334 * It's probably theoretically possible that even if the root source's
1335 * volume changes slightly, some filter source doesn't change its volume
1336 * due to rounding errors. If that happens, we still want to propagate
1337 * the changed root source volume to the sources connected to the
1338 * intermediate source that didn't change its volume. This theoretical
1339 * possibility is the reason why we have that !(s->flags &
1340 * PA_SOURCE_SHARE_VOLUME_WITH_MASTER) condition. Probably nobody would
1341 * notice even if we always returned FALSE here when
1342 * reference_volume_changed is FALSE. */
1343 return FALSE;
1344
1345 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1346 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1347 update_reference_volume(o->destination_source, v, channel_map, FALSE);
1348 }
1349
1350 return TRUE;
1351 }
1352
1353 /* Called from main thread */
1354 void pa_source_set_volume(
1355 pa_source *s,
1356 const pa_cvolume *volume,
1357 pa_bool_t send_msg,
1358 pa_bool_t save) {
1359
1360 pa_cvolume new_reference_volume;
1361 pa_source *root_source = s;
1362
1363 pa_source_assert_ref(s);
1364 pa_assert_ctl_context();
1365 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1366 pa_assert(!volume || pa_cvolume_valid(volume));
1367 pa_assert(volume || pa_source_flat_volume_enabled(s));
1368 pa_assert(!volume || volume->channels == 1 || pa_cvolume_compatible(volume, &s->sample_spec));
1369
1370 /* make sure we don't change the volume while this source is the monitor of a PASSTHROUGH sink */
1371 if (pa_source_is_passthrough(s)) {
1372 /* FIXME: Need to notify client that volume control is disabled */
1373 pa_log_warn("Cannot change volume, Source is monitor of a PASSTHROUGH sink");
1374 return;
1375 }
1376
1377 /* In case of volume sharing, the volume is set for the root source first,
1378 * from which it's then propagated to the sharing sources. */
1379 while (root_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)
1380 root_source = root_source->output_from_master->source;
1381
1382 /* As a special exception we accept mono volumes on all sources --
1383 * even on those with more complex channel maps */
1384
1385 if (volume) {
1386 if (pa_cvolume_compatible(volume, &s->sample_spec))
1387 new_reference_volume = *volume;
1388 else {
1389 new_reference_volume = s->reference_volume;
1390 pa_cvolume_scale(&new_reference_volume, pa_cvolume_max(volume));
1391 }
1392
1393 pa_cvolume_remap(&new_reference_volume, &s->channel_map, &root_source->channel_map);
1394 }
1395
1396 /* If volume is NULL we synchronize the source's real and reference
1397 * volumes with the stream volumes. If it is not NULL we update
1398 * the reference_volume with it. */
1399
1400 if (volume) {
1401 if (update_reference_volume(root_source, &new_reference_volume, &root_source->channel_map, save)) {
1402 if (pa_source_flat_volume_enabled(root_source)) {
1403 /* OK, propagate this volume change back to the outputs */
1404 propagate_reference_volume(root_source);
1405
1406 /* And now recalculate the real volume */
1407 compute_real_volume(root_source);
1408 } else
1409 update_real_volume(root_source, &root_source->reference_volume, &root_source->channel_map);
1410 }
1411
1412 } else {
1413 pa_assert(pa_source_flat_volume_enabled(root_source));
1414
1415 /* Ok, let's determine the new real volume */
1416 compute_real_volume(root_source);
1417
1418 /* Let's 'push' the reference volume if necessary */
1419 pa_cvolume_merge(&new_reference_volume, &s->reference_volume, &root_source->real_volume);
1420 update_reference_volume(root_source, &new_reference_volume, &root_source->channel_map, save);
1421
1422 /* Now that the reference volume is updated, we can update the streams'
1423 * reference ratios. */
1424 compute_reference_ratios(root_source);
1425 }
1426
1427 if (root_source->set_volume) {
1428 /* If we have a function set_volume(), then we do not apply a
1429 * soft volume by default. However, set_volume() is free to
1430 * apply one to root_source->soft_volume */
1431
1432 pa_cvolume_reset(&root_source->soft_volume, root_source->sample_spec.channels);
1433 if (!(root_source->flags & PA_SOURCE_SYNC_VOLUME))
1434 root_source->set_volume(root_source);
1435
1436 } else
1437 /* If we have no function set_volume(), then the soft volume
1438 * becomes the real volume */
1439 root_source->soft_volume = root_source->real_volume;
1440
1441 /* This tells the source that soft volume and/or real volume changed */
1442 if (send_msg)
1443 pa_assert_se(pa_asyncmsgq_send(root_source->asyncmsgq, PA_MSGOBJECT(root_source), PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL) == 0);
1444 }
1445
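/* Illustrative sketch (not part of this file): a typical caller, e.g. a
 * protocol frontend acting on a client request, builds a pa_cvolume and lets
 * pa_source_set_volume() propagate it (the half-of-normal value is just an
 * example):
 *
 *     pa_cvolume v;
 *
 *     pa_cvolume_set(&v, s->sample_spec.channels, PA_VOLUME_NORM / 2);
 *     pa_source_set_volume(s, &v, TRUE, TRUE);   // send_msg=TRUE, save=TRUE
 */
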
1446 /* Called from the IO thread if sync volume is used, otherwise from the main thread.
1447 * Only to be called by the source implementor */
1448 void pa_source_set_soft_volume(pa_source *s, const pa_cvolume *volume) {
1449
1450 pa_source_assert_ref(s);
1451 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1452
1453 if (s->flags & PA_SOURCE_SYNC_VOLUME)
1454 pa_source_assert_io_context(s);
1455 else
1456 pa_assert_ctl_context();
1457
1458 if (!volume)
1459 pa_cvolume_reset(&s->soft_volume, s->sample_spec.channels);
1460 else
1461 s->soft_volume = *volume;
1462
1463 if (PA_SOURCE_IS_LINKED(s->state) && !(s->flags & PA_SOURCE_SYNC_VOLUME))
1464 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_VOLUME, NULL, 0, NULL) == 0);
1465 else
1466 s->thread_info.soft_volume = s->soft_volume;
1467 }
1468
1469 /* Called from the main thread. Only called for the root source in volume sharing
1470 * cases, except for internal recursive calls. */
1471 static void propagate_real_volume(pa_source *s, const pa_cvolume *old_real_volume) {
1472 pa_source_output *o;
1473 uint32_t idx;
1474
1475 pa_source_assert_ref(s);
1476 pa_assert(old_real_volume);
1477 pa_assert_ctl_context();
1478 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1479
1480 /* This is called when the hardware's real volume changes due to
1481 * some external event. We copy the real volume into our
1482 * reference volume and then rebuild the stream volumes based on
1483 * o->real_ratio which should stay fixed. */
1484
1485 if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1486 if (pa_cvolume_equal(old_real_volume, &s->real_volume))
1487 return;
1488
1489 /* 1. Make the real volume the reference volume */
1490 update_reference_volume(s, &s->real_volume, &s->channel_map, TRUE);
1491 }
1492
1493 if (pa_source_flat_volume_enabled(s)) {
1494
1495 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1496 pa_cvolume old_volume = o->volume;
1497
1498 /* 2. Since the source's reference and real volumes are equal
1499 * now, our ratios should be too. */
1500 o->reference_ratio = o->real_ratio;
1501
1502 /* 3. Recalculate the new stream reference volume based on the
1503 * reference ratio and the source's reference volume.
1504 *
1505 * This basically calculates:
1506 *
1507 * o->volume = s->reference_volume * o->reference_ratio
1508 *
1509 * This is identical to propagate_reference_volume() */
1510 o->volume = s->reference_volume;
1511 pa_cvolume_remap(&o->volume, &s->channel_map, &o->channel_map);
1512 pa_sw_cvolume_multiply(&o->volume, &o->volume, &o->reference_ratio);
1513
1514 /* Notify if something changed */
1515 if (!pa_cvolume_equal(&old_volume, &o->volume)) {
1516
1517 if (o->volume_changed)
1518 o->volume_changed(o);
1519
1520 pa_subscription_post(o->core, PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT|PA_SUBSCRIPTION_EVENT_CHANGE, o->index);
1521 }
1522
1523 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1524 propagate_real_volume(o->destination_source, old_real_volume);
1525 }
1526 }
1527
1528 /* Something changed in the hardware. It probably makes sense
1529 * to save changed hw settings given that hw volume changes not
1530 * triggered by PA are almost certainly done by the user. */
1531 if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1532 s->save_volume = TRUE;
1533 }
1534
1535 /* Called from io thread */
1536 void pa_source_update_volume_and_mute(pa_source *s) {
1537 pa_assert(s);
1538 pa_source_assert_io_context(s);
1539
1540 pa_asyncmsgq_post(pa_thread_mq_get()->outq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_UPDATE_VOLUME_AND_MUTE, NULL, 0, NULL, NULL);
1541 }
1542
1543 /* Called from main thread */
1544 const pa_cvolume *pa_source_get_volume(pa_source *s, pa_bool_t force_refresh) {
1545 pa_source_assert_ref(s);
1546 pa_assert_ctl_context();
1547 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1548
1549 if (s->refresh_volume || force_refresh) {
1550 struct pa_cvolume old_real_volume;
1551
1552 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1553
1554 old_real_volume = s->real_volume;
1555
1556 if (!(s->flags & PA_SOURCE_SYNC_VOLUME) && s->get_volume)
1557 s->get_volume(s);
1558
1559 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_VOLUME, NULL, 0, NULL) == 0);
1560
1561 update_real_volume(s, &s->real_volume, &s->channel_map);
1562 propagate_real_volume(s, &old_real_volume);
1563 }
1564
1565 return &s->reference_volume;
1566 }
1567
1568 /* Called from main thread. In volume sharing cases, only the root source may
1569 * call this. */
1570 void pa_source_volume_changed(pa_source *s, const pa_cvolume *new_real_volume) {
1571 pa_cvolume old_real_volume;
1572
1573 pa_source_assert_ref(s);
1574 pa_assert_ctl_context();
1575 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1576 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1577
1578 /* The source implementor may call this if the volume changed to make sure everyone is notified */
1579
1580 old_real_volume = s->real_volume;
1581 update_real_volume(s, new_real_volume, &s->channel_map);
1582 propagate_real_volume(s, &old_real_volume);
1583 }
1584
1585 /* Called from main thread */
1586 void pa_source_set_mute(pa_source *s, pa_bool_t mute, pa_bool_t save) {
1587 pa_bool_t old_muted;
1588
1589 pa_source_assert_ref(s);
1590 pa_assert_ctl_context();
1591 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1592
1593 old_muted = s->muted;
1594 s->muted = mute;
1595 s->save_muted = (old_muted == s->muted && s->save_muted) || save;
1596
1597 if (!(s->flags & PA_SOURCE_SYNC_VOLUME) && s->set_mute)
1598 s->set_mute(s);
1599
1600 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MUTE, NULL, 0, NULL) == 0);
1601
1602 if (old_muted != s->muted)
1603 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1604 }
1605
1606 /* Called from main thread */
1607 pa_bool_t pa_source_get_mute(pa_source *s, pa_bool_t force_refresh) {
1608
1609 pa_source_assert_ref(s);
1610 pa_assert_ctl_context();
1611 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1612
1613 if (s->refresh_muted || force_refresh) {
1614 pa_bool_t old_muted = s->muted;
1615
1616 if (!(s->flags & PA_SOURCE_SYNC_VOLUME) && s->get_mute)
1617 s->get_mute(s);
1618
1619 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_MUTE, NULL, 0, NULL) == 0);
1620
1621 if (old_muted != s->muted) {
1622 s->save_muted = TRUE;
1623
1624 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1625
1626 /* Make sure the soft mute status stays in sync */
1627 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MUTE, NULL, 0, NULL) == 0);
1628 }
1629 }
1630
1631 return s->muted;
1632 }
1633
1634 /* Called from main thread */
1635 void pa_source_mute_changed(pa_source *s, pa_bool_t new_muted) {
1636 pa_source_assert_ref(s);
1637 pa_assert_ctl_context();
1638 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1639
1640 /* The source implementor may call this if the mute state changed to make sure everyone is notified */
1641
1642 if (s->muted == new_muted)
1643 return;
1644
1645 s->muted = new_muted;
1646 s->save_muted = TRUE;
1647
1648 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1649 }
1650
1651 /* Called from main thread */
1652 pa_bool_t pa_source_update_proplist(pa_source *s, pa_update_mode_t mode, pa_proplist *p) {
1653 pa_source_assert_ref(s);
1654 pa_assert_ctl_context();
1655
1656 if (p)
1657 pa_proplist_update(s->proplist, mode, p);
1658
1659 if (PA_SOURCE_IS_LINKED(s->state)) {
1660 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PROPLIST_CHANGED], s);
1661 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1662 }
1663
1664 return TRUE;
1665 }
1666
1667 /* Called from main thread */
1668 /* FIXME -- this should be dropped and merged into pa_source_update_proplist() */
1669 void pa_source_set_description(pa_source *s, const char *description) {
1670 const char *old;
1671 pa_source_assert_ref(s);
1672 pa_assert_ctl_context();
1673
1674 if (!description && !pa_proplist_contains(s->proplist, PA_PROP_DEVICE_DESCRIPTION))
1675 return;
1676
1677 old = pa_proplist_gets(s->proplist, PA_PROP_DEVICE_DESCRIPTION);
1678
1679 if (old && description && pa_streq(old, description))
1680 return;
1681
1682 if (description)
1683 pa_proplist_sets(s->proplist, PA_PROP_DEVICE_DESCRIPTION, description);
1684 else
1685 pa_proplist_unset(s->proplist, PA_PROP_DEVICE_DESCRIPTION);
1686
1687 if (PA_SOURCE_IS_LINKED(s->state)) {
1688 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1689 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PROPLIST_CHANGED], s);
1690 }
1691 }
1692
1693 /* Called from main thread */
1694 unsigned pa_source_linked_by(pa_source *s) {
1695 pa_source_assert_ref(s);
1696 pa_assert_ctl_context();
1697 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1698
1699 return pa_idxset_size(s->outputs);
1700 }
1701
1702 /* Called from main thread */
1703 unsigned pa_source_used_by(pa_source *s) {
1704 unsigned ret;
1705
1706 pa_source_assert_ref(s);
1707 pa_assert_ctl_context();
1708 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1709
1710 ret = pa_idxset_size(s->outputs);
1711 pa_assert(ret >= s->n_corked);
1712
1713 return ret - s->n_corked;
1714 }
1715
1716 /* Called from main thread */
1717 unsigned pa_source_check_suspend(pa_source *s) {
1718 unsigned ret;
1719 pa_source_output *o;
1720 uint32_t idx;
1721
1722 pa_source_assert_ref(s);
1723 pa_assert_ctl_context();
1724
1725 if (!PA_SOURCE_IS_LINKED(s->state))
1726 return 0;
1727
1728 ret = 0;
1729
1730 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1731 pa_source_output_state_t st;
1732
1733 st = pa_source_output_get_state(o);
1734
1735 /* We do not assert here. It is perfectly valid for a source output to
1736  * be in the INIT state (i.e. created, marked done but not yet put)
1737  * and we should not care if it's unlinked as it won't contribute
1738  * towards our busy status.
1739  */
1740 if (!PA_SOURCE_OUTPUT_IS_LINKED(st))
1741 continue;
1742
1743 if (st == PA_SOURCE_OUTPUT_CORKED)
1744 continue;
1745
1746 if (o->flags & PA_SOURCE_OUTPUT_DONT_INHIBIT_AUTO_SUSPEND)
1747 continue;
1748
1749 ret ++;
1750 }
1751
1752 return ret;
1753 }
1754
1755 /* Called from the IO thread */
1756 static void sync_output_volumes_within_thread(pa_source *s) {
1757 pa_source_output *o;
1758 void *state = NULL;
1759
1760 pa_source_assert_ref(s);
1761 pa_source_assert_io_context(s);
1762
1763 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
1764 if (pa_cvolume_equal(&o->thread_info.soft_volume, &o->soft_volume))
1765 continue;
1766
1767 o->thread_info.soft_volume = o->soft_volume;
1768 //pa_source_output_request_rewind(o, 0, TRUE, FALSE, FALSE);
1769 }
1770 }
1771
1772 /* Called from the IO thread. Only called for the root source in volume sharing
1773 * cases, except for internal recursive calls. */
1774 static void set_shared_volume_within_thread(pa_source *s) {
1775 pa_source_output *o;
1776 void *state = NULL;
1777
1778 pa_source_assert_ref(s);
1779
1780 PA_MSGOBJECT(s)->process_msg(PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_VOLUME_SYNCED, NULL, 0, NULL);
1781
1782 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
1783 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1784 set_shared_volume_within_thread(o->destination_source);
1785 }
1786 }
1787
1788 /* Called from the IO thread, except for messages that are explicitly handled in the main thread (e.g. PA_SOURCE_MESSAGE_UPDATE_VOLUME_AND_MUTE) */
1789 int pa_source_process_msg(pa_msgobject *object, int code, void *userdata, int64_t offset, pa_memchunk *chunk) {
1790 pa_source *s = PA_SOURCE(object);
1791 pa_source_assert_ref(s);
1792
1793 switch ((pa_source_message_t) code) {
1794
1795 case PA_SOURCE_MESSAGE_ADD_OUTPUT: {
1796 pa_source_output *o = PA_SOURCE_OUTPUT(userdata);
1797
1798 pa_hashmap_put(s->thread_info.outputs, PA_UINT32_TO_PTR(o->index), pa_source_output_ref(o));
1799
1800 if (o->direct_on_input) {
1801 o->thread_info.direct_on_input = o->direct_on_input;
1802 pa_hashmap_put(o->thread_info.direct_on_input->thread_info.direct_outputs, PA_UINT32_TO_PTR(o->index), o);
1803 }
1804
1805 pa_assert(!o->thread_info.attached);
1806 o->thread_info.attached = TRUE;
1807
1808 if (o->attach)
1809 o->attach(o);
1810
1811 pa_source_output_set_state_within_thread(o, o->state);
1812
1813 if (o->thread_info.requested_source_latency != (pa_usec_t) -1)
1814 pa_source_output_set_requested_latency_within_thread(o, o->thread_info.requested_source_latency);
1815
1816 pa_source_output_update_max_rewind(o, s->thread_info.max_rewind);
1817
1818 /* We don't just invalidate the requested latency here,
1819 * because if we are in a move we might need to fix up the
1820 * requested latency. */
1821 pa_source_output_set_requested_latency_within_thread(o, o->thread_info.requested_source_latency);
1822
1823 /* In flat volume mode we need to update the volume as
1824 * well */
1825 return object->process_msg(object, PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL);
1826 }
1827
1828 case PA_SOURCE_MESSAGE_REMOVE_OUTPUT: {
1829 pa_source_output *o = PA_SOURCE_OUTPUT(userdata);
1830
1831 pa_source_output_set_state_within_thread(o, o->state);
1832
1833 if (o->detach)
1834 o->detach(o);
1835
1836 pa_assert(o->thread_info.attached);
1837 o->thread_info.attached = FALSE;
1838
1839 if (o->thread_info.direct_on_input) {
1840 pa_hashmap_remove(o->thread_info.direct_on_input->thread_info.direct_outputs, PA_UINT32_TO_PTR(o->index));
1841 o->thread_info.direct_on_input = NULL;
1842 }
1843
1844 if (pa_hashmap_remove(s->thread_info.outputs, PA_UINT32_TO_PTR(o->index)))
1845 pa_source_output_unref(o);
1846
1847 pa_source_invalidate_requested_latency(s, TRUE);
1848
1849 /* In flat volume mode we need to update the volume as
1850 * well */
1851 return object->process_msg(object, PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL);
1852 }
1853
1854 case PA_SOURCE_MESSAGE_SET_SHARED_VOLUME: {
1855 pa_source *root_source = s;
1856
1857 while (root_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)
1858 root_source = root_source->output_from_master->source;
1859
1860 set_shared_volume_within_thread(root_source);
1861 return 0;
1862 }
1863
1864 case PA_SOURCE_MESSAGE_SET_VOLUME_SYNCED:
1865
1866 if (s->flags & PA_SOURCE_SYNC_VOLUME) {
1867 s->set_volume(s);
1868 pa_source_volume_change_push(s);
1869 }
1870 /* Fall through ... */
1871
1872 case PA_SOURCE_MESSAGE_SET_VOLUME:
1873
1874 if (!pa_cvolume_equal(&s->thread_info.soft_volume, &s->soft_volume)) {
1875 s->thread_info.soft_volume = s->soft_volume;
1876 }
1877
1878 /* Fall through ... */
1879
1880 case PA_SOURCE_MESSAGE_SYNC_VOLUMES:
1881 sync_output_volumes_within_thread(s);
1882 return 0;
1883
1884 case PA_SOURCE_MESSAGE_GET_VOLUME:
1885
1886 if ((s->flags & PA_SOURCE_SYNC_VOLUME) && s->get_volume) {
1887 s->get_volume(s);
1888 pa_source_volume_change_flush(s);
1889 pa_sw_cvolume_divide(&s->thread_info.current_hw_volume, &s->real_volume, &s->soft_volume);
1890 }
1891
1892 /* In case the source implementor reset the SW volume. */
1893 if (!pa_cvolume_equal(&s->thread_info.soft_volume, &s->soft_volume)) {
1894 s->thread_info.soft_volume = s->soft_volume;
1895 }
1896
1897 return 0;
1898
1899 case PA_SOURCE_MESSAGE_SET_MUTE:
1900
1901 if (s->thread_info.soft_muted != s->muted) {
1902 s->thread_info.soft_muted = s->muted;
1903 }
1904
1905 if (s->flags & PA_SOURCE_SYNC_VOLUME && s->set_mute)
1906 s->set_mute(s);
1907
1908 return 0;
1909
1910 case PA_SOURCE_MESSAGE_GET_MUTE:
1911
1912 if (s->flags & PA_SOURCE_SYNC_VOLUME && s->get_mute)
1913 s->get_mute(s);
1914
1915 return 0;
1916
1917 case PA_SOURCE_MESSAGE_SET_STATE: {
1918
1919 pa_bool_t suspend_change =
1920 (s->thread_info.state == PA_SOURCE_SUSPENDED && PA_SOURCE_IS_OPENED(PA_PTR_TO_UINT(userdata))) ||
1921 (PA_SOURCE_IS_OPENED(s->thread_info.state) && PA_PTR_TO_UINT(userdata) == PA_SOURCE_SUSPENDED);
1922
1923 s->thread_info.state = PA_PTR_TO_UINT(userdata);
1924
1925 if (suspend_change) {
1926 pa_source_output *o;
1927 void *state = NULL;
1928
1929 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL)))
1930 if (o->suspend_within_thread)
1931 o->suspend_within_thread(o, s->thread_info.state == PA_SOURCE_SUSPENDED);
1932 }
1933
1934 return 0;
1935 }
1936
1937 case PA_SOURCE_MESSAGE_DETACH:
1938
1939 /* Detach all streams */
1940 pa_source_detach_within_thread(s);
1941 return 0;
1942
1943 case PA_SOURCE_MESSAGE_ATTACH:
1944
1945 /* Reattach all streams */
1946 pa_source_attach_within_thread(s);
1947 return 0;
1948
1949 case PA_SOURCE_MESSAGE_GET_REQUESTED_LATENCY: {
1950
1951 pa_usec_t *usec = userdata;
1952 *usec = pa_source_get_requested_latency_within_thread(s);
1953
1954 /* Yes, that's right: the IO thread will see -1 when no
1955  * explicit requested latency is configured, while the main
1956  * thread will see max_latency */
1957 if (*usec == (pa_usec_t) -1)
1958 *usec = s->thread_info.max_latency;
1959
1960 return 0;
1961 }
1962
1963 case PA_SOURCE_MESSAGE_SET_LATENCY_RANGE: {
1964 pa_usec_t *r = userdata;
1965
1966 pa_source_set_latency_range_within_thread(s, r[0], r[1]);
1967
1968 return 0;
1969 }
1970
1971 case PA_SOURCE_MESSAGE_GET_LATENCY_RANGE: {
1972 pa_usec_t *r = userdata;
1973
1974 r[0] = s->thread_info.min_latency;
1975 r[1] = s->thread_info.max_latency;
1976
1977 return 0;
1978 }
1979
1980 case PA_SOURCE_MESSAGE_GET_FIXED_LATENCY:
1981
1982 *((pa_usec_t*) userdata) = s->thread_info.fixed_latency;
1983 return 0;
1984
1985 case PA_SOURCE_MESSAGE_SET_FIXED_LATENCY:
1986
1987 pa_source_set_fixed_latency_within_thread(s, (pa_usec_t) offset);
1988 return 0;
1989
1990 case PA_SOURCE_MESSAGE_GET_MAX_REWIND:
1991
1992 *((size_t*) userdata) = s->thread_info.max_rewind;
1993 return 0;
1994
1995 case PA_SOURCE_MESSAGE_SET_MAX_REWIND:
1996
1997 pa_source_set_max_rewind_within_thread(s, (size_t) offset);
1998 return 0;
1999
2000 case PA_SOURCE_MESSAGE_GET_LATENCY:
2001
2002 if (s->monitor_of) {
2003 *((pa_usec_t*) userdata) = 0;
2004 return 0;
2005 }
2006
2007 /* Implementors need to override this implementation! */
2008 return -1;
2009
2010 case PA_SOURCE_MESSAGE_SET_PORT:
2011
2012 pa_assert(userdata);
2013 if (s->set_port) {
2014 struct source_message_set_port *msg_data = userdata;
2015 msg_data->ret = s->set_port(s, msg_data->port);
2016 }
2017 return 0;
2018
2019 case PA_SOURCE_MESSAGE_UPDATE_VOLUME_AND_MUTE:
2020 /* This message is sent from the IO thread and handled in the main thread. */
2021 pa_assert_ctl_context();
2022
2023 pa_source_get_volume(s, TRUE);
2024 pa_source_get_mute(s, TRUE);
2025 return 0;
2026
2027 case PA_SOURCE_MESSAGE_MAX:
2028 ;
2029 }
2030
2031 return -1;
2032 }
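
/* Illustrative sketch (not part of the original file): source implementations
 * typically install their own process_msg handler and fall back to this
 * generic one for every code they do not handle themselves, e.g.
 *
 *     static int my_source_process_msg(pa_msgobject *o, int code, void *data,
 *                                      int64_t offset, pa_memchunk *chunk) {
 *         switch (code) {
 *             case PA_SOURCE_MESSAGE_GET_LATENCY:
 *                 // fill in *((pa_usec_t*) data) from driver-specific state
 *                 return 0;
 *         }
 *         return pa_source_process_msg(o, code, data, offset, chunk);
 *     }
 *
 * my_source_process_msg and the driver-specific latency bookkeeping are
 * hypothetical; only the fallback call is taken from this file. */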
2033
2034 /* Called from main thread */
2035 int pa_source_suspend_all(pa_core *c, pa_bool_t suspend, pa_suspend_cause_t cause) {
2036 pa_source *source;
2037 uint32_t idx;
2038 int ret = 0;
2039
2040 pa_core_assert_ref(c);
2041 pa_assert_ctl_context();
2042 pa_assert(cause != 0);
2043
2044 for (source = PA_SOURCE(pa_idxset_first(c->sources, &idx)); source; source = PA_SOURCE(pa_idxset_next(c->sources, &idx))) {
2045 int r;
2046
2047 if (source->monitor_of)
2048 continue;
2049
2050 if ((r = pa_source_suspend(source, suspend, cause)) < 0)
2051 ret = r;
2052 }
2053
2054 return ret;
2055 }
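
/* Illustrative sketch (not part of the original file): suspending every
 * non-monitor source, e.g. on user request, could look like
 *
 *     pa_source_suspend_all(c, TRUE, PA_SUSPEND_USER);
 *
 * and resuming them again like
 *
 *     pa_source_suspend_all(c, FALSE, PA_SUSPEND_USER);
 */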
2056
2057 /* Called from main thread */
2058 void pa_source_detach(pa_source *s) {
2059 pa_source_assert_ref(s);
2060 pa_assert_ctl_context();
2061 pa_assert(PA_SOURCE_IS_LINKED(s->state));
2062
2063 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_DETACH, NULL, 0, NULL) == 0);
2064 }
2065
2066 /* Called from main thread */
2067 void pa_source_attach(pa_source *s) {
2068 pa_source_assert_ref(s);
2069 pa_assert_ctl_context();
2070 pa_assert(PA_SOURCE_IS_LINKED(s->state));
2071
2072 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_ATTACH, NULL, 0, NULL) == 0);
2073 }
2074
2075 /* Called from IO thread */
2076 void pa_source_detach_within_thread(pa_source *s) {
2077 pa_source_output *o;
2078 void *state = NULL;
2079
2080 pa_source_assert_ref(s);
2081 pa_source_assert_io_context(s);
2082 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
2083
2084 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2085 if (o->detach)
2086 o->detach(o);
2087 }
2088
2089 /* Called from IO thread */
2090 void pa_source_attach_within_thread(pa_source *s) {
2091 pa_source_output *o;
2092 void *state = NULL;
2093
2094 pa_source_assert_ref(s);
2095 pa_source_assert_io_context(s);
2096 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
2097
2098 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2099 if (o->attach)
2100 o->attach(o);
2101 }
2102
2103 /* Called from IO thread */
2104 pa_usec_t pa_source_get_requested_latency_within_thread(pa_source *s) {
2105 pa_usec_t result = (pa_usec_t) -1;
2106 pa_source_output *o;
2107 void *state = NULL;
2108
2109 pa_source_assert_ref(s);
2110 pa_source_assert_io_context(s);
2111
2112 if (!(s->flags & PA_SOURCE_DYNAMIC_LATENCY))
2113 return PA_CLAMP(s->thread_info.fixed_latency, s->thread_info.min_latency, s->thread_info.max_latency);
2114
2115 if (s->thread_info.requested_latency_valid)
2116 return s->thread_info.requested_latency;
2117
2118 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2119 if (o->thread_info.requested_source_latency != (pa_usec_t) -1 &&
2120 (result == (pa_usec_t) -1 || result > o->thread_info.requested_source_latency))
2121 result = o->thread_info.requested_source_latency;
2122
2123 if (result != (pa_usec_t) -1)
2124 result = PA_CLAMP(result, s->thread_info.min_latency, s->thread_info.max_latency);
2125
2126 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2127 /* Only cache this if we are fully set up */
2128 s->thread_info.requested_latency = result;
2129 s->thread_info.requested_latency_valid = TRUE;
2130 }
2131
2132 return result;
2133 }
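
/* Worked example (not part of the original file), assuming dynamic latency is
 * supported: with two linked outputs requesting 50 ms and 20 ms, the loop
 * above picks the smaller request, 20 ms. If the configured range is, say,
 * [25 ms, 100 ms], the PA_CLAMP brings the result up to 25 ms, and that value
 * is cached in thread_info.requested_latency for subsequent calls. */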
2134
2135 /* Called from main thread */
2136 pa_usec_t pa_source_get_requested_latency(pa_source *s) {
2137 pa_usec_t usec = 0;
2138
2139 pa_source_assert_ref(s);
2140 pa_assert_ctl_context();
2141 pa_assert(PA_SOURCE_IS_LINKED(s->state));
2142
2143 if (s->state == PA_SOURCE_SUSPENDED)
2144 return 0;
2145
2146 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_REQUESTED_LATENCY, &usec, 0, NULL) == 0);
2147
2148 return usec;
2149 }
2150
2151 /* Called from IO thread */
2152 void pa_source_set_max_rewind_within_thread(pa_source *s, size_t max_rewind) {
2153 pa_source_output *o;
2154 void *state = NULL;
2155
2156 pa_source_assert_ref(s);
2157 pa_source_assert_io_context(s);
2158
2159 if (max_rewind == s->thread_info.max_rewind)
2160 return;
2161
2162 s->thread_info.max_rewind = max_rewind;
2163
2164 if (PA_SOURCE_IS_LINKED(s->thread_info.state))
2165 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2166 pa_source_output_update_max_rewind(o, s->thread_info.max_rewind);
2167 }
2168
2169 /* Called from main thread */
2170 void pa_source_set_max_rewind(pa_source *s, size_t max_rewind) {
2171 pa_source_assert_ref(s);
2172 pa_assert_ctl_context();
2173
2174 if (PA_SOURCE_IS_LINKED(s->state))
2175 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MAX_REWIND, NULL, max_rewind, NULL) == 0);
2176 else
2177 pa_source_set_max_rewind_within_thread(s, max_rewind);
2178 }
2179
2180 /* Called from IO thread */
2181 void pa_source_invalidate_requested_latency(pa_source *s, pa_bool_t dynamic) {
2182 pa_source_output *o;
2183 void *state = NULL;
2184
2185 pa_source_assert_ref(s);
2186 pa_source_assert_io_context(s);
2187
2188 if ((s->flags & PA_SOURCE_DYNAMIC_LATENCY))
2189 s->thread_info.requested_latency_valid = FALSE;
2190 else if (dynamic)
2191 return;
2192
2193 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2194
2195 if (s->update_requested_latency)
2196 s->update_requested_latency(s);
2197
2198 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL)))
2199 if (o->update_source_requested_latency)
2200 o->update_source_requested_latency(o);
2201 }
2202
2203 if (s->monitor_of)
2204 pa_sink_invalidate_requested_latency(s->monitor_of, dynamic);
2205 }
2206
2207 /* Called from main thread */
2208 void pa_source_set_latency_range(pa_source *s, pa_usec_t min_latency, pa_usec_t max_latency) {
2209 pa_source_assert_ref(s);
2210 pa_assert_ctl_context();
2211
2212 /* min_latency == 0: no limit
2213 * min_latency anything else: specified limit
2214 *
2215 * Similar for max_latency */
2216
2217 if (min_latency < ABSOLUTE_MIN_LATENCY)
2218 min_latency = ABSOLUTE_MIN_LATENCY;
2219
2220 if (max_latency <= 0 ||
2221 max_latency > ABSOLUTE_MAX_LATENCY)
2222 max_latency = ABSOLUTE_MAX_LATENCY;
2223
2224 pa_assert(min_latency <= max_latency);
2225
2226 /* Hmm, let's see if someone forgot to set PA_SOURCE_DYNAMIC_LATENCY here... */
2227 pa_assert((min_latency == ABSOLUTE_MIN_LATENCY &&
2228 max_latency == ABSOLUTE_MAX_LATENCY) ||
2229 (s->flags & PA_SOURCE_DYNAMIC_LATENCY));
2230
2231 if (PA_SOURCE_IS_LINKED(s->state)) {
2232 pa_usec_t r[2];
2233
2234 r[0] = min_latency;
2235 r[1] = max_latency;
2236
2237 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_LATENCY_RANGE, r, 0, NULL) == 0);
2238 } else
2239 pa_source_set_latency_range_within_thread(s, min_latency, max_latency);
2240 }
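
/* Illustrative sketch (not part of the original file): a driver that supports
 * dynamic latency and has no hard limits of its own can simply pass zeros,
 *
 *     pa_source_set_latency_range(s, 0, 0);
 *
 * which the clamping above turns into [ABSOLUTE_MIN_LATENCY,
 * ABSOLUTE_MAX_LATENCY]. A driver with real hardware limits would pass those
 * limits in microseconds instead. */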
2241
2242 /* Called from main thread */
2243 void pa_source_get_latency_range(pa_source *s, pa_usec_t *min_latency, pa_usec_t *max_latency) {
2244 pa_source_assert_ref(s);
2245 pa_assert_ctl_context();
2246 pa_assert(min_latency);
2247 pa_assert(max_latency);
2248
2249 if (PA_SOURCE_IS_LINKED(s->state)) {
2250 pa_usec_t r[2] = { 0, 0 };
2251
2252 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_LATENCY_RANGE, r, 0, NULL) == 0);
2253
2254 *min_latency = r[0];
2255 *max_latency = r[1];
2256 } else {
2257 *min_latency = s->thread_info.min_latency;
2258 *max_latency = s->thread_info.max_latency;
2259 }
2260 }
2261
2262 /* Called from IO thread, and from main thread before pa_source_put() is called */
2263 void pa_source_set_latency_range_within_thread(pa_source *s, pa_usec_t min_latency, pa_usec_t max_latency) {
2264 pa_source_assert_ref(s);
2265 pa_source_assert_io_context(s);
2266
2267 pa_assert(min_latency >= ABSOLUTE_MIN_LATENCY);
2268 pa_assert(max_latency <= ABSOLUTE_MAX_LATENCY);
2269 pa_assert(min_latency <= max_latency);
2270
2271 /* Hmm, let's see if someone forgot to set PA_SOURCE_DYNAMIC_LATENCY here... */
2272 pa_assert((min_latency == ABSOLUTE_MIN_LATENCY &&
2273 max_latency == ABSOLUTE_MAX_LATENCY) ||
2274 (s->flags & PA_SOURCE_DYNAMIC_LATENCY) ||
2275 s->monitor_of);
2276
2277 if (s->thread_info.min_latency == min_latency &&
2278 s->thread_info.max_latency == max_latency)
2279 return;
2280
2281 s->thread_info.min_latency = min_latency;
2282 s->thread_info.max_latency = max_latency;
2283
2284 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2285 pa_source_output *o;
2286 void *state = NULL;
2287
2288 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2289 if (o->update_source_latency_range)
2290 o->update_source_latency_range(o);
2291 }
2292
2293 pa_source_invalidate_requested_latency(s, FALSE);
2294 }
2295
2296 /* Called from main thread, before the source is put */
2297 void pa_source_set_fixed_latency(pa_source *s, pa_usec_t latency) {
2298 pa_source_assert_ref(s);
2299 pa_assert_ctl_context();
2300
2301 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY) {
2302 pa_assert(latency == 0);
2303 return;
2304 }
2305
2306 if (latency < ABSOLUTE_MIN_LATENCY)
2307 latency = ABSOLUTE_MIN_LATENCY;
2308
2309 if (latency > ABSOLUTE_MAX_LATENCY)
2310 latency = ABSOLUTE_MAX_LATENCY;
2311
2312 if (PA_SOURCE_IS_LINKED(s->state))
2313 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_FIXED_LATENCY, NULL, (int64_t) latency, NULL) == 0);
2314 else
2315 s->thread_info.fixed_latency = latency;
2316 }
2317
2318 /* Called from main thread */
2319 pa_usec_t pa_source_get_fixed_latency(pa_source *s) {
2320 pa_usec_t latency;
2321
2322 pa_source_assert_ref(s);
2323 pa_assert_ctl_context();
2324
2325 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY)
2326 return 0;
2327
2328 if (PA_SOURCE_IS_LINKED(s->state))
2329 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_FIXED_LATENCY, &latency, 0, NULL) == 0);
2330 else
2331 latency = s->thread_info.fixed_latency;
2332
2333 return latency;
2334 }
2335
2336 /* Called from IO thread */
2337 void pa_source_set_fixed_latency_within_thread(pa_source *s, pa_usec_t latency) {
2338 pa_source_assert_ref(s);
2339 pa_source_assert_io_context(s);
2340
2341 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY) {
2342 pa_assert(latency == 0);
2343 return;
2344 }
2345
2346 pa_assert(latency >= ABSOLUTE_MIN_LATENCY);
2347 pa_assert(latency <= ABSOLUTE_MAX_LATENCY);
2348
2349 if (s->thread_info.fixed_latency == latency)
2350 return;
2351
2352 s->thread_info.fixed_latency = latency;
2353
2354 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2355 pa_source_output *o;
2356 void *state = NULL;
2357
2358 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2359 if (o->update_source_fixed_latency)
2360 o->update_source_fixed_latency(o);
2361 }
2362
2363 pa_source_invalidate_requested_latency(s, FALSE);
2364 }
2365
2366 /* Called from main thread */
2367 size_t pa_source_get_max_rewind(pa_source *s) {
2368 size_t r;
2369 pa_assert_ctl_context();
2370 pa_source_assert_ref(s);
2371
2372 if (!PA_SOURCE_IS_LINKED(s->state))
2373 return s->thread_info.max_rewind;
2374
2375 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_MAX_REWIND, &r, 0, NULL) == 0);
2376
2377 return r;
2378 }
2379
2380 /* Called from main context */
2381 int pa_source_set_port(pa_source *s, const char *name, pa_bool_t save) {
2382 pa_device_port *port;
2383 int ret;
2384
2385 pa_source_assert_ref(s);
2386 pa_assert_ctl_context();
2387
2388 if (!s->set_port) {
2389 pa_log_debug("set_port() operation not implemented for source %u \"%s\"", s->index, s->name);
2390 return -PA_ERR_NOTIMPLEMENTED;
2391 }
2392
2393 if (!s->ports)
2394 return -PA_ERR_NOENTITY;
2395
2396 if (!(port = pa_hashmap_get(s->ports, name)))
2397 return -PA_ERR_NOENTITY;
2398
2399 if (s->active_port == port) {
2400 s->save_port = s->save_port || save;
2401 return 0;
2402 }
2403
2404 if (s->flags & PA_SOURCE_SYNC_VOLUME) {
2405 struct source_message_set_port msg = { .port = port, .ret = 0 };
2406 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_PORT, &msg, 0, NULL) == 0);
2407 ret = msg.ret;
2408 }
2409 else
2410 ret = s->set_port(s, port);
2411
2412 if (ret < 0)
2413 return -PA_ERR_NOENTITY;
2414
2415 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
2416
2417 pa_log_info("Changed port of source %u \"%s\" to %s", s->index, s->name, port->name);
2418
2419 s->active_port = port;
2420 s->save_port = save;
2421
2422 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PORT_CHANGED], s);
2423
2424 return 0;
2425 }
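
/* Illustrative sketch (not part of the original file): switching the active
 * port from a protocol or CLI handler; the port name is hypothetical:
 *
 *     if (pa_source_set_port(s, "analog-input-microphone", TRUE) < 0)
 *         pa_log_warn("Failed to switch port");
 *
 * On success the change is saved, subscribers are notified and the
 * SOURCE_PORT_CHANGED hook fires, as implemented above. */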
2426
2427 PA_STATIC_FLIST_DECLARE(pa_source_volume_change, 0, pa_xfree);
2428
2429 /* Called from the IO thread. */
2430 static pa_source_volume_change *pa_source_volume_change_new(pa_source *s) {
2431 pa_source_volume_change *c;
2432 if (!(c = pa_flist_pop(PA_STATIC_FLIST_GET(pa_source_volume_change))))
2433 c = pa_xnew(pa_source_volume_change, 1);
2434
2435 PA_LLIST_INIT(pa_source_volume_change, c);
2436 c->at = 0;
2437 pa_cvolume_reset(&c->hw_volume, s->sample_spec.channels);
2438 return c;
2439 }
2440
2441 /* Called from the IO thread. */
2442 static void pa_source_volume_change_free(pa_source_volume_change *c) {
2443 pa_assert(c);
2444 if (pa_flist_push(PA_STATIC_FLIST_GET(pa_source_volume_change), c) < 0)
2445 pa_xfree(c);
2446 }
2447
2448 /* Called from the IO thread. */
2449 void pa_source_volume_change_push(pa_source *s) {
2450 pa_source_volume_change *c = NULL;
2451 pa_source_volume_change *nc = NULL;
2452 uint32_t safety_margin = s->thread_info.volume_change_safety_margin;
2453
2454 const char *direction = NULL;
2455
2456 pa_assert(s);
2457 nc = pa_source_volume_change_new(s);
2458
2459 /* NOTE: There are already more different volumes in pa_source than I can remember.
2460  * Adding one more volume for HW would get rid of this, but I am trying
2461  * to survive with the ones we already have. */
2462 pa_sw_cvolume_divide(&nc->hw_volume, &s->real_volume, &s->soft_volume);
2463
2464 if (!s->thread_info.volume_changes && pa_cvolume_equal(&nc->hw_volume, &s->thread_info.current_hw_volume)) {
2465 pa_log_debug("Volume not changing");
2466 pa_source_volume_change_free(nc);
2467 return;
2468 }
2469
2470 nc->at = pa_source_get_latency_within_thread(s);
2471 nc->at += pa_rtclock_now() + s->thread_info.volume_change_extra_delay;
2472
2473 if (s->thread_info.volume_changes_tail) {
2474 for (c = s->thread_info.volume_changes_tail; c; c = c->prev) {
2475 /* If volume is going up let's do it a bit late. If it is going
2476 * down let's do it a bit early. */
2477 if (pa_cvolume_avg(&nc->hw_volume) > pa_cvolume_avg(&c->hw_volume)) {
2478 if (nc->at + safety_margin > c->at) {
2479 nc->at += safety_margin;
2480 direction = "up";
2481 break;
2482 }
2483 }
2484 else if (nc->at - safety_margin > c->at) {
2485 nc->at -= safety_margin;
2486 direction = "down";
2487 break;
2488 }
2489 }
2490 }
2491
2492 if (c == NULL) {
2493 if (pa_cvolume_avg(&nc->hw_volume) > pa_cvolume_avg(&s->thread_info.current_hw_volume)) {
2494 nc->at += safety_margin;
2495 direction = "up";
2496 } else {
2497 nc->at -= safety_margin;
2498 direction = "down";
2499 }
2500 PA_LLIST_PREPEND(pa_source_volume_change, s->thread_info.volume_changes, nc);
2501 }
2502 else {
2503 PA_LLIST_INSERT_AFTER(pa_source_volume_change, s->thread_info.volume_changes, c, nc);
2504 }
2505
2506 pa_log_debug("Volume going %s to %d at %llu", direction, pa_cvolume_avg(&nc->hw_volume), (long long unsigned) nc->at);
2507
2508 /* We can ignore volume changes that were pushed earlier but are scheduled to happen later than this one. */
2509 PA_LLIST_FOREACH(c, nc->next) {
2510 pa_log_debug("Volume change to %d at %llu was dropped", pa_cvolume_avg(&c->hw_volume), (long long unsigned) c->at);
2511 pa_source_volume_change_free(c);
2512 }
2513 nc->next = NULL;
2514 s->thread_info.volume_changes_tail = nc;
2515 }
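
/* Worked example (not part of the original file): with a source latency of
 * 30 ms, no extra delay and a safety margin of 8 ms, a volume increase pushed
 * now is scheduled for roughly now + 38 ms (a bit late), while a volume
 * decrease is scheduled for roughly now + 22 ms (a bit early), matching the
 * "up late, down early" rule applied above. */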
2516
2517 /* Called from the IO thread. */
2518 static void pa_source_volume_change_flush(pa_source *s) {
2519 pa_source_volume_change *c = s->thread_info.volume_changes;
2520 pa_assert(s);
2521 s->thread_info.volume_changes = NULL;
2522 s->thread_info.volume_changes_tail = NULL;
2523 while (c) {
2524 pa_source_volume_change *next = c->next;
2525 pa_source_volume_change_free(c);
2526 c = next;
2527 }
2528 }
2529
2530 /* Called from the IO thread. */
2531 pa_bool_t pa_source_volume_change_apply(pa_source *s, pa_usec_t *usec_to_next) {
2532 pa_usec_t now = pa_rtclock_now();
2533 pa_bool_t ret = FALSE;
2534
2535 pa_assert(s);
2536 pa_assert(s->write_volume);
2537
2538 while (s->thread_info.volume_changes && now >= s->thread_info.volume_changes->at) {
2539 pa_source_volume_change *c = s->thread_info.volume_changes;
2540 PA_LLIST_REMOVE(pa_source_volume_change, s->thread_info.volume_changes, c);
2541 pa_log_debug("Volume change to %d at %llu was written %llu usec late",
2542 pa_cvolume_avg(&c->hw_volume), (long long unsigned) c->at, (long long unsigned) (now - c->at));
2543 ret = TRUE;
2544 s->thread_info.current_hw_volume = c->hw_volume;
2545 pa_source_volume_change_free(c);
2546 }
2547
2548 if (s->write_volume && ret)
2549 s->write_volume(s);
2550
2551 if (s->thread_info.volume_changes) {
2552 if (usec_to_next)
2553 *usec_to_next = s->thread_info.volume_changes->at - now;
2554 if (pa_log_ratelimit(PA_LOG_DEBUG))
2555 pa_log_debug("Next volume change in %lld usec", (long long) (s->thread_info.volume_changes->at - now));
2556 }
2557 else {
2558 if (usec_to_next)
2559 *usec_to_next = 0;
2560 s->thread_info.volume_changes_tail = NULL;
2561 }
2562 return ret;
2563 }
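
/* Illustrative sketch (not part of the original file): a driver that defers
 * hardware volume writes (PA_SOURCE_SYNC_VOLUME) would call this from its IO
 * loop and use the returned wakeup hint, e.g.
 *
 *     pa_usec_t next;
 *     if (pa_source_volume_change_apply(s, &next))
 *         ... volume was written via s->write_volume() ...
 *     if (next > 0)
 *         ... arrange to wake up again in "next" usec ...
 *
 * The surrounding IO-loop plumbing is hypothetical; only the call itself and
 * the meaning of its arguments come from the implementation above. */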
2564
2565
2566 /* Called from the main thread */
2567 /* Gets the list of formats supported by the source. The members and idxset must
2568 * be freed by the caller. */
2569 pa_idxset* pa_source_get_formats(pa_source *s) {
2570 pa_idxset *ret;
2571
2572 pa_assert(s);
2573
2574 if (s->get_formats) {
2575 /* Source supports format query, all is good */
2576 ret = s->get_formats(s);
2577 } else {
2578 /* Source doesn't support format query, so assume it does PCM */
2579 pa_format_info *f = pa_format_info_new();
2580 f->encoding = PA_ENCODING_PCM;
2581
2582 ret = pa_idxset_new(NULL, NULL);
2583 pa_idxset_put(ret, f, NULL);
2584 }
2585
2586 return ret;
2587 }
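
/* Illustrative sketch (not part of the original file): callers own the
 * returned idxset and its members and free them the same way
 * pa_source_check_format() does below:
 *
 *     pa_idxset *formats = pa_source_get_formats(s);
 *     ... inspect the pa_format_info entries ...
 *     pa_idxset_free(formats, (pa_free2_cb_t) pa_format_info_free2, NULL);
 */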
2588
2589 /* Called from the main thread */
2590 /* Checks if the source can accept this format */
2591 pa_bool_t pa_source_check_format(pa_source *s, pa_format_info *f)
2592 {
2593 pa_idxset *formats = NULL;
2594 pa_bool_t ret = FALSE;
2595
2596 pa_assert(s);
2597 pa_assert(f);
2598
2599 formats = pa_source_get_formats(s);
2600
2601 if (formats) {
2602 pa_format_info *finfo_device;
2603 uint32_t i;
2604
2605 PA_IDXSET_FOREACH(finfo_device, formats, i) {
2606 if (pa_format_info_is_compatible(finfo_device, f)) {
2607 ret = TRUE;
2608 break;
2609 }
2610 }
2611
2612 pa_idxset_free(formats, (pa_free2_cb_t) pa_format_info_free2, NULL);
2613 }
2614
2615 return ret;
2616 }
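
/* Illustrative sketch (not part of the original file): asking whether the
 * source can record plain PCM:
 *
 *     pa_format_info *f = pa_format_info_new();
 *     f->encoding = PA_ENCODING_PCM;
 *     if (pa_source_check_format(s, f))
 *         ... the source accepts PCM ...
 *     pa_format_info_free(f);
 */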
2617
2618 /* Called from the main thread */
2619 /* Calculates the intersection between formats supported by the source and
2620 * in_formats, and returns these, in the order of the source's formats. */
2621 pa_idxset* pa_source_check_formats(pa_source *s, pa_idxset *in_formats) {
2622 pa_idxset *out_formats = pa_idxset_new(NULL, NULL), *source_formats = NULL;
2623 pa_format_info *f_source, *f_in;
2624 uint32_t i, j;
2625
2626 pa_assert(s);
2627
2628 if (!in_formats || pa_idxset_isempty(in_formats))
2629 goto done;
2630
2631 source_formats = pa_source_get_formats(s);
2632
2633 PA_IDXSET_FOREACH(f_source, source_formats, i) {
2634 PA_IDXSET_FOREACH(f_in, in_formats, j) {
2635 if (pa_format_info_is_compatible(f_source, f_in))
2636 pa_idxset_put(out_formats, pa_format_info_copy(f_in), NULL);
2637 }
2638 }
2639
2640 done:
2641 if (source_formats)
2642 pa_idxset_free(source_formats, (pa_free2_cb_t) pa_format_info_free2, NULL);
2643
2644 return out_formats;
2645 }