1 /***
2 This file is part of PulseAudio.
3
4 Copyright 2004-2006 Lennart Poettering
5 Copyright 2006 Pierre Ossman <ossman@cendio.se> for Cendio AB
6
7 PulseAudio is free software; you can redistribute it and/or modify
8 it under the terms of the GNU Lesser General Public License as published
9 by the Free Software Foundation; either version 2.1 of the License,
10 or (at your option) any later version.
11
12 PulseAudio is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
16
17 You should have received a copy of the GNU Lesser General Public License
18 along with PulseAudio; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
20 USA.
21 ***/
22
23 #ifdef HAVE_CONFIG_H
24 #include <config.h>
25 #endif
26
27 #include <stdio.h>
28 #include <stdlib.h>
29
30 #include <pulse/format.h>
31 #include <pulse/utf8.h>
32 #include <pulse/xmalloc.h>
33 #include <pulse/timeval.h>
34 #include <pulse/util.h>
35 #include <pulse/rtclock.h>
36 #include <pulse/internal.h>
37
38 #include <pulsecore/core-util.h>
39 #include <pulsecore/source-output.h>
40 #include <pulsecore/namereg.h>
41 #include <pulsecore/core-subscribe.h>
42 #include <pulsecore/log.h>
43 #include <pulsecore/sample-util.h>
44 #include <pulsecore/flist.h>
45
46 #include "source.h"
47
48 #define ABSOLUTE_MIN_LATENCY (500)
49 #define ABSOLUTE_MAX_LATENCY (10*PA_USEC_PER_SEC)
50 #define DEFAULT_FIXED_LATENCY (250*PA_USEC_PER_MSEC)
51
52 PA_DEFINE_PUBLIC_CLASS(pa_source, pa_msgobject);
53
54 struct pa_source_volume_change {
55 pa_usec_t at;
56 pa_cvolume hw_volume;
57
58 PA_LLIST_FIELDS(pa_source_volume_change);
59 };
60
61 struct source_message_set_port {
62 pa_device_port *port;
63 int ret;
64 };
65
66 static void source_free(pa_object *o);
67
68 static void pa_source_volume_change_push(pa_source *s);
69 static void pa_source_volume_change_flush(pa_source *s);
70
71 pa_source_new_data* pa_source_new_data_init(pa_source_new_data *data) {
72 pa_assert(data);
73
74 pa_zero(*data);
75 data->proplist = pa_proplist_new();
76
77 return data;
78 }
79
80 void pa_source_new_data_set_name(pa_source_new_data *data, const char *name) {
81 pa_assert(data);
82
83 pa_xfree(data->name);
84 data->name = pa_xstrdup(name);
85 }
86
87 void pa_source_new_data_set_sample_spec(pa_source_new_data *data, const pa_sample_spec *spec) {
88 pa_assert(data);
89
90 if ((data->sample_spec_is_set = !!spec))
91 data->sample_spec = *spec;
92 }
93
94 void pa_source_new_data_set_channel_map(pa_source_new_data *data, const pa_channel_map *map) {
95 pa_assert(data);
96
97 if ((data->channel_map_is_set = !!map))
98 data->channel_map = *map;
99 }
100
101 void pa_source_new_data_set_alternate_sample_rate(pa_source_new_data *data, const uint32_t alternate_sample_rate) {
102 pa_assert(data);
103
104 data->alternate_sample_rate_is_set = TRUE;
105 data->alternate_sample_rate = alternate_sample_rate;
106 }
107
108 void pa_source_new_data_set_volume(pa_source_new_data *data, const pa_cvolume *volume) {
109 pa_assert(data);
110
111 if ((data->volume_is_set = !!volume))
112 data->volume = *volume;
113 }
114
115 void pa_source_new_data_set_muted(pa_source_new_data *data, pa_bool_t mute) {
116 pa_assert(data);
117
118 data->muted_is_set = TRUE;
119 data->muted = !!mute;
120 }
121
122 void pa_source_new_data_set_port(pa_source_new_data *data, const char *port) {
123 pa_assert(data);
124
125 pa_xfree(data->active_port);
126 data->active_port = pa_xstrdup(port);
127 }
128
129 void pa_source_new_data_done(pa_source_new_data *data) {
130 pa_assert(data);
131
132 pa_proplist_free(data->proplist);
133
134 if (data->ports) {
135 pa_device_port *p;
136
137 while ((p = pa_hashmap_steal_first(data->ports)))
138 pa_device_port_free(p);
139
140 pa_hashmap_free(data->ports, NULL, NULL);
141 }
142
143 pa_xfree(data->name);
144 pa_xfree(data->active_port);
145 }
146
147 /* Called from main context */
148 static void reset_callbacks(pa_source *s) {
149 pa_assert(s);
150
151 s->set_state = NULL;
152 s->get_volume = NULL;
153 s->set_volume = NULL;
154 s->write_volume = NULL;
155 s->get_mute = NULL;
156 s->set_mute = NULL;
157 s->update_requested_latency = NULL;
158 s->set_port = NULL;
159 s->get_formats = NULL;
160 s->update_rate = NULL;
161 }
162
163 /* Called from main context */
164 pa_source* pa_source_new(
165 pa_core *core,
166 pa_source_new_data *data,
167 pa_source_flags_t flags) {
168
169 pa_source *s;
170 const char *name;
171 char st[PA_SAMPLE_SPEC_SNPRINT_MAX], cm[PA_CHANNEL_MAP_SNPRINT_MAX];
172 char *pt;
173
174 pa_assert(core);
175 pa_assert(data);
176 pa_assert(data->name);
177 pa_assert_ctl_context();
178
179 s = pa_msgobject_new(pa_source);
180
181 if (!(name = pa_namereg_register(core, data->name, PA_NAMEREG_SOURCE, s, data->namereg_fail))) {
182 pa_log_debug("Failed to register name %s.", data->name);
183 pa_xfree(s);
184 return NULL;
185 }
186
187 pa_source_new_data_set_name(data, name);
188
189 if (pa_hook_fire(&core->hooks[PA_CORE_HOOK_SOURCE_NEW], data) < 0) {
190 pa_xfree(s);
191 pa_namereg_unregister(core, name);
192 return NULL;
193 }
194
195 /* FIXME, need to free s here on failure */
196
197 pa_return_null_if_fail(!data->driver || pa_utf8_valid(data->driver));
198 pa_return_null_if_fail(data->name && pa_utf8_valid(data->name) && data->name[0]);
199
200 pa_return_null_if_fail(data->sample_spec_is_set && pa_sample_spec_valid(&data->sample_spec));
201
202 if (!data->channel_map_is_set)
203 pa_return_null_if_fail(pa_channel_map_init_auto(&data->channel_map, data->sample_spec.channels, PA_CHANNEL_MAP_DEFAULT));
204
205 pa_return_null_if_fail(pa_channel_map_valid(&data->channel_map));
206 pa_return_null_if_fail(data->channel_map.channels == data->sample_spec.channels);
207
208 /* FIXME: There should probably be a general function for checking whether
209 * the source volume is allowed to be set, like there is for source outputs. */
210 pa_assert(!data->volume_is_set || !(flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
211
212 if (!data->volume_is_set) {
213 pa_cvolume_reset(&data->volume, data->sample_spec.channels);
214 data->save_volume = FALSE;
215 }
216
217 pa_return_null_if_fail(pa_cvolume_valid(&data->volume));
218 pa_return_null_if_fail(pa_cvolume_compatible(&data->volume, &data->sample_spec));
219
220 if (!data->muted_is_set)
221 data->muted = FALSE;
222
223 if (data->card)
224 pa_proplist_update(data->proplist, PA_UPDATE_MERGE, data->card->proplist);
225
226 pa_device_init_description(data->proplist);
227 pa_device_init_icon(data->proplist, FALSE);
228 pa_device_init_intended_roles(data->proplist);
229
230 if (pa_hook_fire(&core->hooks[PA_CORE_HOOK_SOURCE_FIXATE], data) < 0) {
231 pa_xfree(s);
232 pa_namereg_unregister(core, name);
233 return NULL;
234 }
235
236 s->parent.parent.free = source_free;
237 s->parent.process_msg = pa_source_process_msg;
238
239 s->core = core;
240 s->state = PA_SOURCE_INIT;
241 s->flags = flags;
242 s->priority = 0;
243 s->suspend_cause = 0;
244 s->name = pa_xstrdup(name);
245 s->proplist = pa_proplist_copy(data->proplist);
246 s->driver = pa_xstrdup(pa_path_get_filename(data->driver));
247 s->module = data->module;
248 s->card = data->card;
249
250 s->priority = pa_device_init_priority(s->proplist);
251
252 s->sample_spec = data->sample_spec;
253 s->channel_map = data->channel_map;
254 s->default_sample_rate = s->sample_spec.rate;
255
256 if (data->alternate_sample_rate_is_set)
257 s->alternate_sample_rate = data->alternate_sample_rate;
258 else
259 s->alternate_sample_rate = s->core->alternate_sample_rate;
260
261 if (s->sample_spec.rate == s->alternate_sample_rate) {
262 pa_log_warn("Default and alternate sample rates are the same.");
263 s->alternate_sample_rate = 0;
264 }
265
266 s->outputs = pa_idxset_new(NULL, NULL);
267 s->n_corked = 0;
268 s->monitor_of = NULL;
269 s->output_from_master = NULL;
270
271 s->reference_volume = s->real_volume = data->volume;
272 pa_cvolume_reset(&s->soft_volume, s->sample_spec.channels);
273 s->base_volume = PA_VOLUME_NORM;
274 s->n_volume_steps = PA_VOLUME_NORM+1;
275 s->muted = data->muted;
276 s->refresh_volume = s->refresh_muted = FALSE;
277
278 reset_callbacks(s);
279 s->userdata = NULL;
280
281 s->asyncmsgq = NULL;
282
283 /* As a minor optimization we just steal the list instead of
284 * copying it here */
285 s->ports = data->ports;
286 data->ports = NULL;
287
288 s->active_port = NULL;
289 s->save_port = FALSE;
290
291 if (data->active_port && s->ports)
292 if ((s->active_port = pa_hashmap_get(s->ports, data->active_port)))
293 s->save_port = data->save_port;
294
295 if (!s->active_port && s->ports) {
296 void *state;
297 pa_device_port *p;
298
299 PA_HASHMAP_FOREACH(p, s->ports, state)
300 if (!s->active_port || p->priority > s->active_port->priority)
301 s->active_port = p;
302 }
303
304 s->save_volume = data->save_volume;
305 s->save_muted = data->save_muted;
306
307 pa_silence_memchunk_get(
308 &core->silence_cache,
309 core->mempool,
310 &s->silence,
311 &s->sample_spec,
312 0);
313
314 s->thread_info.rtpoll = NULL;
315 s->thread_info.outputs = pa_hashmap_new(pa_idxset_trivial_hash_func, pa_idxset_trivial_compare_func);
316 s->thread_info.soft_volume = s->soft_volume;
317 s->thread_info.soft_muted = s->muted;
318 s->thread_info.state = s->state;
319 s->thread_info.max_rewind = 0;
320 s->thread_info.requested_latency_valid = FALSE;
321 s->thread_info.requested_latency = 0;
322 s->thread_info.min_latency = ABSOLUTE_MIN_LATENCY;
323 s->thread_info.max_latency = ABSOLUTE_MAX_LATENCY;
324 s->thread_info.fixed_latency = flags & PA_SOURCE_DYNAMIC_LATENCY ? 0 : DEFAULT_FIXED_LATENCY;
325
326 PA_LLIST_HEAD_INIT(pa_source_volume_change, s->thread_info.volume_changes);
327 s->thread_info.volume_changes_tail = NULL;
328 pa_sw_cvolume_multiply(&s->thread_info.current_hw_volume, &s->soft_volume, &s->real_volume);
329 s->thread_info.volume_change_safety_margin = core->deferred_volume_safety_margin_usec;
330 s->thread_info.volume_change_extra_delay = core->deferred_volume_extra_delay_usec;
331
332 /* FIXME: This should probably be moved to pa_source_put() */
333 pa_assert_se(pa_idxset_put(core->sources, s, &s->index) >= 0);
334
335 if (s->card)
336 pa_assert_se(pa_idxset_put(s->card->sources, s, NULL) >= 0);
337
338 pt = pa_proplist_to_string_sep(s->proplist, "\n ");
339 pa_log_info("Created source %u \"%s\" with sample spec %s and channel map %s\n %s",
340 s->index,
341 s->name,
342 pa_sample_spec_snprint(st, sizeof(st), &s->sample_spec),
343 pa_channel_map_snprint(cm, sizeof(cm), &s->channel_map),
344 pt);
345 pa_xfree(pt);
346
347 return s;
348 }
349
350 /* Called from main context */
351 static int source_set_state(pa_source *s, pa_source_state_t state) {
352 int ret;
353 pa_bool_t suspend_change;
354 pa_source_state_t original_state;
355
356 pa_assert(s);
357 pa_assert_ctl_context();
358
359 if (s->state == state)
360 return 0;
361
362 original_state = s->state;
363
364 suspend_change =
365 (original_state == PA_SOURCE_SUSPENDED && PA_SOURCE_IS_OPENED(state)) ||
366 (PA_SOURCE_IS_OPENED(original_state) && state == PA_SOURCE_SUSPENDED);
367
368 if (s->set_state)
369 if ((ret = s->set_state(s, state)) < 0)
370 return ret;
371
372 if (s->asyncmsgq)
373 if ((ret = pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_STATE, PA_UINT_TO_PTR(state), 0, NULL)) < 0) {
374
375 if (s->set_state)
376 s->set_state(s, original_state);
377
378 return ret;
379 }
380
381 s->state = state;
382
383 if (state != PA_SOURCE_UNLINKED) { /* if we enter UNLINKED state pa_source_unlink() will fire the appropriate events */
384 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_STATE_CHANGED], s);
385 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
386 }
387
388 if (suspend_change) {
389 pa_source_output *o;
390 uint32_t idx;
391
392 /* We're suspending or resuming, tell everyone about it */
393
394 PA_IDXSET_FOREACH(o, s->outputs, idx)
395 if (s->state == PA_SOURCE_SUSPENDED &&
396 (o->flags & PA_SOURCE_OUTPUT_KILL_ON_SUSPEND))
397 pa_source_output_kill(o);
398 else if (o->suspend)
399 o->suspend(o, state == PA_SOURCE_SUSPENDED);
400 }
401
402 return 0;
403 }
404
405 void pa_source_set_get_volume_callback(pa_source *s, pa_source_cb_t cb) {
406 pa_assert(s);
407
408 s->get_volume = cb;
409 }
410
411 void pa_source_set_set_volume_callback(pa_source *s, pa_source_cb_t cb) {
412 pa_source_flags_t flags;
413
414 pa_assert(s);
415 pa_assert(!s->write_volume || cb);
416
417 s->set_volume = cb;
418
419 /* Save the current flags so we can tell if they've changed */
420 flags = s->flags;
421
422 if (cb) {
423 /* The source implementor is responsible for setting decibel volume support */
424 s->flags |= PA_SOURCE_HW_VOLUME_CTRL;
425 } else {
426 s->flags &= ~PA_SOURCE_HW_VOLUME_CTRL;
427 /* See note below in pa_source_put() about volume sharing and decibel volumes */
428 pa_source_enable_decibel_volume(s, !(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
429 }
430
431 /* If the flags have changed after init, let any clients know via a change event */
432 if (s->state != PA_SOURCE_INIT && flags != s->flags)
433 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
434 }
435
436 void pa_source_set_write_volume_callback(pa_source *s, pa_source_cb_t cb) {
437 pa_source_flags_t flags;
438
439 pa_assert(s);
440 pa_assert(!cb || s->set_volume);
441
442 s->write_volume = cb;
443
444 /* Save the current flags so we can tell if they've changed */
445 flags = s->flags;
446
447 if (cb)
448 s->flags |= PA_SOURCE_DEFERRED_VOLUME;
449 else
450 s->flags &= ~PA_SOURCE_DEFERRED_VOLUME;
451
452 /* If the flags have changed after init, let any clients know via a change event */
453 if (s->state != PA_SOURCE_INIT && flags != s->flags)
454 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
455 }
456
457 void pa_source_set_get_mute_callback(pa_source *s, pa_source_cb_t cb) {
458 pa_assert(s);
459
460 s->get_mute = cb;
461 }
462
463 void pa_source_set_set_mute_callback(pa_source *s, pa_source_cb_t cb) {
464 pa_source_flags_t flags;
465
466 pa_assert(s);
467
468 s->set_mute = cb;
469
470 /* Save the current flags so we can tell if they've changed */
471 flags = s->flags;
472
473 if (cb)
474 s->flags |= PA_SOURCE_HW_MUTE_CTRL;
475 else
476 s->flags &= ~PA_SOURCE_HW_MUTE_CTRL;
477
478 /* If the flags have changed after init, let any clients know via a change event */
479 if (s->state != PA_SOURCE_INIT && flags != s->flags)
480 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
481 }
482
483 static void enable_flat_volume(pa_source *s, pa_bool_t enable) {
484 pa_source_flags_t flags;
485
486 pa_assert(s);
487
488 /* Always follow the overall user preference here */
489 enable = enable && s->core->flat_volumes;
490
491 /* Save the current flags so we can tell if they've changed */
492 flags = s->flags;
493
494 if (enable)
495 s->flags |= PA_SOURCE_FLAT_VOLUME;
496 else
497 s->flags &= ~PA_SOURCE_FLAT_VOLUME;
498
499 /* If the flags have changed after init, let any clients know via a change event */
500 if (s->state != PA_SOURCE_INIT && flags != s->flags)
501 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
502 }
503
504 void pa_source_enable_decibel_volume(pa_source *s, pa_bool_t enable) {
505 pa_source_flags_t flags;
506
507 pa_assert(s);
508
509 /* Save the current flags so we can tell if they've changed */
510 flags = s->flags;
511
512 if (enable) {
513 s->flags |= PA_SOURCE_DECIBEL_VOLUME;
514 enable_flat_volume(s, TRUE);
515 } else {
516 s->flags &= ~PA_SOURCE_DECIBEL_VOLUME;
517 enable_flat_volume(s, FALSE);
518 }
519
520 /* If the flags have changed after init, let any clients know via a change event */
521 if (s->state != PA_SOURCE_INIT && flags != s->flags)
522 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
523 }
524
525 /* Called from main context */
526 void pa_source_put(pa_source *s) {
527 pa_source_assert_ref(s);
528 pa_assert_ctl_context();
529
530 pa_assert(s->state == PA_SOURCE_INIT);
531 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) || s->output_from_master);
532
533 /* The following fields must be initialized properly when calling _put() */
534 pa_assert(s->asyncmsgq);
535 pa_assert(s->thread_info.min_latency <= s->thread_info.max_latency);
536
537 /* Generally, flags should be initialized via pa_source_new(). As a
538 * special exception we allow some volume related flags to be set
539 * between _new() and _put() by the callback setter functions above.
540 *
541 * Thus we implement a couple safeguards here which ensure the above
542 * setters were used (or at least the implementor made manual changes
543 * in a compatible way).
544 *
545 * Note: All of these flags set here can change over the life time
546 * of the source. */
547 pa_assert(!(s->flags & PA_SOURCE_HW_VOLUME_CTRL) || s->set_volume);
548 pa_assert(!(s->flags & PA_SOURCE_DEFERRED_VOLUME) || s->write_volume);
549 pa_assert(!(s->flags & PA_SOURCE_HW_MUTE_CTRL) || s->set_mute);
550
551 /* XXX: Currently decibel volume is disabled for all sources that use volume
552 * sharing. When the master source supports decibel volume, it would be good
553 * to have the flag also in the filter source, but currently we don't do that
554 * so that the flags of the filter source never change when it's moved from
555 * a master source to another. One solution for this problem would be to
556 * remove user-visible volume altogether from filter sources when volume
557 * sharing is used, but the current approach was easier to implement... */
558 /* We always support decibel volumes in software, otherwise we leave it to
559 * the source implementor to set this flag as needed.
560 *
561 * Note: This flag can also change over the life time of the source. */
562 if (!(s->flags & PA_SOURCE_HW_VOLUME_CTRL) && !(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
563 pa_source_enable_decibel_volume(s, TRUE);
564
565 /* If the source implementor supports dB volumes by itself, we should always
566 * try to enable flat volumes too */
567 if ((s->flags & PA_SOURCE_DECIBEL_VOLUME))
568 enable_flat_volume(s, TRUE);
569
570 if (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) {
571 pa_source *root_source = pa_source_get_master(s);
572
573 pa_assert(PA_LIKELY(root_source));
574
575 s->reference_volume = root_source->reference_volume;
576 pa_cvolume_remap(&s->reference_volume, &root_source->channel_map, &s->channel_map);
577
578 s->real_volume = root_source->real_volume;
579 pa_cvolume_remap(&s->real_volume, &root_source->channel_map, &s->channel_map);
580 } else
581 /* We assume that if the source implementor changed the default
582 * volume he did so in real_volume, because that is the usual
583 * place where he is supposed to place his changes. */
584 s->reference_volume = s->real_volume;
585
586 s->thread_info.soft_volume = s->soft_volume;
587 s->thread_info.soft_muted = s->muted;
588 pa_sw_cvolume_multiply(&s->thread_info.current_hw_volume, &s->soft_volume, &s->real_volume);
589
590 pa_assert((s->flags & PA_SOURCE_HW_VOLUME_CTRL)
591 || (s->base_volume == PA_VOLUME_NORM
592 && ((s->flags & PA_SOURCE_DECIBEL_VOLUME || (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)))));
593 pa_assert(!(s->flags & PA_SOURCE_DECIBEL_VOLUME) || s->n_volume_steps == PA_VOLUME_NORM+1);
594 pa_assert(!(s->flags & PA_SOURCE_DYNAMIC_LATENCY) == (s->thread_info.fixed_latency != 0));
595
596 pa_assert_se(source_set_state(s, PA_SOURCE_IDLE) == 0);
597
598 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_NEW, s->index);
599 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PUT], s);
600 }
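/*
 * Illustrative sketch of the usual implementor sequence built from the helpers
 * above. The names u, ss, map and my_source_set_volume() are hypothetical
 * placeholders; only the pa_source_* calls are taken from this file.
 *
 *     pa_source_new_data data;
 *     pa_source *s;
 *
 *     pa_source_new_data_init(&data);
 *     pa_source_new_data_set_name(&data, "example_source");
 *     pa_source_new_data_set_sample_spec(&data, &ss);
 *     pa_source_new_data_set_channel_map(&data, &map);
 *
 *     s = pa_source_new(core, &data, PA_SOURCE_HARDWARE|PA_SOURCE_LATENCY);
 *     pa_source_new_data_done(&data);
 *
 *     // Volume/mute callbacks may be set between _new() and _put(); the
 *     // setters above also adjust the corresponding flags.
 *     pa_source_set_set_volume_callback(s, my_source_set_volume);
 *
 *     pa_source_set_asyncmsgq(s, u->thread_mq.inq);
 *     pa_source_set_rtpoll(s, u->rtpoll);
 *
 *     pa_source_put(s);
 */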
601
602 /* Called from main context */
603 void pa_source_unlink(pa_source *s) {
604 pa_bool_t linked;
605 pa_source_output *o, *j = NULL;
606
607 pa_assert(s);
608 pa_assert_ctl_context();
609
610 /* See pa_sink_unlink() for a couple of comments on how this function
611 * works. */
612
613 linked = PA_SOURCE_IS_LINKED(s->state);
614
615 if (linked)
616 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_UNLINK], s);
617
618 if (s->state != PA_SOURCE_UNLINKED)
619 pa_namereg_unregister(s->core, s->name);
620 pa_idxset_remove_by_data(s->core->sources, s, NULL);
621
622 if (s->card)
623 pa_idxset_remove_by_data(s->card->sources, s, NULL);
624
625 while ((o = pa_idxset_first(s->outputs, NULL))) {
626 pa_assert(o != j);
627 pa_source_output_kill(o);
628 j = o;
629 }
630
631 if (linked)
632 source_set_state(s, PA_SOURCE_UNLINKED);
633 else
634 s->state = PA_SOURCE_UNLINKED;
635
636 reset_callbacks(s);
637
638 if (linked) {
639 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_REMOVE, s->index);
640 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_UNLINK_POST], s);
641 }
642 }
643
644 /* Called from main context */
645 static void source_free(pa_object *o) {
646 pa_source_output *so;
647 pa_source *s = PA_SOURCE(o);
648
649 pa_assert(s);
650 pa_assert_ctl_context();
651 pa_assert(pa_source_refcnt(s) == 0);
652
653 if (PA_SOURCE_IS_LINKED(s->state))
654 pa_source_unlink(s);
655
656 pa_log_info("Freeing source %u \"%s\"", s->index, s->name);
657
658 pa_idxset_free(s->outputs, NULL, NULL);
659
660 while ((so = pa_hashmap_steal_first(s->thread_info.outputs)))
661 pa_source_output_unref(so);
662
663 pa_hashmap_free(s->thread_info.outputs, NULL, NULL);
664
665 if (s->silence.memblock)
666 pa_memblock_unref(s->silence.memblock);
667
668 pa_xfree(s->name);
669 pa_xfree(s->driver);
670
671 if (s->proplist)
672 pa_proplist_free(s->proplist);
673
674 if (s->ports) {
675 pa_device_port *p;
676
677 while ((p = pa_hashmap_steal_first(s->ports)))
678 pa_device_port_free(p);
679
680 pa_hashmap_free(s->ports, NULL, NULL);
681 }
682
683 pa_xfree(s);
684 }
685
686 /* Called from main context, and not while the IO thread is active, please */
687 void pa_source_set_asyncmsgq(pa_source *s, pa_asyncmsgq *q) {
688 pa_source_assert_ref(s);
689 pa_assert_ctl_context();
690
691 s->asyncmsgq = q;
692 }
693
694 /* Called from main context, and not while the IO thread is active, please */
695 void pa_source_update_flags(pa_source *s, pa_source_flags_t mask, pa_source_flags_t value) {
696 pa_source_assert_ref(s);
697 pa_assert_ctl_context();
698
699 if (mask == 0)
700 return;
701
702 /* For now, allow only a minimal set of flags to be changed. */
703 pa_assert((mask & ~(PA_SOURCE_DYNAMIC_LATENCY|PA_SOURCE_LATENCY)) == 0);
704
705 s->flags = (s->flags & ~mask) | (value & mask);
706 }
707
708 /* Called from IO context, or before _put() from main context */
709 void pa_source_set_rtpoll(pa_source *s, pa_rtpoll *p) {
710 pa_source_assert_ref(s);
711 pa_source_assert_io_context(s);
712
713 s->thread_info.rtpoll = p;
714 }
715
716 /* Called from main context */
717 int pa_source_update_status(pa_source*s) {
718 pa_source_assert_ref(s);
719 pa_assert_ctl_context();
720 pa_assert(PA_SOURCE_IS_LINKED(s->state));
721
722 if (s->state == PA_SOURCE_SUSPENDED)
723 return 0;
724
725 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
726 }
727
728 /* Called from main context */
729 int pa_source_suspend(pa_source *s, pa_bool_t suspend, pa_suspend_cause_t cause) {
730 pa_source_assert_ref(s);
731 pa_assert_ctl_context();
732 pa_assert(PA_SOURCE_IS_LINKED(s->state));
733 pa_assert(cause != 0);
734
735 if (s->monitor_of && cause != PA_SUSPEND_PASSTHROUGH)
736 return -PA_ERR_NOTSUPPORTED;
737
738 if (suspend)
739 s->suspend_cause |= cause;
740 else
741 s->suspend_cause &= ~cause;
742
743 if ((pa_source_get_state(s) == PA_SOURCE_SUSPENDED) == !!s->suspend_cause)
744 return 0;
745
746 pa_log_debug("Suspend cause of source %s is 0x%04x, %s", s->name, s->suspend_cause, s->suspend_cause ? "suspending" : "resuming");
747
748 if (s->suspend_cause)
749 return source_set_state(s, PA_SOURCE_SUSPENDED);
750 else
751 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
752 }
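/*
 * Clarifying example: the suspend cause is a bitmask, so independent
 * subsystems can suspend and resume without interfering with each other. A
 * (non-monitor) source stays suspended until every cause bit is cleared:
 *
 *     pa_source_suspend(s, TRUE, PA_SUSPEND_USER);    // suspended
 *     pa_source_suspend(s, TRUE, PA_SUSPEND_IDLE);    // still suspended
 *     pa_source_suspend(s, FALSE, PA_SUSPEND_USER);   // still suspended (IDLE bit set)
 *     pa_source_suspend(s, FALSE, PA_SUSPEND_IDLE);   // resumes
 */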
753
754 /* Called from main context */
755 int pa_source_sync_suspend(pa_source *s) {
756 pa_sink_state_t state;
757
758 pa_source_assert_ref(s);
759 pa_assert_ctl_context();
760 pa_assert(PA_SOURCE_IS_LINKED(s->state));
761 pa_assert(s->monitor_of);
762
763 state = pa_sink_get_state(s->monitor_of);
764
765 if (state == PA_SINK_SUSPENDED)
766 return source_set_state(s, PA_SOURCE_SUSPENDED);
767
768 pa_assert(PA_SINK_IS_OPENED(state));
769
770 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
771 }
772
773 /* Called from main context */
774 pa_queue *pa_source_move_all_start(pa_source *s, pa_queue *q) {
775 pa_source_output *o, *n;
776 uint32_t idx;
777
778 pa_source_assert_ref(s);
779 pa_assert_ctl_context();
780 pa_assert(PA_SOURCE_IS_LINKED(s->state));
781
782 if (!q)
783 q = pa_queue_new();
784
785 for (o = PA_SOURCE_OUTPUT(pa_idxset_first(s->outputs, &idx)); o; o = n) {
786 n = PA_SOURCE_OUTPUT(pa_idxset_next(s->outputs, &idx));
787
788 pa_source_output_ref(o);
789
790 if (pa_source_output_start_move(o) >= 0)
791 pa_queue_push(q, o);
792 else
793 pa_source_output_unref(o);
794 }
795
796 return q;
797 }
798
799 /* Called from main context */
800 void pa_source_move_all_finish(pa_source *s, pa_queue *q, pa_bool_t save) {
801 pa_source_output *o;
802
803 pa_source_assert_ref(s);
804 pa_assert_ctl_context();
805 pa_assert(PA_SOURCE_IS_LINKED(s->state));
806 pa_assert(q);
807
808 while ((o = PA_SOURCE_OUTPUT(pa_queue_pop(q)))) {
809 if (pa_source_output_finish_move(o, s, save) < 0)
810 pa_source_output_fail_move(o);
811
812 pa_source_output_unref(o);
813 }
814
815 pa_queue_free(q, NULL, NULL);
816 }
817
818 /* Called from main context */
819 void pa_source_move_all_fail(pa_queue *q) {
820 pa_source_output *o;
821
822 pa_assert_ctl_context();
823 pa_assert(q);
824
825 while ((o = PA_SOURCE_OUTPUT(pa_queue_pop(q)))) {
826 pa_source_output_fail_move(o);
827 pa_source_output_unref(o);
828 }
829
830 pa_queue_free(q, NULL, NULL);
831 }
832
833 /* Called from IO thread context */
834 void pa_source_process_rewind(pa_source *s, size_t nbytes) {
835 pa_source_output *o;
836 void *state = NULL;
837
838 pa_source_assert_ref(s);
839 pa_source_assert_io_context(s);
840 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
841
842 if (nbytes <= 0)
843 return;
844
845 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
846 return;
847
848 pa_log_debug("Processing rewind...");
849
850 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
851 pa_source_output_assert_ref(o);
852 pa_source_output_process_rewind(o, nbytes);
853 }
854 }
855
856 /* Called from IO thread context */
857 void pa_source_post(pa_source*s, const pa_memchunk *chunk) {
858 pa_source_output *o;
859 void *state = NULL;
860
861 pa_source_assert_ref(s);
862 pa_source_assert_io_context(s);
863 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
864 pa_assert(chunk);
865
866 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
867 return;
868
869 if (s->thread_info.soft_muted || !pa_cvolume_is_norm(&s->thread_info.soft_volume)) {
870 pa_memchunk vchunk = *chunk;
871
872 pa_memblock_ref(vchunk.memblock);
873 pa_memchunk_make_writable(&vchunk, 0);
874
875 if (s->thread_info.soft_muted || pa_cvolume_is_muted(&s->thread_info.soft_volume))
876 pa_silence_memchunk(&vchunk, &s->sample_spec);
877 else
878 pa_volume_memchunk(&vchunk, &s->sample_spec, &s->thread_info.soft_volume);
879
880 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL))) {
881 pa_source_output_assert_ref(o);
882
883 if (!o->thread_info.direct_on_input)
884 pa_source_output_push(o, &vchunk);
885 }
886
887 pa_memblock_unref(vchunk.memblock);
888 } else {
889
890 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL))) {
891 pa_source_output_assert_ref(o);
892
893 if (!o->thread_info.direct_on_input)
894 pa_source_output_push(o, chunk);
895 }
896 }
897 }
898
899 /* Called from IO thread context */
900 void pa_source_post_direct(pa_source*s, pa_source_output *o, const pa_memchunk *chunk) {
901 pa_source_assert_ref(s);
902 pa_source_assert_io_context(s);
903 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
904 pa_source_output_assert_ref(o);
905 pa_assert(o->thread_info.direct_on_input);
906 pa_assert(chunk);
907
908 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
909 return;
910
911 if (s->thread_info.soft_muted || !pa_cvolume_is_norm(&s->thread_info.soft_volume)) {
912 pa_memchunk vchunk = *chunk;
913
914 pa_memblock_ref(vchunk.memblock);
915 pa_memchunk_make_writable(&vchunk, 0);
916
917 if (s->thread_info.soft_muted || pa_cvolume_is_muted(&s->thread_info.soft_volume))
918 pa_silence_memchunk(&vchunk, &s->sample_spec);
919 else
920 pa_volume_memchunk(&vchunk, &s->sample_spec, &s->thread_info.soft_volume);
921
922 pa_source_output_push(o, &vchunk);
923
924 pa_memblock_unref(vchunk.memblock);
925 } else
926 pa_source_output_push(o, chunk);
927 }
928
929 /* Called from main thread */
930 pa_bool_t pa_source_update_rate(pa_source *s, uint32_t rate, pa_bool_t passthrough)
931 {
932 if (s->update_rate) {
933 uint32_t desired_rate = rate;
934 uint32_t default_rate = s->default_sample_rate;
935 uint32_t alternate_rate = s->alternate_sample_rate;
936 pa_bool_t use_alternate = FALSE;
937
938 if (PA_UNLIKELY(default_rate == alternate_rate)) {
939 pa_log_warn("Default and alternate sample rates are the same.");
940 return FALSE;
941 }
942
943 if (PA_SOURCE_IS_RUNNING(s->state)) {
944 pa_log_info("Cannot update rate, SOURCE_IS_RUNNING, will keep using %u Hz",
945 s->sample_spec.rate);
946 return FALSE;
947 }
948
949 if (PA_UNLIKELY (desired_rate < 8000 ||
950 desired_rate > PA_RATE_MAX))
951 return FALSE;
952
953 if (!passthrough) {
954 pa_assert(default_rate % 4000 || default_rate % 11025);
955 pa_assert(alternate_rate % 4000 || alternate_rate % 11025);
956
957 if (default_rate % 4000) {
958 /* default is an 11025 multiple */
959 if ((alternate_rate % 4000 == 0) && (desired_rate % 4000 == 0))
960 use_alternate=TRUE;
961 } else {
962 /* default is a 4000 multiple */
963 if ((alternate_rate % 11025 == 0) && (desired_rate % 11025 == 0))
964 use_alternate=TRUE;
965 }
966
967 if (use_alternate)
968 desired_rate = alternate_rate;
969 else
970 desired_rate = default_rate;
971 } else {
972 desired_rate = rate; /* use stream sampling rate, discard default/alternate settings */
973 }
974
975 if (!passthrough && pa_source_linked_by(s) > 0)
976 return FALSE;
977
978 pa_source_suspend(s, TRUE, PA_SUSPEND_IDLE); /* needed before rate update, will be resumed automatically */
979
980 if (s->update_rate(s, desired_rate) == TRUE) {
981 pa_log_info("Changed sampling rate successfully");
982 return TRUE;
983 }
984 }
985 return FALSE;
986 }
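/*
 * Worked example of the rate-family selection above (illustrative): with
 * default_rate = 44100 (an 11025 multiple) and alternate_rate = 48000 (a 4000
 * multiple), a non-passthrough request for 48000 Hz switches to the alternate
 * rate, while a request for 22050 Hz falls back to the default 44100 Hz. In
 * passthrough mode the stream's own rate is used unchanged.
 */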
987
988 /* Called from main thread */
989 pa_usec_t pa_source_get_latency(pa_source *s) {
990 pa_usec_t usec;
991
992 pa_source_assert_ref(s);
993 pa_assert_ctl_context();
994 pa_assert(PA_SOURCE_IS_LINKED(s->state));
995
996 if (s->state == PA_SOURCE_SUSPENDED)
997 return 0;
998
999 if (!(s->flags & PA_SOURCE_LATENCY))
1000 return 0;
1001
1002 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_LATENCY, &usec, 0, NULL) == 0);
1003
1004 return usec;
1005 }
1006
1007 /* Called from IO thread */
1008 pa_usec_t pa_source_get_latency_within_thread(pa_source *s) {
1009 pa_usec_t usec = 0;
1010 pa_msgobject *o;
1011
1012 pa_source_assert_ref(s);
1013 pa_source_assert_io_context(s);
1014 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
1015
1016 /* The returned value is supposed to be in the time domain of the sound card! */
1017
1018 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
1019 return 0;
1020
1021 if (!(s->flags & PA_SOURCE_LATENCY))
1022 return 0;
1023
1024 o = PA_MSGOBJECT(s);
1025
1026 /* FIXME: We probably should make this a proper vtable callback instead of going through process_msg() */
1027
1028 if (o->process_msg(o, PA_SOURCE_MESSAGE_GET_LATENCY, &usec, 0, NULL) < 0)
1029 return -1;
1030
1031 return usec;
1032 }
1033
1034 /* Called from the main thread (and also from the IO thread while the main
1035 * thread is waiting).
1036 *
1037 * When a source uses volume sharing, it never has the PA_SOURCE_FLAT_VOLUME flag
1038 * set. Instead, flat volume mode is detected by checking whether the root source
1039 * has the flag set. */
1040 pa_bool_t pa_source_flat_volume_enabled(pa_source *s) {
1041 pa_source_assert_ref(s);
1042
1043 s = pa_source_get_master(s);
1044
1045 if (PA_LIKELY(s))
1046 return (s->flags & PA_SOURCE_FLAT_VOLUME);
1047 else
1048 return FALSE;
1049 }
1050
1051 /* Called from the main thread (and also from the IO thread while the main
1052 * thread is waiting). */
1053 pa_source *pa_source_get_master(pa_source *s) {
1054 pa_source_assert_ref(s);
1055
1056 while (s && (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1057 if (PA_UNLIKELY(!s->output_from_master))
1058 return NULL;
1059
1060 s = s->output_from_master->source;
1061 }
1062
1063 return s;
1064 }
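/*
 * Clarifying note: for an ordinary hardware source this loop exits immediately
 * and returns the source itself. For a filter source flagged with
 * PA_SOURCE_SHARE_VOLUME_WITH_MASTER it follows the output_from_master links
 * until it reaches the source that actually owns the volume, which is the
 * source the flat-volume logic below operates on.
 */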
1065
1066 /* Called from main context */
1067 pa_bool_t pa_source_is_passthrough(pa_source *s) {
1068
1069 pa_source_assert_ref(s);
1070
1071 /* NB Currently only monitor sources support passthrough mode */
1072 return (s->monitor_of && pa_sink_is_passthrough(s->monitor_of));
1073 }
1074
1075 /* Called from main context */
1076 void pa_source_enter_passthrough(pa_source *s) {
1077 pa_cvolume volume;
1078
1079 /* set the volume to NORM */
1080 s->saved_volume = *pa_source_get_volume(s, TRUE);
1081 s->saved_save_volume = s->save_volume;
1082
1083 pa_cvolume_set(&volume, s->sample_spec.channels, PA_MIN(s->base_volume, PA_VOLUME_NORM));
1084 pa_source_set_volume(s, &volume, TRUE, FALSE);
1085 }
1086
1087 /* Called from main context */
1088 void pa_source_leave_passthrough(pa_source *s) {
1089 /* Restore source volume to what it was before we entered passthrough mode */
1090 pa_source_set_volume(s, &s->saved_volume, TRUE, s->saved_save_volume);
1091
1092 pa_cvolume_init(&s->saved_volume);
1093 s->saved_save_volume = FALSE;
1094 }
1095
1096 /* Called from main context. */
1097 static void compute_reference_ratio(pa_source_output *o) {
1098 unsigned c = 0;
1099 pa_cvolume remapped;
1100
1101 pa_assert(o);
1102 pa_assert(pa_source_flat_volume_enabled(o->source));
1103
1104 /*
1105 * Calculates the reference ratio from the source's reference
1106 * volume. This basically calculates:
1107 *
1108 * o->reference_ratio = o->volume / o->source->reference_volume
1109 */
1110
1111 remapped = o->source->reference_volume;
1112 pa_cvolume_remap(&remapped, &o->source->channel_map, &o->channel_map);
1113
1114 o->reference_ratio.channels = o->sample_spec.channels;
1115
1116 for (c = 0; c < o->sample_spec.channels; c++) {
1117
1118 /* We don't update when the source volume is 0 anyway */
1119 if (remapped.values[c] <= PA_VOLUME_MUTED)
1120 continue;
1121
1122 /* Don't update the reference ratio unless necessary */
1123 if (pa_sw_volume_multiply(
1124 o->reference_ratio.values[c],
1125 remapped.values[c]) == o->volume.values[c])
1126 continue;
1127
1128 o->reference_ratio.values[c] = pa_sw_volume_divide(
1129 o->volume.values[c],
1130 remapped.values[c]);
1131 }
1132 }
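/*
 * Numeric sketch (assumes pa_sw_volume_multiply()/pa_sw_volume_divide()
 * combine volumes as linear factors scaled by PA_VOLUME_NORM): if a channel of
 * the remapped reference volume is PA_VOLUME_NORM and the output's volume on
 * that channel is PA_VOLUME_NORM/2, the reference ratio becomes roughly
 * PA_VOLUME_NORM/2, so multiplying the ratio back onto the reference volume
 * reproduces the output volume. The "continue" branches only skip muted
 * channels and ratios that are already consistent.
 */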
1133
1134 /* Called from main context. Only called for the root source in volume sharing
1135 * cases, except for internal recursive calls. */
1136 static void compute_reference_ratios(pa_source *s) {
1137 uint32_t idx;
1138 pa_source_output *o;
1139
1140 pa_source_assert_ref(s);
1141 pa_assert_ctl_context();
1142 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1143 pa_assert(pa_source_flat_volume_enabled(s));
1144
1145 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1146 compute_reference_ratio(o);
1147
1148 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1149 compute_reference_ratios(o->destination_source);
1150 }
1151 }
1152
1153 /* Called from main context. Only called for the root source in volume sharing
1154 * cases, except for internal recursive calls. */
1155 static void compute_real_ratios(pa_source *s) {
1156 pa_source_output *o;
1157 uint32_t idx;
1158
1159 pa_source_assert_ref(s);
1160 pa_assert_ctl_context();
1161 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1162 pa_assert(pa_source_flat_volume_enabled(s));
1163
1164 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1165 unsigned c;
1166 pa_cvolume remapped;
1167
1168 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1169 /* The origin source uses volume sharing, so this output's real ratio
1170 * is handled as a special case - the real ratio must be 0 dB, and
1171 * as a result o->soft_volume must equal o->volume_factor. */
1172 pa_cvolume_reset(&o->real_ratio, o->real_ratio.channels);
1173 o->soft_volume = o->volume_factor;
1174
1175 compute_real_ratios(o->destination_source);
1176
1177 continue;
1178 }
1179
1180 /*
1181 * This basically calculates:
1182 *
1183 * o->real_ratio := o->volume / s->real_volume
1184 * o->soft_volume := o->real_ratio * o->volume_factor
1185 */
1186
1187 remapped = s->real_volume;
1188 pa_cvolume_remap(&remapped, &s->channel_map, &o->channel_map);
1189
1190 o->real_ratio.channels = o->sample_spec.channels;
1191 o->soft_volume.channels = o->sample_spec.channels;
1192
1193 for (c = 0; c < o->sample_spec.channels; c++) {
1194
1195 if (remapped.values[c] <= PA_VOLUME_MUTED) {
1196 /* We leave o->real_ratio untouched */
1197 o->soft_volume.values[c] = PA_VOLUME_MUTED;
1198 continue;
1199 }
1200
1201 /* Don't lose accuracy unless necessary */
1202 if (pa_sw_volume_multiply(
1203 o->real_ratio.values[c],
1204 remapped.values[c]) != o->volume.values[c])
1205
1206 o->real_ratio.values[c] = pa_sw_volume_divide(
1207 o->volume.values[c],
1208 remapped.values[c]);
1209
1210 o->soft_volume.values[c] = pa_sw_volume_multiply(
1211 o->real_ratio.values[c],
1212 o->volume_factor.values[c]);
1213 }
1214
1215 /* We don't copy the soft_volume to the thread_info data
1216 * here. That must be done by the caller */
1217 }
1218 }
1219
1220 static pa_cvolume *cvolume_remap_minimal_impact(
1221 pa_cvolume *v,
1222 const pa_cvolume *template,
1223 const pa_channel_map *from,
1224 const pa_channel_map *to) {
1225
1226 pa_cvolume t;
1227
1228 pa_assert(v);
1229 pa_assert(template);
1230 pa_assert(from);
1231 pa_assert(to);
1232 pa_assert(pa_cvolume_compatible_with_channel_map(v, from));
1233 pa_assert(pa_cvolume_compatible_with_channel_map(template, to));
1234
1235 /* Much like pa_cvolume_remap(), but tries to minimize impact when
1236 * mapping from source output to source volumes:
1237 *
1238 * If template is a possible remapping from v it is used instead
1239 * of remapping anew.
1240 *
1241 * If the channel maps don't match we set an all-channel volume on
1242 * the source to ensure that changing a volume on one stream has no
1243 * effect that cannot be compensated for in another stream that
1244 * does not have the same channel map as the source. */
1245
1246 if (pa_channel_map_equal(from, to))
1247 return v;
1248
1249 t = *template;
1250 if (pa_cvolume_equal(pa_cvolume_remap(&t, to, from), v)) {
1251 *v = *template;
1252 return v;
1253 }
1254
1255 pa_cvolume_set(v, to->channels, pa_cvolume_max(v));
1256 return v;
1257 }
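/*
 * Example: when mapping a stereo output volume v up to a 5.1 source, if
 * remapping the 5.1 'template' down to stereo yields exactly v, the template
 * is taken over unchanged and no information is lost. Otherwise v is replaced
 * by a flat volume across all 5.1 channels, set to v's loudest channel, so
 * that a change on this stream can still be compensated for on streams with
 * different channel maps.
 */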
1258
1259 /* Called from main thread. Only called for the root source in volume sharing
1260 * cases, except for internal recursive calls. */
1261 static void get_maximum_output_volume(pa_source *s, pa_cvolume *max_volume, const pa_channel_map *channel_map) {
1262 pa_source_output *o;
1263 uint32_t idx;
1264
1265 pa_source_assert_ref(s);
1266 pa_assert(max_volume);
1267 pa_assert(channel_map);
1268 pa_assert(pa_source_flat_volume_enabled(s));
1269
1270 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1271 pa_cvolume remapped;
1272
1273 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1274 get_maximum_output_volume(o->destination_source, max_volume, channel_map);
1275
1276 /* Ignore this output. The origin source uses volume sharing, so this
1277 * output's volume will be set to be equal to the root source's real
1278 * volume. Obviously this output's current volume must not then
1279 * affect what the root source's real volume will be. */
1280 continue;
1281 }
1282
1283 remapped = o->volume;
1284 cvolume_remap_minimal_impact(&remapped, max_volume, &o->channel_map, channel_map);
1285 pa_cvolume_merge(max_volume, max_volume, &remapped);
1286 }
1287 }
1288
1289 /* Called from main thread. Only called for the root source in volume sharing
1290 * cases, except for internal recursive calls. */
1291 static pa_bool_t has_outputs(pa_source *s) {
1292 pa_source_output *o;
1293 uint32_t idx;
1294
1295 pa_source_assert_ref(s);
1296
1297 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1298 if (!o->destination_source || !(o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) || has_outputs(o->destination_source))
1299 return TRUE;
1300 }
1301
1302 return FALSE;
1303 }
1304
1305 /* Called from main thread. Only called for the root source in volume sharing
1306 * cases, except for internal recursive calls. */
1307 static void update_real_volume(pa_source *s, const pa_cvolume *new_volume, pa_channel_map *channel_map) {
1308 pa_source_output *o;
1309 uint32_t idx;
1310
1311 pa_source_assert_ref(s);
1312 pa_assert(new_volume);
1313 pa_assert(channel_map);
1314
1315 s->real_volume = *new_volume;
1316 pa_cvolume_remap(&s->real_volume, channel_map, &s->channel_map);
1317
1318 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1319 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1320 if (pa_source_flat_volume_enabled(s)) {
1321 pa_cvolume old_volume = o->volume;
1322
1323 /* Follow the root source's real volume. */
1324 o->volume = *new_volume;
1325 pa_cvolume_remap(&o->volume, channel_map, &o->channel_map);
1326 compute_reference_ratio(o);
1327
1328 /* The volume changed, let's tell people so */
1329 if (!pa_cvolume_equal(&old_volume, &o->volume)) {
1330 if (o->volume_changed)
1331 o->volume_changed(o);
1332
1333 pa_subscription_post(o->core, PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT|PA_SUBSCRIPTION_EVENT_CHANGE, o->index);
1334 }
1335 }
1336
1337 update_real_volume(o->destination_source, new_volume, channel_map);
1338 }
1339 }
1340 }
1341
1342 /* Called from main thread. Only called for the root source in shared volume
1343 * cases. */
1344 static void compute_real_volume(pa_source *s) {
1345 pa_source_assert_ref(s);
1346 pa_assert_ctl_context();
1347 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1348 pa_assert(pa_source_flat_volume_enabled(s));
1349 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1350
1351 /* This determines the maximum volume of all streams and sets
1352 * s->real_volume accordingly. */
1353
1354 if (!has_outputs(s)) {
1355 /* In the special case that we have no source outputs we leave the
1356 * volume unmodified. */
1357 update_real_volume(s, &s->reference_volume, &s->channel_map);
1358 return;
1359 }
1360
1361 pa_cvolume_mute(&s->real_volume, s->channel_map.channels);
1362
1363 /* First let's determine the new maximum volume of all outputs
1364 * connected to this source */
1365 get_maximum_output_volume(s, &s->real_volume, &s->channel_map);
1366 update_real_volume(s, &s->real_volume, &s->channel_map);
1367
1368 /* Then, let's update the real ratios/soft volumes of all outputs
1369 * connected to this source */
1370 compute_real_ratios(s);
1371 }
1372
1373 /* Called from main thread. Only called for the root source in shared volume
1374 * cases, except for internal recursive calls. */
1375 static void propagate_reference_volume(pa_source *s) {
1376 pa_source_output *o;
1377 uint32_t idx;
1378
1379 pa_source_assert_ref(s);
1380 pa_assert_ctl_context();
1381 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1382 pa_assert(pa_source_flat_volume_enabled(s));
1383
1384 /* This is called whenever the source volume changes that is not
1385 * caused by a source output volume change. We need to fix up the
1386 * source output volumes accordingly */
1387
1388 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1389 pa_cvolume old_volume;
1390
1391 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1392 propagate_reference_volume(o->destination_source);
1393
1394 /* Since the origin source uses volume sharing, this output's volume
1395 * needs to be updated to match the root source's real volume, but
1396 * that will be done later in update_shared_real_volume(). */
1397 continue;
1398 }
1399
1400 old_volume = o->volume;
1401
1402 /* This basically calculates:
1403 *
1404 * o->volume := o->reference_volume * o->reference_ratio */
1405
1406 o->volume = s->reference_volume;
1407 pa_cvolume_remap(&o->volume, &s->channel_map, &o->channel_map);
1408 pa_sw_cvolume_multiply(&o->volume, &o->volume, &o->reference_ratio);
1409
1410 /* The volume changed, let's tell people so */
1411 if (!pa_cvolume_equal(&old_volume, &o->volume)) {
1412
1413 if (o->volume_changed)
1414 o->volume_changed(o);
1415
1416 pa_subscription_post(o->core, PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT|PA_SUBSCRIPTION_EVENT_CHANGE, o->index);
1417 }
1418 }
1419 }
1420
1421 /* Called from main thread. Only called for the root source in volume sharing
1422 * cases, except for internal recursive calls. The return value indicates
1423 * whether any reference volume actually changed. */
1424 static pa_bool_t update_reference_volume(pa_source *s, const pa_cvolume *v, const pa_channel_map *channel_map, pa_bool_t save) {
1425 pa_cvolume volume;
1426 pa_bool_t reference_volume_changed;
1427 pa_source_output *o;
1428 uint32_t idx;
1429
1430 pa_source_assert_ref(s);
1431 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1432 pa_assert(v);
1433 pa_assert(channel_map);
1434 pa_assert(pa_cvolume_valid(v));
1435
1436 volume = *v;
1437 pa_cvolume_remap(&volume, channel_map, &s->channel_map);
1438
1439 reference_volume_changed = !pa_cvolume_equal(&volume, &s->reference_volume);
1440 s->reference_volume = volume;
1441
1442 s->save_volume = (!reference_volume_changed && s->save_volume) || save;
1443
1444 if (reference_volume_changed)
1445 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1446 else if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1447 /* If the root source's volume doesn't change, then there can't be any
1448 * changes in the other sources in the source tree either.
1449 *
1450 * It's probably theoretically possible that even if the root source's
1451 * volume changes slightly, some filter source doesn't change its volume
1452 * due to rounding errors. If that happens, we still want to propagate
1453 * the changed root source volume to the sources connected to the
1454 * intermediate source that didn't change its volume. This theoretical
1455 * possibility is the reason why we have that !(s->flags &
1456 * PA_SOURCE_SHARE_VOLUME_WITH_MASTER) condition. Probably nobody would
1457 * notice even if we always returned FALSE here when
1458 * reference_volume_changed is FALSE. */
1459 return FALSE;
1460
1461 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1462 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1463 update_reference_volume(o->destination_source, v, channel_map, FALSE);
1464 }
1465
1466 return TRUE;
1467 }
1468
1469 /* Called from main thread */
1470 void pa_source_set_volume(
1471 pa_source *s,
1472 const pa_cvolume *volume,
1473 pa_bool_t send_msg,
1474 pa_bool_t save) {
1475
1476 pa_cvolume new_reference_volume;
1477 pa_source *root_source;
1478
1479 pa_source_assert_ref(s);
1480 pa_assert_ctl_context();
1481 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1482 pa_assert(!volume || pa_cvolume_valid(volume));
1483 pa_assert(volume || pa_source_flat_volume_enabled(s));
1484 pa_assert(!volume || volume->channels == 1 || pa_cvolume_compatible(volume, &s->sample_spec));
1485
1486 /* make sure we don't change the volume in PASSTHROUGH mode ...
1487 * ... *except* if we're being invoked to reset the volume to ensure 0 dB gain */
1488 if (pa_source_is_passthrough(s) && (!volume || !pa_cvolume_is_norm(volume))) {
1489 pa_log_warn("Cannot change volume, Source is monitor of a PASSTHROUGH sink");
1490 return;
1491 }
1492
1493 /* In case of volume sharing, the volume is set for the root source first,
1494 * from which it's then propagated to the sharing sources. */
1495 root_source = pa_source_get_master(s);
1496
1497 if (PA_UNLIKELY(!root_source))
1498 return;
1499
1500 /* As a special exception we accept mono volumes on all sources --
1501 * even on those with more complex channel maps */
1502
1503 if (volume) {
1504 if (pa_cvolume_compatible(volume, &s->sample_spec))
1505 new_reference_volume = *volume;
1506 else {
1507 new_reference_volume = s->reference_volume;
1508 pa_cvolume_scale(&new_reference_volume, pa_cvolume_max(volume));
1509 }
1510
1511 pa_cvolume_remap(&new_reference_volume, &s->channel_map, &root_source->channel_map);
1512
1513 if (update_reference_volume(root_source, &new_reference_volume, &root_source->channel_map, save)) {
1514 if (pa_source_flat_volume_enabled(root_source)) {
1515 /* OK, propagate this volume change back to the outputs */
1516 propagate_reference_volume(root_source);
1517
1518 /* And now recalculate the real volume */
1519 compute_real_volume(root_source);
1520 } else
1521 update_real_volume(root_source, &root_source->reference_volume, &root_source->channel_map);
1522 }
1523
1524 } else {
1525 /* If volume is NULL we synchronize the source's real and
1526 * reference volumes with the stream volumes. */
1527
1528 pa_assert(pa_source_flat_volume_enabled(root_source));
1529
1530 /* Ok, let's determine the new real volume */
1531 compute_real_volume(root_source);
1532
1533 /* Let's 'push' the reference volume if necessary */
1534 pa_cvolume_merge(&new_reference_volume, &s->reference_volume, &root_source->real_volume);
1535 /* If the source and its root don't have the same number of channels, we need to remap */
1536 if (s != root_source && !pa_channel_map_equal(&s->channel_map, &root_source->channel_map))
1537 pa_cvolume_remap(&new_reference_volume, &s->channel_map, &root_source->channel_map);
1538 update_reference_volume(root_source, &new_reference_volume, &root_source->channel_map, save);
1539
1540 /* Now that the reference volume is updated, we can update the streams'
1541 * reference ratios. */
1542 compute_reference_ratios(root_source);
1543 }
1544
1545 if (root_source->set_volume) {
1546 /* If we have a function set_volume(), then we do not apply a
1547 * soft volume by default. However, set_volume() is free to
1548 * apply one to root_source->soft_volume */
1549
1550 pa_cvolume_reset(&root_source->soft_volume, root_source->sample_spec.channels);
1551 if (!(root_source->flags & PA_SOURCE_DEFERRED_VOLUME))
1552 root_source->set_volume(root_source);
1553
1554 } else
1555 /* If we have no function set_volume(), then the soft volume
1556 * becomes the real volume */
1557 root_source->soft_volume = root_source->real_volume;
1558
1559 /* This tells the source that soft volume and/or real volume changed */
1560 if (send_msg)
1561 pa_assert_se(pa_asyncmsgq_send(root_source->asyncmsgq, PA_MSGOBJECT(root_source), PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL) == 0);
1562 }
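/*
 * Flow summary: volume changes are funneled here to the root source; when
 * called with a NULL volume, the reference volume is instead re-derived from
 * the stream volumes (flat volume). The new reference volume is pushed back to
 * the outputs (propagate_reference_volume()), the real volume is recomputed
 * from the loudest stream (compute_real_volume()), the hardware is updated via
 * set_volume() or, in its absence, soft_volume takes over, and finally
 * PA_SOURCE_MESSAGE_SET_SHARED_VOLUME informs the IO thread of the new
 * soft/real volumes.
 */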
1563
1564 /* Called from the io thread if sync volume is used, otherwise from the main thread.
1565 * Only to be called by source implementor */
1566 void pa_source_set_soft_volume(pa_source *s, const pa_cvolume *volume) {
1567
1568 pa_source_assert_ref(s);
1569 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1570
1571 if (s->flags & PA_SOURCE_DEFERRED_VOLUME)
1572 pa_source_assert_io_context(s);
1573 else
1574 pa_assert_ctl_context();
1575
1576 if (!volume)
1577 pa_cvolume_reset(&s->soft_volume, s->sample_spec.channels);
1578 else
1579 s->soft_volume = *volume;
1580
1581 if (PA_SOURCE_IS_LINKED(s->state) && !(s->flags & PA_SOURCE_DEFERRED_VOLUME))
1582 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_VOLUME, NULL, 0, NULL) == 0);
1583 else
1584 s->thread_info.soft_volume = s->soft_volume;
1585 }
1586
1587 /* Called from the main thread. Only called for the root source in volume sharing
1588 * cases, except for internal recursive calls. */
1589 static void propagate_real_volume(pa_source *s, const pa_cvolume *old_real_volume) {
1590 pa_source_output *o;
1591 uint32_t idx;
1592
1593 pa_source_assert_ref(s);
1594 pa_assert(old_real_volume);
1595 pa_assert_ctl_context();
1596 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1597
1598 /* This is called when the hardware's real volume changes due to
1599 * some external event. We copy the real volume into our
1600 * reference volume and then rebuild the stream volumes based on
1601 * i->real_ratio which should stay fixed. */
1602
1603 if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1604 if (pa_cvolume_equal(old_real_volume, &s->real_volume))
1605 return;
1606
1607 /* 1. Make the real volume the reference volume */
1608 update_reference_volume(s, &s->real_volume, &s->channel_map, TRUE);
1609 }
1610
1611 if (pa_source_flat_volume_enabled(s)) {
1612
1613 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1614 pa_cvolume old_volume = o->volume;
1615
1616 /* 2. Since the source's reference and real volumes are equal
1617 * now our ratios should be too. */
1618 o->reference_ratio = o->real_ratio;
1619
1620 /* 3. Recalculate the new stream reference volume based on the
1621 * reference ratio and the source's reference volume.
1622 *
1623 * This basically calculates:
1624 *
1625 * o->volume = s->reference_volume * o->reference_ratio
1626 *
1627 * This is identical to propagate_reference_volume() */
1628 o->volume = s->reference_volume;
1629 pa_cvolume_remap(&o->volume, &s->channel_map, &o->channel_map);
1630 pa_sw_cvolume_multiply(&o->volume, &o->volume, &o->reference_ratio);
1631
1632 /* Notify if something changed */
1633 if (!pa_cvolume_equal(&old_volume, &o->volume)) {
1634
1635 if (o->volume_changed)
1636 o->volume_changed(o);
1637
1638 pa_subscription_post(o->core, PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT|PA_SUBSCRIPTION_EVENT_CHANGE, o->index);
1639 }
1640
1641 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1642 propagate_real_volume(o->destination_source, old_real_volume);
1643 }
1644 }
1645
1646 /* Something got changed in the hardware. It probably makes sense
1647 * to save changed hw settings given that hw volume changes not
1648 * triggered by PA are almost certainly done by the user. */
1649 if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1650 s->save_volume = TRUE;
1651 }
1652
1653 /* Called from io thread */
1654 void pa_source_update_volume_and_mute(pa_source *s) {
1655 pa_assert(s);
1656 pa_source_assert_io_context(s);
1657
1658 pa_asyncmsgq_post(pa_thread_mq_get()->outq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_UPDATE_VOLUME_AND_MUTE, NULL, 0, NULL, NULL);
1659 }
1660
1661 /* Called from main thread */
1662 const pa_cvolume *pa_source_get_volume(pa_source *s, pa_bool_t force_refresh) {
1663 pa_source_assert_ref(s);
1664 pa_assert_ctl_context();
1665 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1666
1667 if (s->refresh_volume || force_refresh) {
1668 struct pa_cvolume old_real_volume;
1669
1670 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1671
1672 old_real_volume = s->real_volume;
1673
1674 if (!(s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->get_volume)
1675 s->get_volume(s);
1676
1677 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_VOLUME, NULL, 0, NULL) == 0);
1678
1679 update_real_volume(s, &s->real_volume, &s->channel_map);
1680 propagate_real_volume(s, &old_real_volume);
1681 }
1682
1683 return &s->reference_volume;
1684 }
1685
1686 /* Called from main thread. In volume sharing cases, only the root source may
1687 * call this. */
1688 void pa_source_volume_changed(pa_source *s, const pa_cvolume *new_real_volume) {
1689 pa_cvolume old_real_volume;
1690
1691 pa_source_assert_ref(s);
1692 pa_assert_ctl_context();
1693 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1694 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1695
1696 /* The source implementor may call this if the volume changed to make sure everyone is notified */
1697
1698 old_real_volume = s->real_volume;
1699 update_real_volume(s, new_real_volume, &s->channel_map);
1700 propagate_real_volume(s, &old_real_volume);
1701 }
1702
1703 /* Called from main thread */
1704 void pa_source_set_mute(pa_source *s, pa_bool_t mute, pa_bool_t save) {
1705 pa_bool_t old_muted;
1706
1707 pa_source_assert_ref(s);
1708 pa_assert_ctl_context();
1709 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1710
1711 old_muted = s->muted;
1712 s->muted = mute;
1713 s->save_muted = (old_muted == s->muted && s->save_muted) || save;
1714
1715 if (!(s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->set_mute)
1716 s->set_mute(s);
1717
1718 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MUTE, NULL, 0, NULL) == 0);
1719
1720 if (old_muted != s->muted)
1721 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1722 }
1723
1724 /* Called from main thread */
1725 pa_bool_t pa_source_get_mute(pa_source *s, pa_bool_t force_refresh) {
1726
1727 pa_source_assert_ref(s);
1728 pa_assert_ctl_context();
1729 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1730
1731 if (s->refresh_muted || force_refresh) {
1732 pa_bool_t old_muted = s->muted;
1733
1734 if (!(s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->get_mute)
1735 s->get_mute(s);
1736
1737 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_MUTE, NULL, 0, NULL) == 0);
1738
1739 if (old_muted != s->muted) {
1740 s->save_muted = TRUE;
1741
1742 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1743
1744 /* Make sure the soft mute status stays in sync */
1745 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MUTE, NULL, 0, NULL) == 0);
1746 }
1747 }
1748
1749 return s->muted;
1750 }
1751
1752 /* Called from main thread */
1753 void pa_source_mute_changed(pa_source *s, pa_bool_t new_muted) {
1754 pa_source_assert_ref(s);
1755 pa_assert_ctl_context();
1756 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1757
1758 /* The source implementor may call this if the mute state changed, to make sure everyone is notified */
1759
1760 if (s->muted == new_muted)
1761 return;
1762
1763 s->muted = new_muted;
1764 s->save_muted = TRUE;
1765
1766 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1767 }
1768
1769 /* Called from main thread */
1770 pa_bool_t pa_source_update_proplist(pa_source *s, pa_update_mode_t mode, pa_proplist *p) {
1771 pa_source_assert_ref(s);
1772 pa_assert_ctl_context();
1773
1774 if (p)
1775 pa_proplist_update(s->proplist, mode, p);
1776
1777 if (PA_SOURCE_IS_LINKED(s->state)) {
1778 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PROPLIST_CHANGED], s);
1779 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1780 }
1781
1782 return TRUE;
1783 }
1784
1785 /* Called from main thread */
1786 /* FIXME -- this should be dropped and be merged into pa_source_update_proplist() */
1787 void pa_source_set_description(pa_source *s, const char *description) {
1788 const char *old;
1789 pa_source_assert_ref(s);
1790 pa_assert_ctl_context();
1791
1792 if (!description && !pa_proplist_contains(s->proplist, PA_PROP_DEVICE_DESCRIPTION))
1793 return;
1794
1795 old = pa_proplist_gets(s->proplist, PA_PROP_DEVICE_DESCRIPTION);
1796
1797 if (old && description && pa_streq(old, description))
1798 return;
1799
1800 if (description)
1801 pa_proplist_sets(s->proplist, PA_PROP_DEVICE_DESCRIPTION, description);
1802 else
1803 pa_proplist_unset(s->proplist, PA_PROP_DEVICE_DESCRIPTION);
1804
1805 if (PA_SOURCE_IS_LINKED(s->state)) {
1806 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1807 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PROPLIST_CHANGED], s);
1808 }
1809 }
1810
1811 /* Called from main thread */
1812 unsigned pa_source_linked_by(pa_source *s) {
1813 pa_source_assert_ref(s);
1814 pa_assert_ctl_context();
1815 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1816
1817 return pa_idxset_size(s->outputs);
1818 }
1819
1820 /* Called from main thread */
1821 unsigned pa_source_used_by(pa_source *s) {
1822 unsigned ret;
1823
1824 pa_source_assert_ref(s);
1825 pa_assert_ctl_context();
1826 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1827
1828 ret = pa_idxset_size(s->outputs);
1829 pa_assert(ret >= s->n_corked);
1830
1831 return ret - s->n_corked;
1832 }
1833
1834 /* Called from main thread */
1835 unsigned pa_source_check_suspend(pa_source *s) {
1836 unsigned ret;
1837 pa_source_output *o;
1838 uint32_t idx;
1839
1840 pa_source_assert_ref(s);
1841 pa_assert_ctl_context();
1842
1843 if (!PA_SOURCE_IS_LINKED(s->state))
1844 return 0;
1845
1846 ret = 0;
1847
1848 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1849 pa_source_output_state_t st;
1850
1851 st = pa_source_output_get_state(o);
1852
1853 /* We do not assert here. It is perfectly valid for a source output to
1854 * be in the INIT state (i.e. created, marked done but not yet put)
1855 * and we should not care if it's unlinked as it won't contribute
1856 * towards our busy status.
1857 */
1858 if (!PA_SOURCE_OUTPUT_IS_LINKED(st))
1859 continue;
1860
1861 if (st == PA_SOURCE_OUTPUT_CORKED)
1862 continue;
1863
1864 if (o->flags & PA_SOURCE_OUTPUT_DONT_INHIBIT_AUTO_SUSPEND)
1865 continue;
1866
1867 ret ++;
1868 }
1869
1870 return ret;
1871 }
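/* For illustration: the value returned above is the number of outputs that
 * keep this source busy; idle-detection logic (module-suspend-on-idle style)
 * would typically treat a return value of 0 as "nothing is recording, the
 * source may be suspended". */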
1872
1873 /* Called from the IO thread */
1874 static void sync_output_volumes_within_thread(pa_source *s) {
1875 pa_source_output *o;
1876 void *state = NULL;
1877
1878 pa_source_assert_ref(s);
1879 pa_source_assert_io_context(s);
1880
1881 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
1882 if (pa_cvolume_equal(&o->thread_info.soft_volume, &o->soft_volume))
1883 continue;
1884
1885 o->thread_info.soft_volume = o->soft_volume;
1886 //pa_source_output_request_rewind(o, 0, TRUE, FALSE, FALSE);
1887 }
1888 }
1889
1890 /* Called from the IO thread. Only called for the root source in volume sharing
1891 * cases, except for internal recursive calls. */
1892 static void set_shared_volume_within_thread(pa_source *s) {
1893 pa_source_output *o;
1894 void *state = NULL;
1895
1896 pa_source_assert_ref(s);
1897
1898 PA_MSGOBJECT(s)->process_msg(PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_VOLUME_SYNCED, NULL, 0, NULL);
1899
1900 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
1901 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1902 set_shared_volume_within_thread(o->destination_source);
1903 }
1904 }
1905
1906 /* Called from the IO thread, except for messages that are explicitly handled in the main thread (e.g. PA_SOURCE_MESSAGE_UPDATE_VOLUME_AND_MUTE) */
1907 int pa_source_process_msg(pa_msgobject *object, int code, void *userdata, int64_t offset, pa_memchunk *chunk) {
1908 pa_source *s = PA_SOURCE(object);
1909 pa_source_assert_ref(s);
1910
1911 switch ((pa_source_message_t) code) {
1912
1913 case PA_SOURCE_MESSAGE_ADD_OUTPUT: {
1914 pa_source_output *o = PA_SOURCE_OUTPUT(userdata);
1915
1916 pa_hashmap_put(s->thread_info.outputs, PA_UINT32_TO_PTR(o->index), pa_source_output_ref(o));
1917
1918 if (o->direct_on_input) {
1919 o->thread_info.direct_on_input = o->direct_on_input;
1920 pa_hashmap_put(o->thread_info.direct_on_input->thread_info.direct_outputs, PA_UINT32_TO_PTR(o->index), o);
1921 }
1922
1923 pa_assert(!o->thread_info.attached);
1924 o->thread_info.attached = TRUE;
1925
1926 if (o->attach)
1927 o->attach(o);
1928
1929 pa_source_output_set_state_within_thread(o, o->state);
1930
1931 if (o->thread_info.requested_source_latency != (pa_usec_t) -1)
1932 pa_source_output_set_requested_latency_within_thread(o, o->thread_info.requested_source_latency);
1933
1934 pa_source_output_update_max_rewind(o, s->thread_info.max_rewind);
1935
1936 /* We don't just invalidate the requested latency here,
1937 * because if we are in a move we might need to fix up the
1938 * requested latency. */
1939 pa_source_output_set_requested_latency_within_thread(o, o->thread_info.requested_source_latency);
1940
1941 /* In flat volume mode we need to update the volume as
1942 * well */
1943 return object->process_msg(object, PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL);
1944 }
1945
1946 case PA_SOURCE_MESSAGE_REMOVE_OUTPUT: {
1947 pa_source_output *o = PA_SOURCE_OUTPUT(userdata);
1948
1949 pa_source_output_set_state_within_thread(o, o->state);
1950
1951 if (o->detach)
1952 o->detach(o);
1953
1954 pa_assert(o->thread_info.attached);
1955 o->thread_info.attached = FALSE;
1956
1957 if (o->thread_info.direct_on_input) {
1958 pa_hashmap_remove(o->thread_info.direct_on_input->thread_info.direct_outputs, PA_UINT32_TO_PTR(o->index));
1959 o->thread_info.direct_on_input = NULL;
1960 }
1961
1962 if (pa_hashmap_remove(s->thread_info.outputs, PA_UINT32_TO_PTR(o->index)))
1963 pa_source_output_unref(o);
1964
1965 pa_source_invalidate_requested_latency(s, TRUE);
1966
1967 /* In flat volume mode we need to update the volume as
1968 * well */
1969 return object->process_msg(object, PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL);
1970 }
1971
1972 case PA_SOURCE_MESSAGE_SET_SHARED_VOLUME: {
1973 pa_source *root_source = pa_source_get_master(s);
1974
1975 if (PA_LIKELY(root_source))
1976 set_shared_volume_within_thread(root_source);
1977
1978 return 0;
1979 }
1980
1981 case PA_SOURCE_MESSAGE_SET_VOLUME_SYNCED:
1982
1983 if (s->flags & PA_SOURCE_DEFERRED_VOLUME) {
1984 s->set_volume(s);
1985 pa_source_volume_change_push(s);
1986 }
1987 /* Fall through ... */
1988
1989 case PA_SOURCE_MESSAGE_SET_VOLUME:
1990
1991 if (!pa_cvolume_equal(&s->thread_info.soft_volume, &s->soft_volume)) {
1992 s->thread_info.soft_volume = s->soft_volume;
1993 }
1994
1995 /* Fall through ... */
1996
1997 case PA_SOURCE_MESSAGE_SYNC_VOLUMES:
1998 sync_output_volumes_within_thread(s);
1999 return 0;
2000
2001 case PA_SOURCE_MESSAGE_GET_VOLUME:
2002
2003 if ((s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->get_volume) {
2004 s->get_volume(s);
2005 pa_source_volume_change_flush(s);
2006 pa_sw_cvolume_divide(&s->thread_info.current_hw_volume, &s->real_volume, &s->soft_volume);
2007 }
2008
2009 /* In case the source implementor reset the SW volume. */
2010 if (!pa_cvolume_equal(&s->thread_info.soft_volume, &s->soft_volume)) {
2011 s->thread_info.soft_volume = s->soft_volume;
2012 }
2013
2014 return 0;
2015
2016 case PA_SOURCE_MESSAGE_SET_MUTE:
2017
2018 if (s->thread_info.soft_muted != s->muted) {
2019 s->thread_info.soft_muted = s->muted;
2020 }
2021
2022 if (s->flags & PA_SOURCE_DEFERRED_VOLUME && s->set_mute)
2023 s->set_mute(s);
2024
2025 return 0;
2026
2027 case PA_SOURCE_MESSAGE_GET_MUTE:
2028
2029 if (s->flags & PA_SOURCE_DEFERRED_VOLUME && s->get_mute)
2030 s->get_mute(s);
2031
2032 return 0;
2033
2034 case PA_SOURCE_MESSAGE_SET_STATE: {
2035
2036 pa_bool_t suspend_change =
2037 (s->thread_info.state == PA_SOURCE_SUSPENDED && PA_SOURCE_IS_OPENED(PA_PTR_TO_UINT(userdata))) ||
2038 (PA_SOURCE_IS_OPENED(s->thread_info.state) && PA_PTR_TO_UINT(userdata) == PA_SOURCE_SUSPENDED);
2039
2040 s->thread_info.state = PA_PTR_TO_UINT(userdata);
2041
2042 if (suspend_change) {
2043 pa_source_output *o;
2044 void *state = NULL;
2045
2046 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL)))
2047 if (o->suspend_within_thread)
2048 o->suspend_within_thread(o, s->thread_info.state == PA_SOURCE_SUSPENDED);
2049 }
2050
2051 return 0;
2052 }
2053
2054 case PA_SOURCE_MESSAGE_DETACH:
2055
2056 /* Detach all streams */
2057 pa_source_detach_within_thread(s);
2058 return 0;
2059
2060 case PA_SOURCE_MESSAGE_ATTACH:
2061
2062 /* Reattach all streams */
2063 pa_source_attach_within_thread(s);
2064 return 0;
2065
2066 case PA_SOURCE_MESSAGE_GET_REQUESTED_LATENCY: {
2067
2068 pa_usec_t *usec = userdata;
2069 *usec = pa_source_get_requested_latency_within_thread(s);
2070
2071 /* Yes, that's right: the IO thread will see (pa_usec_t) -1 when no
2072 * explicit requested latency is configured, while the main
2073 * thread will see max_latency */
2074 if (*usec == (pa_usec_t) -1)
2075 *usec = s->thread_info.max_latency;
2076
2077 return 0;
2078 }
2079
2080 case PA_SOURCE_MESSAGE_SET_LATENCY_RANGE: {
2081 pa_usec_t *r = userdata;
2082
2083 pa_source_set_latency_range_within_thread(s, r[0], r[1]);
2084
2085 return 0;
2086 }
2087
2088 case PA_SOURCE_MESSAGE_GET_LATENCY_RANGE: {
2089 pa_usec_t *r = userdata;
2090
2091 r[0] = s->thread_info.min_latency;
2092 r[1] = s->thread_info.max_latency;
2093
2094 return 0;
2095 }
2096
2097 case PA_SOURCE_MESSAGE_GET_FIXED_LATENCY:
2098
2099 *((pa_usec_t*) userdata) = s->thread_info.fixed_latency;
2100 return 0;
2101
2102 case PA_SOURCE_MESSAGE_SET_FIXED_LATENCY:
2103
2104 pa_source_set_fixed_latency_within_thread(s, (pa_usec_t) offset);
2105 return 0;
2106
2107 case PA_SOURCE_MESSAGE_GET_MAX_REWIND:
2108
2109 *((size_t*) userdata) = s->thread_info.max_rewind;
2110 return 0;
2111
2112 case PA_SOURCE_MESSAGE_SET_MAX_REWIND:
2113
2114 pa_source_set_max_rewind_within_thread(s, (size_t) offset);
2115 return 0;
2116
2117 case PA_SOURCE_MESSAGE_GET_LATENCY:
2118
2119 if (s->monitor_of) {
2120 *((pa_usec_t*) userdata) = 0;
2121 return 0;
2122 }
2123
2124 /* Implementors need to override this implementation! */
2125 return -1;
2126
2127 case PA_SOURCE_MESSAGE_SET_PORT:
2128
2129 pa_assert(userdata);
2130 if (s->set_port) {
2131 struct source_message_set_port *msg_data = userdata;
2132 msg_data->ret = s->set_port(s, msg_data->port);
2133 }
2134 return 0;
2135
2136 case PA_SOURCE_MESSAGE_UPDATE_VOLUME_AND_MUTE:
2137 /* This message is sent from the IO thread and handled in the main thread. */
2138 pa_assert_ctl_context();
2139
2140 /* Make sure we're not messing with the main thread when no longer linked */
2141 if (!PA_SOURCE_IS_LINKED(s->state))
2142 return 0;
2143
2144 pa_source_get_volume(s, TRUE);
2145 pa_source_get_mute(s, TRUE);
2146 return 0;
2147
2148 case PA_SOURCE_MESSAGE_MAX:
2149 ;
2150 }
2151
2152 return -1;
2153 }
2154
2155 /* Called from main thread */
2156 int pa_source_suspend_all(pa_core *c, pa_bool_t suspend, pa_suspend_cause_t cause) {
2157 pa_source *source;
2158 uint32_t idx;
2159 int ret = 0;
2160
2161 pa_core_assert_ref(c);
2162 pa_assert_ctl_context();
2163 pa_assert(cause != 0);
2164
2165 for (source = PA_SOURCE(pa_idxset_first(c->sources, &idx)); source; source = PA_SOURCE(pa_idxset_next(c->sources, &idx))) {
2166 int r;
2167
2168 if (source->monitor_of)
2169 continue;
2170
2171 if ((r = pa_source_suspend(source, suspend, cause)) < 0)
2172 ret = r;
2173 }
2174
2175 return ret;
2176 }
2177
2178 /* Called from main thread */
2179 void pa_source_detach(pa_source *s) {
2180 pa_source_assert_ref(s);
2181 pa_assert_ctl_context();
2182 pa_assert(PA_SOURCE_IS_LINKED(s->state));
2183
2184 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_DETACH, NULL, 0, NULL) == 0);
2185 }
2186
2187 /* Called from main thread */
2188 void pa_source_attach(pa_source *s) {
2189 pa_source_assert_ref(s);
2190 pa_assert_ctl_context();
2191 pa_assert(PA_SOURCE_IS_LINKED(s->state));
2192
2193 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_ATTACH, NULL, 0, NULL) == 0);
2194 }
2195
2196 /* Called from IO thread */
2197 void pa_source_detach_within_thread(pa_source *s) {
2198 pa_source_output *o;
2199 void *state = NULL;
2200
2201 pa_source_assert_ref(s);
2202 pa_source_assert_io_context(s);
2203 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
2204
2205 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2206 if (o->detach)
2207 o->detach(o);
2208 }
2209
2210 /* Called from IO thread */
2211 void pa_source_attach_within_thread(pa_source *s) {
2212 pa_source_output *o;
2213 void *state = NULL;
2214
2215 pa_source_assert_ref(s);
2216 pa_source_assert_io_context(s);
2217 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
2218
2219 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2220 if (o->attach)
2221 o->attach(o);
2222 }
2223
2224 /* Called from IO thread */
2225 pa_usec_t pa_source_get_requested_latency_within_thread(pa_source *s) {
2226 pa_usec_t result = (pa_usec_t) -1;
2227 pa_source_output *o;
2228 void *state = NULL;
2229
2230 pa_source_assert_ref(s);
2231 pa_source_assert_io_context(s);
2232
2233 if (!(s->flags & PA_SOURCE_DYNAMIC_LATENCY))
2234 return PA_CLAMP(s->thread_info.fixed_latency, s->thread_info.min_latency, s->thread_info.max_latency);
2235
2236 if (s->thread_info.requested_latency_valid)
2237 return s->thread_info.requested_latency;
2238
2239 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2240 if (o->thread_info.requested_source_latency != (pa_usec_t) -1 &&
2241 (result == (pa_usec_t) -1 || result > o->thread_info.requested_source_latency))
2242 result = o->thread_info.requested_source_latency;
2243
2244 if (result != (pa_usec_t) -1)
2245 result = PA_CLAMP(result, s->thread_info.min_latency, s->thread_info.max_latency);
2246
2247 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2248 /* Only cache this if we are fully set up */
2249 s->thread_info.requested_latency = result;
2250 s->thread_info.requested_latency_valid = TRUE;
2251 }
2252
2253 return result;
2254 }
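/* Worked example: if one output requests 25 ms, another requests 40 ms and a
 * third has no request ((pa_usec_t) -1), the loop above picks 25 ms, which is
 * then clamped into [min_latency, max_latency] before being cached. */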
2255
2256 /* Called from main thread */
2257 pa_usec_t pa_source_get_requested_latency(pa_source *s) {
2258 pa_usec_t usec = 0;
2259
2260 pa_source_assert_ref(s);
2261 pa_assert_ctl_context();
2262 pa_assert(PA_SOURCE_IS_LINKED(s->state));
2263
2264 if (s->state == PA_SOURCE_SUSPENDED)
2265 return 0;
2266
2267 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_REQUESTED_LATENCY, &usec, 0, NULL) == 0);
2268
2269 return usec;
2270 }
2271
2272 /* Called from IO thread */
2273 void pa_source_set_max_rewind_within_thread(pa_source *s, size_t max_rewind) {
2274 pa_source_output *o;
2275 void *state = NULL;
2276
2277 pa_source_assert_ref(s);
2278 pa_source_assert_io_context(s);
2279
2280 if (max_rewind == s->thread_info.max_rewind)
2281 return;
2282
2283 s->thread_info.max_rewind = max_rewind;
2284
2285 if (PA_SOURCE_IS_LINKED(s->thread_info.state))
2286 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2287 pa_source_output_update_max_rewind(o, s->thread_info.max_rewind);
2288 }
2289
2290 /* Called from main thread */
2291 void pa_source_set_max_rewind(pa_source *s, size_t max_rewind) {
2292 pa_source_assert_ref(s);
2293 pa_assert_ctl_context();
2294
2295 if (PA_SOURCE_IS_LINKED(s->state))
2296 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MAX_REWIND, NULL, max_rewind, NULL) == 0);
2297 else
2298 pa_source_set_max_rewind_within_thread(s, max_rewind);
2299 }
2300
2301 /* Called from IO thread */
2302 void pa_source_invalidate_requested_latency(pa_source *s, pa_bool_t dynamic) {
2303 pa_source_output *o;
2304 void *state = NULL;
2305
2306 pa_source_assert_ref(s);
2307 pa_source_assert_io_context(s);
2308
2309 if ((s->flags & PA_SOURCE_DYNAMIC_LATENCY))
2310 s->thread_info.requested_latency_valid = FALSE;
2311 else if (dynamic)
2312 return;
2313
2314 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2315
2316 if (s->update_requested_latency)
2317 s->update_requested_latency(s);
2318
2319 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL)))
2320 if (o->update_source_requested_latency)
2321 o->update_source_requested_latency(o);
2322 }
2323
2324 if (s->monitor_of)
2325 pa_sink_invalidate_requested_latency(s->monitor_of, dynamic);
2326 }
2327
2328 /* Called from main thread */
2329 void pa_source_set_latency_range(pa_source *s, pa_usec_t min_latency, pa_usec_t max_latency) {
2330 pa_source_assert_ref(s);
2331 pa_assert_ctl_context();
2332
2333 /* min_latency == 0: no limit
2334 * min_latency anything else: specified limit
2335 *
2336 * Similar for max_latency */
2337
2338 if (min_latency < ABSOLUTE_MIN_LATENCY)
2339 min_latency = ABSOLUTE_MIN_LATENCY;
2340
2341 if (max_latency <= 0 ||
2342 max_latency > ABSOLUTE_MAX_LATENCY)
2343 max_latency = ABSOLUTE_MAX_LATENCY;
2344
2345 pa_assert(min_latency <= max_latency);
2346
2347 /* Hmm, let's see if someone forgot to set PA_SOURCE_DYNAMIC_LATENCY here... */
2348 pa_assert((min_latency == ABSOLUTE_MIN_LATENCY &&
2349 max_latency == ABSOLUTE_MAX_LATENCY) ||
2350 (s->flags & PA_SOURCE_DYNAMIC_LATENCY));
2351
2352 if (PA_SOURCE_IS_LINKED(s->state)) {
2353 pa_usec_t r[2];
2354
2355 r[0] = min_latency;
2356 r[1] = max_latency;
2357
2358 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_LATENCY_RANGE, r, 0, NULL) == 0);
2359 } else
2360 pa_source_set_latency_range_within_thread(s, min_latency, max_latency);
2361 }
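/* Illustrative call (hypothetical driver code): a driver supporting dynamic
 * latency would typically declare its range before calling pa_source_put():
 *
 *     pa_source_set_latency_range(s, 5*PA_USEC_PER_MSEC, 2*PA_USEC_PER_SEC);
 *
 * Passing 0 means "no limit" for that bound; out-of-range values are clamped
 * to ABSOLUTE_MIN_LATENCY / ABSOLUTE_MAX_LATENCY above. */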
2362
2363 /* Called from main thread */
2364 void pa_source_get_latency_range(pa_source *s, pa_usec_t *min_latency, pa_usec_t *max_latency) {
2365 pa_source_assert_ref(s);
2366 pa_assert_ctl_context();
2367 pa_assert(min_latency);
2368 pa_assert(max_latency);
2369
2370 if (PA_SOURCE_IS_LINKED(s->state)) {
2371 pa_usec_t r[2] = { 0, 0 };
2372
2373 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_LATENCY_RANGE, r, 0, NULL) == 0);
2374
2375 *min_latency = r[0];
2376 *max_latency = r[1];
2377 } else {
2378 *min_latency = s->thread_info.min_latency;
2379 *max_latency = s->thread_info.max_latency;
2380 }
2381 }
2382
2383 /* Called from IO thread, and from main thread before pa_source_put() is called */
2384 void pa_source_set_latency_range_within_thread(pa_source *s, pa_usec_t min_latency, pa_usec_t max_latency) {
2385 pa_source_assert_ref(s);
2386 pa_source_assert_io_context(s);
2387
2388 pa_assert(min_latency >= ABSOLUTE_MIN_LATENCY);
2389 pa_assert(max_latency <= ABSOLUTE_MAX_LATENCY);
2390 pa_assert(min_latency <= max_latency);
2391
2392 /* Hmm, let's see if someone forgot to set PA_SOURCE_DYNAMIC_LATENCY here... */
2393 pa_assert((min_latency == ABSOLUTE_MIN_LATENCY &&
2394 max_latency == ABSOLUTE_MAX_LATENCY) ||
2395 (s->flags & PA_SOURCE_DYNAMIC_LATENCY) ||
2396 s->monitor_of);
2397
2398 if (s->thread_info.min_latency == min_latency &&
2399 s->thread_info.max_latency == max_latency)
2400 return;
2401
2402 s->thread_info.min_latency = min_latency;
2403 s->thread_info.max_latency = max_latency;
2404
2405 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2406 pa_source_output *o;
2407 void *state = NULL;
2408
2409 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2410 if (o->update_source_latency_range)
2411 o->update_source_latency_range(o);
2412 }
2413
2414 pa_source_invalidate_requested_latency(s, FALSE);
2415 }
2416
2417 /* Called from main thread, before the source is put */
2418 void pa_source_set_fixed_latency(pa_source *s, pa_usec_t latency) {
2419 pa_source_assert_ref(s);
2420 pa_assert_ctl_context();
2421
2422 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY) {
2423 pa_assert(latency == 0);
2424 return;
2425 }
2426
2427 if (latency < ABSOLUTE_MIN_LATENCY)
2428 latency = ABSOLUTE_MIN_LATENCY;
2429
2430 if (latency > ABSOLUTE_MAX_LATENCY)
2431 latency = ABSOLUTE_MAX_LATENCY;
2432
2433 if (PA_SOURCE_IS_LINKED(s->state))
2434 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_FIXED_LATENCY, NULL, (int64_t) latency, NULL) == 0);
2435 else
2436 s->thread_info.fixed_latency = latency;
2437 }
2438
2439 /* Called from main thread */
2440 pa_usec_t pa_source_get_fixed_latency(pa_source *s) {
2441 pa_usec_t latency;
2442
2443 pa_source_assert_ref(s);
2444 pa_assert_ctl_context();
2445
2446 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY)
2447 return 0;
2448
2449 if (PA_SOURCE_IS_LINKED(s->state))
2450 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_FIXED_LATENCY, &latency, 0, NULL) == 0);
2451 else
2452 latency = s->thread_info.fixed_latency;
2453
2454 return latency;
2455 }
2456
2457 /* Called from IO thread */
2458 void pa_source_set_fixed_latency_within_thread(pa_source *s, pa_usec_t latency) {
2459 pa_source_assert_ref(s);
2460 pa_source_assert_io_context(s);
2461
2462 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY) {
2463 pa_assert(latency == 0);
2464 return;
2465 }
2466
2467 pa_assert(latency >= ABSOLUTE_MIN_LATENCY);
2468 pa_assert(latency <= ABSOLUTE_MAX_LATENCY);
2469
2470 if (s->thread_info.fixed_latency == latency)
2471 return;
2472
2473 s->thread_info.fixed_latency = latency;
2474
2475 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2476 pa_source_output *o;
2477 void *state = NULL;
2478
2479 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2480 if (o->update_source_fixed_latency)
2481 o->update_source_fixed_latency(o);
2482 }
2483
2484 pa_source_invalidate_requested_latency(s, FALSE);
2485 }
2486
2487 /* Called from main thread */
2488 size_t pa_source_get_max_rewind(pa_source *s) {
2489 size_t r;
2490 pa_assert_ctl_context();
2491 pa_source_assert_ref(s);
2492
2493 if (!PA_SOURCE_IS_LINKED(s->state))
2494 return s->thread_info.max_rewind;
2495
2496 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_MAX_REWIND, &r, 0, NULL) == 0);
2497
2498 return r;
2499 }
2500
2501 /* Called from main context */
2502 int pa_source_set_port(pa_source *s, const char *name, pa_bool_t save) {
2503 pa_device_port *port;
2504 int ret;
2505
2506 pa_source_assert_ref(s);
2507 pa_assert_ctl_context();
2508
2509 if (!s->set_port) {
2510 pa_log_debug("set_port() operation not implemented for source %u \"%s\"", s->index, s->name);
2511 return -PA_ERR_NOTIMPLEMENTED;
2512 }
2513
2514 if (!s->ports)
2515 return -PA_ERR_NOENTITY;
2516
2517 if (!(port = pa_hashmap_get(s->ports, name)))
2518 return -PA_ERR_NOENTITY;
2519
2520 if (s->active_port == port) {
2521 s->save_port = s->save_port || save;
2522 return 0;
2523 }
2524
2525 if (s->flags & PA_SOURCE_DEFERRED_VOLUME) {
2526 struct source_message_set_port msg = { .port = port, .ret = 0 };
2527 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_PORT, &msg, 0, NULL) == 0);
2528 ret = msg.ret;
2529 }
2530 else
2531 ret = s->set_port(s, port);
2532
2533 if (ret < 0)
2534 return -PA_ERR_NOENTITY;
2535
2536 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
2537
2538 pa_log_info("Changed port of source %u \"%s\" to %s", s->index, s->name, port->name);
2539
2540 s->active_port = port;
2541 s->save_port = save;
2542
2543 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PORT_CHANGED], s);
2544
2545 return 0;
2546 }
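/* Illustrative caller (hypothetical code) switching the active port by name
 * (example port name) and saving the choice:
 *
 *     if (pa_source_set_port(s, "analog-input-microphone", TRUE) < 0)
 *         pa_log_warn("Failed to switch port");
 *
 * Failures map to -PA_ERR_NOTIMPLEMENTED (no set_port() callback) or
 * -PA_ERR_NOENTITY (unknown port name, or set_port() itself failed). */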
2547
2548 PA_STATIC_FLIST_DECLARE(pa_source_volume_change, 0, pa_xfree);
2549
2550 /* Called from the IO thread. */
2551 static pa_source_volume_change *pa_source_volume_change_new(pa_source *s) {
2552 pa_source_volume_change *c;
2553 if (!(c = pa_flist_pop(PA_STATIC_FLIST_GET(pa_source_volume_change))))
2554 c = pa_xnew(pa_source_volume_change, 1);
2555
2556 PA_LLIST_INIT(pa_source_volume_change, c);
2557 c->at = 0;
2558 pa_cvolume_reset(&c->hw_volume, s->sample_spec.channels);
2559 return c;
2560 }
2561
2562 /* Called from the IO thread. */
2563 static void pa_source_volume_change_free(pa_source_volume_change *c) {
2564 pa_assert(c);
2565 if (pa_flist_push(PA_STATIC_FLIST_GET(pa_source_volume_change), c) < 0)
2566 pa_xfree(c);
2567 }
2568
2569 /* Called from the IO thread. */
2570 void pa_source_volume_change_push(pa_source *s) {
2571 pa_source_volume_change *c = NULL;
2572 pa_source_volume_change *nc = NULL;
2573 uint32_t safety_margin = s->thread_info.volume_change_safety_margin;
2574
2575 const char *direction = NULL;
2576
2577 pa_assert(s);
2578 nc = pa_source_volume_change_new(s);
2579
2580 /* NOTE: There are already more different volumes in pa_source than I can remember.
2581 * Adding one more volume for HW would get rid of this, but I am trying
2582 * to survive with the ones we already have. */
2583 pa_sw_cvolume_divide(&nc->hw_volume, &s->real_volume, &s->soft_volume);
2584
2585 if (!s->thread_info.volume_changes && pa_cvolume_equal(&nc->hw_volume, &s->thread_info.current_hw_volume)) {
2586 pa_log_debug("Volume not changing");
2587 pa_source_volume_change_free(nc);
2588 return;
2589 }
2590
2591 nc->at = pa_source_get_latency_within_thread(s);
2592 nc->at += pa_rtclock_now() + s->thread_info.volume_change_extra_delay;
2593
2594 if (s->thread_info.volume_changes_tail) {
2595 for (c = s->thread_info.volume_changes_tail; c; c = c->prev) {
2596 /* If volume is going up let's do it a bit late. If it is going
2597 * down let's do it a bit early. */
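/* (Roughly speaking, for illustration: with a safety margin of 10000 usec an
 * increase scheduled at time t gets pushed back to about t + 10 ms, while a
 * decrease gets pulled forward to about t - 10 ms, presumably so that during
 * the uncertainty window the hardware sits at the quieter of the two levels.) */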
2598 if (pa_cvolume_avg(&nc->hw_volume) > pa_cvolume_avg(&c->hw_volume)) {
2599 if (nc->at + safety_margin > c->at) {
2600 nc->at += safety_margin;
2601 direction = "up";
2602 break;
2603 }
2604 }
2605 else if (nc->at - safety_margin > c->at) {
2606 nc->at -= safety_margin;
2607 direction = "down";
2608 break;
2609 }
2610 }
2611 }
2612
2613 if (c == NULL) {
2614 if (pa_cvolume_avg(&nc->hw_volume) > pa_cvolume_avg(&s->thread_info.current_hw_volume)) {
2615 nc->at += safety_margin;
2616 direction = "up";
2617 } else {
2618 nc->at -= safety_margin;
2619 direction = "down";
2620 }
2621 PA_LLIST_PREPEND(pa_source_volume_change, s->thread_info.volume_changes, nc);
2622 }
2623 else {
2624 PA_LLIST_INSERT_AFTER(pa_source_volume_change, s->thread_info.volume_changes, c, nc);
2625 }
2626
2627 pa_log_debug("Volume going %s to %d at %llu", direction, pa_cvolume_avg(&nc->hw_volume), (long long unsigned) nc->at);
2628
2629 /* Volume changes that were queued earlier but are scheduled to happen later than this one are obsolete now, so drop them. */
2630 PA_LLIST_FOREACH(c, nc->next) {
2631 pa_log_debug("Volume change to %d at %llu was dropped", pa_cvolume_avg(&c->hw_volume), (long long unsigned) c->at);
2632 pa_source_volume_change_free(c);
2633 }
2634 nc->next = NULL;
2635 s->thread_info.volume_changes_tail = nc;
2636 }
2637
2638 /* Called from the IO thread. */
2639 static void pa_source_volume_change_flush(pa_source *s) {
2640 pa_source_volume_change *c = s->thread_info.volume_changes;
2641 pa_assert(s);
2642 s->thread_info.volume_changes = NULL;
2643 s->thread_info.volume_changes_tail = NULL;
2644 while (c) {
2645 pa_source_volume_change *next = c->next;
2646 pa_source_volume_change_free(c);
2647 c = next;
2648 }
2649 }
2650
2651 /* Called from the IO thread. */
2652 pa_bool_t pa_source_volume_change_apply(pa_source *s, pa_usec_t *usec_to_next) {
2653 pa_usec_t now;
2654 pa_bool_t ret = FALSE;
2655
2656 pa_assert(s);
2657
2658 if (!s->thread_info.volume_changes || !PA_SOURCE_IS_LINKED(s->state)) {
2659 if (usec_to_next)
2660 *usec_to_next = 0;
2661 return ret;
2662 }
2663
2664 pa_assert(s->write_volume);
2665
2666 now = pa_rtclock_now();
2667
2668 while (s->thread_info.volume_changes && now >= s->thread_info.volume_changes->at) {
2669 pa_source_volume_change *c = s->thread_info.volume_changes;
2670 PA_LLIST_REMOVE(pa_source_volume_change, s->thread_info.volume_changes, c);
2671 pa_log_debug("Volume change to %d at %llu was written %llu usec late",
2672 pa_cvolume_avg(&c->hw_volume), (long long unsigned) c->at, (long long unsigned) (now - c->at));
2673 ret = TRUE;
2674 s->thread_info.current_hw_volume = c->hw_volume;
2675 pa_source_volume_change_free(c);
2676 }
2677
2678 if (ret)
2679 s->write_volume(s);
2680
2681 if (s->thread_info.volume_changes) {
2682 if (usec_to_next)
2683 *usec_to_next = s->thread_info.volume_changes->at - now;
2684 if (pa_log_ratelimit(PA_LOG_DEBUG))
2685 pa_log_debug("Next volume change in %lld usec", (long long) (s->thread_info.volume_changes->at - now));
2686 }
2687 else {
2688 if (usec_to_next)
2689 *usec_to_next = 0;
2690 s->thread_info.volume_changes_tail = NULL;
2691 }
2692 return ret;
2693 }
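/* Minimal IO-loop sketch (hypothetical driver code) for PA_SOURCE_DEFERRED_VOLUME:
 *
 *     pa_usec_t usec_to_next;
 *     pa_source_volume_change_apply(s, &usec_to_next);
 *     if (usec_to_next > 0)
 *         arm_volume_timer(usec_to_next);   // hypothetical helper
 *
 * i.e. write out everything that is due now, then wake up again when the next
 * pending change (if any) becomes due; 0 means nothing is pending. */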
2694
2695
2696 /* Called from the main thread */
2697 /* Gets the list of formats supported by the source. The members and idxset must
2698 * be freed by the caller. */
2699 pa_idxset* pa_source_get_formats(pa_source *s) {
2700 pa_idxset *ret;
2701
2702 pa_assert(s);
2703
2704 if (s->get_formats) {
2705 /* Source supports format query, all is good */
2706 ret = s->get_formats(s);
2707 } else {
2708 /* Source doesn't support format query, so assume it does PCM */
2709 pa_format_info *f = pa_format_info_new();
2710 f->encoding = PA_ENCODING_PCM;
2711
2712 ret = pa_idxset_new(NULL, NULL);
2713 pa_idxset_put(ret, f, NULL);
2714 }
2715
2716 return ret;
2717 }
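/* Illustrative caller (hypothetical code); note that both the idxset and its
 * members have to be freed, as pa_source_check_format() below does:
 *
 *     pa_idxset *formats = pa_source_get_formats(s);
 *     pa_format_info *f;
 *     uint32_t idx;
 *
 *     PA_IDXSET_FOREACH(f, formats, idx)
 *         pa_log_debug("Supported encoding: %d", (int) f->encoding);
 *
 *     pa_idxset_free(formats, (pa_free2_cb_t) pa_format_info_free2, NULL);
 */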
2718
2719 /* Called from the main thread */
2720 /* Checks if the source can accept this format */
2721 pa_bool_t pa_source_check_format(pa_source *s, pa_format_info *f)
2722 {
2723 pa_idxset *formats = NULL;
2724 pa_bool_t ret = FALSE;
2725
2726 pa_assert(s);
2727 pa_assert(f);
2728
2729 formats = pa_source_get_formats(s);
2730
2731 if (formats) {
2732 pa_format_info *finfo_device;
2733 uint32_t i;
2734
2735 PA_IDXSET_FOREACH(finfo_device, formats, i) {
2736 if (pa_format_info_is_compatible(finfo_device, f)) {
2737 ret = TRUE;
2738 break;
2739 }
2740 }
2741
2742 pa_idxset_free(formats, (pa_free2_cb_t) pa_format_info_free2, NULL);
2743 }
2744
2745 return ret;
2746 }
2747
2748 /* Called from the main thread */
2749 /* Calculates the intersection between formats supported by the source and
2750 * in_formats, and returns these, in the order of the source's formats. */
2751 pa_idxset* pa_source_check_formats(pa_source *s, pa_idxset *in_formats) {
2752 pa_idxset *out_formats = pa_idxset_new(NULL, NULL), *source_formats = NULL;
2753 pa_format_info *f_source, *f_in;
2754 uint32_t i, j;
2755
2756 pa_assert(s);
2757
2758 if (!in_formats || pa_idxset_isempty(in_formats))
2759 goto done;
2760
2761 source_formats = pa_source_get_formats(s);
2762
2763 PA_IDXSET_FOREACH(f_source, source_formats, i) {
2764 PA_IDXSET_FOREACH(f_in, in_formats, j) {
2765 if (pa_format_info_is_compatible(f_source, f_in))
2766 pa_idxset_put(out_formats, pa_format_info_copy(f_in), NULL);
2767 }
2768 }
2769
2770 done:
2771 if (source_formats)
2772 pa_idxset_free(source_formats, (pa_free2_cb_t) pa_format_info_free2, NULL);
2773
2774 return out_formats;
2775 }
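/* Illustrative use (hypothetical code), e.g. when negotiating the formats of
 * a new stream against this source:
 *
 *     pa_idxset *compatible = pa_source_check_formats(s, requested_formats);
 *
 *     if (pa_idxset_isempty(compatible))
 *         pa_log_info("None of the requested formats is supported");
 *
 *     pa_idxset_free(compatible, (pa_free2_cb_t) pa_format_info_free2, NULL);
 */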