[pulseaudio] / src / pulsecore / source.c
1 /***
2 This file is part of PulseAudio.
3
4 Copyright 2004-2006 Lennart Poettering
5 Copyright 2006 Pierre Ossman <ossman@cendio.se> for Cendio AB
6
7 PulseAudio is free software; you can redistribute it and/or modify
8 it under the terms of the GNU Lesser General Public License as published
9 by the Free Software Foundation; either version 2.1 of the License,
10 or (at your option) any later version.
11
12 PulseAudio is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU Lesser General Public License
18 along with PulseAudio; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
20 USA.
21 ***/
22
23 #ifdef HAVE_CONFIG_H
24 #include <config.h>
25 #endif
26
27 #include <stdio.h>
28 #include <stdlib.h>
29
30 #include <pulse/format.h>
31 #include <pulse/utf8.h>
32 #include <pulse/xmalloc.h>
33 #include <pulse/timeval.h>
34 #include <pulse/util.h>
35 #include <pulse/rtclock.h>
36 #include <pulse/internal.h>
37
38 #include <pulsecore/core-util.h>
39 #include <pulsecore/source-output.h>
40 #include <pulsecore/namereg.h>
41 #include <pulsecore/core-subscribe.h>
42 #include <pulsecore/log.h>
43 #include <pulsecore/sample-util.h>
44 #include <pulsecore/flist.h>
45
46 #include "source.h"
47
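/* Hard bounds for a source's configurable latency, and the fallback fixed
 * latency used when dynamic latency is not supported, all in microseconds. */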
48 #define ABSOLUTE_MIN_LATENCY (500)
49 #define ABSOLUTE_MAX_LATENCY (10*PA_USEC_PER_SEC)
50 #define DEFAULT_FIXED_LATENCY (250*PA_USEC_PER_MSEC)
51
52 PA_DEFINE_PUBLIC_CLASS(pa_source, pa_msgobject);
53
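/* One entry in the queue of deferred hardware volume changes: the volume to
 * write and the time at which it should take effect. */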
54 struct pa_source_volume_change {
55 pa_usec_t at;
56 pa_cvolume hw_volume;
57
58 PA_LLIST_FIELDS(pa_source_volume_change);
59 };
60
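/* Argument bundle for the set-port message: the port to activate and a slot
 * that carries the result back to the caller. */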
61 struct source_message_set_port {
62 pa_device_port *port;
63 int ret;
64 };
65
66 static void source_free(pa_object *o);
67
68 static void pa_source_volume_change_push(pa_source *s);
69 static void pa_source_volume_change_flush(pa_source *s);
70
71 pa_source_new_data* pa_source_new_data_init(pa_source_new_data *data) {
72 pa_assert(data);
73
74 pa_zero(*data);
75 data->proplist = pa_proplist_new();
76
77 return data;
78 }
79
80 void pa_source_new_data_set_name(pa_source_new_data *data, const char *name) {
81 pa_assert(data);
82
83 pa_xfree(data->name);
84 data->name = pa_xstrdup(name);
85 }
86
87 void pa_source_new_data_set_sample_spec(pa_source_new_data *data, const pa_sample_spec *spec) {
88 pa_assert(data);
89
90 if ((data->sample_spec_is_set = !!spec))
91 data->sample_spec = *spec;
92 }
93
94 void pa_source_new_data_set_channel_map(pa_source_new_data *data, const pa_channel_map *map) {
95 pa_assert(data);
96
97 if ((data->channel_map_is_set = !!map))
98 data->channel_map = *map;
99 }
100
101 void pa_source_new_data_set_alternate_sample_rate(pa_source_new_data *data, const uint32_t alternate_sample_rate) {
102 pa_assert(data);
103
104 data->alternate_sample_rate_is_set = TRUE;
105 data->alternate_sample_rate = alternate_sample_rate;
106 }
107
108 void pa_source_new_data_set_volume(pa_source_new_data *data, const pa_cvolume *volume) {
109 pa_assert(data);
110
111 if ((data->volume_is_set = !!volume))
112 data->volume = *volume;
113 }
114
115 void pa_source_new_data_set_muted(pa_source_new_data *data, pa_bool_t mute) {
116 pa_assert(data);
117
118 data->muted_is_set = TRUE;
119 data->muted = !!mute;
120 }
121
122 void pa_source_new_data_set_port(pa_source_new_data *data, const char *port) {
123 pa_assert(data);
124
125 pa_xfree(data->active_port);
126 data->active_port = pa_xstrdup(port);
127 }
128
129 void pa_source_new_data_done(pa_source_new_data *data) {
130 pa_assert(data);
131
132 pa_proplist_free(data->proplist);
133
134 if (data->ports) {
135 pa_device_port *p;
136
137 while ((p = pa_hashmap_steal_first(data->ports)))
138 pa_device_port_free(p);
139
140 pa_hashmap_free(data->ports, NULL, NULL);
141 }
142
143 pa_xfree(data->name);
144 pa_xfree(data->active_port);
145 }
146
147 /* Called from main context */
148 static void reset_callbacks(pa_source *s) {
149 pa_assert(s);
150
151 s->set_state = NULL;
152 s->get_volume = NULL;
153 s->set_volume = NULL;
154 s->write_volume = NULL;
155 s->get_mute = NULL;
156 s->set_mute = NULL;
157 s->update_requested_latency = NULL;
158 s->set_port = NULL;
159 s->get_formats = NULL;
160 s->update_rate = NULL;
161 }
162
163 /* Called from main context */
164 pa_source* pa_source_new(
165 pa_core *core,
166 pa_source_new_data *data,
167 pa_source_flags_t flags) {
168
169 pa_source *s;
170 const char *name;
171 char st[PA_SAMPLE_SPEC_SNPRINT_MAX], cm[PA_CHANNEL_MAP_SNPRINT_MAX];
172 char *pt;
173
174 pa_assert(core);
175 pa_assert(data);
176 pa_assert(data->name);
177 pa_assert_ctl_context();
178
179 s = pa_msgobject_new(pa_source);
180
181 if (!(name = pa_namereg_register(core, data->name, PA_NAMEREG_SOURCE, s, data->namereg_fail))) {
182 pa_log_debug("Failed to register name %s.", data->name);
183 pa_xfree(s);
184 return NULL;
185 }
186
187 pa_source_new_data_set_name(data, name);
188
189 if (pa_hook_fire(&core->hooks[PA_CORE_HOOK_SOURCE_NEW], data) < 0) {
190 pa_xfree(s);
191 pa_namereg_unregister(core, name);
192 return NULL;
193 }
194
195 /* FIXME, need to free s here on failure */
196
197 pa_return_null_if_fail(!data->driver || pa_utf8_valid(data->driver));
198 pa_return_null_if_fail(data->name && pa_utf8_valid(data->name) && data->name[0]);
199
200 pa_return_null_if_fail(data->sample_spec_is_set && pa_sample_spec_valid(&data->sample_spec));
201
202 if (!data->channel_map_is_set)
203 pa_return_null_if_fail(pa_channel_map_init_auto(&data->channel_map, data->sample_spec.channels, PA_CHANNEL_MAP_DEFAULT));
204
205 pa_return_null_if_fail(pa_channel_map_valid(&data->channel_map));
206 pa_return_null_if_fail(data->channel_map.channels == data->sample_spec.channels);
207
208 /* FIXME: There should probably be a general function for checking whether
209 * the source volume is allowed to be set, like there is for source outputs. */
210 pa_assert(!data->volume_is_set || !(flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
211
212 if (!data->volume_is_set) {
213 pa_cvolume_reset(&data->volume, data->sample_spec.channels);
214 data->save_volume = FALSE;
215 }
216
217 pa_return_null_if_fail(pa_cvolume_valid(&data->volume));
218 pa_return_null_if_fail(pa_cvolume_compatible(&data->volume, &data->sample_spec));
219
220 if (!data->muted_is_set)
221 data->muted = FALSE;
222
223 if (data->card)
224 pa_proplist_update(data->proplist, PA_UPDATE_MERGE, data->card->proplist);
225
226 pa_device_init_description(data->proplist);
227 pa_device_init_icon(data->proplist, FALSE);
228 pa_device_init_intended_roles(data->proplist);
229
230 if (pa_hook_fire(&core->hooks[PA_CORE_HOOK_SOURCE_FIXATE], data) < 0) {
231 pa_xfree(s);
232 pa_namereg_unregister(core, name);
233 return NULL;
234 }
235
236 s->parent.parent.free = source_free;
237 s->parent.process_msg = pa_source_process_msg;
238
239 s->core = core;
240 s->state = PA_SOURCE_INIT;
241 s->flags = flags;
242 s->priority = 0;
243 s->suspend_cause = 0;
244 s->name = pa_xstrdup(name);
245 s->proplist = pa_proplist_copy(data->proplist);
246 s->driver = pa_xstrdup(pa_path_get_filename(data->driver));
247 s->module = data->module;
248 s->card = data->card;
249
250 s->priority = pa_device_init_priority(s->proplist);
251
252 s->sample_spec = data->sample_spec;
253 s->channel_map = data->channel_map;
254 s->default_sample_rate = s->sample_spec.rate;
255
256 if (data->alternate_sample_rate_is_set)
257 s->alternate_sample_rate = data->alternate_sample_rate;
258 else
259 s->alternate_sample_rate = s->core->alternate_sample_rate;
260
261 if (s->sample_spec.rate == s->alternate_sample_rate) {
262 pa_log_warn("Default and alternate sample rates are the same.");
263 s->alternate_sample_rate = 0;
264 }
265
266 s->outputs = pa_idxset_new(NULL, NULL);
267 s->n_corked = 0;
268 s->monitor_of = NULL;
269 s->output_from_master = NULL;
270
271 s->reference_volume = s->real_volume = data->volume;
272 pa_cvolume_reset(&s->soft_volume, s->sample_spec.channels);
273 s->base_volume = PA_VOLUME_NORM;
274 s->n_volume_steps = PA_VOLUME_NORM+1;
275 s->muted = data->muted;
276 s->refresh_volume = s->refresh_muted = FALSE;
277
278 reset_callbacks(s);
279 s->userdata = NULL;
280
281 s->asyncmsgq = NULL;
282
283 /* As a minor optimization we just steal the list instead of
284 * copying it here */
285 s->ports = data->ports;
286 data->ports = NULL;
287
288 s->active_port = NULL;
289 s->save_port = FALSE;
290
291 if (data->active_port && s->ports)
292 if ((s->active_port = pa_hashmap_get(s->ports, data->active_port)))
293 s->save_port = data->save_port;
294
295 if (!s->active_port && s->ports) {
296 void *state;
297 pa_device_port *p;
298
299 PA_HASHMAP_FOREACH(p, s->ports, state)
300 if (!s->active_port || p->priority > s->active_port->priority)
301 s->active_port = p;
302 }
303
304 s->save_volume = data->save_volume;
305 s->save_muted = data->save_muted;
306
307 pa_silence_memchunk_get(
308 &core->silence_cache,
309 core->mempool,
310 &s->silence,
311 &s->sample_spec,
312 0);
313
314 s->thread_info.rtpoll = NULL;
315 s->thread_info.outputs = pa_hashmap_new(pa_idxset_trivial_hash_func, pa_idxset_trivial_compare_func);
316 s->thread_info.soft_volume = s->soft_volume;
317 s->thread_info.soft_muted = s->muted;
318 s->thread_info.state = s->state;
319 s->thread_info.max_rewind = 0;
320 s->thread_info.requested_latency_valid = FALSE;
321 s->thread_info.requested_latency = 0;
322 s->thread_info.min_latency = ABSOLUTE_MIN_LATENCY;
323 s->thread_info.max_latency = ABSOLUTE_MAX_LATENCY;
324 s->thread_info.fixed_latency = flags & PA_SOURCE_DYNAMIC_LATENCY ? 0 : DEFAULT_FIXED_LATENCY;
325
326 PA_LLIST_HEAD_INIT(pa_source_volume_change, s->thread_info.volume_changes);
327 s->thread_info.volume_changes_tail = NULL;
328 pa_sw_cvolume_multiply(&s->thread_info.current_hw_volume, &s->soft_volume, &s->real_volume);
329 s->thread_info.volume_change_safety_margin = core->deferred_volume_safety_margin_usec;
330 s->thread_info.volume_change_extra_delay = core->deferred_volume_extra_delay_usec;
331
332 /* FIXME: This should probably be moved to pa_source_put() */
333 pa_assert_se(pa_idxset_put(core->sources, s, &s->index) >= 0);
334
335 if (s->card)
336 pa_assert_se(pa_idxset_put(s->card->sources, s, NULL) >= 0);
337
338 pt = pa_proplist_to_string_sep(s->proplist, "\n ");
339 pa_log_info("Created source %u \"%s\" with sample spec %s and channel map %s\n %s",
340 s->index,
341 s->name,
342 pa_sample_spec_snprint(st, sizeof(st), &s->sample_spec),
343 pa_channel_map_snprint(cm, sizeof(cm), &s->channel_map),
344 pt);
345 pa_xfree(pt);
346
347 return s;
348 }
349
350 /* Called from main context */
351 static int source_set_state(pa_source *s, pa_source_state_t state) {
352 int ret;
353 pa_bool_t suspend_change;
354 pa_source_state_t original_state;
355
356 pa_assert(s);
357 pa_assert_ctl_context();
358
359 if (s->state == state)
360 return 0;
361
362 original_state = s->state;
363
364 suspend_change =
365 (original_state == PA_SOURCE_SUSPENDED && PA_SOURCE_IS_OPENED(state)) ||
366 (PA_SOURCE_IS_OPENED(original_state) && state == PA_SOURCE_SUSPENDED);
367
368 if (s->set_state)
369 if ((ret = s->set_state(s, state)) < 0)
370 return ret;
371
372 if (s->asyncmsgq)
373 if ((ret = pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_STATE, PA_UINT_TO_PTR(state), 0, NULL)) < 0) {
374
375 if (s->set_state)
376 s->set_state(s, original_state);
377
378 return ret;
379 }
380
381 s->state = state;
382
383 if (state != PA_SOURCE_UNLINKED) { /* if we enter UNLINKED state pa_source_unlink() will fire the appropriate events */
384 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_STATE_CHANGED], s);
385 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
386 }
387
388 if (suspend_change) {
389 pa_source_output *o;
390 uint32_t idx;
391
392 /* We're suspending or resuming, tell everyone about it */
393
394 PA_IDXSET_FOREACH(o, s->outputs, idx)
395 if (s->state == PA_SOURCE_SUSPENDED &&
396 (o->flags & PA_SOURCE_OUTPUT_KILL_ON_SUSPEND))
397 pa_source_output_kill(o);
398 else if (o->suspend)
399 o->suspend(o, state == PA_SOURCE_SUSPENDED);
400 }
401
402 return 0;
403 }
404
405 void pa_source_set_get_volume_callback(pa_source *s, pa_source_cb_t cb) {
406 pa_assert(s);
407
408 s->get_volume = cb;
409 }
410
411 void pa_source_set_set_volume_callback(pa_source *s, pa_source_cb_t cb) {
412 pa_source_flags_t flags;
413
414 pa_assert(s);
415 pa_assert(!s->write_volume || cb);
416
417 s->set_volume = cb;
418
419 /* Save the current flags so we can tell if they've changed */
420 flags = s->flags;
421
422 if (cb) {
423 /* The source implementor is responsible for setting decibel volume support */
424 s->flags |= PA_SOURCE_HW_VOLUME_CTRL;
425 } else {
426 s->flags &= ~PA_SOURCE_HW_VOLUME_CTRL;
427 /* See note below in pa_source_put() about volume sharing and decibel volumes */
428 pa_source_enable_decibel_volume(s, !(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
429 }
430
431 /* If the flags have changed after init, let any clients know via a change event */
432 if (s->state != PA_SOURCE_INIT && flags != s->flags)
433 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
434 }
435
436 void pa_source_set_write_volume_callback(pa_source *s, pa_source_cb_t cb) {
437 pa_source_flags_t flags;
438
439 pa_assert(s);
440 pa_assert(!cb || s->set_volume);
441
442 s->write_volume = cb;
443
444 /* Save the current flags so we can tell if they've changed */
445 flags = s->flags;
446
447 if (cb)
448 s->flags |= PA_SOURCE_DEFERRED_VOLUME;
449 else
450 s->flags &= ~PA_SOURCE_DEFERRED_VOLUME;
451
452 /* If the flags have changed after init, let any clients know via a change event */
453 if (s->state != PA_SOURCE_INIT && flags != s->flags)
454 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
455 }
456
457 void pa_source_set_get_mute_callback(pa_source *s, pa_source_cb_t cb) {
458 pa_assert(s);
459
460 s->get_mute = cb;
461 }
462
463 void pa_source_set_set_mute_callback(pa_source *s, pa_source_cb_t cb) {
464 pa_source_flags_t flags;
465
466 pa_assert(s);
467
468 s->set_mute = cb;
469
470 /* Save the current flags so we can tell if they've changed */
471 flags = s->flags;
472
473 if (cb)
474 s->flags |= PA_SOURCE_HW_MUTE_CTRL;
475 else
476 s->flags &= ~PA_SOURCE_HW_MUTE_CTRL;
477
478 /* If the flags have changed after init, let any clients know via a change event */
479 if (s->state != PA_SOURCE_INIT && flags != s->flags)
480 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
481 }
482
483 static void enable_flat_volume(pa_source *s, pa_bool_t enable) {
484 pa_source_flags_t flags;
485
486 pa_assert(s);
487
488 /* Always follow the overall user preference here */
489 enable = enable && s->core->flat_volumes;
490
491 /* Save the current flags so we can tell if they've changed */
492 flags = s->flags;
493
494 if (enable)
495 s->flags |= PA_SOURCE_FLAT_VOLUME;
496 else
497 s->flags &= ~PA_SOURCE_FLAT_VOLUME;
498
499 /* If the flags have changed after init, let any clients know via a change event */
500 if (s->state != PA_SOURCE_INIT && flags != s->flags)
501 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
502 }
503
504 void pa_source_enable_decibel_volume(pa_source *s, pa_bool_t enable) {
505 pa_source_flags_t flags;
506
507 pa_assert(s);
508
509 /* Save the current flags so we can tell if they've changed */
510 flags = s->flags;
511
512 if (enable) {
513 s->flags |= PA_SOURCE_DECIBEL_VOLUME;
514 enable_flat_volume(s, TRUE);
515 } else {
516 s->flags &= ~PA_SOURCE_DECIBEL_VOLUME;
517 enable_flat_volume(s, FALSE);
518 }
519
520 /* If the flags have changed after init, let any clients know via a change event */
521 if (s->state != PA_SOURCE_INIT && flags != s->flags)
522 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
523 }
524
525 /* Called from main context */
526 void pa_source_put(pa_source *s) {
527 pa_source_assert_ref(s);
528 pa_assert_ctl_context();
529
530 pa_assert(s->state == PA_SOURCE_INIT);
531 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) || s->output_from_master);
532
533 /* The following fields must be initialized properly when calling _put() */
534 pa_assert(s->asyncmsgq);
535 pa_assert(s->thread_info.min_latency <= s->thread_info.max_latency);
536
537 /* Generally, flags should be initialized via pa_source_new(). As a
538 * special exception we allow some volume related flags to be set
539 * between _new() and _put() by the callback setter functions above.
540 *
541      * Thus we implement a couple of safeguards here which ensure the above
542 * setters were used (or at least the implementor made manual changes
543 * in a compatible way).
544 *
545 * Note: All of these flags set here can change over the life time
546 * of the source. */
547 pa_assert(!(s->flags & PA_SOURCE_HW_VOLUME_CTRL) || s->set_volume);
548 pa_assert(!(s->flags & PA_SOURCE_DEFERRED_VOLUME) || s->write_volume);
549 pa_assert(!(s->flags & PA_SOURCE_HW_MUTE_CTRL) || s->set_mute);
550
551 /* XXX: Currently decibel volume is disabled for all sources that use volume
552 * sharing. When the master source supports decibel volume, it would be good
553 * to have the flag also in the filter source, but currently we don't do that
554 * so that the flags of the filter source never change when it's moved from
555 * a master source to another. One solution for this problem would be to
556 * remove user-visible volume altogether from filter sources when volume
557 * sharing is used, but the current approach was easier to implement... */
558 /* We always support decibel volumes in software, otherwise we leave it to
559 * the source implementor to set this flag as needed.
560 *
561 * Note: This flag can also change over the life time of the source. */
562 if (!(s->flags & PA_SOURCE_HW_VOLUME_CTRL) && !(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
563 pa_source_enable_decibel_volume(s, TRUE);
564
565     /* If the source implementor supports dB volumes by itself, we should always
566 * try and enable flat volumes too */
567 if ((s->flags & PA_SOURCE_DECIBEL_VOLUME))
568 enable_flat_volume(s, TRUE);
569
570 if (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) {
571 pa_source *root_source = pa_source_get_master(s);
572
573 pa_assert(PA_LIKELY(root_source));
574
575 s->reference_volume = root_source->reference_volume;
576 pa_cvolume_remap(&s->reference_volume, &root_source->channel_map, &s->channel_map);
577
578 s->real_volume = root_source->real_volume;
579 pa_cvolume_remap(&s->real_volume, &root_source->channel_map, &s->channel_map);
580 } else
581         /* We assume that if the source implementor changed the default
582 * volume he did so in real_volume, because that is the usual
583 * place where he is supposed to place his changes. */
584 s->reference_volume = s->real_volume;
585
586 s->thread_info.soft_volume = s->soft_volume;
587 s->thread_info.soft_muted = s->muted;
588 pa_sw_cvolume_multiply(&s->thread_info.current_hw_volume, &s->soft_volume, &s->real_volume);
589
590 pa_assert((s->flags & PA_SOURCE_HW_VOLUME_CTRL)
591 || (s->base_volume == PA_VOLUME_NORM
592 && ((s->flags & PA_SOURCE_DECIBEL_VOLUME || (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)))));
593 pa_assert(!(s->flags & PA_SOURCE_DECIBEL_VOLUME) || s->n_volume_steps == PA_VOLUME_NORM+1);
594 pa_assert(!(s->flags & PA_SOURCE_DYNAMIC_LATENCY) == (s->thread_info.fixed_latency != 0));
595
596 pa_assert_se(source_set_state(s, PA_SOURCE_IDLE) == 0);
597
598 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_NEW, s->index);
599 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PUT], s);
600 }
601
602 /* Called from main context */
603 void pa_source_unlink(pa_source *s) {
604 pa_bool_t linked;
605 pa_source_output *o, *j = NULL;
606
607 pa_assert(s);
608 pa_assert_ctl_context();
609
610     /* See pa_sink_unlink() for a couple of comments on how this function
611 * works. */
612
613 linked = PA_SOURCE_IS_LINKED(s->state);
614
615 if (linked)
616 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_UNLINK], s);
617
618 if (s->state != PA_SOURCE_UNLINKED)
619 pa_namereg_unregister(s->core, s->name);
620 pa_idxset_remove_by_data(s->core->sources, s, NULL);
621
622 if (s->card)
623 pa_idxset_remove_by_data(s->card->sources, s, NULL);
624
625 while ((o = pa_idxset_first(s->outputs, NULL))) {
626 pa_assert(o != j);
627 pa_source_output_kill(o);
628 j = o;
629 }
630
631 if (linked)
632 source_set_state(s, PA_SOURCE_UNLINKED);
633 else
634 s->state = PA_SOURCE_UNLINKED;
635
636 reset_callbacks(s);
637
638 if (linked) {
639 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_REMOVE, s->index);
640 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_UNLINK_POST], s);
641 }
642 }
643
644 /* Called from main context */
645 static void source_free(pa_object *o) {
646 pa_source_output *so;
647 pa_source *s = PA_SOURCE(o);
648
649 pa_assert(s);
650 pa_assert_ctl_context();
651 pa_assert(pa_source_refcnt(s) == 0);
652
653 if (PA_SOURCE_IS_LINKED(s->state))
654 pa_source_unlink(s);
655
656 pa_log_info("Freeing source %u \"%s\"", s->index, s->name);
657
658 pa_idxset_free(s->outputs, NULL, NULL);
659
660 while ((so = pa_hashmap_steal_first(s->thread_info.outputs)))
661 pa_source_output_unref(so);
662
663 pa_hashmap_free(s->thread_info.outputs, NULL, NULL);
664
665 if (s->silence.memblock)
666 pa_memblock_unref(s->silence.memblock);
667
668 pa_xfree(s->name);
669 pa_xfree(s->driver);
670
671 if (s->proplist)
672 pa_proplist_free(s->proplist);
673
674 if (s->ports) {
675 pa_device_port *p;
676
677 while ((p = pa_hashmap_steal_first(s->ports)))
678 pa_device_port_free(p);
679
680 pa_hashmap_free(s->ports, NULL, NULL);
681 }
682
683 pa_xfree(s);
684 }
685
686 /* Called from main context, and not while the IO thread is active, please */
687 void pa_source_set_asyncmsgq(pa_source *s, pa_asyncmsgq *q) {
688 pa_source_assert_ref(s);
689 pa_assert_ctl_context();
690
691 s->asyncmsgq = q;
692 }
693
694 /* Called from main context, and not while the IO thread is active, please */
695 void pa_source_update_flags(pa_source *s, pa_source_flags_t mask, pa_source_flags_t value) {
696 pa_source_assert_ref(s);
697 pa_assert_ctl_context();
698
699 if (mask == 0)
700 return;
701
702 /* For now, allow only a minimal set of flags to be changed. */
703 pa_assert((mask & ~(PA_SOURCE_DYNAMIC_LATENCY|PA_SOURCE_LATENCY)) == 0);
704
705 s->flags = (s->flags & ~mask) | (value & mask);
706 }
707
708 /* Called from IO context, or before _put() from main context */
709 void pa_source_set_rtpoll(pa_source *s, pa_rtpoll *p) {
710 pa_source_assert_ref(s);
711 pa_source_assert_io_context(s);
712
713 s->thread_info.rtpoll = p;
714 }
715
716 /* Called from main context */
717 int pa_source_update_status(pa_source*s) {
718 pa_source_assert_ref(s);
719 pa_assert_ctl_context();
720 pa_assert(PA_SOURCE_IS_LINKED(s->state));
721
722 if (s->state == PA_SOURCE_SUSPENDED)
723 return 0;
724
725 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
726 }
727
728 /* Called from main context */
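/* suspend_cause is a bitmask of PA_SUSPEND_* flags; the source stays
 * suspended until every cause has been cleared again. */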
729 int pa_source_suspend(pa_source *s, pa_bool_t suspend, pa_suspend_cause_t cause) {
730 pa_source_assert_ref(s);
731 pa_assert_ctl_context();
732 pa_assert(PA_SOURCE_IS_LINKED(s->state));
733 pa_assert(cause != 0);
734
735 if (s->monitor_of && cause != PA_SUSPEND_PASSTHROUGH)
736 return -PA_ERR_NOTSUPPORTED;
737
738 if (suspend)
739 s->suspend_cause |= cause;
740 else
741 s->suspend_cause &= ~cause;
742
743 if ((pa_source_get_state(s) == PA_SOURCE_SUSPENDED) == !!s->suspend_cause)
744 return 0;
745
746 pa_log_debug("Suspend cause of source %s is 0x%04x, %s", s->name, s->suspend_cause, s->suspend_cause ? "suspending" : "resuming");
747
748 if (s->suspend_cause)
749 return source_set_state(s, PA_SOURCE_SUSPENDED);
750 else
751 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
752 }
753
754 /* Called from main context */
755 int pa_source_sync_suspend(pa_source *s) {
756 pa_sink_state_t state;
757
758 pa_source_assert_ref(s);
759 pa_assert_ctl_context();
760 pa_assert(PA_SOURCE_IS_LINKED(s->state));
761 pa_assert(s->monitor_of);
762
763 state = pa_sink_get_state(s->monitor_of);
764
765 if (state == PA_SINK_SUSPENDED)
766 return source_set_state(s, PA_SOURCE_SUSPENDED);
767
768 pa_assert(PA_SINK_IS_OPENED(state));
769
770 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
771 }
772
773 /* Called from main context */
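/* Detach every output that can be moved from this source and collect it in a
 * queue, to be reattached later with pa_source_move_all_finish() or released
 * with pa_source_move_all_fail(). */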
774 pa_queue *pa_source_move_all_start(pa_source *s, pa_queue *q) {
775 pa_source_output *o, *n;
776 uint32_t idx;
777
778 pa_source_assert_ref(s);
779 pa_assert_ctl_context();
780 pa_assert(PA_SOURCE_IS_LINKED(s->state));
781
782 if (!q)
783 q = pa_queue_new();
784
785 for (o = PA_SOURCE_OUTPUT(pa_idxset_first(s->outputs, &idx)); o; o = n) {
786 n = PA_SOURCE_OUTPUT(pa_idxset_next(s->outputs, &idx));
787
788 pa_source_output_ref(o);
789
790 if (pa_source_output_start_move(o) >= 0)
791 pa_queue_push(q, o);
792 else
793 pa_source_output_unref(o);
794 }
795
796 return q;
797 }
798
799 /* Called from main context */
800 void pa_source_move_all_finish(pa_source *s, pa_queue *q, pa_bool_t save) {
801 pa_source_output *o;
802
803 pa_source_assert_ref(s);
804 pa_assert_ctl_context();
805 pa_assert(PA_SOURCE_IS_LINKED(s->state));
806 pa_assert(q);
807
808 while ((o = PA_SOURCE_OUTPUT(pa_queue_pop(q)))) {
809 if (pa_source_output_finish_move(o, s, save) < 0)
810 pa_source_output_fail_move(o);
811
812 pa_source_output_unref(o);
813 }
814
815 pa_queue_free(q, NULL, NULL);
816 }
817
818 /* Called from main context */
819 void pa_source_move_all_fail(pa_queue *q) {
820 pa_source_output *o;
821
822 pa_assert_ctl_context();
823 pa_assert(q);
824
825 while ((o = PA_SOURCE_OUTPUT(pa_queue_pop(q)))) {
826 pa_source_output_fail_move(o);
827 pa_source_output_unref(o);
828 }
829
830 pa_queue_free(q, NULL, NULL);
831 }
832
833 /* Called from IO thread context */
834 void pa_source_process_rewind(pa_source *s, size_t nbytes) {
835 pa_source_output *o;
836 void *state = NULL;
837
838 pa_source_assert_ref(s);
839 pa_source_assert_io_context(s);
840 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
841
842 if (nbytes <= 0)
843 return;
844
845 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
846 return;
847
848 pa_log_debug("Processing rewind...");
849
850 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
851 pa_source_output_assert_ref(o);
852 pa_source_output_process_rewind(o, nbytes);
853 }
854 }
855
856 /* Called from IO thread context */
857 void pa_source_post(pa_source*s, const pa_memchunk *chunk) {
858 pa_source_output *o;
859 void *state = NULL;
860
861 pa_source_assert_ref(s);
862 pa_source_assert_io_context(s);
863 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
864 pa_assert(chunk);
865
866 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
867 return;
868
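/* Soft volume/mute has to be applied in software: take a writable copy of the
 * chunk, silence or scale it, and push that copy to the outputs (outputs
 * connected via direct_on_input are fed separately through
 * pa_source_post_direct()). */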
869 if (s->thread_info.soft_muted || !pa_cvolume_is_norm(&s->thread_info.soft_volume)) {
870 pa_memchunk vchunk = *chunk;
871
872 pa_memblock_ref(vchunk.memblock);
873 pa_memchunk_make_writable(&vchunk, 0);
874
875 if (s->thread_info.soft_muted || pa_cvolume_is_muted(&s->thread_info.soft_volume))
876 pa_silence_memchunk(&vchunk, &s->sample_spec);
877 else
878 pa_volume_memchunk(&vchunk, &s->sample_spec, &s->thread_info.soft_volume);
879
880 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL))) {
881 pa_source_output_assert_ref(o);
882
883 if (!o->thread_info.direct_on_input)
884 pa_source_output_push(o, &vchunk);
885 }
886
887 pa_memblock_unref(vchunk.memblock);
888 } else {
889
890 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL))) {
891 pa_source_output_assert_ref(o);
892
893 if (!o->thread_info.direct_on_input)
894 pa_source_output_push(o, chunk);
895 }
896 }
897 }
898
899 /* Called from IO thread context */
900 void pa_source_post_direct(pa_source*s, pa_source_output *o, const pa_memchunk *chunk) {
901 pa_source_assert_ref(s);
902 pa_source_assert_io_context(s);
903 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
904 pa_source_output_assert_ref(o);
905 pa_assert(o->thread_info.direct_on_input);
906 pa_assert(chunk);
907
908 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
909 return;
910
911 if (s->thread_info.soft_muted || !pa_cvolume_is_norm(&s->thread_info.soft_volume)) {
912 pa_memchunk vchunk = *chunk;
913
914 pa_memblock_ref(vchunk.memblock);
915 pa_memchunk_make_writable(&vchunk, 0);
916
917 if (s->thread_info.soft_muted || pa_cvolume_is_muted(&s->thread_info.soft_volume))
918 pa_silence_memchunk(&vchunk, &s->sample_spec);
919 else
920 pa_volume_memchunk(&vchunk, &s->sample_spec, &s->thread_info.soft_volume);
921
922 pa_source_output_push(o, &vchunk);
923
924 pa_memblock_unref(vchunk.memblock);
925 } else
926 pa_source_output_push(o, chunk);
927 }
928
929 /* Called from main thread */
930 pa_bool_t pa_source_update_rate(pa_source *s, uint32_t rate, pa_bool_t passthrough)
931 {
932 if (s->update_rate) {
933 uint32_t desired_rate = rate;
934 uint32_t default_rate = s->default_sample_rate;
935 uint32_t alternate_rate = s->alternate_sample_rate;
936 pa_bool_t use_alternate = FALSE;
937
938 if (PA_UNLIKELY(default_rate == alternate_rate)) {
939 pa_log_warn("Default and alternate sample rates are the same.");
940 return FALSE;
941 }
942
943 if (PA_SOURCE_IS_RUNNING(s->state)) {
944             pa_log_info("Cannot update rate, SOURCE_IS_RUNNING, will keep using %u Hz",
945 s->sample_spec.rate);
946 return FALSE;
947 }
948
949 if (PA_UNLIKELY (desired_rate < 8000 ||
950 desired_rate > PA_RATE_MAX))
951 return FALSE;
952
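/* Outside passthrough mode only the configured default and alternate rates
 * are used; pick whichever of the two belongs to the same rate family
 * (multiples of 4000 Hz vs. multiples of 11025 Hz) as the requested rate. */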
953 if (!passthrough) {
954 pa_assert(default_rate % 4000 || default_rate % 11025);
955 pa_assert(alternate_rate % 4000 || alternate_rate % 11025);
956
957 if (default_rate % 4000) {
958 /* default is a 11025 multiple */
959 if ((alternate_rate % 4000 == 0) && (desired_rate % 4000 == 0))
960 use_alternate=TRUE;
961 } else {
962                 /* default is a 4000 multiple */
963 if ((alternate_rate % 11025 == 0) && (desired_rate % 11025 == 0))
964 use_alternate=TRUE;
965 }
966
967 if (use_alternate)
968 desired_rate = alternate_rate;
969 else
970 desired_rate = default_rate;
971 } else {
972 desired_rate = rate; /* use stream sampling rate, discard default/alternate settings */
973 }
974
975 if (passthrough || pa_source_used_by(s) == 0) {
976 pa_source_suspend(s, TRUE, PA_SUSPEND_IDLE); /* needed before rate update, will be resumed automatically */
977 }
978
979 if (s->update_rate(s, desired_rate) == TRUE) {
980 pa_log_info("Changed sampling rate successfully ");
981 return TRUE;
982 }
983 }
984 return FALSE;
985 }
986
987 /* Called from main thread */
988 pa_usec_t pa_source_get_latency(pa_source *s) {
989 pa_usec_t usec;
990
991 pa_source_assert_ref(s);
992 pa_assert_ctl_context();
993 pa_assert(PA_SOURCE_IS_LINKED(s->state));
994
995 if (s->state == PA_SOURCE_SUSPENDED)
996 return 0;
997
998 if (!(s->flags & PA_SOURCE_LATENCY))
999 return 0;
1000
1001 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_LATENCY, &usec, 0, NULL) == 0);
1002
1003 return usec;
1004 }
1005
1006 /* Called from IO thread */
1007 pa_usec_t pa_source_get_latency_within_thread(pa_source *s) {
1008 pa_usec_t usec = 0;
1009 pa_msgobject *o;
1010
1011 pa_source_assert_ref(s);
1012 pa_source_assert_io_context(s);
1013 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
1014
1015 /* The returned value is supposed to be in the time domain of the sound card! */
1016
1017 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
1018 return 0;
1019
1020 if (!(s->flags & PA_SOURCE_LATENCY))
1021 return 0;
1022
1023 o = PA_MSGOBJECT(s);
1024
1025 /* FIXME: We probably should make this a proper vtable callback instead of going through process_msg() */
1026
1027 if (o->process_msg(o, PA_SOURCE_MESSAGE_GET_LATENCY, &usec, 0, NULL) < 0)
1028 return -1;
1029
1030 return usec;
1031 }
1032
1033 /* Called from the main thread (and also from the IO thread while the main
1034 * thread is waiting).
1035 *
1036 * When a source uses volume sharing, it never has the PA_SOURCE_FLAT_VOLUME flag
1037 * set. Instead, flat volume mode is detected by checking whether the root source
1038 * has the flag set. */
1039 pa_bool_t pa_source_flat_volume_enabled(pa_source *s) {
1040 pa_source_assert_ref(s);
1041
1042 s = pa_source_get_master(s);
1043
1044 if (PA_LIKELY(s))
1045 return (s->flags & PA_SOURCE_FLAT_VOLUME);
1046 else
1047 return FALSE;
1048 }
1049
1050 /* Called from the main thread (and also from the IO thread while the main
1051 * thread is waiting). */
1052 pa_source *pa_source_get_master(pa_source *s) {
1053 pa_source_assert_ref(s);
1054
1055 while (s && (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1056 if (PA_UNLIKELY(!s->output_from_master))
1057 return NULL;
1058
1059 s = s->output_from_master->source;
1060 }
1061
1062 return s;
1063 }
1064
1065 /* Called from main context */
1066 pa_bool_t pa_source_is_passthrough(pa_source *s) {
1067
1068 pa_source_assert_ref(s);
1069
1070 /* NB Currently only monitor sources support passthrough mode */
1071 return (s->monitor_of && pa_sink_is_passthrough(s->monitor_of));
1072 }
1073
1074 /* Called from main context */
1075 void pa_source_enter_passthrough(pa_source *s) {
1076 pa_cvolume volume;
1077
1078 /* set the volume to NORM */
1079 s->saved_volume = *pa_source_get_volume(s, TRUE);
1080 s->saved_save_volume = s->save_volume;
1081
1082 pa_cvolume_set(&volume, s->sample_spec.channels, PA_MIN(s->base_volume, PA_VOLUME_NORM));
1083 pa_source_set_volume(s, &volume, TRUE, FALSE);
1084 }
1085
1086 /* Called from main context */
1087 void pa_source_leave_passthrough(pa_source *s) {
1088 /* Restore source volume to what it was before we entered passthrough mode */
1089 pa_source_set_volume(s, &s->saved_volume, TRUE, s->saved_save_volume);
1090
1091 pa_cvolume_init(&s->saved_volume);
1092 s->saved_save_volume = FALSE;
1093 }
1094
1095 /* Called from main context. */
1096 static void compute_reference_ratio(pa_source_output *o) {
1097 unsigned c = 0;
1098 pa_cvolume remapped;
1099
1100 pa_assert(o);
1101 pa_assert(pa_source_flat_volume_enabled(o->source));
1102
1103 /*
1104 * Calculates the reference ratio from the source's reference
1105 * volume. This basically calculates:
1106 *
1107 * o->reference_ratio = o->volume / o->source->reference_volume
1108 */
1109
1110 remapped = o->source->reference_volume;
1111 pa_cvolume_remap(&remapped, &o->source->channel_map, &o->channel_map);
1112
1113 o->reference_ratio.channels = o->sample_spec.channels;
1114
1115 for (c = 0; c < o->sample_spec.channels; c++) {
1116
1117 /* We don't update when the source volume is 0 anyway */
1118 if (remapped.values[c] <= PA_VOLUME_MUTED)
1119 continue;
1120
1121 /* Don't update the reference ratio unless necessary */
1122 if (pa_sw_volume_multiply(
1123 o->reference_ratio.values[c],
1124 remapped.values[c]) == o->volume.values[c])
1125 continue;
1126
1127 o->reference_ratio.values[c] = pa_sw_volume_divide(
1128 o->volume.values[c],
1129 remapped.values[c]);
1130 }
1131 }
1132
1133 /* Called from main context. Only called for the root source in volume sharing
1134 * cases, except for internal recursive calls. */
1135 static void compute_reference_ratios(pa_source *s) {
1136 uint32_t idx;
1137 pa_source_output *o;
1138
1139 pa_source_assert_ref(s);
1140 pa_assert_ctl_context();
1141 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1142 pa_assert(pa_source_flat_volume_enabled(s));
1143
1144 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1145 compute_reference_ratio(o);
1146
1147 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1148 compute_reference_ratios(o->destination_source);
1149 }
1150 }
1151
1152 /* Called from main context. Only called for the root source in volume sharing
1153 * cases, except for internal recursive calls. */
1154 static void compute_real_ratios(pa_source *s) {
1155 pa_source_output *o;
1156 uint32_t idx;
1157
1158 pa_source_assert_ref(s);
1159 pa_assert_ctl_context();
1160 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1161 pa_assert(pa_source_flat_volume_enabled(s));
1162
1163 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1164 unsigned c;
1165 pa_cvolume remapped;
1166
1167 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1168         /* The origin source uses volume sharing, so this output's real ratio
1169          * is handled as a special case - the real ratio must be 0 dB, and
1170          * as a result o->soft_volume must equal o->volume_factor. */
1171 pa_cvolume_reset(&o->real_ratio, o->real_ratio.channels);
1172 o->soft_volume = o->volume_factor;
1173
1174 compute_real_ratios(o->destination_source);
1175
1176 continue;
1177 }
1178
1179 /*
1180 * This basically calculates:
1181 *
1182          *     o->real_ratio := o->volume / s->real_volume
1183          *     o->soft_volume := o->real_ratio * o->volume_factor
1184 */
1185
1186 remapped = s->real_volume;
1187 pa_cvolume_remap(&remapped, &s->channel_map, &o->channel_map);
1188
1189 o->real_ratio.channels = o->sample_spec.channels;
1190 o->soft_volume.channels = o->sample_spec.channels;
1191
1192 for (c = 0; c < o->sample_spec.channels; c++) {
1193
1194 if (remapped.values[c] <= PA_VOLUME_MUTED) {
1195 /* We leave o->real_ratio untouched */
1196 o->soft_volume.values[c] = PA_VOLUME_MUTED;
1197 continue;
1198 }
1199
1200 /* Don't lose accuracy unless necessary */
1201 if (pa_sw_volume_multiply(
1202 o->real_ratio.values[c],
1203 remapped.values[c]) != o->volume.values[c])
1204
1205 o->real_ratio.values[c] = pa_sw_volume_divide(
1206 o->volume.values[c],
1207 remapped.values[c]);
1208
1209 o->soft_volume.values[c] = pa_sw_volume_multiply(
1210 o->real_ratio.values[c],
1211 o->volume_factor.values[c]);
1212 }
1213
1214 /* We don't copy the soft_volume to the thread_info data
1215 * here. That must be done by the caller */
1216 }
1217 }
1218
1219 static pa_cvolume *cvolume_remap_minimal_impact(
1220 pa_cvolume *v,
1221 const pa_cvolume *template,
1222 const pa_channel_map *from,
1223 const pa_channel_map *to) {
1224
1225 pa_cvolume t;
1226
1227 pa_assert(v);
1228 pa_assert(template);
1229 pa_assert(from);
1230 pa_assert(to);
1231 pa_assert(pa_cvolume_compatible_with_channel_map(v, from));
1232 pa_assert(pa_cvolume_compatible_with_channel_map(template, to));
1233
1234 /* Much like pa_cvolume_remap(), but tries to minimize impact when
1235 * mapping from source output to source volumes:
1236 *
1237 * If template is a possible remapping from v it is used instead
1238 * of remapping anew.
1239 *
1240 * If the channel maps don't match we set an all-channel volume on
1241 * the source to ensure that changing a volume on one stream has no
1242 * effect that cannot be compensated for in another stream that
1243 * does not have the same channel map as the source. */
1244
1245 if (pa_channel_map_equal(from, to))
1246 return v;
1247
1248 t = *template;
1249 if (pa_cvolume_equal(pa_cvolume_remap(&t, to, from), v)) {
1250 *v = *template;
1251 return v;
1252 }
1253
1254 pa_cvolume_set(v, to->channels, pa_cvolume_max(v));
1255 return v;
1256 }
1257
1258 /* Called from main thread. Only called for the root source in volume sharing
1259 * cases, except for internal recursive calls. */
1260 static void get_maximum_output_volume(pa_source *s, pa_cvolume *max_volume, const pa_channel_map *channel_map) {
1261 pa_source_output *o;
1262 uint32_t idx;
1263
1264 pa_source_assert_ref(s);
1265 pa_assert(max_volume);
1266 pa_assert(channel_map);
1267 pa_assert(pa_source_flat_volume_enabled(s));
1268
1269 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1270 pa_cvolume remapped;
1271
1272 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1273 get_maximum_output_volume(o->destination_source, max_volume, channel_map);
1274
1275 /* Ignore this output. The origin source uses volume sharing, so this
1276 * output's volume will be set to be equal to the root source's real
1277 * volume. Obviously this output's current volume must not then
1278 * affect what the root source's real volume will be. */
1279 continue;
1280 }
1281
1282 remapped = o->volume;
1283 cvolume_remap_minimal_impact(&remapped, max_volume, &o->channel_map, channel_map);
1284 pa_cvolume_merge(max_volume, max_volume, &remapped);
1285 }
1286 }
1287
1288 /* Called from main thread. Only called for the root source in volume sharing
1289 * cases, except for internal recursive calls. */
1290 static pa_bool_t has_outputs(pa_source *s) {
1291 pa_source_output *o;
1292 uint32_t idx;
1293
1294 pa_source_assert_ref(s);
1295
1296 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1297 if (!o->destination_source || !(o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) || has_outputs(o->destination_source))
1298 return TRUE;
1299 }
1300
1301 return FALSE;
1302 }
1303
1304 /* Called from main thread. Only called for the root source in volume sharing
1305 * cases, except for internal recursive calls. */
1306 static void update_real_volume(pa_source *s, const pa_cvolume *new_volume, pa_channel_map *channel_map) {
1307 pa_source_output *o;
1308 uint32_t idx;
1309
1310 pa_source_assert_ref(s);
1311 pa_assert(new_volume);
1312 pa_assert(channel_map);
1313
1314 s->real_volume = *new_volume;
1315 pa_cvolume_remap(&s->real_volume, channel_map, &s->channel_map);
1316
1317 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1318 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1319 if (pa_source_flat_volume_enabled(s)) {
1320 pa_cvolume old_volume = o->volume;
1321
1322 /* Follow the root source's real volume. */
1323 o->volume = *new_volume;
1324 pa_cvolume_remap(&o->volume, channel_map, &o->channel_map);
1325 compute_reference_ratio(o);
1326
1327 /* The volume changed, let's tell people so */
1328 if (!pa_cvolume_equal(&old_volume, &o->volume)) {
1329 if (o->volume_changed)
1330 o->volume_changed(o);
1331
1332 pa_subscription_post(o->core, PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT|PA_SUBSCRIPTION_EVENT_CHANGE, o->index);
1333 }
1334 }
1335
1336 update_real_volume(o->destination_source, new_volume, channel_map);
1337 }
1338 }
1339 }
1340
1341 /* Called from main thread. Only called for the root source in shared volume
1342 * cases. */
1343 static void compute_real_volume(pa_source *s) {
1344 pa_source_assert_ref(s);
1345 pa_assert_ctl_context();
1346 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1347 pa_assert(pa_source_flat_volume_enabled(s));
1348 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1349
1350 /* This determines the maximum volume of all streams and sets
1351 * s->real_volume accordingly. */
1352
1353 if (!has_outputs(s)) {
1354 /* In the special case that we have no source outputs we leave the
1355 * volume unmodified. */
1356 update_real_volume(s, &s->reference_volume, &s->channel_map);
1357 return;
1358 }
1359
1360 pa_cvolume_mute(&s->real_volume, s->channel_map.channels);
1361
1362 /* First let's determine the new maximum volume of all outputs
1363 * connected to this source */
1364 get_maximum_output_volume(s, &s->real_volume, &s->channel_map);
1365 update_real_volume(s, &s->real_volume, &s->channel_map);
1366
1367 /* Then, let's update the real ratios/soft volumes of all outputs
1368 * connected to this source */
1369 compute_real_ratios(s);
1370 }
1371
1372 /* Called from main thread. Only called for the root source in shared volume
1373 * cases, except for internal recursive calls. */
1374 static void propagate_reference_volume(pa_source *s) {
1375 pa_source_output *o;
1376 uint32_t idx;
1377
1378 pa_source_assert_ref(s);
1379 pa_assert_ctl_context();
1380 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1381 pa_assert(pa_source_flat_volume_enabled(s));
1382
1383 /* This is called whenever the source volume changes that is not
1384 * caused by a source output volume change. We need to fix up the
1385 * source output volumes accordingly */
1386
1387 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1388 pa_cvolume old_volume;
1389
1390 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1391 propagate_reference_volume(o->destination_source);
1392
1393 /* Since the origin source uses volume sharing, this output's volume
1394 * needs to be updated to match the root source's real volume, but
1395 * that will be done later in update_shared_real_volume(). */
1396 continue;
1397 }
1398
1399 old_volume = o->volume;
1400
1401 /* This basically calculates:
1402 *
1403 * o->volume := o->reference_volume * o->reference_ratio */
1404
1405 o->volume = s->reference_volume;
1406 pa_cvolume_remap(&o->volume, &s->channel_map, &o->channel_map);
1407 pa_sw_cvolume_multiply(&o->volume, &o->volume, &o->reference_ratio);
1408
1409 /* The volume changed, let's tell people so */
1410 if (!pa_cvolume_equal(&old_volume, &o->volume)) {
1411
1412 if (o->volume_changed)
1413 o->volume_changed(o);
1414
1415 pa_subscription_post(o->core, PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT|PA_SUBSCRIPTION_EVENT_CHANGE, o->index);
1416 }
1417 }
1418 }
1419
1420 /* Called from main thread. Only called for the root source in volume sharing
1421 * cases, except for internal recursive calls. The return value indicates
1422 * whether any reference volume actually changed. */
1423 static pa_bool_t update_reference_volume(pa_source *s, const pa_cvolume *v, const pa_channel_map *channel_map, pa_bool_t save) {
1424 pa_cvolume volume;
1425 pa_bool_t reference_volume_changed;
1426 pa_source_output *o;
1427 uint32_t idx;
1428
1429 pa_source_assert_ref(s);
1430 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1431 pa_assert(v);
1432 pa_assert(channel_map);
1433 pa_assert(pa_cvolume_valid(v));
1434
1435 volume = *v;
1436 pa_cvolume_remap(&volume, channel_map, &s->channel_map);
1437
1438 reference_volume_changed = !pa_cvolume_equal(&volume, &s->reference_volume);
1439 s->reference_volume = volume;
1440
1441 s->save_volume = (!reference_volume_changed && s->save_volume) || save;
1442
1443 if (reference_volume_changed)
1444 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1445 else if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1446 /* If the root source's volume doesn't change, then there can't be any
1447          * changes in the other sources in the source tree either.
1448 *
1449 * It's probably theoretically possible that even if the root source's
1450 * volume changes slightly, some filter source doesn't change its volume
1451 * due to rounding errors. If that happens, we still want to propagate
1452 * the changed root source volume to the sources connected to the
1453 * intermediate source that didn't change its volume. This theoretical
1454 * possibility is the reason why we have that !(s->flags &
1455 * PA_SOURCE_SHARE_VOLUME_WITH_MASTER) condition. Probably nobody would
1456          * notice even if we always returned FALSE here when
1457 * reference_volume_changed is FALSE. */
1458 return FALSE;
1459
1460 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1461 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1462 update_reference_volume(o->destination_source, v, channel_map, FALSE);
1463 }
1464
1465 return TRUE;
1466 }
1467
1468 /* Called from main thread */
1469 void pa_source_set_volume(
1470 pa_source *s,
1471 const pa_cvolume *volume,
1472 pa_bool_t send_msg,
1473 pa_bool_t save) {
1474
1475 pa_cvolume new_reference_volume;
1476 pa_source *root_source;
1477
1478 pa_source_assert_ref(s);
1479 pa_assert_ctl_context();
1480 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1481 pa_assert(!volume || pa_cvolume_valid(volume));
1482 pa_assert(volume || pa_source_flat_volume_enabled(s));
1483 pa_assert(!volume || volume->channels == 1 || pa_cvolume_compatible(volume, &s->sample_spec));
1484
1485 /* make sure we don't change the volume in PASSTHROUGH mode ...
1486 * ... *except* if we're being invoked to reset the volume to ensure 0 dB gain */
1487 if (pa_source_is_passthrough(s) && (!volume || !pa_cvolume_is_norm(volume))) {
1488 pa_log_warn("Cannot change volume, Source is monitor of a PASSTHROUGH sink");
1489 return;
1490 }
1491
1492 /* In case of volume sharing, the volume is set for the root source first,
1493 * from which it's then propagated to the sharing sources. */
1494 root_source = pa_source_get_master(s);
1495
1496 if (PA_UNLIKELY(!root_source))
1497 return;
1498
1499 /* As a special exception we accept mono volumes on all sources --
1500 * even on those with more complex channel maps */
1501
1502 if (volume) {
1503 if (pa_cvolume_compatible(volume, &s->sample_spec))
1504 new_reference_volume = *volume;
1505 else {
1506 new_reference_volume = s->reference_volume;
1507 pa_cvolume_scale(&new_reference_volume, pa_cvolume_max(volume));
1508 }
1509
1510 pa_cvolume_remap(&new_reference_volume, &s->channel_map, &root_source->channel_map);
1511 }
1512
1513 /* If volume is NULL we synchronize the source's real and reference
1514 * volumes with the stream volumes. If it is not NULL we update
1515 * the reference_volume with it. */
1516
1517 if (volume) {
1518 if (update_reference_volume(root_source, &new_reference_volume, &root_source->channel_map, save)) {
1519 if (pa_source_flat_volume_enabled(root_source)) {
1520 /* OK, propagate this volume change back to the outputs */
1521 propagate_reference_volume(root_source);
1522
1523 /* And now recalculate the real volume */
1524 compute_real_volume(root_source);
1525 } else
1526 update_real_volume(root_source, &root_source->reference_volume, &root_source->channel_map);
1527 }
1528
1529 } else {
1530 pa_assert(pa_source_flat_volume_enabled(root_source));
1531
1532 /* Ok, let's determine the new real volume */
1533 compute_real_volume(root_source);
1534
1535 /* Let's 'push' the reference volume if necessary */
1536 pa_cvolume_merge(&new_reference_volume, &s->reference_volume, &root_source->real_volume);
1537         /* If the source and its root don't have the same number of channels, we need to remap */
1538 if (s != root_source && !pa_channel_map_equal(&s->channel_map, &root_source->channel_map))
1539 pa_cvolume_remap(&new_reference_volume, &s->channel_map, &root_source->channel_map);
1540 update_reference_volume(root_source, &new_reference_volume, &root_source->channel_map, save);
1541
1542 /* Now that the reference volume is updated, we can update the streams'
1543 * reference ratios. */
1544 compute_reference_ratios(root_source);
1545 }
1546
1547 if (root_source->set_volume) {
1548 /* If we have a function set_volume(), then we do not apply a
1549 * soft volume by default. However, set_volume() is free to
1550 * apply one to root_source->soft_volume */
1551
1552 pa_cvolume_reset(&root_source->soft_volume, root_source->sample_spec.channels);
1553 if (!(root_source->flags & PA_SOURCE_DEFERRED_VOLUME))
1554 root_source->set_volume(root_source);
1555
1556 } else
1557 /* If we have no function set_volume(), then the soft volume
1558 * becomes the real volume */
1559 root_source->soft_volume = root_source->real_volume;
1560
1561 /* This tells the source that soft volume and/or real volume changed */
1562 if (send_msg)
1563 pa_assert_se(pa_asyncmsgq_send(root_source->asyncmsgq, PA_MSGOBJECT(root_source), PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL) == 0);
1564 }
1565
1566 /* Called from the io thread if sync volume is used, otherwise from the main thread.
1567 * Only to be called by source implementor */
1568 void pa_source_set_soft_volume(pa_source *s, const pa_cvolume *volume) {
1569
1570 pa_source_assert_ref(s);
1571 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1572
1573 if (s->flags & PA_SOURCE_DEFERRED_VOLUME)
1574 pa_source_assert_io_context(s);
1575 else
1576 pa_assert_ctl_context();
1577
1578 if (!volume)
1579 pa_cvolume_reset(&s->soft_volume, s->sample_spec.channels);
1580 else
1581 s->soft_volume = *volume;
1582
1583 if (PA_SOURCE_IS_LINKED(s->state) && !(s->flags & PA_SOURCE_DEFERRED_VOLUME))
1584 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_VOLUME, NULL, 0, NULL) == 0);
1585 else
1586 s->thread_info.soft_volume = s->soft_volume;
1587 }
1588
1589 /* Called from the main thread. Only called for the root source in volume sharing
1590 * cases, except for internal recursive calls. */
1591 static void propagate_real_volume(pa_source *s, const pa_cvolume *old_real_volume) {
1592 pa_source_output *o;
1593 uint32_t idx;
1594
1595 pa_source_assert_ref(s);
1596 pa_assert(old_real_volume);
1597 pa_assert_ctl_context();
1598 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1599
1600 /* This is called when the hardware's real volume changes due to
1601 * some external event. We copy the real volume into our
1602 * reference volume and then rebuild the stream volumes based on
1603      * o->real_ratio which should stay fixed. */
1604
1605 if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1606 if (pa_cvolume_equal(old_real_volume, &s->real_volume))
1607 return;
1608
1609 /* 1. Make the real volume the reference volume */
1610 update_reference_volume(s, &s->real_volume, &s->channel_map, TRUE);
1611 }
1612
1613 if (pa_source_flat_volume_enabled(s)) {
1614
1615 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1616 pa_cvolume old_volume = o->volume;
1617
1618 /* 2. Since the source's reference and real volumes are equal
1619 * now our ratios should be too. */
1620 o->reference_ratio = o->real_ratio;
1621
1622 /* 3. Recalculate the new stream reference volume based on the
1623              * reference ratio and the source's reference volume.
1624 *
1625 * This basically calculates:
1626 *
1627 * o->volume = s->reference_volume * o->reference_ratio
1628 *
1629 * This is identical to propagate_reference_volume() */
1630 o->volume = s->reference_volume;
1631 pa_cvolume_remap(&o->volume, &s->channel_map, &o->channel_map);
1632 pa_sw_cvolume_multiply(&o->volume, &o->volume, &o->reference_ratio);
1633
1634 /* Notify if something changed */
1635 if (!pa_cvolume_equal(&old_volume, &o->volume)) {
1636
1637 if (o->volume_changed)
1638 o->volume_changed(o);
1639
1640 pa_subscription_post(o->core, PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT|PA_SUBSCRIPTION_EVENT_CHANGE, o->index);
1641 }
1642
1643 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1644 propagate_real_volume(o->destination_source, old_real_volume);
1645 }
1646 }
1647
1648 /* Something got changed in the hardware. It probably makes sense
1649 * to save changed hw settings given that hw volume changes not
1650 * triggered by PA are almost certainly done by the user. */
1651 if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1652 s->save_volume = TRUE;
1653 }
1654
1655 /* Called from io thread */
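/* Ask the main thread to re-read the hardware volume and mute state. */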
1656 void pa_source_update_volume_and_mute(pa_source *s) {
1657 pa_assert(s);
1658 pa_source_assert_io_context(s);
1659
1660 pa_asyncmsgq_post(pa_thread_mq_get()->outq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_UPDATE_VOLUME_AND_MUTE, NULL, 0, NULL, NULL);
1661 }
1662
1663 /* Called from main thread */
1664 const pa_cvolume *pa_source_get_volume(pa_source *s, pa_bool_t force_refresh) {
1665 pa_source_assert_ref(s);
1666 pa_assert_ctl_context();
1667 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1668
1669 if (s->refresh_volume || force_refresh) {
1670 struct pa_cvolume old_real_volume;
1671
1672 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1673
1674 old_real_volume = s->real_volume;
1675
1676 if (!(s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->get_volume)
1677 s->get_volume(s);
1678
1679 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_VOLUME, NULL, 0, NULL) == 0);
1680
1681 update_real_volume(s, &s->real_volume, &s->channel_map);
1682 propagate_real_volume(s, &old_real_volume);
1683 }
1684
1685 return &s->reference_volume;
1686 }
1687
1688 /* Called from main thread. In volume sharing cases, only the root source may
1689 * call this. */
1690 void pa_source_volume_changed(pa_source *s, const pa_cvolume *new_real_volume) {
1691 pa_cvolume old_real_volume;
1692
1693 pa_source_assert_ref(s);
1694 pa_assert_ctl_context();
1695 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1696 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1697
1698 /* The source implementor may call this if the volume changed to make sure everyone is notified */
1699
1700 old_real_volume = s->real_volume;
1701 update_real_volume(s, new_real_volume, &s->channel_map);
1702 propagate_real_volume(s, &old_real_volume);
1703 }
1704
1705 /* Called from main thread */
1706 void pa_source_set_mute(pa_source *s, pa_bool_t mute, pa_bool_t save) {
1707 pa_bool_t old_muted;
1708
1709 pa_source_assert_ref(s);
1710 pa_assert_ctl_context();
1711 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1712
1713 old_muted = s->muted;
1714 s->muted = mute;
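/* Keep the saved flag if the mute state is unchanged and was already saved; otherwise honour the caller's save request. */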
1715 s->save_muted = (old_muted == s->muted && s->save_muted) || save;
1716
1717 if (!(s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->set_mute)
1718 s->set_mute(s);
1719
1720 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MUTE, NULL, 0, NULL) == 0);
1721
1722 if (old_muted != s->muted)
1723 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1724 }
1725
1726 /* Called from main thread */
1727 pa_bool_t pa_source_get_mute(pa_source *s, pa_bool_t force_refresh) {
1728
1729 pa_source_assert_ref(s);
1730 pa_assert_ctl_context();
1731 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1732
1733 if (s->refresh_muted || force_refresh) {
1734 pa_bool_t old_muted = s->muted;
1735
1736 if (!(s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->get_mute)
1737 s->get_mute(s);
1738
1739 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_MUTE, NULL, 0, NULL) == 0);
1740
1741 if (old_muted != s->muted) {
1742 s->save_muted = TRUE;
1743
1744 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1745
1746 /* Make sure the soft mute status stays in sync */
1747 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MUTE, NULL, 0, NULL) == 0);
1748 }
1749 }
1750
1751 return s->muted;
1752 }
1753
1754 /* Called from main thread */
1755 void pa_source_mute_changed(pa_source *s, pa_bool_t new_muted) {
1756 pa_source_assert_ref(s);
1757 pa_assert_ctl_context();
1758 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1759
1760 /* The source implementor may call this if the mute state changed to make sure everyone is notified */
1761
1762 if (s->muted == new_muted)
1763 return;
1764
1765 s->muted = new_muted;
1766 s->save_muted = TRUE;
1767
1768 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1769 }
1770
1771 /* Called from main thread */
1772 pa_bool_t pa_source_update_proplist(pa_source *s, pa_update_mode_t mode, pa_proplist *p) {
1773 pa_source_assert_ref(s);
1774 pa_assert_ctl_context();
1775
1776 if (p)
1777 pa_proplist_update(s->proplist, mode, p);
1778
1779 if (PA_SOURCE_IS_LINKED(s->state)) {
1780 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PROPLIST_CHANGED], s);
1781 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1782 }
1783
1784 return TRUE;
1785 }
1786
1787 /* Called from main thread */
1788 /* FIXME -- this should be dropped and be merged into pa_source_update_proplist() */
1789 void pa_source_set_description(pa_source *s, const char *description) {
1790 const char *old;
1791 pa_source_assert_ref(s);
1792 pa_assert_ctl_context();
1793
1794 if (!description && !pa_proplist_contains(s->proplist, PA_PROP_DEVICE_DESCRIPTION))
1795 return;
1796
1797 old = pa_proplist_gets(s->proplist, PA_PROP_DEVICE_DESCRIPTION);
1798
1799 if (old && description && pa_streq(old, description))
1800 return;
1801
1802 if (description)
1803 pa_proplist_sets(s->proplist, PA_PROP_DEVICE_DESCRIPTION, description);
1804 else
1805 pa_proplist_unset(s->proplist, PA_PROP_DEVICE_DESCRIPTION);
1806
1807 if (PA_SOURCE_IS_LINKED(s->state)) {
1808 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1809 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PROPLIST_CHANGED], s);
1810 }
1811 }
1812
1813 /* Called from main thread */
1814 unsigned pa_source_linked_by(pa_source *s) {
1815 pa_source_assert_ref(s);
1816 pa_assert_ctl_context();
1817 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1818
1819 return pa_idxset_size(s->outputs);
1820 }
1821
1822 /* Called from main thread */
1823 unsigned pa_source_used_by(pa_source *s) {
1824 unsigned ret;
1825
1826 pa_source_assert_ref(s);
1827 pa_assert_ctl_context();
1828 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1829
1830 ret = pa_idxset_size(s->outputs);
1831 pa_assert(ret >= s->n_corked);
1832
1833 return ret - s->n_corked;
1834 }
1835
1836 /* Called from main thread */
1837 unsigned pa_source_check_suspend(pa_source *s) {
1838 unsigned ret;
1839 pa_source_output *o;
1840 uint32_t idx;
1841
1842 pa_source_assert_ref(s);
1843 pa_assert_ctl_context();
1844
1845 if (!PA_SOURCE_IS_LINKED(s->state))
1846 return 0;
1847
1848 ret = 0;
1849
1850 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1851 pa_source_output_state_t st;
1852
1853 st = pa_source_output_get_state(o);
1854
1855 /* We do not assert here. It is perfectly valid for a source output to
1856 * be in the INIT state (i.e. created, marked done but not yet put)
1857 * and we should not care if it's unlinked as it won't contribute
1858 * towards our busy status.
1859 */
1860 if (!PA_SOURCE_OUTPUT_IS_LINKED(st))
1861 continue;
1862
1863 if (st == PA_SOURCE_OUTPUT_CORKED)
1864 continue;
1865
1866 if (o->flags & PA_SOURCE_OUTPUT_DONT_INHIBIT_AUTO_SUSPEND)
1867 continue;
1868
1869 ret ++;
1870 }
1871
1872 return ret;
1873 }
1874
1875 /* Called from the IO thread */
1876 static void sync_output_volumes_within_thread(pa_source *s) {
1877 pa_source_output *o;
1878 void *state = NULL;
1879
1880 pa_source_assert_ref(s);
1881 pa_source_assert_io_context(s);
1882
1883 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
1884 if (pa_cvolume_equal(&o->thread_info.soft_volume, &o->soft_volume))
1885 continue;
1886
1887 o->thread_info.soft_volume = o->soft_volume;
1888 //pa_source_output_request_rewind(o, 0, TRUE, FALSE, FALSE);
1889 }
1890 }
1891
1892 /* Called from the IO thread. Only called for the root source in volume sharing
1893 * cases, except for internal recursive calls. */
1894 static void set_shared_volume_within_thread(pa_source *s) {
1895 pa_source_output *o;
1896 void *state = NULL;
1897
1898 pa_source_assert_ref(s);
1899
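/* Handle SET_VOLUME_SYNCED for this source directly in the current (IO) thread, then recurse into any filter sources that share our volume. */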
1900 PA_MSGOBJECT(s)->process_msg(PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_VOLUME_SYNCED, NULL, 0, NULL);
1901
1902 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
1903 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1904 set_shared_volume_within_thread(o->destination_source);
1905 }
1906 }
1907
1908 /* Called from the IO thread, except for messages that are explicitly handled in the main thread (e.g. PA_SOURCE_MESSAGE_UPDATE_VOLUME_AND_MUTE) */
1909 int pa_source_process_msg(pa_msgobject *object, int code, void *userdata, int64_t offset, pa_memchunk *chunk) {
1910 pa_source *s = PA_SOURCE(object);
1911 pa_source_assert_ref(s);
1912
1913 switch ((pa_source_message_t) code) {
1914
1915 case PA_SOURCE_MESSAGE_ADD_OUTPUT: {
1916 pa_source_output *o = PA_SOURCE_OUTPUT(userdata);
1917
1918 pa_hashmap_put(s->thread_info.outputs, PA_UINT32_TO_PTR(o->index), pa_source_output_ref(o));
1919
1920 if (o->direct_on_input) {
1921 o->thread_info.direct_on_input = o->direct_on_input;
1922 pa_hashmap_put(o->thread_info.direct_on_input->thread_info.direct_outputs, PA_UINT32_TO_PTR(o->index), o);
1923 }
1924
1925 pa_assert(!o->thread_info.attached);
1926 o->thread_info.attached = TRUE;
1927
1928 if (o->attach)
1929 o->attach(o);
1930
1931 pa_source_output_set_state_within_thread(o, o->state);
1932
1933 if (o->thread_info.requested_source_latency != (pa_usec_t) -1)
1934 pa_source_output_set_requested_latency_within_thread(o, o->thread_info.requested_source_latency);
1935
1936 pa_source_output_update_max_rewind(o, s->thread_info.max_rewind);
1937
1938 /* We don't just invalidate the requested latency here,
1939 * because if we are in a move we might need to fix up the
1940 * requested latency. */
1941 pa_source_output_set_requested_latency_within_thread(o, o->thread_info.requested_source_latency);
1942
1943 /* In flat volume mode we need to update the volume as
1944 * well */
1945 return object->process_msg(object, PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL);
1946 }
1947
1948 case PA_SOURCE_MESSAGE_REMOVE_OUTPUT: {
1949 pa_source_output *o = PA_SOURCE_OUTPUT(userdata);
1950
1951 pa_source_output_set_state_within_thread(o, o->state);
1952
1953 if (o->detach)
1954 o->detach(o);
1955
1956 pa_assert(o->thread_info.attached);
1957 o->thread_info.attached = FALSE;
1958
1959 if (o->thread_info.direct_on_input) {
1960 pa_hashmap_remove(o->thread_info.direct_on_input->thread_info.direct_outputs, PA_UINT32_TO_PTR(o->index));
1961 o->thread_info.direct_on_input = NULL;
1962 }
1963
1964 if (pa_hashmap_remove(s->thread_info.outputs, PA_UINT32_TO_PTR(o->index)))
1965 pa_source_output_unref(o);
1966
1967 pa_source_invalidate_requested_latency(s, TRUE);
1968
1969 /* In flat volume mode we need to update the volume as
1970 * well */
1971 return object->process_msg(object, PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL);
1972 }
1973
1974 case PA_SOURCE_MESSAGE_SET_SHARED_VOLUME: {
1975 pa_source *root_source = pa_source_get_master(s);
1976
1977 if (PA_LIKELY(root_source))
1978 set_shared_volume_within_thread(root_source);
1979
1980 return 0;
1981 }
1982
1983 case PA_SOURCE_MESSAGE_SET_VOLUME_SYNCED:
1984
1985 if (s->flags & PA_SOURCE_DEFERRED_VOLUME) {
1986 s->set_volume(s);
1987 pa_source_volume_change_push(s);
1988 }
1989 /* Fall through ... */
1990
1991 case PA_SOURCE_MESSAGE_SET_VOLUME:
1992
1993 if (!pa_cvolume_equal(&s->thread_info.soft_volume, &s->soft_volume)) {
1994 s->thread_info.soft_volume = s->soft_volume;
1995 }
1996
1997 /* Fall through ... */
1998
1999 case PA_SOURCE_MESSAGE_SYNC_VOLUMES:
2000 sync_output_volumes_within_thread(s);
2001 return 0;
2002
2003 case PA_SOURCE_MESSAGE_GET_VOLUME:
2004
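/* With deferred volumes, refresh the hw volume here in the IO thread, drop any pending volume changes, and recompute current_hw_volume as real_volume / soft_volume. */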
2005 if ((s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->get_volume) {
2006 s->get_volume(s);
2007 pa_source_volume_change_flush(s);
2008 pa_sw_cvolume_divide(&s->thread_info.current_hw_volume, &s->real_volume, &s->soft_volume);
2009 }
2010
2011 /* In case the source implementor reset the SW volume. */
2012 if (!pa_cvolume_equal(&s->thread_info.soft_volume, &s->soft_volume)) {
2013 s->thread_info.soft_volume = s->soft_volume;
2014 }
2015
2016 return 0;
2017
2018 case PA_SOURCE_MESSAGE_SET_MUTE:
2019
2020 if (s->thread_info.soft_muted != s->muted) {
2021 s->thread_info.soft_muted = s->muted;
2022 }
2023
2024 if (s->flags & PA_SOURCE_DEFERRED_VOLUME && s->set_mute)
2025 s->set_mute(s);
2026
2027 return 0;
2028
2029 case PA_SOURCE_MESSAGE_GET_MUTE:
2030
2031 if (s->flags & PA_SOURCE_DEFERRED_VOLUME && s->get_mute)
2032 s->get_mute(s);
2033
2034 return 0;
2035
2036 case PA_SOURCE_MESSAGE_SET_STATE: {
2037
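/* A suspend transition is one that crosses the SUSPENDED/opened boundary in either direction; only then are the outputs' suspend_within_thread() callbacks invoked. */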
2038 pa_bool_t suspend_change =
2039 (s->thread_info.state == PA_SOURCE_SUSPENDED && PA_SOURCE_IS_OPENED(PA_PTR_TO_UINT(userdata))) ||
2040 (PA_SOURCE_IS_OPENED(s->thread_info.state) && PA_PTR_TO_UINT(userdata) == PA_SOURCE_SUSPENDED);
2041
2042 s->thread_info.state = PA_PTR_TO_UINT(userdata);
2043
2044 if (suspend_change) {
2045 pa_source_output *o;
2046 void *state = NULL;
2047
2048 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL)))
2049 if (o->suspend_within_thread)
2050 o->suspend_within_thread(o, s->thread_info.state == PA_SOURCE_SUSPENDED);
2051 }
2052
2053 return 0;
2054 }
2055
2056 case PA_SOURCE_MESSAGE_DETACH:
2057
2058 /* Detach all streams */
2059 pa_source_detach_within_thread(s);
2060 return 0;
2061
2062 case PA_SOURCE_MESSAGE_ATTACH:
2063
2064 /* Reattach all streams */
2065 pa_source_attach_within_thread(s);
2066 return 0;
2067
2068 case PA_SOURCE_MESSAGE_GET_REQUESTED_LATENCY: {
2069
2070 pa_usec_t *usec = userdata;
2071 *usec = pa_source_get_requested_latency_within_thread(s);
2072
2073 /* Yes, that's right: the IO thread will see -1 when no
2074 * explicit requested latency is configured, while the main
2075 * thread will see max_latency */
2076 if (*usec == (pa_usec_t) -1)
2077 *usec = s->thread_info.max_latency;
2078
2079 return 0;
2080 }
2081
2082 case PA_SOURCE_MESSAGE_SET_LATENCY_RANGE: {
2083 pa_usec_t *r = userdata;
2084
2085 pa_source_set_latency_range_within_thread(s, r[0], r[1]);
2086
2087 return 0;
2088 }
2089
2090 case PA_SOURCE_MESSAGE_GET_LATENCY_RANGE: {
2091 pa_usec_t *r = userdata;
2092
2093 r[0] = s->thread_info.min_latency;
2094 r[1] = s->thread_info.max_latency;
2095
2096 return 0;
2097 }
2098
2099 case PA_SOURCE_MESSAGE_GET_FIXED_LATENCY:
2100
2101 *((pa_usec_t*) userdata) = s->thread_info.fixed_latency;
2102 return 0;
2103
2104 case PA_SOURCE_MESSAGE_SET_FIXED_LATENCY:
2105
2106 pa_source_set_fixed_latency_within_thread(s, (pa_usec_t) offset);
2107 return 0;
2108
2109 case PA_SOURCE_MESSAGE_GET_MAX_REWIND:
2110
2111 *((size_t*) userdata) = s->thread_info.max_rewind;
2112 return 0;
2113
2114 case PA_SOURCE_MESSAGE_SET_MAX_REWIND:
2115
2116 pa_source_set_max_rewind_within_thread(s, (size_t) offset);
2117 return 0;
2118
2119 case PA_SOURCE_MESSAGE_GET_LATENCY:
2120
2121 if (s->monitor_of) {
2122 *((pa_usec_t*) userdata) = 0;
2123 return 0;
2124 }
2125
2126 /* Implementors need to override this implementation! */
2127 return -1;
2128
2129 case PA_SOURCE_MESSAGE_SET_PORT:
2130
2131 pa_assert(userdata);
2132 if (s->set_port) {
2133 struct source_message_set_port *msg_data = userdata;
2134 msg_data->ret = s->set_port(s, msg_data->port);
2135 }
2136 return 0;
2137
2138 case PA_SOURCE_MESSAGE_UPDATE_VOLUME_AND_MUTE:
2139 /* This message is sent from the IO thread and handled in the main thread. */
2140 pa_assert_ctl_context();
2141
2142 /* Make sure we're not messing with the main thread when no longer linked */
2143 if (!PA_SOURCE_IS_LINKED(s->state))
2144 return 0;
2145
2146 pa_source_get_volume(s, TRUE);
2147 pa_source_get_mute(s, TRUE);
2148 return 0;
2149
2150 case PA_SOURCE_MESSAGE_MAX:
2151 ;
2152 }
2153
2154 return -1;
2155 }
2156
2157 /* Called from main thread */
2158 int pa_source_suspend_all(pa_core *c, pa_bool_t suspend, pa_suspend_cause_t cause) {
2159 pa_source *source;
2160 uint32_t idx;
2161 int ret = 0;
2162
2163 pa_core_assert_ref(c);
2164 pa_assert_ctl_context();
2165 pa_assert(cause != 0);
2166
2167 for (source = PA_SOURCE(pa_idxset_first(c->sources, &idx)); source; source = PA_SOURCE(pa_idxset_next(c->sources, &idx))) {
2168 int r;
2169
2170 if (source->monitor_of)
2171 continue;
2172
2173 if ((r = pa_source_suspend(source, suspend, cause)) < 0)
2174 ret = r;
2175 }
2176
2177 return ret;
2178 }
2179
2180 /* Called from main thread */
2181 void pa_source_detach(pa_source *s) {
2182 pa_source_assert_ref(s);
2183 pa_assert_ctl_context();
2184 pa_assert(PA_SOURCE_IS_LINKED(s->state));
2185
2186 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_DETACH, NULL, 0, NULL) == 0);
2187 }
2188
2189 /* Called from main thread */
2190 void pa_source_attach(pa_source *s) {
2191 pa_source_assert_ref(s);
2192 pa_assert_ctl_context();
2193 pa_assert(PA_SOURCE_IS_LINKED(s->state));
2194
2195 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_ATTACH, NULL, 0, NULL) == 0);
2196 }
2197
2198 /* Called from IO thread */
2199 void pa_source_detach_within_thread(pa_source *s) {
2200 pa_source_output *o;
2201 void *state = NULL;
2202
2203 pa_source_assert_ref(s);
2204 pa_source_assert_io_context(s);
2205 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
2206
2207 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2208 if (o->detach)
2209 o->detach(o);
2210 }
2211
2212 /* Called from IO thread */
2213 void pa_source_attach_within_thread(pa_source *s) {
2214 pa_source_output *o;
2215 void *state = NULL;
2216
2217 pa_source_assert_ref(s);
2218 pa_source_assert_io_context(s);
2219 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
2220
2221 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2222 if (o->attach)
2223 o->attach(o);
2224 }
2225
2226 /* Called from IO thread */
2227 pa_usec_t pa_source_get_requested_latency_within_thread(pa_source *s) {
2228 pa_usec_t result = (pa_usec_t) -1;
2229 pa_source_output *o;
2230 void *state = NULL;
2231
2232 pa_source_assert_ref(s);
2233 pa_source_assert_io_context(s);
2234
2235 if (!(s->flags & PA_SOURCE_DYNAMIC_LATENCY))
2236 return PA_CLAMP(s->thread_info.fixed_latency, s->thread_info.min_latency, s->thread_info.max_latency);
2237
2238 if (s->thread_info.requested_latency_valid)
2239 return s->thread_info.requested_latency;
2240
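/* The effective requested latency is the smallest latency requested by any output, clamped to the configured min/max range. */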
2241 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2242 if (o->thread_info.requested_source_latency != (pa_usec_t) -1 &&
2243 (result == (pa_usec_t) -1 || result > o->thread_info.requested_source_latency))
2244 result = o->thread_info.requested_source_latency;
2245
2246 if (result != (pa_usec_t) -1)
2247 result = PA_CLAMP(result, s->thread_info.min_latency, s->thread_info.max_latency);
2248
2249 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2250 /* Only cache this if we are fully set up */
2251 s->thread_info.requested_latency = result;
2252 s->thread_info.requested_latency_valid = TRUE;
2253 }
2254
2255 return result;
2256 }
2257
2258 /* Called from main thread */
2259 pa_usec_t pa_source_get_requested_latency(pa_source *s) {
2260 pa_usec_t usec = 0;
2261
2262 pa_source_assert_ref(s);
2263 pa_assert_ctl_context();
2264 pa_assert(PA_SOURCE_IS_LINKED(s->state));
2265
2266 if (s->state == PA_SOURCE_SUSPENDED)
2267 return 0;
2268
2269 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_REQUESTED_LATENCY, &usec, 0, NULL) == 0);
2270
2271 return usec;
2272 }
2273
2274 /* Called from IO thread */
2275 void pa_source_set_max_rewind_within_thread(pa_source *s, size_t max_rewind) {
2276 pa_source_output *o;
2277 void *state = NULL;
2278
2279 pa_source_assert_ref(s);
2280 pa_source_assert_io_context(s);
2281
2282 if (max_rewind == s->thread_info.max_rewind)
2283 return;
2284
2285 s->thread_info.max_rewind = max_rewind;
2286
2287 if (PA_SOURCE_IS_LINKED(s->thread_info.state))
2288 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2289 pa_source_output_update_max_rewind(o, s->thread_info.max_rewind);
2290 }
2291
2292 /* Called from main thread */
2293 void pa_source_set_max_rewind(pa_source *s, size_t max_rewind) {
2294 pa_source_assert_ref(s);
2295 pa_assert_ctl_context();
2296
2297 if (PA_SOURCE_IS_LINKED(s->state))
2298 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MAX_REWIND, NULL, max_rewind, NULL) == 0);
2299 else
2300 pa_source_set_max_rewind_within_thread(s, max_rewind);
2301 }
2302
2303 /* Called from IO thread */
2304 void pa_source_invalidate_requested_latency(pa_source *s, pa_bool_t dynamic) {
2305 pa_source_output *o;
2306 void *state = NULL;
2307
2308 pa_source_assert_ref(s);
2309 pa_source_assert_io_context(s);
2310
2311 if ((s->flags & PA_SOURCE_DYNAMIC_LATENCY))
2312 s->thread_info.requested_latency_valid = FALSE;
2313 else if (dynamic)
2314 return;
2315
2316 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2317
2318 if (s->update_requested_latency)
2319 s->update_requested_latency(s);
2320
2321 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL)))
2322 if (o->update_source_requested_latency)
2323 o->update_source_requested_latency(o);
2324 }
2325
2326 if (s->monitor_of)
2327 pa_sink_invalidate_requested_latency(s->monitor_of, dynamic);
2328 }
2329
2330 /* Called from main thread */
2331 void pa_source_set_latency_range(pa_source *s, pa_usec_t min_latency, pa_usec_t max_latency) {
2332 pa_source_assert_ref(s);
2333 pa_assert_ctl_context();
2334
2335 /* min_latency == 0: no limit
2336 * min_latency anything else: specified limit
2337 *
2338 * Similar for max_latency */
2339
2340 if (min_latency < ABSOLUTE_MIN_LATENCY)
2341 min_latency = ABSOLUTE_MIN_LATENCY;
2342
2343 if (max_latency <= 0 ||
2344 max_latency > ABSOLUTE_MAX_LATENCY)
2345 max_latency = ABSOLUTE_MAX_LATENCY;
2346
2347 pa_assert(min_latency <= max_latency);
2348
2349 /* Hmm, let's see if someone forgot to set PA_SOURCE_DYNAMIC_LATENCY here... */
2350 pa_assert((min_latency == ABSOLUTE_MIN_LATENCY &&
2351 max_latency == ABSOLUTE_MAX_LATENCY) ||
2352 (s->flags & PA_SOURCE_DYNAMIC_LATENCY));
2353
2354 if (PA_SOURCE_IS_LINKED(s->state)) {
2355 pa_usec_t r[2];
2356
2357 r[0] = min_latency;
2358 r[1] = max_latency;
2359
2360 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_LATENCY_RANGE, r, 0, NULL) == 0);
2361 } else
2362 pa_source_set_latency_range_within_thread(s, min_latency, max_latency);
2363 }
2364
2365 /* Called from main thread */
2366 void pa_source_get_latency_range(pa_source *s, pa_usec_t *min_latency, pa_usec_t *max_latency) {
2367 pa_source_assert_ref(s);
2368 pa_assert_ctl_context();
2369 pa_assert(min_latency);
2370 pa_assert(max_latency);
2371
2372 if (PA_SOURCE_IS_LINKED(s->state)) {
2373 pa_usec_t r[2] = { 0, 0 };
2374
2375 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_LATENCY_RANGE, r, 0, NULL) == 0);
2376
2377 *min_latency = r[0];
2378 *max_latency = r[1];
2379 } else {
2380 *min_latency = s->thread_info.min_latency;
2381 *max_latency = s->thread_info.max_latency;
2382 }
2383 }
2384
2385 /* Called from IO thread, and from main thread before pa_source_put() is called */
2386 void pa_source_set_latency_range_within_thread(pa_source *s, pa_usec_t min_latency, pa_usec_t max_latency) {
2387 pa_source_assert_ref(s);
2388 pa_source_assert_io_context(s);
2389
2390 pa_assert(min_latency >= ABSOLUTE_MIN_LATENCY);
2391 pa_assert(max_latency <= ABSOLUTE_MAX_LATENCY);
2392 pa_assert(min_latency <= max_latency);
2393
2394 /* Hmm, let's see if someone forgot to set PA_SOURCE_DYNAMIC_LATENCY here... */
2395 pa_assert((min_latency == ABSOLUTE_MIN_LATENCY &&
2396 max_latency == ABSOLUTE_MAX_LATENCY) ||
2397 (s->flags & PA_SOURCE_DYNAMIC_LATENCY) ||
2398 s->monitor_of);
2399
2400 if (s->thread_info.min_latency == min_latency &&
2401 s->thread_info.max_latency == max_latency)
2402 return;
2403
2404 s->thread_info.min_latency = min_latency;
2405 s->thread_info.max_latency = max_latency;
2406
2407 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2408 pa_source_output *o;
2409 void *state = NULL;
2410
2411 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2412 if (o->update_source_latency_range)
2413 o->update_source_latency_range(o);
2414 }
2415
2416 pa_source_invalidate_requested_latency(s, FALSE);
2417 }
2418
2419 /* Called from main thread, before the source is put */
2420 void pa_source_set_fixed_latency(pa_source *s, pa_usec_t latency) {
2421 pa_source_assert_ref(s);
2422 pa_assert_ctl_context();
2423
2424 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY) {
2425 pa_assert(latency == 0);
2426 return;
2427 }
2428
2429 if (latency < ABSOLUTE_MIN_LATENCY)
2430 latency = ABSOLUTE_MIN_LATENCY;
2431
2432 if (latency > ABSOLUTE_MAX_LATENCY)
2433 latency = ABSOLUTE_MAX_LATENCY;
2434
2435 if (PA_SOURCE_IS_LINKED(s->state))
2436 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_FIXED_LATENCY, NULL, (int64_t) latency, NULL) == 0);
2437 else
2438 s->thread_info.fixed_latency = latency;
2439 }
2440
2441 /* Called from main thread */
2442 pa_usec_t pa_source_get_fixed_latency(pa_source *s) {
2443 pa_usec_t latency;
2444
2445 pa_source_assert_ref(s);
2446 pa_assert_ctl_context();
2447
2448 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY)
2449 return 0;
2450
2451 if (PA_SOURCE_IS_LINKED(s->state))
2452 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_FIXED_LATENCY, &latency, 0, NULL) == 0);
2453 else
2454 latency = s->thread_info.fixed_latency;
2455
2456 return latency;
2457 }
2458
2459 /* Called from IO thread */
2460 void pa_source_set_fixed_latency_within_thread(pa_source *s, pa_usec_t latency) {
2461 pa_source_assert_ref(s);
2462 pa_source_assert_io_context(s);
2463
2464 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY) {
2465 pa_assert(latency == 0);
2466 return;
2467 }
2468
2469 pa_assert(latency >= ABSOLUTE_MIN_LATENCY);
2470 pa_assert(latency <= ABSOLUTE_MAX_LATENCY);
2471
2472 if (s->thread_info.fixed_latency == latency)
2473 return;
2474
2475 s->thread_info.fixed_latency = latency;
2476
2477 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2478 pa_source_output *o;
2479 void *state = NULL;
2480
2481 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2482 if (o->update_source_fixed_latency)
2483 o->update_source_fixed_latency(o);
2484 }
2485
2486 pa_source_invalidate_requested_latency(s, FALSE);
2487 }
2488
2489 /* Called from main thread */
2490 size_t pa_source_get_max_rewind(pa_source *s) {
2491 size_t r;
2492 pa_assert_ctl_context();
2493 pa_source_assert_ref(s);
2494
2495 if (!PA_SOURCE_IS_LINKED(s->state))
2496 return s->thread_info.max_rewind;
2497
2498 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_MAX_REWIND, &r, 0, NULL) == 0);
2499
2500 return r;
2501 }
2502
2503 /* Called from main context */
2504 int pa_source_set_port(pa_source *s, const char *name, pa_bool_t save) {
2505 pa_device_port *port;
2506 int ret;
2507
2508 pa_source_assert_ref(s);
2509 pa_assert_ctl_context();
2510
2511 if (!s->set_port) {
2512 pa_log_debug("set_port() operation not implemented for source %u \"%s\"", s->index, s->name);
2513 return -PA_ERR_NOTIMPLEMENTED;
2514 }
2515
2516 if (!s->ports)
2517 return -PA_ERR_NOENTITY;
2518
2519 if (!(port = pa_hashmap_get(s->ports, name)))
2520 return -PA_ERR_NOENTITY;
2521
2522 if (s->active_port == port) {
2523 s->save_port = s->save_port || save;
2524 return 0;
2525 }
2526
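/* With deferred (IO-thread handled) volumes, the port switch is routed through the IO thread via PA_SOURCE_MESSAGE_SET_PORT; otherwise set_port() is called directly from the main thread. */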
2527 if (s->flags & PA_SOURCE_DEFERRED_VOLUME) {
2528 struct source_message_set_port msg = { .port = port, .ret = 0 };
2529 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_PORT, &msg, 0, NULL) == 0);
2530 ret = msg.ret;
2531 }
2532 else
2533 ret = s->set_port(s, port);
2534
2535 if (ret < 0)
2536 return -PA_ERR_NOENTITY;
2537
2538 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
2539
2540 pa_log_info("Changed port of source %u \"%s\" to %s", s->index, s->name, port->name);
2541
2542 s->active_port = port;
2543 s->save_port = save;
2544
2545 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PORT_CHANGED], s);
2546
2547 return 0;
2548 }
2549
2550 PA_STATIC_FLIST_DECLARE(pa_source_volume_change, 0, pa_xfree);
2551
2552 /* Called from the IO thread. */
2553 static pa_source_volume_change *pa_source_volume_change_new(pa_source *s) {
2554 pa_source_volume_change *c;
2555 if (!(c = pa_flist_pop(PA_STATIC_FLIST_GET(pa_source_volume_change))))
2556 c = pa_xnew(pa_source_volume_change, 1);
2557
2558 PA_LLIST_INIT(pa_source_volume_change, c);
2559 c->at = 0;
2560 pa_cvolume_reset(&c->hw_volume, s->sample_spec.channels);
2561 return c;
2562 }
2563
2564 /* Called from the IO thread. */
2565 static void pa_source_volume_change_free(pa_source_volume_change *c) {
2566 pa_assert(c);
2567 if (pa_flist_push(PA_STATIC_FLIST_GET(pa_source_volume_change), c) < 0)
2568 pa_xfree(c);
2569 }
2570
2571 /* Called from the IO thread. */
2572 void pa_source_volume_change_push(pa_source *s) {
2573 pa_source_volume_change *c = NULL;
2574 pa_source_volume_change *nc = NULL;
2575 uint32_t safety_margin = s->thread_info.volume_change_safety_margin;
2576
2577 const char *direction = NULL;
2578
2579 pa_assert(s);
2580 nc = pa_source_volume_change_new(s);
2581
2582 /* NOTE: There are already more different volumes in pa_source than I can remember.
2583 * Adding one more volume for HW would get rid of this, but I am trying
2584 * to survive with the ones we already have. */
2585 pa_sw_cvolume_divide(&nc->hw_volume, &s->real_volume, &s->soft_volume);
2586
2587 if (!s->thread_info.volume_changes && pa_cvolume_equal(&nc->hw_volume, &s->thread_info.current_hw_volume)) {
2588 pa_log_debug("Volume not changing");
2589 pa_source_volume_change_free(nc);
2590 return;
2591 }
2592
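/* Target time for the change: now + the current source latency + the configured extra delay. */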
2593 nc->at = pa_source_get_latency_within_thread(s);
2594 nc->at += pa_rtclock_now() + s->thread_info.volume_change_extra_delay;
2595
2596 if (s->thread_info.volume_changes_tail) {
2597 for (c = s->thread_info.volume_changes_tail; c; c = c->prev) {
2598 /* If the volume is going up, let's do it a bit late. If it is going
2599 * down, let's do it a bit early. */
2600 if (pa_cvolume_avg(&nc->hw_volume) > pa_cvolume_avg(&c->hw_volume)) {
2601 if (nc->at + safety_margin > c->at) {
2602 nc->at += safety_margin;
2603 direction = "up";
2604 break;
2605 }
2606 }
2607 else if (nc->at - safety_margin > c->at) {
2608 nc->at -= safety_margin;
2609 direction = "down";
2610 break;
2611 }
2612 }
2613 }
2614
2615 if (c == NULL) {
2616 if (pa_cvolume_avg(&nc->hw_volume) > pa_cvolume_avg(&s->thread_info.current_hw_volume)) {
2617 nc->at += safety_margin;
2618 direction = "up";
2619 } else {
2620 nc->at -= safety_margin;
2621 direction = "down";
2622 }
2623 PA_LLIST_PREPEND(pa_source_volume_change, s->thread_info.volume_changes, nc);
2624 }
2625 else {
2626 PA_LLIST_INSERT_AFTER(pa_source_volume_change, s->thread_info.volume_changes, c, nc);
2627 }
2628
2629 pa_log_debug("Volume going %s to %d at %llu", direction, pa_cvolume_avg(&nc->hw_volume), (long long unsigned) nc->at);
2630
2631 /* We can ignore volume events that came earlier but should happen later than this. */
2632 PA_LLIST_FOREACH(c, nc->next) {
2633 pa_log_debug("Volume change to %d at %llu was dropped", pa_cvolume_avg(&c->hw_volume), (long long unsigned) c->at);
2634 pa_source_volume_change_free(c);
2635 }
2636 nc->next = NULL;
2637 s->thread_info.volume_changes_tail = nc;
2638 }
2639
2640 /* Called from the IO thread. */
2641 static void pa_source_volume_change_flush(pa_source *s) {
2642 pa_source_volume_change *c = s->thread_info.volume_changes;
2643 pa_assert(s);
2644 s->thread_info.volume_changes = NULL;
2645 s->thread_info.volume_changes_tail = NULL;
2646 while (c) {
2647 pa_source_volume_change *next = c->next;
2648 pa_source_volume_change_free(c);
2649 c = next;
2650 }
2651 }
2652
2653 /* Called from the IO thread. */
2654 pa_bool_t pa_source_volume_change_apply(pa_source *s, pa_usec_t *usec_to_next) {
2655 pa_usec_t now;
2656 pa_bool_t ret = FALSE;
2657
2658 pa_assert(s);
2659
2660 if (!s->thread_info.volume_changes || !PA_SOURCE_IS_LINKED(s->state)) {
2661 if (usec_to_next)
2662 *usec_to_next = 0;
2663 return ret;
2664 }
2665
2666 pa_assert(s->write_volume);
2667
2668 now = pa_rtclock_now();
2669
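/* Apply every queued change whose deadline has passed; the last one applied becomes the new current_hw_volume, and write_volume() is called once below if anything changed. */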
2670 while (s->thread_info.volume_changes && now >= s->thread_info.volume_changes->at) {
2671 pa_source_volume_change *c = s->thread_info.volume_changes;
2672 PA_LLIST_REMOVE(pa_source_volume_change, s->thread_info.volume_changes, c);
2673 pa_log_debug("Volume change to %d at %llu was written %llu usec late",
2674 pa_cvolume_avg(&c->hw_volume), (long long unsigned) c->at, (long long unsigned) (now - c->at));
2675 ret = TRUE;
2676 s->thread_info.current_hw_volume = c->hw_volume;
2677 pa_source_volume_change_free(c);
2678 }
2679
2680 if (ret)
2681 s->write_volume(s);
2682
2683 if (s->thread_info.volume_changes) {
2684 if (usec_to_next)
2685 *usec_to_next = s->thread_info.volume_changes->at - now;
2686 if (pa_log_ratelimit(PA_LOG_DEBUG))
2687 pa_log_debug("Next volume change in %lld usec", (long long) (s->thread_info.volume_changes->at - now));
2688 }
2689 else {
2690 if (usec_to_next)
2691 *usec_to_next = 0;
2692 s->thread_info.volume_changes_tail = NULL;
2693 }
2694 return ret;
2695 }
2696
2697
2698 /* Called from the main thread */
2699 /* Gets the list of formats supported by the source. The members and idxset must
2700 * be freed by the caller. */
2701 pa_idxset* pa_source_get_formats(pa_source *s) {
2702 pa_idxset *ret;
2703
2704 pa_assert(s);
2705
2706 if (s->get_formats) {
2707 /* Source supports format query, all is good */
2708 ret = s->get_formats(s);
2709 } else {
2710 /* Source doesn't support format query, so assume it does PCM */
2711 pa_format_info *f = pa_format_info_new();
2712 f->encoding = PA_ENCODING_PCM;
2713
2714 ret = pa_idxset_new(NULL, NULL);
2715 pa_idxset_put(ret, f, NULL);
2716 }
2717
2718 return ret;
2719 }
2720
2721 /* Called from the main thread */
2722 /* Checks if the source can accept this format */
2723 pa_bool_t pa_source_check_format(pa_source *s, pa_format_info *f)
2724 {
2725 pa_idxset *formats = NULL;
2726 pa_bool_t ret = FALSE;
2727
2728 pa_assert(s);
2729 pa_assert(f);
2730
2731 formats = pa_source_get_formats(s);
2732
2733 if (formats) {
2734 pa_format_info *finfo_device;
2735 uint32_t i;
2736
2737 PA_IDXSET_FOREACH(finfo_device, formats, i) {
2738 if (pa_format_info_is_compatible(finfo_device, f)) {
2739 ret = TRUE;
2740 break;
2741 }
2742 }
2743
2744 pa_idxset_free(formats, (pa_free2_cb_t) pa_format_info_free2, NULL);
2745 }
2746
2747 return ret;
2748 }
2749
2750 /* Called from the main thread */
2751 /* Calculates the intersection between formats supported by the source and
2752 * in_formats, and returns these, in the order of the source's formats. */
2753 pa_idxset* pa_source_check_formats(pa_source *s, pa_idxset *in_formats) {
2754 pa_idxset *out_formats = pa_idxset_new(NULL, NULL), *source_formats = NULL;
2755 pa_format_info *f_source, *f_in;
2756 uint32_t i, j;
2757
2758 pa_assert(s);
2759
2760 if (!in_formats || pa_idxset_isempty(in_formats))
2761 goto done;
2762
2763 source_formats = pa_source_get_formats(s);
2764
2765 PA_IDXSET_FOREACH(f_source, source_formats, i) {
2766 PA_IDXSET_FOREACH(f_in, in_formats, j) {
2767 if (pa_format_info_is_compatible(f_source, f_in))
2768 pa_idxset_put(out_formats, pa_format_info_copy(f_in), NULL);
2769 }
2770 }
2771
2772 done:
2773 if (source_formats)
2774 pa_idxset_free(source_formats, (pa_free2_cb_t) pa_format_info_free2, NULL);
2775
2776 return out_formats;
2777 }