1 /***
2 This file is part of PulseAudio.
3
4 Copyright 2004-2006 Lennart Poettering
5 Copyright 2006 Pierre Ossman <ossman@cendio.se> for Cendio AB
6
7 PulseAudio is free software; you can redistribute it and/or modify
8 it under the terms of the GNU Lesser General Public License as published
9 by the Free Software Foundation; either version 2.1 of the License,
10 or (at your option) any later version.
11
12 PulseAudio is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
16
17 You should have received a copy of the GNU Lesser General Public License
18 along with PulseAudio; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
20 USA.
21 ***/
22
23 #ifdef HAVE_CONFIG_H
24 #include <config.h>
25 #endif
26
27 #include <stdio.h>
28 #include <stdlib.h>
29
30 #include <pulse/format.h>
31 #include <pulse/utf8.h>
32 #include <pulse/xmalloc.h>
33 #include <pulse/timeval.h>
34 #include <pulse/util.h>
35 #include <pulse/rtclock.h>
36 #include <pulse/internal.h>
37
38 #include <pulsecore/core-util.h>
39 #include <pulsecore/source-output.h>
40 #include <pulsecore/namereg.h>
41 #include <pulsecore/core-subscribe.h>
42 #include <pulsecore/log.h>
43 #include <pulsecore/sample-util.h>
44 #include <pulsecore/flist.h>
45
46 #include "source.h"
47
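/* The latency limits below are expressed in microseconds (pa_usec_t):
 * a 500 usec absolute minimum, a 10 s absolute maximum and a 250 ms
 * default fixed latency. */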
48 #define ABSOLUTE_MIN_LATENCY (500)
49 #define ABSOLUTE_MAX_LATENCY (10*PA_USEC_PER_SEC)
50 #define DEFAULT_FIXED_LATENCY (250*PA_USEC_PER_MSEC)
51
52 PA_DEFINE_PUBLIC_CLASS(pa_source, pa_msgobject);
53
54 struct pa_source_volume_change {
55 pa_usec_t at;
56 pa_cvolume hw_volume;
57
58 PA_LLIST_FIELDS(pa_source_volume_change);
59 };
60
61 struct source_message_set_port {
62 pa_device_port *port;
63 int ret;
64 };
65
66 static void source_free(pa_object *o);
67
68 static void pa_source_volume_change_push(pa_source *s);
69 static void pa_source_volume_change_flush(pa_source *s);
70
71 pa_source_new_data* pa_source_new_data_init(pa_source_new_data *data) {
72 pa_assert(data);
73
74 pa_zero(*data);
75 data->proplist = pa_proplist_new();
76 data->ports = pa_hashmap_new(pa_idxset_string_hash_func, pa_idxset_string_compare_func);
77
78 return data;
79 }
80
81 void pa_source_new_data_set_name(pa_source_new_data *data, const char *name) {
82 pa_assert(data);
83
84 pa_xfree(data->name);
85 data->name = pa_xstrdup(name);
86 }
87
88 void pa_source_new_data_set_sample_spec(pa_source_new_data *data, const pa_sample_spec *spec) {
89 pa_assert(data);
90
91 if ((data->sample_spec_is_set = !!spec))
92 data->sample_spec = *spec;
93 }
94
95 void pa_source_new_data_set_channel_map(pa_source_new_data *data, const pa_channel_map *map) {
96 pa_assert(data);
97
98 if ((data->channel_map_is_set = !!map))
99 data->channel_map = *map;
100 }
101
102 void pa_source_new_data_set_alternate_sample_rate(pa_source_new_data *data, const uint32_t alternate_sample_rate) {
103 pa_assert(data);
104
105 data->alternate_sample_rate_is_set = TRUE;
106 data->alternate_sample_rate = alternate_sample_rate;
107 }
108
109 void pa_source_new_data_set_volume(pa_source_new_data *data, const pa_cvolume *volume) {
110 pa_assert(data);
111
112 if ((data->volume_is_set = !!volume))
113 data->volume = *volume;
114 }
115
116 void pa_source_new_data_set_muted(pa_source_new_data *data, pa_bool_t mute) {
117 pa_assert(data);
118
119 data->muted_is_set = TRUE;
120 data->muted = !!mute;
121 }
122
123 void pa_source_new_data_set_port(pa_source_new_data *data, const char *port) {
124 pa_assert(data);
125
126 pa_xfree(data->active_port);
127 data->active_port = pa_xstrdup(port);
128 }
129
130 void pa_source_new_data_done(pa_source_new_data *data) {
131 pa_assert(data);
132
133 pa_proplist_free(data->proplist);
134
135 if (data->ports)
136 pa_device_port_hashmap_free(data->ports);
137
138 pa_xfree(data->name);
139 pa_xfree(data->active_port);
140 }
141
142 /* Called from main context */
143 static void reset_callbacks(pa_source *s) {
144 pa_assert(s);
145
146 s->set_state = NULL;
147 s->get_volume = NULL;
148 s->set_volume = NULL;
149 s->write_volume = NULL;
150 s->get_mute = NULL;
151 s->set_mute = NULL;
152 s->update_requested_latency = NULL;
153 s->set_port = NULL;
154 s->get_formats = NULL;
155 s->update_rate = NULL;
156 }
157
158 /* Called from main context */
159 pa_source* pa_source_new(
160 pa_core *core,
161 pa_source_new_data *data,
162 pa_source_flags_t flags) {
163
164 pa_source *s;
165 const char *name;
166 char st[PA_SAMPLE_SPEC_SNPRINT_MAX], cm[PA_CHANNEL_MAP_SNPRINT_MAX];
167 char *pt;
168
169 pa_assert(core);
170 pa_assert(data);
171 pa_assert(data->name);
172 pa_assert_ctl_context();
173
174 s = pa_msgobject_new(pa_source);
175
176 if (!(name = pa_namereg_register(core, data->name, PA_NAMEREG_SOURCE, s, data->namereg_fail))) {
177 pa_log_debug("Failed to register name %s.", data->name);
178 pa_xfree(s);
179 return NULL;
180 }
181
182 pa_source_new_data_set_name(data, name);
183
184 if (pa_hook_fire(&core->hooks[PA_CORE_HOOK_SOURCE_NEW], data) < 0) {
185 pa_xfree(s);
186 pa_namereg_unregister(core, name);
187 return NULL;
188 }
189
190 /* FIXME, need to free s here on failure */
191
192 pa_return_null_if_fail(!data->driver || pa_utf8_valid(data->driver));
193 pa_return_null_if_fail(data->name && pa_utf8_valid(data->name) && data->name[0]);
194
195 pa_return_null_if_fail(data->sample_spec_is_set && pa_sample_spec_valid(&data->sample_spec));
196
197 if (!data->channel_map_is_set)
198 pa_return_null_if_fail(pa_channel_map_init_auto(&data->channel_map, data->sample_spec.channels, PA_CHANNEL_MAP_DEFAULT));
199
200 pa_return_null_if_fail(pa_channel_map_valid(&data->channel_map));
201 pa_return_null_if_fail(data->channel_map.channels == data->sample_spec.channels);
202
203 /* FIXME: There should probably be a general function for checking whether
204 * the source volume is allowed to be set, like there is for source outputs. */
205 pa_assert(!data->volume_is_set || !(flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
206
207 if (!data->volume_is_set) {
208 pa_cvolume_reset(&data->volume, data->sample_spec.channels);
209 data->save_volume = FALSE;
210 }
211
212 pa_return_null_if_fail(pa_cvolume_valid(&data->volume));
213 pa_return_null_if_fail(pa_cvolume_compatible(&data->volume, &data->sample_spec));
214
215 if (!data->muted_is_set)
216 data->muted = FALSE;
217
218 if (data->card)
219 pa_proplist_update(data->proplist, PA_UPDATE_MERGE, data->card->proplist);
220
221 pa_device_init_description(data->proplist);
222 pa_device_init_icon(data->proplist, FALSE);
223 pa_device_init_intended_roles(data->proplist);
224
225 if (pa_hook_fire(&core->hooks[PA_CORE_HOOK_SOURCE_FIXATE], data) < 0) {
226 pa_xfree(s);
227 pa_namereg_unregister(core, name);
228 return NULL;
229 }
230
231 s->parent.parent.free = source_free;
232 s->parent.process_msg = pa_source_process_msg;
233
234 s->core = core;
235 s->state = PA_SOURCE_INIT;
236 s->flags = flags;
237 s->priority = 0;
238 s->suspend_cause = data->suspend_cause;
239 pa_source_set_mixer_dirty(s, FALSE);
240 s->name = pa_xstrdup(name);
241 s->proplist = pa_proplist_copy(data->proplist);
242 s->driver = pa_xstrdup(pa_path_get_filename(data->driver));
243 s->module = data->module;
244 s->card = data->card;
245
246 s->priority = pa_device_init_priority(s->proplist);
247
248 s->sample_spec = data->sample_spec;
249 s->channel_map = data->channel_map;
250 s->default_sample_rate = s->sample_spec.rate;
251
252 if (data->alternate_sample_rate_is_set)
253 s->alternate_sample_rate = data->alternate_sample_rate;
254 else
255 s->alternate_sample_rate = s->core->alternate_sample_rate;
256
257 if (s->sample_spec.rate == s->alternate_sample_rate) {
258 pa_log_warn("Default and alternate sample rates are the same.");
259 s->alternate_sample_rate = 0;
260 }
261
262 s->outputs = pa_idxset_new(NULL, NULL);
263 s->n_corked = 0;
264 s->monitor_of = NULL;
265 s->output_from_master = NULL;
266
267 s->reference_volume = s->real_volume = data->volume;
268 pa_cvolume_reset(&s->soft_volume, s->sample_spec.channels);
269 s->base_volume = PA_VOLUME_NORM;
270 s->n_volume_steps = PA_VOLUME_NORM+1;
271 s->muted = data->muted;
272 s->refresh_volume = s->refresh_muted = FALSE;
273
274 reset_callbacks(s);
275 s->userdata = NULL;
276
277 s->asyncmsgq = NULL;
278
279 /* As a minor optimization we just steal the ports hashmap instead of
280 * copying it here */
281 s->ports = data->ports;
282 data->ports = NULL;
283
284 s->active_port = NULL;
285 s->save_port = FALSE;
286
287 if (data->active_port)
288 if ((s->active_port = pa_hashmap_get(s->ports, data->active_port)))
289 s->save_port = data->save_port;
290
291 if (!s->active_port) {
292 void *state;
293 pa_device_port *p;
294
295 PA_HASHMAP_FOREACH(p, s->ports, state)
296 if (!s->active_port || p->priority > s->active_port->priority)
297 s->active_port = p;
298 }
299
300 if (s->active_port)
301 s->latency_offset = s->active_port->latency_offset;
302 else
303 s->latency_offset = 0;
304
305 s->save_volume = data->save_volume;
306 s->save_muted = data->save_muted;
307
308 pa_silence_memchunk_get(
309 &core->silence_cache,
310 core->mempool,
311 &s->silence,
312 &s->sample_spec,
313 0);
314
315 s->thread_info.rtpoll = NULL;
316 s->thread_info.outputs = pa_hashmap_new(pa_idxset_trivial_hash_func, pa_idxset_trivial_compare_func);
317 s->thread_info.soft_volume = s->soft_volume;
318 s->thread_info.soft_muted = s->muted;
319 s->thread_info.state = s->state;
320 s->thread_info.max_rewind = 0;
321 s->thread_info.requested_latency_valid = FALSE;
322 s->thread_info.requested_latency = 0;
323 s->thread_info.min_latency = ABSOLUTE_MIN_LATENCY;
324 s->thread_info.max_latency = ABSOLUTE_MAX_LATENCY;
325 s->thread_info.fixed_latency = flags & PA_SOURCE_DYNAMIC_LATENCY ? 0 : DEFAULT_FIXED_LATENCY;
326
327 PA_LLIST_HEAD_INIT(pa_source_volume_change, s->thread_info.volume_changes);
328 s->thread_info.volume_changes_tail = NULL;
329 pa_sw_cvolume_multiply(&s->thread_info.current_hw_volume, &s->soft_volume, &s->real_volume);
330 s->thread_info.volume_change_safety_margin = core->deferred_volume_safety_margin_usec;
331 s->thread_info.volume_change_extra_delay = core->deferred_volume_extra_delay_usec;
332 s->thread_info.latency_offset = s->latency_offset;
333
334 /* FIXME: This should probably be moved to pa_source_put() */
335 pa_assert_se(pa_idxset_put(core->sources, s, &s->index) >= 0);
336
337 if (s->card)
338 pa_assert_se(pa_idxset_put(s->card->sources, s, NULL) >= 0);
339
340 pt = pa_proplist_to_string_sep(s->proplist, "\n ");
341 pa_log_info("Created source %u \"%s\" with sample spec %s and channel map %s\n %s",
342 s->index,
343 s->name,
344 pa_sample_spec_snprint(st, sizeof(st), &s->sample_spec),
345 pa_channel_map_snprint(cm, sizeof(cm), &s->channel_map),
346 pt);
347 pa_xfree(pt);
348
349 return s;
350 }
351
352 /* Called from main context */
353 static int source_set_state(pa_source *s, pa_source_state_t state) {
354 int ret;
355 pa_bool_t suspend_change;
356 pa_source_state_t original_state;
357
358 pa_assert(s);
359 pa_assert_ctl_context();
360
361 if (s->state == state)
362 return 0;
363
364 original_state = s->state;
365
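/* A suspend or resume transition only happens when we cross the
 * SUSPENDED boundary in either direction. */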
366 suspend_change =
367 (original_state == PA_SOURCE_SUSPENDED && PA_SOURCE_IS_OPENED(state)) ||
368 (PA_SOURCE_IS_OPENED(original_state) && state == PA_SOURCE_SUSPENDED);
369
370 if (s->set_state)
371 if ((ret = s->set_state(s, state)) < 0)
372 return ret;
373
374 if (s->asyncmsgq)
375 if ((ret = pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_STATE, PA_UINT_TO_PTR(state), 0, NULL)) < 0) {
376
377 if (s->set_state)
378 s->set_state(s, original_state);
379
380 return ret;
381 }
382
383 s->state = state;
384
385 if (state != PA_SOURCE_UNLINKED) { /* if we enter UNLINKED state pa_source_unlink() will fire the appropriate events */
386 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_STATE_CHANGED], s);
387 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
388 }
389
390 if (suspend_change) {
391 pa_source_output *o;
392 uint32_t idx;
393
394 /* We're suspending or resuming, tell everyone about it */
395
396 PA_IDXSET_FOREACH(o, s->outputs, idx)
397 if (s->state == PA_SOURCE_SUSPENDED &&
398 (o->flags & PA_SOURCE_OUTPUT_KILL_ON_SUSPEND))
399 pa_source_output_kill(o);
400 else if (o->suspend)
401 o->suspend(o, state == PA_SOURCE_SUSPENDED);
402 }
403
404 return 0;
405 }
406
407 void pa_source_set_get_volume_callback(pa_source *s, pa_source_cb_t cb) {
408 pa_assert(s);
409
410 s->get_volume = cb;
411 }
412
413 void pa_source_set_set_volume_callback(pa_source *s, pa_source_cb_t cb) {
414 pa_source_flags_t flags;
415
416 pa_assert(s);
417 pa_assert(!s->write_volume || cb);
418
419 s->set_volume = cb;
420
421 /* Save the current flags so we can tell if they've changed */
422 flags = s->flags;
423
424 if (cb) {
425 /* The source implementor is responsible for setting decibel volume support */
426 s->flags |= PA_SOURCE_HW_VOLUME_CTRL;
427 } else {
428 s->flags &= ~PA_SOURCE_HW_VOLUME_CTRL;
429 /* See note below in pa_source_put() about volume sharing and decibel volumes */
430 pa_source_enable_decibel_volume(s, !(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
431 }
432
433 /* If the flags have changed after init, let any clients know via a change event */
434 if (s->state != PA_SOURCE_INIT && flags != s->flags)
435 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
436 }
437
438 void pa_source_set_write_volume_callback(pa_source *s, pa_source_cb_t cb) {
439 pa_source_flags_t flags;
440
441 pa_assert(s);
442 pa_assert(!cb || s->set_volume);
443
444 s->write_volume = cb;
445
446 /* Save the current flags so we can tell if they've changed */
447 flags = s->flags;
448
449 if (cb)
450 s->flags |= PA_SOURCE_DEFERRED_VOLUME;
451 else
452 s->flags &= ~PA_SOURCE_DEFERRED_VOLUME;
453
454 /* If the flags have changed after init, let any clients know via a change event */
455 if (s->state != PA_SOURCE_INIT && flags != s->flags)
456 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
457 }
458
459 void pa_source_set_get_mute_callback(pa_source *s, pa_source_cb_t cb) {
460 pa_assert(s);
461
462 s->get_mute = cb;
463 }
464
465 void pa_source_set_set_mute_callback(pa_source *s, pa_source_cb_t cb) {
466 pa_source_flags_t flags;
467
468 pa_assert(s);
469
470 s->set_mute = cb;
471
472 /* Save the current flags so we can tell if they've changed */
473 flags = s->flags;
474
475 if (cb)
476 s->flags |= PA_SOURCE_HW_MUTE_CTRL;
477 else
478 s->flags &= ~PA_SOURCE_HW_MUTE_CTRL;
479
480 /* If the flags have changed after init, let any clients know via a change event */
481 if (s->state != PA_SOURCE_INIT && flags != s->flags)
482 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
483 }
484
485 static void enable_flat_volume(pa_source *s, pa_bool_t enable) {
486 pa_source_flags_t flags;
487
488 pa_assert(s);
489
490 /* Always follow the overall user preference here */
491 enable = enable && s->core->flat_volumes;
492
493 /* Save the current flags so we can tell if they've changed */
494 flags = s->flags;
495
496 if (enable)
497 s->flags |= PA_SOURCE_FLAT_VOLUME;
498 else
499 s->flags &= ~PA_SOURCE_FLAT_VOLUME;
500
501 /* If the flags have changed after init, let any clients know via a change event */
502 if (s->state != PA_SOURCE_INIT && flags != s->flags)
503 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
504 }
505
506 void pa_source_enable_decibel_volume(pa_source *s, pa_bool_t enable) {
507 pa_source_flags_t flags;
508
509 pa_assert(s);
510
511 /* Save the current flags so we can tell if they've changed */
512 flags = s->flags;
513
514 if (enable) {
515 s->flags |= PA_SOURCE_DECIBEL_VOLUME;
516 enable_flat_volume(s, TRUE);
517 } else {
518 s->flags &= ~PA_SOURCE_DECIBEL_VOLUME;
519 enable_flat_volume(s, FALSE);
520 }
521
522 /* If the flags have changed after init, let any clients know via a change event */
523 if (s->state != PA_SOURCE_INIT && flags != s->flags)
524 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
525 }
526
527 /* Called from main context */
528 void pa_source_put(pa_source *s) {
529 pa_source_assert_ref(s);
530 pa_assert_ctl_context();
531
532 pa_assert(s->state == PA_SOURCE_INIT);
533 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) || s->output_from_master);
534
535 /* The following fields must be initialized properly when calling _put() */
536 pa_assert(s->asyncmsgq);
537 pa_assert(s->thread_info.min_latency <= s->thread_info.max_latency);
538
539 /* Generally, flags should be initialized via pa_source_new(). As a
540 * special exception we allow some volume related flags to be set
541 * between _new() and _put() by the callback setter functions above.
542 *
543 * Thus we implement a couple of safeguards here which ensure the above
544 * setters were used (or at least the implementor made manual changes
545 * in a compatible way).
546 *
547 * Note: All of these flags set here can change over the life time
548 * of the source. */
549 pa_assert(!(s->flags & PA_SOURCE_HW_VOLUME_CTRL) || s->set_volume);
550 pa_assert(!(s->flags & PA_SOURCE_DEFERRED_VOLUME) || s->write_volume);
551 pa_assert(!(s->flags & PA_SOURCE_HW_MUTE_CTRL) || s->set_mute);
552
553 /* XXX: Currently decibel volume is disabled for all sources that use volume
554 * sharing. When the master source supports decibel volume, it would be good
555 * to have the flag also in the filter source, but currently we don't do that
556 * so that the flags of the filter source never change when it's moved from
557 * a master source to another. One solution for this problem would be to
558 * remove user-visible volume altogether from filter sources when volume
559 * sharing is used, but the current approach was easier to implement... */
560 /* We always support decibel volumes in software, otherwise we leave it to
561 * the source implementor to set this flag as needed.
562 *
563 * Note: This flag can also change over the life time of the source. */
564 if (!(s->flags & PA_SOURCE_HW_VOLUME_CTRL) && !(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
565 pa_source_enable_decibel_volume(s, TRUE);
566
567 /* If the source implementor supports dB volumes by itself, we should always
568 * try to enable flat volumes too */
569 if ((s->flags & PA_SOURCE_DECIBEL_VOLUME))
570 enable_flat_volume(s, TRUE);
571
572 if (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) {
573 pa_source *root_source = pa_source_get_master(s);
574
575 pa_assert(PA_LIKELY(root_source));
576
577 s->reference_volume = root_source->reference_volume;
578 pa_cvolume_remap(&s->reference_volume, &root_source->channel_map, &s->channel_map);
579
580 s->real_volume = root_source->real_volume;
581 pa_cvolume_remap(&s->real_volume, &root_source->channel_map, &s->channel_map);
582 } else
583 /* We assume that if the source implementor changed the default
584 * volume he did so in real_volume, because that is the usual
585 * place where he is supposed to place his changes. */
586 s->reference_volume = s->real_volume;
587
588 s->thread_info.soft_volume = s->soft_volume;
589 s->thread_info.soft_muted = s->muted;
590 pa_sw_cvolume_multiply(&s->thread_info.current_hw_volume, &s->soft_volume, &s->real_volume);
591
592 pa_assert((s->flags & PA_SOURCE_HW_VOLUME_CTRL)
593 || (s->base_volume == PA_VOLUME_NORM
594 && ((s->flags & PA_SOURCE_DECIBEL_VOLUME || (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)))));
595 pa_assert(!(s->flags & PA_SOURCE_DECIBEL_VOLUME) || s->n_volume_steps == PA_VOLUME_NORM+1);
596 pa_assert(!(s->flags & PA_SOURCE_DYNAMIC_LATENCY) == (s->thread_info.fixed_latency != 0));
597
598 if (s->suspend_cause)
599 pa_assert_se(source_set_state(s, PA_SOURCE_SUSPENDED) == 0);
600 else
601 pa_assert_se(source_set_state(s, PA_SOURCE_IDLE) == 0);
602
603 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_NEW, s->index);
604 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PUT], s);
605 }
606
607 /* Called from main context */
608 void pa_source_unlink(pa_source *s) {
609 pa_bool_t linked;
610 pa_source_output *o, *j = NULL;
611
612 pa_assert(s);
613 pa_assert_ctl_context();
614
615 /* See pa_sink_unlink() for a couple of comments on how this function
616 * works. */
617
618 linked = PA_SOURCE_IS_LINKED(s->state);
619
620 if (linked)
621 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_UNLINK], s);
622
623 if (s->state != PA_SOURCE_UNLINKED)
624 pa_namereg_unregister(s->core, s->name);
625 pa_idxset_remove_by_data(s->core->sources, s, NULL);
626
627 if (s->card)
628 pa_idxset_remove_by_data(s->card->sources, s, NULL);
629
630 while ((o = pa_idxset_first(s->outputs, NULL))) {
631 pa_assert(o != j);
632 pa_source_output_kill(o);
633 j = o;
634 }
635
636 if (linked)
637 source_set_state(s, PA_SOURCE_UNLINKED);
638 else
639 s->state = PA_SOURCE_UNLINKED;
640
641 reset_callbacks(s);
642
643 if (linked) {
644 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_REMOVE, s->index);
645 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_UNLINK_POST], s);
646 }
647 }
648
649 /* Called from main context */
650 static void source_free(pa_object *o) {
651 pa_source_output *so;
652 pa_source *s = PA_SOURCE(o);
653
654 pa_assert(s);
655 pa_assert_ctl_context();
656 pa_assert(pa_source_refcnt(s) == 0);
657
658 if (PA_SOURCE_IS_LINKED(s->state))
659 pa_source_unlink(s);
660
661 pa_log_info("Freeing source %u \"%s\"", s->index, s->name);
662
663 pa_idxset_free(s->outputs, NULL, NULL);
664
665 while ((so = pa_hashmap_steal_first(s->thread_info.outputs)))
666 pa_source_output_unref(so);
667
668 pa_hashmap_free(s->thread_info.outputs, NULL, NULL);
669
670 if (s->silence.memblock)
671 pa_memblock_unref(s->silence.memblock);
672
673 pa_xfree(s->name);
674 pa_xfree(s->driver);
675
676 if (s->proplist)
677 pa_proplist_free(s->proplist);
678
679 if (s->ports)
680 pa_device_port_hashmap_free(s->ports);
681
682 pa_xfree(s);
683 }
684
685 /* Called from main context, and not while the IO thread is active, please */
686 void pa_source_set_asyncmsgq(pa_source *s, pa_asyncmsgq *q) {
687 pa_source_assert_ref(s);
688 pa_assert_ctl_context();
689
690 s->asyncmsgq = q;
691 }
692
693 /* Called from main context, and not while the IO thread is active, please */
694 void pa_source_update_flags(pa_source *s, pa_source_flags_t mask, pa_source_flags_t value) {
695 pa_source_assert_ref(s);
696 pa_assert_ctl_context();
697
698 if (mask == 0)
699 return;
700
701 /* For now, allow only a minimal set of flags to be changed. */
702 pa_assert((mask & ~(PA_SOURCE_DYNAMIC_LATENCY|PA_SOURCE_LATENCY)) == 0);
703
704 s->flags = (s->flags & ~mask) | (value & mask);
705 }
706
707 /* Called from IO context, or before _put() from main context */
708 void pa_source_set_rtpoll(pa_source *s, pa_rtpoll *p) {
709 pa_source_assert_ref(s);
710 pa_source_assert_io_context(s);
711
712 s->thread_info.rtpoll = p;
713 }
714
715 /* Called from main context */
716 int pa_source_update_status(pa_source*s) {
717 pa_source_assert_ref(s);
718 pa_assert_ctl_context();
719 pa_assert(PA_SOURCE_IS_LINKED(s->state));
720
721 if (s->state == PA_SOURCE_SUSPENDED)
722 return 0;
723
724 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
725 }
726
727 /* Called from any context - must be threadsafe */
728 void pa_source_set_mixer_dirty(pa_source *s, pa_bool_t is_dirty)
729 {
730 pa_atomic_store(&s->mixer_dirty, is_dirty ? 1 : 0);
731 }
732
733 /* Called from main context */
734 int pa_source_suspend(pa_source *s, pa_bool_t suspend, pa_suspend_cause_t cause) {
735 pa_source_assert_ref(s);
736 pa_assert_ctl_context();
737 pa_assert(PA_SOURCE_IS_LINKED(s->state));
738 pa_assert(cause != 0);
739
740 if (s->monitor_of && cause != PA_SUSPEND_PASSTHROUGH)
741 return -PA_ERR_NOTSUPPORTED;
742
743 if (suspend)
744 s->suspend_cause |= cause;
745 else
746 s->suspend_cause &= ~cause;
747
748 if (!(s->suspend_cause & PA_SUSPEND_SESSION) && (pa_atomic_load(&s->mixer_dirty) != 0)) {
749 /* This might look racy but isn't: If somebody sets mixer_dirty exactly here,
750 it'll be handled just fine. */
751 pa_source_set_mixer_dirty(s, FALSE);
752 pa_log_debug("Mixer is now accessible. Updating alsa mixer settings.");
753 if (s->active_port && s->set_port) {
754 if (s->flags & PA_SOURCE_DEFERRED_VOLUME) {
755 struct source_message_set_port msg = { .port = s->active_port, .ret = 0 };
756 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_PORT, &msg, 0, NULL) == 0);
757 }
758 else
759 s->set_port(s, s->active_port);
760 }
761 else {
762 if (s->set_mute)
763 s->set_mute(s);
764 if (s->set_volume)
765 s->set_volume(s);
766 }
767 }
768
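/* Nothing to do if the current state already reflects whether any
 * suspend cause is set. */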
769 if ((pa_source_get_state(s) == PA_SOURCE_SUSPENDED) == !!s->suspend_cause)
770 return 0;
771
772 pa_log_debug("Suspend cause of source %s is 0x%04x, %s", s->name, s->suspend_cause, s->suspend_cause ? "suspending" : "resuming");
773
774 if (s->suspend_cause)
775 return source_set_state(s, PA_SOURCE_SUSPENDED);
776 else
777 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
778 }
779
780 /* Called from main context */
781 int pa_source_sync_suspend(pa_source *s) {
782 pa_sink_state_t state;
783
784 pa_source_assert_ref(s);
785 pa_assert_ctl_context();
786 pa_assert(PA_SOURCE_IS_LINKED(s->state));
787 pa_assert(s->monitor_of);
788
789 state = pa_sink_get_state(s->monitor_of);
790
791 if (state == PA_SINK_SUSPENDED)
792 return source_set_state(s, PA_SOURCE_SUSPENDED);
793
794 pa_assert(PA_SINK_IS_OPENED(state));
795
796 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
797 }
798
799 /* Called from main context */
800 pa_queue *pa_source_move_all_start(pa_source *s, pa_queue *q) {
801 pa_source_output *o, *n;
802 uint32_t idx;
803
804 pa_source_assert_ref(s);
805 pa_assert_ctl_context();
806 pa_assert(PA_SOURCE_IS_LINKED(s->state));
807
808 if (!q)
809 q = pa_queue_new();
810
811 for (o = PA_SOURCE_OUTPUT(pa_idxset_first(s->outputs, &idx)); o; o = n) {
812 n = PA_SOURCE_OUTPUT(pa_idxset_next(s->outputs, &idx));
813
814 pa_source_output_ref(o);
815
816 if (pa_source_output_start_move(o) >= 0)
817 pa_queue_push(q, o);
818 else
819 pa_source_output_unref(o);
820 }
821
822 return q;
823 }
824
825 /* Called from main context */
826 void pa_source_move_all_finish(pa_source *s, pa_queue *q, pa_bool_t save) {
827 pa_source_output *o;
828
829 pa_source_assert_ref(s);
830 pa_assert_ctl_context();
831 pa_assert(PA_SOURCE_IS_LINKED(s->state));
832 pa_assert(q);
833
834 while ((o = PA_SOURCE_OUTPUT(pa_queue_pop(q)))) {
835 if (pa_source_output_finish_move(o, s, save) < 0)
836 pa_source_output_fail_move(o);
837
838 pa_source_output_unref(o);
839 }
840
841 pa_queue_free(q, NULL);
842 }
843
844 /* Called from main context */
845 void pa_source_move_all_fail(pa_queue *q) {
846 pa_source_output *o;
847
848 pa_assert_ctl_context();
849 pa_assert(q);
850
851 while ((o = PA_SOURCE_OUTPUT(pa_queue_pop(q)))) {
852 pa_source_output_fail_move(o);
853 pa_source_output_unref(o);
854 }
855
856 pa_queue_free(q, NULL);
857 }
858
859 /* Called from IO thread context */
860 void pa_source_process_rewind(pa_source *s, size_t nbytes) {
861 pa_source_output *o;
862 void *state = NULL;
863
864 pa_source_assert_ref(s);
865 pa_source_assert_io_context(s);
866 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
867
868 if (nbytes <= 0)
869 return;
870
871 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
872 return;
873
874 pa_log_debug("Processing rewind...");
875
876 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
877 pa_source_output_assert_ref(o);
878 pa_source_output_process_rewind(o, nbytes);
879 }
880 }
881
882 /* Called from IO thread context */
883 void pa_source_post(pa_source*s, const pa_memchunk *chunk) {
884 pa_source_output *o;
885 void *state = NULL;
886
887 pa_source_assert_ref(s);
888 pa_source_assert_io_context(s);
889 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
890 pa_assert(chunk);
891
892 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
893 return;
894
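/* If software mute or a non-neutral software volume is active, apply it
 * to a writable copy of the chunk before pushing it to the outputs. */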
895 if (s->thread_info.soft_muted || !pa_cvolume_is_norm(&s->thread_info.soft_volume)) {
896 pa_memchunk vchunk = *chunk;
897
898 pa_memblock_ref(vchunk.memblock);
899 pa_memchunk_make_writable(&vchunk, 0);
900
901 if (s->thread_info.soft_muted || pa_cvolume_is_muted(&s->thread_info.soft_volume))
902 pa_silence_memchunk(&vchunk, &s->sample_spec);
903 else
904 pa_volume_memchunk(&vchunk, &s->sample_spec, &s->thread_info.soft_volume);
905
906 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL))) {
907 pa_source_output_assert_ref(o);
908
909 if (!o->thread_info.direct_on_input)
910 pa_source_output_push(o, &vchunk);
911 }
912
913 pa_memblock_unref(vchunk.memblock);
914 } else {
915
916 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL))) {
917 pa_source_output_assert_ref(o);
918
919 if (!o->thread_info.direct_on_input)
920 pa_source_output_push(o, chunk);
921 }
922 }
923 }
924
925 /* Called from IO thread context */
926 void pa_source_post_direct(pa_source*s, pa_source_output *o, const pa_memchunk *chunk) {
927 pa_source_assert_ref(s);
928 pa_source_assert_io_context(s);
929 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
930 pa_source_output_assert_ref(o);
931 pa_assert(o->thread_info.direct_on_input);
932 pa_assert(chunk);
933
934 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
935 return;
936
937 if (s->thread_info.soft_muted || !pa_cvolume_is_norm(&s->thread_info.soft_volume)) {
938 pa_memchunk vchunk = *chunk;
939
940 pa_memblock_ref(vchunk.memblock);
941 pa_memchunk_make_writable(&vchunk, 0);
942
943 if (s->thread_info.soft_muted || pa_cvolume_is_muted(&s->thread_info.soft_volume))
944 pa_silence_memchunk(&vchunk, &s->sample_spec);
945 else
946 pa_volume_memchunk(&vchunk, &s->sample_spec, &s->thread_info.soft_volume);
947
948 pa_source_output_push(o, &vchunk);
949
950 pa_memblock_unref(vchunk.memblock);
951 } else
952 pa_source_output_push(o, chunk);
953 }
954
955 /* Called from main thread */
956 pa_bool_t pa_source_update_rate(pa_source *s, uint32_t rate, pa_bool_t passthrough)
957 {
958 if (s->update_rate) {
959 uint32_t desired_rate = rate;
960 uint32_t default_rate = s->default_sample_rate;
961 uint32_t alternate_rate = s->alternate_sample_rate;
962 uint32_t idx;
963 pa_source_output *o;
964 pa_bool_t use_alternate = FALSE;
965
966 if (PA_UNLIKELY(default_rate == alternate_rate)) {
967 pa_log_warn("Default and alternate sample rates are the same.");
968 return FALSE;
969 }
970
971 if (PA_SOURCE_IS_RUNNING(s->state)) {
972 pa_log_info("Cannot update rate, SOURCE_IS_RUNNING, will keep using %u Hz",
973 s->sample_spec.rate);
974 return FALSE;
975 }
976
977 if (PA_UNLIKELY (desired_rate < 8000 ||
978 desired_rate > PA_RATE_MAX))
979 return FALSE;
980
981 if (!passthrough) {
982 pa_assert(default_rate % 4000 || default_rate % 11025);
983 pa_assert(alternate_rate % 4000 || alternate_rate % 11025);
984
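/* For example, with a default rate of 44100 Hz (an 11025 multiple) and an
 * alternate rate of 48000 Hz, a desired rate of 48000 Hz selects the
 * alternate rate, while a desired rate of 22050 Hz keeps the default. */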
985 if (default_rate % 4000) {
986 /* default is an 11025 multiple */
987 if ((alternate_rate % 4000 == 0) && (desired_rate % 4000 == 0))
988 use_alternate=TRUE;
989 } else {
990 /* default is a 4000 multiple */
991 if ((alternate_rate % 11025 == 0) && (desired_rate % 11025 == 0))
992 use_alternate=TRUE;
993 }
994
995 if (use_alternate)
996 desired_rate = alternate_rate;
997 else
998 desired_rate = default_rate;
999 } else {
1000 desired_rate = rate; /* use stream sampling rate, discard default/alternate settings */
1001 }
1002
1003 if (desired_rate == s->sample_spec.rate)
1004 return FALSE;
1005
1006 if (!passthrough && pa_source_used_by(s) > 0)
1007 return FALSE;
1008
1009 pa_log_debug("Suspending source %s due to changing the sample rate.", s->name);
1010 pa_source_suspend(s, TRUE, PA_SUSPEND_IDLE); /* needed before rate update, will be resumed automatically */
1011
1012 if (s->update_rate(s, desired_rate) == TRUE) {
1013 pa_log_info("Changed sampling rate successfully");
1014
1015 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1016 if (o->state == PA_SOURCE_OUTPUT_CORKED)
1017 pa_source_output_update_rate(o);
1018 }
1019 return TRUE;
1020 }
1021 }
1022 return FALSE;
1023 }
1024
1025 /* Called from main thread */
1026 pa_usec_t pa_source_get_latency(pa_source *s) {
1027 pa_usec_t usec;
1028
1029 pa_source_assert_ref(s);
1030 pa_assert_ctl_context();
1031 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1032
1033 if (s->state == PA_SOURCE_SUSPENDED)
1034 return 0;
1035
1036 if (!(s->flags & PA_SOURCE_LATENCY))
1037 return 0;
1038
1039 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_LATENCY, &usec, 0, NULL) == 0);
1040
1041 /* usec is unsigned, so check that the offset can be added to usec without
1042 * underflowing. */
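/* For example, usec = 5000 with a latency offset of -10000 would wrap
 * around, so usec is clamped to 0 instead of adding the offset. */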
1043 if (-s->latency_offset <= (int64_t) usec)
1044 usec += s->latency_offset;
1045 else
1046 usec = 0;
1047
1048 return usec;
1049 }
1050
1051 /* Called from IO thread */
1052 pa_usec_t pa_source_get_latency_within_thread(pa_source *s) {
1053 pa_usec_t usec = 0;
1054 pa_msgobject *o;
1055
1056 pa_source_assert_ref(s);
1057 pa_source_assert_io_context(s);
1058 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
1059
1060 /* The returned value is supposed to be in the time domain of the sound card! */
1061
1062 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
1063 return 0;
1064
1065 if (!(s->flags & PA_SOURCE_LATENCY))
1066 return 0;
1067
1068 o = PA_MSGOBJECT(s);
1069
1070 /* FIXME: We probably should make this a proper vtable callback instead of going through process_msg() */
1071
1072 if (o->process_msg(o, PA_SOURCE_MESSAGE_GET_LATENCY, &usec, 0, NULL) < 0)
1073 return -1;
1074
1075 /* usec is unsigned, so check that the offset can be added to usec without
1076 * underflowing. */
1077 if (-s->thread_info.latency_offset <= (int64_t) usec)
1078 usec += s->thread_info.latency_offset;
1079 else
1080 usec = 0;
1081
1082 return usec;
1083 }
1084
1085 /* Called from the main thread (and also from the IO thread while the main
1086 * thread is waiting).
1087 *
1088 * When a source uses volume sharing, it never has the PA_SOURCE_FLAT_VOLUME flag
1089 * set. Instead, flat volume mode is detected by checking whether the root source
1090 * has the flag set. */
1091 pa_bool_t pa_source_flat_volume_enabled(pa_source *s) {
1092 pa_source_assert_ref(s);
1093
1094 s = pa_source_get_master(s);
1095
1096 if (PA_LIKELY(s))
1097 return (s->flags & PA_SOURCE_FLAT_VOLUME);
1098 else
1099 return FALSE;
1100 }
1101
1102 /* Called from the main thread (and also from the IO thread while the main
1103 * thread is waiting). */
1104 pa_source *pa_source_get_master(pa_source *s) {
1105 pa_source_assert_ref(s);
1106
1107 while (s && (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1108 if (PA_UNLIKELY(!s->output_from_master))
1109 return NULL;
1110
1111 s = s->output_from_master->source;
1112 }
1113
1114 return s;
1115 }
1116
1117 /* Called from main context */
1118 pa_bool_t pa_source_is_passthrough(pa_source *s) {
1119
1120 pa_source_assert_ref(s);
1121
1122 /* NB Currently only monitor sources support passthrough mode */
1123 return (s->monitor_of && pa_sink_is_passthrough(s->monitor_of));
1124 }
1125
1126 /* Called from main context */
1127 void pa_source_enter_passthrough(pa_source *s) {
1128 pa_cvolume volume;
1129
1130 /* set the volume to NORM */
1131 s->saved_volume = *pa_source_get_volume(s, TRUE);
1132 s->saved_save_volume = s->save_volume;
1133
1134 pa_cvolume_set(&volume, s->sample_spec.channels, PA_MIN(s->base_volume, PA_VOLUME_NORM));
1135 pa_source_set_volume(s, &volume, TRUE, FALSE);
1136 }
1137
1138 /* Called from main context */
1139 void pa_source_leave_passthrough(pa_source *s) {
1140 /* Restore source volume to what it was before we entered passthrough mode */
1141 pa_source_set_volume(s, &s->saved_volume, TRUE, s->saved_save_volume);
1142
1143 pa_cvolume_init(&s->saved_volume);
1144 s->saved_save_volume = FALSE;
1145 }
1146
1147 /* Called from main context. */
1148 static void compute_reference_ratio(pa_source_output *o) {
1149 unsigned c = 0;
1150 pa_cvolume remapped;
1151
1152 pa_assert(o);
1153 pa_assert(pa_source_flat_volume_enabled(o->source));
1154
1155 /*
1156 * Calculates the reference ratio from the source's reference
1157 * volume. This basically calculates:
1158 *
1159 * o->reference_ratio = o->volume / o->source->reference_volume
1160 */
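/* For example, an output volume of -10 dB on a source whose reference
 * volume sits at -4 dB yields a reference ratio of roughly -6 dB. */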
1161
1162 remapped = o->source->reference_volume;
1163 pa_cvolume_remap(&remapped, &o->source->channel_map, &o->channel_map);
1164
1165 o->reference_ratio.channels = o->sample_spec.channels;
1166
1167 for (c = 0; c < o->sample_spec.channels; c++) {
1168
1169 /* We don't update when the source volume is 0 anyway */
1170 if (remapped.values[c] <= PA_VOLUME_MUTED)
1171 continue;
1172
1173 /* Don't update the reference ratio unless necessary */
1174 if (pa_sw_volume_multiply(
1175 o->reference_ratio.values[c],
1176 remapped.values[c]) == o->volume.values[c])
1177 continue;
1178
1179 o->reference_ratio.values[c] = pa_sw_volume_divide(
1180 o->volume.values[c],
1181 remapped.values[c]);
1182 }
1183 }
1184
1185 /* Called from main context. Only called for the root source in volume sharing
1186 * cases, except for internal recursive calls. */
1187 static void compute_reference_ratios(pa_source *s) {
1188 uint32_t idx;
1189 pa_source_output *o;
1190
1191 pa_source_assert_ref(s);
1192 pa_assert_ctl_context();
1193 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1194 pa_assert(pa_source_flat_volume_enabled(s));
1195
1196 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1197 compute_reference_ratio(o);
1198
1199 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1200 compute_reference_ratios(o->destination_source);
1201 }
1202 }
1203
1204 /* Called from main context. Only called for the root source in volume sharing
1205 * cases, except for internal recursive calls. */
1206 static void compute_real_ratios(pa_source *s) {
1207 pa_source_output *o;
1208 uint32_t idx;
1209
1210 pa_source_assert_ref(s);
1211 pa_assert_ctl_context();
1212 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1213 pa_assert(pa_source_flat_volume_enabled(s));
1214
1215 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1216 unsigned c;
1217 pa_cvolume remapped;
1218
1219 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1220 /* The origin source uses volume sharing, so this output's real ratio
1221 * is handled as a special case - the real ratio must be 0 dB, and
1222 * as a result o->soft_volume must equal o->volume_factor. */
1223 pa_cvolume_reset(&o->real_ratio, o->real_ratio.channels);
1224 o->soft_volume = o->volume_factor;
1225
1226 compute_real_ratios(o->destination_source);
1227
1228 continue;
1229 }
1230
1231 /*
1232 * This basically calculates:
1233 *
1234 * o->real_ratio := o->volume / s->real_volume
1235 * o->soft_volume := o->real_ratio * o->volume_factor
1236 */
1237
1238 remapped = s->real_volume;
1239 pa_cvolume_remap(&remapped, &s->channel_map, &o->channel_map);
1240
1241 o->real_ratio.channels = o->sample_spec.channels;
1242 o->soft_volume.channels = o->sample_spec.channels;
1243
1244 for (c = 0; c < o->sample_spec.channels; c++) {
1245
1246 if (remapped.values[c] <= PA_VOLUME_MUTED) {
1247 /* We leave o->real_ratio untouched */
1248 o->soft_volume.values[c] = PA_VOLUME_MUTED;
1249 continue;
1250 }
1251
1252 /* Don't lose accuracy unless necessary */
1253 if (pa_sw_volume_multiply(
1254 o->real_ratio.values[c],
1255 remapped.values[c]) != o->volume.values[c])
1256
1257 o->real_ratio.values[c] = pa_sw_volume_divide(
1258 o->volume.values[c],
1259 remapped.values[c]);
1260
1261 o->soft_volume.values[c] = pa_sw_volume_multiply(
1262 o->real_ratio.values[c],
1263 o->volume_factor.values[c]);
1264 }
1265
1266 /* We don't copy the soft_volume to the thread_info data
1267 * here. That must be done by the caller */
1268 }
1269 }
1270
1271 static pa_cvolume *cvolume_remap_minimal_impact(
1272 pa_cvolume *v,
1273 const pa_cvolume *template,
1274 const pa_channel_map *from,
1275 const pa_channel_map *to) {
1276
1277 pa_cvolume t;
1278
1279 pa_assert(v);
1280 pa_assert(template);
1281 pa_assert(from);
1282 pa_assert(to);
1283 pa_assert(pa_cvolume_compatible_with_channel_map(v, from));
1284 pa_assert(pa_cvolume_compatible_with_channel_map(template, to));
1285
1286 /* Much like pa_cvolume_remap(), but tries to minimize impact when
1287 * mapping from source output to source volumes:
1288 *
1289 * If template is a possible remapping from v it is used instead
1290 * of remapping anew.
1291 *
1292 * If the channel maps don't match we set an all-channel volume on
1293 * the source to ensure that changing a volume on one stream has no
1294 * effect that cannot be compensated for in another stream that
1295 * does not have the same channel map as the source. */
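/* For example, when mapping a stereo volume of { -6 dB, 0 dB } onto a
 * mono source (and the template does not already correspond to it), the
 * result is a single-channel volume of 0 dB, i.e. the per-channel maximum. */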
1296
1297 if (pa_channel_map_equal(from, to))
1298 return v;
1299
1300 t = *template;
1301 if (pa_cvolume_equal(pa_cvolume_remap(&t, to, from), v)) {
1302 *v = *template;
1303 return v;
1304 }
1305
1306 pa_cvolume_set(v, to->channels, pa_cvolume_max(v));
1307 return v;
1308 }
1309
1310 /* Called from main thread. Only called for the root source in volume sharing
1311 * cases, except for internal recursive calls. */
1312 static void get_maximum_output_volume(pa_source *s, pa_cvolume *max_volume, const pa_channel_map *channel_map) {
1313 pa_source_output *o;
1314 uint32_t idx;
1315
1316 pa_source_assert_ref(s);
1317 pa_assert(max_volume);
1318 pa_assert(channel_map);
1319 pa_assert(pa_source_flat_volume_enabled(s));
1320
1321 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1322 pa_cvolume remapped;
1323
1324 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1325 get_maximum_output_volume(o->destination_source, max_volume, channel_map);
1326
1327 /* Ignore this output. The origin source uses volume sharing, so this
1328 * output's volume will be set to be equal to the root source's real
1329 * volume. Obviously this output's current volume must not then
1330 * affect what the root source's real volume will be. */
1331 continue;
1332 }
1333
1334 remapped = o->volume;
1335 cvolume_remap_minimal_impact(&remapped, max_volume, &o->channel_map, channel_map);
1336 pa_cvolume_merge(max_volume, max_volume, &remapped);
1337 }
1338 }
1339
1340 /* Called from main thread. Only called for the root source in volume sharing
1341 * cases, except for internal recursive calls. */
1342 static pa_bool_t has_outputs(pa_source *s) {
1343 pa_source_output *o;
1344 uint32_t idx;
1345
1346 pa_source_assert_ref(s);
1347
1348 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1349 if (!o->destination_source || !(o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) || has_outputs(o->destination_source))
1350 return TRUE;
1351 }
1352
1353 return FALSE;
1354 }
1355
1356 /* Called from main thread. Only called for the root source in volume sharing
1357 * cases, except for internal recursive calls. */
1358 static void update_real_volume(pa_source *s, const pa_cvolume *new_volume, pa_channel_map *channel_map) {
1359 pa_source_output *o;
1360 uint32_t idx;
1361
1362 pa_source_assert_ref(s);
1363 pa_assert(new_volume);
1364 pa_assert(channel_map);
1365
1366 s->real_volume = *new_volume;
1367 pa_cvolume_remap(&s->real_volume, channel_map, &s->channel_map);
1368
1369 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1370 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1371 if (pa_source_flat_volume_enabled(s)) {
1372 pa_cvolume old_volume = o->volume;
1373
1374 /* Follow the root source's real volume. */
1375 o->volume = *new_volume;
1376 pa_cvolume_remap(&o->volume, channel_map, &o->channel_map);
1377 compute_reference_ratio(o);
1378
1379 /* The volume changed, let's tell people so */
1380 if (!pa_cvolume_equal(&old_volume, &o->volume)) {
1381 if (o->volume_changed)
1382 o->volume_changed(o);
1383
1384 pa_subscription_post(o->core, PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT|PA_SUBSCRIPTION_EVENT_CHANGE, o->index);
1385 }
1386 }
1387
1388 update_real_volume(o->destination_source, new_volume, channel_map);
1389 }
1390 }
1391 }
1392
1393 /* Called from main thread. Only called for the root source in shared volume
1394 * cases. */
1395 static void compute_real_volume(pa_source *s) {
1396 pa_source_assert_ref(s);
1397 pa_assert_ctl_context();
1398 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1399 pa_assert(pa_source_flat_volume_enabled(s));
1400 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1401
1402 /* This determines the maximum volume of all streams and sets
1403 * s->real_volume accordingly. */
1404
1405 if (!has_outputs(s)) {
1406 /* In the special case that we have no source outputs we leave the
1407 * volume unmodified. */
1408 update_real_volume(s, &s->reference_volume, &s->channel_map);
1409 return;
1410 }
1411
1412 pa_cvolume_mute(&s->real_volume, s->channel_map.channels);
1413
1414 /* First let's determine the new maximum volume of all outputs
1415 * connected to this source */
1416 get_maximum_output_volume(s, &s->real_volume, &s->channel_map);
1417 update_real_volume(s, &s->real_volume, &s->channel_map);
1418
1419 /* Then, let's update the real ratios/soft volumes of all outputs
1420 * connected to this source */
1421 compute_real_ratios(s);
1422 }
1423
1424 /* Called from main thread. Only called for the root source in shared volume
1425 * cases, except for internal recursive calls. */
1426 static void propagate_reference_volume(pa_source *s) {
1427 pa_source_output *o;
1428 uint32_t idx;
1429
1430 pa_source_assert_ref(s);
1431 pa_assert_ctl_context();
1432 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1433 pa_assert(pa_source_flat_volume_enabled(s));
1434
1435 /* This is called whenever the source volume changes for a reason other
1436 * than a source output volume change. We need to fix up the
1437 * source output volumes accordingly. */
1438
1439 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1440 pa_cvolume old_volume;
1441
1442 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1443 propagate_reference_volume(o->destination_source);
1444
1445 /* Since the origin source uses volume sharing, this output's volume
1446 * needs to be updated to match the root source's real volume, but
1447 * that will be done later in update_real_volume(). */
1448 continue;
1449 }
1450
1451 old_volume = o->volume;
1452
1453 /* This basically calculates:
1454 *
1455 * o->volume := o->reference_volume * o->reference_ratio */
1456
1457 o->volume = s->reference_volume;
1458 pa_cvolume_remap(&o->volume, &s->channel_map, &o->channel_map);
1459 pa_sw_cvolume_multiply(&o->volume, &o->volume, &o->reference_ratio);
1460
1461 /* The volume changed, let's tell people so */
1462 if (!pa_cvolume_equal(&old_volume, &o->volume)) {
1463
1464 if (o->volume_changed)
1465 o->volume_changed(o);
1466
1467 pa_subscription_post(o->core, PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT|PA_SUBSCRIPTION_EVENT_CHANGE, o->index);
1468 }
1469 }
1470 }
1471
1472 /* Called from main thread. Only called for the root source in volume sharing
1473 * cases, except for internal recursive calls. The return value indicates
1474 * whether any reference volume actually changed. */
1475 static pa_bool_t update_reference_volume(pa_source *s, const pa_cvolume *v, const pa_channel_map *channel_map, pa_bool_t save) {
1476 pa_cvolume volume;
1477 pa_bool_t reference_volume_changed;
1478 pa_source_output *o;
1479 uint32_t idx;
1480
1481 pa_source_assert_ref(s);
1482 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1483 pa_assert(v);
1484 pa_assert(channel_map);
1485 pa_assert(pa_cvolume_valid(v));
1486
1487 volume = *v;
1488 pa_cvolume_remap(&volume, channel_map, &s->channel_map);
1489
1490 reference_volume_changed = !pa_cvolume_equal(&volume, &s->reference_volume);
1491 s->reference_volume = volume;
1492
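/* Keep the old save flag if the reference volume did not actually change;
 * in any case an explicit save request from the caller is honoured. */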
1493 s->save_volume = (!reference_volume_changed && s->save_volume) || save;
1494
1495 if (reference_volume_changed)
1496 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1497 else if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1498 /* If the root source's volume doesn't change, then there can't be any
1499 * changes in the other sources in the source tree either.
1500 *
1501 * It's probably theoretically possible that even if the root source's
1502 * volume changes slightly, some filter source doesn't change its volume
1503 * due to rounding errors. If that happens, we still want to propagate
1504 * the changed root source volume to the sources connected to the
1505 * intermediate source that didn't change its volume. This theoretical
1506 * possibility is the reason why we have that !(s->flags &
1507 * PA_SOURCE_SHARE_VOLUME_WITH_MASTER) condition. Probably nobody would
1508 * notice even if we always returned FALSE here when
1509 * reference_volume_changed is FALSE. */
1510 return FALSE;
1511
1512 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1513 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1514 update_reference_volume(o->destination_source, v, channel_map, FALSE);
1515 }
1516
1517 return TRUE;
1518 }
1519
1520 /* Called from main thread */
1521 void pa_source_set_volume(
1522 pa_source *s,
1523 const pa_cvolume *volume,
1524 pa_bool_t send_msg,
1525 pa_bool_t save) {
1526
1527 pa_cvolume new_reference_volume;
1528 pa_source *root_source;
1529
1530 pa_source_assert_ref(s);
1531 pa_assert_ctl_context();
1532 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1533 pa_assert(!volume || pa_cvolume_valid(volume));
1534 pa_assert(volume || pa_source_flat_volume_enabled(s));
1535 pa_assert(!volume || volume->channels == 1 || pa_cvolume_compatible(volume, &s->sample_spec));
1536
1537 /* make sure we don't change the volume in PASSTHROUGH mode ...
1538 * ... *except* if we're being invoked to reset the volume to ensure 0 dB gain */
1539 if (pa_source_is_passthrough(s) && (!volume || !pa_cvolume_is_norm(volume))) {
1540 pa_log_warn("Cannot change volume, source is monitor of a PASSTHROUGH sink");
1541 return;
1542 }
1543
1544 /* In case of volume sharing, the volume is set for the root source first,
1545 * from which it's then propagated to the sharing sources. */
1546 root_source = pa_source_get_master(s);
1547
1548 if (PA_UNLIKELY(!root_source))
1549 return;
1550
1551 /* As a special exception we accept mono volumes on all sources --
1552 * even on those with more complex channel maps */
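/* (A mono volume of, say, -6 dB on a stereo source scales the existing
 * reference volume so that its loudest channel ends up at -6 dB; see the
 * pa_cvolume_scale() call below.) */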
1553
1554 if (volume) {
1555 if (pa_cvolume_compatible(volume, &s->sample_spec))
1556 new_reference_volume = *volume;
1557 else {
1558 new_reference_volume = s->reference_volume;
1559 pa_cvolume_scale(&new_reference_volume, pa_cvolume_max(volume));
1560 }
1561
1562 pa_cvolume_remap(&new_reference_volume, &s->channel_map, &root_source->channel_map);
1563
1564 if (update_reference_volume(root_source, &new_reference_volume, &root_source->channel_map, save)) {
1565 if (pa_source_flat_volume_enabled(root_source)) {
1566 /* OK, propagate this volume change back to the outputs */
1567 propagate_reference_volume(root_source);
1568
1569 /* And now recalculate the real volume */
1570 compute_real_volume(root_source);
1571 } else
1572 update_real_volume(root_source, &root_source->reference_volume, &root_source->channel_map);
1573 }
1574
1575 } else {
1576 /* If volume is NULL we synchronize the source's real and
1577 * reference volumes with the stream volumes. */
1578
1579 pa_assert(pa_source_flat_volume_enabled(root_source));
1580
1581 /* Ok, let's determine the new real volume */
1582 compute_real_volume(root_source);
1583
1584 /* Let's 'push' the reference volume if necessary */
1585 pa_cvolume_merge(&new_reference_volume, &s->reference_volume, &root_source->real_volume);
1586 /* If the source and its root don't have the same channel map, we need to remap */
1587 if (s != root_source && !pa_channel_map_equal(&s->channel_map, &root_source->channel_map))
1588 pa_cvolume_remap(&new_reference_volume, &s->channel_map, &root_source->channel_map);
1589 update_reference_volume(root_source, &new_reference_volume, &root_source->channel_map, save);
1590
1591 /* Now that the reference volume is updated, we can update the streams'
1592 * reference ratios. */
1593 compute_reference_ratios(root_source);
1594 }
1595
1596 if (root_source->set_volume) {
1597 /* If we have a function set_volume(), then we do not apply a
1598 * soft volume by default. However, set_volume() is free to
1599 * apply one to root_source->soft_volume */
1600
1601 pa_cvolume_reset(&root_source->soft_volume, root_source->sample_spec.channels);
1602 if (!(root_source->flags & PA_SOURCE_DEFERRED_VOLUME))
1603 root_source->set_volume(root_source);
1604
1605 } else
1606 /* If we have no function set_volume(), then the soft volume
1607 * becomes the real volume */
1608 root_source->soft_volume = root_source->real_volume;
1609
1610 /* This tells the source that soft volume and/or real volume changed */
1611 if (send_msg)
1612 pa_assert_se(pa_asyncmsgq_send(root_source->asyncmsgq, PA_MSGOBJECT(root_source), PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL) == 0);
1613 }
1614
1615 /* Called from the io thread if sync volume is used, otherwise from the main thread.
1616 * Only to be called by source implementor */
1617 void pa_source_set_soft_volume(pa_source *s, const pa_cvolume *volume) {
1618
1619 pa_source_assert_ref(s);
1620 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1621
1622 if (s->flags & PA_SOURCE_DEFERRED_VOLUME)
1623 pa_source_assert_io_context(s);
1624 else
1625 pa_assert_ctl_context();
1626
1627 if (!volume)
1628 pa_cvolume_reset(&s->soft_volume, s->sample_spec.channels);
1629 else
1630 s->soft_volume = *volume;
1631
1632 if (PA_SOURCE_IS_LINKED(s->state) && !(s->flags & PA_SOURCE_DEFERRED_VOLUME))
1633 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_VOLUME, NULL, 0, NULL) == 0);
1634 else
1635 s->thread_info.soft_volume = s->soft_volume;
1636 }
1637
1638 /* Called from the main thread. Only called for the root source in volume sharing
1639 * cases, except for internal recursive calls. */
1640 static void propagate_real_volume(pa_source *s, const pa_cvolume *old_real_volume) {
1641 pa_source_output *o;
1642 uint32_t idx;
1643
1644 pa_source_assert_ref(s);
1645 pa_assert(old_real_volume);
1646 pa_assert_ctl_context();
1647 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1648
1649 /* This is called when the hardware's real volume changes due to
1650 * some external event. We copy the real volume into our
1651 * reference volume and then rebuild the stream volumes based on
1652 * o->real_ratio which should stay fixed. */
1653
1654 if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1655 if (pa_cvolume_equal(old_real_volume, &s->real_volume))
1656 return;
1657
1658 /* 1. Make the real volume the reference volume */
1659 update_reference_volume(s, &s->real_volume, &s->channel_map, TRUE);
1660 }
1661
1662 if (pa_source_flat_volume_enabled(s)) {
1663
1664 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1665 pa_cvolume old_volume = o->volume;
1666
1667 /* 2. Since the source's reference and real volumes are equal
1668 * now our ratios should be too. */
1669 o->reference_ratio = o->real_ratio;
1670
1671 /* 3. Recalculate the new stream reference volume based on the
1672 * reference ratio and the source's reference volume.
1673 *
1674 * This basically calculates:
1675 *
1676 * o->volume = s->reference_volume * o->reference_ratio
1677 *
1678 * This is identical to propagate_reference_volume() */
1679 o->volume = s->reference_volume;
1680 pa_cvolume_remap(&o->volume, &s->channel_map, &o->channel_map);
1681 pa_sw_cvolume_multiply(&o->volume, &o->volume, &o->reference_ratio);
1682
1683 /* Notify if something changed */
1684 if (!pa_cvolume_equal(&old_volume, &o->volume)) {
1685
1686 if (o->volume_changed)
1687 o->volume_changed(o);
1688
1689 pa_subscription_post(o->core, PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT|PA_SUBSCRIPTION_EVENT_CHANGE, o->index);
1690 }
1691
1692 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1693 propagate_real_volume(o->destination_source, old_real_volume);
1694 }
1695 }
1696
1697 /* Something got changed in the hardware. It probably makes sense
1698 * to save changed hw settings given that hw volume changes not
1699 * triggered by PA are almost certainly done by the user. */
1700 if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1701 s->save_volume = TRUE;
1702 }
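
/* A worked example of the "reference volume * reference ratio" rebuild above:
 * pa_sw_cvolume_multiply() multiplies in the linear domain, which amounts to
 * adding dB values, so
 *
 *     pa_volume_t v = pa_sw_volume_multiply(pa_sw_volume_from_dB(-6.0),    // source reference volume
 *                                           pa_sw_volume_from_dB(-10.0));  // stream reference ratio
 *
 * yields a rebuilt stream volume of roughly -16 dB. */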
1703
1704 /* Called from io thread */
1705 void pa_source_update_volume_and_mute(pa_source *s) {
1706 pa_assert(s);
1707 pa_source_assert_io_context(s);
1708
1709 pa_asyncmsgq_post(pa_thread_mq_get()->outq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_UPDATE_VOLUME_AND_MUTE, NULL, 0, NULL, NULL);
1710 }
1711
1712 /* Called from main thread */
1713 const pa_cvolume *pa_source_get_volume(pa_source *s, pa_bool_t force_refresh) {
1714 pa_source_assert_ref(s);
1715 pa_assert_ctl_context();
1716 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1717
1718 if (s->refresh_volume || force_refresh) {
1719 struct pa_cvolume old_real_volume;
1720
1721 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1722
1723 old_real_volume = s->real_volume;
1724
1725 if (!(s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->get_volume)
1726 s->get_volume(s);
1727
1728 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_VOLUME, NULL, 0, NULL) == 0);
1729
1730 update_real_volume(s, &s->real_volume, &s->channel_map);
1731 propagate_real_volume(s, &old_real_volume);
1732 }
1733
1734 return &s->reference_volume;
1735 }
1736
1737 /* Called from main thread. In volume sharing cases, only the root source may
1738 * call this. */
1739 void pa_source_volume_changed(pa_source *s, const pa_cvolume *new_real_volume) {
1740 pa_cvolume old_real_volume;
1741
1742 pa_source_assert_ref(s);
1743 pa_assert_ctl_context();
1744 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1745 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1746
1747     /* The source implementor may call this if the volume changed, to make sure everyone is notified */
1748
1749 old_real_volume = s->real_volume;
1750 update_real_volume(s, new_real_volume, &s->channel_map);
1751 propagate_real_volume(s, &old_real_volume);
1752 }
1753
1754 /* Called from main thread */
1755 void pa_source_set_mute(pa_source *s, pa_bool_t mute, pa_bool_t save) {
1756 pa_bool_t old_muted;
1757
1758 pa_source_assert_ref(s);
1759 pa_assert_ctl_context();
1760 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1761
1762 old_muted = s->muted;
1763 s->muted = mute;
1764 s->save_muted = (old_muted == s->muted && s->save_muted) || save;
1765
1766 if (!(s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->set_mute)
1767 s->set_mute(s);
1768
1769 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MUTE, NULL, 0, NULL) == 0);
1770
1771 if (old_muted != s->muted)
1772 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1773 }
1774
1775 /* Called from main thread */
1776 pa_bool_t pa_source_get_mute(pa_source *s, pa_bool_t force_refresh) {
1777
1778 pa_source_assert_ref(s);
1779 pa_assert_ctl_context();
1780 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1781
1782 if (s->refresh_muted || force_refresh) {
1783 pa_bool_t old_muted = s->muted;
1784
1785 if (!(s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->get_mute)
1786 s->get_mute(s);
1787
1788 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_MUTE, NULL, 0, NULL) == 0);
1789
1790 if (old_muted != s->muted) {
1791 s->save_muted = TRUE;
1792
1793 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1794
1795 /* Make sure the soft mute status stays in sync */
1796 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MUTE, NULL, 0, NULL) == 0);
1797 }
1798 }
1799
1800 return s->muted;
1801 }
1802
1803 /* Called from main thread */
1804 void pa_source_mute_changed(pa_source *s, pa_bool_t new_muted) {
1805 pa_source_assert_ref(s);
1806 pa_assert_ctl_context();
1807 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1808
1809     /* The source implementor may call this if the mute state changed, to make sure everyone is notified */
1810
1811 if (s->muted == new_muted)
1812 return;
1813
1814 s->muted = new_muted;
1815 s->save_muted = TRUE;
1816
1817 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1818 }
1819
1820 /* Called from main thread */
1821 pa_bool_t pa_source_update_proplist(pa_source *s, pa_update_mode_t mode, pa_proplist *p) {
1822 pa_source_assert_ref(s);
1823 pa_assert_ctl_context();
1824
1825 if (p)
1826 pa_proplist_update(s->proplist, mode, p);
1827
1828 if (PA_SOURCE_IS_LINKED(s->state)) {
1829 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PROPLIST_CHANGED], s);
1830 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1831 }
1832
1833 return TRUE;
1834 }
1835
1836 /* Called from main thread */
1837 /* FIXME -- this should be dropped and merged into pa_source_update_proplist() */
1838 void pa_source_set_description(pa_source *s, const char *description) {
1839 const char *old;
1840 pa_source_assert_ref(s);
1841 pa_assert_ctl_context();
1842
1843 if (!description && !pa_proplist_contains(s->proplist, PA_PROP_DEVICE_DESCRIPTION))
1844 return;
1845
1846 old = pa_proplist_gets(s->proplist, PA_PROP_DEVICE_DESCRIPTION);
1847
1848 if (old && description && pa_streq(old, description))
1849 return;
1850
1851 if (description)
1852 pa_proplist_sets(s->proplist, PA_PROP_DEVICE_DESCRIPTION, description);
1853 else
1854 pa_proplist_unset(s->proplist, PA_PROP_DEVICE_DESCRIPTION);
1855
1856 if (PA_SOURCE_IS_LINKED(s->state)) {
1857 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1858 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PROPLIST_CHANGED], s);
1859 }
1860 }
1861
1862 /* Called from main thread */
1863 unsigned pa_source_linked_by(pa_source *s) {
1864 pa_source_assert_ref(s);
1865 pa_assert_ctl_context();
1866 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1867
1868 return pa_idxset_size(s->outputs);
1869 }
1870
1871 /* Called from main thread */
1872 unsigned pa_source_used_by(pa_source *s) {
1873 unsigned ret;
1874
1875 pa_source_assert_ref(s);
1876 pa_assert_ctl_context();
1877 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1878
1879 ret = pa_idxset_size(s->outputs);
1880 pa_assert(ret >= s->n_corked);
1881
1882 return ret - s->n_corked;
1883 }
1884
1885 /* Called from main thread */
1886 unsigned pa_source_check_suspend(pa_source *s) {
1887 unsigned ret;
1888 pa_source_output *o;
1889 uint32_t idx;
1890
1891 pa_source_assert_ref(s);
1892 pa_assert_ctl_context();
1893
1894 if (!PA_SOURCE_IS_LINKED(s->state))
1895 return 0;
1896
1897 ret = 0;
1898
1899 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1900 pa_source_output_state_t st;
1901
1902 st = pa_source_output_get_state(o);
1903
1904 /* We do not assert here. It is perfectly valid for a source output to
1905 * be in the INIT state (i.e. created, marked done but not yet put)
1906 * and we should not care if it's unlinked as it won't contribute
1907 * towards our busy status.
1908 */
1909 if (!PA_SOURCE_OUTPUT_IS_LINKED(st))
1910 continue;
1911
1912 if (st == PA_SOURCE_OUTPUT_CORKED)
1913 continue;
1914
1915 if (o->flags & PA_SOURCE_OUTPUT_DONT_INHIBIT_AUTO_SUSPEND)
1916 continue;
1917
1918 ret ++;
1919 }
1920
1921 return ret;
1922 }
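
/* Illustrative only: an idle-suspend policy would typically treat a return
 * value of 0 as "nothing is inhibiting auto-suspend", along the lines of
 *
 *     if (pa_source_check_suspend(s) == 0)
 *         pa_source_suspend(s, TRUE, PA_SUSPEND_IDLE);
 */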
1923
1924 /* Called from the IO thread */
1925 static void sync_output_volumes_within_thread(pa_source *s) {
1926 pa_source_output *o;
1927 void *state = NULL;
1928
1929 pa_source_assert_ref(s);
1930 pa_source_assert_io_context(s);
1931
1932 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
1933 if (pa_cvolume_equal(&o->thread_info.soft_volume, &o->soft_volume))
1934 continue;
1935
1936 o->thread_info.soft_volume = o->soft_volume;
1937 //pa_source_output_request_rewind(o, 0, TRUE, FALSE, FALSE);
1938 }
1939 }
1940
1941 /* Called from the IO thread. Only called for the root source in volume sharing
1942 * cases, except for internal recursive calls. */
1943 static void set_shared_volume_within_thread(pa_source *s) {
1944 pa_source_output *o;
1945 void *state = NULL;
1946
1947 pa_source_assert_ref(s);
1948
1949 PA_MSGOBJECT(s)->process_msg(PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_VOLUME_SYNCED, NULL, 0, NULL);
1950
1951 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
1952 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1953 set_shared_volume_within_thread(o->destination_source);
1954 }
1955 }
1956
1957 /* Called from the IO thread, except for messages that are explicitly handled in the main thread (e.g. PA_SOURCE_MESSAGE_UPDATE_VOLUME_AND_MUTE) */
1958 int pa_source_process_msg(pa_msgobject *object, int code, void *userdata, int64_t offset, pa_memchunk *chunk) {
1959 pa_source *s = PA_SOURCE(object);
1960 pa_source_assert_ref(s);
1961
1962 switch ((pa_source_message_t) code) {
1963
1964 case PA_SOURCE_MESSAGE_ADD_OUTPUT: {
1965 pa_source_output *o = PA_SOURCE_OUTPUT(userdata);
1966
1967 pa_hashmap_put(s->thread_info.outputs, PA_UINT32_TO_PTR(o->index), pa_source_output_ref(o));
1968
1969 if (o->direct_on_input) {
1970 o->thread_info.direct_on_input = o->direct_on_input;
1971 pa_hashmap_put(o->thread_info.direct_on_input->thread_info.direct_outputs, PA_UINT32_TO_PTR(o->index), o);
1972 }
1973
1974 pa_assert(!o->thread_info.attached);
1975 o->thread_info.attached = TRUE;
1976
1977 if (o->attach)
1978 o->attach(o);
1979
1980 pa_source_output_set_state_within_thread(o, o->state);
1981
1982 if (o->thread_info.requested_source_latency != (pa_usec_t) -1)
1983 pa_source_output_set_requested_latency_within_thread(o, o->thread_info.requested_source_latency);
1984
1985 pa_source_output_update_max_rewind(o, s->thread_info.max_rewind);
1986
1987 /* We don't just invalidate the requested latency here,
1988 * because if we are in a move we might need to fix up the
1989 * requested latency. */
1990 pa_source_output_set_requested_latency_within_thread(o, o->thread_info.requested_source_latency);
1991
1992 /* In flat volume mode we need to update the volume as
1993 * well */
1994 return object->process_msg(object, PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL);
1995 }
1996
1997 case PA_SOURCE_MESSAGE_REMOVE_OUTPUT: {
1998 pa_source_output *o = PA_SOURCE_OUTPUT(userdata);
1999
2000 pa_source_output_set_state_within_thread(o, o->state);
2001
2002 if (o->detach)
2003 o->detach(o);
2004
2005 pa_assert(o->thread_info.attached);
2006 o->thread_info.attached = FALSE;
2007
2008 if (o->thread_info.direct_on_input) {
2009 pa_hashmap_remove(o->thread_info.direct_on_input->thread_info.direct_outputs, PA_UINT32_TO_PTR(o->index));
2010 o->thread_info.direct_on_input = NULL;
2011 }
2012
2013 if (pa_hashmap_remove(s->thread_info.outputs, PA_UINT32_TO_PTR(o->index)))
2014 pa_source_output_unref(o);
2015
2016 pa_source_invalidate_requested_latency(s, TRUE);
2017
2018 /* In flat volume mode we need to update the volume as
2019 * well */
2020 return object->process_msg(object, PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL);
2021 }
2022
2023 case PA_SOURCE_MESSAGE_SET_SHARED_VOLUME: {
2024 pa_source *root_source = pa_source_get_master(s);
2025
2026 if (PA_LIKELY(root_source))
2027 set_shared_volume_within_thread(root_source);
2028
2029 return 0;
2030 }
2031
2032 case PA_SOURCE_MESSAGE_SET_VOLUME_SYNCED:
2033
2034 if (s->flags & PA_SOURCE_DEFERRED_VOLUME) {
2035 s->set_volume(s);
2036 pa_source_volume_change_push(s);
2037 }
2038 /* Fall through ... */
2039
2040 case PA_SOURCE_MESSAGE_SET_VOLUME:
2041
2042 if (!pa_cvolume_equal(&s->thread_info.soft_volume, &s->soft_volume)) {
2043 s->thread_info.soft_volume = s->soft_volume;
2044 }
2045
2046 /* Fall through ... */
2047
2048 case PA_SOURCE_MESSAGE_SYNC_VOLUMES:
2049 sync_output_volumes_within_thread(s);
2050 return 0;
2051
2052 case PA_SOURCE_MESSAGE_GET_VOLUME:
2053
2054 if ((s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->get_volume) {
2055 s->get_volume(s);
2056 pa_source_volume_change_flush(s);
2057 pa_sw_cvolume_divide(&s->thread_info.current_hw_volume, &s->real_volume, &s->soft_volume);
2058 }
2059
2060             /* In case the source implementor reset the SW volume. */
2061 if (!pa_cvolume_equal(&s->thread_info.soft_volume, &s->soft_volume)) {
2062 s->thread_info.soft_volume = s->soft_volume;
2063 }
2064
2065 return 0;
2066
2067 case PA_SOURCE_MESSAGE_SET_MUTE:
2068
2069 if (s->thread_info.soft_muted != s->muted) {
2070 s->thread_info.soft_muted = s->muted;
2071 }
2072
2073             if ((s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->set_mute)
2074 s->set_mute(s);
2075
2076 return 0;
2077
2078 case PA_SOURCE_MESSAGE_GET_MUTE:
2079
2080             if ((s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->get_mute)
2081 s->get_mute(s);
2082
2083 return 0;
2084
2085 case PA_SOURCE_MESSAGE_SET_STATE: {
2086
2087 pa_bool_t suspend_change =
2088 (s->thread_info.state == PA_SOURCE_SUSPENDED && PA_SOURCE_IS_OPENED(PA_PTR_TO_UINT(userdata))) ||
2089 (PA_SOURCE_IS_OPENED(s->thread_info.state) && PA_PTR_TO_UINT(userdata) == PA_SOURCE_SUSPENDED);
2090
2091 s->thread_info.state = PA_PTR_TO_UINT(userdata);
2092
2093 if (suspend_change) {
2094 pa_source_output *o;
2095 void *state = NULL;
2096
2097 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL)))
2098 if (o->suspend_within_thread)
2099 o->suspend_within_thread(o, s->thread_info.state == PA_SOURCE_SUSPENDED);
2100 }
2101
2102 return 0;
2103 }
2104
2105 case PA_SOURCE_MESSAGE_DETACH:
2106
2107 /* Detach all streams */
2108 pa_source_detach_within_thread(s);
2109 return 0;
2110
2111 case PA_SOURCE_MESSAGE_ATTACH:
2112
2113 /* Reattach all streams */
2114 pa_source_attach_within_thread(s);
2115 return 0;
2116
2117 case PA_SOURCE_MESSAGE_GET_REQUESTED_LATENCY: {
2118
2119 pa_usec_t *usec = userdata;
2120 *usec = pa_source_get_requested_latency_within_thread(s);
2121
2122             /* Note that the IO thread will see (pa_usec_t) -1 here when no
2123              * explicit requested latency is configured, while the main
2124              * thread will see max_latency instead */
2125 if (*usec == (pa_usec_t) -1)
2126 *usec = s->thread_info.max_latency;
2127
2128 return 0;
2129 }
2130
2131 case PA_SOURCE_MESSAGE_SET_LATENCY_RANGE: {
2132 pa_usec_t *r = userdata;
2133
2134 pa_source_set_latency_range_within_thread(s, r[0], r[1]);
2135
2136 return 0;
2137 }
2138
2139 case PA_SOURCE_MESSAGE_GET_LATENCY_RANGE: {
2140 pa_usec_t *r = userdata;
2141
2142 r[0] = s->thread_info.min_latency;
2143 r[1] = s->thread_info.max_latency;
2144
2145 return 0;
2146 }
2147
2148 case PA_SOURCE_MESSAGE_GET_FIXED_LATENCY:
2149
2150 *((pa_usec_t*) userdata) = s->thread_info.fixed_latency;
2151 return 0;
2152
2153 case PA_SOURCE_MESSAGE_SET_FIXED_LATENCY:
2154
2155 pa_source_set_fixed_latency_within_thread(s, (pa_usec_t) offset);
2156 return 0;
2157
2158 case PA_SOURCE_MESSAGE_GET_MAX_REWIND:
2159
2160 *((size_t*) userdata) = s->thread_info.max_rewind;
2161 return 0;
2162
2163 case PA_SOURCE_MESSAGE_SET_MAX_REWIND:
2164
2165 pa_source_set_max_rewind_within_thread(s, (size_t) offset);
2166 return 0;
2167
2168 case PA_SOURCE_MESSAGE_GET_LATENCY:
2169
2170 if (s->monitor_of) {
2171 *((pa_usec_t*) userdata) = 0;
2172 return 0;
2173 }
2174
2175             /* Implementors need to override this implementation! */
2176 return -1;
2177
2178 case PA_SOURCE_MESSAGE_SET_PORT:
2179
2180 pa_assert(userdata);
2181 if (s->set_port) {
2182 struct source_message_set_port *msg_data = userdata;
2183 msg_data->ret = s->set_port(s, msg_data->port);
2184 }
2185 return 0;
2186
2187 case PA_SOURCE_MESSAGE_UPDATE_VOLUME_AND_MUTE:
2188 /* This message is sent from IO-thread and handled in main thread. */
2189 pa_assert_ctl_context();
2190
2191             /* Make sure we're not messing with the main thread when the source is no longer linked */
2192 if (!PA_SOURCE_IS_LINKED(s->state))
2193 return 0;
2194
2195 pa_source_get_volume(s, TRUE);
2196 pa_source_get_mute(s, TRUE);
2197 return 0;
2198
2199 case PA_SOURCE_MESSAGE_SET_LATENCY_OFFSET:
2200 s->thread_info.latency_offset = offset;
2201 return 0;
2202
2203 case PA_SOURCE_MESSAGE_MAX:
2204 ;
2205 }
2206
2207 return -1;
2208 }
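
/* Illustrative sketch, not taken from this file: a backend usually installs
 * its own handler and chains up to pa_source_process_msg() for everything it
 * does not handle itself, e.g. to answer PA_SOURCE_MESSAGE_GET_LATENCY, which
 * is deliberately left unimplemented above. Names prefixed with "my_" are
 * hypothetical.
 *
 *     static int my_process_msg(pa_msgobject *o, int code, void *data, int64_t offset, pa_memchunk *chunk) {
 *         pa_source *s = PA_SOURCE(o);
 *
 *         switch (code) {
 *             case PA_SOURCE_MESSAGE_GET_LATENCY:
 *                 *((pa_usec_t*) data) = my_device_latency(s->userdata);  // hypothetical helper
 *                 return 0;
 *         }
 *
 *         return pa_source_process_msg(o, code, data, offset, chunk);
 *     }
 */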
2209
2210 /* Called from main thread */
2211 int pa_source_suspend_all(pa_core *c, pa_bool_t suspend, pa_suspend_cause_t cause) {
2212 pa_source *source;
2213 uint32_t idx;
2214 int ret = 0;
2215
2216 pa_core_assert_ref(c);
2217 pa_assert_ctl_context();
2218 pa_assert(cause != 0);
2219
2220 for (source = PA_SOURCE(pa_idxset_first(c->sources, &idx)); source; source = PA_SOURCE(pa_idxset_next(c->sources, &idx))) {
2221 int r;
2222
2223 if (source->monitor_of)
2224 continue;
2225
2226 if ((r = pa_source_suspend(source, suspend, cause)) < 0)
2227 ret = r;
2228 }
2229
2230 return ret;
2231 }
2232
2233 /* Called from main thread */
2234 void pa_source_detach(pa_source *s) {
2235 pa_source_assert_ref(s);
2236 pa_assert_ctl_context();
2237 pa_assert(PA_SOURCE_IS_LINKED(s->state));
2238
2239 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_DETACH, NULL, 0, NULL) == 0);
2240 }
2241
2242 /* Called from main thread */
2243 void pa_source_attach(pa_source *s) {
2244 pa_source_assert_ref(s);
2245 pa_assert_ctl_context();
2246 pa_assert(PA_SOURCE_IS_LINKED(s->state));
2247
2248 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_ATTACH, NULL, 0, NULL) == 0);
2249 }
2250
2251 /* Called from IO thread */
2252 void pa_source_detach_within_thread(pa_source *s) {
2253 pa_source_output *o;
2254 void *state = NULL;
2255
2256 pa_source_assert_ref(s);
2257 pa_source_assert_io_context(s);
2258 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
2259
2260 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2261 if (o->detach)
2262 o->detach(o);
2263 }
2264
2265 /* Called from IO thread */
2266 void pa_source_attach_within_thread(pa_source *s) {
2267 pa_source_output *o;
2268 void *state = NULL;
2269
2270 pa_source_assert_ref(s);
2271 pa_source_assert_io_context(s);
2272 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
2273
2274 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2275 if (o->attach)
2276 o->attach(o);
2277 }
2278
2279 /* Called from IO thread */
2280 pa_usec_t pa_source_get_requested_latency_within_thread(pa_source *s) {
2281 pa_usec_t result = (pa_usec_t) -1;
2282 pa_source_output *o;
2283 void *state = NULL;
2284
2285 pa_source_assert_ref(s);
2286 pa_source_assert_io_context(s);
2287
2288 if (!(s->flags & PA_SOURCE_DYNAMIC_LATENCY))
2289 return PA_CLAMP(s->thread_info.fixed_latency, s->thread_info.min_latency, s->thread_info.max_latency);
2290
2291 if (s->thread_info.requested_latency_valid)
2292 return s->thread_info.requested_latency;
2293
2294 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2295 if (o->thread_info.requested_source_latency != (pa_usec_t) -1 &&
2296 (result == (pa_usec_t) -1 || result > o->thread_info.requested_source_latency))
2297 result = o->thread_info.requested_source_latency;
2298
2299 if (result != (pa_usec_t) -1)
2300 result = PA_CLAMP(result, s->thread_info.min_latency, s->thread_info.max_latency);
2301
2302 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2303 /* Only cache this if we are fully set up */
2304 s->thread_info.requested_latency = result;
2305 s->thread_info.requested_latency_valid = TRUE;
2306 }
2307
2308 return result;
2309 }
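
/* A quick worked example of the selection above: with two outputs requesting
 * 25 ms and 10 ms and a latency range of [5 ms, 2 s], the smallest request
 * wins and the result is 10 ms. With no requests at all the IO thread keeps
 * seeing (pa_usec_t) -1; only the main thread has it translated to
 * max_latency by PA_SOURCE_MESSAGE_GET_REQUESTED_LATENCY. */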
2310
2311 /* Called from main thread */
2312 pa_usec_t pa_source_get_requested_latency(pa_source *s) {
2313 pa_usec_t usec = 0;
2314
2315 pa_source_assert_ref(s);
2316 pa_assert_ctl_context();
2317 pa_assert(PA_SOURCE_IS_LINKED(s->state));
2318
2319 if (s->state == PA_SOURCE_SUSPENDED)
2320 return 0;
2321
2322 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_REQUESTED_LATENCY, &usec, 0, NULL) == 0);
2323
2324 return usec;
2325 }
2326
2327 /* Called from IO thread */
2328 void pa_source_set_max_rewind_within_thread(pa_source *s, size_t max_rewind) {
2329 pa_source_output *o;
2330 void *state = NULL;
2331
2332 pa_source_assert_ref(s);
2333 pa_source_assert_io_context(s);
2334
2335 if (max_rewind == s->thread_info.max_rewind)
2336 return;
2337
2338 s->thread_info.max_rewind = max_rewind;
2339
2340 if (PA_SOURCE_IS_LINKED(s->thread_info.state))
2341 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2342 pa_source_output_update_max_rewind(o, s->thread_info.max_rewind);
2343 }
2344
2345 /* Called from main thread */
2346 void pa_source_set_max_rewind(pa_source *s, size_t max_rewind) {
2347 pa_source_assert_ref(s);
2348 pa_assert_ctl_context();
2349
2350 if (PA_SOURCE_IS_LINKED(s->state))
2351 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MAX_REWIND, NULL, max_rewind, NULL) == 0);
2352 else
2353 pa_source_set_max_rewind_within_thread(s, max_rewind);
2354 }
2355
2356 /* Called from IO thread */
2357 void pa_source_invalidate_requested_latency(pa_source *s, pa_bool_t dynamic) {
2358 pa_source_output *o;
2359 void *state = NULL;
2360
2361 pa_source_assert_ref(s);
2362 pa_source_assert_io_context(s);
2363
2364 if ((s->flags & PA_SOURCE_DYNAMIC_LATENCY))
2365 s->thread_info.requested_latency_valid = FALSE;
2366 else if (dynamic)
2367 return;
2368
2369 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2370
2371 if (s->update_requested_latency)
2372 s->update_requested_latency(s);
2373
2374 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL)))
2375 if (o->update_source_requested_latency)
2376 o->update_source_requested_latency(o);
2377 }
2378
2379 if (s->monitor_of)
2380 pa_sink_invalidate_requested_latency(s->monitor_of, dynamic);
2381 }
2382
2383 /* Called from main thread */
2384 void pa_source_set_latency_range(pa_source *s, pa_usec_t min_latency, pa_usec_t max_latency) {
2385 pa_source_assert_ref(s);
2386 pa_assert_ctl_context();
2387
2388 /* min_latency == 0: no limit
2389 * min_latency anything else: specified limit
2390 *
2391 * Similar for max_latency */
2392
2393 if (min_latency < ABSOLUTE_MIN_LATENCY)
2394 min_latency = ABSOLUTE_MIN_LATENCY;
2395
2396 if (max_latency <= 0 ||
2397 max_latency > ABSOLUTE_MAX_LATENCY)
2398 max_latency = ABSOLUTE_MAX_LATENCY;
2399
2400 pa_assert(min_latency <= max_latency);
2401
2402 /* Hmm, let's see if someone forgot to set PA_SOURCE_DYNAMIC_LATENCY here... */
2403 pa_assert((min_latency == ABSOLUTE_MIN_LATENCY &&
2404 max_latency == ABSOLUTE_MAX_LATENCY) ||
2405 (s->flags & PA_SOURCE_DYNAMIC_LATENCY));
2406
2407 if (PA_SOURCE_IS_LINKED(s->state)) {
2408 pa_usec_t r[2];
2409
2410 r[0] = min_latency;
2411 r[1] = max_latency;
2412
2413 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_LATENCY_RANGE, r, 0, NULL) == 0);
2414 } else
2415 pa_source_set_latency_range_within_thread(s, min_latency, max_latency);
2416 }
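
/* Illustrative only: a driver that sets PA_SOURCE_DYNAMIC_LATENCY would
 * typically constrain the range once during setup, before pa_source_put():
 *
 *     pa_source_set_latency_range(s, 5 * PA_USEC_PER_MSEC, 2 * PA_USEC_PER_SEC);
 *
 * Passing 0 for either bound means "no limit"; out-of-range values are
 * clamped to ABSOLUTE_MIN_LATENCY / ABSOLUTE_MAX_LATENCY as shown above. */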
2417
2418 /* Called from main thread */
2419 void pa_source_get_latency_range(pa_source *s, pa_usec_t *min_latency, pa_usec_t *max_latency) {
2420 pa_source_assert_ref(s);
2421 pa_assert_ctl_context();
2422 pa_assert(min_latency);
2423 pa_assert(max_latency);
2424
2425 if (PA_SOURCE_IS_LINKED(s->state)) {
2426 pa_usec_t r[2] = { 0, 0 };
2427
2428 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_LATENCY_RANGE, r, 0, NULL) == 0);
2429
2430 *min_latency = r[0];
2431 *max_latency = r[1];
2432 } else {
2433 *min_latency = s->thread_info.min_latency;
2434 *max_latency = s->thread_info.max_latency;
2435 }
2436 }
2437
2438 /* Called from IO thread, and from main thread before pa_source_put() is called */
2439 void pa_source_set_latency_range_within_thread(pa_source *s, pa_usec_t min_latency, pa_usec_t max_latency) {
2440 pa_source_assert_ref(s);
2441 pa_source_assert_io_context(s);
2442
2443 pa_assert(min_latency >= ABSOLUTE_MIN_LATENCY);
2444 pa_assert(max_latency <= ABSOLUTE_MAX_LATENCY);
2445 pa_assert(min_latency <= max_latency);
2446
2447 /* Hmm, let's see if someone forgot to set PA_SOURCE_DYNAMIC_LATENCY here... */
2448 pa_assert((min_latency == ABSOLUTE_MIN_LATENCY &&
2449 max_latency == ABSOLUTE_MAX_LATENCY) ||
2450 (s->flags & PA_SOURCE_DYNAMIC_LATENCY) ||
2451 s->monitor_of);
2452
2453 if (s->thread_info.min_latency == min_latency &&
2454 s->thread_info.max_latency == max_latency)
2455 return;
2456
2457 s->thread_info.min_latency = min_latency;
2458 s->thread_info.max_latency = max_latency;
2459
2460 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2461 pa_source_output *o;
2462 void *state = NULL;
2463
2464 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2465 if (o->update_source_latency_range)
2466 o->update_source_latency_range(o);
2467 }
2468
2469 pa_source_invalidate_requested_latency(s, FALSE);
2470 }
2471
2472 /* Called from main thread, before the source is put */
2473 void pa_source_set_fixed_latency(pa_source *s, pa_usec_t latency) {
2474 pa_source_assert_ref(s);
2475 pa_assert_ctl_context();
2476
2477 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY) {
2478 pa_assert(latency == 0);
2479 return;
2480 }
2481
2482 if (latency < ABSOLUTE_MIN_LATENCY)
2483 latency = ABSOLUTE_MIN_LATENCY;
2484
2485 if (latency > ABSOLUTE_MAX_LATENCY)
2486 latency = ABSOLUTE_MAX_LATENCY;
2487
2488 if (PA_SOURCE_IS_LINKED(s->state))
2489 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_FIXED_LATENCY, NULL, (int64_t) latency, NULL) == 0);
2490 else
2491 s->thread_info.fixed_latency = latency;
2492 }
2493
2494 /* Called from main thread */
2495 pa_usec_t pa_source_get_fixed_latency(pa_source *s) {
2496 pa_usec_t latency;
2497
2498 pa_source_assert_ref(s);
2499 pa_assert_ctl_context();
2500
2501 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY)
2502 return 0;
2503
2504 if (PA_SOURCE_IS_LINKED(s->state))
2505 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_FIXED_LATENCY, &latency, 0, NULL) == 0);
2506 else
2507 latency = s->thread_info.fixed_latency;
2508
2509 return latency;
2510 }
2511
2512 /* Called from IO thread */
2513 void pa_source_set_fixed_latency_within_thread(pa_source *s, pa_usec_t latency) {
2514 pa_source_assert_ref(s);
2515 pa_source_assert_io_context(s);
2516
2517 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY) {
2518 pa_assert(latency == 0);
2519 return;
2520 }
2521
2522 pa_assert(latency >= ABSOLUTE_MIN_LATENCY);
2523 pa_assert(latency <= ABSOLUTE_MAX_LATENCY);
2524
2525 if (s->thread_info.fixed_latency == latency)
2526 return;
2527
2528 s->thread_info.fixed_latency = latency;
2529
2530 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2531 pa_source_output *o;
2532 void *state = NULL;
2533
2534 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2535 if (o->update_source_fixed_latency)
2536 o->update_source_fixed_latency(o);
2537 }
2538
2539 pa_source_invalidate_requested_latency(s, FALSE);
2540 }
2541
2542 /* Called from main thread */
2543 void pa_source_set_latency_offset(pa_source *s, int64_t offset) {
2544 pa_source_assert_ref(s);
2545
2546 s->latency_offset = offset;
2547
2548 if (PA_SOURCE_IS_LINKED(s->state))
2549 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_LATENCY_OFFSET, NULL, offset, NULL) == 0);
2550 else
2551 s->thread_info.latency_offset = offset;
2552 }
2553
2554 /* Called from main thread */
2555 size_t pa_source_get_max_rewind(pa_source *s) {
2556 size_t r;
2557 pa_assert_ctl_context();
2558 pa_source_assert_ref(s);
2559
2560 if (!PA_SOURCE_IS_LINKED(s->state))
2561 return s->thread_info.max_rewind;
2562
2563 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_MAX_REWIND, &r, 0, NULL) == 0);
2564
2565 return r;
2566 }
2567
2568 /* Called from main context */
2569 int pa_source_set_port(pa_source *s, const char *name, pa_bool_t save) {
2570 pa_device_port *port;
2571 int ret;
2572
2573 pa_source_assert_ref(s);
2574 pa_assert_ctl_context();
2575
2576 if (!s->set_port) {
2577 pa_log_debug("set_port() operation not implemented for source %u \"%s\"", s->index, s->name);
2578 return -PA_ERR_NOTIMPLEMENTED;
2579 }
2580
2581 if (!name)
2582 return -PA_ERR_NOENTITY;
2583
2584 if (!(port = pa_hashmap_get(s->ports, name)))
2585 return -PA_ERR_NOENTITY;
2586
2587 if (s->active_port == port) {
2588 s->save_port = s->save_port || save;
2589 return 0;
2590 }
2591
2592 if (s->flags & PA_SOURCE_DEFERRED_VOLUME) {
2593 struct source_message_set_port msg = { .port = port, .ret = 0 };
2594 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_PORT, &msg, 0, NULL) == 0);
2595 ret = msg.ret;
2596 }
2597 else
2598 ret = s->set_port(s, port);
2599
2600 if (ret < 0)
2601 return -PA_ERR_NOENTITY;
2602
2603 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
2604
2605 pa_log_info("Changed port of source %u \"%s\" to %s", s->index, s->name, port->name);
2606
2607 s->active_port = port;
2608 s->save_port = save;
2609
2610 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PORT_CHANGED], s);
2611
2612 return 0;
2613 }
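
/* Hedged usage example; the port name is made up and must match an entry in
 * s->ports for the call to succeed:
 *
 *     if (pa_source_set_port(s, "analog-input-microphone", TRUE) < 0)
 *         pa_log_warn("Failed to switch port");
 */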
2614
2615 PA_STATIC_FLIST_DECLARE(pa_source_volume_change, 0, pa_xfree);
2616
2617 /* Called from the IO thread. */
2618 static pa_source_volume_change *pa_source_volume_change_new(pa_source *s) {
2619 pa_source_volume_change *c;
2620 if (!(c = pa_flist_pop(PA_STATIC_FLIST_GET(pa_source_volume_change))))
2621 c = pa_xnew(pa_source_volume_change, 1);
2622
2623 PA_LLIST_INIT(pa_source_volume_change, c);
2624 c->at = 0;
2625 pa_cvolume_reset(&c->hw_volume, s->sample_spec.channels);
2626 return c;
2627 }
2628
2629 /* Called from the IO thread. */
2630 static void pa_source_volume_change_free(pa_source_volume_change *c) {
2631 pa_assert(c);
2632 if (pa_flist_push(PA_STATIC_FLIST_GET(pa_source_volume_change), c) < 0)
2633 pa_xfree(c);
2634 }
2635
2636 /* Called from the IO thread. */
2637 void pa_source_volume_change_push(pa_source *s) {
2638 pa_source_volume_change *c = NULL;
2639 pa_source_volume_change *nc = NULL;
2640 uint32_t safety_margin = s->thread_info.volume_change_safety_margin;
2641
2642 const char *direction = NULL;
2643
2644 pa_assert(s);
2645 nc = pa_source_volume_change_new(s);
2646
2647     /* NOTE: There are already more different volumes in pa_source than I can remember.
2648      * Adding one more volume for HW would let us get rid of this, but I am trying
2649      * to survive with the ones we already have. */
2650 pa_sw_cvolume_divide(&nc->hw_volume, &s->real_volume, &s->soft_volume);
2651
2652 if (!s->thread_info.volume_changes && pa_cvolume_equal(&nc->hw_volume, &s->thread_info.current_hw_volume)) {
2653 pa_log_debug("Volume not changing");
2654 pa_source_volume_change_free(nc);
2655 return;
2656 }
2657
2658 nc->at = pa_source_get_latency_within_thread(s);
2659 nc->at += pa_rtclock_now() + s->thread_info.volume_change_extra_delay;
2660
2661 if (s->thread_info.volume_changes_tail) {
2662 for (c = s->thread_info.volume_changes_tail; c; c = c->prev) {
2663 /* If volume is going up let's do it a bit late. If it is going
2664 * down let's do it a bit early. */
2665 if (pa_cvolume_avg(&nc->hw_volume) > pa_cvolume_avg(&c->hw_volume)) {
2666 if (nc->at + safety_margin > c->at) {
2667 nc->at += safety_margin;
2668 direction = "up";
2669 break;
2670 }
2671 }
2672 else if (nc->at - safety_margin > c->at) {
2673 nc->at -= safety_margin;
2674 direction = "down";
2675 break;
2676 }
2677 }
2678 }
2679
2680 if (c == NULL) {
2681 if (pa_cvolume_avg(&nc->hw_volume) > pa_cvolume_avg(&s->thread_info.current_hw_volume)) {
2682 nc->at += safety_margin;
2683 direction = "up";
2684 } else {
2685 nc->at -= safety_margin;
2686 direction = "down";
2687 }
2688 PA_LLIST_PREPEND(pa_source_volume_change, s->thread_info.volume_changes, nc);
2689 }
2690 else {
2691 PA_LLIST_INSERT_AFTER(pa_source_volume_change, s->thread_info.volume_changes, c, nc);
2692 }
2693
2694     pa_log_debug("Volume going %s to %u at %llu", direction, pa_cvolume_avg(&nc->hw_volume), (long long unsigned) nc->at);
2695
2696     /* Any volume changes that were queued earlier but are scheduled to take effect later than this one can be dropped. */
2697 PA_LLIST_FOREACH(c, nc->next) {
2698         pa_log_debug("Volume change to %u at %llu was dropped", pa_cvolume_avg(&c->hw_volume), (long long unsigned) c->at);
2699 pa_source_volume_change_free(c);
2700 }
2701 nc->next = NULL;
2702 s->thread_info.volume_changes_tail = nc;
2703 }
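
/* A numerical illustration of the scheduling above: with a device latency of
 * 20 ms and a volume_change_safety_margin of 8 ms, a change requested "now"
 * is nominally due at now + 20 ms; an upward change is pushed to now + 28 ms
 * and a downward change pulled to now + 12 ms, matching the "up a bit late,
 * down a bit early" rule. */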
2704
2705 /* Called from the IO thread. */
2706 static void pa_source_volume_change_flush(pa_source *s) {
2707 pa_source_volume_change *c = s->thread_info.volume_changes;
2708 pa_assert(s);
2709 s->thread_info.volume_changes = NULL;
2710 s->thread_info.volume_changes_tail = NULL;
2711 while (c) {
2712 pa_source_volume_change *next = c->next;
2713 pa_source_volume_change_free(c);
2714 c = next;
2715 }
2716 }
2717
2718 /* Called from the IO thread. */
2719 pa_bool_t pa_source_volume_change_apply(pa_source *s, pa_usec_t *usec_to_next) {
2720 pa_usec_t now;
2721 pa_bool_t ret = FALSE;
2722
2723 pa_assert(s);
2724
2725 if (!s->thread_info.volume_changes || !PA_SOURCE_IS_LINKED(s->state)) {
2726 if (usec_to_next)
2727 *usec_to_next = 0;
2728 return ret;
2729 }
2730
2731 pa_assert(s->write_volume);
2732
2733 now = pa_rtclock_now();
2734
2735 while (s->thread_info.volume_changes && now >= s->thread_info.volume_changes->at) {
2736 pa_source_volume_change *c = s->thread_info.volume_changes;
2737 PA_LLIST_REMOVE(pa_source_volume_change, s->thread_info.volume_changes, c);
2738         pa_log_debug("Volume change to %u at %llu was written %llu usec late",
2739 pa_cvolume_avg(&c->hw_volume), (long long unsigned) c->at, (long long unsigned) (now - c->at));
2740 ret = TRUE;
2741 s->thread_info.current_hw_volume = c->hw_volume;
2742 pa_source_volume_change_free(c);
2743 }
2744
2745 if (ret)
2746 s->write_volume(s);
2747
2748 if (s->thread_info.volume_changes) {
2749 if (usec_to_next)
2750 *usec_to_next = s->thread_info.volume_changes->at - now;
2751 if (pa_log_ratelimit(PA_LOG_DEBUG))
2752 pa_log_debug("Next volume change in %lld usec", (long long) (s->thread_info.volume_changes->at - now));
2753 }
2754 else {
2755 if (usec_to_next)
2756 *usec_to_next = 0;
2757 s->thread_info.volume_changes_tail = NULL;
2758 }
2759 return ret;
2760 }
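
/* Hypothetical sketch of how a deferred-volume backend's IO thread might
 * drive this, using usec_to_next to rearm its wakeup timer ("u" and
 * my_rearm_timer() are assumptions, not part of this file):
 *
 *     pa_usec_t usec_to_next;
 *
 *     pa_source_volume_change_apply(u->source, &usec_to_next);
 *
 *     if (usec_to_next > 0)
 *         my_rearm_timer(u, usec_to_next);
 */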
2761
2762
2763 /* Called from the main thread */
2764 /* Gets the list of formats supported by the source. The members and idxset must
2765 * be freed by the caller. */
2766 pa_idxset* pa_source_get_formats(pa_source *s) {
2767 pa_idxset *ret;
2768
2769 pa_assert(s);
2770
2771 if (s->get_formats) {
2772 /* Source supports format query, all is good */
2773 ret = s->get_formats(s);
2774 } else {
2775 /* Source doesn't support format query, so assume it does PCM */
2776 pa_format_info *f = pa_format_info_new();
2777 f->encoding = PA_ENCODING_PCM;
2778
2779 ret = pa_idxset_new(NULL, NULL);
2780 pa_idxset_put(ret, f, NULL);
2781 }
2782
2783 return ret;
2784 }
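
/* Illustrative caller-side usage, mirroring the cleanup that
 * pa_source_check_format() performs below:
 *
 *     pa_idxset *formats = pa_source_get_formats(s);
 *     pa_format_info *f;
 *     uint32_t idx;
 *
 *     PA_IDXSET_FOREACH(f, formats, idx)
 *         pa_log_debug("supported encoding: %d", (int) f->encoding);
 *
 *     pa_idxset_free(formats, (pa_free2_cb_t) pa_format_info_free2, NULL);
 */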
2785
2786 /* Called from the main thread */
2787 /* Checks if the source can accept this format */
2788 pa_bool_t pa_source_check_format(pa_source *s, pa_format_info *f)
2789 {
2790 pa_idxset *formats = NULL;
2791 pa_bool_t ret = FALSE;
2792
2793 pa_assert(s);
2794 pa_assert(f);
2795
2796 formats = pa_source_get_formats(s);
2797
2798 if (formats) {
2799 pa_format_info *finfo_device;
2800 uint32_t i;
2801
2802 PA_IDXSET_FOREACH(finfo_device, formats, i) {
2803 if (pa_format_info_is_compatible(finfo_device, f)) {
2804 ret = TRUE;
2805 break;
2806 }
2807 }
2808
2809 pa_idxset_free(formats, (pa_free2_cb_t) pa_format_info_free2, NULL);
2810 }
2811
2812 return ret;
2813 }
2814
2815 /* Called from the main thread */
2816 /* Calculates the intersection between formats supported by the source and
2817 * in_formats, and returns these, in the order of the source's formats. */
2818 pa_idxset* pa_source_check_formats(pa_source *s, pa_idxset *in_formats) {
2819 pa_idxset *out_formats = pa_idxset_new(NULL, NULL), *source_formats = NULL;
2820 pa_format_info *f_source, *f_in;
2821 uint32_t i, j;
2822
2823 pa_assert(s);
2824
2825 if (!in_formats || pa_idxset_isempty(in_formats))
2826 goto done;
2827
2828 source_formats = pa_source_get_formats(s);
2829
2830 PA_IDXSET_FOREACH(f_source, source_formats, i) {
2831 PA_IDXSET_FOREACH(f_in, in_formats, j) {
2832 if (pa_format_info_is_compatible(f_source, f_in))
2833 pa_idxset_put(out_formats, pa_format_info_copy(f_in), NULL);
2834 }
2835 }
2836
2837 done:
2838 if (source_formats)
2839 pa_idxset_free(source_formats, (pa_free2_cb_t) pa_format_info_free2, NULL);
2840
2841 return out_formats;
2842 }