1 /***
2 This file is part of PulseAudio.
3
4 Copyright 2004-2006 Lennart Poettering
5 Copyright 2006 Pierre Ossman <ossman@cendio.se> for Cendio AB
6
7 PulseAudio is free software; you can redistribute it and/or modify
8 it under the terms of the GNU Lesser General Public License as published
9 by the Free Software Foundation; either version 2.1 of the License,
10 or (at your option) any later version.
11
12 PulseAudio is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU Lesser General Public License
18 along with PulseAudio; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
20 USA.
21 ***/
22
23 #ifdef HAVE_CONFIG_H
24 #include <config.h>
25 #endif
26
27 #include <stdio.h>
28 #include <stdlib.h>
29
30 #include <pulse/format.h>
31 #include <pulse/utf8.h>
32 #include <pulse/xmalloc.h>
33 #include <pulse/timeval.h>
34 #include <pulse/util.h>
35 #include <pulse/rtclock.h>
36 #include <pulse/internal.h>
37
38 #include <pulsecore/core-util.h>
39 #include <pulsecore/source-output.h>
40 #include <pulsecore/namereg.h>
41 #include <pulsecore/core-subscribe.h>
42 #include <pulsecore/log.h>
43 #include <pulsecore/sample-util.h>
44 #include <pulsecore/flist.h>
45
46 #include "source.h"
47
48 #define ABSOLUTE_MIN_LATENCY (500)
49 #define ABSOLUTE_MAX_LATENCY (10*PA_USEC_PER_SEC)
50 #define DEFAULT_FIXED_LATENCY (250*PA_USEC_PER_MSEC)
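/* All three latency constants above are expressed in microseconds (pa_usec_t). */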
51
52 PA_DEFINE_PUBLIC_CLASS(pa_source, pa_msgobject);
53
54 struct pa_source_volume_change {
55 pa_usec_t at;
56 pa_cvolume hw_volume;
57
58 PA_LLIST_FIELDS(pa_source_volume_change);
59 };
60
61 struct source_message_set_port {
62 pa_device_port *port;
63 int ret;
64 };
65
66 static void source_free(pa_object *o);
67
68 static void pa_source_volume_change_push(pa_source *s);
69 static void pa_source_volume_change_flush(pa_source *s);
70
71 pa_source_new_data* pa_source_new_data_init(pa_source_new_data *data) {
72 pa_assert(data);
73
74 pa_zero(*data);
75 data->proplist = pa_proplist_new();
76 data->ports = pa_hashmap_new(pa_idxset_string_hash_func, pa_idxset_string_compare_func);
77
78 return data;
79 }
80
81 void pa_source_new_data_set_name(pa_source_new_data *data, const char *name) {
82 pa_assert(data);
83
84 pa_xfree(data->name);
85 data->name = pa_xstrdup(name);
86 }
87
88 void pa_source_new_data_set_sample_spec(pa_source_new_data *data, const pa_sample_spec *spec) {
89 pa_assert(data);
90
91 if ((data->sample_spec_is_set = !!spec))
92 data->sample_spec = *spec;
93 }
94
95 void pa_source_new_data_set_channel_map(pa_source_new_data *data, const pa_channel_map *map) {
96 pa_assert(data);
97
98 if ((data->channel_map_is_set = !!map))
99 data->channel_map = *map;
100 }
101
102 void pa_source_new_data_set_alternate_sample_rate(pa_source_new_data *data, const uint32_t alternate_sample_rate) {
103 pa_assert(data);
104
105 data->alternate_sample_rate_is_set = TRUE;
106 data->alternate_sample_rate = alternate_sample_rate;
107 }
108
109 void pa_source_new_data_set_volume(pa_source_new_data *data, const pa_cvolume *volume) {
110 pa_assert(data);
111
112 if ((data->volume_is_set = !!volume))
113 data->volume = *volume;
114 }
115
116 void pa_source_new_data_set_muted(pa_source_new_data *data, pa_bool_t mute) {
117 pa_assert(data);
118
119 data->muted_is_set = TRUE;
120 data->muted = !!mute;
121 }
122
123 void pa_source_new_data_set_port(pa_source_new_data *data, const char *port) {
124 pa_assert(data);
125
126 pa_xfree(data->active_port);
127 data->active_port = pa_xstrdup(port);
128 }
129
130 void pa_source_new_data_done(pa_source_new_data *data) {
131 pa_assert(data);
132
133 pa_proplist_free(data->proplist);
134
135 if (data->ports)
136 pa_device_port_hashmap_free(data->ports);
137
138 pa_xfree(data->name);
139 pa_xfree(data->active_port);
140 }
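/*
 * A rough sketch of how a driver module typically uses the pa_source_new_data
 * API together with pa_source_new() and pa_source_put() below (not taken from
 * this file; "u", "ss", "map" and source_process_msg are hypothetical
 * module-side names):
 *
 *     pa_source_new_data data;
 *     pa_source *s;
 *
 *     pa_source_new_data_init(&data);
 *     data.driver = __FILE__;
 *     data.module = m;
 *     pa_source_new_data_set_name(&data, "my_source");
 *     pa_source_new_data_set_sample_spec(&data, &ss);
 *     pa_source_new_data_set_channel_map(&data, &map);
 *
 *     s = pa_source_new(m->core, &data, PA_SOURCE_HARDWARE|PA_SOURCE_LATENCY);
 *     pa_source_new_data_done(&data);
 *
 *     if (!s)
 *         return -1;
 *
 *     s->parent.process_msg = source_process_msg;
 *     s->userdata = u;
 *     pa_source_set_asyncmsgq(s, u->thread_mq.inq);
 *     pa_source_set_rtpoll(s, u->rtpoll);
 *     pa_source_put(s);
 */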
141
142 /* Called from main context */
143 static void reset_callbacks(pa_source *s) {
144 pa_assert(s);
145
146 s->set_state = NULL;
147 s->get_volume = NULL;
148 s->set_volume = NULL;
149 s->write_volume = NULL;
150 s->get_mute = NULL;
151 s->set_mute = NULL;
152 s->update_requested_latency = NULL;
153 s->set_port = NULL;
154 s->get_formats = NULL;
155 s->update_rate = NULL;
156 }
157
158 /* Called from main context */
159 pa_source* pa_source_new(
160 pa_core *core,
161 pa_source_new_data *data,
162 pa_source_flags_t flags) {
163
164 pa_source *s;
165 const char *name;
166 char st[PA_SAMPLE_SPEC_SNPRINT_MAX], cm[PA_CHANNEL_MAP_SNPRINT_MAX];
167 char *pt;
168
169 pa_assert(core);
170 pa_assert(data);
171 pa_assert(data->name);
172 pa_assert_ctl_context();
173
174 s = pa_msgobject_new(pa_source);
175
176 if (!(name = pa_namereg_register(core, data->name, PA_NAMEREG_SOURCE, s, data->namereg_fail))) {
177 pa_log_debug("Failed to register name %s.", data->name);
178 pa_xfree(s);
179 return NULL;
180 }
181
182 pa_source_new_data_set_name(data, name);
183
184 if (pa_hook_fire(&core->hooks[PA_CORE_HOOK_SOURCE_NEW], data) < 0) {
185 pa_xfree(s);
186 pa_namereg_unregister(core, name);
187 return NULL;
188 }
189
190 /* FIXME, need to free s here on failure */
191
192 pa_return_null_if_fail(!data->driver || pa_utf8_valid(data->driver));
193 pa_return_null_if_fail(data->name && pa_utf8_valid(data->name) && data->name[0]);
194
195 pa_return_null_if_fail(data->sample_spec_is_set && pa_sample_spec_valid(&data->sample_spec));
196
197 if (!data->channel_map_is_set)
198 pa_return_null_if_fail(pa_channel_map_init_auto(&data->channel_map, data->sample_spec.channels, PA_CHANNEL_MAP_DEFAULT));
199
200 pa_return_null_if_fail(pa_channel_map_valid(&data->channel_map));
201 pa_return_null_if_fail(data->channel_map.channels == data->sample_spec.channels);
202
203 /* FIXME: There should probably be a general function for checking whether
204 * the source volume is allowed to be set, like there is for source outputs. */
205 pa_assert(!data->volume_is_set || !(flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
206
207 if (!data->volume_is_set) {
208 pa_cvolume_reset(&data->volume, data->sample_spec.channels);
209 data->save_volume = FALSE;
210 }
211
212 pa_return_null_if_fail(pa_cvolume_valid(&data->volume));
213 pa_return_null_if_fail(pa_cvolume_compatible(&data->volume, &data->sample_spec));
214
215 if (!data->muted_is_set)
216 data->muted = FALSE;
217
218 if (data->card)
219 pa_proplist_update(data->proplist, PA_UPDATE_MERGE, data->card->proplist);
220
221 pa_device_init_description(data->proplist);
222 pa_device_init_icon(data->proplist, FALSE);
223 pa_device_init_intended_roles(data->proplist);
224
225 if (pa_hook_fire(&core->hooks[PA_CORE_HOOK_SOURCE_FIXATE], data) < 0) {
226 pa_xfree(s);
227 pa_namereg_unregister(core, name);
228 return NULL;
229 }
230
231 s->parent.parent.free = source_free;
232 s->parent.process_msg = pa_source_process_msg;
233
234 s->core = core;
235 s->state = PA_SOURCE_INIT;
236 s->flags = flags;
237 s->priority = 0;
238 s->suspend_cause = data->suspend_cause;
239 pa_source_set_mixer_dirty(s, FALSE);
240 s->name = pa_xstrdup(name);
241 s->proplist = pa_proplist_copy(data->proplist);
242 s->driver = pa_xstrdup(pa_path_get_filename(data->driver));
243 s->module = data->module;
244 s->card = data->card;
245
246 s->priority = pa_device_init_priority(s->proplist);
247
248 s->sample_spec = data->sample_spec;
249 s->channel_map = data->channel_map;
250 s->default_sample_rate = s->sample_spec.rate;
251
252 if (data->alternate_sample_rate_is_set)
253 s->alternate_sample_rate = data->alternate_sample_rate;
254 else
255 s->alternate_sample_rate = s->core->alternate_sample_rate;
256
257 if (s->sample_spec.rate == s->alternate_sample_rate) {
258 pa_log_warn("Default and alternate sample rates are the same.");
259 s->alternate_sample_rate = 0;
260 }
261
262 s->outputs = pa_idxset_new(NULL, NULL);
263 s->n_corked = 0;
264 s->monitor_of = NULL;
265 s->output_from_master = NULL;
266
267 s->reference_volume = s->real_volume = data->volume;
268 pa_cvolume_reset(&s->soft_volume, s->sample_spec.channels);
269 s->base_volume = PA_VOLUME_NORM;
270 s->n_volume_steps = PA_VOLUME_NORM+1;
271 s->muted = data->muted;
272 s->refresh_volume = s->refresh_muted = FALSE;
273
274 reset_callbacks(s);
275 s->userdata = NULL;
276
277 s->asyncmsgq = NULL;
278
279 /* As a minor optimization we just steal the list instead of
280 * copying it here */
281 s->ports = data->ports;
282 data->ports = NULL;
283
284 s->active_port = NULL;
285 s->save_port = FALSE;
286
287 if (data->active_port)
288 if ((s->active_port = pa_hashmap_get(s->ports, data->active_port)))
289 s->save_port = data->save_port;
290
291 if (!s->active_port) {
292 void *state;
293 pa_device_port *p;
294
295 PA_HASHMAP_FOREACH(p, s->ports, state)
296 if (!s->active_port || p->priority > s->active_port->priority)
297 s->active_port = p;
298 }
299
300 if (s->active_port)
301 s->latency_offset = s->active_port->latency_offset;
302 else
303 s->latency_offset = 0;
304
305 s->save_volume = data->save_volume;
306 s->save_muted = data->save_muted;
307
308 pa_silence_memchunk_get(
309 &core->silence_cache,
310 core->mempool,
311 &s->silence,
312 &s->sample_spec,
313 0);
314
315 s->thread_info.rtpoll = NULL;
316 s->thread_info.outputs = pa_hashmap_new(pa_idxset_trivial_hash_func, pa_idxset_trivial_compare_func);
317 s->thread_info.soft_volume = s->soft_volume;
318 s->thread_info.soft_muted = s->muted;
319 s->thread_info.state = s->state;
320 s->thread_info.max_rewind = 0;
321 s->thread_info.requested_latency_valid = FALSE;
322 s->thread_info.requested_latency = 0;
323 s->thread_info.min_latency = ABSOLUTE_MIN_LATENCY;
324 s->thread_info.max_latency = ABSOLUTE_MAX_LATENCY;
325 s->thread_info.fixed_latency = flags & PA_SOURCE_DYNAMIC_LATENCY ? 0 : DEFAULT_FIXED_LATENCY;
326
327 PA_LLIST_HEAD_INIT(pa_source_volume_change, s->thread_info.volume_changes);
328 s->thread_info.volume_changes_tail = NULL;
329 pa_sw_cvolume_multiply(&s->thread_info.current_hw_volume, &s->soft_volume, &s->real_volume);
330 s->thread_info.volume_change_safety_margin = core->deferred_volume_safety_margin_usec;
331 s->thread_info.volume_change_extra_delay = core->deferred_volume_extra_delay_usec;
332 s->thread_info.latency_offset = s->latency_offset;
333
334 /* FIXME: This should probably be moved to pa_source_put() */
335 pa_assert_se(pa_idxset_put(core->sources, s, &s->index) >= 0);
336
337 if (s->card)
338 pa_assert_se(pa_idxset_put(s->card->sources, s, NULL) >= 0);
339
340 pt = pa_proplist_to_string_sep(s->proplist, "\n ");
341 pa_log_info("Created source %u \"%s\" with sample spec %s and channel map %s\n %s",
342 s->index,
343 s->name,
344 pa_sample_spec_snprint(st, sizeof(st), &s->sample_spec),
345 pa_channel_map_snprint(cm, sizeof(cm), &s->channel_map),
346 pt);
347 pa_xfree(pt);
348
349 return s;
350 }
351
352 /* Called from main context */
353 static int source_set_state(pa_source *s, pa_source_state_t state) {
354 int ret;
355 pa_bool_t suspend_change;
356 pa_source_state_t original_state;
357
358 pa_assert(s);
359 pa_assert_ctl_context();
360
361 if (s->state == state)
362 return 0;
363
364 original_state = s->state;
365
366 suspend_change =
367 (original_state == PA_SOURCE_SUSPENDED && PA_SOURCE_IS_OPENED(state)) ||
368 (PA_SOURCE_IS_OPENED(original_state) && state == PA_SOURCE_SUSPENDED);
369
370 if (s->set_state)
371 if ((ret = s->set_state(s, state)) < 0)
372 return ret;
373
374 if (s->asyncmsgq)
375 if ((ret = pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_STATE, PA_UINT_TO_PTR(state), 0, NULL)) < 0) {
376
377 if (s->set_state)
378 s->set_state(s, original_state);
379
380 return ret;
381 }
382
383 s->state = state;
384
385 if (state != PA_SOURCE_UNLINKED) { /* if we enter UNLINKED state pa_source_unlink() will fire the appropriate events */
386 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_STATE_CHANGED], s);
387 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
388 }
389
390 if (suspend_change) {
391 pa_source_output *o;
392 uint32_t idx;
393
394 /* We're suspending or resuming, tell everyone about it */
395
396 PA_IDXSET_FOREACH(o, s->outputs, idx)
397 if (s->state == PA_SOURCE_SUSPENDED &&
398 (o->flags & PA_SOURCE_OUTPUT_KILL_ON_SUSPEND))
399 pa_source_output_kill(o);
400 else if (o->suspend)
401 o->suspend(o, state == PA_SOURCE_SUSPENDED);
402 }
403
404 return 0;
405 }
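/*
 * A brief summary of the state machine driven above and by pa_source_put()
 * and pa_source_unlink() below:
 *
 *   PA_SOURCE_INIT       between pa_source_new() and pa_source_put()
 *   PA_SOURCE_IDLE       linked, but pa_source_used_by() reports no active outputs
 *   PA_SOURCE_RUNNING    linked, with at least one uncorked output
 *   PA_SOURCE_SUSPENDED  typically entered when s->suspend_cause becomes non-zero
 *                        (or, for monitor sources, when the monitored sink suspends)
 *   PA_SOURCE_UNLINKED   after pa_source_unlink()
 */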
406
407 void pa_source_set_get_volume_callback(pa_source *s, pa_source_cb_t cb) {
408 pa_assert(s);
409
410 s->get_volume = cb;
411 }
412
413 void pa_source_set_set_volume_callback(pa_source *s, pa_source_cb_t cb) {
414 pa_source_flags_t flags;
415
416 pa_assert(s);
417 pa_assert(!s->write_volume || cb);
418
419 s->set_volume = cb;
420
421 /* Save the current flags so we can tell if they've changed */
422 flags = s->flags;
423
424 if (cb) {
425 /* The source implementor is responsible for setting decibel volume support */
426 s->flags |= PA_SOURCE_HW_VOLUME_CTRL;
427 } else {
428 s->flags &= ~PA_SOURCE_HW_VOLUME_CTRL;
429 /* See note below in pa_source_put() about volume sharing and decibel volumes */
430 pa_source_enable_decibel_volume(s, !(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
431 }
432
433 /* If the flags have changed after init, let any clients know via a change event */
434 if (s->state != PA_SOURCE_INIT && flags != s->flags)
435 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
436 }
437
438 void pa_source_set_write_volume_callback(pa_source *s, pa_source_cb_t cb) {
439 pa_source_flags_t flags;
440
441 pa_assert(s);
442 pa_assert(!cb || s->set_volume);
443
444 s->write_volume = cb;
445
446 /* Save the current flags so we can tell if they've changed */
447 flags = s->flags;
448
449 if (cb)
450 s->flags |= PA_SOURCE_DEFERRED_VOLUME;
451 else
452 s->flags &= ~PA_SOURCE_DEFERRED_VOLUME;
453
454 /* If the flags have changed after init, let any clients know via a change event */
455 if (s->state != PA_SOURCE_INIT && flags != s->flags)
456 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
457 }
458
459 void pa_source_set_get_mute_callback(pa_source *s, pa_source_cb_t cb) {
460 pa_assert(s);
461
462 s->get_mute = cb;
463 }
464
465 void pa_source_set_set_mute_callback(pa_source *s, pa_source_cb_t cb) {
466 pa_source_flags_t flags;
467
468 pa_assert(s);
469
470 s->set_mute = cb;
471
472 /* Save the current flags so we can tell if they've changed */
473 flags = s->flags;
474
475 if (cb)
476 s->flags |= PA_SOURCE_HW_MUTE_CTRL;
477 else
478 s->flags &= ~PA_SOURCE_HW_MUTE_CTRL;
479
480 /* If the flags have changed after init, let any clients know via a change event */
481 if (s->state != PA_SOURCE_INIT && flags != s->flags)
482 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
483 }
484
485 static void enable_flat_volume(pa_source *s, pa_bool_t enable) {
486 pa_source_flags_t flags;
487
488 pa_assert(s);
489
490 /* Always follow the overall user preference here */
491 enable = enable && s->core->flat_volumes;
492
493 /* Save the current flags so we can tell if they've changed */
494 flags = s->flags;
495
496 if (enable)
497 s->flags |= PA_SOURCE_FLAT_VOLUME;
498 else
499 s->flags &= ~PA_SOURCE_FLAT_VOLUME;
500
501 /* If the flags have changed after init, let any clients know via a change event */
502 if (s->state != PA_SOURCE_INIT && flags != s->flags)
503 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
504 }
505
506 void pa_source_enable_decibel_volume(pa_source *s, pa_bool_t enable) {
507 pa_source_flags_t flags;
508
509 pa_assert(s);
510
511 /* Save the current flags so we can tell if they've changed */
512 flags = s->flags;
513
514 if (enable) {
515 s->flags |= PA_SOURCE_DECIBEL_VOLUME;
516 enable_flat_volume(s, TRUE);
517 } else {
518 s->flags &= ~PA_SOURCE_DECIBEL_VOLUME;
519 enable_flat_volume(s, FALSE);
520 }
521
522 /* If the flags have changed after init, let any clients know via a change event */
523 if (s->state != PA_SOURCE_INIT && flags != s->flags)
524 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
525 }
526
527 /* Called from main context */
528 void pa_source_put(pa_source *s) {
529 pa_source_assert_ref(s);
530 pa_assert_ctl_context();
531
532 pa_assert(s->state == PA_SOURCE_INIT);
533 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) || s->output_from_master);
534
535 /* The following fields must be initialized properly when calling _put() */
536 pa_assert(s->asyncmsgq);
537 pa_assert(s->thread_info.min_latency <= s->thread_info.max_latency);
538
539 /* Generally, flags should be initialized via pa_source_new(). As a
540 * special exception we allow some volume related flags to be set
541 * between _new() and _put() by the callback setter functions above.
542 *
543      * Thus we implement a couple of safeguards here which ensure the above
544 * setters were used (or at least the implementor made manual changes
545 * in a compatible way).
546 *
547 * Note: All of these flags set here can change over the life time
548 * of the source. */
549 pa_assert(!(s->flags & PA_SOURCE_HW_VOLUME_CTRL) || s->set_volume);
550 pa_assert(!(s->flags & PA_SOURCE_DEFERRED_VOLUME) || s->write_volume);
551 pa_assert(!(s->flags & PA_SOURCE_HW_MUTE_CTRL) || s->set_mute);
552
553 /* XXX: Currently decibel volume is disabled for all sources that use volume
554 * sharing. When the master source supports decibel volume, it would be good
555 * to have the flag also in the filter source, but currently we don't do that
556 * so that the flags of the filter source never change when it's moved from
557 * a master source to another. One solution for this problem would be to
558 * remove user-visible volume altogether from filter sources when volume
559 * sharing is used, but the current approach was easier to implement... */
560 /* We always support decibel volumes in software, otherwise we leave it to
561 * the source implementor to set this flag as needed.
562 *
563 * Note: This flag can also change over the life time of the source. */
564 if (!(s->flags & PA_SOURCE_HW_VOLUME_CTRL) && !(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
565 pa_source_enable_decibel_volume(s, TRUE);
566
567     /* If the source implementor supports DB volumes by itself, we should always
568      * try to enable flat volumes too */
569 if ((s->flags & PA_SOURCE_DECIBEL_VOLUME))
570 enable_flat_volume(s, TRUE);
571
572 if (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) {
573 pa_source *root_source = pa_source_get_master(s);
574
575 pa_assert(PA_LIKELY(root_source));
576
577 s->reference_volume = root_source->reference_volume;
578 pa_cvolume_remap(&s->reference_volume, &root_source->channel_map, &s->channel_map);
579
580 s->real_volume = root_source->real_volume;
581 pa_cvolume_remap(&s->real_volume, &root_source->channel_map, &s->channel_map);
582 } else
583         /* We assume that if the source implementor changed the default
584 * volume he did so in real_volume, because that is the usual
585 * place where he is supposed to place his changes. */
586 s->reference_volume = s->real_volume;
587
588 s->thread_info.soft_volume = s->soft_volume;
589 s->thread_info.soft_muted = s->muted;
590 pa_sw_cvolume_multiply(&s->thread_info.current_hw_volume, &s->soft_volume, &s->real_volume);
591
592 pa_assert((s->flags & PA_SOURCE_HW_VOLUME_CTRL)
593 || (s->base_volume == PA_VOLUME_NORM
594 && ((s->flags & PA_SOURCE_DECIBEL_VOLUME || (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)))));
595 pa_assert(!(s->flags & PA_SOURCE_DECIBEL_VOLUME) || s->n_volume_steps == PA_VOLUME_NORM+1);
596 pa_assert(!(s->flags & PA_SOURCE_DYNAMIC_LATENCY) == (s->thread_info.fixed_latency != 0));
597
598 if (s->suspend_cause)
599 pa_assert_se(source_set_state(s, PA_SOURCE_SUSPENDED) == 0);
600 else
601 pa_assert_se(source_set_state(s, PA_SOURCE_IDLE) == 0);
602
603 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_NEW, s->index);
604 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PUT], s);
605 }
606
607 /* Called from main context */
608 void pa_source_unlink(pa_source *s) {
609 pa_bool_t linked;
610 pa_source_output *o, *j = NULL;
611
612 pa_assert(s);
613 pa_assert_ctl_context();
614
615     /* See pa_sink_unlink() for a couple of comments on how this function
616 * works. */
617
618 linked = PA_SOURCE_IS_LINKED(s->state);
619
620 if (linked)
621 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_UNLINK], s);
622
623 if (s->state != PA_SOURCE_UNLINKED)
624 pa_namereg_unregister(s->core, s->name);
625 pa_idxset_remove_by_data(s->core->sources, s, NULL);
626
627 if (s->card)
628 pa_idxset_remove_by_data(s->card->sources, s, NULL);
629
630 while ((o = pa_idxset_first(s->outputs, NULL))) {
631 pa_assert(o != j);
632 pa_source_output_kill(o);
633 j = o;
634 }
635
636 if (linked)
637 source_set_state(s, PA_SOURCE_UNLINKED);
638 else
639 s->state = PA_SOURCE_UNLINKED;
640
641 reset_callbacks(s);
642
643 if (linked) {
644 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_REMOVE, s->index);
645 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_UNLINK_POST], s);
646 }
647 }
648
649 /* Called from main context */
650 static void source_free(pa_object *o) {
651 pa_source_output *so;
652 pa_source *s = PA_SOURCE(o);
653
654 pa_assert(s);
655 pa_assert_ctl_context();
656 pa_assert(pa_source_refcnt(s) == 0);
657
658 if (PA_SOURCE_IS_LINKED(s->state))
659 pa_source_unlink(s);
660
661 pa_log_info("Freeing source %u \"%s\"", s->index, s->name);
662
663 pa_idxset_free(s->outputs, NULL, NULL);
664
665 while ((so = pa_hashmap_steal_first(s->thread_info.outputs)))
666 pa_source_output_unref(so);
667
668 pa_hashmap_free(s->thread_info.outputs, NULL, NULL);
669
670 if (s->silence.memblock)
671 pa_memblock_unref(s->silence.memblock);
672
673 pa_xfree(s->name);
674 pa_xfree(s->driver);
675
676 if (s->proplist)
677 pa_proplist_free(s->proplist);
678
679 if (s->ports)
680 pa_device_port_hashmap_free(s->ports);
681
682 pa_xfree(s);
683 }
684
685 /* Called from main context, and not while the IO thread is active, please */
686 void pa_source_set_asyncmsgq(pa_source *s, pa_asyncmsgq *q) {
687 pa_source_assert_ref(s);
688 pa_assert_ctl_context();
689
690 s->asyncmsgq = q;
691 }
692
693 /* Called from main context, and not while the IO thread is active, please */
694 void pa_source_update_flags(pa_source *s, pa_source_flags_t mask, pa_source_flags_t value) {
695 pa_source_assert_ref(s);
696 pa_assert_ctl_context();
697
698 if (mask == 0)
699 return;
700
701 /* For now, allow only a minimal set of flags to be changed. */
702 pa_assert((mask & ~(PA_SOURCE_DYNAMIC_LATENCY|PA_SOURCE_LATENCY)) == 0);
703
704 s->flags = (s->flags & ~mask) | (value & mask);
705 }
706
707 /* Called from IO context, or before _put() from main context */
708 void pa_source_set_rtpoll(pa_source *s, pa_rtpoll *p) {
709 pa_source_assert_ref(s);
710 pa_source_assert_io_context(s);
711
712 s->thread_info.rtpoll = p;
713 }
714
715 /* Called from main context */
716 int pa_source_update_status(pa_source*s) {
717 pa_source_assert_ref(s);
718 pa_assert_ctl_context();
719 pa_assert(PA_SOURCE_IS_LINKED(s->state));
720
721 if (s->state == PA_SOURCE_SUSPENDED)
722 return 0;
723
724 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
725 }
726
727 /* Called from any context - must be threadsafe */
728 void pa_source_set_mixer_dirty(pa_source *s, pa_bool_t is_dirty)
729 {
730 pa_atomic_store(&s->mixer_dirty, is_dirty ? 1 : 0);
731 }
732
733 /* Called from main context */
734 int pa_source_suspend(pa_source *s, pa_bool_t suspend, pa_suspend_cause_t cause) {
735 pa_source_assert_ref(s);
736 pa_assert_ctl_context();
737 pa_assert(PA_SOURCE_IS_LINKED(s->state));
738 pa_assert(cause != 0);
739
740 if (s->monitor_of && cause != PA_SUSPEND_PASSTHROUGH)
741 return -PA_ERR_NOTSUPPORTED;
742
743 if (suspend)
744 s->suspend_cause |= cause;
745 else
746 s->suspend_cause &= ~cause;
747
748 if (!(s->suspend_cause & PA_SUSPEND_SESSION) && (pa_atomic_load(&s->mixer_dirty) != 0)) {
749 /* This might look racy but isn't: If somebody sets mixer_dirty exactly here,
750 it'll be handled just fine. */
751 pa_source_set_mixer_dirty(s, FALSE);
752 pa_log_debug("Mixer is now accessible. Updating alsa mixer settings.");
753 if (s->active_port && s->set_port) {
754 if (s->flags & PA_SOURCE_DEFERRED_VOLUME) {
755 struct source_message_set_port msg = { .port = s->active_port, .ret = 0 };
756 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_PORT, &msg, 0, NULL) == 0);
757 }
758 else
759 s->set_port(s, s->active_port);
760 }
761 else {
762 if (s->set_mute)
763 s->set_mute(s);
764 if (s->set_volume)
765 s->set_volume(s);
766 }
767 }
768
769 if ((pa_source_get_state(s) == PA_SOURCE_SUSPENDED) == !!s->suspend_cause)
770 return 0;
771
772 pa_log_debug("Suspend cause of source %s is 0x%04x, %s", s->name, s->suspend_cause, s->suspend_cause ? "suspending" : "resuming");
773
774 if (s->suspend_cause)
775 return source_set_state(s, PA_SOURCE_SUSPENDED);
776 else
777 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
778 }
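/*
 * Suspend causes accumulate as a bitmask, so a source only resumes once every
 * cause has been cleared again. A sketch of a possible call sequence (the
 * particular causes are just examples):
 *
 *     pa_source_suspend(s, TRUE,  PA_SUSPEND_IDLE);   cause = IDLE       -> suspends
 *     pa_source_suspend(s, TRUE,  PA_SUSPEND_USER);   cause = IDLE|USER  -> stays suspended
 *     pa_source_suspend(s, FALSE, PA_SUSPEND_IDLE);   cause = USER       -> still suspended
 *     pa_source_suspend(s, FALSE, PA_SUSPEND_USER);   cause = 0          -> resumes
 */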
779
780 /* Called from main context */
781 int pa_source_sync_suspend(pa_source *s) {
782 pa_sink_state_t state;
783
784 pa_source_assert_ref(s);
785 pa_assert_ctl_context();
786 pa_assert(PA_SOURCE_IS_LINKED(s->state));
787 pa_assert(s->monitor_of);
788
789 state = pa_sink_get_state(s->monitor_of);
790
791 if (state == PA_SINK_SUSPENDED)
792 return source_set_state(s, PA_SOURCE_SUSPENDED);
793
794 pa_assert(PA_SINK_IS_OPENED(state));
795
796 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
797 }
798
799 /* Called from main context */
800 pa_queue *pa_source_move_all_start(pa_source *s, pa_queue *q) {
801 pa_source_output *o, *n;
802 uint32_t idx;
803
804 pa_source_assert_ref(s);
805 pa_assert_ctl_context();
806 pa_assert(PA_SOURCE_IS_LINKED(s->state));
807
808 if (!q)
809 q = pa_queue_new();
810
811 for (o = PA_SOURCE_OUTPUT(pa_idxset_first(s->outputs, &idx)); o; o = n) {
812 n = PA_SOURCE_OUTPUT(pa_idxset_next(s->outputs, &idx));
813
814 pa_source_output_ref(o);
815
816 if (pa_source_output_start_move(o) >= 0)
817 pa_queue_push(q, o);
818 else
819 pa_source_output_unref(o);
820 }
821
822 return q;
823 }
824
825 /* Called from main context */
826 void pa_source_move_all_finish(pa_source *s, pa_queue *q, pa_bool_t save) {
827 pa_source_output *o;
828
829 pa_source_assert_ref(s);
830 pa_assert_ctl_context();
831 pa_assert(PA_SOURCE_IS_LINKED(s->state));
832 pa_assert(q);
833
834 while ((o = PA_SOURCE_OUTPUT(pa_queue_pop(q)))) {
835 if (pa_source_output_finish_move(o, s, save) < 0)
836 pa_source_output_fail_move(o);
837
838 pa_source_output_unref(o);
839 }
840
841 pa_queue_free(q, NULL);
842 }
843
844 /* Called from main context */
845 void pa_source_move_all_fail(pa_queue *q) {
846 pa_source_output *o;
847
848 pa_assert_ctl_context();
849 pa_assert(q);
850
851 while ((o = PA_SOURCE_OUTPUT(pa_queue_pop(q)))) {
852 pa_source_output_fail_move(o);
853 pa_source_output_unref(o);
854 }
855
856 pa_queue_free(q, NULL);
857 }
858
859 /* Called from IO thread context */
860 void pa_source_process_rewind(pa_source *s, size_t nbytes) {
861 pa_source_output *o;
862 void *state = NULL;
863
864 pa_source_assert_ref(s);
865 pa_source_assert_io_context(s);
866 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
867
868 if (nbytes <= 0)
869 return;
870
871 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
872 return;
873
874 pa_log_debug("Processing rewind...");
875
876 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
877 pa_source_output_assert_ref(o);
878 pa_source_output_process_rewind(o, nbytes);
879 }
880 }
881
882 /* Called from IO thread context */
883 void pa_source_post(pa_source*s, const pa_memchunk *chunk) {
884 pa_source_output *o;
885 void *state = NULL;
886
887 pa_source_assert_ref(s);
888 pa_source_assert_io_context(s);
889 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
890 pa_assert(chunk);
891
892 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
893 return;
894
895 if (s->thread_info.soft_muted || !pa_cvolume_is_norm(&s->thread_info.soft_volume)) {
896 pa_memchunk vchunk = *chunk;
897
898 pa_memblock_ref(vchunk.memblock);
899 pa_memchunk_make_writable(&vchunk, 0);
900
901 if (s->thread_info.soft_muted || pa_cvolume_is_muted(&s->thread_info.soft_volume))
902 pa_silence_memchunk(&vchunk, &s->sample_spec);
903 else
904 pa_volume_memchunk(&vchunk, &s->sample_spec, &s->thread_info.soft_volume);
905
906 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL))) {
907 pa_source_output_assert_ref(o);
908
909 if (!o->thread_info.direct_on_input)
910 pa_source_output_push(o, &vchunk);
911 }
912
913 pa_memblock_unref(vchunk.memblock);
914 } else {
915
916 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL))) {
917 pa_source_output_assert_ref(o);
918
919 if (!o->thread_info.direct_on_input)
920 pa_source_output_push(o, chunk);
921 }
922 }
923 }
924
925 /* Called from IO thread context */
926 void pa_source_post_direct(pa_source*s, pa_source_output *o, const pa_memchunk *chunk) {
927 pa_source_assert_ref(s);
928 pa_source_assert_io_context(s);
929 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
930 pa_source_output_assert_ref(o);
931 pa_assert(o->thread_info.direct_on_input);
932 pa_assert(chunk);
933
934 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
935 return;
936
937 if (s->thread_info.soft_muted || !pa_cvolume_is_norm(&s->thread_info.soft_volume)) {
938 pa_memchunk vchunk = *chunk;
939
940 pa_memblock_ref(vchunk.memblock);
941 pa_memchunk_make_writable(&vchunk, 0);
942
943 if (s->thread_info.soft_muted || pa_cvolume_is_muted(&s->thread_info.soft_volume))
944 pa_silence_memchunk(&vchunk, &s->sample_spec);
945 else
946 pa_volume_memchunk(&vchunk, &s->sample_spec, &s->thread_info.soft_volume);
947
948 pa_source_output_push(o, &vchunk);
949
950 pa_memblock_unref(vchunk.memblock);
951 } else
952 pa_source_output_push(o, chunk);
953 }
954
955 /* Called from main thread */
956 pa_bool_t pa_source_update_rate(pa_source *s, uint32_t rate, pa_bool_t passthrough)
957 {
958 if (s->update_rate) {
959 uint32_t desired_rate = rate;
960 uint32_t default_rate = s->default_sample_rate;
961 uint32_t alternate_rate = s->alternate_sample_rate;
962 uint32_t idx;
963 pa_source_output *o;
964 pa_bool_t use_alternate = FALSE;
965
966 if (PA_UNLIKELY(default_rate == alternate_rate)) {
967 pa_log_warn("Default and alternate sample rates are the same.");
968 return FALSE;
969 }
970
971 if (PA_SOURCE_IS_RUNNING(s->state)) {
972 pa_log_info("Cannot update rate, SOURCE_IS_RUNNING, will keep using %u Hz",
973 s->sample_spec.rate);
974 return FALSE;
975 }
976
977 if (PA_UNLIKELY (desired_rate < 8000 ||
978 desired_rate > PA_RATE_MAX))
979 return FALSE;
980
981 if (!passthrough) {
982 pa_assert(default_rate % 4000 || default_rate % 11025);
983 pa_assert(alternate_rate % 4000 || alternate_rate % 11025);
984
985 if (default_rate % 4000) {
986             /* default is an 11025 multiple */
987 if ((alternate_rate % 4000 == 0) && (desired_rate % 4000 == 0))
988 use_alternate=TRUE;
989 } else {
990             /* default is a 4000 multiple */
991 if ((alternate_rate % 11025 == 0) && (desired_rate % 11025 == 0))
992 use_alternate=TRUE;
993 }
994
995 if (use_alternate)
996 desired_rate = alternate_rate;
997 else
998 desired_rate = default_rate;
999 } else {
1000 desired_rate = rate; /* use stream sampling rate, discard default/alternate settings */
1001 }
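/*
 * Worked example of the rate-family selection above: with a default rate of
 * 44100 Hz (the 11025 Hz family) and an alternate rate of 48000 Hz, a
 * non-passthrough request for 96000 Hz (a multiple of 4000) switches to the
 * alternate 48000 Hz, while a request for 22050 Hz keeps the default 44100 Hz.
 */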
1002
1003 if (!passthrough && pa_source_used_by(s) > 0)
1004 return FALSE;
1005
1006 pa_source_suspend(s, TRUE, PA_SUSPEND_IDLE); /* needed before rate update, will be resumed automatically */
1007
1008 if (s->update_rate(s, desired_rate) == TRUE) {
1009 pa_log_info("Changed sampling rate successfully ");
1010
1011 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1012 if (o->state == PA_SOURCE_OUTPUT_CORKED)
1013 pa_source_output_update_rate(o);
1014 }
1015 return TRUE;
1016 }
1017 }
1018 return FALSE;
1019 }
1020
1021 /* Called from main thread */
1022 pa_usec_t pa_source_get_latency(pa_source *s) {
1023 pa_usec_t usec;
1024
1025 pa_source_assert_ref(s);
1026 pa_assert_ctl_context();
1027 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1028
1029 if (s->state == PA_SOURCE_SUSPENDED)
1030 return 0;
1031
1032 if (!(s->flags & PA_SOURCE_LATENCY))
1033 return 0;
1034
1035 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_LATENCY, &usec, 0, NULL) == 0);
1036
1037 /* usec is unsigned, so check that the offset can be added to usec without
1038 * underflowing. */
1039 if (-s->latency_offset <= (int64_t) usec)
1040 usec += s->latency_offset;
1041 else
1042 usec = 0;
1043
1044 return usec;
1045 }
1046
1047 /* Called from IO thread */
1048 pa_usec_t pa_source_get_latency_within_thread(pa_source *s) {
1049 pa_usec_t usec = 0;
1050 pa_msgobject *o;
1051
1052 pa_source_assert_ref(s);
1053 pa_source_assert_io_context(s);
1054 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
1055
1056 /* The returned value is supposed to be in the time domain of the sound card! */
1057
1058 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
1059 return 0;
1060
1061 if (!(s->flags & PA_SOURCE_LATENCY))
1062 return 0;
1063
1064 o = PA_MSGOBJECT(s);
1065
1066 /* FIXME: We probably should make this a proper vtable callback instead of going through process_msg() */
1067
1068 if (o->process_msg(o, PA_SOURCE_MESSAGE_GET_LATENCY, &usec, 0, NULL) < 0)
1069 return -1;
1070
1071 /* usec is unsigned, so check that the offset can be added to usec without
1072 * underflowing. */
1073 if (-s->thread_info.latency_offset <= (int64_t) usec)
1074 usec += s->thread_info.latency_offset;
1075 else
1076 usec = 0;
1077
1078 return usec;
1079 }
1080
1081 /* Called from the main thread (and also from the IO thread while the main
1082 * thread is waiting).
1083 *
1084 * When a source uses volume sharing, it never has the PA_SOURCE_FLAT_VOLUME flag
1085 * set. Instead, flat volume mode is detected by checking whether the root source
1086 * has the flag set. */
1087 pa_bool_t pa_source_flat_volume_enabled(pa_source *s) {
1088 pa_source_assert_ref(s);
1089
1090 s = pa_source_get_master(s);
1091
1092 if (PA_LIKELY(s))
1093 return (s->flags & PA_SOURCE_FLAT_VOLUME);
1094 else
1095 return FALSE;
1096 }
1097
1098 /* Called from the main thread (and also from the IO thread while the main
1099 * thread is waiting). */
1100 pa_source *pa_source_get_master(pa_source *s) {
1101 pa_source_assert_ref(s);
1102
1103 while (s && (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1104 if (PA_UNLIKELY(!s->output_from_master))
1105 return NULL;
1106
1107 s = s->output_from_master->source;
1108 }
1109
1110 return s;
1111 }
1112
1113 /* Called from main context */
1114 pa_bool_t pa_source_is_passthrough(pa_source *s) {
1115
1116 pa_source_assert_ref(s);
1117
1118 /* NB Currently only monitor sources support passthrough mode */
1119 return (s->monitor_of && pa_sink_is_passthrough(s->monitor_of));
1120 }
1121
1122 /* Called from main context */
1123 void pa_source_enter_passthrough(pa_source *s) {
1124 pa_cvolume volume;
1125
1126 /* set the volume to NORM */
1127 s->saved_volume = *pa_source_get_volume(s, TRUE);
1128 s->saved_save_volume = s->save_volume;
1129
1130 pa_cvolume_set(&volume, s->sample_spec.channels, PA_MIN(s->base_volume, PA_VOLUME_NORM));
1131 pa_source_set_volume(s, &volume, TRUE, FALSE);
1132 }
1133
1134 /* Called from main context */
1135 void pa_source_leave_passthrough(pa_source *s) {
1136 /* Restore source volume to what it was before we entered passthrough mode */
1137 pa_source_set_volume(s, &s->saved_volume, TRUE, s->saved_save_volume);
1138
1139 pa_cvolume_init(&s->saved_volume);
1140 s->saved_save_volume = FALSE;
1141 }
1142
1143 /* Called from main context. */
1144 static void compute_reference_ratio(pa_source_output *o) {
1145 unsigned c = 0;
1146 pa_cvolume remapped;
1147
1148 pa_assert(o);
1149 pa_assert(pa_source_flat_volume_enabled(o->source));
1150
1151 /*
1152 * Calculates the reference ratio from the source's reference
1153 * volume. This basically calculates:
1154 *
1155 * o->reference_ratio = o->volume / o->source->reference_volume
1156 */
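    /* Since pa_sw_volume_divide() composes gains, the ratio is easiest to read
     * in decibel terms: for example, an output volume of -10 dB against a
     * source reference volume of -4 dB gives a reference ratio of roughly
     * -6 dB (ignoring rounding and clamping). */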
1157
1158 remapped = o->source->reference_volume;
1159 pa_cvolume_remap(&remapped, &o->source->channel_map, &o->channel_map);
1160
1161 o->reference_ratio.channels = o->sample_spec.channels;
1162
1163 for (c = 0; c < o->sample_spec.channels; c++) {
1164
1165 /* We don't update when the source volume is 0 anyway */
1166 if (remapped.values[c] <= PA_VOLUME_MUTED)
1167 continue;
1168
1169 /* Don't update the reference ratio unless necessary */
1170 if (pa_sw_volume_multiply(
1171 o->reference_ratio.values[c],
1172 remapped.values[c]) == o->volume.values[c])
1173 continue;
1174
1175 o->reference_ratio.values[c] = pa_sw_volume_divide(
1176 o->volume.values[c],
1177 remapped.values[c]);
1178 }
1179 }
1180
1181 /* Called from main context. Only called for the root source in volume sharing
1182 * cases, except for internal recursive calls. */
1183 static void compute_reference_ratios(pa_source *s) {
1184 uint32_t idx;
1185 pa_source_output *o;
1186
1187 pa_source_assert_ref(s);
1188 pa_assert_ctl_context();
1189 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1190 pa_assert(pa_source_flat_volume_enabled(s));
1191
1192 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1193 compute_reference_ratio(o);
1194
1195 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1196 compute_reference_ratios(o->destination_source);
1197 }
1198 }
1199
1200 /* Called from main context. Only called for the root source in volume sharing
1201 * cases, except for internal recursive calls. */
1202 static void compute_real_ratios(pa_source *s) {
1203 pa_source_output *o;
1204 uint32_t idx;
1205
1206 pa_source_assert_ref(s);
1207 pa_assert_ctl_context();
1208 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1209 pa_assert(pa_source_flat_volume_enabled(s));
1210
1211 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1212 unsigned c;
1213 pa_cvolume remapped;
1214
1215 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1216            /* The origin source uses volume sharing, so this output's real ratio
1217              * is handled as a special case - the real ratio must be 0 dB, and
1218              * as a result o->soft_volume must equal o->volume_factor. */
1219 pa_cvolume_reset(&o->real_ratio, o->real_ratio.channels);
1220 o->soft_volume = o->volume_factor;
1221
1222 compute_real_ratios(o->destination_source);
1223
1224 continue;
1225 }
1226
1227 /*
1228 * This basically calculates:
1229 *
1230          * o->real_ratio := o->volume / s->real_volume
1231          * o->soft_volume := o->real_ratio * o->volume_factor
1232 */
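        /* Put differently: s->real_volume is what the hardware (or the master
         * source) is asked to deliver, and o->soft_volume is the residual
         * software scaling this output still has to apply so that the stream
         * ends up at o->volume (times its internal o->volume_factor). */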
1233
1234 remapped = s->real_volume;
1235 pa_cvolume_remap(&remapped, &s->channel_map, &o->channel_map);
1236
1237 o->real_ratio.channels = o->sample_spec.channels;
1238 o->soft_volume.channels = o->sample_spec.channels;
1239
1240 for (c = 0; c < o->sample_spec.channels; c++) {
1241
1242 if (remapped.values[c] <= PA_VOLUME_MUTED) {
1243 /* We leave o->real_ratio untouched */
1244 o->soft_volume.values[c] = PA_VOLUME_MUTED;
1245 continue;
1246 }
1247
1248 /* Don't lose accuracy unless necessary */
1249 if (pa_sw_volume_multiply(
1250 o->real_ratio.values[c],
1251 remapped.values[c]) != o->volume.values[c])
1252
1253 o->real_ratio.values[c] = pa_sw_volume_divide(
1254 o->volume.values[c],
1255 remapped.values[c]);
1256
1257 o->soft_volume.values[c] = pa_sw_volume_multiply(
1258 o->real_ratio.values[c],
1259 o->volume_factor.values[c]);
1260 }
1261
1262 /* We don't copy the soft_volume to the thread_info data
1263 * here. That must be done by the caller */
1264 }
1265 }
1266
1267 static pa_cvolume *cvolume_remap_minimal_impact(
1268 pa_cvolume *v,
1269 const pa_cvolume *template,
1270 const pa_channel_map *from,
1271 const pa_channel_map *to) {
1272
1273 pa_cvolume t;
1274
1275 pa_assert(v);
1276 pa_assert(template);
1277 pa_assert(from);
1278 pa_assert(to);
1279 pa_assert(pa_cvolume_compatible_with_channel_map(v, from));
1280 pa_assert(pa_cvolume_compatible_with_channel_map(template, to));
1281
1282 /* Much like pa_cvolume_remap(), but tries to minimize impact when
1283 * mapping from source output to source volumes:
1284 *
1285 * If template is a possible remapping from v it is used instead
1286 * of remapping anew.
1287 *
1288 * If the channel maps don't match we set an all-channel volume on
1289 * the source to ensure that changing a volume on one stream has no
1290 * effect that cannot be compensated for in another stream that
1291 * does not have the same channel map as the source. */
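    /* For example (a hypothetical case): when remapping a stereo stream volume
     * onto a 5.1 source, the current source volume ("template") is kept if it
     * already maps back to the stream volume; otherwise all six source
     * channels are set to pa_cvolume_max(v). */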
1292
1293 if (pa_channel_map_equal(from, to))
1294 return v;
1295
1296 t = *template;
1297 if (pa_cvolume_equal(pa_cvolume_remap(&t, to, from), v)) {
1298 *v = *template;
1299 return v;
1300 }
1301
1302 pa_cvolume_set(v, to->channels, pa_cvolume_max(v));
1303 return v;
1304 }
1305
1306 /* Called from main thread. Only called for the root source in volume sharing
1307 * cases, except for internal recursive calls. */
1308 static void get_maximum_output_volume(pa_source *s, pa_cvolume *max_volume, const pa_channel_map *channel_map) {
1309 pa_source_output *o;
1310 uint32_t idx;
1311
1312 pa_source_assert_ref(s);
1313 pa_assert(max_volume);
1314 pa_assert(channel_map);
1315 pa_assert(pa_source_flat_volume_enabled(s));
1316
1317 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1318 pa_cvolume remapped;
1319
1320 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1321 get_maximum_output_volume(o->destination_source, max_volume, channel_map);
1322
1323 /* Ignore this output. The origin source uses volume sharing, so this
1324 * output's volume will be set to be equal to the root source's real
1325 * volume. Obviously this output's current volume must not then
1326 * affect what the root source's real volume will be. */
1327 continue;
1328 }
1329
1330 remapped = o->volume;
1331 cvolume_remap_minimal_impact(&remapped, max_volume, &o->channel_map, channel_map);
1332 pa_cvolume_merge(max_volume, max_volume, &remapped);
1333 }
1334 }
1335
1336 /* Called from main thread. Only called for the root source in volume sharing
1337 * cases, except for internal recursive calls. */
1338 static pa_bool_t has_outputs(pa_source *s) {
1339 pa_source_output *o;
1340 uint32_t idx;
1341
1342 pa_source_assert_ref(s);
1343
1344 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1345 if (!o->destination_source || !(o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) || has_outputs(o->destination_source))
1346 return TRUE;
1347 }
1348
1349 return FALSE;
1350 }
1351
1352 /* Called from main thread. Only called for the root source in volume sharing
1353 * cases, except for internal recursive calls. */
1354 static void update_real_volume(pa_source *s, const pa_cvolume *new_volume, pa_channel_map *channel_map) {
1355 pa_source_output *o;
1356 uint32_t idx;
1357
1358 pa_source_assert_ref(s);
1359 pa_assert(new_volume);
1360 pa_assert(channel_map);
1361
1362 s->real_volume = *new_volume;
1363 pa_cvolume_remap(&s->real_volume, channel_map, &s->channel_map);
1364
1365 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1366 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1367 if (pa_source_flat_volume_enabled(s)) {
1368 pa_cvolume old_volume = o->volume;
1369
1370 /* Follow the root source's real volume. */
1371 o->volume = *new_volume;
1372 pa_cvolume_remap(&o->volume, channel_map, &o->channel_map);
1373 compute_reference_ratio(o);
1374
1375 /* The volume changed, let's tell people so */
1376 if (!pa_cvolume_equal(&old_volume, &o->volume)) {
1377 if (o->volume_changed)
1378 o->volume_changed(o);
1379
1380 pa_subscription_post(o->core, PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT|PA_SUBSCRIPTION_EVENT_CHANGE, o->index);
1381 }
1382 }
1383
1384 update_real_volume(o->destination_source, new_volume, channel_map);
1385 }
1386 }
1387 }
1388
1389 /* Called from main thread. Only called for the root source in shared volume
1390 * cases. */
1391 static void compute_real_volume(pa_source *s) {
1392 pa_source_assert_ref(s);
1393 pa_assert_ctl_context();
1394 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1395 pa_assert(pa_source_flat_volume_enabled(s));
1396 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1397
1398 /* This determines the maximum volume of all streams and sets
1399 * s->real_volume accordingly. */
1400
1401 if (!has_outputs(s)) {
1402 /* In the special case that we have no source outputs we leave the
1403 * volume unmodified. */
1404 update_real_volume(s, &s->reference_volume, &s->channel_map);
1405 return;
1406 }
1407
1408 pa_cvolume_mute(&s->real_volume, s->channel_map.channels);
1409
1410 /* First let's determine the new maximum volume of all outputs
1411 * connected to this source */
1412 get_maximum_output_volume(s, &s->real_volume, &s->channel_map);
1413 update_real_volume(s, &s->real_volume, &s->channel_map);
1414
1415 /* Then, let's update the real ratios/soft volumes of all outputs
1416 * connected to this source */
1417 compute_real_ratios(s);
1418 }
1419
1420 /* Called from main thread. Only called for the root source in shared volume
1421 * cases, except for internal recursive calls. */
1422 static void propagate_reference_volume(pa_source *s) {
1423 pa_source_output *o;
1424 uint32_t idx;
1425
1426 pa_source_assert_ref(s);
1427 pa_assert_ctl_context();
1428 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1429 pa_assert(pa_source_flat_volume_enabled(s));
1430
1431 /* This is called whenever the source volume changes that is not
1432 * caused by a source output volume change. We need to fix up the
1433 * source output volumes accordingly */
1434
1435 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1436 pa_cvolume old_volume;
1437
1438 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1439 propagate_reference_volume(o->destination_source);
1440
1441 /* Since the origin source uses volume sharing, this output's volume
1442 * needs to be updated to match the root source's real volume, but
1443 * that will be done later in update_shared_real_volume(). */
1444 continue;
1445 }
1446
1447 old_volume = o->volume;
1448
1449 /* This basically calculates:
1450 *
1451 * o->volume := o->reference_volume * o->reference_ratio */
1452
1453 o->volume = s->reference_volume;
1454 pa_cvolume_remap(&o->volume, &s->channel_map, &o->channel_map);
1455 pa_sw_cvolume_multiply(&o->volume, &o->volume, &o->reference_ratio);
1456
1457 /* The volume changed, let's tell people so */
1458 if (!pa_cvolume_equal(&old_volume, &o->volume)) {
1459
1460 if (o->volume_changed)
1461 o->volume_changed(o);
1462
1463 pa_subscription_post(o->core, PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT|PA_SUBSCRIPTION_EVENT_CHANGE, o->index);
1464 }
1465 }
1466 }
1467
1468 /* Called from main thread. Only called for the root source in volume sharing
1469 * cases, except for internal recursive calls. The return value indicates
1470 * whether any reference volume actually changed. */
1471 static pa_bool_t update_reference_volume(pa_source *s, const pa_cvolume *v, const pa_channel_map *channel_map, pa_bool_t save) {
1472 pa_cvolume volume;
1473 pa_bool_t reference_volume_changed;
1474 pa_source_output *o;
1475 uint32_t idx;
1476
1477 pa_source_assert_ref(s);
1478 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1479 pa_assert(v);
1480 pa_assert(channel_map);
1481 pa_assert(pa_cvolume_valid(v));
1482
1483 volume = *v;
1484 pa_cvolume_remap(&volume, channel_map, &s->channel_map);
1485
1486 reference_volume_changed = !pa_cvolume_equal(&volume, &s->reference_volume);
1487 s->reference_volume = volume;
1488
1489 s->save_volume = (!reference_volume_changed && s->save_volume) || save;
1490
1491 if (reference_volume_changed)
1492 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1493 else if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1494 /* If the root source's volume doesn't change, then there can't be any
1495          * changes in the other sources in the source tree either.
1496 *
1497 * It's probably theoretically possible that even if the root source's
1498 * volume changes slightly, some filter source doesn't change its volume
1499 * due to rounding errors. If that happens, we still want to propagate
1500 * the changed root source volume to the sources connected to the
1501 * intermediate source that didn't change its volume. This theoretical
1502 * possibility is the reason why we have that !(s->flags &
1503 * PA_SOURCE_SHARE_VOLUME_WITH_MASTER) condition. Probably nobody would
1504          * notice even if we always returned FALSE here when
1505          * reference_volume_changed is FALSE. */
1506 return FALSE;
1507
1508 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1509 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1510 update_reference_volume(o->destination_source, v, channel_map, FALSE);
1511 }
1512
1513 return TRUE;
1514 }
1515
1516 /* Called from main thread */
1517 void pa_source_set_volume(
1518 pa_source *s,
1519 const pa_cvolume *volume,
1520 pa_bool_t send_msg,
1521 pa_bool_t save) {
1522
1523 pa_cvolume new_reference_volume;
1524 pa_source *root_source;
1525
1526 pa_source_assert_ref(s);
1527 pa_assert_ctl_context();
1528 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1529 pa_assert(!volume || pa_cvolume_valid(volume));
1530 pa_assert(volume || pa_source_flat_volume_enabled(s));
1531 pa_assert(!volume || volume->channels == 1 || pa_cvolume_compatible(volume, &s->sample_spec));
1532
1533 /* make sure we don't change the volume in PASSTHROUGH mode ...
1534 * ... *except* if we're being invoked to reset the volume to ensure 0 dB gain */
1535 if (pa_source_is_passthrough(s) && (!volume || !pa_cvolume_is_norm(volume))) {
1536 pa_log_warn("Cannot change volume, source is monitor of a PASSTHROUGH sink");
1537 return;
1538 }
1539
1540 /* In case of volume sharing, the volume is set for the root source first,
1541 * from which it's then propagated to the sharing sources. */
1542 root_source = pa_source_get_master(s);
1543
1544 if (PA_UNLIKELY(!root_source))
1545 return;
1546
1547 /* As a special exception we accept mono volumes on all sources --
1548 * even on those with more complex channel maps */
1549
1550 if (volume) {
1551 if (pa_cvolume_compatible(volume, &s->sample_spec))
1552 new_reference_volume = *volume;
1553 else {
1554 new_reference_volume = s->reference_volume;
1555 pa_cvolume_scale(&new_reference_volume, pa_cvolume_max(volume));
1556 }
1557
1558 pa_cvolume_remap(&new_reference_volume, &s->channel_map, &root_source->channel_map);
1559
1560 if (update_reference_volume(root_source, &new_reference_volume, &root_source->channel_map, save)) {
1561 if (pa_source_flat_volume_enabled(root_source)) {
1562 /* OK, propagate this volume change back to the outputs */
1563 propagate_reference_volume(root_source);
1564
1565 /* And now recalculate the real volume */
1566 compute_real_volume(root_source);
1567 } else
1568 update_real_volume(root_source, &root_source->reference_volume, &root_source->channel_map);
1569 }
1570
1571 } else {
1572 /* If volume is NULL we synchronize the source's real and
1573 * reference volumes with the stream volumes. */
1574
1575 pa_assert(pa_source_flat_volume_enabled(root_source));
1576
1577 /* Ok, let's determine the new real volume */
1578 compute_real_volume(root_source);
1579
1580 /* Let's 'push' the reference volume if necessary */
1581 pa_cvolume_merge(&new_reference_volume, &s->reference_volume, &root_source->real_volume);
1582         /* If the source and its root don't have the same number of channels, we need to remap */
1583 if (s != root_source && !pa_channel_map_equal(&s->channel_map, &root_source->channel_map))
1584 pa_cvolume_remap(&new_reference_volume, &s->channel_map, &root_source->channel_map);
1585 update_reference_volume(root_source, &new_reference_volume, &root_source->channel_map, save);
1586
1587 /* Now that the reference volume is updated, we can update the streams'
1588 * reference ratios. */
1589 compute_reference_ratios(root_source);
1590 }
1591
1592 if (root_source->set_volume) {
1593 /* If we have a function set_volume(), then we do not apply a
1594 * soft volume by default. However, set_volume() is free to
1595 * apply one to root_source->soft_volume */
1596
1597 pa_cvolume_reset(&root_source->soft_volume, root_source->sample_spec.channels);
1598 if (!(root_source->flags & PA_SOURCE_DEFERRED_VOLUME))
1599 root_source->set_volume(root_source);
1600
1601 } else
1602 /* If we have no function set_volume(), then the soft volume
1603 * becomes the real volume */
1604 root_source->soft_volume = root_source->real_volume;
1605
1606 /* This tells the source that soft volume and/or real volume changed */
1607 if (send_msg)
1608 pa_assert_se(pa_asyncmsgq_send(root_source->asyncmsgq, PA_MSGOBJECT(root_source), PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL) == 0);
1609 }
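/*
 * Typical invocations (a sketch, not taken from this file):
 *
 *     pa_source_set_volume(s, &v, TRUE, FALSE);   apply the absolute volume v without
 *                                                 marking it for save/restore
 *     pa_source_set_volume(s, NULL, TRUE, TRUE);  flat-volume sources only: re-derive the
 *                                                 source volume from its outputs' volumes
 */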
1610
1611 /* Called from the io thread if sync volume is used, otherwise from the main thread.
1612 * Only to be called by source implementor */
1613 void pa_source_set_soft_volume(pa_source *s, const pa_cvolume *volume) {
1614
1615 pa_source_assert_ref(s);
1616 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1617
1618 if (s->flags & PA_SOURCE_DEFERRED_VOLUME)
1619 pa_source_assert_io_context(s);
1620 else
1621 pa_assert_ctl_context();
1622
1623 if (!volume)
1624 pa_cvolume_reset(&s->soft_volume, s->sample_spec.channels);
1625 else
1626 s->soft_volume = *volume;
1627
1628 if (PA_SOURCE_IS_LINKED(s->state) && !(s->flags & PA_SOURCE_DEFERRED_VOLUME))
1629 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_VOLUME, NULL, 0, NULL) == 0);
1630 else
1631 s->thread_info.soft_volume = s->soft_volume;
1632 }
1633
1634 /* Called from the main thread. Only called for the root source in volume sharing
1635 * cases, except for internal recursive calls. */
1636 static void propagate_real_volume(pa_source *s, const pa_cvolume *old_real_volume) {
1637 pa_source_output *o;
1638 uint32_t idx;
1639
1640 pa_source_assert_ref(s);
1641 pa_assert(old_real_volume);
1642 pa_assert_ctl_context();
1643 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1644
1645 /* This is called when the hardware's real volume changes due to
1646 * some external event. We copy the real volume into our
1647 * reference volume and then rebuild the stream volumes based on
1648      * o->real_ratio, which should stay fixed. */
1649
1650 if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1651 if (pa_cvolume_equal(old_real_volume, &s->real_volume))
1652 return;
1653
1654 /* 1. Make the real volume the reference volume */
1655 update_reference_volume(s, &s->real_volume, &s->channel_map, TRUE);
1656 }
1657
1658 if (pa_source_flat_volume_enabled(s)) {
1659
1660 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1661 pa_cvolume old_volume = o->volume;
1662
1663 /* 2. Since the source's reference and real volumes are equal
1664 * now our ratios should be too. */
1665 o->reference_ratio = o->real_ratio;
1666
1667 /* 3. Recalculate the new stream reference volume based on the
1668              * reference ratio and the source's reference volume.
1669 *
1670 * This basically calculates:
1671 *
1672 * o->volume = s->reference_volume * o->reference_ratio
1673 *
1674 * This is identical to propagate_reference_volume() */
1675 o->volume = s->reference_volume;
1676 pa_cvolume_remap(&o->volume, &s->channel_map, &o->channel_map);
1677 pa_sw_cvolume_multiply(&o->volume, &o->volume, &o->reference_ratio);
1678
1679 /* Notify if something changed */
1680 if (!pa_cvolume_equal(&old_volume, &o->volume)) {
1681
1682 if (o->volume_changed)
1683 o->volume_changed(o);
1684
1685 pa_subscription_post(o->core, PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT|PA_SUBSCRIPTION_EVENT_CHANGE, o->index);
1686 }
1687
1688 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1689 propagate_real_volume(o->destination_source, old_real_volume);
1690 }
1691 }
1692
1693 /* Something got changed in the hardware. It probably makes sense
1694 * to save changed hw settings given that hw volume changes not
1695 * triggered by PA are almost certainly done by the user. */
1696 if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1697 s->save_volume = TRUE;
1698 }
1699
1700 /* Called from io thread */
1701 void pa_source_update_volume_and_mute(pa_source *s) {
1702 pa_assert(s);
1703 pa_source_assert_io_context(s);
1704
1705 pa_asyncmsgq_post(pa_thread_mq_get()->outq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_UPDATE_VOLUME_AND_MUTE, NULL, 0, NULL, NULL);
1706 }
1707
1708 /* Called from main thread */
1709 const pa_cvolume *pa_source_get_volume(pa_source *s, pa_bool_t force_refresh) {
1710 pa_source_assert_ref(s);
1711 pa_assert_ctl_context();
1712 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1713
1714 if (s->refresh_volume || force_refresh) {
1715 struct pa_cvolume old_real_volume;
1716
1717 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1718
1719 old_real_volume = s->real_volume;
1720
1721 if (!(s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->get_volume)
1722 s->get_volume(s);
1723
1724 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_VOLUME, NULL, 0, NULL) == 0);
1725
1726 update_real_volume(s, &s->real_volume, &s->channel_map);
1727 propagate_real_volume(s, &old_real_volume);
1728 }
1729
1730 return &s->reference_volume;
1731 }
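
/* Usage sketch (not part of this file): callers on the main thread typically just
 * read the returned reference volume; forcing a refresh is only needed when the
 * hardware may have changed behind PA's back. The logging is an illustration only.
 *
 *     const pa_cvolume *v = pa_source_get_volume(s, FALSE);
 *     char buf[PA_CVOLUME_SNPRINT_MAX];
 *     pa_log_debug("Reference volume is now %s", pa_cvolume_snprint(buf, sizeof(buf), v));
 */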
1732
1733 /* Called from main thread. In volume sharing cases, only the root source may
1734 * call this. */
1735 void pa_source_volume_changed(pa_source *s, const pa_cvolume *new_real_volume) {
1736 pa_cvolume old_real_volume;
1737
1738 pa_source_assert_ref(s);
1739 pa_assert_ctl_context();
1740 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1741 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1742
1743 /* The source implementor may call this if the volume changed to make sure everyone is notified */
1744
1745 old_real_volume = s->real_volume;
1746 update_real_volume(s, new_real_volume, &s->channel_map);
1747 propagate_real_volume(s, &old_real_volume);
1748 }
1749
1750 /* Called from main thread */
1751 void pa_source_set_mute(pa_source *s, pa_bool_t mute, pa_bool_t save) {
1752 pa_bool_t old_muted;
1753
1754 pa_source_assert_ref(s);
1755 pa_assert_ctl_context();
1756 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1757
1758 old_muted = s->muted;
1759 s->muted = mute;
1760 s->save_muted = (old_muted == s->muted && s->save_muted) || save;
1761
1762 if (!(s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->set_mute)
1763 s->set_mute(s);
1764
1765 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MUTE, NULL, 0, NULL) == 0);
1766
1767 if (old_muted != s->muted)
1768 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1769 }
1770
1771 /* Called from main thread */
1772 pa_bool_t pa_source_get_mute(pa_source *s, pa_bool_t force_refresh) {
1773
1774 pa_source_assert_ref(s);
1775 pa_assert_ctl_context();
1776 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1777
1778 if (s->refresh_muted || force_refresh) {
1779 pa_bool_t old_muted = s->muted;
1780
1781 if (!(s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->get_mute)
1782 s->get_mute(s);
1783
1784 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_MUTE, NULL, 0, NULL) == 0);
1785
1786 if (old_muted != s->muted) {
1787 s->save_muted = TRUE;
1788
1789 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1790
1791 /* Make sure the soft mute status stays in sync */
1792 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MUTE, NULL, 0, NULL) == 0);
1793 }
1794 }
1795
1796 return s->muted;
1797 }
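
/* Usage sketch (not part of this file): a simple mute toggle from the main thread,
 * e.g. what a CLI or policy module might do. Passing save=TRUE marks the new state
 * as worth persisting.
 *
 *     pa_source_set_mute(s, !pa_source_get_mute(s, FALSE), TRUE);
 */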
1798
1799 /* Called from main thread */
1800 void pa_source_mute_changed(pa_source *s, pa_bool_t new_muted) {
1801 pa_source_assert_ref(s);
1802 pa_assert_ctl_context();
1803 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1804
1805 /* The source implementor may call this if the mute state changed to make sure everyone is notified */
1806
1807 if (s->muted == new_muted)
1808 return;
1809
1810 s->muted = new_muted;
1811 s->save_muted = TRUE;
1812
1813 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1814 }
1815
1816 /* Called from main thread */
1817 pa_bool_t pa_source_update_proplist(pa_source *s, pa_update_mode_t mode, pa_proplist *p) {
1818 pa_source_assert_ref(s);
1819 pa_assert_ctl_context();
1820
1821 if (p)
1822 pa_proplist_update(s->proplist, mode, p);
1823
1824 if (PA_SOURCE_IS_LINKED(s->state)) {
1825 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PROPLIST_CHANGED], s);
1826 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1827 }
1828
1829 return TRUE;
1830 }
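
/* Usage sketch (not part of this file): merging a property into the source's
 * proplist from the main thread; the icon name chosen here is just an example
 * value.
 *
 *     pa_proplist *p = pa_proplist_new();
 *     pa_proplist_sets(p, PA_PROP_DEVICE_ICON_NAME, "audio-input-microphone");
 *     pa_source_update_proplist(s, PA_UPDATE_MERGE, p);
 *     pa_proplist_free(p);
 */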
1831
1832 /* Called from main thread */
1833 /* FIXME -- this should be dropped and be merged into pa_source_update_proplist() */
1834 void pa_source_set_description(pa_source *s, const char *description) {
1835 const char *old;
1836 pa_source_assert_ref(s);
1837 pa_assert_ctl_context();
1838
1839 if (!description && !pa_proplist_contains(s->proplist, PA_PROP_DEVICE_DESCRIPTION))
1840 return;
1841
1842 old = pa_proplist_gets(s->proplist, PA_PROP_DEVICE_DESCRIPTION);
1843
1844 if (old && description && pa_streq(old, description))
1845 return;
1846
1847 if (description)
1848 pa_proplist_sets(s->proplist, PA_PROP_DEVICE_DESCRIPTION, description);
1849 else
1850 pa_proplist_unset(s->proplist, PA_PROP_DEVICE_DESCRIPTION);
1851
1852 if (PA_SOURCE_IS_LINKED(s->state)) {
1853 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1854 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PROPLIST_CHANGED], s);
1855 }
1856 }
1857
1858 /* Called from main thread */
1859 unsigned pa_source_linked_by(pa_source *s) {
1860 pa_source_assert_ref(s);
1861 pa_assert_ctl_context();
1862 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1863
1864 return pa_idxset_size(s->outputs);
1865 }
1866
1867 /* Called from main thread */
1868 unsigned pa_source_used_by(pa_source *s) {
1869 unsigned ret;
1870
1871 pa_source_assert_ref(s);
1872 pa_assert_ctl_context();
1873 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1874
1875 ret = pa_idxset_size(s->outputs);
1876 pa_assert(ret >= s->n_corked);
1877
1878 return ret - s->n_corked;
1879 }
1880
1881 /* Called from main thread */
1882 unsigned pa_source_check_suspend(pa_source *s) {
1883 unsigned ret;
1884 pa_source_output *o;
1885 uint32_t idx;
1886
1887 pa_source_assert_ref(s);
1888 pa_assert_ctl_context();
1889
1890 if (!PA_SOURCE_IS_LINKED(s->state))
1891 return 0;
1892
1893 ret = 0;
1894
1895 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1896 pa_source_output_state_t st;
1897
1898 st = pa_source_output_get_state(o);
1899
1900 /* We do not assert here. It is perfectly valid for a source output to
1901 * be in the INIT state (i.e. created, marked done but not yet put)
1902 * and we should not care if it's unlinked as it won't contribute
1903 * towards our busy status.
1904 */
1905 if (!PA_SOURCE_OUTPUT_IS_LINKED(st))
1906 continue;
1907
1908 if (st == PA_SOURCE_OUTPUT_CORKED)
1909 continue;
1910
1911 if (o->flags & PA_SOURCE_OUTPUT_DONT_INHIBIT_AUTO_SUSPEND)
1912 continue;
1913
1914 ret ++;
1915 }
1916
1917 return ret;
1918 }
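
/* Usage sketch (not part of this file): this is the kind of check an idle-suspend
 * policy could make from the main thread; the real module-suspend-on-idle logic
 * also involves timers, which are omitted here.
 *
 *     if (PA_SOURCE_IS_LINKED(s->state) && pa_source_check_suspend(s) == 0)
 *         pa_source_suspend(s, TRUE, PA_SUSPEND_IDLE);
 */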
1919
1920 /* Called from the IO thread */
1921 static void sync_output_volumes_within_thread(pa_source *s) {
1922 pa_source_output *o;
1923 void *state = NULL;
1924
1925 pa_source_assert_ref(s);
1926 pa_source_assert_io_context(s);
1927
1928 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
1929 if (pa_cvolume_equal(&o->thread_info.soft_volume, &o->soft_volume))
1930 continue;
1931
1932 o->thread_info.soft_volume = o->soft_volume;
1933 //pa_source_output_request_rewind(o, 0, TRUE, FALSE, FALSE);
1934 }
1935 }
1936
1937 /* Called from the IO thread. Only called for the root source in volume sharing
1938 * cases, except for internal recursive calls. */
1939 static void set_shared_volume_within_thread(pa_source *s) {
1940 pa_source_output *o;
1941 void *state = NULL;
1942
1943 pa_source_assert_ref(s);
1944
1945 PA_MSGOBJECT(s)->process_msg(PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_VOLUME_SYNCED, NULL, 0, NULL);
1946
1947 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
1948 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1949 set_shared_volume_within_thread(o->destination_source);
1950 }
1951 }
1952
1953 /* Called from IO thread, except when it is not */
1954 int pa_source_process_msg(pa_msgobject *object, int code, void *userdata, int64_t offset, pa_memchunk *chunk) {
1955 pa_source *s = PA_SOURCE(object);
1956 pa_source_assert_ref(s);
1957
1958 switch ((pa_source_message_t) code) {
1959
1960 case PA_SOURCE_MESSAGE_ADD_OUTPUT: {
1961 pa_source_output *o = PA_SOURCE_OUTPUT(userdata);
1962
1963 pa_hashmap_put(s->thread_info.outputs, PA_UINT32_TO_PTR(o->index), pa_source_output_ref(o));
1964
1965 if (o->direct_on_input) {
1966 o->thread_info.direct_on_input = o->direct_on_input;
1967 pa_hashmap_put(o->thread_info.direct_on_input->thread_info.direct_outputs, PA_UINT32_TO_PTR(o->index), o);
1968 }
1969
1970 pa_assert(!o->thread_info.attached);
1971 o->thread_info.attached = TRUE;
1972
1973 if (o->attach)
1974 o->attach(o);
1975
1976 pa_source_output_set_state_within_thread(o, o->state);
1977
1978 if (o->thread_info.requested_source_latency != (pa_usec_t) -1)
1979 pa_source_output_set_requested_latency_within_thread(o, o->thread_info.requested_source_latency);
1980
1981 pa_source_output_update_max_rewind(o, s->thread_info.max_rewind);
1982
1983 /* We don't just invalidate the requested latency here,
1984 * because if we are in a move we might need to fix up the
1985 * requested latency. */
1986 pa_source_output_set_requested_latency_within_thread(o, o->thread_info.requested_source_latency);
1987
1988 /* In flat volume mode we need to update the volume as
1989 * well */
1990 return object->process_msg(object, PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL);
1991 }
1992
1993 case PA_SOURCE_MESSAGE_REMOVE_OUTPUT: {
1994 pa_source_output *o = PA_SOURCE_OUTPUT(userdata);
1995
1996 pa_source_output_set_state_within_thread(o, o->state);
1997
1998 if (o->detach)
1999 o->detach(o);
2000
2001 pa_assert(o->thread_info.attached);
2002 o->thread_info.attached = FALSE;
2003
2004 if (o->thread_info.direct_on_input) {
2005 pa_hashmap_remove(o->thread_info.direct_on_input->thread_info.direct_outputs, PA_UINT32_TO_PTR(o->index));
2006 o->thread_info.direct_on_input = NULL;
2007 }
2008
2009 if (pa_hashmap_remove(s->thread_info.outputs, PA_UINT32_TO_PTR(o->index)))
2010 pa_source_output_unref(o);
2011
2012 pa_source_invalidate_requested_latency(s, TRUE);
2013
2014 /* In flat volume mode we need to update the volume as
2015 * well */
2016 return object->process_msg(object, PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL);
2017 }
2018
2019 case PA_SOURCE_MESSAGE_SET_SHARED_VOLUME: {
2020 pa_source *root_source = pa_source_get_master(s);
2021
2022 if (PA_LIKELY(root_source))
2023 set_shared_volume_within_thread(root_source);
2024
2025 return 0;
2026 }
2027
2028 case PA_SOURCE_MESSAGE_SET_VOLUME_SYNCED:
2029
2030 if (s->flags & PA_SOURCE_DEFERRED_VOLUME) {
2031 s->set_volume(s);
2032 pa_source_volume_change_push(s);
2033 }
2034 /* Fall through ... */
2035
2036 case PA_SOURCE_MESSAGE_SET_VOLUME:
2037
2038 if (!pa_cvolume_equal(&s->thread_info.soft_volume, &s->soft_volume)) {
2039 s->thread_info.soft_volume = s->soft_volume;
2040 }
2041
2042 /* Fall through ... */
2043
2044 case PA_SOURCE_MESSAGE_SYNC_VOLUMES:
2045 sync_output_volumes_within_thread(s);
2046 return 0;
2047
2048 case PA_SOURCE_MESSAGE_GET_VOLUME:
2049
2050 if ((s->flags & PA_SOURCE_DEFERRED_VOLUME) && s->get_volume) {
2051 s->get_volume(s);
2052 pa_source_volume_change_flush(s);
2053 pa_sw_cvolume_divide(&s->thread_info.current_hw_volume, &s->real_volume, &s->soft_volume);
2054 }
2055
2056 /* In case the source implementor reset the SW volume. */
2057 if (!pa_cvolume_equal(&s->thread_info.soft_volume, &s->soft_volume)) {
2058 s->thread_info.soft_volume = s->soft_volume;
2059 }
2060
2061 return 0;
2062
2063 case PA_SOURCE_MESSAGE_SET_MUTE:
2064
2065 if (s->thread_info.soft_muted != s->muted) {
2066 s->thread_info.soft_muted = s->muted;
2067 }
2068
2069 if (s->flags & PA_SOURCE_DEFERRED_VOLUME && s->set_mute)
2070 s->set_mute(s);
2071
2072 return 0;
2073
2074 case PA_SOURCE_MESSAGE_GET_MUTE:
2075
2076 if (s->flags & PA_SOURCE_DEFERRED_VOLUME && s->get_mute)
2077 s->get_mute(s);
2078
2079 return 0;
2080
2081 case PA_SOURCE_MESSAGE_SET_STATE: {
2082
2083 pa_bool_t suspend_change =
2084 (s->thread_info.state == PA_SOURCE_SUSPENDED && PA_SOURCE_IS_OPENED(PA_PTR_TO_UINT(userdata))) ||
2085 (PA_SOURCE_IS_OPENED(s->thread_info.state) && PA_PTR_TO_UINT(userdata) == PA_SOURCE_SUSPENDED);
2086
2087 s->thread_info.state = PA_PTR_TO_UINT(userdata);
2088
2089 if (suspend_change) {
2090 pa_source_output *o;
2091 void *state = NULL;
2092
2093 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL)))
2094 if (o->suspend_within_thread)
2095 o->suspend_within_thread(o, s->thread_info.state == PA_SOURCE_SUSPENDED);
2096 }
2097
2098 return 0;
2099 }
2100
2101 case PA_SOURCE_MESSAGE_DETACH:
2102
2103 /* Detach all streams */
2104 pa_source_detach_within_thread(s);
2105 return 0;
2106
2107 case PA_SOURCE_MESSAGE_ATTACH:
2108
2109 /* Reattach all streams */
2110 pa_source_attach_within_thread(s);
2111 return 0;
2112
2113 case PA_SOURCE_MESSAGE_GET_REQUESTED_LATENCY: {
2114
2115 pa_usec_t *usec = userdata;
2116 *usec = pa_source_get_requested_latency_within_thread(s);
2117
2118 /* Yes, that's right, the IO thread will see -1 when no
2119 * explicit requested latency is configured, the main
2120 * thread will see max_latency */
2121 if (*usec == (pa_usec_t) -1)
2122 *usec = s->thread_info.max_latency;
2123
2124 return 0;
2125 }
2126
2127 case PA_SOURCE_MESSAGE_SET_LATENCY_RANGE: {
2128 pa_usec_t *r = userdata;
2129
2130 pa_source_set_latency_range_within_thread(s, r[0], r[1]);
2131
2132 return 0;
2133 }
2134
2135 case PA_SOURCE_MESSAGE_GET_LATENCY_RANGE: {
2136 pa_usec_t *r = userdata;
2137
2138 r[0] = s->thread_info.min_latency;
2139 r[1] = s->thread_info.max_latency;
2140
2141 return 0;
2142 }
2143
2144 case PA_SOURCE_MESSAGE_GET_FIXED_LATENCY:
2145
2146 *((pa_usec_t*) userdata) = s->thread_info.fixed_latency;
2147 return 0;
2148
2149 case PA_SOURCE_MESSAGE_SET_FIXED_LATENCY:
2150
2151 pa_source_set_fixed_latency_within_thread(s, (pa_usec_t) offset);
2152 return 0;
2153
2154 case PA_SOURCE_MESSAGE_GET_MAX_REWIND:
2155
2156 *((size_t*) userdata) = s->thread_info.max_rewind;
2157 return 0;
2158
2159 case PA_SOURCE_MESSAGE_SET_MAX_REWIND:
2160
2161 pa_source_set_max_rewind_within_thread(s, (size_t) offset);
2162 return 0;
2163
2164 case PA_SOURCE_MESSAGE_GET_LATENCY:
2165
2166 if (s->monitor_of) {
2167 *((pa_usec_t*) userdata) = 0;
2168 return 0;
2169 }
2170
2171 /* Implementors need to override this implementation! */
2172 return -1;
2173
2174 case PA_SOURCE_MESSAGE_SET_PORT:
2175
2176 pa_assert(userdata);
2177 if (s->set_port) {
2178 struct source_message_set_port *msg_data = userdata;
2179 msg_data->ret = s->set_port(s, msg_data->port);
2180 }
2181 return 0;
2182
2183 case PA_SOURCE_MESSAGE_UPDATE_VOLUME_AND_MUTE:
2184 /* This message is sent from the IO thread and handled in the main thread. */
2185 pa_assert_ctl_context();
2186
2187 /* Make sure we're not messing with the main thread when no longer linked */
2188 if (!PA_SOURCE_IS_LINKED(s->state))
2189 return 0;
2190
2191 pa_source_get_volume(s, TRUE);
2192 pa_source_get_mute(s, TRUE);
2193 return 0;
2194
2195 case PA_SOURCE_MESSAGE_SET_LATENCY_OFFSET:
2196 s->thread_info.latency_offset = offset;
2197 return 0;
2198
2199 case PA_SOURCE_MESSAGE_MAX:
2200 ;
2201 }
2202
2203 return -1;
2204 }
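
/* Illustrative sketch (not part of this file): a driver usually installs its own
 * message handler on the source and chains to pa_source_process_msg() for
 * everything it does not handle itself, most importantly to answer
 * PA_SOURCE_MESSAGE_GET_LATENCY, which the default handler above refuses for
 * non-monitor sources. The handler name and the zero latency are placeholders.
 *
 *     static int my_source_process_msg(pa_msgobject *o, int code, void *data, int64_t offset, pa_memchunk *chunk) {
 *         pa_source *s = PA_SOURCE(o);
 *
 *         switch (code) {
 *             case PA_SOURCE_MESSAGE_GET_LATENCY:
 *                 // A real driver reports how much captured audio is still buffered.
 *                 *((pa_usec_t*) data) = 0;
 *                 return 0;
 *         }
 *
 *         return pa_source_process_msg(o, code, data, offset, chunk);
 *     }
 */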
2205
2206 /* Called from main thread */
2207 int pa_source_suspend_all(pa_core *c, pa_bool_t suspend, pa_suspend_cause_t cause) {
2208 pa_source *source;
2209 uint32_t idx;
2210 int ret = 0;
2211
2212 pa_core_assert_ref(c);
2213 pa_assert_ctl_context();
2214 pa_assert(cause != 0);
2215
2216 for (source = PA_SOURCE(pa_idxset_first(c->sources, &idx)); source; source = PA_SOURCE(pa_idxset_next(c->sources, &idx))) {
2217 int r;
2218
2219 if (source->monitor_of)
2220 continue;
2221
2222 if ((r = pa_source_suspend(source, suspend, cause)) < 0)
2223 ret = r;
2224 }
2225
2226 return ret;
2227 }
2228
2229 /* Called from main thread */
2230 void pa_source_detach(pa_source *s) {
2231 pa_source_assert_ref(s);
2232 pa_assert_ctl_context();
2233 pa_assert(PA_SOURCE_IS_LINKED(s->state));
2234
2235 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_DETACH, NULL, 0, NULL) == 0);
2236 }
2237
2238 /* Called from main thread */
2239 void pa_source_attach(pa_source *s) {
2240 pa_source_assert_ref(s);
2241 pa_assert_ctl_context();
2242 pa_assert(PA_SOURCE_IS_LINKED(s->state));
2243
2244 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_ATTACH, NULL, 0, NULL) == 0);
2245 }
2246
2247 /* Called from IO thread */
2248 void pa_source_detach_within_thread(pa_source *s) {
2249 pa_source_output *o;
2250 void *state = NULL;
2251
2252 pa_source_assert_ref(s);
2253 pa_source_assert_io_context(s);
2254 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
2255
2256 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2257 if (o->detach)
2258 o->detach(o);
2259 }
2260
2261 /* Called from IO thread */
2262 void pa_source_attach_within_thread(pa_source *s) {
2263 pa_source_output *o;
2264 void *state = NULL;
2265
2266 pa_source_assert_ref(s);
2267 pa_source_assert_io_context(s);
2268 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
2269
2270 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2271 if (o->attach)
2272 o->attach(o);
2273 }
2274
2275 /* Called from IO thread */
2276 pa_usec_t pa_source_get_requested_latency_within_thread(pa_source *s) {
2277 pa_usec_t result = (pa_usec_t) -1;
2278 pa_source_output *o;
2279 void *state = NULL;
2280
2281 pa_source_assert_ref(s);
2282 pa_source_assert_io_context(s);
2283
2284 if (!(s->flags & PA_SOURCE_DYNAMIC_LATENCY))
2285 return PA_CLAMP(s->thread_info.fixed_latency, s->thread_info.min_latency, s->thread_info.max_latency);
2286
2287 if (s->thread_info.requested_latency_valid)
2288 return s->thread_info.requested_latency;
2289
2290 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2291 if (o->thread_info.requested_source_latency != (pa_usec_t) -1 &&
2292 (result == (pa_usec_t) -1 || result > o->thread_info.requested_source_latency))
2293 result = o->thread_info.requested_source_latency;
2294
2295 if (result != (pa_usec_t) -1)
2296 result = PA_CLAMP(result, s->thread_info.min_latency, s->thread_info.max_latency);
2297
2298 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2299 /* Only cache this if we are fully set up */
2300 s->thread_info.requested_latency = result;
2301 s->thread_info.requested_latency_valid = TRUE;
2302 }
2303
2304 return result;
2305 }
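
/* Usage sketch (not part of this file): in its IO loop a driver can use the
 * requested latency to decide how much data to accumulate before posting it,
 * falling back to the configured maximum when nothing was requested. The
 * surrounding capture loop is omitted.
 *
 *     pa_usec_t block_usec = pa_source_get_requested_latency_within_thread(s);
 *     if (block_usec == (pa_usec_t) -1)
 *         block_usec = s->thread_info.max_latency;
 *     // ...capture roughly block_usec worth of audio, then pa_source_post() it...
 */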
2306
2307 /* Called from main thread */
2308 pa_usec_t pa_source_get_requested_latency(pa_source *s) {
2309 pa_usec_t usec = 0;
2310
2311 pa_source_assert_ref(s);
2312 pa_assert_ctl_context();
2313 pa_assert(PA_SOURCE_IS_LINKED(s->state));
2314
2315 if (s->state == PA_SOURCE_SUSPENDED)
2316 return 0;
2317
2318 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_REQUESTED_LATENCY, &usec, 0, NULL) == 0);
2319
2320 return usec;
2321 }
2322
2323 /* Called from IO thread */
2324 void pa_source_set_max_rewind_within_thread(pa_source *s, size_t max_rewind) {
2325 pa_source_output *o;
2326 void *state = NULL;
2327
2328 pa_source_assert_ref(s);
2329 pa_source_assert_io_context(s);
2330
2331 if (max_rewind == s->thread_info.max_rewind)
2332 return;
2333
2334 s->thread_info.max_rewind = max_rewind;
2335
2336 if (PA_SOURCE_IS_LINKED(s->thread_info.state))
2337 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2338 pa_source_output_update_max_rewind(o, s->thread_info.max_rewind);
2339 }
2340
2341 /* Called from main thread */
2342 void pa_source_set_max_rewind(pa_source *s, size_t max_rewind) {
2343 pa_source_assert_ref(s);
2344 pa_assert_ctl_context();
2345
2346 if (PA_SOURCE_IS_LINKED(s->state))
2347 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MAX_REWIND, NULL, max_rewind, NULL) == 0);
2348 else
2349 pa_source_set_max_rewind_within_thread(s, max_rewind);
2350 }
2351
2352 /* Called from IO thread */
2353 void pa_source_invalidate_requested_latency(pa_source *s, pa_bool_t dynamic) {
2354 pa_source_output *o;
2355 void *state = NULL;
2356
2357 pa_source_assert_ref(s);
2358 pa_source_assert_io_context(s);
2359
2360 if ((s->flags & PA_SOURCE_DYNAMIC_LATENCY))
2361 s->thread_info.requested_latency_valid = FALSE;
2362 else if (dynamic)
2363 return;
2364
2365 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2366
2367 if (s->update_requested_latency)
2368 s->update_requested_latency(s);
2369
2370 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL)))
2371 if (o->update_source_requested_latency)
2372 o->update_source_requested_latency(o);
2373 }
2374
2375 if (s->monitor_of)
2376 pa_sink_invalidate_requested_latency(s->monitor_of, dynamic);
2377 }
2378
2379 /* Called from main thread */
2380 void pa_source_set_latency_range(pa_source *s, pa_usec_t min_latency, pa_usec_t max_latency) {
2381 pa_source_assert_ref(s);
2382 pa_assert_ctl_context();
2383
2384 /* min_latency == 0: no limit
2385 * min_latency anything else: specified limit
2386 *
2387 * Similar for max_latency */
2388
2389 if (min_latency < ABSOLUTE_MIN_LATENCY)
2390 min_latency = ABSOLUTE_MIN_LATENCY;
2391
2392 if (max_latency <= 0 ||
2393 max_latency > ABSOLUTE_MAX_LATENCY)
2394 max_latency = ABSOLUTE_MAX_LATENCY;
2395
2396 pa_assert(min_latency <= max_latency);
2397
2398 /* Hmm, let's see if someone forgot to set PA_SOURCE_DYNAMIC_LATENCY here... */
2399 pa_assert((min_latency == ABSOLUTE_MIN_LATENCY &&
2400 max_latency == ABSOLUTE_MAX_LATENCY) ||
2401 (s->flags & PA_SOURCE_DYNAMIC_LATENCY));
2402
2403 if (PA_SOURCE_IS_LINKED(s->state)) {
2404 pa_usec_t r[2];
2405
2406 r[0] = min_latency;
2407 r[1] = max_latency;
2408
2409 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_LATENCY_RANGE, r, 0, NULL) == 0);
2410 } else
2411 pa_source_set_latency_range_within_thread(s, min_latency, max_latency);
2412 }
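
/* Usage sketch (not part of this file): a driver that can rebuffer on the fly
 * advertises that by setting PA_SOURCE_DYNAMIC_LATENCY and declaring a range
 * before pa_source_put(); the numbers below are arbitrary.
 *
 *     pa_source_set_latency_range(s, 5 * PA_USEC_PER_MSEC, 2 * PA_USEC_PER_SEC);
 */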
2413
2414 /* Called from main thread */
2415 void pa_source_get_latency_range(pa_source *s, pa_usec_t *min_latency, pa_usec_t *max_latency) {
2416 pa_source_assert_ref(s);
2417 pa_assert_ctl_context();
2418 pa_assert(min_latency);
2419 pa_assert(max_latency);
2420
2421 if (PA_SOURCE_IS_LINKED(s->state)) {
2422 pa_usec_t r[2] = { 0, 0 };
2423
2424 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_LATENCY_RANGE, r, 0, NULL) == 0);
2425
2426 *min_latency = r[0];
2427 *max_latency = r[1];
2428 } else {
2429 *min_latency = s->thread_info.min_latency;
2430 *max_latency = s->thread_info.max_latency;
2431 }
2432 }
2433
2434 /* Called from IO thread, and from main thread before pa_source_put() is called */
2435 void pa_source_set_latency_range_within_thread(pa_source *s, pa_usec_t min_latency, pa_usec_t max_latency) {
2436 pa_source_assert_ref(s);
2437 pa_source_assert_io_context(s);
2438
2439 pa_assert(min_latency >= ABSOLUTE_MIN_LATENCY);
2440 pa_assert(max_latency <= ABSOLUTE_MAX_LATENCY);
2441 pa_assert(min_latency <= max_latency);
2442
2443 /* Hmm, let's see if someone forgot to set PA_SOURCE_DYNAMIC_LATENCY here... */
2444 pa_assert((min_latency == ABSOLUTE_MIN_LATENCY &&
2445 max_latency == ABSOLUTE_MAX_LATENCY) ||
2446 (s->flags & PA_SOURCE_DYNAMIC_LATENCY) ||
2447 s->monitor_of);
2448
2449 if (s->thread_info.min_latency == min_latency &&
2450 s->thread_info.max_latency == max_latency)
2451 return;
2452
2453 s->thread_info.min_latency = min_latency;
2454 s->thread_info.max_latency = max_latency;
2455
2456 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2457 pa_source_output *o;
2458 void *state = NULL;
2459
2460 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2461 if (o->update_source_latency_range)
2462 o->update_source_latency_range(o);
2463 }
2464
2465 pa_source_invalidate_requested_latency(s, FALSE);
2466 }
2467
2468 /* Called from main thread, before the source is put */
2469 void pa_source_set_fixed_latency(pa_source *s, pa_usec_t latency) {
2470 pa_source_assert_ref(s);
2471 pa_assert_ctl_context();
2472
2473 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY) {
2474 pa_assert(latency == 0);
2475 return;
2476 }
2477
2478 if (latency < ABSOLUTE_MIN_LATENCY)
2479 latency = ABSOLUTE_MIN_LATENCY;
2480
2481 if (latency > ABSOLUTE_MAX_LATENCY)
2482 latency = ABSOLUTE_MAX_LATENCY;
2483
2484 if (PA_SOURCE_IS_LINKED(s->state))
2485 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_FIXED_LATENCY, NULL, (int64_t) latency, NULL) == 0);
2486 else
2487 s->thread_info.fixed_latency = latency;
2488 }
2489
2490 /* Called from main thread */
2491 pa_usec_t pa_source_get_fixed_latency(pa_source *s) {
2492 pa_usec_t latency;
2493
2494 pa_source_assert_ref(s);
2495 pa_assert_ctl_context();
2496
2497 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY)
2498 return 0;
2499
2500 if (PA_SOURCE_IS_LINKED(s->state))
2501 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_FIXED_LATENCY, &latency, 0, NULL) == 0);
2502 else
2503 latency = s->thread_info.fixed_latency;
2504
2505 return latency;
2506 }
2507
2508 /* Called from IO thread */
2509 void pa_source_set_fixed_latency_within_thread(pa_source *s, pa_usec_t latency) {
2510 pa_source_assert_ref(s);
2511 pa_source_assert_io_context(s);
2512
2513 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY) {
2514 pa_assert(latency == 0);
2515 return;
2516 }
2517
2518 pa_assert(latency >= ABSOLUTE_MIN_LATENCY);
2519 pa_assert(latency <= ABSOLUTE_MAX_LATENCY);
2520
2521 if (s->thread_info.fixed_latency == latency)
2522 return;
2523
2524 s->thread_info.fixed_latency = latency;
2525
2526 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2527 pa_source_output *o;
2528 void *state = NULL;
2529
2530 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2531 if (o->update_source_fixed_latency)
2532 o->update_source_fixed_latency(o);
2533 }
2534
2535 pa_source_invalidate_requested_latency(s, FALSE);
2536 }
2537
2538 /* Called from main thread */
2539 void pa_source_set_latency_offset(pa_source *s, int64_t offset) {
2540 pa_source_assert_ref(s);
2541
2542 s->latency_offset = offset;
2543
2544 if (PA_SOURCE_IS_LINKED(s->state))
2545 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_LATENCY_OFFSET, NULL, offset, NULL) == 0);
2546 else
2547 s->thread_info.latency_offset = offset;
2548 }
2549
2550 /* Called from main thread */
2551 size_t pa_source_get_max_rewind(pa_source *s) {
2552 size_t r;
2553 pa_assert_ctl_context();
2554 pa_source_assert_ref(s);
2555
2556 if (!PA_SOURCE_IS_LINKED(s->state))
2557 return s->thread_info.max_rewind;
2558
2559 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_MAX_REWIND, &r, 0, NULL) == 0);
2560
2561 return r;
2562 }
2563
2564 /* Called from main context */
2565 int pa_source_set_port(pa_source *s, const char *name, pa_bool_t save) {
2566 pa_device_port *port;
2567 int ret;
2568
2569 pa_source_assert_ref(s);
2570 pa_assert_ctl_context();
2571
2572 if (!s->set_port) {
2573 pa_log_debug("set_port() operation not implemented for source %u \"%s\"", s->index, s->name);
2574 return -PA_ERR_NOTIMPLEMENTED;
2575 }
2576
2577 if (!name)
2578 return -PA_ERR_NOENTITY;
2579
2580 if (!(port = pa_hashmap_get(s->ports, name)))
2581 return -PA_ERR_NOENTITY;
2582
2583 if (s->active_port == port) {
2584 s->save_port = s->save_port || save;
2585 return 0;
2586 }
2587
2588 if (s->flags & PA_SOURCE_DEFERRED_VOLUME) {
2589 struct source_message_set_port msg = { .port = port, .ret = 0 };
2590 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_PORT, &msg, 0, NULL) == 0);
2591 ret = msg.ret;
2592 }
2593 else
2594 ret = s->set_port(s, port);
2595
2596 if (ret < 0)
2597 return -PA_ERR_NOENTITY;
2598
2599 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
2600
2601 pa_log_info("Changed port of source %u \"%s\" to %s", s->index, s->name, port->name);
2602
2603 s->active_port = port;
2604 s->save_port = save;
2605
2606 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PORT_CHANGED], s);
2607
2608 return 0;
2609 }
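
/* Usage sketch (not part of this file): switching the active port by name from
 * the main thread; the port name is only an example and must match one of the
 * entries in s->ports.
 *
 *     if (pa_source_set_port(s, "analog-input-microphone", TRUE) < 0)
 *         pa_log_warn("Failed to switch port");
 */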
2610
2611 PA_STATIC_FLIST_DECLARE(pa_source_volume_change, 0, pa_xfree);
2612
2613 /* Called from the IO thread. */
2614 static pa_source_volume_change *pa_source_volume_change_new(pa_source *s) {
2615 pa_source_volume_change *c;
2616 if (!(c = pa_flist_pop(PA_STATIC_FLIST_GET(pa_source_volume_change))))
2617 c = pa_xnew(pa_source_volume_change, 1);
2618
2619 PA_LLIST_INIT(pa_source_volume_change, c);
2620 c->at = 0;
2621 pa_cvolume_reset(&c->hw_volume, s->sample_spec.channels);
2622 return c;
2623 }
2624
2625 /* Called from the IO thread. */
2626 static void pa_source_volume_change_free(pa_source_volume_change *c) {
2627 pa_assert(c);
2628 if (pa_flist_push(PA_STATIC_FLIST_GET(pa_source_volume_change), c) < 0)
2629 pa_xfree(c);
2630 }
2631
2632 /* Called from the IO thread. */
2633 void pa_source_volume_change_push(pa_source *s) {
2634 pa_source_volume_change *c = NULL;
2635 pa_source_volume_change *nc = NULL;
2636 uint32_t safety_margin = s->thread_info.volume_change_safety_margin;
2637
2638 const char *direction = NULL;
2639
2640 pa_assert(s);
2641 nc = pa_source_volume_change_new(s);
2642
2643 /* NOTE: There are already more different volumes in pa_source than I can remember.
2644 * Adding one more volume for HW would let us get rid of this, but I am trying
2645 * to survive with the ones we already have. */
2646 pa_sw_cvolume_divide(&nc->hw_volume, &s->real_volume, &s->soft_volume);
2647
2648 if (!s->thread_info.volume_changes && pa_cvolume_equal(&nc->hw_volume, &s->thread_info.current_hw_volume)) {
2649 pa_log_debug("Volume not changing");
2650 pa_source_volume_change_free(nc);
2651 return;
2652 }
2653
2654 nc->at = pa_source_get_latency_within_thread(s);
2655 nc->at += pa_rtclock_now() + s->thread_info.volume_change_extra_delay;
2656
2657 if (s->thread_info.volume_changes_tail) {
2658 for (c = s->thread_info.volume_changes_tail; c; c = c->prev) {
2659 /* If the volume is going up, let's do it a bit late. If it is going
2660 * down, let's do it a bit early. */
2661 if (pa_cvolume_avg(&nc->hw_volume) > pa_cvolume_avg(&c->hw_volume)) {
2662 if (nc->at + safety_margin > c->at) {
2663 nc->at += safety_margin;
2664 direction = "up";
2665 break;
2666 }
2667 }
2668 else if (nc->at - safety_margin > c->at) {
2669 nc->at -= safety_margin;
2670 direction = "down";
2671 break;
2672 }
2673 }
2674 }
2675
2676 if (c == NULL) {
2677 if (pa_cvolume_avg(&nc->hw_volume) > pa_cvolume_avg(&s->thread_info.current_hw_volume)) {
2678 nc->at += safety_margin;
2679 direction = "up";
2680 } else {
2681 nc->at -= safety_margin;
2682 direction = "down";
2683 }
2684 PA_LLIST_PREPEND(pa_source_volume_change, s->thread_info.volume_changes, nc);
2685 }
2686 else {
2687 PA_LLIST_INSERT_AFTER(pa_source_volume_change, s->thread_info.volume_changes, c, nc);
2688 }
2689
2690 pa_log_debug("Volume going %s to %d at %llu", direction, pa_cvolume_avg(&nc->hw_volume), (long long unsigned) nc->at);
2691
2692 /* We can ignore volume events that came earlier but should happen later than this. */
2693 PA_LLIST_FOREACH(c, nc->next) {
2694 pa_log_debug("Volume change to %d at %llu was dropped", pa_cvolume_avg(&c->hw_volume), (long long unsigned) c->at);
2695 pa_source_volume_change_free(c);
2696 }
2697 nc->next = NULL;
2698 s->thread_info.volume_changes_tail = nc;
2699 }
2700
2701 /* Called from the IO thread. */
2702 static void pa_source_volume_change_flush(pa_source *s) {
2703 pa_source_volume_change *c = s->thread_info.volume_changes;
2704 pa_assert(s);
2705 s->thread_info.volume_changes = NULL;
2706 s->thread_info.volume_changes_tail = NULL;
2707 while (c) {
2708 pa_source_volume_change *next = c->next;
2709 pa_source_volume_change_free(c);
2710 c = next;
2711 }
2712 }
2713
2714 /* Called from the IO thread. */
2715 pa_bool_t pa_source_volume_change_apply(pa_source *s, pa_usec_t *usec_to_next) {
2716 pa_usec_t now;
2717 pa_bool_t ret = FALSE;
2718
2719 pa_assert(s);
2720
2721 if (!s->thread_info.volume_changes || !PA_SOURCE_IS_LINKED(s->state)) {
2722 if (usec_to_next)
2723 *usec_to_next = 0;
2724 return ret;
2725 }
2726
2727 pa_assert(s->write_volume);
2728
2729 now = pa_rtclock_now();
2730
2731 while (s->thread_info.volume_changes && now >= s->thread_info.volume_changes->at) {
2732 pa_source_volume_change *c = s->thread_info.volume_changes;
2733 PA_LLIST_REMOVE(pa_source_volume_change, s->thread_info.volume_changes, c);
2734 pa_log_debug("Volume change to %d at %llu was written %llu usec late",
2735 pa_cvolume_avg(&c->hw_volume), (long long unsigned) c->at, (long long unsigned) (now - c->at));
2736 ret = TRUE;
2737 s->thread_info.current_hw_volume = c->hw_volume;
2738 pa_source_volume_change_free(c);
2739 }
2740
2741 if (ret)
2742 s->write_volume(s);
2743
2744 if (s->thread_info.volume_changes) {
2745 if (usec_to_next)
2746 *usec_to_next = s->thread_info.volume_changes->at - now;
2747 if (pa_log_ratelimit(PA_LOG_DEBUG))
2748 pa_log_debug("Next volume change in %lld usec", (long long) (s->thread_info.volume_changes->at - now));
2749 }
2750 else {
2751 if (usec_to_next)
2752 *usec_to_next = 0;
2753 s->thread_info.volume_changes_tail = NULL;
2754 }
2755 return ret;
2756 }
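
/* Illustrative sketch (not part of this file): a deferred-volume driver calls this
 * from its IO loop after providing s->write_volume, and uses the returned delay to
 * schedule the next wakeup. The rtpoll variable is assumed to be the driver's own.
 *
 *     pa_usec_t usec_to_next;
 *     pa_source_volume_change_apply(s, &usec_to_next);
 *     if (usec_to_next > 0)
 *         pa_rtpoll_set_timer_relative(rtpoll, usec_to_next);
 */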
2757
2758
2759 /* Called from the main thread */
2760 /* Gets the list of formats supported by the source. The members and idxset must
2761 * be freed by the caller. */
2762 pa_idxset* pa_source_get_formats(pa_source *s) {
2763 pa_idxset *ret;
2764
2765 pa_assert(s);
2766
2767 if (s->get_formats) {
2768 /* Source supports format query, all is good */
2769 ret = s->get_formats(s);
2770 } else {
2771 /* Source doesn't support format query, so assume it does PCM */
2772 pa_format_info *f = pa_format_info_new();
2773 f->encoding = PA_ENCODING_PCM;
2774
2775 ret = pa_idxset_new(NULL, NULL);
2776 pa_idxset_put(ret, f, NULL);
2777 }
2778
2779 return ret;
2780 }
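
/* Usage sketch (not part of this file): enumerating the supported formats and
 * releasing them the same way the callers in this file do; the logging is
 * illustrative only.
 *
 *     pa_idxset *formats = pa_source_get_formats(s);
 *     pa_format_info *f;
 *     uint32_t i;
 *
 *     PA_IDXSET_FOREACH(f, formats, i) {
 *         char buf[PA_FORMAT_INFO_SNPRINT_MAX];
 *         pa_log_debug("Supported: %s", pa_format_info_snprint(buf, sizeof(buf), f));
 *     }
 *
 *     pa_idxset_free(formats, (pa_free2_cb_t) pa_format_info_free2, NULL);
 */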
2781
2782 /* Called from the main thread */
2783 /* Checks if the source can accept this format */
2784 pa_bool_t pa_source_check_format(pa_source *s, pa_format_info *f)
2785 {
2786 pa_idxset *formats = NULL;
2787 pa_bool_t ret = FALSE;
2788
2789 pa_assert(s);
2790 pa_assert(f);
2791
2792 formats = pa_source_get_formats(s);
2793
2794 if (formats) {
2795 pa_format_info *finfo_device;
2796 uint32_t i;
2797
2798 PA_IDXSET_FOREACH(finfo_device, formats, i) {
2799 if (pa_format_info_is_compatible(finfo_device, f)) {
2800 ret = TRUE;
2801 break;
2802 }
2803 }
2804
2805 pa_idxset_free(formats, (pa_free2_cb_t) pa_format_info_free2, NULL);
2806 }
2807
2808 return ret;
2809 }
2810
2811 /* Called from the main thread */
2812 /* Calculates the intersection between formats supported by the source and
2813 * in_formats, and returns these, in the order of the source's formats. */
2814 pa_idxset* pa_source_check_formats(pa_source *s, pa_idxset *in_formats) {
2815 pa_idxset *out_formats = pa_idxset_new(NULL, NULL), *source_formats = NULL;
2816 pa_format_info *f_source, *f_in;
2817 uint32_t i, j;
2818
2819 pa_assert(s);
2820
2821 if (!in_formats || pa_idxset_isempty(in_formats))
2822 goto done;
2823
2824 source_formats = pa_source_get_formats(s);
2825
2826 PA_IDXSET_FOREACH(f_source, source_formats, i) {
2827 PA_IDXSET_FOREACH(f_in, in_formats, j) {
2828 if (pa_format_info_is_compatible(f_source, f_in))
2829 pa_idxset_put(out_formats, pa_format_info_copy(f_in), NULL);
2830 }
2831 }
2832
2833 done:
2834 if (source_formats)
2835 pa_idxset_free(source_formats, (pa_free2_cb_t) pa_format_info_free2, NULL);
2836
2837 return out_formats;
2838 }