pulseaudio / src / pulsecore / source.c
capture: Remove support for synchronised capture streams.
1 /***
2 This file is part of PulseAudio.
3
4 Copyright 2004-2006 Lennart Poettering
5 Copyright 2006 Pierre Ossman <ossman@cendio.se> for Cendio AB
6
7 PulseAudio is free software; you can redistribute it and/or modify
8 it under the terms of the GNU Lesser General Public License as published
9 by the Free Software Foundation; either version 2.1 of the License,
10 or (at your option) any later version.
11
12 PulseAudio is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU Lesser General Public License
18 along with PulseAudio; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
20 USA.
21 ***/
22
23 #ifdef HAVE_CONFIG_H
24 #include <config.h>
25 #endif
26
27 #include <stdio.h>
28 #include <stdlib.h>
29 #include <string.h>
30
31 #include <pulse/utf8.h>
32 #include <pulse/xmalloc.h>
33 #include <pulse/timeval.h>
34 #include <pulse/util.h>
35 #include <pulse/rtclock.h>
36 #include <pulse/internal.h>
37
38 #include <pulsecore/core-util.h>
39 #include <pulsecore/source-output.h>
40 #include <pulsecore/namereg.h>
41 #include <pulsecore/core-subscribe.h>
42 #include <pulsecore/log.h>
43 #include <pulsecore/sample-util.h>
44 #include <pulsecore/flist.h>
45
46 #include "source.h"
47
48 #define ABSOLUTE_MIN_LATENCY (500)
49 #define ABSOLUTE_MAX_LATENCY (10*PA_USEC_PER_SEC)
50 #define DEFAULT_FIXED_LATENCY (250*PA_USEC_PER_MSEC)
51
52 PA_DEFINE_PUBLIC_CLASS(pa_source, pa_msgobject);
53
54 struct pa_source_volume_change {
55 pa_usec_t at;
56 pa_cvolume hw_volume;
57
58 PA_LLIST_FIELDS(pa_source_volume_change);
59 };
60
61 struct source_message_set_port {
62 pa_device_port *port;
63 int ret;
64 };
65
66 static void source_free(pa_object *o);
67
68 static void pa_source_volume_change_push(pa_source *s);
69 static void pa_source_volume_change_flush(pa_source *s);
70
71 pa_source_new_data* pa_source_new_data_init(pa_source_new_data *data) {
72 pa_assert(data);
73
74 pa_zero(*data);
75 data->proplist = pa_proplist_new();
76
77 return data;
78 }
79
80 void pa_source_new_data_set_name(pa_source_new_data *data, const char *name) {
81 pa_assert(data);
82
83 pa_xfree(data->name);
84 data->name = pa_xstrdup(name);
85 }
86
87 void pa_source_new_data_set_sample_spec(pa_source_new_data *data, const pa_sample_spec *spec) {
88 pa_assert(data);
89
90 if ((data->sample_spec_is_set = !!spec))
91 data->sample_spec = *spec;
92 }
93
94 void pa_source_new_data_set_channel_map(pa_source_new_data *data, const pa_channel_map *map) {
95 pa_assert(data);
96
97 if ((data->channel_map_is_set = !!map))
98 data->channel_map = *map;
99 }
100
101 void pa_source_new_data_set_volume(pa_source_new_data *data, const pa_cvolume *volume) {
102 pa_assert(data);
103
104 if ((data->volume_is_set = !!volume))
105 data->volume = *volume;
106 }
107
108 void pa_source_new_data_set_muted(pa_source_new_data *data, pa_bool_t mute) {
109 pa_assert(data);
110
111 data->muted_is_set = TRUE;
112 data->muted = !!mute;
113 }
114
115 void pa_source_new_data_set_port(pa_source_new_data *data, const char *port) {
116 pa_assert(data);
117
118 pa_xfree(data->active_port);
119 data->active_port = pa_xstrdup(port);
120 }
121
122 void pa_source_new_data_done(pa_source_new_data *data) {
123 pa_assert(data);
124
125 pa_proplist_free(data->proplist);
126
127 if (data->ports) {
128 pa_device_port *p;
129
130 while ((p = pa_hashmap_steal_first(data->ports)))
131 pa_device_port_free(p);
132
133 pa_hashmap_free(data->ports, NULL, NULL);
134 }
135
136 pa_xfree(data->name);
137 pa_xfree(data->active_port);
138 }
139
140 /* Called from main context */
141 static void reset_callbacks(pa_source *s) {
142 pa_assert(s);
143
144 s->set_state = NULL;
145 s->get_volume = NULL;
146 s->set_volume = NULL;
147 s->get_mute = NULL;
148 s->set_mute = NULL;
149 s->update_requested_latency = NULL;
150 s->set_port = NULL;
151 s->get_formats = NULL;
152 }
153
154 /* Called from main context */
155 pa_source* pa_source_new(
156 pa_core *core,
157 pa_source_new_data *data,
158 pa_source_flags_t flags) {
159
160 pa_source *s;
161 const char *name;
162 char st[PA_SAMPLE_SPEC_SNPRINT_MAX], cm[PA_CHANNEL_MAP_SNPRINT_MAX];
163 char *pt;
164
165 pa_assert(core);
166 pa_assert(data);
167 pa_assert(data->name);
168 pa_assert_ctl_context();
169
170 s = pa_msgobject_new(pa_source);
171
172 if (!(name = pa_namereg_register(core, data->name, PA_NAMEREG_SOURCE, s, data->namereg_fail))) {
173 pa_log_debug("Failed to register name %s.", data->name);
174 pa_xfree(s);
175 return NULL;
176 }
177
178 pa_source_new_data_set_name(data, name);
179
180 if (pa_hook_fire(&core->hooks[PA_CORE_HOOK_SOURCE_NEW], data) < 0) {
181 pa_xfree(s);
182 pa_namereg_unregister(core, name);
183 return NULL;
184 }
185
186 /* FIXME, need to free s here on failure */
187
188 pa_return_null_if_fail(!data->driver || pa_utf8_valid(data->driver));
189 pa_return_null_if_fail(data->name && pa_utf8_valid(data->name) && data->name[0]);
190
191 pa_return_null_if_fail(data->sample_spec_is_set && pa_sample_spec_valid(&data->sample_spec));
192
193 if (!data->channel_map_is_set)
194 pa_return_null_if_fail(pa_channel_map_init_auto(&data->channel_map, data->sample_spec.channels, PA_CHANNEL_MAP_DEFAULT));
195
196 pa_return_null_if_fail(pa_channel_map_valid(&data->channel_map));
197 pa_return_null_if_fail(data->channel_map.channels == data->sample_spec.channels);
198
199 /* FIXME: There should probably be a general function for checking whether
200 * the source volume is allowed to be set, like there is for source outputs. */
201 pa_assert(!data->volume_is_set || !(flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
202
203 if (!data->volume_is_set) {
204 pa_cvolume_reset(&data->volume, data->sample_spec.channels);
205 data->save_volume = FALSE;
206 }
207
208 pa_return_null_if_fail(pa_cvolume_valid(&data->volume));
209 pa_return_null_if_fail(pa_cvolume_compatible(&data->volume, &data->sample_spec));
210
211 if (!data->muted_is_set)
212 data->muted = FALSE;
213
214 if (data->card)
215 pa_proplist_update(data->proplist, PA_UPDATE_MERGE, data->card->proplist);
216
217 pa_device_init_description(data->proplist);
218 pa_device_init_icon(data->proplist, FALSE);
219 pa_device_init_intended_roles(data->proplist);
220
221 if (pa_hook_fire(&core->hooks[PA_CORE_HOOK_SOURCE_FIXATE], data) < 0) {
222 pa_xfree(s);
223 pa_namereg_unregister(core, name);
224 return NULL;
225 }
226
227 s->parent.parent.free = source_free;
228 s->parent.process_msg = pa_source_process_msg;
229
230 s->core = core;
231 s->state = PA_SOURCE_INIT;
232 s->flags = flags;
233 s->priority = 0;
234 s->suspend_cause = 0;
235 s->name = pa_xstrdup(name);
236 s->proplist = pa_proplist_copy(data->proplist);
237 s->driver = pa_xstrdup(pa_path_get_filename(data->driver));
238 s->module = data->module;
239 s->card = data->card;
240
241 s->priority = pa_device_init_priority(s->proplist);
242
243 s->sample_spec = data->sample_spec;
244 s->channel_map = data->channel_map;
245
246 s->outputs = pa_idxset_new(NULL, NULL);
247 s->n_corked = 0;
248 s->monitor_of = NULL;
249 s->output_from_master = NULL;
250
251 s->reference_volume = s->real_volume = data->volume;
252 pa_cvolume_reset(&s->soft_volume, s->sample_spec.channels);
253 s->base_volume = PA_VOLUME_NORM;
254 s->n_volume_steps = PA_VOLUME_NORM+1;
255 s->muted = data->muted;
256 s->refresh_volume = s->refresh_muted = FALSE;
257
258 reset_callbacks(s);
259 s->userdata = NULL;
260
261 s->asyncmsgq = NULL;
262
263 /* As a minor optimization we just steal the list instead of
264 * copying it here */
265 s->ports = data->ports;
266 data->ports = NULL;
267
268 s->active_port = NULL;
269 s->save_port = FALSE;
270
271 if (data->active_port && s->ports)
272 if ((s->active_port = pa_hashmap_get(s->ports, data->active_port)))
273 s->save_port = data->save_port;
274
275 if (!s->active_port && s->ports) {
276 void *state;
277 pa_device_port *p;
278
279 PA_HASHMAP_FOREACH(p, s->ports, state)
280 if (!s->active_port || p->priority > s->active_port->priority)
281 s->active_port = p;
282 }
283
284 s->save_volume = data->save_volume;
285 s->save_muted = data->save_muted;
286
287 pa_silence_memchunk_get(
288 &core->silence_cache,
289 core->mempool,
290 &s->silence,
291 &s->sample_spec,
292 0);
293
294 s->thread_info.rtpoll = NULL;
295 s->thread_info.outputs = pa_hashmap_new(pa_idxset_trivial_hash_func, pa_idxset_trivial_compare_func);
296 s->thread_info.soft_volume = s->soft_volume;
297 s->thread_info.soft_muted = s->muted;
298 s->thread_info.state = s->state;
299 s->thread_info.max_rewind = 0;
300 s->thread_info.requested_latency_valid = FALSE;
301 s->thread_info.requested_latency = 0;
302 s->thread_info.min_latency = ABSOLUTE_MIN_LATENCY;
303 s->thread_info.max_latency = ABSOLUTE_MAX_LATENCY;
304 s->thread_info.fixed_latency = flags & PA_SOURCE_DYNAMIC_LATENCY ? 0 : DEFAULT_FIXED_LATENCY;
305
306 PA_LLIST_HEAD_INIT(pa_source_volume_change, s->thread_info.volume_changes);
307 s->thread_info.volume_changes_tail = NULL;
308 pa_sw_cvolume_multiply(&s->thread_info.current_hw_volume, &s->soft_volume, &s->real_volume);
309 s->thread_info.volume_change_safety_margin = core->sync_volume_safety_margin_usec;
310 s->thread_info.volume_change_extra_delay = core->sync_volume_extra_delay_usec;
311
312 /* FIXME: This should probably be moved to pa_source_put() */
313 pa_assert_se(pa_idxset_put(core->sources, s, &s->index) >= 0);
314
315 if (s->card)
316 pa_assert_se(pa_idxset_put(s->card->sources, s, NULL) >= 0);
317
318 pt = pa_proplist_to_string_sep(s->proplist, "\n ");
319 pa_log_info("Created source %u \"%s\" with sample spec %s and channel map %s\n %s",
320 s->index,
321 s->name,
322 pa_sample_spec_snprint(st, sizeof(st), &s->sample_spec),
323 pa_channel_map_snprint(cm, sizeof(cm), &s->channel_map),
324 pt);
325 pa_xfree(pt);
326
327 return s;
328 }
329
330 /* Called from main context */
331 static int source_set_state(pa_source *s, pa_source_state_t state) {
332 int ret;
333 pa_bool_t suspend_change;
334 pa_source_state_t original_state;
335
336 pa_assert(s);
337 pa_assert_ctl_context();
338
339 if (s->state == state)
340 return 0;
341
342 original_state = s->state;
343
344 suspend_change =
345 (original_state == PA_SOURCE_SUSPENDED && PA_SOURCE_IS_OPENED(state)) ||
346 (PA_SOURCE_IS_OPENED(original_state) && state == PA_SOURCE_SUSPENDED);
347
348 if (s->set_state)
349 if ((ret = s->set_state(s, state)) < 0)
350 return ret;
351
352 if (s->asyncmsgq)
353 if ((ret = pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_STATE, PA_UINT_TO_PTR(state), 0, NULL)) < 0) {
354
355 if (s->set_state)
356 s->set_state(s, original_state);
357
358 return ret;
359 }
360
361 s->state = state;
362
363 if (state != PA_SOURCE_UNLINKED) { /* if we enter UNLINKED state pa_source_unlink() will fire the appropriate events */
364 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_STATE_CHANGED], s);
365 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
366 }
367
368 if (suspend_change) {
369 pa_source_output *o;
370 uint32_t idx;
371
372 /* We're suspending or resuming, tell everyone about it */
373
374 PA_IDXSET_FOREACH(o, s->outputs, idx)
375 if (s->state == PA_SOURCE_SUSPENDED &&
376 (o->flags & PA_SOURCE_OUTPUT_KILL_ON_SUSPEND))
377 pa_source_output_kill(o);
378 else if (o->suspend)
379 o->suspend(o, state == PA_SOURCE_SUSPENDED);
380 }
381
382 return 0;
383 }
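/* Editor annotation, not part of the upstream file: note the rollback in
 * source_set_state() above - the new state is first offered to the
 * implementor via set_state() and then pushed synchronously to the IO
 * thread with PA_SOURCE_MESSAGE_SET_STATE; if that message fails,
 * set_state() is called again with the original state before the error
 * is returned. */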
384
385 /* Called from main context */
386 void pa_source_put(pa_source *s) {
387 pa_source_assert_ref(s);
388 pa_assert_ctl_context();
389
390 pa_assert(s->state == PA_SOURCE_INIT);
391 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) || s->output_from_master);
392
393 /* The following fields must be initialized properly when calling _put() */
394 pa_assert(s->asyncmsgq);
395 pa_assert(s->thread_info.min_latency <= s->thread_info.max_latency);
396
397 /* Generally, flags should be initialized via pa_source_new(). As a
398 * special exception we allow volume related flags to be set
399 * between _new() and _put(). */
400
401 /* XXX: Currently decibel volume is disabled for all sources that use volume
402 * sharing. When the master source supports decibel volume, it would be good
403 * to have the flag also in the filter source, but currently we don't do that
404 * so that the flags of the filter source never change when it's moved from
405 * a master source to another. One solution for this problem would be to
406 * remove user-visible volume altogether from filter sources when volume
407 * sharing is used, but the current approach was easier to implement... */
408 if (!(s->flags & PA_SOURCE_HW_VOLUME_CTRL) && !(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
409 s->flags |= PA_SOURCE_DECIBEL_VOLUME;
410
411 if ((s->flags & PA_SOURCE_DECIBEL_VOLUME) && s->core->flat_volumes)
412 s->flags |= PA_SOURCE_FLAT_VOLUME;
413
414 if (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) {
415 pa_source *root_source = s->output_from_master->source;
416
417 while (root_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)
418 root_source = root_source->output_from_master->source;
419
420 s->reference_volume = root_source->reference_volume;
421 pa_cvolume_remap(&s->reference_volume, &root_source->channel_map, &s->channel_map);
422
423 s->real_volume = root_source->real_volume;
424 pa_cvolume_remap(&s->real_volume, &root_source->channel_map, &s->channel_map);
425 } else
426 /* We assume that if the source implementor changed the default
427 * volume they did so in real_volume, because that is the usual
428 * place where such changes are supposed to go. */
429 s->reference_volume = s->real_volume;
430
431 s->thread_info.soft_volume = s->soft_volume;
432 s->thread_info.soft_muted = s->muted;
433 pa_sw_cvolume_multiply(&s->thread_info.current_hw_volume, &s->soft_volume, &s->real_volume);
434
435 pa_assert((s->flags & PA_SOURCE_HW_VOLUME_CTRL)
436 || (s->base_volume == PA_VOLUME_NORM
437 && ((s->flags & PA_SOURCE_DECIBEL_VOLUME || (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)))));
438 pa_assert(!(s->flags & PA_SOURCE_DECIBEL_VOLUME) || s->n_volume_steps == PA_VOLUME_NORM+1);
439 pa_assert(!(s->flags & PA_SOURCE_DYNAMIC_LATENCY) == (s->thread_info.fixed_latency != 0));
440 pa_assert(!(s->flags & PA_SOURCE_HW_VOLUME_CTRL) || s->set_volume);
441 pa_assert(!(s->flags & PA_SOURCE_SYNC_VOLUME) || (s->flags & PA_SOURCE_HW_VOLUME_CTRL));
442 pa_assert(!(s->flags & PA_SOURCE_SYNC_VOLUME) || s->write_volume);
443 pa_assert(!(s->flags & PA_SOURCE_HW_MUTE_CTRL) || s->set_mute);
444
445 pa_assert_se(source_set_state(s, PA_SOURCE_IDLE) == 0);
446
447 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_NEW, s->index);
448 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PUT], s);
449 }
450
451 /* Called from main context */
452 void pa_source_unlink(pa_source *s) {
453 pa_bool_t linked;
454 pa_source_output *o, *j = NULL;
455
456 pa_assert(s);
457 pa_assert_ctl_context();
458
459 /* See pa_sink_unlink() for a couple of comments on how this function
460 * works. */
461
462 linked = PA_SOURCE_IS_LINKED(s->state);
463
464 if (linked)
465 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_UNLINK], s);
466
467 if (s->state != PA_SOURCE_UNLINKED)
468 pa_namereg_unregister(s->core, s->name);
469 pa_idxset_remove_by_data(s->core->sources, s, NULL);
470
471 if (s->card)
472 pa_idxset_remove_by_data(s->card->sources, s, NULL);
473
474 while ((o = pa_idxset_first(s->outputs, NULL))) {
475 pa_assert(o != j);
476 pa_source_output_kill(o);
477 j = o;
478 }
479
480 if (linked)
481 source_set_state(s, PA_SOURCE_UNLINKED);
482 else
483 s->state = PA_SOURCE_UNLINKED;
484
485 reset_callbacks(s);
486
487 if (linked) {
488 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE | PA_SUBSCRIPTION_EVENT_REMOVE, s->index);
489 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_UNLINK_POST], s);
490 }
491 }
492
493 /* Called from main context */
494 static void source_free(pa_object *o) {
495 pa_source_output *so;
496 pa_source *s = PA_SOURCE(o);
497
498 pa_assert(s);
499 pa_assert_ctl_context();
500 pa_assert(pa_source_refcnt(s) == 0);
501
502 if (PA_SOURCE_IS_LINKED(s->state))
503 pa_source_unlink(s);
504
505 pa_log_info("Freeing source %u \"%s\"", s->index, s->name);
506
507 pa_idxset_free(s->outputs, NULL, NULL);
508
509 while ((so = pa_hashmap_steal_first(s->thread_info.outputs)))
510 pa_source_output_unref(so);
511
512 pa_hashmap_free(s->thread_info.outputs, NULL, NULL);
513
514 if (s->silence.memblock)
515 pa_memblock_unref(s->silence.memblock);
516
517 pa_xfree(s->name);
518 pa_xfree(s->driver);
519
520 if (s->proplist)
521 pa_proplist_free(s->proplist);
522
523 if (s->ports) {
524 pa_device_port *p;
525
526 while ((p = pa_hashmap_steal_first(s->ports)))
527 pa_device_port_free(p);
528
529 pa_hashmap_free(s->ports, NULL, NULL);
530 }
531
532 pa_xfree(s);
533 }
534
535 /* Called from main context, and not while the IO thread is active, please */
536 void pa_source_set_asyncmsgq(pa_source *s, pa_asyncmsgq *q) {
537 pa_source_assert_ref(s);
538 pa_assert_ctl_context();
539
540 s->asyncmsgq = q;
541 }
542
543 /* Called from main context, and not while the IO thread is active, please */
544 void pa_source_update_flags(pa_source *s, pa_source_flags_t mask, pa_source_flags_t value) {
545 pa_source_assert_ref(s);
546 pa_assert_ctl_context();
547
548 if (mask == 0)
549 return;
550
551 /* For now, allow only a minimal set of flags to be changed. */
552 pa_assert((mask & ~(PA_SOURCE_DYNAMIC_LATENCY|PA_SOURCE_LATENCY)) == 0);
553
554 s->flags = (s->flags & ~mask) | (value & mask);
555 }
556
557 /* Called from IO context, or before _put() from main context */
558 void pa_source_set_rtpoll(pa_source *s, pa_rtpoll *p) {
559 pa_source_assert_ref(s);
560 pa_source_assert_io_context(s);
561
562 s->thread_info.rtpoll = p;
563 }
564
565 /* Called from main context */
566 int pa_source_update_status(pa_source*s) {
567 pa_source_assert_ref(s);
568 pa_assert_ctl_context();
569 pa_assert(PA_SOURCE_IS_LINKED(s->state));
570
571 if (s->state == PA_SOURCE_SUSPENDED)
572 return 0;
573
574 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
575 }
576
577 /* Called from main context */
578 int pa_source_suspend(pa_source *s, pa_bool_t suspend, pa_suspend_cause_t cause) {
579 pa_source_assert_ref(s);
580 pa_assert_ctl_context();
581 pa_assert(PA_SOURCE_IS_LINKED(s->state));
582 pa_assert(cause != 0);
583
584 if (s->monitor_of && cause != PA_SUSPEND_PASSTHROUGH)
585 return -PA_ERR_NOTSUPPORTED;
586
587 if (suspend)
588 s->suspend_cause |= cause;
589 else
590 s->suspend_cause &= ~cause;
591
592 if ((pa_source_get_state(s) == PA_SOURCE_SUSPENDED) == !!s->suspend_cause)
593 return 0;
594
595 pa_log_debug("Suspend cause of source %s is 0x%04x, %s", s->name, s->suspend_cause, s->suspend_cause ? "suspending" : "resuming");
596
597 if (s->suspend_cause)
598 return source_set_state(s, PA_SOURCE_SUSPENDED);
599 else
600 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
601 }
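/* Editor annotation, not part of the upstream file: suspend causes
 * accumulate as a bitmask. For example, suspending first with
 * PA_SUSPEND_IDLE and then with PA_SUSPEND_USER keeps the source
 * suspended until both causes have been cleared again; only the
 * transition between "no cause set" and "at least one cause set"
 * actually reaches source_set_state() above. */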
602
603 /* Called from main context */
604 int pa_source_sync_suspend(pa_source *s) {
605 pa_sink_state_t state;
606
607 pa_source_assert_ref(s);
608 pa_assert_ctl_context();
609 pa_assert(PA_SOURCE_IS_LINKED(s->state));
610 pa_assert(s->monitor_of);
611
612 state = pa_sink_get_state(s->monitor_of);
613
614 if (state == PA_SINK_SUSPENDED)
615 return source_set_state(s, PA_SOURCE_SUSPENDED);
616
617 pa_assert(PA_SINK_IS_OPENED(state));
618
619 return source_set_state(s, pa_source_used_by(s) ? PA_SOURCE_RUNNING : PA_SOURCE_IDLE);
620 }
621
622 /* Called from main context */
623 pa_queue *pa_source_move_all_start(pa_source *s, pa_queue *q) {
624 pa_source_output *o, *n;
625 uint32_t idx;
626
627 pa_source_assert_ref(s);
628 pa_assert_ctl_context();
629 pa_assert(PA_SOURCE_IS_LINKED(s->state));
630
631 if (!q)
632 q = pa_queue_new();
633
634 for (o = PA_SOURCE_OUTPUT(pa_idxset_first(s->outputs, &idx)); o; o = n) {
635 n = PA_SOURCE_OUTPUT(pa_idxset_next(s->outputs, &idx));
636
637 pa_source_output_ref(o);
638
639 if (pa_source_output_start_move(o) >= 0)
640 pa_queue_push(q, o);
641 else
642 pa_source_output_unref(o);
643 }
644
645 return q;
646 }
647
648 /* Called from main context */
649 void pa_source_move_all_finish(pa_source *s, pa_queue *q, pa_bool_t save) {
650 pa_source_output *o;
651
652 pa_source_assert_ref(s);
653 pa_assert_ctl_context();
654 pa_assert(PA_SOURCE_IS_LINKED(s->state));
655 pa_assert(q);
656
657 while ((o = PA_SOURCE_OUTPUT(pa_queue_pop(q)))) {
658 if (pa_source_output_finish_move(o, s, save) < 0)
659 pa_source_output_fail_move(o);
660
661 pa_source_output_unref(o);
662 }
663
664 pa_queue_free(q, NULL, NULL);
665 }
666
667 /* Called from main context */
668 void pa_source_move_all_fail(pa_queue *q) {
669 pa_source_output *o;
670
671 pa_assert_ctl_context();
672 pa_assert(q);
673
674 while ((o = PA_SOURCE_OUTPUT(pa_queue_pop(q)))) {
675 pa_source_output_fail_move(o);
676 pa_source_output_unref(o);
677 }
678
679 pa_queue_free(q, NULL, NULL);
680 }
681
682 /* Called from IO thread context */
683 void pa_source_process_rewind(pa_source *s, size_t nbytes) {
684 pa_source_output *o;
685 void *state = NULL;
686
687 pa_source_assert_ref(s);
688 pa_source_assert_io_context(s);
689 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
690
691 if (nbytes <= 0)
692 return;
693
694 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
695 return;
696
697 pa_log_debug("Processing rewind...");
698
699 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
700 pa_source_output_assert_ref(o);
701 pa_source_output_process_rewind(o, nbytes);
702 }
703 }
704
705 /* Called from IO thread context */
706 void pa_source_post(pa_source*s, const pa_memchunk *chunk) {
707 pa_source_output *o;
708 void *state = NULL;
709
710 pa_source_assert_ref(s);
711 pa_source_assert_io_context(s);
712 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
713 pa_assert(chunk);
714
715 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
716 return;
717
718 if (s->thread_info.soft_muted || !pa_cvolume_is_norm(&s->thread_info.soft_volume)) {
719 pa_memchunk vchunk = *chunk;
720
721 pa_memblock_ref(vchunk.memblock);
722 pa_memchunk_make_writable(&vchunk, 0);
723
724 if (s->thread_info.soft_muted || pa_cvolume_is_muted(&s->thread_info.soft_volume))
725 pa_silence_memchunk(&vchunk, &s->sample_spec);
726 else
727 pa_volume_memchunk(&vchunk, &s->sample_spec, &s->thread_info.soft_volume);
728
729 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL))) {
730 pa_source_output_assert_ref(o);
731
732 if (!o->thread_info.direct_on_input)
733 pa_source_output_push(o, &vchunk);
734 }
735
736 pa_memblock_unref(vchunk.memblock);
737 } else {
738
739 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL))) {
740 pa_source_output_assert_ref(o);
741
742 if (!o->thread_info.direct_on_input)
743 pa_source_output_push(o, chunk);
744 }
745 }
746 }
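/* Editor annotation, not part of the upstream file: the branch above only
 * clones the chunk when soft mute or a non-norm soft volume actually has
 * to be applied; otherwise the captured memblock is pushed to the outputs
 * without copying. Outputs with direct_on_input set are skipped in both
 * branches and are fed through pa_source_post_direct() instead. */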
747
748 /* Called from IO thread context */
749 void pa_source_post_direct(pa_source*s, pa_source_output *o, const pa_memchunk *chunk) {
750 pa_source_assert_ref(s);
751 pa_source_assert_io_context(s);
752 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
753 pa_source_output_assert_ref(o);
754 pa_assert(o->thread_info.direct_on_input);
755 pa_assert(chunk);
756
757 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
758 return;
759
760 if (s->thread_info.soft_muted || !pa_cvolume_is_norm(&s->thread_info.soft_volume)) {
761 pa_memchunk vchunk = *chunk;
762
763 pa_memblock_ref(vchunk.memblock);
764 pa_memchunk_make_writable(&vchunk, 0);
765
766 if (s->thread_info.soft_muted || pa_cvolume_is_muted(&s->thread_info.soft_volume))
767 pa_silence_memchunk(&vchunk, &s->sample_spec);
768 else
769 pa_volume_memchunk(&vchunk, &s->sample_spec, &s->thread_info.soft_volume);
770
771 pa_source_output_push(o, &vchunk);
772
773 pa_memblock_unref(vchunk.memblock);
774 } else
775 pa_source_output_push(o, chunk);
776 }
777
778 /* Called from main thread */
779 pa_usec_t pa_source_get_latency(pa_source *s) {
780 pa_usec_t usec;
781
782 pa_source_assert_ref(s);
783 pa_assert_ctl_context();
784 pa_assert(PA_SOURCE_IS_LINKED(s->state));
785
786 if (s->state == PA_SOURCE_SUSPENDED)
787 return 0;
788
789 if (!(s->flags & PA_SOURCE_LATENCY))
790 return 0;
791
792 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_LATENCY, &usec, 0, NULL) == 0);
793
794 return usec;
795 }
796
797 /* Called from IO thread */
798 pa_usec_t pa_source_get_latency_within_thread(pa_source *s) {
799 pa_usec_t usec = 0;
800 pa_msgobject *o;
801
802 pa_source_assert_ref(s);
803 pa_source_assert_io_context(s);
804 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
805
806 /* The returned value is supposed to be in the time domain of the sound card! */
807
808 if (s->thread_info.state == PA_SOURCE_SUSPENDED)
809 return 0;
810
811 if (!(s->flags & PA_SOURCE_LATENCY))
812 return 0;
813
814 o = PA_MSGOBJECT(s);
815
816 /* FIXME: We probably should make this a proper vtable callback instead of going through process_msg() */
817
818 if (o->process_msg(o, PA_SOURCE_MESSAGE_GET_LATENCY, &usec, 0, NULL) < 0)
819 return -1;
820
821 return usec;
822 }
823
824 /* Called from the main thread (and also from the IO thread while the main
825 * thread is waiting).
826 *
827 * When a source uses volume sharing, it never has the PA_SOURCE_FLAT_VOLUME flag
828 * set. Instead, flat volume mode is detected by checking whether the root source
829 * has the flag set. */
830 pa_bool_t pa_source_flat_volume_enabled(pa_source *s) {
831 pa_source_assert_ref(s);
832
833 while (s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)
834 s = s->output_from_master->source;
835
836 return (s->flags & PA_SOURCE_FLAT_VOLUME);
837 }
838
839 /* Called from main context */
840 pa_bool_t pa_source_is_passthrough(pa_source *s) {
841
842 pa_source_assert_ref(s);
843
844 /* NB Currently only monitor sources support passthrough mode */
845 return (s->monitor_of && pa_sink_is_passthrough(s->monitor_of));
846 }
847
848 /* Called from main context. */
849 static void compute_reference_ratio(pa_source_output *o) {
850 unsigned c = 0;
851 pa_cvolume remapped;
852
853 pa_assert(o);
854 pa_assert(pa_source_flat_volume_enabled(o->source));
855
856 /*
857 * Calculates the reference ratio from the source's reference
858 * volume. This basically calculates:
859 *
860 * o->reference_ratio = o->volume / o->source->reference_volume
861 */
862
863 remapped = o->source->reference_volume;
864 pa_cvolume_remap(&remapped, &o->source->channel_map, &o->channel_map);
865
866 o->reference_ratio.channels = o->sample_spec.channels;
867
868 for (c = 0; c < o->sample_spec.channels; c++) {
869
870 /* We don't update when the source volume is 0 anyway */
871 if (remapped.values[c] <= PA_VOLUME_MUTED)
872 continue;
873
874 /* Don't update the reference ratio unless necessary */
875 if (pa_sw_volume_multiply(
876 o->reference_ratio.values[c],
877 remapped.values[c]) == o->volume.values[c])
878 continue;
879
880 o->reference_ratio.values[c] = pa_sw_volume_divide(
881 o->volume.values[c],
882 remapped.values[c]);
883 }
884 }
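/* Editor annotation, not part of the upstream file: the loop above keeps
 * the per-channel invariant
 *
 *     pa_sw_volume_multiply(o->reference_ratio.values[c],
 *                           remapped.values[c]) == o->volume.values[c]
 *
 * (modulo rounding), recomputing the ratio via pa_sw_volume_divide() only
 * when that equality no longer holds and the remapped reference volume is
 * not muted. */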
885
886 /* Called from main context. Only called for the root source in volume sharing
887 * cases, except for internal recursive calls. */
888 static void compute_reference_ratios(pa_source *s) {
889 uint32_t idx;
890 pa_source_output *o;
891
892 pa_source_assert_ref(s);
893 pa_assert_ctl_context();
894 pa_assert(PA_SOURCE_IS_LINKED(s->state));
895 pa_assert(pa_source_flat_volume_enabled(s));
896
897 PA_IDXSET_FOREACH(o, s->outputs, idx) {
898 compute_reference_ratio(o);
899
900 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
901 compute_reference_ratios(o->destination_source);
902 }
903 }
904
905 /* Called from main context. Only called for the root source in volume sharing
906 * cases, except for internal recursive calls. */
907 static void compute_real_ratios(pa_source *s) {
908 pa_source_output *o;
909 uint32_t idx;
910
911 pa_source_assert_ref(s);
912 pa_assert_ctl_context();
913 pa_assert(PA_SOURCE_IS_LINKED(s->state));
914 pa_assert(pa_source_flat_volume_enabled(s));
915
916 PA_IDXSET_FOREACH(o, s->outputs, idx) {
917 unsigned c;
918 pa_cvolume remapped;
919
920 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
921 /* The origin source uses volume sharing, so this output's real ratio
922 * is handled as a special case - the real ratio must be 0 dB, and
923 * as a result o->soft_volume must equal o->volume_factor. */
924 pa_cvolume_reset(&o->real_ratio, o->real_ratio.channels);
925 o->soft_volume = o->volume_factor;
926
927 compute_real_ratios(o->destination_source);
928
929 continue;
930 }
931
932 /*
933 * This basically calculates:
934 *
935 * o->real_ratio := o->volume / s->real_volume
936 * o->soft_volume := o->real_ratio * o->volume_factor
937 */
938
939 remapped = s->real_volume;
940 pa_cvolume_remap(&remapped, &s->channel_map, &o->channel_map);
941
942 o->real_ratio.channels = o->sample_spec.channels;
943 o->soft_volume.channels = o->sample_spec.channels;
944
945 for (c = 0; c < o->sample_spec.channels; c++) {
946
947 if (remapped.values[c] <= PA_VOLUME_MUTED) {
948 /* We leave o->real_ratio untouched */
949 o->soft_volume.values[c] = PA_VOLUME_MUTED;
950 continue;
951 }
952
953 /* Don't lose accuracy unless necessary */
954 if (pa_sw_volume_multiply(
955 o->real_ratio.values[c],
956 remapped.values[c]) != o->volume.values[c])
957
958 o->real_ratio.values[c] = pa_sw_volume_divide(
959 o->volume.values[c],
960 remapped.values[c]);
961
962 o->soft_volume.values[c] = pa_sw_volume_multiply(
963 o->real_ratio.values[c],
964 o->volume_factor.values[c]);
965 }
966
967 /* We don't copy the soft_volume to the thread_info data
968 * here. That must be done by the caller */
969 }
970 }
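/* Editor annotation, not part of the upstream file: for outputs that do
 * not use volume sharing the loop above effectively establishes, per
 * channel,
 *
 *     o->soft_volume = (o->volume / s->real_volume) * o->volume_factor
 *
 * so that the source-wide real volume combined with the per-stream soft
 * volume reproduces the volume the client requested, scaled by any
 * internal volume factor. */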
971
972 static pa_cvolume *cvolume_remap_minimal_impact(
973 pa_cvolume *v,
974 const pa_cvolume *template,
975 const pa_channel_map *from,
976 const pa_channel_map *to) {
977
978 pa_cvolume t;
979
980 pa_assert(v);
981 pa_assert(template);
982 pa_assert(from);
983 pa_assert(to);
984 pa_assert(pa_cvolume_compatible_with_channel_map(v, from));
985 pa_assert(pa_cvolume_compatible_with_channel_map(template, to));
986
987 /* Much like pa_cvolume_remap(), but tries to minimize impact when
988 * mapping from source output to source volumes:
989 *
990 * If template is a possible remapping from v it is used instead
991 * of remapping anew.
992 *
993 * If the channel maps don't match we set an all-channel volume on
994 * the source to ensure that changing a volume on one stream has no
995 * effect that cannot be compensated for in another stream that
996 * does not have the same channel map as the source. */
997
998 if (pa_channel_map_equal(from, to))
999 return v;
1000
1001 t = *template;
1002 if (pa_cvolume_equal(pa_cvolume_remap(&t, to, from), v)) {
1003 *v = *template;
1004 return v;
1005 }
1006
1007 pa_cvolume_set(v, to->channels, pa_cvolume_max(v));
1008 return v;
1009 }
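/* Editor annotation, not part of the upstream file: mechanically the
 * function above does three things: if the channel maps already match, v
 * is returned untouched; if the template, remapped back onto v's channel
 * map, equals v, the template is reused verbatim; otherwise v collapses
 * to a flat volume equal to its loudest channel across all target
 * channels, which is the "all-channel volume" case described in the
 * comment. */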
1010
1011 /* Called from main thread. Only called for the root source in volume sharing
1012 * cases, except for internal recursive calls. */
1013 static void get_maximum_output_volume(pa_source *s, pa_cvolume *max_volume, const pa_channel_map *channel_map) {
1014 pa_source_output *o;
1015 uint32_t idx;
1016
1017 pa_source_assert_ref(s);
1018 pa_assert(max_volume);
1019 pa_assert(channel_map);
1020 pa_assert(pa_source_flat_volume_enabled(s));
1021
1022 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1023 pa_cvolume remapped;
1024
1025 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1026 get_maximum_output_volume(o->destination_source, max_volume, channel_map);
1027
1028 /* Ignore this output. The origin source uses volume sharing, so this
1029 * output's volume will be set to be equal to the root source's real
1030 * volume. Obviously this output's current volume must not then
1031 * affect what the root source's real volume will be. */
1032 continue;
1033 }
1034
1035 remapped = o->volume;
1036 cvolume_remap_minimal_impact(&remapped, max_volume, &o->channel_map, channel_map);
1037 pa_cvolume_merge(max_volume, max_volume, &remapped);
1038 }
1039 }
1040
1041 /* Called from main thread. Only called for the root source in volume sharing
1042 * cases, except for internal recursive calls. */
1043 static pa_bool_t has_outputs(pa_source *s) {
1044 pa_source_output *o;
1045 uint32_t idx;
1046
1047 pa_source_assert_ref(s);
1048
1049 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1050 if (!o->destination_source || !(o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER) || has_outputs(o->destination_source))
1051 return TRUE;
1052 }
1053
1054 return FALSE;
1055 }
1056
1057 /* Called from main thread. Only called for the root source in volume sharing
1058 * cases, except for internal recursive calls. */
1059 static void update_real_volume(pa_source *s, const pa_cvolume *new_volume, pa_channel_map *channel_map) {
1060 pa_source_output *o;
1061 uint32_t idx;
1062
1063 pa_source_assert_ref(s);
1064 pa_assert(new_volume);
1065 pa_assert(channel_map);
1066
1067 s->real_volume = *new_volume;
1068 pa_cvolume_remap(&s->real_volume, channel_map, &s->channel_map);
1069
1070 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1071 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1072 if (pa_source_flat_volume_enabled(s)) {
1073 pa_cvolume old_volume = o->volume;
1074
1075 /* Follow the root source's real volume. */
1076 o->volume = *new_volume;
1077 pa_cvolume_remap(&o->volume, channel_map, &o->channel_map);
1078 compute_reference_ratio(o);
1079
1080 /* The volume changed, let's tell people so */
1081 if (!pa_cvolume_equal(&old_volume, &o->volume)) {
1082 if (o->volume_changed)
1083 o->volume_changed(o);
1084
1085 pa_subscription_post(o->core, PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT|PA_SUBSCRIPTION_EVENT_CHANGE, o->index);
1086 }
1087 }
1088
1089 update_real_volume(o->destination_source, new_volume, channel_map);
1090 }
1091 }
1092 }
1093
1094 /* Called from main thread. Only called for the root source in shared volume
1095 * cases. */
1096 static void compute_real_volume(pa_source *s) {
1097 pa_source_assert_ref(s);
1098 pa_assert_ctl_context();
1099 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1100 pa_assert(pa_source_flat_volume_enabled(s));
1101 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1102
1103 /* This determines the maximum volume of all streams and sets
1104 * s->real_volume accordingly. */
1105
1106 if (!has_outputs(s)) {
1107 /* In the special case that we have no source outputs we leave the
1108 * volume unmodified. */
1109 update_real_volume(s, &s->reference_volume, &s->channel_map);
1110 return;
1111 }
1112
1113 pa_cvolume_mute(&s->real_volume, s->channel_map.channels);
1114
1115 /* First let's determine the new maximum volume of all outputs
1116 * connected to this source */
1117 get_maximum_output_volume(s, &s->real_volume, &s->channel_map);
1118 update_real_volume(s, &s->real_volume, &s->channel_map);
1119
1120 /* Then, let's update the real ratios/soft volumes of all outputs
1121 * connected to this source */
1122 compute_real_ratios(s);
1123 }
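/* Editor annotation, not part of the upstream file: in other words, when
 * at least one output exists the real volume becomes the per-channel
 * maximum of all output volumes (remapped with minimal impact onto the
 * source's channel map), and compute_real_ratios() then derives the
 * per-output soft volumes needed to scale each stream back down to what
 * it actually asked for. */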
1124
1125 /* Called from main thread. Only called for the root source in shared volume
1126 * cases, except for internal recursive calls. */
1127 static void propagate_reference_volume(pa_source *s) {
1128 pa_source_output *o;
1129 uint32_t idx;
1130
1131 pa_source_assert_ref(s);
1132 pa_assert_ctl_context();
1133 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1134 pa_assert(pa_source_flat_volume_enabled(s));
1135
1136 /* This is called whenever the source volume changes in a way that is
1137 * not caused by a source output volume change. We need to fix up the
1138 * source output volumes accordingly. */
1139
1140 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1141 pa_cvolume old_volume;
1142
1143 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1144 propagate_reference_volume(o->destination_source);
1145
1146 /* Since the origin source uses volume sharing, this output's volume
1147 * needs to be updated to match the root source's real volume, but
1148 * that will be done later in update_shared_real_volume(). */
1149 continue;
1150 }
1151
1152 old_volume = o->volume;
1153
1154 /* This basically calculates:
1155 *
1156 * o->volume := o->reference_volume * o->reference_ratio */
1157
1158 o->volume = s->reference_volume;
1159 pa_cvolume_remap(&o->volume, &s->channel_map, &o->channel_map);
1160 pa_sw_cvolume_multiply(&o->volume, &o->volume, &o->reference_ratio);
1161
1162 /* The volume changed, let's tell people so */
1163 if (!pa_cvolume_equal(&old_volume, &o->volume)) {
1164
1165 if (o->volume_changed)
1166 o->volume_changed(o);
1167
1168 pa_subscription_post(o->core, PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT|PA_SUBSCRIPTION_EVENT_CHANGE, o->index);
1169 }
1170 }
1171 }
1172
1173 /* Called from main thread. Only called for the root source in volume sharing
1174 * cases, except for internal recursive calls. The return value indicates
1175 * whether any reference volume actually changed. */
1176 static pa_bool_t update_reference_volume(pa_source *s, const pa_cvolume *v, const pa_channel_map *channel_map, pa_bool_t save) {
1177 pa_cvolume volume;
1178 pa_bool_t reference_volume_changed;
1179 pa_source_output *o;
1180 uint32_t idx;
1181
1182 pa_source_assert_ref(s);
1183 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1184 pa_assert(v);
1185 pa_assert(channel_map);
1186 pa_assert(pa_cvolume_valid(v));
1187
1188 volume = *v;
1189 pa_cvolume_remap(&volume, channel_map, &s->channel_map);
1190
1191 reference_volume_changed = !pa_cvolume_equal(&volume, &s->reference_volume);
1192 s->reference_volume = volume;
1193
1194 s->save_volume = (!reference_volume_changed && s->save_volume) || save;
1195
1196 if (reference_volume_changed)
1197 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1198 else if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1199 /* If the root source's volume doesn't change, then there can't be any
1200 * changes in the other sources in the source tree either.
1201 *
1202 * It's probably theoretically possible that even if the root source's
1203 * volume changes slightly, some filter source doesn't change its volume
1204 * due to rounding errors. If that happens, we still want to propagate
1205 * the changed root source volume to the sources connected to the
1206 * intermediate source that didn't change its volume. This theoretical
1207 * possibility is the reason why we have that !(s->flags &
1208 * PA_SOURCE_SHARE_VOLUME_WITH_MASTER) condition. Probably nobody would
1209 * notice even if we always returned FALSE here when
1210 * reference_volume_changed is FALSE. */
1211 return FALSE;
1212
1213 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1214 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1215 update_reference_volume(o->destination_source, v, channel_map, FALSE);
1216 }
1217
1218 return TRUE;
1219 }
1220
1221 /* Called from main thread */
1222 void pa_source_set_volume(
1223 pa_source *s,
1224 const pa_cvolume *volume,
1225 pa_bool_t send_msg,
1226 pa_bool_t save) {
1227
1228 pa_cvolume new_reference_volume;
1229 pa_source *root_source = s;
1230
1231 pa_source_assert_ref(s);
1232 pa_assert_ctl_context();
1233 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1234 pa_assert(!volume || pa_cvolume_valid(volume));
1235 pa_assert(volume || pa_source_flat_volume_enabled(s));
1236 pa_assert(!volume || volume->channels == 1 || pa_cvolume_compatible(volume, &s->sample_spec));
1237
1238 /* make sure we don't change the volume when a PASSTHROUGH output is connected */
1239 if (pa_source_is_passthrough(s)) {
1240 /* FIXME: Need to notify client that volume control is disabled */
1241 pa_log_warn("Cannot change volume, Source is monitor of a PASSTHROUGH sink");
1242 return;
1243 }
1244
1245 /* In case of volume sharing, the volume is set for the root source first,
1246 * from which it's then propagated to the sharing sources. */
1247 while (root_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)
1248 root_source = root_source->output_from_master->source;
1249
1250 /* As a special exception we accept mono volumes on all sources --
1251 * even on those with more complex channel maps */
1252
1253 if (volume) {
1254 if (pa_cvolume_compatible(volume, &s->sample_spec))
1255 new_reference_volume = *volume;
1256 else {
1257 new_reference_volume = s->reference_volume;
1258 pa_cvolume_scale(&new_reference_volume, pa_cvolume_max(volume));
1259 }
1260
1261 pa_cvolume_remap(&new_reference_volume, &s->channel_map, &root_source->channel_map);
1262 }
1263
1264 /* If volume is NULL we synchronize the source's real and reference
1265 * volumes with the stream volumes. If it is not NULL we update
1266 * the reference_volume with it. */
1267
1268 if (volume) {
1269 if (update_reference_volume(root_source, &new_reference_volume, &root_source->channel_map, save)) {
1270 if (pa_source_flat_volume_enabled(root_source)) {
1271 /* OK, propagate this volume change back to the outputs */
1272 propagate_reference_volume(root_source);
1273
1274 /* And now recalculate the real volume */
1275 compute_real_volume(root_source);
1276 } else
1277 update_real_volume(root_source, &root_source->reference_volume, &root_source->channel_map);
1278 }
1279
1280 } else {
1281 pa_assert(pa_source_flat_volume_enabled(root_source));
1282
1283 /* Ok, let's determine the new real volume */
1284 compute_real_volume(root_source);
1285
1286 /* Let's 'push' the reference volume if necessary */
1287 pa_cvolume_merge(&new_reference_volume, &s->reference_volume, &root_source->real_volume);
1288 update_reference_volume(root_source, &new_reference_volume, &root_source->channel_map, save);
1289
1290 /* Now that the reference volume is updated, we can update the streams'
1291 * reference ratios. */
1292 compute_reference_ratios(root_source);
1293 }
1294
1295 if (root_source->set_volume) {
1296 /* If we have a function set_volume(), then we do not apply a
1297 * soft volume by default. However, set_volume() is free to
1298 * apply one to root_source->soft_volume */
1299
1300 pa_cvolume_reset(&root_source->soft_volume, root_source->sample_spec.channels);
1301 if (!(root_source->flags & PA_SOURCE_SYNC_VOLUME))
1302 root_source->set_volume(root_source);
1303
1304 } else
1305 /* If we have no function set_volume(), then the soft volume
1306 * becomes the real volume */
1307 root_source->soft_volume = root_source->real_volume;
1308
1309 /* This tells the source that soft volume and/or real volume changed */
1310 if (send_msg)
1311 pa_assert_se(pa_asyncmsgq_send(root_source->asyncmsgq, PA_MSGOBJECT(root_source), PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL) == 0);
1312 }
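/* Editor annotation, not part of the upstream file: the overall order of
 * operations in pa_source_set_volume() is: resolve the root source of a
 * volume-sharing chain, update the reference volume (recursing into
 * filter sources), recompute the real volume and per-output ratios when
 * flat volume is in effect, then either call set_volume() (immediately,
 * or deferred to the IO thread when PA_SOURCE_SYNC_VOLUME is set) or fall
 * back to a pure soft volume, and finally send
 * PA_SOURCE_MESSAGE_SET_SHARED_VOLUME so the thread_info copies get
 * synchronized. */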
1313
1314 /* Called from the IO thread if sync volume is used, otherwise from the main thread.
1315 * Only to be called by the source implementor. */
1316 void pa_source_set_soft_volume(pa_source *s, const pa_cvolume *volume) {
1317
1318 pa_source_assert_ref(s);
1319 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1320
1321 if (s->flags & PA_SOURCE_SYNC_VOLUME)
1322 pa_source_assert_io_context(s);
1323 else
1324 pa_assert_ctl_context();
1325
1326 if (!volume)
1327 pa_cvolume_reset(&s->soft_volume, s->sample_spec.channels);
1328 else
1329 s->soft_volume = *volume;
1330
1331 if (PA_SOURCE_IS_LINKED(s->state) && !(s->flags & PA_SOURCE_SYNC_VOLUME))
1332 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_VOLUME, NULL, 0, NULL) == 0);
1333 else
1334 s->thread_info.soft_volume = s->soft_volume;
1335 }
1336
1337 /* Called from the main thread. Only called for the root source in volume sharing
1338 * cases, except for internal recursive calls. */
1339 static void propagate_real_volume(pa_source *s, const pa_cvolume *old_real_volume) {
1340 pa_source_output *o;
1341 uint32_t idx;
1342
1343 pa_source_assert_ref(s);
1344 pa_assert(old_real_volume);
1345 pa_assert_ctl_context();
1346 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1347
1348 /* This is called when the hardware's real volume changes due to
1349 * some external event. We copy the real volume into our
1350 * reference volume and then rebuild the stream volumes based on
1351 * o->real_ratio which should stay fixed. */
1352
1353 if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)) {
1354 if (pa_cvolume_equal(old_real_volume, &s->real_volume))
1355 return;
1356
1357 /* 1. Make the real volume the reference volume */
1358 update_reference_volume(s, &s->real_volume, &s->channel_map, TRUE);
1359 }
1360
1361 if (pa_source_flat_volume_enabled(s)) {
1362
1363 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1364 pa_cvolume old_volume = o->volume;
1365
1366 /* 2. Since the source's reference and real volumes are equal
1367 * now our ratios should be too. */
1368 o->reference_ratio = o->real_ratio;
1369
1370 /* 3. Recalculate the new stream reference volume based on the
1371 * reference ratio and the source's reference volume.
1372 *
1373 * This basically calculates:
1374 *
1375 * o->volume = s->reference_volume * o->reference_ratio
1376 *
1377 * This is identical to propagate_reference_volume() */
1378 o->volume = s->reference_volume;
1379 pa_cvolume_remap(&o->volume, &s->channel_map, &o->channel_map);
1380 pa_sw_cvolume_multiply(&o->volume, &o->volume, &o->reference_ratio);
1381
1382 /* Notify if something changed */
1383 if (!pa_cvolume_equal(&old_volume, &o->volume)) {
1384
1385 if (o->volume_changed)
1386 o->volume_changed(o);
1387
1388 pa_subscription_post(o->core, PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT|PA_SUBSCRIPTION_EVENT_CHANGE, o->index);
1389 }
1390
1391 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1392 propagate_real_volume(o->destination_source, old_real_volume);
1393 }
1394 }
1395
1396 /* Something got changed in the hardware. It probably makes sense
1397 * to save changed hw settings given that hw volume changes not
1398 * triggered by PA are almost certainly done by the user. */
1399 if (!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1400 s->save_volume = TRUE;
1401 }
1402
1403 /* Called from io thread */
1404 void pa_source_update_volume_and_mute(pa_source *s) {
1405 pa_assert(s);
1406 pa_source_assert_io_context(s);
1407
1408 pa_asyncmsgq_post(pa_thread_mq_get()->outq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_UPDATE_VOLUME_AND_MUTE, NULL, 0, NULL, NULL);
1409 }
1410
1411 /* Called from main thread */
1412 const pa_cvolume *pa_source_get_volume(pa_source *s, pa_bool_t force_refresh) {
1413 pa_source_assert_ref(s);
1414 pa_assert_ctl_context();
1415 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1416
1417 if (s->refresh_volume || force_refresh) {
1418 struct pa_cvolume old_real_volume;
1419
1420 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1421
1422 old_real_volume = s->real_volume;
1423
1424 if (!(s->flags & PA_SOURCE_SYNC_VOLUME) && s->get_volume)
1425 s->get_volume(s);
1426
1427 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_VOLUME, NULL, 0, NULL) == 0);
1428
1429 update_real_volume(s, &s->real_volume, &s->channel_map);
1430 propagate_real_volume(s, &old_real_volume);
1431 }
1432
1433 return &s->reference_volume;
1434 }
1435
1436 /* Called from main thread. In volume sharing cases, only the root source may
1437 * call this. */
1438 void pa_source_volume_changed(pa_source *s, const pa_cvolume *new_real_volume) {
1439 pa_cvolume old_real_volume;
1440
1441 pa_source_assert_ref(s);
1442 pa_assert_ctl_context();
1443 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1444 pa_assert(!(s->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER));
1445
1446 /* The source implementor may call this if the volume changed to make sure everyone is notified */
1447
1448 old_real_volume = s->real_volume;
1449 update_real_volume(s, new_real_volume, &s->channel_map);
1450 propagate_real_volume(s, &old_real_volume);
1451 }
1452
1453 /* Called from main thread */
1454 void pa_source_set_mute(pa_source *s, pa_bool_t mute, pa_bool_t save) {
1455 pa_bool_t old_muted;
1456
1457 pa_source_assert_ref(s);
1458 pa_assert_ctl_context();
1459 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1460
1461 old_muted = s->muted;
1462 s->muted = mute;
1463 s->save_muted = (old_muted == s->muted && s->save_muted) || save;
1464
1465 if (!(s->flags & PA_SOURCE_SYNC_VOLUME) && s->set_mute)
1466 s->set_mute(s);
1467
1468 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MUTE, NULL, 0, NULL) == 0);
1469
1470 if (old_muted != s->muted)
1471 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1472 }
1473
1474 /* Called from main thread */
1475 pa_bool_t pa_source_get_mute(pa_source *s, pa_bool_t force_refresh) {
1476
1477 pa_source_assert_ref(s);
1478 pa_assert_ctl_context();
1479 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1480
1481 if (s->refresh_muted || force_refresh) {
1482 pa_bool_t old_muted = s->muted;
1483
1484 if (!(s->flags & PA_SOURCE_SYNC_VOLUME) && s->get_mute)
1485 s->get_mute(s);
1486
1487 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_MUTE, NULL, 0, NULL) == 0);
1488
1489 if (old_muted != s->muted) {
1490 s->save_muted = TRUE;
1491
1492 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1493
1494 /* Make sure the soft mute status stays in sync */
1495 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MUTE, NULL, 0, NULL) == 0);
1496 }
1497 }
1498
1499 return s->muted;
1500 }
1501
1502 /* Called from main thread */
1503 void pa_source_mute_changed(pa_source *s, pa_bool_t new_muted) {
1504 pa_source_assert_ref(s);
1505 pa_assert_ctl_context();
1506 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1507
1508 /* The source implementor may call this if the mute state changed to make sure everyone is notified */
1509
1510 if (s->muted == new_muted)
1511 return;
1512
1513 s->muted = new_muted;
1514 s->save_muted = TRUE;
1515
1516 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1517 }
1518
1519 /* Called from main thread */
1520 pa_bool_t pa_source_update_proplist(pa_source *s, pa_update_mode_t mode, pa_proplist *p) {
1521 pa_source_assert_ref(s);
1522 pa_assert_ctl_context();
1523
1524 if (p)
1525 pa_proplist_update(s->proplist, mode, p);
1526
1527 if (PA_SOURCE_IS_LINKED(s->state)) {
1528 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PROPLIST_CHANGED], s);
1529 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1530 }
1531
1532 return TRUE;
1533 }
1534
1535 /* Called from main thread */
1536 /* FIXME -- this should be dropped and be merged into pa_source_update_proplist() */
1537 void pa_source_set_description(pa_source *s, const char *description) {
1538 const char *old;
1539 pa_source_assert_ref(s);
1540 pa_assert_ctl_context();
1541
1542 if (!description && !pa_proplist_contains(s->proplist, PA_PROP_DEVICE_DESCRIPTION))
1543 return;
1544
1545 old = pa_proplist_gets(s->proplist, PA_PROP_DEVICE_DESCRIPTION);
1546
1547 if (old && description && pa_streq(old, description))
1548 return;
1549
1550 if (description)
1551 pa_proplist_sets(s->proplist, PA_PROP_DEVICE_DESCRIPTION, description);
1552 else
1553 pa_proplist_unset(s->proplist, PA_PROP_DEVICE_DESCRIPTION);
1554
1555 if (PA_SOURCE_IS_LINKED(s->state)) {
1556 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
1557 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PROPLIST_CHANGED], s);
1558 }
1559 }
1560
1561 /* Called from main thread */
1562 unsigned pa_source_linked_by(pa_source *s) {
1563 pa_source_assert_ref(s);
1564 pa_assert_ctl_context();
1565 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1566
1567 return pa_idxset_size(s->outputs);
1568 }
1569
1570 /* Called from main thread */
1571 unsigned pa_source_used_by(pa_source *s) {
1572 unsigned ret;
1573
1574 pa_source_assert_ref(s);
1575 pa_assert_ctl_context();
1576 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1577
1578 ret = pa_idxset_size(s->outputs);
1579 pa_assert(ret >= s->n_corked);
1580
1581 return ret - s->n_corked;
1582 }
1583
1584 /* Called from main thread */
1585 unsigned pa_source_check_suspend(pa_source *s) {
1586 unsigned ret;
1587 pa_source_output *o;
1588 uint32_t idx;
1589
1590 pa_source_assert_ref(s);
1591 pa_assert_ctl_context();
1592
1593 if (!PA_SOURCE_IS_LINKED(s->state))
1594 return 0;
1595
1596 ret = 0;
1597
1598 PA_IDXSET_FOREACH(o, s->outputs, idx) {
1599 pa_source_output_state_t st;
1600
1601 st = pa_source_output_get_state(o);
1602
1603 /* We do not assert here. It is perfectly valid for a source output to
1604 * be in the INIT state (i.e. created, marked done but not yet put)
1605 * and we should not care if it's unlinked as it won't contribute
1606 * towards our busy status.
1607 */
1608 if (!PA_SOURCE_OUTPUT_IS_LINKED(st))
1609 continue;
1610
1611 if (st == PA_SOURCE_OUTPUT_CORKED)
1612 continue;
1613
1614 if (o->flags & PA_SOURCE_OUTPUT_DONT_INHIBIT_AUTO_SUSPEND)
1615 continue;
1616
1617 ret ++;
1618 }
1619
1620 return ret;
1621 }
1622
1623 /* Called from the IO thread */
1624 static void sync_output_volumes_within_thread(pa_source *s) {
1625 pa_source_output *o;
1626 void *state = NULL;
1627
1628 pa_source_assert_ref(s);
1629 pa_source_assert_io_context(s);
1630
1631 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
1632 if (pa_cvolume_equal(&o->thread_info.soft_volume, &o->soft_volume))
1633 continue;
1634
1635 o->thread_info.soft_volume = o->soft_volume;
1636 //pa_source_output_request_rewind(o, 0, TRUE, FALSE, FALSE);
1637 }
1638 }
1639
1640 /* Called from the IO thread. Only called for the root source in volume sharing
1641 * cases, except for internal recursive calls. */
1642 static void set_shared_volume_within_thread(pa_source *s) {
1643 pa_source_output *o;
1644 void *state = NULL;
1645
1646 pa_source_assert_ref(s);
1647
1648 PA_MSGOBJECT(s)->process_msg(PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_VOLUME_SYNCED, NULL, 0, NULL);
1649
1650 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state) {
1651 if (o->destination_source && (o->destination_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER))
1652 set_shared_volume_within_thread(o->destination_source);
1653 }
1654 }
1655
1656 /* Called from IO thread, except when it is not */
1657 int pa_source_process_msg(pa_msgobject *object, int code, void *userdata, int64_t offset, pa_memchunk *chunk) {
1658 pa_source *s = PA_SOURCE(object);
1659 pa_source_assert_ref(s);
1660
1661 switch ((pa_source_message_t) code) {
1662
1663 case PA_SOURCE_MESSAGE_ADD_OUTPUT: {
1664 pa_source_output *o = PA_SOURCE_OUTPUT(userdata);
1665
1666 pa_hashmap_put(s->thread_info.outputs, PA_UINT32_TO_PTR(o->index), pa_source_output_ref(o));
1667
1668 if (o->direct_on_input) {
1669 o->thread_info.direct_on_input = o->direct_on_input;
1670 pa_hashmap_put(o->thread_info.direct_on_input->thread_info.direct_outputs, PA_UINT32_TO_PTR(o->index), o);
1671 }
1672
1673 pa_assert(!o->thread_info.attached);
1674 o->thread_info.attached = TRUE;
1675
1676 if (o->attach)
1677 o->attach(o);
1678
1679 pa_source_output_set_state_within_thread(o, o->state);
1680
1681 if (o->thread_info.requested_source_latency != (pa_usec_t) -1)
1682 pa_source_output_set_requested_latency_within_thread(o, o->thread_info.requested_source_latency);
1683
1684 pa_source_output_update_max_rewind(o, s->thread_info.max_rewind);
1685
1686 /* We don't just invalidate the requested latency here,
1687 * because if we are in a move we might need to fix up the
1688 * requested latency. */
1689 pa_source_output_set_requested_latency_within_thread(o, o->thread_info.requested_source_latency);
1690
1691 /* In flat volume mode we need to update the volume as
1692 * well */
1693 return object->process_msg(object, PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL);
1694 }
1695
1696 case PA_SOURCE_MESSAGE_REMOVE_OUTPUT: {
1697 pa_source_output *o = PA_SOURCE_OUTPUT(userdata);
1698
1699 pa_source_output_set_state_within_thread(o, o->state);
1700
1701 if (o->detach)
1702 o->detach(o);
1703
1704 pa_assert(o->thread_info.attached);
1705 o->thread_info.attached = FALSE;
1706
1707 if (o->thread_info.direct_on_input) {
1708 pa_hashmap_remove(o->thread_info.direct_on_input->thread_info.direct_outputs, PA_UINT32_TO_PTR(o->index));
1709 o->thread_info.direct_on_input = NULL;
1710 }
1711
1712 if (pa_hashmap_remove(s->thread_info.outputs, PA_UINT32_TO_PTR(o->index)))
1713 pa_source_output_unref(o);
1714
1715 pa_source_invalidate_requested_latency(s, TRUE);
1716
1717 /* In flat volume mode we need to update the volume as
1718 * well */
1719 return object->process_msg(object, PA_SOURCE_MESSAGE_SET_SHARED_VOLUME, NULL, 0, NULL);
1720 }
1721
1722 case PA_SOURCE_MESSAGE_SET_SHARED_VOLUME: {
1723 pa_source *root_source = s;
1724
1725 while (root_source->flags & PA_SOURCE_SHARE_VOLUME_WITH_MASTER)
1726 root_source = root_source->output_from_master->source;
1727
1728 set_shared_volume_within_thread(root_source);
1729 return 0;
1730 }
1731
1732 case PA_SOURCE_MESSAGE_SET_VOLUME_SYNCED:
1733
1734 if (s->flags & PA_SOURCE_SYNC_VOLUME) {
1735 s->set_volume(s);
1736 pa_source_volume_change_push(s);
1737 }
1738 /* Fall through ... */
1739
1740 case PA_SOURCE_MESSAGE_SET_VOLUME:
1741
1742 if (!pa_cvolume_equal(&s->thread_info.soft_volume, &s->soft_volume)) {
1743 s->thread_info.soft_volume = s->soft_volume;
1744 }
1745
1746 /* Fall through ... */
1747
1748 case PA_SOURCE_MESSAGE_SYNC_VOLUMES:
1749 sync_output_volumes_within_thread(s);
1750 return 0;
1751
1752 case PA_SOURCE_MESSAGE_GET_VOLUME:
1753
1754 if ((s->flags & PA_SOURCE_SYNC_VOLUME) && s->get_volume) {
1755 s->get_volume(s);
1756 pa_source_volume_change_flush(s);
1757 pa_sw_cvolume_divide(&s->thread_info.current_hw_volume, &s->real_volume, &s->soft_volume);
1758 }
1759
1760 /* In case the source implementor reset the SW volume. */
1761 if (!pa_cvolume_equal(&s->thread_info.soft_volume, &s->soft_volume)) {
1762 s->thread_info.soft_volume = s->soft_volume;
1763 }
1764
1765 return 0;
1766
1767 case PA_SOURCE_MESSAGE_SET_MUTE:
1768
1769 if (s->thread_info.soft_muted != s->muted) {
1770 s->thread_info.soft_muted = s->muted;
1771 }
1772
1773 if (s->flags & PA_SOURCE_SYNC_VOLUME && s->set_mute)
1774 s->set_mute(s);
1775
1776 return 0;
1777
1778 case PA_SOURCE_MESSAGE_GET_MUTE:
1779
1780 if (s->flags & PA_SOURCE_SYNC_VOLUME && s->get_mute)
1781 s->get_mute(s);
1782
1783 return 0;
1784
1785 case PA_SOURCE_MESSAGE_SET_STATE: {
1786
1787 pa_bool_t suspend_change =
1788 (s->thread_info.state == PA_SOURCE_SUSPENDED && PA_SOURCE_IS_OPENED(PA_PTR_TO_UINT(userdata))) ||
1789 (PA_SOURCE_IS_OPENED(s->thread_info.state) && PA_PTR_TO_UINT(userdata) == PA_SOURCE_SUSPENDED);
1790
1791 s->thread_info.state = PA_PTR_TO_UINT(userdata);
1792
1793 if (suspend_change) {
1794 pa_source_output *o;
1795 void *state = NULL;
1796
1797 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL)))
1798 if (o->suspend_within_thread)
1799 o->suspend_within_thread(o, s->thread_info.state == PA_SOURCE_SUSPENDED);
1800 }
1801
1802 return 0;
1803 }
1804
1805 case PA_SOURCE_MESSAGE_DETACH:
1806
1807 /* Detach all streams */
1808 pa_source_detach_within_thread(s);
1809 return 0;
1810
1811 case PA_SOURCE_MESSAGE_ATTACH:
1812
1813 /* Reattach all streams */
1814 pa_source_attach_within_thread(s);
1815 return 0;
1816
1817 case PA_SOURCE_MESSAGE_GET_REQUESTED_LATENCY: {
1818
1819 pa_usec_t *usec = userdata;
1820 *usec = pa_source_get_requested_latency_within_thread(s);
1821
1822 /* Yes, that's right: the IO thread will see -1 when no
1823 * explicit requested latency is configured, while the main
1824 * thread will see max_latency */
1825 if (*usec == (pa_usec_t) -1)
1826 *usec = s->thread_info.max_latency;
1827
1828 return 0;
1829 }
1830
1831 case PA_SOURCE_MESSAGE_SET_LATENCY_RANGE: {
1832 pa_usec_t *r = userdata;
1833
1834 pa_source_set_latency_range_within_thread(s, r[0], r[1]);
1835
1836 return 0;
1837 }
1838
1839 case PA_SOURCE_MESSAGE_GET_LATENCY_RANGE: {
1840 pa_usec_t *r = userdata;
1841
1842 r[0] = s->thread_info.min_latency;
1843 r[1] = s->thread_info.max_latency;
1844
1845 return 0;
1846 }
1847
1848 case PA_SOURCE_MESSAGE_GET_FIXED_LATENCY:
1849
1850 *((pa_usec_t*) userdata) = s->thread_info.fixed_latency;
1851 return 0;
1852
1853 case PA_SOURCE_MESSAGE_SET_FIXED_LATENCY:
1854
1855 pa_source_set_fixed_latency_within_thread(s, (pa_usec_t) offset);
1856 return 0;
1857
1858 case PA_SOURCE_MESSAGE_GET_MAX_REWIND:
1859
1860 *((size_t*) userdata) = s->thread_info.max_rewind;
1861 return 0;
1862
1863 case PA_SOURCE_MESSAGE_SET_MAX_REWIND:
1864
1865 pa_source_set_max_rewind_within_thread(s, (size_t) offset);
1866 return 0;
1867
1868 case PA_SOURCE_MESSAGE_GET_LATENCY:
1869
1870 if (s->monitor_of) {
1871 *((pa_usec_t*) userdata) = 0;
1872 return 0;
1873 }
1874
1875 /* Implementors need to override this implementation! */
1876 return -1;
1877
1878 case PA_SOURCE_MESSAGE_SET_PORT:
1879
1880 pa_assert(userdata);
1881 if (s->set_port) {
1882 struct source_message_set_port *msg_data = userdata;
1883 msg_data->ret = s->set_port(s, msg_data->port);
1884 }
1885 return 0;
1886
1887 case PA_SOURCE_MESSAGE_UPDATE_VOLUME_AND_MUTE:
1888 /* This message is sent from the IO thread and handled in the main thread. */
1889 pa_assert_ctl_context();
1890
1891 pa_source_get_volume(s, TRUE);
1892 pa_source_get_mute(s, TRUE);
1893 return 0;
1894
1895 case PA_SOURCE_MESSAGE_MAX:
1896 ;
1897 }
1898
1899 return -1;
1900 }
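
/* Illustrative sketch, not part of this file: driver modules typically
 * install their own process_msg() handler that answers
 * PA_SOURCE_MESSAGE_GET_LATENCY itself and delegates every other message to
 * pa_source_process_msg(). The function name and the latency it reports are
 * assumptions made for the example only. */
static int example_driver_process_msg(pa_msgobject *o, int code, void *data, int64_t offset, pa_memchunk *chunk) {
    pa_source *s = PA_SOURCE(o);

    switch (code) {

        case PA_SOURCE_MESSAGE_GET_LATENCY:
            /* A real driver reports how much captured audio sits in its
             * buffer but has not yet been posted; pretending the fixed
             * latency is pending keeps the sketch self-contained. */
            *((pa_usec_t*) data) = s->thread_info.fixed_latency;
            return 0;
    }

    /* Everything else is handled by the generic implementation above. */
    return pa_source_process_msg(o, code, data, offset, chunk);
}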
1901
1902 /* Called from main thread */
1903 int pa_source_suspend_all(pa_core *c, pa_bool_t suspend, pa_suspend_cause_t cause) {
1904 pa_source *source;
1905 uint32_t idx;
1906 int ret = 0;
1907
1908 pa_core_assert_ref(c);
1909 pa_assert_ctl_context();
1910 pa_assert(cause != 0);
1911
1912 for (source = PA_SOURCE(pa_idxset_first(c->sources, &idx)); source; source = PA_SOURCE(pa_idxset_next(c->sources, &idx))) {
1913 int r;
1914
1915 if (source->monitor_of)
1916 continue;
1917
1918 if ((r = pa_source_suspend(source, suspend, cause)) < 0)
1919 ret = r;
1920 }
1921
1922 return ret;
1923 }
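
/* Illustrative sketch, not part of this file: a policy module could park all
 * real (non-monitor) sources before system sleep and resume them afterwards.
 * The function name and the choice of PA_SUSPEND_USER as the cause are
 * assumptions for the example. */
static void example_suspend_for_sleep(pa_core *c, pa_bool_t sleeping) {
    /* Monitor sources are skipped by pa_source_suspend_all() itself. */
    if (pa_source_suspend_all(c, sleeping, PA_SUSPEND_USER) < 0)
        pa_log_warn("Some sources failed to change their suspend state.");
}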
1924
1925 /* Called from main thread */
1926 void pa_source_detach(pa_source *s) {
1927 pa_source_assert_ref(s);
1928 pa_assert_ctl_context();
1929 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1930
1931 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_DETACH, NULL, 0, NULL) == 0);
1932 }
1933
1934 /* Called from main thread */
1935 void pa_source_attach(pa_source *s) {
1936 pa_source_assert_ref(s);
1937 pa_assert_ctl_context();
1938 pa_assert(PA_SOURCE_IS_LINKED(s->state));
1939
1940 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_ATTACH, NULL, 0, NULL) == 0);
1941 }
1942
1943 /* Called from IO thread */
1944 void pa_source_detach_within_thread(pa_source *s) {
1945 pa_source_output *o;
1946 void *state = NULL;
1947
1948 pa_source_assert_ref(s);
1949 pa_source_assert_io_context(s);
1950 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
1951
1952 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
1953 if (o->detach)
1954 o->detach(o);
1955 }
1956
1957 /* Called from IO thread */
1958 void pa_source_attach_within_thread(pa_source *s) {
1959 pa_source_output *o;
1960 void *state = NULL;
1961
1962 pa_source_assert_ref(s);
1963 pa_source_assert_io_context(s);
1964 pa_assert(PA_SOURCE_IS_LINKED(s->thread_info.state));
1965
1966 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
1967 if (o->attach)
1968 o->attach(o);
1969 }
1970
1971 /* Called from IO thread */
1972 pa_usec_t pa_source_get_requested_latency_within_thread(pa_source *s) {
1973 pa_usec_t result = (pa_usec_t) -1;
1974 pa_source_output *o;
1975 void *state = NULL;
1976
1977 pa_source_assert_ref(s);
1978 pa_source_assert_io_context(s);
1979
1980 if (!(s->flags & PA_SOURCE_DYNAMIC_LATENCY))
1981 return PA_CLAMP(s->thread_info.fixed_latency, s->thread_info.min_latency, s->thread_info.max_latency);
1982
1983 if (s->thread_info.requested_latency_valid)
1984 return s->thread_info.requested_latency;
1985
1986 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
1987 if (o->thread_info.requested_source_latency != (pa_usec_t) -1 &&
1988 (result == (pa_usec_t) -1 || result > o->thread_info.requested_source_latency))
1989 result = o->thread_info.requested_source_latency;
1990
1991 if (result != (pa_usec_t) -1)
1992 result = PA_CLAMP(result, s->thread_info.min_latency, s->thread_info.max_latency);
1993
1994 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
1995 /* Only cache this if we are fully set up */
1996 s->thread_info.requested_latency = result;
1997 s->thread_info.requested_latency_valid = TRUE;
1998 }
1999
2000 return result;
2001 }
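
/* Illustrative sketch, not part of this file: from its IO loop a driver can
 * translate the currently requested latency into a read-block size. The
 * function name and the "wake up twice per latency period" heuristic are
 * assumptions for the example. */
static size_t example_read_block_size(pa_source *s) {
    pa_usec_t latency;

    latency = pa_source_get_requested_latency_within_thread(s);

    /* Mirror the GET_REQUESTED_LATENCY handling above: fall back to
     * max_latency when no output requested anything explicitly. */
    if (latency == (pa_usec_t) -1)
        latency = s->thread_info.max_latency;

    return pa_usec_to_bytes(latency / 2, &s->sample_spec);
}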
2002
2003 /* Called from main thread */
2004 pa_usec_t pa_source_get_requested_latency(pa_source *s) {
2005 pa_usec_t usec = 0;
2006
2007 pa_source_assert_ref(s);
2008 pa_assert_ctl_context();
2009 pa_assert(PA_SOURCE_IS_LINKED(s->state));
2010
2011 if (s->state == PA_SOURCE_SUSPENDED)
2012 return 0;
2013
2014 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_REQUESTED_LATENCY, &usec, 0, NULL) == 0);
2015
2016 return usec;
2017 }
2018
2019 /* Called from IO thread */
2020 void pa_source_set_max_rewind_within_thread(pa_source *s, size_t max_rewind) {
2021 pa_source_output *o;
2022 void *state = NULL;
2023
2024 pa_source_assert_ref(s);
2025 pa_source_assert_io_context(s);
2026
2027 if (max_rewind == s->thread_info.max_rewind)
2028 return;
2029
2030 s->thread_info.max_rewind = max_rewind;
2031
2032 if (PA_SOURCE_IS_LINKED(s->thread_info.state))
2033 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2034 pa_source_output_update_max_rewind(o, s->thread_info.max_rewind);
2035 }
2036
2037 /* Called from main thread */
2038 void pa_source_set_max_rewind(pa_source *s, size_t max_rewind) {
2039 pa_source_assert_ref(s);
2040 pa_assert_ctl_context();
2041
2042 if (PA_SOURCE_IS_LINKED(s->state))
2043 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_MAX_REWIND, NULL, max_rewind, NULL) == 0);
2044 else
2045 pa_source_set_max_rewind_within_thread(s, max_rewind);
2046 }
2047
2048 /* Called from IO thread */
2049 void pa_source_invalidate_requested_latency(pa_source *s, pa_bool_t dynamic) {
2050 pa_source_output *o;
2051 void *state = NULL;
2052
2053 pa_source_assert_ref(s);
2054 pa_source_assert_io_context(s);
2055
2056 if ((s->flags & PA_SOURCE_DYNAMIC_LATENCY))
2057 s->thread_info.requested_latency_valid = FALSE;
2058 else if (dynamic)
2059 return;
2060
2061 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2062
2063 if (s->update_requested_latency)
2064 s->update_requested_latency(s);
2065
2066 while ((o = pa_hashmap_iterate(s->thread_info.outputs, &state, NULL)))
2067 if (o->update_source_requested_latency)
2068 o->update_source_requested_latency(o);
2069 }
2070
2071 if (s->monitor_of)
2072 pa_sink_invalidate_requested_latency(s->monitor_of, dynamic);
2073 }
2074
2075 /* Called from main thread */
2076 void pa_source_set_latency_range(pa_source *s, pa_usec_t min_latency, pa_usec_t max_latency) {
2077 pa_source_assert_ref(s);
2078 pa_assert_ctl_context();
2079
2080 /* min_latency == 0: no limit
2081 * min_latency anything else: specified limit
2082 *
2083 * Similar for max_latency */
2084
2085 if (min_latency < ABSOLUTE_MIN_LATENCY)
2086 min_latency = ABSOLUTE_MIN_LATENCY;
2087
2088 if (max_latency <= 0 ||
2089 max_latency > ABSOLUTE_MAX_LATENCY)
2090 max_latency = ABSOLUTE_MAX_LATENCY;
2091
2092 pa_assert(min_latency <= max_latency);
2093
2094 /* Hmm, let's see if someone forgot to set PA_SOURCE_DYNAMIC_LATENCY here... */
2095 pa_assert((min_latency == ABSOLUTE_MIN_LATENCY &&
2096 max_latency == ABSOLUTE_MAX_LATENCY) ||
2097 (s->flags & PA_SOURCE_DYNAMIC_LATENCY));
2098
2099 if (PA_SOURCE_IS_LINKED(s->state)) {
2100 pa_usec_t r[2];
2101
2102 r[0] = min_latency;
2103 r[1] = max_latency;
2104
2105 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_LATENCY_RANGE, r, 0, NULL) == 0);
2106 } else
2107 pa_source_set_latency_range_within_thread(s, min_latency, max_latency);
2108 }
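
/* Illustrative sketch, not part of this file: a driver that advertises
 * PA_SOURCE_DYNAMIC_LATENCY would typically declare its supported range right
 * after creating the source and before pa_source_put(); the 1 ms / 2 s bounds
 * below are made-up values, not recommendations. */
static void example_declare_latency_range(pa_source *s) {
    pa_source_set_latency_range(s, 1 * PA_USEC_PER_MSEC, 2 * PA_USEC_PER_SEC);
}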
2109
2110 /* Called from main thread */
2111 void pa_source_get_latency_range(pa_source *s, pa_usec_t *min_latency, pa_usec_t *max_latency) {
2112 pa_source_assert_ref(s);
2113 pa_assert_ctl_context();
2114 pa_assert(min_latency);
2115 pa_assert(max_latency);
2116
2117 if (PA_SOURCE_IS_LINKED(s->state)) {
2118 pa_usec_t r[2] = { 0, 0 };
2119
2120 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_LATENCY_RANGE, r, 0, NULL) == 0);
2121
2122 *min_latency = r[0];
2123 *max_latency = r[1];
2124 } else {
2125 *min_latency = s->thread_info.min_latency;
2126 *max_latency = s->thread_info.max_latency;
2127 }
2128 }
2129
2130 /* Called from IO thread, and from main thread before pa_source_put() is called */
2131 void pa_source_set_latency_range_within_thread(pa_source *s, pa_usec_t min_latency, pa_usec_t max_latency) {
2132 pa_source_assert_ref(s);
2133 pa_source_assert_io_context(s);
2134
2135 pa_assert(min_latency >= ABSOLUTE_MIN_LATENCY);
2136 pa_assert(max_latency <= ABSOLUTE_MAX_LATENCY);
2137 pa_assert(min_latency <= max_latency);
2138
2139 /* Hmm, let's see if someone forgot to set PA_SOURCE_DYNAMIC_LATENCY here... */
2140 pa_assert((min_latency == ABSOLUTE_MIN_LATENCY &&
2141 max_latency == ABSOLUTE_MAX_LATENCY) ||
2142 (s->flags & PA_SOURCE_DYNAMIC_LATENCY) ||
2143 s->monitor_of);
2144
2145 if (s->thread_info.min_latency == min_latency &&
2146 s->thread_info.max_latency == max_latency)
2147 return;
2148
2149 s->thread_info.min_latency = min_latency;
2150 s->thread_info.max_latency = max_latency;
2151
2152 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2153 pa_source_output *o;
2154 void *state = NULL;
2155
2156 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2157 if (o->update_source_latency_range)
2158 o->update_source_latency_range(o);
2159 }
2160
2161 pa_source_invalidate_requested_latency(s, FALSE);
2162 }
2163
2164 /* Called from main thread, before the source is put */
2165 void pa_source_set_fixed_latency(pa_source *s, pa_usec_t latency) {
2166 pa_source_assert_ref(s);
2167 pa_assert_ctl_context();
2168
2169 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY) {
2170 pa_assert(latency == 0);
2171 return;
2172 }
2173
2174 if (latency < ABSOLUTE_MIN_LATENCY)
2175 latency = ABSOLUTE_MIN_LATENCY;
2176
2177 if (latency > ABSOLUTE_MAX_LATENCY)
2178 latency = ABSOLUTE_MAX_LATENCY;
2179
2180 if (PA_SOURCE_IS_LINKED(s->state))
2181 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_FIXED_LATENCY, NULL, (int64_t) latency, NULL) == 0);
2182 else
2183 s->thread_info.fixed_latency = latency;
2184 }
2185
2186 /* Called from main thread */
2187 pa_usec_t pa_source_get_fixed_latency(pa_source *s) {
2188 pa_usec_t latency;
2189
2190 pa_source_assert_ref(s);
2191 pa_assert_ctl_context();
2192
2193 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY)
2194 return 0;
2195
2196 if (PA_SOURCE_IS_LINKED(s->state))
2197 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_FIXED_LATENCY, &latency, 0, NULL) == 0);
2198 else
2199 latency = s->thread_info.fixed_latency;
2200
2201 return latency;
2202 }
2203
2204 /* Called from IO thread */
2205 void pa_source_set_fixed_latency_within_thread(pa_source *s, pa_usec_t latency) {
2206 pa_source_assert_ref(s);
2207 pa_source_assert_io_context(s);
2208
2209 if (s->flags & PA_SOURCE_DYNAMIC_LATENCY) {
2210 pa_assert(latency == 0);
2211 return;
2212 }
2213
2214 pa_assert(latency >= ABSOLUTE_MIN_LATENCY);
2215 pa_assert(latency <= ABSOLUTE_MAX_LATENCY);
2216
2217 if (s->thread_info.fixed_latency == latency)
2218 return;
2219
2220 s->thread_info.fixed_latency = latency;
2221
2222 if (PA_SOURCE_IS_LINKED(s->thread_info.state)) {
2223 pa_source_output *o;
2224 void *state = NULL;
2225
2226 PA_HASHMAP_FOREACH(o, s->thread_info.outputs, state)
2227 if (o->update_source_fixed_latency)
2228 o->update_source_fixed_latency(o);
2229 }
2230
2231 pa_source_invalidate_requested_latency(s, FALSE);
2232 }
2233
2234 /* Called from main thread */
2235 size_t pa_source_get_max_rewind(pa_source *s) {
2236 size_t r;
2237 pa_assert_ctl_context();
2238 pa_source_assert_ref(s);
2239
2240 if (!PA_SOURCE_IS_LINKED(s->state))
2241 return s->thread_info.max_rewind;
2242
2243 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_GET_MAX_REWIND, &r, 0, NULL) == 0);
2244
2245 return r;
2246 }
2247
2248 /* Called from main context */
2249 int pa_source_set_port(pa_source *s, const char *name, pa_bool_t save) {
2250 pa_device_port *port;
2251 int ret;
2252
2253 pa_source_assert_ref(s);
2254 pa_assert_ctl_context();
2255
2256 if (!s->set_port) {
2257 pa_log_debug("set_port() operation not implemented for source %u \"%s\"", s->index, s->name);
2258 return -PA_ERR_NOTIMPLEMENTED;
2259 }
2260
2261 if (!s->ports)
2262 return -PA_ERR_NOENTITY;
2263
2264 if (!(port = pa_hashmap_get(s->ports, name)))
2265 return -PA_ERR_NOENTITY;
2266
2267 if (s->active_port == port) {
2268 s->save_port = s->save_port || save;
2269 return 0;
2270 }
2271
2272 if (s->flags & PA_SOURCE_SYNC_VOLUME) {
2273 struct source_message_set_port msg = { .port = port, .ret = 0 };
2274 pa_assert_se(pa_asyncmsgq_send(s->asyncmsgq, PA_MSGOBJECT(s), PA_SOURCE_MESSAGE_SET_PORT, &msg, 0, NULL) == 0);
2275 ret = msg.ret;
2276 }
2277 else
2278 ret = s->set_port(s, port);
2279
2280 if (ret < 0)
2281 return -PA_ERR_NOENTITY;
2282
2283 pa_subscription_post(s->core, PA_SUBSCRIPTION_EVENT_SOURCE|PA_SUBSCRIPTION_EVENT_CHANGE, s->index);
2284
2285 pa_log_info("Changed port of source %u \"%s\" to %s", s->index, s->name, port->name);
2286
2287 s->active_port = port;
2288 s->save_port = save;
2289
2290 pa_hook_fire(&s->core->hooks[PA_CORE_HOOK_SOURCE_PORT_CHANGED], s);
2291
2292 return 0;
2293 }
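
/* Illustrative sketch, not part of this file: switching a source to another
 * port from the main thread. The function name and the port name
 * "analog-input-microphone" are assumptions; a real caller would pick one of
 * the entries in s->ports. */
static void example_switch_port(pa_source *s) {
    if (pa_source_set_port(s, "analog-input-microphone", FALSE) < 0)
        pa_log_debug("Port switch failed or port does not exist on source %s.", s->name);
}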
2294
2295 PA_STATIC_FLIST_DECLARE(pa_source_volume_change, 0, pa_xfree);
2296
2297 /* Called from the IO thread. */
2298 static pa_source_volume_change *pa_source_volume_change_new(pa_source *s) {
2299 pa_source_volume_change *c;
2300 if (!(c = pa_flist_pop(PA_STATIC_FLIST_GET(pa_source_volume_change))))
2301 c = pa_xnew(pa_source_volume_change, 1);
2302
2303 PA_LLIST_INIT(pa_source_volume_change, c);
2304 c->at = 0;
2305 pa_cvolume_reset(&c->hw_volume, s->sample_spec.channels);
2306 return c;
2307 }
2308
2309 /* Called from the IO thread. */
2310 static void pa_source_volume_change_free(pa_source_volume_change *c) {
2311 pa_assert(c);
2312 if (pa_flist_push(PA_STATIC_FLIST_GET(pa_source_volume_change), c) < 0)
2313 pa_xfree(c);
2314 }
2315
2316 /* Called from the IO thread. */
2317 void pa_source_volume_change_push(pa_source *s) {
2318 pa_source_volume_change *c = NULL;
2319 pa_source_volume_change *nc = NULL;
2320 uint32_t safety_margin = s->thread_info.volume_change_safety_margin;
2321
2322 const char *direction = NULL;
2323
2324 pa_assert(s);
2325 nc = pa_source_volume_change_new(s);
2326
2327 /* NOTE: There are already more different volumes in pa_source than I can remember.
2328 * Adding one more volume for HW would rid us of this, but I am trying
2329 * to survive with the ones we already have. */
2330 pa_sw_cvolume_divide(&nc->hw_volume, &s->real_volume, &s->soft_volume);
2331
2332 if (!s->thread_info.volume_changes && pa_cvolume_equal(&nc->hw_volume, &s->thread_info.current_hw_volume)) {
2333 pa_log_debug("Volume not changing");
2334 pa_source_volume_change_free(nc);
2335 return;
2336 }
2337
2338 nc->at = pa_source_get_latency_within_thread(s);
2339 nc->at += pa_rtclock_now() + s->thread_info.volume_change_extra_delay;
2340
2341 if (s->thread_info.volume_changes_tail) {
2342 for (c = s->thread_info.volume_changes_tail; c; c = c->prev) {
2343 /* If volume is going up let's do it a bit late. If it is going
2344 * down let's do it a bit early. */
2345 if (pa_cvolume_avg(&nc->hw_volume) > pa_cvolume_avg(&c->hw_volume)) {
2346 if (nc->at + safety_margin > c->at) {
2347 nc->at += safety_margin;
2348 direction = "up";
2349 break;
2350 }
2351 }
2352 else if (nc->at - safety_margin > c->at) {
2353 nc->at -= safety_margin;
2354 direction = "down";
2355 break;
2356 }
2357 }
2358 }
2359
2360 if (c == NULL) {
2361 if (pa_cvolume_avg(&nc->hw_volume) > pa_cvolume_avg(&s->thread_info.current_hw_volume)) {
2362 nc->at += safety_margin;
2363 direction = "up";
2364 } else {
2365 nc->at -= safety_margin;
2366 direction = "down";
2367 }
2368 PA_LLIST_PREPEND(pa_source_volume_change, s->thread_info.volume_changes, nc);
2369 }
2370 else {
2371 PA_LLIST_INSERT_AFTER(pa_source_volume_change, s->thread_info.volume_changes, c, nc);
2372 }
2373
2374 pa_log_debug("Volume going %s to %d at %llu", direction, pa_cvolume_avg(&nc->hw_volume), (long long unsigned) nc->at);
2375
2376 /* We can ignore volume events that came earlier but should happen later than this. */
2377 PA_LLIST_FOREACH(c, nc->next) {
2378 pa_log_debug("Volume change to %d at %llu was dropped", pa_cvolume_avg(&c->hw_volume), (long long unsigned) c->at);
2379 pa_source_volume_change_free(c);
2380 }
2381 nc->next = NULL;
2382 s->thread_info.volume_changes_tail = nc;
2383 }
2384
2385 /* Called from the IO thread. */
2386 static void pa_source_volume_change_flush(pa_source *s) {
2387 pa_source_volume_change *c = s->thread_info.volume_changes;
2388 pa_assert(s);
2389 s->thread_info.volume_changes = NULL;
2390 s->thread_info.volume_changes_tail = NULL;
2391 while (c) {
2392 pa_source_volume_change *next = c->next;
2393 pa_source_volume_change_free(c);
2394 c = next;
2395 }
2396 }
2397
2398 /* Called from the IO thread. */
2399 pa_bool_t pa_source_volume_change_apply(pa_source *s, pa_usec_t *usec_to_next) {
2400 pa_usec_t now = pa_rtclock_now();
2401 pa_bool_t ret = FALSE;
2402
2403 pa_assert(s);
2404 pa_assert(s->write_volume);
2405
2406 while (s->thread_info.volume_changes && now >= s->thread_info.volume_changes->at) {
2407 pa_source_volume_change *c = s->thread_info.volume_changes;
2408 PA_LLIST_REMOVE(pa_source_volume_change, s->thread_info.volume_changes, c);
2409 pa_log_debug("Volume change to %d at %llu was written %llu usec late",
2410 pa_cvolume_avg(&c->hw_volume), (long long unsigned) c->at, (long long unsigned) (now - c->at));
2411 ret = TRUE;
2412 s->thread_info.current_hw_volume = c->hw_volume;
2413 pa_source_volume_change_free(c);
2414 }
2415
2416 if (s->write_volume && ret)
2417 s->write_volume(s);
2418
2419 if (s->thread_info.volume_changes) {
2420 if (usec_to_next)
2421 *usec_to_next = s->thread_info.volume_changes->at - now;
2422 if (pa_log_ratelimit(PA_LOG_DEBUG))
2423 pa_log_debug("Next volume change in %lld usec", (long long) (s->thread_info.volume_changes->at - now));
2424 }
2425 else {
2426 if (usec_to_next)
2427 *usec_to_next = 0;
2428 s->thread_info.volume_changes_tail = NULL;
2429 }
2430 return ret;
2431 }
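
/* Illustrative sketch, not part of this file: a driver using deferred (timed)
 * hardware volume calls pa_source_volume_change_apply() from its IO loop and
 * uses the returned delay to schedule its next wakeup. The function name and
 * the logging are assumptions; a real driver would arm an rtpoll timer
 * instead. */
static void example_apply_deferred_volume(pa_source *s) {
    pa_usec_t usec_to_next = 0;

    if (pa_source_volume_change_apply(s, &usec_to_next))
        pa_log_debug("Applied at least one pending hardware volume change.");

    if (usec_to_next > 0)
        pa_log_debug("Next change due in %llu usec; a timer should be armed for it.",
                     (long long unsigned) usec_to_next);
}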
2432
2433
2434 /* Called from the main thread */
2435 /* Gets the list of formats supported by the source. The members and idxset must
2436 * be freed by the caller. */
2437 pa_idxset* pa_source_get_formats(pa_source *s) {
2438 pa_idxset *ret;
2439
2440 pa_assert(s);
2441
2442 if (s->get_formats) {
2443 /* Source supports format query, all is good */
2444 ret = s->get_formats(s);
2445 } else {
2446 /* Source doesn't support format query, so assume it does PCM */
2447 pa_format_info *f = pa_format_info_new();
2448 f->encoding = PA_ENCODING_PCM;
2449
2450 ret = pa_idxset_new(NULL, NULL);
2451 pa_idxset_put(ret, f, NULL);
2452 }
2453
2454 return ret;
2455 }
2456
2457 /* Called from the main thread */
2458 /* Checks if the source can accept this format */
2459 pa_bool_t pa_source_check_format(pa_source *s, pa_format_info *f)
2460 {
2461 pa_idxset *formats = NULL;
2462 pa_bool_t ret = FALSE;
2463
2464 pa_assert(s);
2465 pa_assert(f);
2466
2467 formats = pa_source_get_formats(s);
2468
2469 if (formats) {
2470 pa_format_info *finfo_device;
2471 uint32_t i;
2472
2473 PA_IDXSET_FOREACH(finfo_device, formats, i) {
2474 if (pa_format_info_is_compatible(finfo_device, f)) {
2475 ret = TRUE;
2476 break;
2477 }
2478 }
2479
2480 pa_idxset_free(formats, (pa_free2_cb_t) pa_format_info_free2, NULL);
2481 }
2482
2483 return ret;
2484 }
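
/* Illustrative sketch, not part of this file: asking a source whether it can
 * record plain PCM. pa_format_info_new()/pa_format_info_free() are the same
 * helpers pa_source_get_formats() above relies on; the function name is an
 * assumption for the example. */
static pa_bool_t example_source_does_pcm(pa_source *s) {
    pa_format_info *f;
    pa_bool_t ok;

    f = pa_format_info_new();
    f->encoding = PA_ENCODING_PCM;

    ok = pa_source_check_format(s, f);
    pa_format_info_free(f);

    return ok;
}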
2485
2486 /* Called from the main thread */
2487 /* Calculates the intersection between formats supported by the source and
2488 * in_formats, and returns these, in the order of the source's formats. */
2489 pa_idxset* pa_source_check_formats(pa_source *s, pa_idxset *in_formats) {
2490 pa_idxset *out_formats = pa_idxset_new(NULL, NULL), *source_formats = NULL;
2491 pa_format_info *f_source, *f_in;
2492 uint32_t i, j;
2493
2494 pa_assert(s);
2495
2496 if (!in_formats || pa_idxset_isempty(in_formats))
2497 goto done;
2498
2499 source_formats = pa_source_get_formats(s);
2500
2501 PA_IDXSET_FOREACH(f_source, source_formats, i) {
2502 PA_IDXSET_FOREACH(f_in, in_formats, j) {
2503 if (pa_format_info_is_compatible(f_source, f_in))
2504 pa_idxset_put(out_formats, pa_format_info_copy(f_in), NULL);
2505 }
2506 }
2507
2508 done:
2509 if (source_formats)
2510 pa_idxset_free(source_formats, (pa_free2_cb_t) pa_format_info_free2, NULL);
2511
2512 return out_formats;
2513 }