path: root/chromium/media/mojo/mojom/renderer_extensions.mojom
blob: c326d52bd30908a5bf860e7c32d1a8307065b4b2
// Copyright 2019 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

module media.mojom;

import "media/mojo/mojom/media_types.mojom";
import "mojo/public/mojom/base/time.mojom";
import "mojo/public/mojom/base/unguessable_token.mojom";
import "ui/gfx/geometry/mojom/geometry.mojom";

[EnableIf=is_win]
import "ui/gfx/mojom/buffer_types.mojom";
// Extension of the mojo::RendererClient communication layer for HLS and Android
// software rendering fallback paths.
// This allows the Browser side to call back into the Renderer side. Concretely,
// the MediaPlayerRenderer uses these methods to propagate events it raises to
// the MediaPlayerRendererClient, which lives in the Renderer process.
interface MediaPlayerRendererClientExtension {
  // Called the first time the metadata is updated, and whenever the metadata
  // changes.
  OnVideoSizeChange(gfx.mojom.Size size);
  OnDurationChange(mojo_base.mojom.TimeDelta duration);
};
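
// Illustrative sketch (not part of the API; C++ names other than the generated
// bindings are hypothetical): how the renderer side might implement the client
// extension above.
//
//   class MediaPlayerRendererClientImpl
//       : public media::mojom::MediaPlayerRendererClientExtension {
//    public:
//     void OnVideoSizeChange(const gfx::Size& size) override {
//       natural_size_ = size;  // Cache and forward the new natural size.
//     }
//     void OnDurationChange(base::TimeDelta duration) override {
//       duration_ = duration;  // Forward the new duration to the pipeline.
//     }
//    private:
//     gfx::Size natural_size_;
//     base::TimeDelta duration_;
//   };
//
//   // A mojo::AssociatedReceiver (or mojo::Receiver) bound to this object
//   // receives the calls made by the browser-side MediaPlayerRenderer.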

// Extension of the mojo::Renderer communication layer for HLS and Android
// software rendering fallback paths.
// This allows the Renderer side to call into the Browser side.
// Concretely, the MediaPlayerRendererClient uses these methods to send commands
// to MediaPlayerRenderer, which lives in the Browser process.
interface MediaPlayerRendererExtension {
  // Registers a new request in the ScopedSurfaceRequestManager, and returns
  // its token.
  // Called once, during media::Renderer initialization, as part of
  // StreamTexture's set-up.
  InitiateScopedSurfaceRequest()
      => (mojo_base.mojom.UnguessableToken request_token);
};
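
// Illustrative sketch (not part of the API): how the renderer-side client
// might call the method above over the generated C++ bindings and consume the
// returned token. The surrounding setup code is assumed, not shown.
//
//   mojo::Remote<media::mojom::MediaPlayerRendererExtension> renderer_ext;
//   // ... bind `renderer_ext` during media::Renderer initialization ...
//   renderer_ext->InitiateScopedSurfaceRequest(
//       base::BindOnce([](const base::UnguessableToken& request_token) {
//         // Hand the token to the StreamTexture so the browser side can
//         // later fulfill the surface request registered under it.
//       }));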

// Extension of the mojo::RendererClient communication layer for media flinging,
// a.k.a. RemotePlayback, when playing media on a remote Cast device.
// This allows the Browser side to call back into the Renderer side.
// Concretely, the FlingingRenderer uses these methods to propagate events it
// raises to the FlingingRendererClient, which lives in the Renderer process.
interface FlingingRendererClientExtension {
  // Called when the play state of the cast device goes out of sync with WMPI's
  // play state (e.g. when another phone plays or pauses the cast device on the
  // same network).
  OnRemotePlayStateChange(MediaStatusState state);
};
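
// Illustrative sketch (not part of the API; class and helper names are
// hypothetical): a renderer-side handler that keeps the local player in sync
// with the remote device.
//
//   void FlingingRendererClientImpl::OnRemotePlayStateChange(
//       media::mojom::MediaStatusState state) {
//     // Mirror the remote device's state locally, e.g. pause the local
//     // pipeline when another sender paused playback on the cast device.
//     remote_play_state_ = state;
//     UpdateLocalPlaybackState();
//   }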

[EnableIf=is_win]
struct FrameTextureInfo {
  gfx.mojom.GpuMemoryBufferHandle texture_handle;
  mojo_base.mojom.UnguessableToken token;
};

[EnableIf=is_win]
struct FramePoolInitializationParameters {
  array<FrameTextureInfo> frame_textures;
  gfx.mojom.Size texture_size;
};

[EnableIf=is_win]
// Extension of the mojo::RendererClient communication layer for Media
// Foundation based Renderer.
// This allows the MF_CDM utility process to call into the Blink renderer
// process.
// Concretely, the MediaFoundationRenderer (in the MF_CDM process) uses these
// methods to send video frames as textures to the
// MediaFoundationRendererClient, which lives in the Blink renderer process,
// so that the MediaFoundationRenderer can signal to the video stream sink
// that a specific texture is ready to be displayed.
interface MediaFoundationRendererClientExtension {
  // Provide the GPU textures (ID3D11Texture2D) to the
  // MediaFoundationRendererClient with the associated token so that the
  // MediaFoundationRendererClient can create the shared images in the GPU
  // process using the SharedImageInterface.
  InitializeFramePool(FramePoolInitializationParameters pool_info);

  // Signal to the MediaFoundationRendererClient that the texture associated
  // with `frame_token` is ready to be displayed, along with the corresponding
  // frame size and timestamp. The timestamp is the delta between the start of
  // the AV presentation and the time at which the frame is to be presented.
  OnFrameAvailable(mojo_base.mojom.UnguessableToken frame_token,
    gfx.mojom.Size size, mojo_base.mojom.TimeDelta timestamp);
};
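
// Illustrative sketch (not part of the API; member and helper names are
// hypothetical): the frame lifecycle as seen from the renderer-process client.
//
//   void MediaFoundationRendererClientImpl::InitializeFramePool(
//       media::mojom::FramePoolInitializationParametersPtr pool_info) {
//     // Create one shared image per texture via the SharedImageInterface and
//     // key it by the accompanying token for later lookup.
//     for (auto& texture : pool_info->frame_textures) {
//       RegisterSharedImage(texture->token,
//                           std::move(texture->texture_handle));
//     }
//   }
//
//   void MediaFoundationRendererClientImpl::OnFrameAvailable(
//       const base::UnguessableToken& frame_token,
//       const gfx::Size& size,
//       base::TimeDelta timestamp) {
//     // Wrap the shared image keyed by `frame_token` as a VideoFrame and
//     // hand it to the sink; once the compositor is done with the frame,
//     // NotifyFrameReleased(frame_token) is sent back so the texture can be
//     // reused.
//     SubmitFrameToSink(frame_token, size, timestamp);
//   }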

// The Media Foundation Media Engine has two modes for playback.
// DirectComposition is when the media engine will render directly to a
// direct composition visual, skipping the Chromium compositor.
// FrameServer is when the media engine will render to a texture and
// that texture is provided to the Chromium compositor.
enum MediaFoundationRenderingMode {
  DirectComposition,
  FrameServer
};

[EnableIf=is_win]
// Extension of the mojo::Renderer communication layer for MediaFoundation-based
// Renderer.
// This allows the MediaFoundationRendererClient in the render process to call
// into the MediaFoundationRenderer in the MediaFoundationService (utility/LPAC)
// process.
// Please refer to media/renderers/win/media_foundation_renderer_extension.h
// for its C++ interface equivalence.
interface MediaFoundationRendererExtension {
  // Enables Direct Composition video rendering and gets the token associated
  // with the Direct Composition surface handle, which can be retrieved later
  // in the GPU process using the token. Returns a null `token` on failure, in
  // which case `error` explains the failure reason.
  GetDCOMPSurface() => (mojo_base.mojom.UnguessableToken? token, string error);

  // Notifies whether video is enabled.
  SetVideoStreamEnabled(bool enabled);

  // Notifies of output composition parameters. It might fail if
  // MediaFoundationRenderer runs into an error while setting the `rect`
  // information onto MFMediaEngine. If it fails, the video will be displayed
  // incorrectly (e.g. a smaller video rendered at the corner of the video
  // output area). In case of failure, the caller should not use the `rect`
  // for further operations.
  SetOutputRect(gfx.mojom.Rect rect) => (bool success);

  // Notify that the frame has been displayed and can be reused.
  NotifyFrameReleased(mojo_base.mojom.UnguessableToken frame_token);

  // Request a frame from the media engine if one is available. The frame will
  // be returned asynchronously via the
  // MediaFoundationRendererClientExtension::OnFrameAvailable callback.
  RequestNextFrame();

  // Notify which rendering mode to use for future video frames.
  SetMediaFoundationRenderingMode(MediaFoundationRenderingMode mode);
};
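
// Illustrative sketch (not part of the API): typical calls made by the
// renderer-process client over the generated C++ bindings. Only the interface
// and enum names come from this file; binding signatures and the surrounding
// code are approximate.
//
//   mojo::Remote<media::mojom::MediaFoundationRendererExtension> renderer_ext;
//   // ... bind `renderer_ext` to the MediaFoundationService ...
//
//   // Switch to Direct Composition rendering and fetch the surface token.
//   renderer_ext->SetMediaFoundationRenderingMode(
//       media::mojom::MediaFoundationRenderingMode::DirectComposition);
//   renderer_ext->GetDCOMPSurface(base::BindOnce(
//       [](const std::optional<base::UnguessableToken>& token,
//          const std::string& error) {
//         if (!token) {
//           DLOG(ERROR) << "GetDCOMPSurface failed: " << error;
//           return;
//         }
//         // Pass `token` to the GPU process to look up the surface handle.
//       }));
//
//   // Keep the composition rect in sync with the video element's bounds.
//   renderer_ext->SetOutputRect(gfx::Rect(0, 0, 1280, 720),
//                               base::BindOnce([](bool success) {}));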

// This interface is used by the browser to determine if there are any renderers
// actively using the Media Foundation Renderer. The number of Media Foundation
// Renderers in use is determined by the number of active connections. The
// remote lives in the renderer process and the receiver lives in the browser
// process.
interface MediaFoundationRendererObserver {
};
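
// Illustrative sketch (not part of the API; names other than the mojo types
// are hypothetical): the browser side can count in-use Media Foundation
// Renderers by tracking live connections in a receiver set.
//
//   mojo::ReceiverSet<media::mojom::MediaFoundationRendererObserver> observers;
//   observers.set_disconnect_handler(base::BindRepeating([]() {
//     // One fewer Media Foundation Renderer is in use; re-evaluate whether
//     // features such as Live Caption can be offered again.
//   }));
//   // Each MediaFoundationRendererCreated() call adds a connection, e.g.
//   observers.Add(&stub_observer_impl, std::move(observer));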

// This interface is used to notify the browser that the renderer is using the
// Media Foundation Renderer, which uses Media Foundation to render audio
// directly. Live Caption will not work in this case because Chrome is unable
// to tap into the audio rendering pipeline. The remote lives in the renderer
// process and the receiver lives in the browser process.
interface MediaFoundationRendererNotifier {
  // Notify the browser that a Media Foundation Renderer has been created. The
  // browser will use this event to notify the user that some features
  // incompatible with the Media Foundation Renderer may not work.
  MediaFoundationRendererCreated(
    pending_receiver<MediaFoundationRendererObserver> observer);
};
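
// Illustrative sketch (not part of the API): how the renderer process might
// report a newly created Media Foundation Renderer. The `notifier` remote is
// assumed to be bound already; other names are hypothetical.
//
//   mojo::Remote<media::mojom::MediaFoundationRendererNotifier> notifier;
//   mojo::Remote<media::mojom::MediaFoundationRendererObserver> observer;
//   notifier->MediaFoundationRendererCreated(
//       observer.BindNewPipeAndPassReceiver());
//   // Keeping `observer` alive marks one Media Foundation Renderer as in
//   // use; destroying it disconnects the pipe, which the browser observes.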