diff --git a/docs.json b/docs.json
index 3a45338e7..51a0a762d 100644
--- a/docs.json
+++ b/docs.json
@@ -978,23 +978,32 @@
]
},
"streaming-platform/how-the-streaming-platform-interact-with-the-cdn",
- {
+ {
"group": "Live streaming",
"pages": [
"streaming-platform/live-streaming/create-a-live-stream",
- "streaming-platform/live-streaming/webrtc-to-hls-transcoding",
{
- "group": "Push live streams software",
+ "group": "Broadcasting software",
+ "pages": [
+ "streaming-platform/live-streaming/broadcasting-software/ffmpeg",
+ "streaming-platform/live-streaming/broadcasting-software/larix",
+ "streaming-platform/live-streaming/broadcasting-software/obs"
+ ]
+ },
+ {
+ "group": "Protocols",
"pages": [
- "streaming-platform/live-streaming/push-live-streams-software/push-live-streams-via-obs",
- "streaming-platform/live-streaming/push-live-streams-software/push-live-streams-via-liveu-solo"
+ "streaming-platform/live-streaming/protocols/rtmp",
+ "streaming-platform/live-streaming/protocols/srt",
+ "streaming-platform/live-streaming/protocols/webrtc"
]
},
"streaming-platform/live-streaming/create-and-configure-a-restream-to-social-media",
"streaming-platform/live-streaming/combine-multiple-live-streams",
"streaming-platform/live-streaming/record-your-live-streams-and-save-them-as-videos",
"streaming-platform/live-streaming/pause-and-rewind-the-live-streams",
- "streaming-platform/live-streaming/insert-html-overlays-in-live-streams"
+ "streaming-platform/live-streaming/insert-html-overlays-in-live-streams",
+ "streaming-platform/live-streaming/how-low-latency-streaming-works"
]
},
{
@@ -1054,7 +1063,10 @@
{
"group": "Troubleshooting",
"pages": [
- "streaming-platform/troubleshooting/solve-common-streaming-platform-issues",
+ "streaming-platform/troubleshooting/general-issues",
+ "streaming-platform/troubleshooting/http-status-codes",
+ "streaming-platform/troubleshooting/live-streaming-issues",
+ "streaming-platform/troubleshooting/vod-issues",
{
"group": "WebRTC common issues",
"pages": [
diff --git a/images/larix-grove.png b/images/larix-grove.png
new file mode 100644
index 000000000..84e1dd767
Binary files /dev/null and b/images/larix-grove.png differ
diff --git a/streaming-platform/about-gcore-streaming-platform.mdx b/streaming-platform/about-gcore-streaming-platform.mdx
index fdd59e28f..33dd4669a 100644
--- a/streaming-platform/about-gcore-streaming-platform.mdx
+++ b/streaming-platform/about-gcore-streaming-platform.mdx
@@ -1,36 +1,36 @@
----
-title: About GCore streaming platform
-sidebarTitle: Overview
----
-
-## Method 1: Streaming directly via CDN
-
-If you have your own media server that produces a stream in HLS format, you will need one CDN resource with special settings for playlists (.m3u8) and chunks (.ts). For more information about settings, see the article [Live streaming via CDN using the HLS protocol](/cdn/cdn-resource-options/configure-live-streams-and-video-delivery-via-cdn-only-for-paid-tariffs).
-
-You can protect the stream with a token. The token to encrypt the streaming is configured via a script on the origin server, enabling the Secure Token option in the CDN Resource settings according to the "[Configure and use Secure Token](/cdn/cdn-resource-options/security/use-a-secure-token/configure-and-use-secure-token)" guide.
-
-## Method 2: Streaming via the video streaming with CDN
-
-Use this method if a stream from your server is not in HLS format.
-
-Streaming via our Video Streaming has advantages and additional features, such as:
-
- * [Live stream recording](/streaming-platform/live-streaming/record-your-live-streams-and-save-them-as-videos) records the broadcast and saves it as VOD.
- * [DVR](/streaming-platform/live-streaming/pause-and-rewind-the-live-streams) allows you to rewind the stream, stop it and return to viewing it later.
- * [Restreaming](/streaming-platform/live-streaming/create-and-configure-a-restream-to-social-media) allows you to send a broadcast to several media services simultaneously, such as Facebook and YouTube.
- * [Own player and statistics](/streaming-platform/extra-features/customize-appearance-of-the-built-in-player) allow you to collect statistics on views, geography, and popularity of individual broadcasts.
-
-
-
-The formats and specifications supported by the Video Streaming are described in the [article](/streaming-platform/live-streams-and-videos-protocols-and-codecs/what-initial-parameters-of-your-live-streams-and-videos-we-can-accept).
-
-When you stream via the Video Streaming, there are two ways to send us the stream: PUSH and PULL.
-
- * To use PULL, you need a server with a stream in RTMP (or other) format. The stream is sent to our servers, which convert it into HLS.
- * Use PUSH if you stream directly from your computer, camera, or any other device using third-party [software](/streaming-platform/live-streaming/push-live-streams-software/push-live-streams-via-obs). In this case, there is a unique key in the Gcore Customer Portal that you insert into your program, and the stream is sent to us. The Video Streaming converts it into HLS and sends it to end-users.
-
-
-
-We can receive SRT streams in either PULL or PUSH format. To send us PULL-SRT, just specify a link in the required protocol in the URL field. If you want to get a PUSH link to send SRT streams to us, write to support via [support@gcore.com](mailto:support@gcore.com) or your manager. We will set up an SRT-PUSH link on your account.
-
+---
+title: About Gcore streaming platform
+sidebarTitle: Overview
+---
+
+## Method 1: Streaming directly via CDN
+
+If you have your own media server that produces a stream in HLS format, you will need one CDN resource with special settings for playlists (.m3u8) and chunks (.ts). For more information about settings, see the article [Live streaming via CDN using the HLS protocol](/cdn/cdn-resource-options/configure-live-streams-and-video-delivery-via-cdn-only-for-paid-tariffs).
+
+You can protect the stream with a token. The token to encrypt the streaming is configured via a script on the origin server, enabling the Secure Token option in the CDN Resource settings according to the "[Configure and use Secure Token](/cdn/cdn-resource-options/security/use-a-secure-token/configure-and-use-secure-token)" guide.
+
+## Method 2: Streaming via the video streaming with CDN
+
+Use this method if a stream from your server is not in HLS format.
+
+Streaming via our Video Streaming has advantages and additional features, such as:
+
+ * [Live stream recording](/streaming-platform/live-streaming/record-your-live-streams-and-save-them-as-videos) records the broadcast and saves it as VOD.
+ * [DVR](/streaming-platform/live-streaming/pause-and-rewind-the-live-streams) allows you to rewind the stream, stop it and return to viewing it later.
+ * [Restreaming](/streaming-platform/live-streaming/create-and-configure-a-restream-to-social-media) allows you to send a broadcast to several media services simultaneously, such as Facebook and YouTube.
+ * [Own player and statistics](/streaming-platform/extra-features/customize-appearance-of-the-built-in-player) allow you to collect statistics on views, geography, and popularity of individual broadcasts.
+
+
+
+The formats and specifications supported by the Video Streaming are described in the [article](/streaming-platform/live-streams-and-videos-protocols-and-codecs/what-initial-parameters-of-your-live-streams-and-videos-we-can-accept).
+
+When you stream via the Video Streaming, there are two ways to send us the stream: PUSH and PULL.
+
+ * To use PULL, you need a server with a stream in RTMP (or other) format. The stream is sent to our servers, which convert it into HLS.
 * Use PUSH if you stream directly from your computer, camera, or any other device using third-party [software](/streaming-platform/live-streaming/broadcasting-software/obs). In this case, there is a unique key in the Gcore Customer Portal that you insert into your program, and the stream is sent to us. The Video Streaming converts it into HLS and sends it to end-users.
+
+
+
+We can receive SRT streams in either PULL or PUSH format. To send us PULL-SRT, just specify a link in the required protocol in the URL field. If you want to get a PUSH link to send SRT streams to us, write to support via [support@gcore.com](mailto:support@gcore.com) or your manager. We will set up an SRT-PUSH link on your account.
+
About PULL and PUSH in detail, you can read in the "[Create a live stream](/streaming-platform/live-streaming/create-a-live-stream)" guide (step 3.2).
\ No newline at end of file
diff --git a/streaming-platform/live-streaming/broadcasting-software/ffmpeg.mdx b/streaming-platform/live-streaming/broadcasting-software/ffmpeg.mdx
new file mode 100644
index 000000000..96f0878dd
--- /dev/null
+++ b/streaming-platform/live-streaming/broadcasting-software/ffmpeg.mdx
@@ -0,0 +1,68 @@
+---
+title: "FFmpeg"
+sidebarTitle: "FFmpeg"
+---
+
+FFmpeg is a free and open-source command-line tool for recording, screencasting, and live streaming. It’s suitable for video game streaming, blogging, educational content, and more.
+
+FFmpeg links your device (e.g., a laptop or a PC) to different streaming platforms (e.g., Gcore Video Streaming, YouTube, Twitch, etc.). It takes an image captured by a camera, converts it into a video stream, and then sends it to the streaming platform.
+
+## Setup
+
+1. Install FFmpeg on your device. Follow the download instructions on [the official website](https://ffmpeg.org/download.html).
+
+2. To get the server URL and stream key, go to the [Streaming list](https://streaming.gcore.com/streaming/list), open the **Live stream settings** you need, and copy the relevant value from the **URLs for the encoder** section.
+
+For example, if you see these values on the **Live stream settings** page:
+
+
+
+Concatenate them to form the full RTMP URL for the stream:
+
+```
+rtmp://vp-push-ix1.gvideo.co/in/400448?cdf2a7ccf990e464c2b…
+```
+
+3. Open the command line interface (CLI) on your device and run the following command:
+
+```
+ffmpeg -f {input format params} -f flv {RTMP URL}
+```
+
+## Configure the stream parameters for optimal performance
+
+We recommend configuring the stream parameters you will send to our server to ensure optimal streaming performance. You can adjust these settings via the CLI parameters of FFmpeg.
+
+Example of a command line for streaming via FFmpeg with the recommended parameters:
+
+```
+ffmpeg -f {input format params} \
+ -c:v libx264 -preset veryfast -b:v 2000000 \
+ -profile:v baseline -vf format=yuv420p \
+ -crf 23 -g 60 \
+ -b:a 128k -ar 44100 -ac 2 \
+ -f flv {RTMP URL}
+```
+
+### Output parameters
+
+- **Video Bitrate:** To stream at 720p resolution, set the bitrate to 2000Kbps (`-b:v 2000000`). If you’re broadcasting at 1080p, set the bitrate to 4000Kbps (`-b:v 4000000`).
+- **Audio Bitrate:** 128 (`-b:a 128k`).
+- **Encoder:** Software (`-c:v libx264`), or any other H264 codec.
+- **Rate control:** CRF (`-crf 23`)
+- **Keyframe Interval:** 2s (`-g 60`).
+- **CPU Usage Preset:** veryfast (`-preset veryfast`).
+- **Profile:** baseline (`-profile:v baseline -vf format=yuv420p`)
+
+### Audio parameters
+
+- **Sample Rate**: 44.1 kHz (`-ar 44100`) or 48 kHz (`-ar 48000`).
+- Use **Stereo** for the best sound quality (`-ac 2`).
+
+### Video parameters
+
+If you need to reduce the original resolution (downscale), follow the instructions in this section. If no resolution change is required, you can skip this step.
+
+- **Output (Scaled) Resolution:** 1280×720
+- **Downscale Filter:** Bicubic
+- **Common FPS Values:** 30
\ No newline at end of file
diff --git a/streaming-platform/live-streaming/broadcasting-software/larix.mdx b/streaming-platform/live-streaming/broadcasting-software/larix.mdx
new file mode 100644
index 000000000..deb4aa5c9
--- /dev/null
+++ b/streaming-platform/live-streaming/broadcasting-software/larix.mdx
@@ -0,0 +1,64 @@
+---
+title: "Larix"
+sidebarTitle: "Larix"
+---
+
+Larix is a free video recording, screencasting, and live streaming encoder. It’s suitable for video game streaming, blogging, educational content, and more.
+
+Larix links your mobile device (e.g., a smartphone or a tablet) to different streaming platforms (e.g., Gcore Video Streaming, YouTube, Twitch, etc.). It takes an image captured by a camera, converts it into a video stream, and then sends it to the streaming platform.
+
+## Setup
+
+1. Install Larix on your mobile device. You can find the download instructions on the official website.
+2. To get the server URL and stream key, go to the [Streaming list](https://streaming.gcore.com/streaming/list), open the **Live stream settings** you need, and copy the relevant value from the **URLs for the encoder** section.
+
+For example, if you see these values on the **Live stream settings** page:
+
+
+
+Concatenate them to form the full RTMP URL for the stream:
+
+```
+rtmp://vp-push-ix1.gvideo.co/in/400448?cdf2a7ccf990e464c2b…
+```
+
+ 3. Open [Larix Grove](https://softvelum.com/larix/grove/), where you can create the configuration for the Larix app and share it via QR code.
+ 4. In **Larix Grove**, scroll down to the **Connection** section.
+ 5. Enter the RTMP URL and a name for your connection.
+ 6. Click the **QR-Code** button to generate a QR code. You can scan this code with the Larix app on your mobile device to automatically configure the connection.
+ 7. Open the Larix app on your mobile device and tap the gear icon to open the settings.
+ 8. Tap **Larix Grove** and then tap **Scan Grove QR code**.
+ 9. Scan the QR code you generated in Larix Grove. The app will automatically configure the connection.
+10. Return to the Larix app's main screen and tap the big white button to start streaming.
+
+## Configure the stream parameters for optimal performance
+
+We recommend configuring the stream parameters you send to our server to ensure optimal streaming performance.
+
+You can adjust these settings with [Larix Grove](https://softvelum.com/larix/grove/), which generates the configuration and connection URL as a QR code. This allows you to share the configuration with team members easily.
+
+After you have changed the settings, click the **QR-Code** button to generate a new QR code for sharing.
+
+
+
+### Camera parameters
+
+If you need to reduce the original resolution (downscale), follow the instructions in this section.
+
+If you need to increase the FPS to 60, make sure to also increase the bitrate accordingly for optimal stream quality (i.e., double it).
+
+If no resolution change is required, you can skip this step.
+
+- **Resolution:** 1280×720
+- **Frame rate:** 30
+
+### Video encoder parameters
+
+- **Video Bitrate:** 2000000 for 720p resolution or 4000000 for 1080p resolution.
+- **Keyframe Interval:** 60 (i.e., 2 seconds)
+
+### Audio encoder parameters
+
+- **Audio Bitrate:** 128000
+- **Sample Rate**: 44100 or 48000
+- **Channels**: 2
\ No newline at end of file
diff --git a/streaming-platform/live-streaming/broadcasting-software/obs.mdx b/streaming-platform/live-streaming/broadcasting-software/obs.mdx
new file mode 100644
index 000000000..f988582f7
--- /dev/null
+++ b/streaming-platform/live-streaming/broadcasting-software/obs.mdx
@@ -0,0 +1,100 @@
+---
+title: "Open Broadcaster Software"
+sidebarTitle: "OBS"
+---
+
+Open Broadcaster Software (OBS) is a free and open-source encoder for video recording, screencasting, and live streaming. It’s suitable for video game streaming, blogging, educational content, and more.
+
+OBS links your device (a laptop or a PC) to different streaming platforms (Gcore Video Streaming, YouTube, Twitch, etc.). It takes a camera image, converts it into a video stream, and then sends it to the streaming platform.
+
+## Setup
+
+1. Download Open Broadcaster Software (OBS) from the [official website](https://obsproject.com).
+
+2. Open the **Settings** section and go to the **Stream** tab. Complete the remaining steps in it.
+
+
+
+3. Select **Custom** from the dropdown list.
+
+4. Enter the Server URL into the **Server** field and the unique key into the **Stream Key** field. To get the Server URL and key, go to the [Streaming list](https://streaming.gcore.com/streaming/list), open the Live stream settings you need, and copy the relevant value from the URLs for the encoder section.
+
+For example, if you see these values on the Live stream settings page:
+
+
+
+Paste them into the OBS Settings as follows:
+
+- _rtmp://vp-push-ix1.gvideo.co/in/_ is the **Server**.
+- _400448?cdf2a7ccf990e464c2b…_ is the **Stream Key**.
+
+5. Click the **Apply** button to save the new configuration.
+
+6. Go to the **main OBS menu**, select the stream's source (video capture device, display capture, etc.), and click **Start Streaming**.
+
+
+
+7. Once the streaming has started, go to the [Streaming list](https://streaming.gcore.com/streaming/list), open the **Live Stream settings**, and copy the link to embed the broadcast to your website.
+
+
+
+That’s it. The stream from OBS will be broadcast to your website.
+
+## Configure the stream parameters for optimal performance
+
+We recommend configuring the stream parameters you will send to our server to ensure optimal streaming performance. You can adjust these settings in the OBS Output, Audio, and Video tabs.
+
+### Output parameters
+
+1. Open OBS Settings and go to the **Output** tab. Select **Simple** mode.
+
+2. Set the parameters as follows:
+
+- **Video Bitrate:** The resolution of your stream determines the required bitrate: The higher the resolution, the higher the bitrate. To stream at 720p resolution, set the bitrate to 2000Kbps. If you’re broadcasting at 1080p, set the bitrate to 4000Kbps.
+- **Audio Bitrate:** 128.
+- **Encoder:** Software (x264), or any other H264 codec.
+
+
+
+3. Click **Advanced** mode.
+
+4. Set the parameters as follows:
+
+- **Rate control:** CRF (the default value is 23)
+- **Keyframe Interval (0=auto):** 2s
+- **CPU Usage Preset:** veryfast
+- **Profile:** baseline
+
+5. Click **Apply** to save the configuration.
+
+
+
+### Audio parameters
+
+
+
+1. Open OBS Settings and go to the Audio tab.
+
+2. Set the Sample Rate to 44.1 kHz (default) or 48 kHz. Select **Stereo** for the best sound quality.
+
+3. Click **Apply**.
+
+### Video parameters
+
+If you need to reduce the original resolution (downscale), follow the instructions in this section. If no resolution change is required, you can skip this step.
+
+1. Open OBS Settings and go to the **Video** tab.
+
+2. Set the following parameters:
+
+- **Output (Scaled) Resolution:** 1280×720
+- **Downscale Filter:** Bicubic
+- **Common FPS Values:** 30
+
+3. Click **Apply**.
+
+
+
+
 You can see the stream's [output parameters here](/streaming-platform/live-streaming/broadcasting-software/obs#output-parameters). If you need to increase the FPS to 60, make sure to also increase the bitrate accordingly for optimal stream quality.
+
\ No newline at end of file
diff --git a/streaming-platform/live-streaming/create-a-live-stream.mdx b/streaming-platform/live-streaming/create-a-live-stream.mdx
index 354463d9f..a65b76256 100644
--- a/streaming-platform/live-streaming/create-a-live-stream.mdx
+++ b/streaming-platform/live-streaming/create-a-live-stream.mdx
@@ -1,153 +1,75 @@
----
-title: Create a live stream
-sidebarTitle: Create a live stream
----
-
-## Step 1. Initiate the process
-
-1\. In the Gcore Customer Portal, navigate to [Streaming](https://streaming.gcore.com/streaming) > **Live Streaming**.
-
-2\. Click **Create Live stream**.
-
-
-
-
-
-
-If the button is non-responsive, you have exceeded your live stream limit. To create a new stream, remove an existing stream from the list or request [technical support](mailto:support@gcore.com) to increase your limits.
-
-
-
-2\. Enter the name of your live stream in the window that appears and click **Create**.
-
-
-
-
-
-
-A new page will appear. Perform the remaining steps there.
-
-## Step 2. Set the stream type and additional features
-
-
-
-
-
-
-1\. Make sure that the **Enable live stream** toggle is on.
-
-
-
-By default, we offer live streams with low latency (a 4–5 second delay.) Low latency is available in two protocols: LL-DASH ([compatible with all devices except iOS](/streaming-platform/live-streams-and-videos-protocols-and-codecs/how-low-latency-streaming-works)) and LL-HLS (compatible with iOS). You can also obtain legacy HLS with MPEGTS format segments, in which case, please read [our article](/streaming-platform/live-streams-and-videos-protocols-and-codecs/how-low-latency-streaming-works).
-
-
-
-2\. (Optional) Review the live stream name and update it if needed.
-
-3\. Enable additional features If you activated them previously:
-
- * [Record](/streaming-platform/live-streaming/record-your-live-streams-and-save-them-as-videos) for live stream recording. It will be active when you start streaming. Remember to enable the toggle if you require a record of your stream.
- * [DVR](/streaming-platform/live-streaming/pause-and-rewind-the-live-streams) for an improved user experience. When the DVR feature is enabled, your viewers can pause and rewind the broadcast.
-
-
-
-4\. Select the relevant stream type: **Push**, **Pull**, or **WebRTC = > HLS**.
-
- * Choose **Push** if you don't use your own media server. Establish the URL of our server and the unique stream key in your encoder (e.g. OBS, Streamlabs, vMix, or LiveU Solo). You can use protocols RTMP, RTMPS, and SRT too. The live stream will operate on our server, will be converted to MPEG-DASH and HLS protocols, and will be distributed to end users via our CDN.
-
- * Choose **Pull** if you have a streaming media server. The live stream will operate on your server. Our server will convert it from the RTMP, RTMPS, SRT, or other protocols to MPEG-DASH and HLS protocols. Then, our CDN will distribute the original live stream in the new format to end users.
-
- * Choose **WebRTC = > HLS** if you want to convert your live video stream from WebRTC to HLS (HTTP Live Streaming) and DASH (Dynamic Adaptive Streaming over HTTP) formats.
-
-
-
-
-## Step 3. Configure your stream for push, pull, or WebRTC to HLS
-
-### Push ingest type
-
-1\. Select the protocol for your stream: **RTMP**, **RTMPS**, or **SRT**. The main difference between these protocols is their security levels and ability to handle packet loss.
-
- * RTMP is the standard open-source protocol for live broadcasting over the internet. It supports low latency.
- * RTMPS is a variation of RTMP that incorporates SSL usage.
- * SRT is a protocol designed to transmit data reliably with protection against packet loss.
-
-
-
-
-
-
-2\. Copy the relevant data to insert into your encoder.
-
-
-
-Insert the following values:
-
- * **Server (URL)** is the target server where your encoder will relay the broadcast; e.g., `rtmp://vp-push-ed1.gvideo.co/in/ `.
- * **Stream key** is the unique identifier of the created live stream.
-
-
-
-Copy the Push URL SRT. It contains the server URL, port, stream ID (internal for Gcore,) and stream key. For example:
-
- ```
- srt://vp-push-ed1-srt.gvideo.co:5001?streamid=000000#12ab345c678901d…
- ```
-
-
-
-
-
-
-
-We provide backup links, which you can specify in the encoder interface. In case of inaccessibility and overloading of your primary server, the stream will be minimally interrupted and will continue automatically from the backup server.
-
-
-
-### Pull ingest type
-
-In the **URL** field, insert a link to the stream from your media server. Check the full list of supported protocols in our [Input parameters](/streaming-platform/live-streams-and-videos-protocols-and-codecs/what-initial-parameters-of-your-live-streams-and-videos-we-can-accept) guide.
-
-
-
-
-
-**Tip**
-
-You can specify multiple media servers separated by space in the URL field.
-
-In this case, the first media server will be the primary source, and the subsequent ones will serve as backup servers. If the signal from the first source fails, we will automatically continue the stream from the second source. For example: `rtmps://main-server/live1 rtmp://backup-server/live1 rtmp://backup-server/live2`.
-
-
-### WebRTC to HLS ingest type
-
-Insert the link from the WHIP URL field to any library or tool that supports WHIP (WebRTC-HTTP Ingestion Protocol). This will convert your stream into HLS format.
-
-
-
-
-
-
-## Step 4. Start the stream
-
-Start a live stream on your media server or encoder. You will see a streaming preview on the Gcore Live Stream Settings page if everything is configured correctly.
-
-## Step 5. Embed the stream to your app
-
-Embed the created live stream into your web app by one of the following methods:
-
- * Copy the iframe code to embed the live stream within the Gcore built-in player.
- * Copy the export link in a suitable protocol and paste it into your player. Use the **LL-DASH** link if your live stream will be viewed from any device except iOS. Use **LL HLS** for iOS viewing.
-
-
-
-
-
-
-That's it. Your viewers can see the live stream.
-
-{/*
-**Warning**
-
-We only support [statistic data](/streaming/streaming-platform/how-the-streaming-platform-and-additional-features-are-billed) collection for Gcore players. If you use your own, non-Gcore player, the statistics page will be empty. Independent of the player, you can view [monitoring metrics](/streaming-platform/live-streaming/view-your-live-stream-metrics) for performance analysis and troubleshooting.
+---
+title: Create a live stream
+sidebarTitle: Create a live stream
+---
+
+## Step 1. Initiate the process
+
+1\. In the **Gcore Customer Portal**, navigate to **Streaming** > [Live Streaming](https://streaming.gcore.com/streaming).
+
+2\. Click the **Create Live stream** button on the top right.
+
+
+
+
+If the button is non-responsive, you have exceeded your live stream limit. To create a new stream, remove an existing stream from the list or request [technical support](mailto:support@gcore.com) to increase your limits.
+
+
+2\. Enter the name of your live stream in the window that appears and click the **Create** button.
+
+
+
+A new page will appear. Perform the remaining steps there.
+
+## Step 2. Set the ingest type and additional features
+
+
+
+1\. Make sure that the **Enable live stream** toggle is on.
+
+
+By default, we offer live streams with low latency (a 4–5 second delay). Low latency is available in two protocols: LL-DASH ([compatible with all devices except iOS](/streaming-platform/live-streaming/how-low-latency-streaming-works)) and LL-HLS (compatible with iOS). You can also obtain legacy HLS with MPEGTS format segments, in which case, please read [our article](/streaming-platform/live-streaming/how-low-latency-streaming-works).
+
+
+2\. Review the live stream name and update it if needed.
+
+3\. Enable additional features:
+
+ * [Record](/streaming-platform/live-streaming/record-your-live-streams-and-save-them-as-videos) for live stream recording. It will be active when you start streaming. Remember to enable the toggle if you require a record of your stream.
+ * [DVR](/streaming-platform/live-streaming/pause-and-rewind-the-live-streams) for an improved user experience. When the DVR feature is enabled, your viewers can pause and rewind the broadcast.
+
+4\. Select the relevant **Ingest type**: **Push** or **Pull**.
+
+- Choose **Push** if you don't use your own media server. Establish the URL of our server and the unique stream key in your encoder (e.g. OBS, Streamlabs, vMix, or LiveU Solo). You can use protocols RTMP, RTMPS, and SRT too. The live stream will operate on our server, will be converted to MPEG-DASH and HLS protocols, and will be distributed to end users via our CDN.
+
+- Choose **Pull** if you have a streaming media server. The live stream will operate on your server. Our server will convert it from the RTMP, RTMPS, SRT, or other protocols to MPEG-DASH and HLS protocols. Then, our CDN will distribute the original live stream in the new format to end users.
+
+## Step 3. Configure your stream
+
+Depending on the selected ingest type and protocol, your settings will differ. Refer to specific
+protocol pages for more details:
+
+- [RTMP/RTMPS](protocols/rtmp)
+- [SRT](protocols/srt)
+- [WebRTC to HLS](protocols/webrtc)
+
+## Step 4. Start the stream
+
+Start a live stream on your media server or encoder. You will see a streaming preview on the **Live Stream Settings** page if everything is configured correctly.
+
+## Step 5. Embed the stream to your app
+
+Embed the created live stream into your web app by one of the following methods:
+
+- Copy the iframe code to embed the live stream within the Gcore built-in player.
+- Copy the export link in a suitable protocol and paste it into your player. Use the **LL-DASH** link if your live stream will be viewed from any device except iOS. Use **LL HLS** for iOS viewing.
+
+
+
+That’s it. Your viewers can see the live stream.
+
+{/*
+**Warning**
+
+We only support [statistic data](/streaming/streaming-platform/how-the-streaming-platform-and-additional-features-are-billed) collection for Gcore players. If you use your own, non-Gcore player, the statistics page will be empty. Independent of the player, you can view [monitoring metrics](/streaming-platform/live-streaming/view-your-live-stream-metrics) for performance analysis and troubleshooting.
*/}
\ No newline at end of file
diff --git a/streaming-platform/live-streams-and-videos-protocols-and-codecs/how-low-latency-streaming-works.mdx b/streaming-platform/live-streaming/how-low-latency-streaming-works.mdx
similarity index 58%
rename from streaming-platform/live-streams-and-videos-protocols-and-codecs/how-low-latency-streaming-works.mdx
rename to streaming-platform/live-streaming/how-low-latency-streaming-works.mdx
index 350eac1fc..b176eed7d 100644
--- a/streaming-platform/live-streams-and-videos-protocols-and-codecs/how-low-latency-streaming-works.mdx
+++ b/streaming-platform/live-streaming/how-low-latency-streaming-works.mdx
@@ -1,86 +1,79 @@
----
-title: How low latency streaming works
-sidebarTitle: Low latency streaming
----
-
-Streaming latency is the timespan between the moment a frame is captured and when that frame is displayed on the viewers' screens. Latency occurs because each stream is processed several times during broadcasting to be delivered worldwide:
-
-1\. **Encoding (or packaging).** In this step, the streaming service retrieves your stream in any format, converts it into the format for delivery through CDN, and divides it into small fragments.
-
-2\. **Transferring.** In this step, CDN servers pull the processed stream, cache it, and send it to the end-users.
-
-3\. **Receipt by players.** In this step, end-user players load the fragments and buffer them.
-
-Each step affects latency, so the total timespan can increase to 30–40 seconds, especially if the streaming processing isn't optimized. For some companies (such as sports or metaverse events, or news releases), such latency is too large, and it's crucial to reduce it.
-
-## How does GCore provide low latency?
-
-The Gcore Video Streaming receives live streams in RTMP or SRT protocols; transcodes to ABR ([adaptive bitrate](/streaming-platform/live-streams-and-videos-protocols-and-codecs/output-parameters-after-transcoding-bitrate-frame-rate-and-codecs#output-parameters-after-transcoding)), via CDN in LL-HLS and LL-DASH protocols.
-
- * LL-HLS (Low Latency HTTP Live Streaming) is an adaptive protocol developed by Apple for live streaming via the Internet. This protocol is based on HTTP, which allows it to be cached on CDN servers and distributed via CDN as static content.
- * LL-DASH (Low Latency Dynamic Adaptive Streaming over HTTP) is a data streaming technology that optimizes media content delivery via the HTTP protocol.
-
-
-
-Also, Gcore uses CMAF (Common Media Application Format) as a base for LL-HLS/DASH. CMAF allows dividing segments into chunks (video fragments) for faster delivery over HTTP networks.
-
-LL-HLS and LL-DASH reduce latency to 2–4 sec, depending on the network conditions.
-
-
-
-
-
-
-## How do LL-HLS and LL-DASH work in comparison to the standard approach?
-
-The standard video delivery approach involves sending the entirely created segment to the CDN. Once the CDN receives the complete segment, it transmits it to the player.
-
-With this approach, video latency depends on segment length. For example, if a segment is 6 seconds long when requesting and processing the first segment, the player displays a frame that is already 6 seconds late compared to the actual time.
-
-The Low Latency approach uses the CMAF-CTE extension (Chunked Transfer-Encoding), which helps divide live stream segments into small, non-overlapping, and independent fragments (chunks) with a length of 0.5–2 seconds. The independence of the chunks allows the encoder not to wait for the end of the complete loading of the segment but to send it to the CDN and the player in ready-made small fragments.
-
-This approach helps eliminate the segment duration factor affecting video latency in standard video delivery methods. Therefore, latency for 10-second and 2-second segments will be the same and minimal. The total latency between the CDN server and the viewers will be at most 4 seconds.
-
-Compared to the standard approach, a 6-second segment will be divided into 0.5-2 seconds chunks. Thus, the total latency will be lower.
-
-
-
-
-
-
-## Use low latency streaming
-
-We support [Low Latency streaming](https://gcore.com/news/low-latency-hls/) by default. It means your live streams are automatically transcoded to LL-HLS or LL-DASH protocol when you [create and configure a live stream](/streaming-platform/live-streaming/create-a-live-stream).
-
-Links for embedding the live stream to your own player contain the _/cmaf/_ part and look as follows:
-
- * MPEG-DASH, CMAF (low latency): `https://demo.gvideo.io/cmaf/2675_19146/index.mpd`
- * LL HLS, CMAF (low latency): `https://demo.gvideo.io/cmaf/2675_19146/master.m3u8`
- * Traditional HLS, MPEG TS (no low latency): `https://demo.gvideo.io/mpegts/2675_19146/master_mpegts.m3u8`
-
-
-
-## Switch to legacy HLS modes
-
-Some legacy devices or software require MPEG-TS (.ts) segments for streaming. To ensure full backward compatibility with HLS across all devices and infrastructures, we offer MPEG-TS streaming options.
-
-We produce low-latency and non-low-latency streams in parallel, so you don't have to create a stream specifically for cases when the connection is unstable or a device doesn't support low-latency. Both formats share the same segment sizes, manifest lengths for DVR functionality, and other related capabilities.
-
-
-**Tip**
-
-For modern devices, we recommend using the HLS manifest URL (`hls_cmaf_url`). It's more efficient and is highly compatible with streaming devices.
-
-
-You can get the non-low-latency in the same Links for export section in the Customer Portal:
-
-1\. On the [Video Streaming](https://portal.gcore.com/streaming/streaming/list) page, find the needed video.
-
-2\. In the **Links for export** section, copy the link in the **HLS non-low-latency manifest URL** field. This link contains non low-latency HLSv3 and MPEG TS files as chunks.
-
-
-
-
-
-
+---
+title: "How low latency streaming works"
+sidebarTitle: "Low latency streaming"
+---
+
+Streaming latency is the timespan between the moment a frame is captured and when that frame is displayed on the viewers' screens. Latency occurs because each stream is processed several times during broadcasting to be delivered worldwide:
+
+1. **Encoding (or packaging).** In this step, the streaming service retrieves your stream in any format, converts it into the format for delivery through CDN, and divides it into small fragments.
+
+2. **Transferring.** In this step, CDN servers pull the processed stream, cache it, and send it to the end-users.
+
+3. **Receipt by players.** In this step, end-user players load the fragments and buffer them.
+
+Each step affects latency, so the total timespan can increase to 30–40 seconds, especially if the streaming processing isn't optimized. For some companies (such as sports or metaverse events, or news releases), such latency is too large, and it's crucial to reduce it.
+
+## How does Gcore provide low latency?
+
+The Gcore Video Streaming receives live streams in RTMP or SRT protocols, transcodes them to ABR ([adaptive bitrate](/streaming-platform/live-streams-and-videos-protocols-and-codecs/output-parameters-after-transcoding-bitrate-frame-rate-and-codecs#output-parameters-after-transcoding)), and delivers them via CDN in LL-HLS and LL-DASH protocols.
+
+- LL-HLS (Low Latency HTTP Live Streaming) is an adaptive protocol developed by Apple for live streaming via the Internet. This protocol is based on HTTP, which allows it to be cached on CDN servers and distributed via CDN as static content.
+- LL-DASH (Low Latency Dynamic Adaptive Streaming over HTTP) is a data streaming technology that optimizes media content delivery via the HTTP protocol.
+
+Also, Gcore uses CMAF (Common Media Application Format) as a base for LL-HLS/DASH. CMAF allows dividing segments into chunks (video fragments) for faster delivery over HTTP networks.
+
+LL-HLS and LL-DASH reduce latency to 2–4 sec, depending on the network conditions.
+
+
+ 
+
+
+## How do LL-HLS and LL-DASH work in comparison to the standard approach?
+
+The standard video delivery approach involves sending the entirely created segment to the CDN. Once the CDN receives the complete segment, it transmits it to the player.
+
+With this approach, video latency depends on segment length. For example, if a segment is 6 seconds long, then when requesting and processing the first segment, the player displays a frame that is already 6 seconds late compared to the actual time.
+
+The Low Latency approach uses the CMAF-CTE extension (Chunked Transfer-Encoding), which helps divide live stream segments into small, non-overlapping, and independent fragments (chunks) with a length of 0.5–2 seconds. The independence of the chunks allows the encoder not to wait for the end of the complete loading of the segment but to send it to the CDN and the player in ready-made small fragments.
+
+This approach helps eliminate the segment duration factor affecting video latency in standard video delivery methods. Therefore, latency for 10-second and 2-second segments will be the same and minimal. The total latency between the CDN server and the viewers will be at most 4 seconds.
+
+Compared to the standard approach, a 6-second segment will be divided into 0.5-2 seconds chunks. Thus, the total latency will be lower.
+
+
+ 
+
+
+## Use low latency streaming
+
+We support [Low Latency streaming](https://gcore.com/news/low-latency-hls/) by default. It means your live streams are automatically transcoded to LL-HLS or LL-DASH protocol when you [create and configure a live stream](/streaming-platform/live-streaming/create-a-live-stream).
+
+Links for embedding the live stream to your own player contain the _/cmaf/_ part and look as follows:
+
+- MPEG-DASH, CMAF (low latency): `https://demo.gvideo.io/cmaf/2675_19146/index.mpd`
+- LL HLS, CMAF (low latency): `https://demo.gvideo.io/cmaf/2675_19146/master.m3u8`
+- Traditional HLS, MPEG TS (no low latency): `https://demo.gvideo.io/mpegts/2675_19146/master_mpegts.m3u8`
+
+## Switch to legacy HLS modes
+
+Some legacy devices or software require MPEG-TS (.ts) segments for streaming. To ensure full backward compatibility with HLS across all devices and infrastructures, we offer MPEG-TS streaming options.
+
+We produce low-latency and non-low-latency streams in parallel, so you don't have to create a stream specifically for cases when the connection is unstable or a device doesn't support low-latency. Both formats share the same segment sizes, manifest lengths for DVR functionality, and other related capabilities.
+
+
+ **Tip**
+
+ For modern devices, we recommend using the HLS manifest URL (`hls_cmaf_url`). It's more efficient and is highly compatible with streaming devices.
+
+
+You can get the non-low-latency link in the same **Links for export** section in the Customer Portal:
+
+1. On the [Video Streaming](https://portal.gcore.com/streaming/streaming/list) page, find the needed video.
+
+2. In the **Links for export** section, copy the link in the **HLS non-low-latency manifest URL** field. This link contains non low-latency HLSv3 and MPEG TS files as chunks.
+
+
+ 
+
+
For details on how to get the streams via API, check our [API documentation](https://api.gcore.com/docs/streaming#tag/Streams/operation/get_streams).
\ No newline at end of file
diff --git a/streaming-platform/live-streaming/protocols/rtmp.mdx b/streaming-platform/live-streaming/protocols/rtmp.mdx
new file mode 100644
index 000000000..c8eb268fa
--- /dev/null
+++ b/streaming-platform/live-streaming/protocols/rtmp.mdx
@@ -0,0 +1,111 @@
+---
+title: "The Real Time Messaging Protocol"
+sidebarTitle: "RTMP"
+---
+
+The Real Time Messaging Protocol (RTMP) is the most common way to stream to video streaming platforms. Gcore Live Streaming supports both RTMP and RTMPS.
+
+
+ RTMP is limited to the H264 codec. If you want to use other codecs, please [use SRT](https://gcore.com/docs/streaming-platform/live-streaming/protocols/srt) instead.
+
+ We plan to support H265/HEVC and other extensions from the Enhanced RTMP specification. Stay tuned for updates.
+
+
+## Push streams
+
+Gcore Video Streaming provides two endpoints for pushing a stream: the default one and a backup one. The default endpoint is the one closest to your location. The backup endpoint is in a different location and is used if the default one is unavailable.
+
+By default, Gcore will route your stream to free ingest points with the lowest latency. If you need to set a fixed ingest point or if you need to set the main and backup ingest points in the same region (i.e., to not send streams outside the EU or US), please contact our support team.
+
+### Obtain the server URLs and stream key
+
+There are two ways to obtain the server URLs and stream key: via the Gcore Customer Portal or via the API.
+
+#### Via the Gcore Customer Portal
+
+1. In the **Gcore Customer Portal**, navigate to **Streaming** \> [**Live Streaming**](https://portal.gcore.com/streaming/streaming/list).
+
+
+
+2. Click on the stream you want to push to. This will open the **Live Stream Settings**.
+
+
+
+3. Ensure that the **Ingest type** is set to **Push**.
+4. Ensure that the protocol is set to **RTMP** or **RTMPS** in the **URLs for encoder** section, then copy the **Server** URL and **Stream Key** from that section.
+
+
+
+#### Via the API
+
+You can also obtain the URL and stream key via the Gcore API. The endpoint returns the complete URLs for the default and backup ingest points and the stream key.
+
+Example of the API request:
+
+```http
+GET /streaming/streams/{stream_id}
+```
+
+Example of the API response:
+
+```json
+{
+ "push_url": "rtmp://vp-push-anx2.domain.com/in/123?08cd54f0",
+ "backup_push_url": "rtmp://vp-push-ed1.domain.com/in/123b?08cd54f0",
+ ...
+}
+```
+
+Read more in [the API documentation](https://api.gcore.com/docs/streaming#tag/Streams/operation/post_streams_id).
+
+## Pull streams
+
+Gcore Video Streaming can pull video data from your external server.
+
+Main rules of pulling:
+
+- The URL of the stream to pull from must be **publicly available** and **return a 200 status** for all requests.
+- You can specify **multiple media servers** (separated with space characters) in the **URL** input field. The maximum length of all URLs is 255 characters, and round-robin is used when polling the list of specified servers.
+- If a stream is closed (i.e., its connection is terminated) or there is no video data in the stream for 30 seconds, then subsequent attempts will be made at progressively increasing intervals (10s, 30s, 60s, 5min, 10min).
+- The stream will be deactivated after 24 hours of inactivity.
+- If you need to set an allowlist for access to the stream, please contact support to get an up-to-date list of networks.
+
+### Setting up a pull stream
+
+There are two ways to set up a pull stream: via the Gcore Customer Portal or via the API.
+
+#### Via the Gcore Customer Portal
+
+1. In the **Gcore Customer Portal**, navigate to **Streaming** \> [**Live Streaming**](https://portal.gcore.com/streaming/streaming/list).
+
+
+
+2. Click on the stream you want to pull from. This will open the **Live Stream Settings**.
+
+
+
+3. Ensure that the **Ingest type** is set to **Pull**.
+4. In the **URL** field, insert a link to the stream from your media server.
+5. Click the **Save changes** button on the top right.
+
+#### Via the API
+
+You can also set up a pull stream via the Gcore API. The endpoint accepts the URL of the stream to pull from.
+
+Example of the API request:
+
+```http
+PATCH /streaming/streams/{stream_id}
+```
+
+```json
+{
+ "stream": {
+ "pull": true,
+ "uri": "rtmp://example.com/path/to/stream",
+ ...
+ }
+}
+```
+
+Read more in [the API documentation](https://api.gcore.com/docs/streaming#tag/Streams/operation/patch_streams_id).
\ No newline at end of file
diff --git a/streaming-platform/live-streaming/protocols/srt.mdx b/streaming-platform/live-streaming/protocols/srt.mdx
new file mode 100644
index 000000000..b019abcd0
--- /dev/null
+++ b/streaming-platform/live-streaming/protocols/srt.mdx
@@ -0,0 +1,108 @@
+---
+title: "The Secure Reliable Transport Protocol"
+sidebarTitle: "SRT"
+---
+
+Secure Reliable Transport (SRT) is an open-source streaming protocol that solves some of the limitations of RTMP delivery. In contrast to RTMP/RTMPS, SRT is a UDP-based protocol that provides low-latency streaming over unpredictable networks. On Gcore Video Streaming, SRT is also required if you want to use the H265/HEVC codec.
+
+## Push streams
+
+Gcore Video Streaming provides two endpoints for pushing a stream: the default one and a backup one. The default endpoint is the one closest to your location. The backup endpoint is in a different location and is used if the default one is unavailable.
+
+By default, Gcore will route your stream to free ingest points with the lowest latency. If you need to set a fixed ingest point or if you need to set the main and backup ingest points in the same region (i.e., to not send streams outside the EU or US), please contact our support team.
+
+### Obtain the server URLs
+
+There are two ways to obtain the SRT server URLs: via the Gcore Customer Portal or via the API.
+
+#### Via the Gcore Customer Portal
+
+1. In the **Gcore Customer Portal**, navigate to **Streaming** \> [**Live Streaming**](https://portal.gcore.com/streaming/streaming/list).
+
+
+
+2. Click on the stream you want to push to. This will open the **Live Stream Settings**.
+
+
+
+3. Ensure that the **Ingest type** is set to **Push**.
+4. Ensure that the protocol is set to **SRT** in the **URLs for encoder** section.
+5. Copy the server URL from the **Push URL SRT** field.
+
+
+
+#### Via the API
+
+You can also obtain the URL and stream key via the Gcore API. The endpoint returns the complete URLs for the default and backup ingest points, as well as the stream key.
+
+Example of the API request:
+
+```http
+GET /streaming/streams/{stream_id}
+```
+
+Example of the API response:
+
+```json
+{
+ "push_url": "srt://vp-push-anx2.domain.com/in/123?08cd54f0",
+ "backup_push_url": "srt://vp-push-ed1.domain.com/in/123b?08cd54f0",
+ ...
+}
+```
+
+Read more in [the API documentation](https://api.gcore.com/docs/streaming#tag/Streams/operation/post_streams_id).
+
+## Pull streams
+
+Gcore Video Streaming can pull video data from your external server.
+
+Main rules of pulling:
+
+- The URL of the stream to pull from must be **publicly available** and **return a 200 status** for all requests.
+- You can specify **multiple media servers** (separated with space characters) in the **URL** input field. The maximum length of all URLs is 255 characters, and round-robin is used when polling the list of specified servers.
+- If a stream is closed (i.e., its connection is terminated) or there is no video data in the stream for 30 seconds, then subsequent attempts will be made at progressively increasing intervals (10s, 30s, 60s, 5min, 10min).
+- The stream will be deactivated after 24 hours of inactivity.
+- If you need to set an allowlist for access to the stream, please contact support to get an up-to-date list of networks.
+
+### Setting up a pull stream
+
+There are two ways to set up a pull stream: via the Gcore Customer Portal or via the API.
+
+#### Via the Gcore Customer Portal
+
+1. In the **Gcore Customer Portal**, navigate to **Streaming** \> [**Live Streaming**](https://portal.gcore.com/streaming/streaming/list).
+
+
+
+2. Click on the stream you want to pull from. This will open the **Live Stream Settings**.
+
+
+
+3. Ensure that the **Ingest type** is set to **Pull**.
+4. In the **URL** field, insert a link to the stream from your media server.
+5. Click the **Save changes** button on the top right.
+
+
+
+#### Via the API
+
+You can also set up a pull stream via the Gcore API. The endpoint accepts the URL of the stream to pull from.
+
+Example of the API request:
+
+```http
+PATCH /streaming/streams/{stream_id}
+```
+
+```json
+{
+ "stream": {
+ "pull": true,
+ "uri": "srt://example.com/path/to/stream",
+ ...
+ }
+}
+```
+
+Read more in [the API documentation](https://api.gcore.com/docs/streaming#tag/Streams/operation/patch_streams_id).
\ No newline at end of file
diff --git a/streaming-platform/live-streaming/webrtc-to-hls-transcoding.mdx b/streaming-platform/live-streaming/protocols/webrtc.mdx
similarity index 98%
rename from streaming-platform/live-streaming/webrtc-to-hls-transcoding.mdx
rename to streaming-platform/live-streaming/protocols/webrtc.mdx
index 6769e0aa2..8cf91275c 100644
--- a/streaming-platform/live-streaming/webrtc-to-hls-transcoding.mdx
+++ b/streaming-platform/live-streaming/protocols/webrtc.mdx
@@ -1,513 +1,513 @@
----
-title: WebRTC ingest and transcoding to HLS/DASH
-sidebarTitle: WebRTC ingest and transcoding to HLS/DASH
----
-
-Streaming videos using HLS and MPEG-DASH protocols is a simple and cost-effective way to show your video to large audiences. However, this requires the original streams to be in a certain format that browsers do not support natively.
-
-At the same time, WebRTC protocol works in any browser, but it's not as flexible when streaming to large audiences.
-
-Gcore [Video Streaming](https://gcore.com/streaming-platform) supports both WebRTC HTTP Ingest Protocol (WHIP) and WebRTC to HLS/DASH converter, giving you the advantages of these protocols.
-
-
-
-
-
-
-## Advantages of WebRTC and conversion to HLS/DASH
-
-WebRTC ingest for streaming offers two key advantages over traditional RTMP and SRT protocols:
-
-1\. It runs directly in the presenter's browser, so no additional software is needed.
-
-2\. WebRTC can reduce stream latency.
-
-By using WebRTC WHIP for ingest, you can convert WebRTC to HLS/DASH playback, which provides the following benefits:
-
- * Fast ingest via WebRTC from a browser.
- * Optimal stream distribution using HLS/DASH with [adaptive bitrate streaming](/streaming-platform/live-streams-and-videos-protocols-and-codecs/output-parameters-after-transcoding-bitrate-frame-rate-and-codecs#what-is-transcoding-with-abr) (ABR) through the CDN.
-
-
-
-
-
-
-## How it works
-
-We use a dedicated WebRTC WHIP server to manage WebRTC ingest. This server handles both signaling and video data reception. Such a setup allows you to configure WebRTC on demand and continue to use all system capabilities to set up transcoding and delivery via CDN.
-
-The RTC WHIP server organizes signaling and receives video data. Signaling refers to the communication between WebRTC endpoints that are necessary to initiate and maintain a session. WHIP is an open specification for a simple signaling protocol that starts WebRTC sessions in an outgoing direction, such as streaming from your device.
-
-We use local servers in each region to ensure a minimal route from a user-presenter to the server.
-
-### WebRTC stream encoding parameters
-
-The stream must include at least one video track and one audio track:
-
- * Video must be encoded using H.264.
- * Audio must use OPUS codec.
-
-
-
-If you use [OBS](https://obsproject.com/) or your own WHIP library, use the following video encoding parameters:
-
- * Codec H.264 with no B-frames and fast encoding:
-
- * **Encoder** : x264, or any of H.264
- * **CPU usage** : very fast
- * **Keyframe interval** : 1 sec
- * **Profile** : baseline
- * **Tune** : zero latency
- * **x264 options** : bframes=0 scenecut=0
- * Bitrate:
-
- * The lower the bitrate, the faster the data will be transmitted to the server. Choose the optimal one for your video. For example, 1-2 Mbps is usually enough for video broadcasts of online training format or online broadcasts with a presenter.
-
-
-
-For example, you might have the following settings in OBS:
-
-
-
-
-
-
-### Supported WHIP clients
-
-You can use any libraries to send data via the WebRTC WHIP protocol.
-
- * Gcore WebRTC WHIP client
- * [OBS](https://obsproject.com/) (Open Broadcaster Software)
- * [@eyevinn/whip-web-client](https://web.whip.eyevinn.technology/)
- * [whip-go](https://github.com/ggarber/whip-go)
- * Larix Broadcaster (free apps for iOS and Android with WebRTC based on Pion; SDK is available)
-
-
-
-### LL-HLS and LL-DASH outputs
-
-Streams sent via WebRTC are transcoded in the same way as other streams received via RTMP and SRT.
-
-At the output, you can view the streams using any available protocols:
-
- * **MPEG-DASH** : ±2-4 seconds latency to a viewer with ABR.
- * **LL-HLS** : ±3-4 seconds latency to a viewer with ABR.
- * **HLS MPEG-TS** : legacy with [non-low-latency](/streaming-platform/live-streams-and-videos-protocols-and-codecs/how-low-latency-streaming-works#switch-to-legacy-hls-modes) (±10 seconds latency) with ABR.
-
-
-
-For WebRTC mode, we use a method of constant transcoding with an initial given resolution. This means that if WebRTC in a viewer's browser reduces the quality or resolution of the master stream (for example, to 360p) due to restrictions on the viewer's device (such as network conditions or CPU consumption), the transcoder will continue to transcode the reduced stream to the initial resolution (for example 1080p ABR).
-
-When the restrictions on the viewer's device are removed, quality will improve again.
-
-
-**Tip**
-
-For more details about low-latency streaming, check out [How low-latency streaming works](/streaming-platform/live-streams-and-videos-protocols-and-codecs/how-low-latency-streaming-works).
-
-
-## Convert WebRTC to HLS in the Customer Portal
-
-For instructions on how to convert a stream via API, refer to the [API documentation](https://api.gcore.com/docs/streaming#tag/Streams/operation/get_streams_id).
-
-1\. In the Gcore Customer Portal, navigate to **Streaming**.
-
-2\. Open the Live Streaming** page and find a needed live stream. If you don't have one, create a stream first.
-
-3\. Click the stream name to open its settings.
-
-4\. In the **Quick start in browser** section, click **Go Live**. The broadcast will start automatically.
-
-
-
-
-
-
-5\. Allow Gcore to access your camera and microphone. In several seconds the HLS/DASH stream will appear in an HTML video player.
-
-You'll see the result under the **Video preview** instead of a black area with the "No active streams found" message. This large window of an HTML video player is the transcoded version of the stream in HLS/DASH protocols using adaptive bitrate.
-
-
-
-
-
-
-A small window in the top-right corner is from your camera. It shows the stream taken from the webcam.
-
-There are also settings for selecting a camera and microphone if you have more than one option on your device.
-
-## Convert WebRTC to HLS in your environment
-
-We provide a [WebRTC WHIP library](https://rtckit.gvideo.io/0.72.0/index.esm.js) for working in browsers. It implements the basic system calls and simplifies working with WebRTC:
-
- * Wrapper for initializing WebRTC stream and connecting to the server.
- * Camera and mic wrapper.
- * Monitoring WebRTC events and calling appropriate handlers in your code.
-
-
-
-The latest library version, 0.72.0, is available at https://rtckit.gvideo.io/0.72.0/index.esm.js.
-
-### Start a stream with the Gcore WebRTC WHIP library
-
-Since WHIP is an open standard, many libraries have been released for it in different languages. You can use our [WebRTC WHIP](https://rtckit.gvideo.io/0.72.0/index.esm.js) or any other library specified in the [WHIP clients](/streaming-platform/live-streaming/webrtc-to-hls-transcoding#supported-whip-clients) section.
-
-Using our library, you can start the conversion with a few lines of code. To go live immediately, create a live stream in the Gcore Streaming dashboard and paste a URL into the example linked below:
-
-1\. In the Gcore Customer Portal, open the [Live Streaming](https://portal.gcore.com/streaming/streaming/editor/1740470) page.
-
-2\. Open the stream settings and copy a WHIP URL from the **WebRTC = > HLS parameters** section.
-
-3\. Open [WHIP demo app](https://stackblitz.com/edit/stackblitz-starters-j2r9ar?file=index.html) and paste the WHIP URL into the `WHIP_ENDPOINT const`.
-
-
-
-
-
-
-4\. Click the **Start** button. The steam will be started in the Customer Portal.
-
-You can find the technical reference manual on data types, interfaces, methods, and other components in the [gcorevideo/rtckit](https://github.com/G-Core/gcore-webrtc-sdk-js/blob/main/packages/rtckit/docs/api/rtckit.md) repository.
-
-### Start a stream with your own backend or frontend
-
-
-
- To create a new stream, send a POST request to the following endpoint: `https://api.gcore.com/streaming/streams`.
-
-Example request:
-
-```sh
-curl -L 'https://api.gcore.com/streaming/streams' \
--H 'Content-Type: application/json' \
--H 'Authorization: APIKey 1111$3ec8…9604e' \
--d '{
- "name": "WebRTC to HLS demo",
- "active": true
-}'
- ```
-
-Example response:
-
-```json
-{
- "id": 1683264,
- "name": "WebRTC to HLS demo",
- "push_url_whip": "https://whip.gvideo.co/1965207_561f4742ec38ae6386a6e7e637c03041/whip", …
-}
- ```
-
-Use the `"push_url_whip"` value from the response to start the stream.
-
-
- Get access and data from the microphone and camera:
-
-```js
-import { WebrtcStreaming } from 'https://rtckit.gvideo.io/0.68.2/index.esm.js';
-const WHIP_ENDPOINT = '{push_url_whip}';
-const webrtc = new WebrtcStreaming(WHIP_ENDPOINT, {...});
-```
-
-Send a local stream to the WHIP server:
-
-```js
-webrtc.openSourceStream({
- audio: mic,
- video: cam,
- resolution: 1080,
-})
- ```
-
-Note that if a user stops streaming to the ingester, for example, by closing the browser tab, the stream settings will be terminated. When the user resumes streaming from any browser, the ingester will pick up the stream. However, there will be a brief delay before the ingested stream becomes playable.
-
-If a user tries to stream to the same endpoint where another user is already streaming, the former will get an error message from the media server. The current stream will remain uninterrupted.
-
-
-
-### Play HLS or DASH
-
-After sending the stream from frontend, the stream will start transcoding. In ±2-7 seconds, the HLS and MPEG-DASH versions will be ready for viewing.
-
-The stream can be viewed through the built-in web player or using direct links to the manifests. You can take these links from the API response.
-
-Examples:
-
- * Web player: https://player.gvideo.co/streams/102748_1965207
- * LL-HLS manifest: https://102748.gvideo.io/cmaf/102748_1965207/master.m3u8
- * DASH manifest: https://102748.gvideo.io/cmaf/102748_1965207/index.mpd
-
-
-
-Send a GET request to the following endpoint: `https://api.gcore.com/streaming/streams/{id}`.
-
-Example request:
-
-```sh
-curl -L 'https://api.gcore.com/streaming/streams/1965207' \
--H 'Authorization: APIKey 1111$3ec8…9604e'
- ```
-
-Example response:
-
-```json
-
-{
- "id": 1965207,
- "iframe_url": "https://player.gvideo.co/streams/102748_1965207",
- "hls_cmaf_url": "https://demo-public.gvideo.io/cmaf/102748_1965207/master.m3u8",
- "hls_mpegts_url": "https://demo-public.gvideo.io/mpegts/102748_1965207/master_mpegts.m3u8",
- "dash_url": "https://demo-public.gvideo.io/cmaf/102748_1965207/index.mpd",
- …
-}
-
- ```
-
-### Deactivate a finished stream
-
-
-
- Update the stream by sending a PATCH request to the following endpoint: `https://api.gcore.com/streaming/streams/{id}`.
-
-Example request:
-
-```sh
-curl -L -X PATCH 'https://api.gcore.com/streaming/streams/1965207' \
--H 'Content-Type: application/json' \
--H 'Authorization: APIKey 1111$3ec8…9604e' ' \
--d '{
- "active": false
- }'
-```
-
-Alternatively, you can delete the stream by sending the DELETE request to `https://api.gcore.com/streaming/streams/$id`.
-
-Example request:
-
-```sh
-curl -L -X DELETE 'https://api.gcore.com/streaming/streams/1965207' \
--H 'Authorization: APIKey 1111$3ec8…9604e'
- ```
-
-
- Example command to close the stream: `webrtc.close()`
-
-
-
-### Demo projects of streaming with frontend and backend
-
-
-
- You can find a detailed description of this version above. To view the full code, inspect the https://stackblitz.com/edit/stackblitz-starters-j2r9ar?file=index.html.
-
-
-
-
-
- This demo depicts a complete frontend and backend implementation with the Nuxt framework. It's a fully functional prebuilt version with a demo stream from our demo server.
-
-The implementation includes: stream generation, initialization of WebRTC data in a browser, video transmission from the browser to the server, and displaying the HLS/DASH web player with transcoded broadcast.
-
-We've added the demo instance and source code to help you explore the implementation in action:
-
- * Demo app – https://gcore-webrtc-sdk-js-nuxt.vercel.app/host?token=123
- * Source code – https://github.com/G-Core/gcore-webrtc-sdk-js/tree/main/apps/ingest-demo-nuxt
-
-
-
-To start streaming:
-
-1\. Select your camera and microphone
-
-2\. In the **Host** section, click **Start** under the video preview.
-
-3\. Click the **Watch** link.
-
-
-
-
-
-
-## Troubleshooting
-
-If you experience issues related to our streaming service, check out the following sections. They outline common problems and recommended resolution steps.
-
-### Error handling
-
-**NetworkError**
-
-For details, refer to [NetworkError class](https://github.com/G-Core/gcore-webrtc-sdk-js/blob/main/packages/rtckit/docs/api/rtckit.networkerror.md).
-
-The ingestion service is unavailable or is unreachable from the client's network. The error message includes a description of the error cause.
-
-In such cases, the application should render itself unavailable and report the error to Gcore support. The app should not retry the operation, as the retry logic is already implemented in the SDK.
-
-**ServerRequestError**
-
-For details, check out [ServerRequestError class](https://github.com/G-Core/gcore-webrtc-sdk-js/blob/main/packages/rtckit/docs/api/rtckit.serverrequesterror.md).
-
-The ingestion server returned an error, which can be identified by inspecting the `status` and `detail` fields of the error object.
-
-
-
-| HTTP status code | Explanation | Example |
-|------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| 400 | The client’s request is wrong. It may be due to the incorrect request parameters sent by the WHIP client. If you see this error with an unintelligible description or with no description at all, contact the [Gcore support team](mailto:support@gcore.com).
A special case to note is when multiple clients attempt to stream to the same endpoint simultaneously. Check the example for details. | **err.message**: Server request failed with status 400
**err.status**: 400
**err.detail**: `{"error": "someone is already publishing to the path '1960197_XXX'"}` |
-| 403 | The endpoint you are trying to connect to is unreachable.
Probable causes: - A stream doesn’t exist.
- A stream was not closed correctly, but you push to that broken stream (time out is ±30 seconds. In this case, try again after that time).
- Your token is invalid.
- Another stream setting prevents it from ingesting WebRTC.
| **err.message**: Server request failed with status 403
**err.status**: 403 |
-| 500, 502, 503, 504, 5xx (infrequently) | Gcore infrastructure is experiencing pressure or outage. Contact the [Gcore support team](mailto:support@gcore.com).
The app should render itself unavailable. It should not retry the request. | **err.message**: Server request failed with status 504
**err.status**: 504 |
-
-
-
-
-**TimeoutError**
-
-For details, check out [TimeoutError class](https://github.com/G-Core/gcore-webrtc-sdk-js/blob/main/packages/rtckit/docs/api/rtckit.timeouterror.md).
-
-Some operation has timed out.
-
-
-
-| Error message | Explanation |
-|---------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| Timeout waiting for ICE candidates | **Cause 1**. Incorrectly configured [ICE servers](https://github.com/G-Core/gcore-webrtc-sdk-js/blob/3e45d6e8beebcc7221625bd9e3b3b1749d9405ae/packages/rtckit/docs/api/rtckit.whipclientoptions.md). The default configuration (when no ICE servers are specified) is to fetch a list of them from the WHIP endpoint.
**Cause 2**. ICE servers fetched from the WHIP endpoint are unreachable from your client’s location. This is very unlikely to happen.
In both cases, start by checking the ICE servers your WebRTC uses as described in the [Network troubleshooting section](/streaming-platform/live-streaming/webrtc-to-hls-transcoding#network-troubleshooting). If that doesn’t work, contact the [Gcore support team](mailto:support@gcore.com).
The client app should render itself unavailable due to network conditions as an explanation. |
-
-
-
-
-Other types of errors are described in our [SDK docs](https://github.com/G-Core/gcore-webrtc-sdk-js/blob/main/packages/rtckit/docs/api/rtckit.md). End-users should not encounter these errors, and there is no way to handle them in a real application apart from reporting the error occurrence.
-
-Some SDK methods might also throw browser's native exceptions, such as [WebrtcStreaming.openSourceStream](https://github.com/G-Core/gcore-webrtc-sdk-js/blob/main/packages/rtckit/docs/api/rtckit.webrtcstreaming.opensourcestream.md) and the methods of the [MediaDevicesHelper](https://github.com/G-Core/gcore-webrtc-sdk-js/blob/main/packages/rtckit/docs/api/rtckit.mediadeviceshelper.md) throw [getUserMedia-originated exceptions](https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia#exceptions). The application should handle them accordingly.
-
-### Sudden disconnection of camera or microphone
-
-Sometimes, users use external or plug-in cameras and microphones, and these devices can be disconnected at any time. For example:
-
- * A USB camera cable might be unplugged.
- * AirPods may be placed back in their case.
-
-
-
-If a camera or microphone is accidentally disconnected, you need to track such cases programmatically. Enable the `mediaDevicesAutoSwitch` option and subscribe to the event:
-
- * set mediaDevicesAutoSwitch: true
- * catch WebrtcStreamingEvents
-
-
-
-The new algorithm ensures uninterrupted broadcasting by prompting the browser to switch to another available camera or microphone if the current device becomes unavailable.
-
-When such a situation occurs, you will know which device was disconnected and which one was connected instead. This will allow you to visualize (if necessary) the new connected device in your interface.
-
-
-
-
-
-
-### Debugging with Chrome WebRTC internals tool
-
-Chrome is really good at working with WebRTC because it has a built-in tool to help developers see how things are working.
-
-Chrome v87+ has a special page called chrome://webrtc-internals where you can check your WebRTC calls:
-
-1\. Open a new Chrome tab and navigate to chrome://webrtc-internals while you're in a WebRTC call. On this page, you can view detailed information about the video and audio streams, connection setup, and more.
-
-2\. Use the provided information to find potential problems. For instance, when videos won't play, calls won't connect, or videos are slow.
-
-One of the parameters you can monitor in Stats graphs for candidate-pair:
-
- * **AvailableOutgoingBitrate**
-
-
-
-
-
-
-You can also follow the following parameters from the **Stats graphs for outbound-rtp** :
-
- * bytesSent_in_bits/s
- * targetBitrate
- * frameWidth
- * frameHeight
- * framesSent/s
-
-
-
-For example, consider how unevenly frames are sent from the browser in the following screenshot:
-
-
-
-
-
-
-### Network troubleshooting
-
-#### Video stream is poorly transcoded or constantly stops
-
-If a stream in the player constantly stops, is interrupted, or has poor quality, the issue is likely related to slow transmission of the original stream from a presenter via WebRTC.
-
-WebRTC is very demanding of the quality of internet connection from client to server. At the same time, standard implementations take into account many parameters on a local device, which can cause slower transmission of data or even stop it altogether until conditions are improved.
-
-To diagnose such situations:
-
-1\. Use the **VideoResolutionChangeDetector** plugin. It allows you to show a message about bad network conditions on a viewer's device.
-
-2\. Use Chrome's WebRTC debug tool that's available via this link: chrome:\webrtc-internals.
-
-Network congestion, occurring when resource demand surpasses capacity, leads to packet loss, increased latency, and jitter, hindering real-time communication, with congestion control algorithms optimizing performance by regulating data packet flow. You can read how WebRTC uses Transport Wide Congestion Control (TWCC) to control it in [thearticle about TWCC](https://bloggeek.me/webrtcglossary/transport-cc/).
-
-The available bitrate is calculated in the **availableOutgoingBitrate** parameter, which indicates the available outbound capacity of the network connection. The higher the value, the more bandwidth you can assume is available for outgoing data. The value is reported in bits per second and is computed over a 1-second interval.
-
-The most likely scenario for quality degradation occurs here when the channel width becomes insufficient to send good resolution.
-
-However, sometimes the connection is even worse when packets are lost. In this case, the server starts sending NACK (Negative Acknowledgement) packets. You can read more about this issue in the [NACK overview article](https://bloggeek.me/webrtcglossary/nack/).
-
-More and more data start to be resent, which leads to increased latency or gaps in frames. In this case, the transcoder doesn't receive frames on time, causing the video to interrupt or stop altogether. You can monitor and debug this issue in Chrome's webrtc-internals tool:
-
-
-
-
-
-
-What to do in such situations:
-
- * Always show users a message about changed conditions. In 99% of cases, the issue is related to the user's internet conditions.
- * Use TCP as the delivery protocol instead of UDP.
- * Use the TURN server for delivery instead of sending directly to the media server.
-
-
-
-#### Issues with ICE servers
-
-If you experience problems with timeout waiting for an ICE candidate, check your ICE server configuration.
-
-ICE servers used by the WHIP client can be configured explicitly using the iceServers [configuration option](https://github.com/G-Core/gcore-webrtc-sdk-js/blob/3e45d6e8beebcc7221625bd9e3b3b1749d9405ae/packages/rtckit/docs/api/rtckit.whipclientoptions.md). Otherwise, they are fetched from Gcore's media server in the response to a session initiation request.
-
-In the case of the latter, check what the server returns in the `Link` headers. For example:
-
-
-
-
-```
-Link: ; rel="ice-server"
-Link: ; rel="ice-server"; username="1730558739:0nu0id47meqbsyvpz743"; credential="IswwB19KAEWQujy3X/c4D9GjZj8="; credential-type="password"
-Link: ...
-```
-
-You can also inspect the servers using chrome://webrtc-internals or an alternative tool:
-
-
-
-
-
-
-After you verify your server configuration, use the [Trickle ICE](https://webrtc.github.io/samples/src/content/peerconnection/trickle-ice/) app to test the servers.
-
-Add a STUN or TURN server and check how it works. If everything functions correctly, the results will show:
-
- * A srvrflx candidate for STUN server
- * A relay candidate for a TURN server
-
-
-
-If you don't see these results, your STUN or TURN server may be misconfigured, or there is an outage.
-
+---
+title: WebRTC ingest and transcoding to HLS/DASH
+sidebarTitle: WebRTC ingest and transcoding to HLS/DASH
+---
+
+Streaming videos using HLS and MPEG-DASH protocols is a simple and cost-effective way to show your video to large audiences. However, this requires the original streams to be in a certain format that browsers do not support natively.
+
+At the same time, WebRTC protocol works in any browser, but it's not as flexible when streaming to large audiences.
+
+Gcore [Video Streaming](https://gcore.com/streaming-platform) supports both WebRTC HTTP Ingest Protocol (WHIP) and WebRTC to HLS/DASH converter, giving you the advantages of these protocols.
+
+
+
+
+
+
+## Advantages of WebRTC and conversion to HLS/DASH
+
+WebRTC ingest for streaming offers two key advantages over traditional RTMP and SRT protocols:
+
+1\. It runs directly in the presenter's browser, so no additional software is needed.
+
+2\. WebRTC can reduce stream latency.
+
+By using WebRTC WHIP for ingest, you can convert WebRTC to HLS/DASH playback, which provides the following benefits:
+
+ * Fast ingest via WebRTC from a browser.
+ * Optimal stream distribution using HLS/DASH with [adaptive bitrate streaming](/streaming-platform/live-streams-and-videos-protocols-and-codecs/output-parameters-after-transcoding-bitrate-frame-rate-and-codecs#what-is-transcoding-with-abr) (ABR) through the CDN.
+
+
+
+
+
+
+## How it works
+
+We use a dedicated WebRTC WHIP server to manage WebRTC ingest. This server handles both signaling and video data reception. Such a setup allows you to configure WebRTC on demand and continue to use all system capabilities to set up transcoding and delivery via CDN.
+
+The RTC WHIP server organizes signaling and receives video data. Signaling refers to the communication between WebRTC endpoints that are necessary to initiate and maintain a session. WHIP is an open specification for a simple signaling protocol that starts WebRTC sessions in an outgoing direction, such as streaming from your device.
+
+We use local servers in each region to ensure a minimal route from a user-presenter to the server.
+
+### WebRTC stream encoding parameters
+
+The stream must include at least one video track and one audio track:
+
+ * Video must be encoded using H.264.
+ * Audio must use OPUS codec.
+
+
+
+If you use [OBS](https://obsproject.com/) or your own WHIP library, use the following video encoding parameters:
+
+ * Codec H.264 with no B-frames and fast encoding:
+
+ * **Encoder** : x264, or any of H.264
+ * **CPU usage** : very fast
+ * **Keyframe interval** : 1 sec
+ * **Profile** : baseline
+ * **Tune** : zerolatency
+ * **x264 options** : bframes=0 scenecut=0
+ * Bitrate:
+
+ * The lower the bitrate, the faster the data will be transmitted to the server. Choose the optimal one for your video. For example, 1-2 Mbps is usually enough for video broadcasts of online training format or online broadcasts with a presenter.
+
+
+
+For example, you might have the following settings in OBS:
+
+
+
+
+
+
+### Supported WHIP clients
+
+You can use any libraries to send data via the WebRTC WHIP protocol.
+
+ * Gcore WebRTC WHIP client
+ * [OBS](https://obsproject.com/) (Open Broadcaster Software)
+ * [@eyevinn/whip-web-client](https://web.whip.eyevinn.technology/)
+ * [whip-go](https://github.com/ggarber/whip-go)
+ * Larix Broadcaster (free apps for iOS and Android with WebRTC based on Pion; SDK is available)
+
+
+
+### LL-HLS and LL-DASH outputs
+
+Streams sent via WebRTC are transcoded in the same way as other streams received via RTMP and SRT.
+
+At the output, you can view the streams using any available protocols:
+
+ * **MPEG-DASH** : ±2-4 seconds latency to a viewer with ABR.
+ * **LL-HLS** : ±3-4 seconds latency to a viewer with ABR.
+ * **HLS MPEG-TS** : legacy with [non-low-latency](/streaming-platform/live-streaming/how-low-latency-streaming-works#switch-to-legacy-hls-modes) (±10 seconds latency) with ABR.
+
+
+
+For WebRTC mode, we use a method of constant transcoding with an initial given resolution. This means that if WebRTC in a viewer's browser reduces the quality or resolution of the master stream (for example, to 360p) due to restrictions on the viewer's device (such as network conditions or CPU consumption), the transcoder will continue to transcode the reduced stream to the initial resolution (for example 1080p ABR).
+
+When the restrictions on the viewer's device are removed, quality will improve again.
+
+
+**Tip**
+
+For more details about low-latency streaming, check out [How low-latency streaming works](/streaming-platform/live-streaming/how-low-latency-streaming-works).
+
+
+## Convert WebRTC to HLS in the Customer Portal
+
+For instructions on how to convert a stream via API, refer to the [API documentation](https://api.gcore.com/docs/streaming#tag/Streams/operation/get_streams_id).
+
+1\. In the Gcore Customer Portal, navigate to **Streaming**.
+
+2\. Open the **Live Streaming** page and find a needed live stream. If you don't have one, create a stream first.
+
+3\. Click the stream name to open its settings.
+
+4\. In the **Quick start in browser** section, click **Go Live**. The broadcast will start automatically.
+
+
+
+
+
+
+5\. Allow Gcore to access your camera and microphone. In several seconds the HLS/DASH stream will appear in an HTML video player.
+
+You'll see the result under the **Video preview** instead of a black area with the "No active streams found" message. This large window of an HTML video player is the transcoded version of the stream in HLS/DASH protocols using adaptive bitrate.
+
+
+
+
+
+
+A small window in the top-right corner is from your camera. It shows the stream taken from the webcam.
+
+There are also settings for selecting a camera and microphone if you have more than one option on your device.
+
+## Convert WebRTC to HLS in your environment
+
+We provide a [WebRTC WHIP library](https://rtckit.gvideo.io/0.72.0/index.esm.js) for working in browsers. It implements the basic system calls and simplifies working with WebRTC:
+
+ * Wrapper for initializing WebRTC stream and connecting to the server.
+ * Camera and mic wrapper.
+ * Monitoring WebRTC events and calling appropriate handlers in your code.
+
+
+
+The latest library version, 0.72.0, is available at https://rtckit.gvideo.io/0.72.0/index.esm.js.
+
+### Start a stream with the Gcore WebRTC WHIP library
+
+Since WHIP is an open standard, many libraries have been released for it in different languages. You can use our [WebRTC WHIP](https://rtckit.gvideo.io/0.72.0/index.esm.js) or any other library specified in the [WHIP clients](/streaming-platform/live-streaming/webrtc-to-hls-transcoding#supported-whip-clients) section.
+
+Using our library, you can start the conversion with a few lines of code. To go live immediately, create a live stream in the Gcore Streaming dashboard and paste a URL into the example linked below:
+
+1\. In the Gcore Customer Portal, open the [Live Streaming](https://portal.gcore.com/streaming/streaming/editor/1740470) page.
+
+2\. Open the stream settings and copy a WHIP URL from the **WebRTC => HLS parameters** section.
+
+3\. Open [WHIP demo app](https://stackblitz.com/edit/stackblitz-starters-j2r9ar?file=index.html) and paste the WHIP URL into the `WHIP_ENDPOINT const`.
+
+
+
+
+
+
+4\. Click the **Start** button. The stream will be started in the Customer Portal.
+
+You can find the technical reference manual on data types, interfaces, methods, and other components in the [gcorevideo/rtckit](https://github.com/G-Core/gcore-webrtc-sdk-js/blob/main/packages/rtckit/docs/api/rtckit.md) repository.
+
+### Start a stream with your own backend or frontend
+
+
+
+ To create a new stream, send a POST request to the following endpoint: `https://api.gcore.com/streaming/streams`.
+
+Example request:
+
+```sh
+curl -L 'https://api.gcore.com/streaming/streams' \
+-H 'Content-Type: application/json' \
+-H 'Authorization: APIKey 1111$3ec8…9604e' \
+-d '{
+ "name": "WebRTC to HLS demo",
+ "active": true
+}'
+ ```
+
+Example response:
+
+```json
+{
+ "id": 1683264,
+ "name": "WebRTC to HLS demo",
+ "push_url_whip": "https://whip.gvideo.co/1965207_561f4742ec38ae6386a6e7e637c03041/whip", …
+}
+ ```
+
+Use the `"push_url_whip"` value from the response to start the stream.
+
+
+ Get access and data from the microphone and camera:
+
+```js
+import { WebrtcStreaming } from 'https://rtckit.gvideo.io/0.72.0/index.esm.js';
+const WHIP_ENDPOINT = '{push_url_whip}';
+const webrtc = new WebrtcStreaming(WHIP_ENDPOINT, {...});
+```
+
+Send a local stream to the WHIP server:
+
+```js
+webrtc.openSourceStream({
+ audio: mic,
+ video: cam,
+ resolution: 1080,
+})
+ ```
+
+Note that if a user stops streaming to the ingester, for example, by closing the browser tab, the stream settings will be terminated. When the user resumes streaming from any browser, the ingester will pick up the stream. However, there will be a brief delay before the ingested stream becomes playable.
+
+If a user tries to stream to the same endpoint where another user is already streaming, the former will get an error message from the media server. The current stream will remain uninterrupted.
+
+
+
+### Play HLS or DASH
+
+After sending the stream from frontend, the stream will start transcoding. In ±2-7 seconds, the HLS and MPEG-DASH versions will be ready for viewing.
+
+The stream can be viewed through the built-in web player or using direct links to the manifests. You can take these links from the API response.
+
+Examples:
+
+ * Web player: https://player.gvideo.co/streams/102748_1965207
+ * LL-HLS manifest: https://102748.gvideo.io/cmaf/102748_1965207/master.m3u8
+ * DASH manifest: https://102748.gvideo.io/cmaf/102748_1965207/index.mpd
+
+
+
+Send a GET request to the following endpoint: `https://api.gcore.com/streaming/streams/{id}`.
+
+Example request:
+
+```sh
+curl -L 'https://api.gcore.com/streaming/streams/1965207' \
+-H 'Authorization: APIKey 1111$3ec8…9604e'
+ ```
+
+Example response:
+
+```json
+
+{
+ "id": 1965207,
+ "iframe_url": "https://player.gvideo.co/streams/102748_1965207",
+ "hls_cmaf_url": "https://demo-public.gvideo.io/cmaf/102748_1965207/master.m3u8",
+ "hls_mpegts_url": "https://demo-public.gvideo.io/mpegts/102748_1965207/master_mpegts.m3u8",
+ "dash_url": "https://demo-public.gvideo.io/cmaf/102748_1965207/index.mpd",
+ …
+}
+
+ ```
+
+### Deactivate a finished stream
+
+
+
+ Update the stream by sending a PATCH request to the following endpoint: `https://api.gcore.com/streaming/streams/{id}`.
+
+Example request:
+
+```sh
+curl -L -X PATCH 'https://api.gcore.com/streaming/streams/1965207' \
+-H 'Content-Type: application/json' \
+-H 'Authorization: APIKey 1111$3ec8…9604e' \
+-d '{
+ "active": false
+ }'
+```
+
+Alternatively, you can delete the stream by sending the DELETE request to `https://api.gcore.com/streaming/streams/{id}`.
+
+Example request:
+
+```sh
+curl -L -X DELETE 'https://api.gcore.com/streaming/streams/1965207' \
+-H 'Authorization: APIKey 1111$3ec8…9604e'
+ ```
+
+
+ Example command to close the stream: `webrtc.close()`
+
+
+
+### Demo projects of streaming with frontend and backend
+
+
+
+ You can find a detailed description of this version above. To view the full code, inspect the https://stackblitz.com/edit/stackblitz-starters-j2r9ar?file=index.html.
+
+
+
+
+
+ This demo depicts a complete frontend and backend implementation with the Nuxt framework. It's a fully functional prebuilt version with a demo stream from our demo server.
+
+The implementation includes: stream generation, initialization of WebRTC data in a browser, video transmission from the browser to the server, and displaying the HLS/DASH web player with transcoded broadcast.
+
+We've added the demo instance and source code to help you explore the implementation in action:
+
+ * Demo app – https://gcore-webrtc-sdk-js-nuxt.vercel.app/host?token=123
+ * Source code – https://github.com/G-Core/gcore-webrtc-sdk-js/tree/main/apps/ingest-demo-nuxt
+
+
+
+To start streaming:
+
+1\. Select your camera and microphone
+
+2\. In the **Host** section, click **Start** under the video preview.
+
+3\. Click the **Watch** link.
+
+
+
+
+
+
+## Troubleshooting
+
+If you experience issues related to our streaming service, check out the following sections. They outline common problems and recommended resolution steps.
+
+### Error handling
+
+**NetworkError**
+
+For details, refer to [NetworkError class](https://github.com/G-Core/gcore-webrtc-sdk-js/blob/main/packages/rtckit/docs/api/rtckit.networkerror.md).
+
+The ingestion service is unavailable or is unreachable from the client's network. The error message includes a description of the error cause.
+
+In such cases, the application should render itself unavailable and report the error to Gcore support. The app should not retry the operation, as the retry logic is already implemented in the SDK.
+
+**ServerRequestError**
+
+For details, check out [ServerRequestError class](https://github.com/G-Core/gcore-webrtc-sdk-js/blob/main/packages/rtckit/docs/api/rtckit.serverrequesterror.md).
+
+The ingestion server returned an error, which can be identified by inspecting the `status` and `detail` fields of the error object.
+
+
+
+| HTTP status code | Explanation | Example |
+|------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| 400 | The client’s request is wrong. It may be due to the incorrect request parameters sent by the WHIP client. If you see this error with an unintelligible description or with no description at all, contact the [Gcore support team](mailto:support@gcore.com).
A special case to note is when multiple clients attempt to stream to the same endpoint simultaneously. Check the example for details. | **err.message**: Server request failed with status 400
**err.status**: 400
**err.detail**: `{"error": "someone is already publishing to the path '1960197_XXX'"}` |
+| 403 | The endpoint you are trying to connect to is unreachable.
Probable causes: - A stream doesn’t exist.
- A stream was not closed correctly, but you push to that broken stream (time out is ±30 seconds. In this case, try again after that time).
- Your token is invalid.
- Another stream setting prevents it from ingesting WebRTC.
| **err.message**: Server request failed with status 403
**err.status**: 403 |
+| 500, 502, 503, 504, 5xx (infrequently) | Gcore infrastructure is experiencing pressure or outage. Contact the [Gcore support team](mailto:support@gcore.com).
The app should render itself unavailable. It should not retry the request. | **err.message**: Server request failed with status 504
**err.status**: 504 |
+
+
+
+
+**TimeoutError**
+
+For details, check out [TimeoutError class](https://github.com/G-Core/gcore-webrtc-sdk-js/blob/main/packages/rtckit/docs/api/rtckit.timeouterror.md).
+
+Some operation has timed out.
+
+
+
+| Error message | Explanation |
+|---------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| Timeout waiting for ICE candidates | **Cause 1**. Incorrectly configured [ICE servers](https://github.com/G-Core/gcore-webrtc-sdk-js/blob/3e45d6e8beebcc7221625bd9e3b3b1749d9405ae/packages/rtckit/docs/api/rtckit.whipclientoptions.md). The default configuration (when no ICE servers are specified) is to fetch a list of them from the WHIP endpoint.
**Cause 2**. ICE servers fetched from the WHIP endpoint are unreachable from your client’s location. This is very unlikely to happen.
In both cases, start by checking the ICE servers your WebRTC uses as described in the [Network troubleshooting section](/streaming-platform/live-streaming/webrtc-to-hls-transcoding#network-troubleshooting). If that doesn’t work, contact the [Gcore support team](mailto:support@gcore.com).
The client app should render itself unavailable due to network conditions as an explanation. |
+
+
+
+
+Other types of errors are described in our [SDK docs](https://github.com/G-Core/gcore-webrtc-sdk-js/blob/main/packages/rtckit/docs/api/rtckit.md). End-users should not encounter these errors, and there is no way to handle them in a real application apart from reporting the error occurrence.
+
+Some SDK methods might also throw browser's native exceptions, such as [WebrtcStreaming.openSourceStream](https://github.com/G-Core/gcore-webrtc-sdk-js/blob/main/packages/rtckit/docs/api/rtckit.webrtcstreaming.opensourcestream.md) and the methods of the [MediaDevicesHelper](https://github.com/G-Core/gcore-webrtc-sdk-js/blob/main/packages/rtckit/docs/api/rtckit.mediadeviceshelper.md) throw [getUserMedia-originated exceptions](https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia#exceptions). The application should handle them accordingly.
+
+### Sudden disconnection of camera or microphone
+
+Sometimes, users use external or plug-in cameras and microphones, and these devices can be disconnected at any time. For example:
+
+ * A USB camera cable might be unplugged.
+ * AirPods may be placed back in their case.
+
+
+
+If a camera or microphone is accidentally disconnected, you need to track such cases programmatically. Enable the `mediaDevicesAutoSwitch` option and subscribe to the event:
+
+ * set mediaDevicesAutoSwitch: true
+ * catch WebrtcStreamingEvents
+
+
+
+The new algorithm ensures uninterrupted broadcasting by prompting the browser to switch to another available camera or microphone if the current device becomes unavailable.
+
+When such a situation occurs, you will know which device was disconnected and which one was connected instead. This will allow you to visualize (if necessary) the new connected device in your interface.
+
+
+
+
+
+
+### Debugging with Chrome WebRTC internals tool
+
+Chrome is really good at working with WebRTC because it has a built-in tool to help developers see how things are working.
+
+Chrome v87+ has a special page called chrome://webrtc-internals where you can check your WebRTC calls:
+
+1\. Open a new Chrome tab and navigate to chrome://webrtc-internals while you're in a WebRTC call. On this page, you can view detailed information about the video and audio streams, connection setup, and more.
+
+2\. Use the provided information to find potential problems. For instance, when videos won't play, calls won't connect, or videos are slow.
+
+One of the parameters you can monitor in Stats graphs for candidate-pair:
+
+ * **AvailableOutgoingBitrate**
+
+
+
+
+
+
+You can also follow the following parameters from the **Stats graphs for outbound-rtp** :
+
+ * bytesSent_in_bits/s
+ * targetBitrate
+ * frameWidth
+ * frameHeight
+ * framesSent/s
+
+
+
+For example, consider how unevenly frames are sent from the browser in the following screenshot:
+
+
+
+
+
+
+### Network troubleshooting
+
+#### Video stream is poorly transcoded or constantly stops
+
+If a stream in the player constantly stops, is interrupted, or has poor quality, the issue is likely related to slow transmission of the original stream from a presenter via WebRTC.
+
+WebRTC is very demanding of the quality of internet connection from client to server. At the same time, standard implementations take into account many parameters on a local device, which can cause slower transmission of data or even stop it altogether until conditions are improved.
+
+To diagnose such situations:
+
+1\. Use the **VideoResolutionChangeDetector** plugin. It allows you to show a message about bad network conditions on a viewer's device.
+
+2\. Use Chrome's WebRTC debug tool that's available via this link: chrome://webrtc-internals.
+
+Network congestion, occurring when resource demand surpasses capacity, leads to packet loss, increased latency, and jitter, hindering real-time communication, with congestion control algorithms optimizing performance by regulating data packet flow. You can read how WebRTC uses Transport Wide Congestion Control (TWCC) to control it in [the article about TWCC](https://bloggeek.me/webrtcglossary/transport-cc/).
+
+The available bitrate is calculated in the **availableOutgoingBitrate** parameter, which indicates the available outbound capacity of the network connection. The higher the value, the more bandwidth you can assume is available for outgoing data. The value is reported in bits per second and is computed over a 1-second interval.
+
+The most likely scenario for quality degradation occurs here when the channel width becomes insufficient to send good resolution.
+
+However, sometimes the connection is even worse when packets are lost. In this case, the server starts sending NACK (Negative Acknowledgement) packets. You can read more about this issue in the [NACK overview article](https://bloggeek.me/webrtcglossary/nack/).
+
+More and more data start to be resent, which leads to increased latency or gaps in frames. In this case, the transcoder doesn't receive frames on time, causing the video to interrupt or stop altogether. You can monitor and debug this issue in Chrome's webrtc-internals tool:
+
+
+
+
+
+
+What to do in such situations:
+
+ * Always show users a message about changed conditions. In 99% of cases, the issue is related to the user's internet conditions.
+ * Use TCP as the delivery protocol instead of UDP.
+ * Use the TURN server for delivery instead of sending directly to the media server.
+
+
+
+#### Issues with ICE servers
+
+If you experience problems with timeout waiting for an ICE candidate, check your ICE server configuration.
+
+ICE servers used by the WHIP client can be configured explicitly using the iceServers [configuration option](https://github.com/G-Core/gcore-webrtc-sdk-js/blob/3e45d6e8beebcc7221625bd9e3b3b1749d9405ae/packages/rtckit/docs/api/rtckit.whipclientoptions.md). Otherwise, they are fetched from Gcore's media server in the response to a session initiation request.
+
+In the case of the latter, check what the server returns in the `Link` headers. For example:
+
+
+
+
+```
+Link: <stun:...>; rel="ice-server"
+Link: <turn:...>; rel="ice-server"; username="1730558739:0nu0id47meqbsyvpz743"; credential="IswwB19KAEWQujy3X/c4D9GjZj8="; credential-type="password"
+Link: ...
+```
+
+You can also inspect the servers using chrome://webrtc-internals or an alternative tool:
+
+
+
+
+
+
+After you verify your server configuration, use the [Trickle ICE](https://webrtc.github.io/samples/src/content/peerconnection/trickle-ice/) app to test the servers.
+
+Add a STUN or TURN server and check how it works. If everything functions correctly, the results will show:
+
+ * A srvrflx candidate for STUN server
+ * A relay candidate for a TURN server
+
+
+
+If you don't see these results, your STUN or TURN server may be misconfigured, or there is an outage.
+
The [Gcore support team](mailto:support@gcore.com) will help you handle that. In the request, include the results of your ICE connectivity check to help us resolve the issue quickly.
\ No newline at end of file
diff --git a/streaming-platform/live-streaming/push-live-streams-software/push-live-streams-via-liveu-solo.mdx b/streaming-platform/live-streaming/push-live-streams-software/push-live-streams-via-liveu-solo.mdx
deleted file mode 100644
index cbd8a9971..000000000
--- a/streaming-platform/live-streaming/push-live-streams-software/push-live-streams-via-liveu-solo.mdx
+++ /dev/null
@@ -1,19 +0,0 @@
----
-title: Push live streams via LiveU Solo
-sidebarTitle: LiveU Solo
----
-
-Install and launch LiveU Solo.
-
-Click on the «Edit Destination» button.
-
-
-
-
-
-
-Find RTMP URL and a stream key in the Gcore Customer Portal according to the [Create a live stream](/streaming-platform/live-streaming/create-a-live-stream) guide. Choose the stream, click edit, and look at the Push URL.
-
-Fill the «Primary Ingress URL» field with this part of PUSH URL: _rtmp://vp-push-ed1.gvideo.co/in/_ and «Stream name» field with the key — all the remaining symbols: _9cb3fdee0836564bd0046dasdb0e4de3sda32af712411313_.
-
-Run the stream.
\ No newline at end of file
diff --git a/streaming-platform/live-streaming/push-live-streams-software/push-live-streams-via-obs.mdx b/streaming-platform/live-streaming/push-live-streams-software/push-live-streams-via-obs.mdx
deleted file mode 100644
index 86793eedb..000000000
--- a/streaming-platform/live-streaming/push-live-streams-software/push-live-streams-via-obs.mdx
+++ /dev/null
@@ -1,130 +0,0 @@
----
-title: Push live streams via OBS
-sidebarTitle: OBS (Open Broadcaster Software)
----
-
-## What is an OBS?
-
-Open Broadcaster Software (OBS) is a free and open-source encoder for video recording, screencasting, and live streaming. It's suitable for video game streaming, blogging, educational content, and more.
-
-OBS links your device (a laptop or a PC) and different streaming platforms (Gcore Video Streaming, YouTube, Twitch, etc.). It takes an image captured by a camera, converts it into a video stream, and then sends it to the streaming platform.
-
-## Configure the OBS encoder for GCore streaming
-
-1\. Download Open Broadcaster Software (OBS) from the [official website](https://obsproject.com) and install it.
-
-2\. Open the **Settings** section and go to the **Stream** tab. Complete the remaining steps in it.
-
-
-
-
-
-
-3\. Select **Custom** from the dropdown list.
-
-4\. Enter the Server URL into the Server field and the unique key into the Stream Key field. To get the Server URL and key, go to the [Streaming list](https://streaming.gcore.com/streaming/list), open the Live stream settings you need, and copy the relevant value from the URLs for the encoder section.
-
-For example, if you see these values on the Live stream settings page:
-
-
-
-
-
-
-paste them to the OBS Settings as follows:
-
- * _rtmp://vp-push-ix1.gvideo.co/in/_ is the Server;
- * _400448?cdf2a7ccf990e464c2b…_ is the Stream Key.
-
-
-
-5\. Click the **Apply** button to save the new configuration.
-
-6\. Go to the main OBS menu, select the source of the stream (video capture device, display capture, etc.), and click **Start Streaming**.
-
-
-
-
-
-
-7\. Once the streaming has started, go to the [Streaming list](https://streaming.gcore.com/streaming/list), open the Live Stream settings, and copy the link to embed the broadcast to your website.
-
-
-
-
-
-
-That's it. The stream from OBS will be broadcast to your website.
-
-## Manage the stream parameters
-
-To ensure optimal streaming performance, we recommend configuring the stream parameters you will send to our server. You can adjust these settings in the Output, Audio, and Video tabs within OBS.
-
-### Output parameters
-
-1\. Open OBS Settings and go to the **Output** tab. Select **Simple** mode.
-
-2\. Set the parameters as follows:
-
- * **Video Bitrate:** The resolution of your stream determines the required bitrate: The higher the resolution, the higher the bitrate. To stream at 720p resolution, set the bitrate to 2000Kbps. If you're broadcasting at 1080p, set the bitrate to 4000Kbps.
- * **Audio Bitrate:** 128.
- * **Encoder:** Software (x264), or any other H264 codec.
-
-
-
-
-
-
-3\. Click **Advanced** mode.
-
-4\. Set the parameters as follows:
-
- * **Rate control:** CRF (the default value is 23)
- * **Keyframe Interval (0=auto):** 2s
- * **CPU Usage Preset:** veryfast
- * **Profile:** baseline
-
-
-
-5\. Click **Apply** to save the configuration.
-
-
-
-
-
-
-### Audio parameters
-
-
-
-
-
-
-1\. Open OBS Settings and go to the Audio tab.
-
-2\. Set the Sample Rate to 44.1 kHz (default) or 48 kHz. Select **Stereo** for the best sound quality.
-
-3\. Click **Apply**.
-
-### Video parameters
-
-If you need to reduce the original resolution (downscale), follow the instructions in this section. If no resolution change is required, you can skip this step.
-
-1\. Open OBS Settings and go to the **Video** tab.
-
-2\. Set the following parameters:
-
- * **Output (Scaled) Resolution:** 1280×720
- * **Downscale Filter:** Bicubic
- * **Common FPS Values:** 30
-
-
-
-3\. Click **Apply**.
-
-
-
-
-
-
-**Hints.** You can see the stream's [output parameters here](/streaming-platform/live-streaming/push-live-streams-software/push-live-streams-via-obs#output-parameters). If you need to increase the FPS to 60, make sure to also increase the bitrate accordingly for optimal stream quality.
\ No newline at end of file
diff --git a/streaming-platform/live-streams-and-videos-protocols-and-codecs/output-parameters-after-transcoding-bitrate-frame-rate-and-codecs.mdx b/streaming-platform/live-streams-and-videos-protocols-and-codecs/output-parameters-after-transcoding-bitrate-frame-rate-and-codecs.mdx
index 25935dbdb..9a3abe346 100644
--- a/streaming-platform/live-streams-and-videos-protocols-and-codecs/output-parameters-after-transcoding-bitrate-frame-rate-and-codecs.mdx
+++ b/streaming-platform/live-streams-and-videos-protocols-and-codecs/output-parameters-after-transcoding-bitrate-frame-rate-and-codecs.mdx
@@ -1,97 +1,76 @@
----
-title: "Output parameters after transcoding: bitrate, frame rate, and codecs"
-sidebarTitle: Output parameters
----
-
-## What is transcoding with ABR?
-
-In a nutshell, transcoding is converting the original video from one format to another format with a set of extra qualities (ABR). Transcoding aims to make videos viewable across different platforms and devices and to expand the number of viewers, e.g., adding those who couldn't watch the video in its original format. Learn more about transcoding with our [in-depth article](https://gcore.com/learning/what-is-transcoding/).
-
-Transcoding with adaptive bitrate streaming (ABR) works like this:
-
- 1. Decoding of the original video ([list of accepted parameters](/streaming-platform/live-streams-and-videos-protocols-and-codecs/what-initial-parameters-of-your-live-streams-and-videos-we-can-accept) for input video)
- 2. Processing, including scaling (resizing,) frame rate conversion, aspect ratio conversion, and other types of video processing
- 3. Encoding into the desired output format
-
-
-
-Discover how we [lowered the bitrate](https://gcore.com/blog/how-we-lowered-the-bitrate-for-live-and-vod-streaming-by-32-5-without-sacrificing-quality/) for live and VOD streaming by 32.5% without sacrificing quality.
-
-## Audio and video codecs
-
-A transcoded video stream has the following codecs by default:
-
- * **Audio** : AAC, 44.1KHz, stereo
- * **Video** : H264, 30FPS, ABR
-
-
-
-## Output parameters after transcoding
-
-The table below shows the available output quality levels after transcoding. Video and audio parameters are optimized for adaptive bitrate streaming, providing smooth playback across devices and network conditions.
-
-**Video Quality** | **Resolution** | **FPS** | **Video bitrate, Mbps** | **Audio bitrate, Mbps**
----|---|---|---|---
-UHD 8K* | 7680 × 4320 | - | - | -
-UHD 4K* | 3840 × 2160 | 30 | 14,000 | 0,196
-QHD 2K* | 2560 × 1440 | 30 | 7,200 | 0,196
-Full HD 1080p | 1920 × 1080 | 30 | 4,050 | 0,128
-HD 720p | 1280 × 720 | 30 | 1,800 | 0,128
-SD 468p | 832 × 468 | 30 | 0,800 | 0,096
-360p | 640 × 360 | 30 | 0,450 | 0,064
-240p* | 427 × 240 | 30 | 0,200 | 0,064
-144p* | 254 × 144 | 30 | 0,072 | 0,064
-Audio only* | - | - | - | 0,128
-
-*To enable these qualities, contact the [support team](mailto:support@gcore.com).
-
-The values in the table are given for videos with a horizontal aspect ratio of 16:9. The values must be reversed for vertical videos. For example, if a horizontal resolution is (1280 × 720), the relevant vertical resolution is (720 × 1280).
-
-We use variable bitrate (VBR) for encoding video. The table above shows average values. The Gcore Customer Portal shows a sum of audio and video bitrates.
-
-**Note** : We don't upscale videos. Your output will range from the lowest quality to that of your original bitrate. So, if you upload a 4K video, your output will range from the lowest quality up to 4K. Similarly, if you upload a 720p video, your output will vary from the lowest quality up to 720p only.
-
-### Custom quality sets
-
-We provide an optimized set of quality presets designed for smooth streaming across all devices. These settings balance quality and bandwidth efficiency, making them ideal for most users. However, if you have specific requirements—such as higher audio quality, cost reduction by removing unnecessary renditions, AV1 encoding, or custom settings for individual videos—you have full control over your transcoding presets.
-
-Each video can have its own unique quality settings, so you're not limited to a single configuration. Default and custom presets work together, giving you the flexibility to fine-tune streaming quality as needed. Check out the [API documentation](https://api.gcore.com/docs/streaming#tag/QualitySets) to explore available custom quality sets. If you need assistance, our [support team](mailto:support@gcore.com) is always ready to help you customize the best option for your needs.
-
-## HTTP response codes when requesting Live and VOD videos
-
-The following table includes the possible HTTP response codes returned when requesting videos and live streams for manifests (.m3u8, .mpd) and chunks (.ts, mp4, etc.).
-
-| Code | Function | Description |
-|------|-----------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| 200 | OK | All OK |
-| 403 | Forbidden | Access is denied. If you use any distribution restriction such as geo-restriction or token, you must satisfy this condition for access. |
-| 404 | Not Found | There's no requested video, or the live stream is temporarily not delivering chunks. Check the request link or activate your video.
For Live streams in CMAF format, you can check the extra header "X-Err-Code": - 1000 – Master-stream is missed. The stream is not pushed or not transcoded, so start a stream or restart transcoding.
- 2000 – Invalid StreamID. The identifier is not parsed from the requested URL; check the URL.
- 3000 – Stream is not ready for delivery. Inspect the logs or contact support. Oftentimes, this happens when the master-stream has wrong parameters, such as video and audio codecs, FPS, or bitrate. Verify the parameters to ensure everything works as expected.
|
-| 422 | Not Found | This is advanced functionality (i.e. custom encoding presets). To enable it, contact your manager or the support team. |
-| 500 | Internal Server Error | An unexpected issue happened on the server. This may be a local error in a specific video. In this case, check the video processing status in your personal account. If the error is global, the information will be on the status page. |
-| 502 | Bad Gateway | An unexpected issue happened on the server. This may happen when VOD or Live can't be delivered over CDN because an incorrect response was received from an origin (storage or live transcoder). In this case, check the video processing status or live stream transcoding in your personal account. If the error is global, the information will appear on the status page. |
-| 503 | Service Unavailable | An unexpected issue happened on the server. This may be a local error in a specific video. In this case, check the video processing status in your personal account. If the error is global, the information will be on the status page. |
-| 504 | Gateway Time-out | Timeout for receiving data from the source. Try checking the status of video sending/ingesting and transcoding. |
-
-
-**Info**
-
-The system health status page is available at https://status.gcore.com/
-
-
-## How to apply 2K/4K+ and custom advanced settings
-
-Some settings require manual control. If you need them, contact the Gcore [support team](mailto:support@gcore.com) or your manager.
-
-The basic tariff plan doesn't include 2K/4K+ in the transcoding quality preset. For information about high-quality video processing costs, check our [pricing page](https://gcore.com/pricing/streaming-platform). To work with high-quality video, we'll provide you with new quality presets and a new tariff plan.
-
-Additionally, you can ask to activate the following settings:
-
- * Use codecs H265 HEVC, VP9, AV1, etc.,
- * Add 8K quality,
- * Add super-low quality 144p,
- * Change preset of output ABR qualities for all or some renditions, remove low-quality renditions,
- * Use 60+ FPS or use original FPS values,
- * Increase bitrate for audio; use 48KHz,
- * Use Dolby Digital, Dolby Atmos,
- * Transmux VOD MP4 files into HLS on the fly.
+---
+title: "Output parameters after transcoding: bitrate, frame rate, and codecs"
+sidebarTitle: Output parameters
+---
+
+## What is transcoding with ABR?
+
+In a nutshell, transcoding is converting the original video from one format to another format with a set of extra qualities (ABR). Transcoding aims to make videos viewable across different platforms and devices and to expand the number of viewers, e.g., adding those who couldn't watch the video in its original format. Learn more about transcoding with our [in-depth article](https://gcore.com/learning/what-is-transcoding/).
+
+Transcoding with adaptive bitrate streaming (ABR) works like this:
+
+ 1. Decoding of the original video ([list of accepted parameters](/streaming-platform/live-streams-and-videos-protocols-and-codecs/what-initial-parameters-of-your-live-streams-and-videos-we-can-accept) for input video)
+ 2. Processing, including scaling (resizing,) frame rate conversion, aspect ratio conversion, and other types of video processing
+ 3. Encoding into the desired output format
+
+
+
+Discover how we [lowered the bitrate](https://gcore.com/blog/how-we-lowered-the-bitrate-for-live-and-vod-streaming-by-32-5-without-sacrificing-quality/) for live and VOD streaming by 32.5% without sacrificing quality.
+
+## Audio and video codecs
+
+A transcoded video stream has the following codecs by default:
+
+ * **Audio** : AAC, 44.1KHz, stereo
+ * **Video** : H264, 30FPS, ABR
+
+
+
+## Output parameters after transcoding
+
+The table below shows the available output quality levels after transcoding. Video and audio parameters are optimized for adaptive bitrate streaming, providing smooth playback across devices and network conditions.
+
+**Video Quality** | **Resolution** | **FPS** | **Video bitrate, Mbps** | **Audio bitrate, Mbps**
+---|---|---|---|---
+UHD 8K* | 7680 × 4320 | - | - | -
+UHD 4K* | 3840 × 2160 | 30 | 14,000 | 0,196
+QHD 2K* | 2560 × 1440 | 30 | 7,200 | 0,196
+Full HD 1080p | 1920 × 1080 | 30 | 4,050 | 0,128
+HD 720p | 1280 × 720 | 30 | 1,800 | 0,128
+SD 468p | 832 × 468 | 30 | 0,800 | 0,096
+360p | 640 × 360 | 30 | 0,450 | 0,064
+240p* | 427 × 240 | 30 | 0,200 | 0,064
+144p* | 254 × 144 | 30 | 0,072 | 0,064
+Audio only* | - | - | - | 0,128
+
+*To enable these qualities, contact the [support team](mailto:support@gcore.com).
+
+The values in the table are given for videos with a horizontal aspect ratio of 16:9. The values must be reversed for vertical videos. For example, if a horizontal resolution is (1280 × 720), the relevant vertical resolution is (720 × 1280).
+
+We use variable bitrate (VBR) for encoding video. The table above shows average values. The Gcore Customer Portal shows a sum of audio and video bitrates.
+
+**Note** : We don't upscale videos. Your output will range from the lowest quality to that of your original bitrate. So, if you upload a 4K video, your output will range from the lowest quality up to 4K. Similarly, if you upload a 720p video, your output will vary from the lowest quality up to 720p only.
+
+### Custom quality sets
+
+We provide an optimized set of quality presets designed for smooth streaming across all devices. These settings balance quality and bandwidth efficiency, making them ideal for most users. However, if you have specific requirements—such as higher audio quality, cost reduction by removing unnecessary renditions, AV1 encoding, or custom settings for individual videos—you have full control over your transcoding presets.
+
+Each video can have its own unique quality settings, so you're not limited to a single configuration. Default and custom presets work together, giving you the flexibility to fine-tune streaming quality as needed. Check out the [API documentation](https://api.gcore.com/docs/streaming#tag/QualitySets) to explore available custom quality sets. If you need assistance, our [support team](mailto:support@gcore.com) is always ready to help you customize the best option for your needs.
+
+## How to apply 2K/4K+ and custom advanced settings
+
+Some settings require manual control. If you need them, contact the Gcore [support team](mailto:support@gcore.com) or your manager.
+
+The basic tariff plan doesn't include 2K/4K+ in the transcoding quality preset. For information about high-quality video processing costs, check our [pricing page](https://gcore.com/pricing/streaming-platform). To work with high-quality video, we'll provide you with new quality presets and a new tariff plan.
+
+Additionally, you can ask to activate the following settings:
+
+ * Use codecs H265 HEVC, VP9, AV1, etc.,
+ * Add 8K quality,
+ * Add super-low quality 144p,
+ * Change preset of output ABR qualities for all or some renditions, remove low-quality renditions,
+ * Use 60+ FPS or use original FPS values,
+ * Increase bitrate for audio; use 48KHz,
+ * Use Dolby Digital, Dolby Atmos,
+ * Transmux VOD MP4 files into HLS on the fly.
* Transmux and "pass-through" instead of transcoding for LIVE streams.
\ No newline at end of file
diff --git a/streaming-platform/troubleshooting/general-issues.mdx b/streaming-platform/troubleshooting/general-issues.mdx
new file mode 100644
index 000000000..4d54d2b6a
--- /dev/null
+++ b/streaming-platform/troubleshooting/general-issues.mdx
@@ -0,0 +1,73 @@
+---
+title: General video streaming issues
+sidebarTitle: General issues
+---
+
+This page covers common issues you may encounter when working with both Live and VOD streams, as well as steps you can take to troubleshoot them.
+
+
+ If you encounter problems specific to live streaming or VOD uploads, please refer to the [Live streaming issues](live-streaming-issues) or [VOD issues](vod-issues) pages.
+
+
+## General troubleshooting steps
+
+For the most common issues, such as video not playing, taking a long time to start streaming, or looking blurry, these basic checks should help:
+
+- **Status page**: Check if the issue you are experiencing is related to any known issue or is an isolated one by visiting the [status page](https://status.gcore.com).
+- **Source video**: Ensure that the source content is uploaded for streaming. If the same issue occurs in the source, re-upload the video or restart the stream.
+- **Stream URL and code**: Use the exact URL and embed code that appear in the Streaming settings.
+- **Streaming settings**: Make sure the stream is enabled. If configured to pull a stream, ensure the source URL is correct.
+- **Encoder settings**: Make sure you are using the [recommended settings](/streaming-platform/live-streams-and-videos-protocols-and-codecs/what-initial-parameters-of-your-live-streams-and-videos-we-can-accept). If configured to push a stream, ensure the server URL and stream key are accurate.
+
+Other things to try:
+
+- Clear the browsing data.
+- Disable any interfering browser extensions.
+- Ensure that the network connection is stable. Try connecting with or without a VPN.
+- Verify that the streaming ports are open on the firewall.
+- Update the browser or device OS.
+
+## Common playback issues
+
+Various factors, including network issues, device compatibility, and encoding settings, can cause playback issues. Here are some common problems and their solutions.
+
+### Stream does not appear on some devices
+
+_Possible cause_: Device is too old.\
+_Suggested solution_: Streaming should work on most devices, but some may not be compatible. Try using a modern device with enough processing power and memory to stream video successfully.
+
+### Stream returns an HTTP 404 error
+
+_Possible cause_: Transcoding is in progress.\
+_Suggested solution_: Each video chunk may take several seconds to transcode. Allow 10 to 15 seconds for this to happen. Once the chunks have been transcoded, the stream should be ready to play.
+
+_Possible cause_: Low Latency is not enabled in the Customer Portal.\
+_Suggested solution_: Contact our [support team](mailto:support@gcore.com) to activate this option.
+
+
+ For more details on HTTP status codes, see the [HTTP status codes](/streaming-platform/troubleshooting/http-status-codes) page.
+
+
+### Stream returns an HTTP 502 error
+
+_Possible cause_: CDN resource settings have been changed from the preset settings.\
+_Suggested solution_: Contact our [support team](mailto:support@gcore.com) to assist you in restoring the settings.
+
+_Possible cause_: Token configuration is not synchronized.\
+_Suggested solution_: Contact our [support team](mailto:support@gcore.com) to help you restore the settings.
+
+## Open a support ticket
+
+If none of the above work or apply to your issue, contact our [support team](mailto:support@gcore.com) with the following information:
+
+1. Link to the stream.
+2. Description of the issue and steps to reproduce.
+3. List of steps taken to troubleshoot the issue.
+4. Screenshot of the information shown in http://iam.gcdn.co/info.
+5. Screenshot of the response to this command:
+
+ ```
+ curl http://iam.gcdn.co/info/json
+ ```
+6. Screenshot of the speed test result using http://iam.gcdn.co/info.
+7. HAR file. [This page](https://toolbox.googleapps.com/apps/har_analyzer/?lang=en) describes how to generate one in Chrome, Firefox, and Edge.
\ No newline at end of file
diff --git a/streaming-platform/troubleshooting/http-status-codes.mdx b/streaming-platform/troubleshooting/http-status-codes.mdx
new file mode 100644
index 000000000..2146986f1
--- /dev/null
+++ b/streaming-platform/troubleshooting/http-status-codes.mdx
@@ -0,0 +1,26 @@
+---
+title: Video streaming HTTP status codes
+sidebarTitle: HTTP status codes
+---
+
+The following table includes all possible HTTP status codes returned when requesting videos and live streams for manifests (e.g., .m3u8 and .mpd) or chunks (e.g., .ts, .mp4, etc.).
+
+| Code | Status | Description |
+| ---- | ------ | ----------- |
+| 200 | OK | All OK |
+| 403 | Forbidden | Access is denied. If you use any distribution restriction, such as geo-restriction or token, you must satisfy this condition for access. |
+| 404 | Not Found | There is no video, or the live stream is temporarily not delivering chunks. Check the request link or activate your video.<br/>For Live streams in CMAF format, you can check the extra header "X-Err-Code":<br/>- 1000 – Master-stream is missed. The stream is not pushed or not transcoded, so start a stream or restart transcoding.<br/>- 2000 – Invalid StreamID. The identifier is not parsed from the requested URL; check the URL.<br/>- 3000 – Stream is not ready for delivery. Inspect the logs or contact support. Oftentimes, this happens when the master stream has wrong parameters, such as video and audio codecs, FPS, or bitrate. Verify the parameters to ensure everything works as expected. |
+| 422 | Unprocessable Content | This is advanced functionality (e.g., custom encoding presets). To enable it, contact your manager or the support team. |
+| 500 | Internal Server Error | An unexpected issue happened on the server. This may be a local error in a specific video. In this case, check the video processing status in your account. If the error is global, the information will be on the status page. |
+| 502 | Bad Gateway | An unexpected issue happened on the server. This may happen when VOD or Live can't be delivered over CDN because an incorrect response was received from an origin (storage or live transcoder). In this case, check the video processing status or live stream transcoding in your account. If the error is global, the information will appear on the status page. |
+| 503 | Service Unavailable | An unexpected issue happened on the server. This may be a local error in a specific video. In this case, check the video processing status in your account. If the error is global, the information will be on the status page. |
+| 504 | Gateway Time-out | Timeout for receiving data from the source. Try checking the status of video sending/ingesting and transcoding. |
+
+
+
+
+
+The system health status page is available at [https://status.gcore.com/](https://status.gcore.com/)
+
+
\ No newline at end of file
diff --git a/streaming-platform/troubleshooting/live-streaming-issues.mdx b/streaming-platform/troubleshooting/live-streaming-issues.mdx
new file mode 100644
index 000000000..2b1ed4a2d
--- /dev/null
+++ b/streaming-platform/troubleshooting/live-streaming-issues.mdx
@@ -0,0 +1,24 @@
+---
+title: "Live streaming issues"
+sidebarTitle: "Live streaming issues"
+---
+
+This page covers common issues you may encounter when working with live streaming, as well as steps you can take to troubleshoot them.
+
+## Low latency mode has a delay of more than 5 seconds
+
+_Possible cause_: The player does not support the DASH.JS library.\
+_Suggested solution_: Our low latency solution has a latency of 4-5 seconds. If the delay is more than 5 seconds:
+
+- Make sure that the player supports the DASH.JS library.
+- Try testing your low latency stream with the open-source DASH.JS reference player.
+
+## Current broadcast contains DVR chunks of the previous broadcast
+
+_Possible cause_: The broadcast is over, but the stream has not been stopped.\
+_Suggested solution_: This is normal behavior.
+
+To avoid this situation:
+
+- Stop the stream when the broadcast is finished.
+- Delete the DVR archive before starting a new broadcast.
\ No newline at end of file
diff --git a/streaming-platform/troubleshooting/real-time-video-issues/audio-quality-is-reduced-when-using-bluetooth-headsets-in-video-call-apps.mdx b/streaming-platform/troubleshooting/real-time-video-issues/audio-quality-is-reduced-when-using-bluetooth-headsets-in-video-call-apps.mdx
index c3736ca3c..2d09a2bb7 100644
--- a/streaming-platform/troubleshooting/real-time-video-issues/audio-quality-is-reduced-when-using-bluetooth-headsets-in-video-call-apps.mdx
+++ b/streaming-platform/troubleshooting/real-time-video-issues/audio-quality-is-reduced-when-using-bluetooth-headsets-in-video-call-apps.mdx
@@ -1,31 +1,29 @@
----
-title: Audio quality is reduced when using bluetooth headsets in video call apps
-sidebarTitle: Bad audio while bluetooth mic
----
-
-## Overview
-
-If you listen to high-quality audio content such as music with Bluetooth headphones and then open an application that uses the Bluetooth headphones' microphone, e.g., Zoom or Microsoft Team, and so on, the audio quality may be reduced, or some unconventional popping sounds may appear. The problem is that high-quality audio has been changed to low-quality audio. This problem isn't connected to the concrete application—it occurs because of Bluetooth technology in general and its audio profiles' limitations.
-
-## What are bluetooth's audio profiles?
-
-A Bluetooth audio profile is a set of standards that define how Bluetooth devices exchange audio data. Bluetooth supports several different audio profiles designed for different types of audio applications. The most common Bluetooth audio profiles are:
-
- * **Advanced Audio Distribution Profile (A2DP)**. This profile is intended for high-quality stereo playback.
- * **Headset Profile (HSP)/Hands-Free Profile (HFP)**. This profile is meant for calls. It allows phone calls to be controlled wirelessly using the headset's own microphone and speaker.
-
-
-
-More available audio profiles are listed in the [official specification base](https://www.bluetooth.com/specifications/specs/).
-
-## How do bluetooth audio profiles cause a decrease in sound quality?
-
-The limitations of Bluetooth's audio profiles cause the problem with audio quality degradation. Bluetooth headsets can only use one audio profile at a time.
-
-So, when you open a video call application with a Bluetooth headset, the A2DP profile (high sound quality but no microphone capability) is replaced by the HSP/HFP profile (headphones and microphone capability but low sound quality). So, the high-quality audio is changed to low-quality.
-
-## Recommendations for solving the problem
-
-1\. Switch to wired headphone use.
-
+---
+title: Audio quality is reduced when using bluetooth headsets in video call apps
+sidebarTitle: Bad audio while bluetooth mic
+---
+
+If you listen to high-quality audio content such as music with Bluetooth headphones and then open an application that uses the Bluetooth headphones' microphone, e.g., Zoom, Microsoft Teams, and so on, the audio quality may be reduced, or some unconventional popping sounds may appear. The problem is that high-quality audio has been changed to low-quality audio. This problem isn't specific to any particular application—it occurs because of Bluetooth technology in general and its audio profiles' limitations.
+
+## What are bluetooth's audio profiles?
+
+A Bluetooth audio profile is a set of standards that define how Bluetooth devices exchange audio data. Bluetooth supports several different audio profiles designed for different types of audio applications. The most common Bluetooth audio profiles are:
+
+ * **Advanced Audio Distribution Profile (A2DP)**. This profile is intended for high-quality stereo playback.
+ * **Headset Profile (HSP)/Hands-Free Profile (HFP)**. This profile is meant for calls. It allows phone calls to be controlled wirelessly using the headset's own microphone and speaker.
+
+
+
+More available audio profiles are listed in the [official specification base](https://www.bluetooth.com/specifications/specs/).
+
+## How do bluetooth audio profiles cause a decrease in sound quality?
+
+The limitations of Bluetooth's audio profiles cause the problem with audio quality degradation. Bluetooth headsets can only use one audio profile at a time.
+
+So, when you open a video call application with a Bluetooth headset, the A2DP profile (high sound quality but no microphone capability) is replaced by the HSP/HFP profile (headphones and microphone capability but low sound quality). So, the high-quality audio is changed to low-quality.
+
+## Recommendations for solving the problem
+
+1\. Switch to wired headphone use.
+
2\. Change the audio input source from the Bluetooth headset microphone to, for example, the built-in microphone of a laptop or smartphone. The A2DP audio profile will be activated, and the sound quality will be recovered.
\ No newline at end of file
diff --git a/streaming-platform/troubleshooting/solve-common-streaming-platform-issues.mdx b/streaming-platform/troubleshooting/solve-common-streaming-platform-issues.mdx
deleted file mode 100644
index 7ac710613..000000000
--- a/streaming-platform/troubleshooting/solve-common-streaming-platform-issues.mdx
+++ /dev/null
@@ -1,137 +0,0 @@
----
-title: Solve common video streaming issues
-sidebarTitle: Streaming issues
----
-
-We are covering some issues you may encounter when working with both Live and VOD streams, as well as steps you can take to troubleshoot them.
-
-## General troubleshooting steps
-
-For the most common issues, such as video not playing, taking a long time to start streaming, or looking blurry, these basic checks should help:
-
- * **Status page**. Check if the issue you are experiencing is related to any known issue or is an isolated one by visiting the [status page](https://status.gcore.com).
- * **Source video**. Ensure that the source content is uploaded for streaming. If the same issue occurs in the source, re-upload the video or restart the stream.
- * **Stream URL and code**. Make sure to use the exact URL and embed code that appear in the Streaming settings.
- * **Streaming settings**. Make sure the stream is enabled. If configured to pull a stream, make sure the source URL is correct.
- * **Encoder settings**. Make sure you are using the [recommended settings](/streaming-platform/live-streams-and-videos-protocols-and-codecs/what-initial-parameters-of-your-live-streams-and-videos-we-can-accept). If configured to push a stream, make sure the server URL and stream key are accurate.
-
-
-
-Other things to try:
-
- * Clear the browsing data.
- * Disable any interfering browser extensions.
- * Ensure that the network connection is stable. Try connecting with or without a VPN.
- * Verify that the streaming ports are open on the firewall.
- * Update the browser or device OS.
-
-
-
-## Other common issues and solutions
-
-### Playback
-
-**Stream does not appear on some devices**
-
-_Possible cause_ : Device is too old.
-_Suggested solution_ : Streaming should work on most devices, but some devices may not be compatible. Try using a modern device with enough processing power and memory to successfully stream video.
-
-**Stream returns an HTTP 404 error**
-
-_Possible cause_ : Transcoding is in progress.
-_Suggested solution_ : Each video chunk may take several seconds to transcode. Allow 10 to 15 seconds for this to happen. Once the chunks have been transcoded, the stream should be ready to play.
-
-_Possible cause_ : Low Latency is not enabled in the Customer Portal.
-_Suggested solution_ : Contact our [support team](mailto:support@gcore.com) to activate this option.
-
-**Stream returns an HTTP 502 error**
-
-_Possible cause_ : CDN resource settings have been changed from the preset settings.
-_Suggested solution_ : Contact our [support team](mailto:support@gcore.com) to assist you in restoring the settings.
-
-_Possible cause_ : Token configuration is not synchronized.
-_Suggested solution_ : Contact our [support team](mailto:support@gcore.com) to help you restore the settings.
-
-**Current broadcast contains DVR chunks of the previous broadcast**
-
-_Possible cause_ : The broadcast is over, but the stream has not been stopped.
-_Suggested solution_ : This is a normal behavior.
-
-To avoid this situation:
-
- * Stop the stream when the broadcast is finished.
- * Delete the DVR archive before starting a new broadcast.
-
-
-
-**Low latency mode has a delay of more than 5 seconds**
-
-_Possible cause_ : The player does not support the DASH.JS library.
-_Suggested solution_ : Our low latency solution has a latency of 4-5 seconds. If the delay is more than 5 seconds:
-
- * Make sure that the player supports the DASH.JS library.
- * Try testing your low latency stream at the open source [DASH.JS player](https://reference.dashif.org/dash.js).
-
-
-
-### Upload
-
-**Video is not uploaded**
-
-_Possible cause_ : Upload has been interrupted by closing or reloading the window.
-_Suggested solution_ : Try uploading again and be careful not to interrupt the process. If the size of the video is more than 500 MB, the upload will resume where it left off.
-
-_Possible cause_ : Not enough storage space to complete the upload.
-_Suggested solution_ : Confirm in the Storage statistics that you have used up your storage. Contact our [support team](mailto:support@gcore.com) to increase the storage space.
-
-**Upload to the Gcore Customer Portal is interrupted by a session timeout (force logout)**
-
-_Possible cause_ : Video is too large to upload through UI.
-_Suggested solution_ :
-
- * Upload during late night or early morning hours when there is less load on the queue.
- * Upload a small batch of videos (e.g., 10 at a time).
- * [Upload via API](/streaming-platform/video-hosting/upload-video-via-api).
-
-
-
-**Upload is stuck in the Processed state for a long time**
-
-_Possible cause_ : Processing queue is too long or loaded with large videos.
-_Suggested solution_ : Wait a while and then try uploading again.
-
-### Player
-
-**No sound when using Gcore player**
-
-_Possible cause_ : The audio is encoded with an unsupported codec.
-_Suggested solution_ : Set the audio codec to AAC. Note that if you are using Adobe FMLE on Windows, you need to purchase an AAC encoder plugin in order to use the AAC audio format.
-
-**Selected Gcore player does not render on the page**
-
-_Possible cause_ : Some parameters have been added to the URL in the embed code.
-_Suggested solution_ : Contact our [support team](mailto:support@gcore.com) to troubleshoot the issue further.
-
-**No playback controls on the Gcore player**
-
-_Possible cause_ : Disable Skin is active.
-_Suggested solution_ : Turn this option off.
-
-## Open a support ticket
-
-If none of the above work or apply to your issue, contact our [support team](mailto:support@gcore.com) with the following information:
-
- 1. Link to the stream.
- 2. Description of the issue and steps to reproduce.
- 3. List of steps taken to troubleshoot the issue.
- 4. Screenshot of the information shown in http://iam.gcdn.co/info.
- 5. Screenshot of the response to this command:
-
-
-
-```sh
-curl http://iam.gcdn.co/info/json
- ```
-
- 6. Screenshot of the speed test result using http://iam.gcdn.co/info.
- 7. HAR file. This [page](https://toolbox.googleapps.com/apps/har_analyzer/?lang=en) describes how to generate one in Chrome, Firefox, and Edge.
\ No newline at end of file
diff --git a/streaming-platform/troubleshooting/vod-issues.mdx b/streaming-platform/troubleshooting/vod-issues.mdx
new file mode 100644
index 000000000..993836223
--- /dev/null
+++ b/streaming-platform/troubleshooting/vod-issues.mdx
@@ -0,0 +1,28 @@
+---
+title: "VOD issues"
+sidebarTitle: "VOD issues"
+---
+
+Here are some common issues you may encounter when working with VOD and steps you can take to troubleshoot them.
+
+## Video is not uploaded
+
+_Possible cause_: Upload has been interrupted by closing or reloading the window.\
+_Suggested solution_: Try uploading again and be careful not to interrupt the process. If the video is more than 500 MB, the upload will resume where it left off.
+
+_Possible cause_: Not enough storage space to complete the upload.\
+_Suggested solution_: Confirm in the Storage statistics that you have used up your storage. Contact our [support team](mailto:support@gcore.com) to increase the storage space.
+
+## Upload to the Gcore Customer Portal is interrupted by a session timeout (force logout)
+
+_Possible cause_: Video is too large to upload through UI.\
+_Suggested solution_:
+
+- Upload during late night or early morning hours when there is less load on the queue.
+- Upload a small batch of videos (e.g., 10 at a time).
+- [Upload via API](https://gcore.com/docs/streaming-platform/video-hosting/upload-video-via-api).
+
+## Upload is stuck in the Processed state for a long time
+
+_Possible cause_: Processing queue is too long or loaded with large videos.\
+_Suggested solution_: Wait a while and then try uploading again.
\ No newline at end of file
diff --git a/streaming-platform/video-hosting/ai-video-service.mdx b/streaming-platform/video-hosting/ai-video-service.mdx
index 1b5137d24..6903ae3cd 100644
--- a/streaming-platform/video-hosting/ai-video-service.mdx
+++ b/streaming-platform/video-hosting/ai-video-service.mdx
@@ -1,35 +1,73 @@
----
-title: AI video services
-sidebarTitle: AI video services
----
-
-Gcore Video Streaming offers advanced video processing features for both video on demand (VOD) and live streaming:
-
- * [Transcription and translation of captions](/streaming-platform/video-hosting/ai-for-video/generate-ai-subtitles-and-add-them-to-video). Our AI Automated Speech Recognition (AI ASR) feature generates and translates subtitles to a wide range of supported languages, making your videos accessible to users around the globe.
-
- * [Content moderation](/streaming-platform/ai-video-service/content-moderation). We analyze videos for nudity, offensive language, or violence to ensure that your audience receives safe and appropriate content.
-
-
-
-
-Our suite of AI-powered video services is comprehensive and fully managed, ensuring seamless integration into your video streaming workflow.
-
-## AI video services infrastructure
-
-We use our own Gcore infrastructure to execute AI models. In this way, we ensure that all data is processed within our company on EU and US servers, minimizing the risk of data leaks.
-
-Using a unified infrastructure facilitates the integration of your project with other Gcore AI-powered solutions, such as the [AI GPU Cloud](https://gcore.com/cloud/ai-gpu) for accelerating machine learning and [FastEdge](https://gcore.com/fastedge) for low-latency edge computing for serverless app deployment.
-
-## Activate AI video services
-
-AI services are available to all customers of Video Streaming in their personal account and API. If for some reason there is no access to AI, please contact the support the [Gcore support](mailto:support@gcore.com). While the general usage of the AI video service is free, certain features, like translation, may incur charges. For more details, refer to our [pricing page](https://gcore.com/pricing/streaming-platform).
-
-## Available features
-
-Upon activation, you can use API methods for subtitle generation and translation. However, these features are only available for videos.
-
-
-**Warning**
-
-The generation of subtitles for Live Streaming is currently in development and will be released soon.
-
\ No newline at end of file
+---
+title: AI video services
+sidebarTitle: AI video services
+---
+
+Gcore Video Streaming offers advanced video processing features for both video on demand (VOD) and live streaming:
+
+- [Transcription and translation of captions](/streaming-platform/video-hosting/ai-for-video/generate-ai-subtitles-and-add-them-to-video). Our AI Automated Speech Recognition (AI ASR) feature generates and translates subtitles to a wide range of supported languages, making your videos accessible to users around the globe.
+- [Content moderation](/streaming-platform/ai-video-service/content-moderation). We analyze videos for nudity, offensive language, or violence to ensure that your audience receives safe and appropriate content.
+
+Our suite of AI-powered video services is comprehensive and fully managed, ensuring seamless integration into your video streaming workflow.
+
+## AI video services infrastructure
+
+We use our own Gcore infrastructure to execute AI models. In this way, we ensure that all data is processed within our company on EU and US servers, minimizing the risk of data leaks.
+
+Using a unified infrastructure facilitates the integration of your project with other Gcore AI-powered solutions, such as the [AI GPU Cloud](https://gcore.com/cloud/ai-gpu) for accelerating machine learning and [FastEdge](https://gcore.com/fastedge) for low-latency edge computing for serverless app deployment.
+
+## Activate AI video services
+
+AI services are available to all customers of Video Streaming in their personal account and API. If for some reason there is no access to AI, please contact [Gcore support](mailto:support@gcore.com). While the general usage of the AI video service is free, certain features, like translation, may incur charges. For more details, refer to our [pricing page](https://gcore.com/pricing/streaming-platform).
+
+## Available features
+
+Upon activation, you can use API methods for subtitle generation and translation. However, these features are only available for videos.
+
+
+**Warning**
+
+The generation of subtitles for Live Streaming is currently in development and will be released soon.
+
+
+## Content moderation
+
+You can check our [API documentation](https://api.gcore.com/docs/streaming#tag/AI/operation/post_ai_contentmoderation_nsfw) for advice on integrating the different types.
+
+If you want to do one-off checks on single videos, you can use it via the Customer Portal; [these docs](https://gcore.com/docs/streaming-platform/ai-video-service/content-moderation) will explain how.
+
+We offer six AI content moderation tasks—four related to nudity, one for sports, and one for weapons.
+
+### Content unsafe for work
+
+The **nsfw** task is the strictest nudity detection task, as it identifies and filters videos containing inappropriately clothed people (i.e., they don’t have to be fully or partially nude to get flagged). Our algorithm flags content as NSFW if it is potentially unsuitable for viewing at work or in public places. If NSFW content is detected, the AI model provides its confidence level (in percentage).
+
+### Soft nudity
+
+The **soft_nudity** task is less strict than the **nsfw** task, as it only flags videos with full or partial nudity. It detects explicit and partial nudity, including videos of male and female faces and other uncovered body parts. Use it to check whether you can publish a video to all users or block them due to offensive and inappropriate content. The check returns information about the object identified, the video frame number where it was found, and the probability of the detected object. The response only includes objects with at least a 30% confidence level.
+
+This task allows you to identify faces and other body parts and detect more objects than hard nudity detection, so analyzing a video for soft nudity takes more time.
+
+### Hard nudity
+
+The **hard_nudity** task is less strict than the **soft_nudity** task, as it only detects exposed primary and secondary sex organs in a video. It can determine whether publishing a video to all users is appropriate or should be blocked due to offensive or inappropriate content. If hard nudity content is detected, the AI model will provide its confidence level (in percentage).
+
+This task detects fewer objects than **soft_nudity**, so it works faster and better when you only need to detect exposed body parts.
+
+### Child sexual abuse materials
+
+The **child_pornography** task is the least strict nudity detector, as it will only detect child sexual abuse materials (CSAM).
+
+We first run the **soft_nudity** and **hard_nudity** tasks. If they indicate the presence of obscene content with the involvement of children (e.g., the face of a child) in a frame, then such a video is marked as obscene. Frames are designated by the age category of identified children.
+
+The check returns information such as the frames containing the child's face and the child’s age. The response includes objects with a probability of at least 30%.
+
+### Sport activities
+
+The **sport** task identifies various sporting activities in a video. This can include detecting specific types, actions, events, and moments. It lets you determine whether particular sports were filmed and even locate crucial scenes, such as goal shots in football or dunks in a basketball game.
+
+The check returns information about the object identified, the video frame number where it was found, and the probability of the detected object. Objects with a probability of at least 30% are included in the response.
+
+### Weapons
+
+The **weapon** task identifies the presence of weapons in a video. The check returns information about the object identified, the number of video frames where it was found, and the probability of the detected object. Objects with a probability of at least 30% are included in the response.
diff --git a/waap/waap-rules/advanced-rules.mdx b/waap/waap-rules/advanced-rules.mdx
index 63faf7297..531dc4ecf 100644
--- a/waap/waap-rules/advanced-rules.mdx
+++ b/waap/waap-rules/advanced-rules.mdx
@@ -1,249 +1,262 @@
----
-title: "Advanced rules"
-sidebarTitle: Advanced rules
----
-
-
-**Info**
-
-This feature is available for the [Enterprise](/waap/billing#enterprise) package.
-
-
-Similarly to WAAP [custom rules](/waap/waap-rules/custom-rules), you can create, edit, and manage advanced custom rules. These rules also contain "If/Then" statements, but they support more complex conditions created with the [Common Expression Language (CEL)](https://github.com/google/cel-spec) syntax.
-
-## Create advanced rules
-
-Due to the highly technical aspect of the advanced rules functionality, the ability to create and manage these rules is currently only available through our API.
-
-Check out the following guides for details on how to create advanced rules and their key components:
-
- * [API docs](https://api.gcore.com/docs/waap): Learn how to construct and manage advanced rules.
-
- * [Advanced rule objects and attributes](/waap/waap-rules/advanced-rules/advanced-rule-objects): Get the list of all available objects you can use in rule expressions along with their respective attributes and types.
-
- * [Source field objects](/waap/waap-rules/advanced-rules/source-field-objects): Check the available source field objects you can use in your expressions along with their respective attributes and types.
-
-
-
-
-## Advanced rule properties
-
-The advanced rule object contains the following properties:
-
-```json
-{
- "name": "string",
- "description": "",
- "enabled": true,
- "action": {
- "allow": {},
- "block": {
- "status_code": 403,
- "action_duration": "string"
- },
- "captcha": {},
- "handshake": {},
- "tag": {
- "tags": [
- "string"
- ]
- }
- },
- "source": "string",
- "phase": "access"
-}
- ```
-
-
-**Info**
-
-Each rule can contain only one action—`block`, `allow`, `captcha`, `handshake`, or `tag`. If you use multiple actions in a single rule, the API will return an error.
-
-
-| Field | Description | Values | Details |
-|----------------------|--------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| `name` | Rule name | Can contain only ASCII letters, numbers, spaces, periods, and colons. | |
-| `action` | The action to execute when a condition is true. | - `block`: WAAP blocked the request.
- `allow`: WAAP allowed the request.
- `captcha`: WAAP presented the user with a CAPTCHA
- `handshake`: WAAP performed automatic browser validation.
- `tag`: WAAP will generate a tag with no action.
| On `tag` [action](/waap/waap-rules/custom-rules#actions-in-custom-rules), the tag field should be provided.
For the `block` [action](/waap/waap-rules/custom-rules#actions-in-custom-rules), setting up the `status_code` (integer) and `action_duration` (time in seconds) is optional. By default, the status is set to `"status_code": 403`, and duration equals to `0s`. |
-| `source` | The condition part of the rule. | Can reference namespace objects: request, whois, session, response, tags, user_agent, client_data, as well as use data and functions.
Supported Python operand syntax: and, or, in, not, ==, !=, >, \<, etc.
Supported CEL operand syntax: \|\|, && | Every string value should be enclosed in single quotation marks `'` and **not** in double quotation marks `"`. |
-| `enabled` | Whether or not the rule is enabled. | Boolean: `true` or `false` | Default value: `false` |
-| `description` | A string to describe the purpose of the rule. | Any string.
The character limit for the description field is 100 characters. | Default value: `false` |
-| `phase` | The request processing phase. | - `access`: The advanced rule applies to the request phase (request headers and body available).
- `header_filter`: The advanced rule applies to the response headers phase.
- `body_filter`: The advanced rule applies to the response body phase.
| Default value: `access` |
-
-
-
-
-## Best practices
-
-You can use our API documentation as a guide in constructing your own advanced rules. The following sections demonstrate a few examples of advanced rules created via our API using cURL.
-
-### Rate limiting
-
-Block IPs that hit more than 200 requests per 5 seconds (changeable) when the following cookies don't exist. You can find more examples in our [Rate limiting](/waap/waap-rules/advanced-rules/advanced-rate-limiting-rules) guide.
-
-```sh
-curl --request POST \
---url https://api.gcore.com/waap/v1/domains/{domain_id}/advanced-rules \
---header 'accept: application/json' \
---header 'content-type: application/json' \
---data '{
- "action": {
- "block": {
- "status_code": 403,
- "action_duration": "10m"
- }
- },
- "phase": "access",
- "name": "Block Scrappers",
- "description": "Block IPs that hit more than 200 requests per 5 seconds for any `events` paths",
- "enabled": false,
- "source": "(request.rate_limit([], '.*events', 5, 200, [], [], '', 'ip') and not ('mb-web-ui' in request.headers['Cookie'] or 'mb-mobile-ios' in request.headers['Cookie'] or 'mobile-android' in request.headers['Cookie'] or 'mb-mobile-android' in request.headers['Cookie'] or 'session-token' in request.headers['Cookie']) and not request.headers['session']) or tags.exists('penalty')"
-}'
-```
-
-### The penalty rule
-
-If a `block` action triggers, WAAP will tag matching requests with the `penalty` tag as long as the `block` action is active.
-
-To ensure the blocking of specific sources, **you must also check for the`penalty` tag** by defining a separate rule that blocks requests with the `penalty` tag or by including the tag check as part of a rule's `source` condition.
-
-#### Block all penalty requests
-
-The following rule will block all requests with the `penalty` tag. This ensures `block` actions triggered by any rule will be enforced.
-
-Creating a separate rule for the `penalty` tag is useful when you have exhausted the 5 tags limit in a single rule.
-
-```sh
-curl --request POST \
---url https://api.gcore.com/waap/v1/domains/{domain_id}/advanced-rules \
---header 'accept: application/json' \
---header 'content-type: application/json' \
---data '{
- "action": {"block": {}},
- "phase": "access",
- "name": "Block Penalized Requests",
- "description": "Block requests that are detected with `penalty` tag",
- "enabled": false,
- "source": "tags.exists('penalty')"
-}'
-```
-
-{/*
-#### Block penalty requests of a specific rule
-
-In this example, the check for the `penalty` tag is embedded in a specific rule, so creating a separate rule to block requests with the `penalty` tag is unnecessary.
-
-```
-curl --request POST \
-\--url https://api.gcore.com/waap/v1/domains/{domain_id}/advanced-rules \
-\--header 'accept: application/json' \
-\--header 'content-type: application/json' \
-\--data '
-{
- "phase": "ACCESS",
- "action": {"block": {"statusCode": 403, "action_duration": "1h"}},
- "name": "Rate Limit IP",
- "description": "Block IPs that hit more than 30 requests per 1 minute",
- "enabled": True,
- "source": "request.rate_limit([], '.*', 60, 30, [], [], '', 'ip') or tags.exists('penalty')"
-}
-'
- ```*/}
-
-### Validate a set of countries
-
-Use JavaScript validation to challenge IPs that are coming from countries without certain cookies:
-
-```sh
-curl --request POST \
---url https://api.gcore.com/waap/v1/domains/{domain_id}/advanced-rules \
---header 'accept: application/json' \
---header 'content-type: application/json' \
---data '{
- "action": {
- "handshake": {}
- },
- "phase": "access",
- "name": "Validate set of countries",
- "description": "Validate with JavaScript challenge IPs that are coming from the following countries",
- "enabled": false,
- "source": "whois.country in ['BR', 'VN', 'ID', 'TH', 'ME', 'XK', 'LK'] and not ('mb-web-ui' in request.headers['Cookie'] or 'mb-mobile-ios' in request.headers['Cookie'] or 'mobile-android' in request.headers['Cookie'] or 'mb-mobile-android' in request.headers['Cookie'])
-}'
- ```
-
-### Add clients to allow list
-
-```sh
-curl --request POST \
---url https://api.gcore.com/waap/v1/domains/{domain_id}/advanced-rules \
---header 'accept: application/json' \
---header 'content-type: application/json' \
---data '{
- "action": {
- "allow": {}
- },
- "name": "Whitelist known IPs",
- "enabled": false,
- "source": "request.ip == '117.20.32.5' or request.ip == '117.20.32.4' or request.ip_in_range('72.21.217.0', '72.21.217.255')"
-}'
- ```
-
-### Tag and allow registered clients
-
-Make sure that the tag value is enclosed in the double-quotes character ".
-
-```sh
-curl --request POST \
---url https://api.gcore.com/waap/v1/domains/{domain_id}/advanced-rules \
---header 'accept: application/json' \
---header 'content-type: application/json' \
---data '{
- "name": "Tag registered clients",
- "description": "Detect and tag registered clients by cookie",
- "source": "'mb-mobile-android' in request.headers['Cookie']",
- "action": {
- "tag": {
- "tags": ["registered"]
- }
- }
-}'
- ```
-
-```sh
-curl --request POST \
---url https://api.gcore.com/waap/v1/domains/{domain_id}/advanced-rules \
---header 'accept: application/json' \
---header 'content-type: application/json' \
---data '{
- "name": "Allow registered clients",
- "description": "Allow registered clients",
- "source": "tags.exists('registered')",
- "action": {
- "allow": {}
- }
-}'
- ```
-
-### Define login pages
-
-```sh
-curl --request POST \
---url https://api.gcore.com/waap/v1/domains/{domain_id}/advanced-rules \
---header 'accept: application/json' \
---header 'content-type: application/json' \
---data '{
- "name": "Detect and Tag Login Pages",
- "source": "['url1/login','url2/signup'] in request.uri",
- "action": {
- "tag": {
- "tags": ["login page"]
- }
- }
-}'
- ```
-
-## Review existing rules
-
-After you create advanced rules, they will appear on the **WAAP rules** page in the Gcore Customer Portal. You can enable or disable the rules by clicking the relevant toggles.
-
-
-
\ No newline at end of file
+---
+title: "Advanced rules"
+sidebarTitle: "Advanced rules"
+---
+
+
+ **Info**
+
+ This feature is available in selected plans. For up-to-date information on plan availability, see the [billing documentation](https://gcore.com/docs/waap/billing).
+
+
+Similarly to WAAP [custom rules](/waap/waap-rules/custom-rules), you can create, edit, and manage advanced custom rules. These rules also contain "If/Then" statements, but they support more complex conditions created with the [Common Expression Language (CEL)](https://github.com/google/cel-spec) syntax.
+
+## Create advanced rules
+
+Due to the highly technical aspect of the advanced rules functionality, the ability to create and edit these rules is currently only available through our API.
+
+Check out the following guides for details on how to create advanced rules and their key components:
+
+- [API docs](https://api.gcore.com/docs/waap): Learn how to construct and manage advanced rules.
+- [Advanced rule objects and attributes](/waap/waap-rules/advanced-rules/advanced-rule-objects): Get the list of all available objects you can use in rule expressions along with their respective attributes and types.
+- [Source field objects](/waap/waap-rules/advanced-rules/source-field-objects): Check the available source field objects you can use in your expressions along with their respective attributes and types.
+
+## Advanced rule properties
+
+The advanced rule object contains the following properties:
+
+```json
+{
+ "name": "string",
+ "description": "",
+ "enabled": true,
+ "action": {
+ "allow": {},
+ "block": {
+ "status_code": 403,
+ "action_duration": "string"
+ },
+ "captcha": {},
+ "handshake": {},
+ "tag": {
+ "tags": [
+ "string"
+ ]
+ }
+ },
+ "source": "string",
+ "phase": "access"
+}
+```
+
+
+ **Info**
+
+ Each rule can contain only one action—`block`, `allow`, `captcha`, `handshake`, or `tag`. If you use multiple actions in a single rule, the API will return an error.
+
+
+
+ | Field | Description | Values | Details |
+ | ------------- | ----------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+ | `name` | Rule name | Can contain only ASCII letters, numbers, spaces, periods, and colons. | **Required.** |
+ | `action` | The action to execute when a condition is true. | - block: WAAP blocked the request.
- allow: WAAP allowed the request.
- captcha: WAAP presented the user with a CAPTCHA
- handshake: WAAP performed automatic browser validation.
- tag: WAAP will generate a tag with no action.
| **Required.** On `tag` [action](/waap/waap-rules/custom-rules#actions-in-custom-rules), the tag field should be provided.
For the `block` [action](/waap/waap-rules/custom-rules#actions-in-custom-rules), setting up the `status_code` (integer) and `action_duration` (time in seconds) is optional. By default, the status is set to `"status_code": 403`, and duration equals to `0s`. |
+ | `source` | The condition part of the rule. | Can reference namespace objects: request, whois, session, response, tags, user_agent, client_data, as well as use data and functions.
Supported Python operand syntax: and, or, in, not, ==, !=, \>, \<, etc.
Supported CEL operand syntax: \|\|, && | **Required.** Every string value should be enclosed in single quotation marks `'` and **not** in double quotation marks `"`. |
+ | `enabled` | Whether or not the rule is enabled. | Boolean: `true` or `false` | **Required.** |
+ | `description` | A string to describe the purpose of the rule. | Any string.
The character limit for the description field is 100 characters. | |
+ | `phase` | The request processing phase. | - access: The advanced rule applies to the request phase (request headers and body available).
- header_filter: The advanced rule applies to the response headers phase.
- body_filter: The advanced rule applies to the response body phase.
| Default value: `access` |
+
+
+## Best practices
+
+You can use our API documentation as a guide when constructing your own advanced rules. The following sections provide examples of advanced rules created via our API using cURL.
+
+
+ **Note**
+
+ Examples are illustrative. Field values (paths, cookies, IPs, countries) should be adapted to the customer’s environment.
+
+
+### Rate limiting
+
+You can rate limit IPs based on the number of requests they make to your website. For example, this can be useful for mitigating scrapers or automated clients that generate a high volume of requests over a short period of time.
+
+You can find more examples in our [Rate limiting](https://gcore.com/docs/waap/waap-rules/advanced-rules/advanced-rate-limiting-rules) guide.
+
+The following rule blocks an IP for 10 minutes when it exceeds 200 requests within 5 seconds, excluding requests from mobile or web clients that carry specific cookies.
+
+**Rate Limiting curl example**
+
+```sh
+curl --request POST \
+--url https://api.gcore.com/waap/v1/domains/{domain_id}/advanced-rules \
+--header 'accept: application/json' \
+--header 'content-type: application/json' \
+--data '{
+ "action": {
+ "block": {
+ "status_code": 403,
+ "action_duration": "10m"
+ }
+ },
+ "phase": "access",
+ "name": "Block Scrappers",
+ "description": "Block IPs that hit more than 200 requests per 5 seconds for any `events` paths",
+ "enabled": false,
+ "source": "((request.limit_rate(url='.*events', interval=5, requests=200, scope='ip')) and not ('mb-web-ui' in request.headers['Cookie'] or 'mb-mobile-ios' in request.headers['Cookie'] or 'mobile-android' in request.headers['Cookie'] or 'mb-mobile-android' in request.headers['Cookie'] or 'session-token' in request.headers['Cookie']) and not request.headers['session']) or tags.exists('penalty')"
+}'
+```
+
+### The penalty tag
+
+The WAAP system appends the `penalty` tag to the local (domain-related) IP record when an IP address triggers a block rule configured with an `action_duration` parameter.
+
+To continue blocking an IP address after the original rule’s conditions are no longer met, include a `penalty` tag check in the rule’s source conditions (for example, by appending `or tags.exists('penalty')` to the source expression), or define a separate rule that targets requests carrying the penalty tag. The following example illustrates this approach.
+
+#### Block all penalty requests
+
+The following rule blocks requests from IP addresses tagged with the `penalty` tag, allowing block actions applied by other rules to persist.
+
+**Block penalty-tagged IP requests**
+
+```sh
+curl --request POST \
+ --url https://api.gcore.com/waap/v1/domains/{domain_id}/advanced-rules \
+ --header 'accept: application/json' \
+ --header 'content-type: application/json' \
+ --data '{
+ "name": "Block Penalized Requests",
+ "description": "Block requests from IP addresses tagged with the penalty tag",
+ "enabled": false,
+ "phase": "access",
+ "action": {
+ "block": {}
+ },
+ "source": "tags.exists('penalty')"
+ }'
+```
+
+## Other examples
+
+### Validate a set of countries
+
+Demonstrates how to apply browser validation (JavaScript-based challenge) using the `handshake` action to requests originating from specific countries, based on the `whois.country` field, while excluding requests that contain certain cookies.
+
+**Validate a set of countries**
+
+```sh
+curl --request POST \
+ --url https://api.gcore.com/waap/v1/domains/{domain_id}/advanced-rules \
+ --header 'accept: application/json' \
+ --header 'content-type: application/json' \
+ --data '{
+ "action": {
+ "handshake": {}
+ },
+ "phase": "access",
+ "name": "Validate set of countries",
+ "description": "Validate with JavaScript challenge IPs that are coming from the following countries",
+ "enabled": false,
+ "source": "whois.country in ['BR', 'VN', 'ID', 'TH', 'ME', 'XK', 'LK'] and not ('mb-web-ui' in request.headers['Cookie'] or 'mb-mobile-ios' in request.headers['Cookie'] or 'mobile-android' in request.headers['Cookie'] or 'mb-mobile-android' in request.headers['Cookie'])"
+ }'
+```
+
+### Add clients to allow list
+
+Demonstrates how to allow requests from specific IP addresses or IP ranges by matching IP values in the rule condition.
+
+**Add clients to allow list**
+
+```sh
+curl --request POST \
+ --url https://api.gcore.com/waap/v1/domains/{domain_id}/advanced-rules \
+ --header 'accept: application/json' \
+ --header 'content-type: application/json' \
+ --data '{
+ "name": "Whitelist known IPs",
+ "enabled": false,
+ "action": {
+ "allow": {}
+ },
+ "source": "request.ip == '117.20.32.5' or request.ip == '117.20.32.4' or request.ip_in_range('72.21.217.0', '72.21.217.255')"
+ }'
+```
+
+### Tag and allow registered clients
+
+Demonstrates how to tag requests based on the presence of a specific cookie and allow requests associated with the assigned tag.\
+When defining tag values in JSON, double quotation marks must be used, while string values inside rule expressions must be enclosed in single quotation marks.
+
+**Tag registered clients**
+
+```sh
+curl --request POST \
+ --url https://api.gcore.com/waap/v1/domains/{domain_id}/advanced-rules \
+ --header 'accept: application/json' \
+ --header 'content-type: application/json' \
+ --data '{
+ "name": "Tag registered clients",
+ "description": "Detect and tag registered clients by cookie",
+ "enabled": false,
+ "phase": "access",
+ "source": "'mb-mobile-android' in request.headers['Cookie']",
+ "action": {
+ "tag": {
+ "tags": ["registered"]
+ }
+ }
+ }'
+```
+
+**Allow registered clients**
+
+```sh
+curl --request POST \
+ --url https://api.gcore.com/waap/v1/domains/{domain_id}/advanced-rules \
+ --header 'accept: application/json' \
+ --header 'content-type: application/json' \
+ --data '{
+ "name": "Allow registered clients",
+ "description": "Allow registered clients",
+ "enabled": false,
+ "phase": "access",
+ "source": "tags.exists('registered')",
+ "action": {
+ "allow": {}
+ }
+ }'
+```
+
+### Define login pages
+
+Demonstrates how to tag requests that match specific URL patterns using string matching on the request URI.
+
+**Define login pages**
+
+```sh
+curl --request POST \
+ --url https://api.gcore.com/waap/v1/domains/{domain_id}/advanced-rules \
+ --header 'accept: application/json' \
+ --header 'content-type: application/json' \
+ --data '{
+ "name": "Detect and Tag Login Pages",
+ "enabled": false,
+ "phase": "access",
+ "source": "['url1/login','url2/signup'] in request.uri",
+ "action": {
+ "tag": {
+ "tags": ["login page"]
+ }
+ }
+ }'
+```
+
+## Review existing rules
+
+Advanced rules can be reviewed by:
+
+- Using the WAAP API via the [Advanced Rules retrieval endpoint](https://gcore.com/docs/api-reference/waap/advanced-rules/retrieve-advanced-rules).
+- Using the Gcore Customer Portal, where rules can be enabled, disabled, or deleted.
+
+
+ 
+
\ No newline at end of file
diff --git a/waap/waap-rules/advanced-rules/advanced-rate-limiting-rules.mdx b/waap/waap-rules/advanced-rules/advanced-rate-limiting-rules.mdx
index eada4861e..bcc3b4204 100644
--- a/waap/waap-rules/advanced-rules/advanced-rate-limiting-rules.mdx
+++ b/waap/waap-rules/advanced-rules/advanced-rate-limiting-rules.mdx
@@ -1,191 +1,216 @@
----
-title: "Advanced rate limiting rules"
-sidebarTitle: Advanced rate limiting rules
----
-
-The rate limiting feature allows you to limit the number of specific web requests to your domain within a particular period.
-
-We have a unique implementation of rate limiting: it's set as a conditioning segment within a rule and can be embedded within any other condition segment. This is an advanced functionality that's only available in the advanced rules.
-
-## Rate limiting method
-
-Use the `request.rate_limit()` method to implement rate limiting:
-
-```js
-request.rate_limit(
- ip = [, ...],
- url = ,
- time = ,
- requests = ,
- method = [, ...],
- status_code = [, ...],
- content_type = ,
- scope =
-)
- ```
-
-For [tag-based](/waap/waap-rules/custom-rules/tag-rules#tag-based-rules) rate limiting rules, use `request.limit_rate` instead:
-
-```js
-request.limit_rate(
- ip = [, ...],
- url = ,
- time = ,
- requests = ,
- method = [, ...],
- status_code = [, ...],
- content_type = ,
- scope = ,
- tag =
-)
-```
-
-The method returns `true` if the count of requests (4) under the granularity (8) with the filters (1, 2, 5, 6, 7, 9) exceeds the limit for a given time (3).
-
-| | Parameter name | Required | Description |
-|---|--------------------------------|---------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| 1 | `ip` | False | List of IP addresses that the rule applies to.
If there are no IPs in the list, counting will be done for all IPs. |
-| 2 | `url` | True | A regex pattern used to match the request's URI (URL + query_params). |
-| 3 | `time` | True | The time limit, in seconds, within which we only allow **n** number of requests to URI matching the pattern. |
-| 4 | `requests` | True | The maximum number of requests **accepted** within the given period before an action is taken (minimum value is 20 requests). |
-| 5 | `method` | False | List of method types the request aggregation will be applied to. |
-| 6 | `status_code` | False | List of status codes the request aggregation will be applied to. |
-| 7 | `content_type` | False | Regex pattern to match request content_types against. |
-| 8 | `scope` (granularity) | False
If the granularity isn’t set to `cluster`, the default aggregation will be set to `cluster`. | The `cluster` scope counts all the traffic of a POP:
- Interval counting is made on each POP separately. This can cause unsynchronized counting.
- IP counting can be counted separately on different POPs (each request individually, not overlapped).
- POPs could be related to certain locations, which means that some clients may never be addressed to certain POPs
- The `cluster` scope counts traffic per given IP. |
-| 9 | `tag` | False | Aggregation of tagged ([user-defined tags](/waap/waap-rules/custom-rules/tag-rules#tag-generating-rules)) requests will be applied for each IP. |
-
-## Implementation
-
-### Advanced rules via API
-
-```sh
-curl --request POST \
---url https://api.gcore.com/waap/v1/domains/{domain_id}/advanced-rules \
---header 'accept: application/json' \
---header 'content-type: application/json' \
---data '{
- "ruleAction": {"block": {"statusCode": "FORBIDDEN_403"}},
- "phase": "ACCESS",
- "name": "Block Scrappers",
- "description": "Block IPs that hit more than 200 requests per 5 seconds for any `events` paths",
- "enabled": false,
- "source": "request.rate_limit([], '.*events', 5, 200, [], [], '', 'ip') and not ('mb-web-ui' in request.headers['Cookie'] or 'mb-mobile-ios' in request.headers['Cookie'] or 'mobile-android' in request.headers['Cookie'] or 'mb-mobile-android' in request.headers['Cookie'] or 'session-token' in request.headers['Cookie']) and not request.headers['session']"
-}'
- ```
-
-### Best practices
-
-#### Block IPs that exceed request limit for any URL
-
-Each request will be counted individually for each IP. For example, if the IP address 1.2.3.4 sends more than 200 requests within a 5-second timeframe, it will be blocked.
-
-Another IP address, such as 1.2.3.5, will only be blocked if it exceeds the allowed threshold of 200 requests in the same time interval.
-
-```sh
-curl --request POST \
---url https://api.gcore.com/waap/v1/domains/{domain_id}/advanced-rules \
---header 'accept: application/json' \
---header 'content-type: application/json' \
---data '{
- "ruleAction": {"block": {"statusCode": "FORBIDDEN_403"}},
- "phase": "ACCESS",
- "name": "Limit Certain IPs",
- "description": "Limit Certain IPs",
- "enabled": false,
- "source": "request.rate_limit(['1.2.3.4', '1.2.3.5'], '.*events', 5, 200, [], [], '', 'ip')"
-}'
- ```
-
-#### Embed additional conditions
-
-```sh
-curl --request POST \
---url https://api.gcore.com/waap/v1/domains/{domain_id}/advanced-rules \
---header 'accept: application/json' \
---header 'content-type: application/json' \
---data '{
- "ruleAction": {"block": {"statusCode": "FORBIDDEN_403"}},
- "phase": "ACCESS",
- "name": "Embedding additional condition to rate limit feature",
- "description": "Embedding additional condition to rate limit feature",
- "enabled": false,
- "source": "request.headers['User-Agent'] == 'Firefox' AND request.rate_limit(['1.2.3.4', '1.2.3.5'], '.*events', 5, 200, [], [], '', 'ip')"
-}'
- ```
-
-Note that using an additional condition, such as an IP, is considered a bad practice. For example, if you use `request.ip in ['1.2.3.4']` and `request.rate_limit([], '', 5, 200, [], [], '', 'ip')`, the rate limit will count requests per interval for every IP. However, it will set an action only when 1.2.3.4 exceeds the number of requests.
-
-This behavior is linked to the rule you are creating, not the rate limit condition. Thus, even with a blank IP list and an embedded IP condition, the rate limit will still count requests for all IPs.
-
-#### Rate limit complex URL regex
-
-```sh
-curl --request POST \
---url https://api.gcore.com/waap/v1/domains/{domain_id}/advanced-rules \
---header 'accept: application/json' \
---header 'content-type: application/json' \
---data '{
- "ruleAction": {"block": {"statusCode": "FORBIDDEN_403"}},
- "phase": "ACCESS",
- "name": "Rate limit complexed URL regex",
- "description": "Rate limit complexed URL regex",
- "enabled": false,
- "source": "request.rate_limit([], '.*(?,
+ interval = ,
+ url = ,
+ ip_list = [, ...],
+ method_list = [, ...],
+ status_list = [, ...],
+ content_type = ,
+ scope = ,
+ tag =
+)
+```
+
+Specify the `tag` parameter when configuring tag-based rate limiting rules.
+
+The method returns `true`, and the enclosing rule condition is satisfied, when the request count (4), under the granularity (8), exceeds the configured threshold within the specified time window (3), using the configured filters (1, 2, 5, 6, 7, 9).
+
+| # | Parameter name | Required | Description | |
+| --- | --------------------- | ------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --- |
+| 1 | `ip_list` | False | List of IP addresses that the rule applies to.
If there are no IPs in the list, counting will be done for all IPs. | |
+| 2 | `url` | True | A regex pattern used to match the request's URI (URL + query_params). | |
+| 3 | `interval` | True | The time limit, in seconds, within which we only allow **n** number of requests to URI matching the pattern. | |
+| 4 | `requests` | True | The maximum number of requests **accepted** within the given period before an action is taken (minimum value is 21 requests). | |
+| 5 | `method_list` | False | List of method types the request aggregation will be applied to. | |
+| 6 | `status_list` | False | List of status codes the request aggregation will be applied to. | |
+| 7 | `content_type` | False | Regex pattern to match request content_type against. | |
+| 8 | `scope` (granularity) | False
If the granularity isn’t specified, the default aggregation is `cluster`. | When protecting the origin, you can configure rate limit rules that apply to either IPs or all requests that come through the cluster.
- If you set it to `ip`, once the IP exceeds the threshold, the rule is triggered. This helps protect against individual attackers.
- If you set it to `cluster`, once the total requests (from any IP) exceed the threshold, the rule is triggered. This helps protect the origin from getting too many requests. | |
+| 9 | `tag` | False | Aggregation of tagged ([user-defined tags](/waap/waap-rules/custom-rules/tag-rules#tag-generating-rules)) requests will be applied for each IP. | |
+
+## Implementation
+
+### Advanced rules via API
+
+```sh
+curl --request POST \
+--url https://api.gcore.com/waap/v1/domains/{domain_id}/advanced-rules \
+--header 'accept: application/json' \
+--header 'content-type: application/json' \
+--data '{
+ "action": {
+ "block": {
+ "status_code": 403
+ }
+ },
+ "phase": "access",
+ "name": "Block Scrappers",
+ "description": "Block IPs that hit more than 200 requests per 5 seconds for any `events` paths",
+ "enabled": false,
+ "source": "request.limit_rate(url='.*events', interval=5, requests=200, scope='ip') and not ('mb-web-ui' in request.headers['Cookie'] or 'mb-mobile-ios' in request.headers['Cookie'] or 'mobile-android' in request.headers['Cookie'] or 'mb-mobile-android' in request.headers['Cookie'] or 'session-token' in request.headers['Cookie']) and not request.headers['session']"
+}'
+```
+
+### Best practices
+
+#### Block IPs that exceed request limit for any URL
+
+Each request will be counted individually for each IP. For example, if the IP address 1.2.3.4 sends more than 200 requests within a 5-second timeframe, it will be blocked.
+
+Another IP address, such as 1.2.3.5, will only be blocked if it exceeds the allowed threshold of 200 requests in the same time interval.
+
+```sh
+curl --request POST \
+--url https://api.gcore.com/waap/v1/domains/{domain_id}/advanced-rules \
+--header 'accept: application/json' \
+--header 'content-type: application/json' \
+--data '{
+ "action": {
+ "block": {
+ "status_code": 403
+ }
+ },
+ "phase": "access",
+ "name": "Limit Certain IPs",
+ "description": "Limit Certain IPs",
+ "enabled": false,
+ "source": "request.limit_rate(ip_list=['1.2.3.4','1.2.3.5'], url='.*events', interval=5, requests=200, scope='ip')"
+}'
+```
+
+#### Embed additional conditions
+
+```sh
+curl --request POST \
+--url https://api.gcore.com/waap/v1/domains/{domain_id}/advanced-rules \
+--header 'accept: application/json' \
+--header 'content-type: application/json' \
+--data '{
+ "action": {
+ "block": {
+ "status_code": 403
+ }
+ },
+ "phase": "access",
+ "name": "Embedding additional condition to rate limit feature",
+ "description": "Embedding additional condition to rate limit feature",
+ "enabled": false,
+ "source": "request.headers['User-Agent'] == 'Firefox' and request.limit_rate(ip_list=['1.2.3.4','1.2.3.5'], url='.*events', interval=5, requests=200, scope='ip')"
+}'
+```
+
+#### Rate limit complex URL regex
+
+```sh
+curl --request POST \
+--url https://api.gcore.com/waap/v1/domains/{domain_id}/advanced-rules \
+--header 'accept: application/json' \
+--header 'content-type: application/json' \
+--data '{
+ "action": {
+ "block": {
+ "status_code": 403
+ }
+ },
+ "phase": "access",
+ "name": "Rate limit complexed URL regex",
+ "description": "Rate limit complexed URL regex",
+ "enabled": false,
+ "source": "request.limit_rate(url='.*(?