How to Build a Live Streaming Service with ZLMediaKit, FFmpeg, and Spring Boot
This guide walks through preparing the environment, installing ZLMediaKit and FFmpeg, configuring a Spring Boot backend with streaming services, and provides step‑by‑step usage instructions for pushing and playing live streams via RTMP, FLV, and HLS.
1. Environment Preparation
1.1 ZLMediaKit Installation and Configuration
Pull the Docker image and start the container.
# Pull image
docker pull zlmediakit/zlmediakit:master
# Start
docker run -d \
--name zlm-server \
-p 1935:1935 \
-p 8099:80 \
-p 8554:554 \
-p 10000:10000 \
-p 10000:10000/udp \
-p 8000:8000/udp \
-v /docker-volumes/zlmediakit/conf/config.ini:/opt/media/conf/config.ini \
zlmediakit/zlmediakit:master
Configuration file (config.ini) example:
[hls]
broadcastRecordTs=0
deleteDelaySec=300 # video retention (5 minutes)
fileBufSize=65536
filePath=./www # save path
segDur=2 # segment duration (seconds)
segNum=1000 # max segments in .m3u8
segRetain=9999 # actual retained segments on disk
Check container logs to verify startup:
# View startup status
docker logs -f zlm-server
1.2 FFmpeg Installation
Download FFmpeg from https://www.gyan.dev/ffmpeg/builds/ and add its bin directory to the system PATH variable.
After adding the path, the installation is successful.
2. Spring Boot Backend Implementation
2.1 Add Maven Dependency
<dependencies>
<!-- Process management -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-exec</artifactId>
<version>1.3</version>
</dependency>
</dependencies>
2.2 Stream Configuration Class
package com.lyk.plugflow.config;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
@Data
@Component
@ConfigurationProperties(prefix = "stream")
public class StreamConfig {
private String zlmHost;
private Integer rtmpPort;
private Integer httpPort;
private String ffmpegPath;
private String videoPath;
}
2.3 Stream Service Class
package com.lyk.plugflow.service;
import com.lyk.plugflow.config.StreamConfig;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.exec.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.io.*;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
@Slf4j
@Service
public class StreamService {
@Autowired
private StreamConfig streamConfig;
// Store streaming processes
private final Map<String, DefaultExecutor> streamProcesses = new ConcurrentHashMap<>();
// Manual stop flags
private final Map<String, Boolean> manualStopFlags = new ConcurrentHashMap<>();
/** Start streaming */
public boolean startStream(String videoPath, String streamKey) {
try {
File videoFile = new File(videoPath);
if (!videoFile.exists()) {
log.error("Video file not found: {}", videoPath);
return false;
}
String rtmpUrl = String.format("rtmp://%s:%d/live/%s",
streamConfig.getZlmHost(), streamConfig.getRtmpPort(), streamKey);
CommandLine cmdLine = getCommandLine(videoPath, rtmpUrl);
DefaultExecutor executor = new DefaultExecutor();
executor.setExitValue(0);
ExecuteWatchdog watchdog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT);
executor.setWatchdog(watchdog);
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
executor.setStreamHandler(new PumpStreamHandler(outputStream));
executor.execute(cmdLine, new ExecuteResultHandler() {
@Override
public void onProcessComplete(int exitValue) {
log.info("Streaming completed, streamKey: {}, exitValue: {}", streamKey, exitValue);
streamProcesses.remove(streamKey);
}
@Override
public void onProcessFailed(ExecuteException e) {
boolean isManualStop = manualStopFlags.remove(streamKey);
if (isManualStop) {
log.info("Streaming manually stopped, streamKey: {}", streamKey);
} else {
log.error("Streaming failed, streamKey: {}, error: {}", streamKey, e.getMessage());
}
streamProcesses.remove(streamKey);
}
});
streamProcesses.put(streamKey, executor);
log.info("Started streaming, streamKey: {}, rtmpUrl: {}", streamKey, rtmpUrl);
return true;
} catch (Exception e) {
log.error("Failed to start streaming", e);
return false;
}
}
private CommandLine getCommandLine(String videoPath, String rtmpUrl) {
CommandLine cmdLine = new CommandLine(streamConfig.getFfmpegPath());
cmdLine.addArgument("-re");
cmdLine.addArgument("-i");
cmdLine.addArgument(videoPath);
cmdLine.addArgument("-c:v");
cmdLine.addArgument("libx264");
cmdLine.addArgument("-c:a");
cmdLine.addArgument("aac");
cmdLine.addArgument("-f");
cmdLine.addArgument("flv");
cmdLine.addArgument("-flvflags");
cmdLine.addArgument("no_duration_filesize");
cmdLine.addArgument(rtmpUrl);
return cmdLine;
}
/** Stop streaming */
public boolean stopStream(String streamKey) {
try {
DefaultExecutor executor = streamProcesses.get(streamKey);
if (executor != null) {
manualStopFlags.put(streamKey, true);
ExecuteWatchdog watchdog = executor.getWatchdog();
if (watchdog != null) {
watchdog.destroyProcess();
} else {
log.warn("Process has no watchdog, cannot force stop, streamKey: {}", streamKey);
}
streamProcesses.remove(streamKey);
log.info("Stopped streaming successfully, streamKey: {}", streamKey);
return true;
}
return false;
} catch (Exception e) {
log.error("Failed to stop streaming", e);
return false;
}
}
/** Get playback URL */
public String getPlayUrl(String streamKey, String protocol) {
switch (protocol.toLowerCase()) {
case "flv":
return String.format("http://%s:%d/live/%s.live.flv",
streamConfig.getZlmHost(), streamConfig.getHttpPort(), streamKey);
case "hls":
return String.format("http://%s:%d/live/%s/hls.m3u8",
streamConfig.getZlmHost(), streamConfig.getHttpPort(), streamKey);
default:
return null;
}
}
/** Check streaming status */
public boolean isStreaming(String streamKey) {
return streamProcesses.containsKey(streamKey);
}
}
2.4 Application Configuration (application.yml)
stream:
zlm-host: 192.168.159.129
rtmp-port: 1935
http-port: 8099
ffmpeg-path: ffmpeg
video-path: \videos\
spring:
servlet:
multipart:
max-file-size: 1GB
max-request-size: 1GB
3. Usage Instructions
3.1 Streaming Process
Start ZLMediaKit service.
Upload video files to the server.
Call the streaming API with video path and stream key.
Spring Boot executes FFmpeg command to push the stream to ZLMediaKit.
3.2 Playback Process
Obtain playback address (HTTP‑FLV or HLS).
Supports real‑time playback and on‑demand replay.
Example FFmpeg command for manual testing:
ffmpeg -re -i "C:\Users\lyk19\Videos\8月9日.mp4" -c:v libx264 -preset ultrafast -tune zerolatency -c:a aac -ar 44100 -b:a 128k -f flv rtmp://192.168.159.129:1935/live/stream
Frontend player (HTML + flv.js) can be used to play the FLV stream:
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>FLV Live Player</title>
<style>
body {margin:0;padding:20px;font-family:Arial;background:#f0f0f0;}
.player-container {max-width:800px;margin:auto;background:#fff;border-radius:8px;padding:20px;box-shadow:0 2px 10px rgba(0,0,0,0.1);}
#videoElement {width:100%;height:450px;background:#000;border-radius:4px;}
.controls {margin-top:15px;text-align:center;}
button {padding:10px 20px;margin:0 5px;border:none;border-radius:4px;background:#007bff;color:#fff;cursor:pointer;}
button:hover {background:#0056b3;}
button:disabled {background:#ccc;cursor:not-allowed;}
.status {margin-top:10px;padding:10px;border-radius:4px;text-align:center;}
.status.success {background:#d4edda;color:#155724;}
.status.error {background:#f8d7da;color:#721c24;}
.status.info {background:#d1ecf1;color:#0c5460;}
</style>
</head>
<body>
<div class="player-container">
<h1>FLV Live Player</h1>
<video id="videoElement" controls muted>Your browser does not support video</video>
<div class="controls">
<button id="playBtn">Play</button>
<button id="pauseBtn" disabled>Pause</button>
<button id="stopBtn" disabled>Stop</button>
<button id="muteBtn">Mute</button>
</div>
<div id="status" class="status info">Ready, click Play to start</div>
</div>
<script src="https://cdn.jsdelivr.net/npm/[email protected]/dist/flv.min.js"></script>
<script>
let flvPlayer = null;
const videoElement = document.getElementById('videoElement');
const playBtn = document.getElementById('playBtn');
const pauseBtn = document.getElementById('pauseBtn');
const stopBtn = document.getElementById('stopBtn');
const muteBtn = document.getElementById('muteBtn');
const statusDiv = document.getElementById('status');
const streamUrl = 'http://192.168.159.129:8099/live/stream.live.flv';
function updateStatus(message, type) {
statusDiv.textContent = message;
statusDiv.className = `status ${type}`;
console.log(`[${type.toUpperCase()}] ${message}`);
}
function updateButtons(playEnabled, pauseEnabled, stopEnabled) {
playBtn.disabled = !playEnabled;
pauseBtn.disabled = !pauseEnabled;
stopBtn.disabled = !stopEnabled;
}
if (!flvjs.isSupported()) {
updateStatus('Your browser does not support FLV playback.', 'error');
playBtn.disabled = true;
}
playBtn.addEventListener('click', function () {
try {
if (flvPlayer) { flvPlayer.destroy(); }
flvPlayer = flvjs.createPlayer({type:'flv',url:streamUrl,isLive:true}, {
enableWorker:false,
lazyLoad:true,
lazyLoadMaxDuration:3*60,
deferLoadAfterSourceOpen:false,
autoCleanupSourceBuffer:true,
enableStashBuffer:false
});
flvPlayer.attachMediaElement(videoElement);
flvPlayer.load();
flvPlayer.on(flvjs.Events.ERROR, (type, detail, info) => {
console.error('FLV error:', type, detail, info);
updateStatus(`Play error: ${detail}`, 'error');
});
flvPlayer.on(flvjs.Events.LOADING_COMPLETE, () => {
updateStatus('Stream loaded', 'success');
});
videoElement.play().then(() => {
updateStatus('Playing live stream', 'success');
updateButtons(false, true, true);
}).catch(error => {
console.error('Play failed:', error);
updateStatus('Play failed: ' + error.message, 'error');
});
} catch (e) {
console.error('Player creation failed:', e);
updateStatus('Player creation failed: ' + e.message, 'error');
}
});
pauseBtn.addEventListener('click', function () {
if (videoElement && !videoElement.paused) {
videoElement.pause();
updateStatus('Playback paused', 'info');
updateButtons(true, false, true);
}
});
stopBtn.addEventListener('click', function () {
if (flvPlayer) {
flvPlayer.pause();
flvPlayer.unload();
flvPlayer.destroy();
flvPlayer = null;
}
videoElement.src = '';
videoElement.load();
updateStatus('Playback stopped', 'info');
updateButtons(true, false, false);
});
muteBtn.addEventListener('click', function () {
videoElement.muted = !videoElement.muted;
muteBtn.textContent = videoElement.muted ? 'Unmute' : 'Mute';
updateStatus(videoElement.muted ? 'Muted' : 'Unmuted', 'info');
});
videoElement.addEventListener('loadstart', () => updateStatus('Loading video stream...', 'info'));
videoElement.addEventListener('canplay', () => updateStatus('Video ready', 'success'));
videoElement.addEventListener('playing', () => {
updateStatus('Live stream playing', 'success');
updateButtons(false, true, true);
});
videoElement.addEventListener('pause', () => {
updateStatus('Playback paused', 'info');
updateButtons(true, false, true);
});
videoElement.addEventListener('error', () => {
updateStatus('Video playback error', 'error');
updateButtons(true, false, false);
});
</script>
</body>
</html>
Signed-in readers can open the original source through BestHub's protected redirect.
This article has been distilled and summarized from source material, then republished for learning and reference. If you believe it infringes your rights, please contact us and we will review it promptly.
Selected Java Interview Questions
A professional Java tech channel sharing common knowledge to help developers fill gaps. Follow us!
How this landed with the community
Was this worth your time?
0 Comments
Thoughtful readers leave field notes, pushback, and hard-won operational detail here.
