Build a Live Streaming Service with ZLMediaKit, FFmpeg, and Spring Boot
This guide walks you through preparing the environment, installing ZLMediaKit via Docker, configuring FFmpeg, implementing a Spring Boot backend to control streaming processes, and using a simple HTML player with flv.js to publish and view live video streams.
1. Environment Preparation
1.1 ZLMediaKit installation and configuration
Pull the Docker image and run the container:
# Pull image
docker pull zlmediakit/zlmediakit:master
# Run container
docker run -d \
--name zlm-server \
-p 1935:1935 \
-p 8099:80 \
-p 8554:554 \
-p 10000:10000 \
-p 10000:10000/udp \
-p 8000:8000/udp \
-v /docker-volumes/zlmediakit/conf/config.ini:/opt/media/conf/config.ini \
zlmediakit/zlmediakit:master
Key config.ini settings (HLS section):
[hls]
broadcastRecordTs=0
deleteDelaySec=300 # keep recorded video for 5 minutes
fileBufSize=65536
filePath=./www # storage path
segDur=2 # each .ts segment length (seconds)
segNum=1000 # max segments in .m3u8
segRetain=9999 # actual retained segments on disk
Check the container logs to verify it started:
docker logs -f zlm-server
1.2 FFmpeg installation
Download a Windows build from https://www.gyan.dev/ffmpeg/builds/ and add the bin directory (e.g., C:\ffmpeg\ffmpeg-7.0.2-essentials_build\bin) to the system PATH environment variable.
2. Spring Boot Backend Implementation
2.1 Add Maven dependency
<dependencies>
<!-- Process management -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-exec</artifactId>
<version>1.3</version>
</dependency>
</dependencies>
2.2 Stream configuration class
package com.lyk.plugflow.config;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
@Data
@Component
@ConfigurationProperties(prefix = "stream")
public class StreamConfig {
/** ZLMediaKit service address (host name or IP, no scheme) */
private String zlmHost;
/** RTMP port, used to build the publish URL in StreamService */
private Integer rtmpPort;
/** HTTP‑FLV / HLS playback port on ZLMediaKit */
private Integer httpPort;
/** FFmpeg executable path (a bare binary name works if it is on PATH) */
private String ffmpegPath;
/** Video storage path */
private String videoPath;
}
2.3 Stream service class
package com.lyk.plugflow.service;
import com.lyk.plugflow.config.StreamConfig;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.exec.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
@Slf4j
@Service
public class StreamService {
// Streaming endpoints/paths bound from configuration (prefix "stream").
@Autowired
private StreamConfig streamConfig;
// Store running processes, keyed by stream key (one FFmpeg push process each)
private final Map<String, DefaultExecutor> streamProcesses = new ConcurrentHashMap<>();
// Manual stop flags, set by stopStream() so the failure callback can tell
// an intentional kill apart from a real process error
private final Map<String, Boolean> manualStopFlags = new ConcurrentHashMap<>();
/**
 * Start streaming: launches an FFmpeg process that pushes the given video
 * file to ZLMediaKit over RTMP under the supplied stream key.
 *
 * @param videoPath local path of the video file to publish
 * @param streamKey unique key for the stream (last segment of the RTMP URL)
 * @return true if the FFmpeg process was launched, false otherwise
 */
public boolean startStream(String videoPath, String streamKey) {
    try {
        File videoFile = new File(videoPath);
        if (!videoFile.exists()) {
            log.error("Video file not found: {}", videoPath);
            return false;
        }
        String rtmpUrl = String.format("rtmp://%s:%d/live/%s",
                streamConfig.getZlmHost(), streamConfig.getRtmpPort(), streamKey);
        CommandLine cmdLine = getCommandLine(videoPath, rtmpUrl);
        DefaultExecutor executor = new DefaultExecutor();
        executor.setExitValue(0);
        // Infinite timeout: the stream runs until the file ends or it is
        // killed manually through this watchdog in stopStream().
        ExecuteWatchdog watchdog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT);
        executor.setWatchdog(watchdog);
        // Drain FFmpeg's console output so the child process never blocks
        // on a full stdout/stderr pipe.
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        executor.setStreamHandler(new PumpStreamHandler(outputStream));
        // Register BEFORE execute(): execution is asynchronous, so the
        // completion callback could otherwise fire (and try to remove the
        // entry) before it was ever added, leaving a stale map entry.
        streamProcesses.put(streamKey, executor);
        executor.execute(cmdLine, new ExecuteResultHandler() {
            @Override
            public void onProcessComplete(int exitValue) {
                log.info("Stream finished, key: {}, exit: {}", streamKey, exitValue);
                streamProcesses.remove(streamKey);
                // Clear any stop flag set just as the process finished,
                // otherwise the flag entry would leak.
                manualStopFlags.remove(streamKey);
            }
            @Override
            public void onProcessFailed(ExecuteException e) {
                // remove() returns null when no flag was set; the original
                // unboxed that directly and threw NPE on non-manual failures.
                boolean manual = Boolean.TRUE.equals(manualStopFlags.remove(streamKey));
                if (manual) {
                    log.info("Stream manually stopped, key: {}", streamKey);
                } else {
                    log.error("Stream failed, key: {}, error: {}", streamKey, e.getMessage());
                }
                streamProcesses.remove(streamKey);
            }
        });
        log.info("Started stream, key: {}, rtmpUrl: {}", streamKey, rtmpUrl);
        return true;
    } catch (Exception e) {
        log.error("Failed to start stream", e);
        // Roll back the registration made before execute().
        streamProcesses.remove(streamKey);
        return false;
    }
}
/**
 * Builds the FFmpeg command line that reads the source file in real time
 * and pushes it to the given RTMP URL as an FLV stream.
 *
 * @param videoPath input video file path
 * @param rtmpUrl   destination RTMP publish URL
 * @return the assembled FFmpeg command line
 */
private CommandLine getCommandLine(String videoPath, String rtmpUrl) {
    CommandLine cmd = new CommandLine(streamConfig.getFfmpegPath());
    String[] ffmpegArgs = {
            "-re",                              // read input at native frame rate
            "-i", videoPath,                    // input file
            "-c:v", "libx264",                  // H.264 video encoder
            "-c:a", "aac",                      // AAC audio encoder
            "-f", "flv",                        // FLV container for RTMP
            "-flvflags", "no_duration_filesize",
            rtmpUrl                             // publish target
    };
    for (String arg : ffmpegArgs) {
        cmd.addArgument(arg);
    }
    return cmd;
}
/**
 * Stop streaming: kills the FFmpeg process registered under the key.
 *
 * @param streamKey key of the stream to stop
 * @return true if a running process was found and a kill was issued
 */
public boolean stopStream(String streamKey) {
    try {
        DefaultExecutor executor = streamProcesses.get(streamKey);
        if (executor == null) {
            return false;
        }
        ExecuteWatchdog watchdog = executor.getWatchdog();
        if (watchdog == null) {
            // Cannot force-stop without a watchdog. The original removed the
            // entry and reported success anyway, leaving an untracked,
            // still-running process and a leaked manual-stop flag; keep the
            // process tracked and report failure instead.
            log.warn("No watchdog, cannot force stop, key: {}", streamKey);
            return false;
        }
        // Set the flag before killing so the failure callback logs this as
        // an intentional stop rather than an error.
        manualStopFlags.put(streamKey, true);
        watchdog.destroyProcess();
        streamProcesses.remove(streamKey);
        log.info("Stopped stream, key: {}", streamKey);
        return true;
    } catch (Exception e) {
        log.error("Failed to stop stream", e);
        return false;
    }
}
/**
 * Get the playback URL for a stream on the requested protocol.
 *
 * @param streamKey stream key used when publishing
 * @param protocol  "flv" for HTTP-FLV or "hls" for HLS (case-insensitive)
 * @return the playback URL, or null for a null/unknown protocol
 */
public String getPlayUrl(String streamKey, String protocol) {
    if (protocol == null) {
        // The original dereferenced protocol unconditionally and threw NPE.
        return null;
    }
    return switch (protocol.toLowerCase()) {
        case "flv" -> String.format("http://%s:%d/live/%s.live.flv",
                streamConfig.getZlmHost(), streamConfig.getHttpPort(), streamKey);
        case "hls" -> String.format("http://%s:%d/live/%s/hls.m3u8",
                streamConfig.getZlmHost(), streamConfig.getHttpPort(), streamKey);
        default -> null;
    };
}
/** Check if a stream is active, i.e. a pushing process is registered for the key. */
public boolean isStreaming(String streamKey) {
    // ConcurrentHashMap never holds null values, so a null lookup result
    // is equivalent to the key being absent.
    return streamProcesses.get(streamKey) != null;
}
}
2.4 Application configuration (application.yml)
stream:
zlm-host: 192.168.159.129
rtmp-port: 1935
http-port: 8099
ffmpeg-path: ffmpeg
video-path: \videos\
spring:
servlet:
multipart:
max-file-size: 1GB
max-request-size: 1GB
3. Usage Instructions
3.1 Streaming workflow
Start the ZLMediaKit service.
Upload video files to the server.
Call the Spring Boot endpoint with video path and a stream key.
Spring Boot launches FFmpeg to push the stream to ZLMediaKit.
3.2 Playback workflow
Obtain the playback URL (HTTP‑FLV or HLS) via getPlayUrl.
Use a front‑end player to view the live or recorded stream.
Example FFmpeg command for manual testing:
ffmpeg -re -i "C:\Users\lyk19\Videos\8月9日.mp4" -c:v libx264 -preset ultrafast -tune zerolatency -c:a aac -ar 44100 -b:a 128k -f flv rtmp://192.168.159.129:1935/live/stream
3.3 Simple HTML player (flv.js)
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>FLV Live Player</title>
<style>
body { margin:0; padding:20px; font-family:Arial, sans-serif; background:#f0f0f0; }
.player-container { max-width:800px; margin:auto; background:#fff; padding:20px; border-radius:8px; box-shadow:0 2px 10px rgba(0,0,0,0.1); }
video { width:100%; height:450px; background:#000; border-radius:4px; }
.controls button { margin:0 5px; padding:10px 20px; border:none; border-radius:4px; background:#007bff; color:#fff; cursor:pointer; }
.controls button:disabled { background:#ccc; cursor:not-allowed; }
.status { margin-top:10px; padding:10px; border-radius:4px; text-align:center; }
.status.success { background:#d4edda; color:#155724; }
.status.error { background:#f8d7da; color:#721c24; }
.status.info { background:#d1ecf1; color:#0c5460; }
</style>
</head>
<body>
<div class="player-container">
<h1>FLV Live Player</h1>
<video id="videoElement" controls muted>Your browser does not support video.</video>
<div class="controls">
<button id="playBtn">Play</button>
<button id="pauseBtn" disabled>Pause</button>
<button id="stopBtn" disabled>Stop</button>
<button id="muteBtn">Mute</button>
</div>
<div id="status" class="status info">Ready – click Play to start.</div>
</div>
<script src="https://cdn.jsdelivr.net/npm/[email protected]/dist/flv.min.js"></script>
<script>
// Single flv.js player instance; recreated on each Play click.
let flvPlayer = null;
// Cached DOM references for the player UI.
const video = document.getElementById('videoElement');
const playBtn = document.getElementById('playBtn');
const pauseBtn = document.getElementById('pauseBtn');
const stopBtn = document.getElementById('stopBtn');
const muteBtn = document.getElementById('muteBtn');
const statusDiv = document.getElementById('status');
// HTTP-FLV playback URL served by ZLMediaKit ("stream" is the stream key).
const streamUrl = 'http://192.168.159.129:8099/live/stream.live.flv';
// Show a status message in the status bar and mirror it to the console.
function updateStatus(msg, type) {
statusDiv.textContent = msg;
statusDiv.className = `status ${type}`;
console.log(`[${type.toUpperCase()}] ${msg}`);
}
// Enable/disable the three transport buttons in one call.
function updateButtons(play, pause, stop) {
playBtn.disabled = !play;
pauseBtn.disabled = !pause;
stopBtn.disabled = !stop;
}
// flv.js needs Media Source Extensions; disable playback if unsupported.
if (!flvjs.isSupported()) {
updateStatus('Browser does not support FLV playback.', 'error');
playBtn.disabled = true;
}
playBtn.addEventListener('click', function() {
try {
// Tear down any previous player before creating a fresh one.
if (flvPlayer) { flvPlayer.destroy(); }
flvPlayer = flvjs.createPlayer({type:'flv', url:streamUrl, isLive:true}, {
enableWorker:false, lazyLoad:true, lazyLoadMaxDuration:180, deferLoadAfterSourceOpen:false,
autoCleanupSourceBuffer:true, enableStashBuffer:false
});
flvPlayer.attachMediaElement(video);
flvPlayer.load();
// Surface flv.js-level errors (network/media) in the status bar.
flvPlayer.on(flvjs.Events.ERROR, (type, detail) => {
console.error('FLV error', type, detail);
updateStatus(`Play error: ${detail}`, 'error');
});
flvPlayer.on(flvjs.Events.LOADING_COMPLETE, () => updateStatus('Stream loaded', 'success'));
flvPlayer.on(flvjs.Events.RECOVERED_EARLY_EOF, () => updateStatus('Recovered early EOF', 'info'));
// play() returns a promise; browser autoplay policies may reject it.
video.play().then(() => {
updateStatus('Playing live stream', 'success');
updateButtons(false, true, true);
}).catch(err => {
console.error('Play failed', err);
updateStatus('Play failed: ' + err.message, 'error');
});
} catch (e) {
console.error('Player creation failed', e);
updateStatus('Player creation failed: ' + e.message, 'error');
}
});
pauseBtn.addEventListener('click', function() {
if (video && !video.paused) {
video.pause();
updateStatus('Playback paused', 'info');
updateButtons(true, false, true);
}
});
stopBtn.addEventListener('click', function() {
// Fully dispose of the player and detach the media source.
if (flvPlayer) {
flvPlayer.pause();
flvPlayer.unload();
flvPlayer.destroy();
flvPlayer = null;
}
video.src = '';
video.load();
updateStatus('Playback stopped', 'info');
updateButtons(true, false, false);
});
muteBtn.addEventListener('click', function() {
video.muted = !video.muted;
muteBtn.textContent = video.muted ? 'Unmute' : 'Mute';
updateStatus(video.muted ? 'Muted' : 'Unmuted', 'info');
});
// Native <video> events keep the status/buttons in sync regardless of how
// playback state changed (user action, stream end, browser controls).
video.addEventListener('loadstart', () => updateStatus('Loading stream...', 'info'));
video.addEventListener('canplay', () => updateStatus('Stream ready', 'success'));
video.addEventListener('playing', () => {
updateStatus('Live stream playing', 'success');
updateButtons(false, true, true);
});
video.addEventListener('pause', () => {
updateStatus('Playback paused', 'info');
updateButtons(true, false, true);
});
video.addEventListener('error', () => {
updateStatus('Video error', 'error');
updateButtons(true, false, false);
});
</script>
</body>
</html>
Code Ape Tech Column
Former Ant Group P8 engineer, pure technologist, sharing full‑stack Java, job interview and career advice through a column. Site: java-family.cn
How this landed with the community
Was this worth your time?
0 Comments
Thoughtful readers leave field notes, pushback, and hard-won operational detail here.
