1. TS
Formats such as AVI, MKV, and MOV are called containers. A container is something that holds other data, and ts is a container too. It can carry various video and audio codecs, as well as subtitles and other data. Data received over rtsp can be stored as-is in its raw codec form, but to play it back with an ordinary player it has to be wrapped in a container. Containers for ordinary videos put various pieces of information at the front of the file, such as the total duration, depending on the container. For something like an NVR, where recording can be cut off at any moment, there is no way to know in advance how long the file currently being written will be. Of course, you could record in fixed time units and keep updating the header at the front of the file, but then if the NVR dies in the middle, the file has to be recovered. Given these constraints, the ts container is one of the good choices: it is simple and it satisfies the requirements.
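One reason ts handles abrupt cut-offs well is its structure: the stream is nothing but a sequence of fixed 188-byte packets, each beginning with the sync byte 0x47, so a file truncated at any point is still playable up to the last complete packet. Below is a minimal sketch of my own (not part of LIVE555) that simply checks this packet layout:

// ts_sync_check.cpp - hypothetical helper, not part of LIVE555:
// verify that a file is a sequence of 188-byte TS packets, each starting with 0x47.
#include <cstdio>

int main(int argc, char* argv[]) {
    if (argc < 2) {
        std::fprintf(stderr, "usage: %s file.ts\n", argv[0]);
        return 1;
    }
    std::FILE* fp = std::fopen(argv[1], "rb");
    if (fp == NULL) {
        std::perror("fopen");
        return 1;
    }
    unsigned char pkt[188];                 // a TS packet is always 188 bytes
    unsigned long packets = 0;
    while (std::fread(pkt, 1, sizeof(pkt), fp) == sizeof(pkt)) {
        if (pkt[0] != 0x47) {               // every packet begins with the sync byte 0x47
            std::fprintf(stderr, "lost sync at packet %lu\n", packets);
            break;
        }
        ++packets;
    }
    std::fclose(fp);
    std::printf("%lu complete TS packets\n", packets);
    return 0;
}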
2. Creating h264 data
An example that turns h264 data into ts is also included in LIVE555's testProgs. Running it with no input shows that it expects a file named "in.264", so first let's create the h264 data that this example needs.
busyman@busyman:~/play/bmNVR/lib/LIVE555/live/testProgs$ ./testH264VideoToTransportStream
Unable to open file "in.264" as a byte-stream file source
Modify afterGettingFrame() in testProgs/testRTSPClient.cpp as shown below so that the received data is saved to h264.dat.
bool gFirst = true;                        // true until the first SPS NAL unit arrives
FILE* gOut = NULL;                         // output file for the raw h264 elementary stream
char hdr[4] = { 0x00, 0x00, 0x00, 0x01 };  // Annex-B start code prepended to each NAL unit

void DummySink::afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes,
                                  struct timeval presentationTime, unsigned /*durationInMicroseconds*/) {
  // We've just received a frame of data. (Optionally) print out information about it:
  if (numTruncatedBytes > 0) {
    envir() << "Oops! numTruncatedBytes is larger than ZERO.\n";
    exit(-1);
  }

  // Skip frames until the first SPS (0x67) so the file starts with the decoder configuration.
  if (true == gFirst) {
    if (0x67 == fReceiveBuffer[0]) {
      gFirst = false;
    }
  }

  if (false == gFirst) {
    if (NULL == gOut) {
      gOut = fopen("h264.dat", "wb");
      if (NULL == gOut) {
        envir() << "Oops! Cannot open the output file.\n";
        exit(-1);
      }
      else {
        envir() << "Start saving the file...\n";
      }
    }
    if (NULL != gOut) {
      // LIVE555 delivers each NAL unit without a start code, so write one before the payload.
      fwrite(hdr, 1, sizeof(hdr), gOut);
      fwrite(fReceiveBuffer, 1, frameSize, gOut);
      fflush(gOut);
    }
  }

#ifdef DEBUG_PRINT_EACH_RECEIVED_FRAME
  if (fStreamId != NULL) envir() << "Stream \"" << fStreamId << "\"; ";
  envir() << fSubsession.mediumName() << "/" << fSubsession.codecName() << ":\tReceived " << frameSize << " bytes";
  if (numTruncatedBytes > 0) envir() << " (with " << numTruncatedBytes << " bytes truncated)";
  char uHexDump[(4*4)+1];
  sprintf(uHexDump, "[%02X][%02X][%02X][%02X]", fReceiveBuffer[0], fReceiveBuffer[1], fReceiveBuffer[2], fReceiveBuffer[3]);
  envir() << ".\t" << uHexDump;
  char uSecsStr[6+1]; // used to output the 'microseconds' part of the presentation time
  sprintf(uSecsStr, "%06u", (unsigned)presentationTime.tv_usec);
  envir() << "\tPresentation time: " << (int)presentationTime.tv_sec << "." << uSecsStr;
  if (fSubsession.rtpSource() != NULL && !fSubsession.rtpSource()->hasBeenSynchronizedUsingRTCP()) {
    envir() << "!"; // mark the debugging output to indicate that this presentation time is not RTCP-synchronized
  }
#ifdef DEBUG_PRINT_NPT
  envir() << "\tNPT: " << fSubsession.getNormalPlayTime(presentationTime);
#endif
  envir() << "\n";
#endif

  // Then continue, to request the next frame of data:
  continuePlaying();
}
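The check against 0x67 works because, in H.264, the low five bits of a NAL unit's first byte are the nal_unit_type; 0x67 is a sequence parameter set (type 7), so saving starts only once the decoder configuration has arrived. The hex dumps printed in the log below can be read the same way. A small helper of my own, not part of the LIVE555 example, just to illustrate the byte layout:

// Hypothetical helper, not part of the LIVE555 example: classify an H.264 NAL unit
// by its first byte. nal_unit_type is the low 5 bits of that byte.
const char* nalTypeName(unsigned char firstByte) {
    switch (firstByte & 0x1F) {
        case 1:  return "non-IDR slice";   // e.g. [61] in the log below
        case 5:  return "IDR slice";       // e.g. [65]
        case 6:  return "SEI";             // e.g. [06]
        case 7:  return "SPS";             // e.g. [67] - saving starts here
        case 8:  return "PPS";             // e.g. [68]
        default: return "other";
    }
}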
Running it produces the following output.
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 4984 bytes. [61][E2][80][02] Presentation time: 1691910340.819820!
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 6572 bytes. [61][E2][A0][02] Presentation time: 1691910340.853820!
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 6054 bytes. [61][E2][C0][02] Presentation time: 1691910340.886820!
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 6442 bytes. [61][E2][E0][02] Presentation time: 1691910340.919820!
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 6330 bytes. [61][E3][00][03] Presentation time: 1691910340.953820!
Start saving the file...
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 15 bytes. [67][4D][00][1F] Presentation time: 1691910340.986820!
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 4 bytes. [68][EE][3C][80] Presentation time: 1691910340.986820!
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 5 bytes. [06][E5][01][22] Presentation time: 1691910340.986820!
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 38559 bytes. [65][B8][00][00] Presentation time: 1691910340.986820!
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 5506 bytes. [61][E0][20][00] Presentation time: 1691910341.019820!
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 5275 bytes. [61][E0][40][00] Presentation time: 1691910341.053820!
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 5540 bytes. [61][E0][60][00] Presentation time: 1691910341.086820!
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 5271 bytes. [61][E0][80][00] Presentation time: 1691910341.119820!
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 6392 bytes. [61][E0][A0][00] Presentation time: 1691910341.153820!
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 5385 bytes. [61][E0][C0][00] Presentation time: 1691910341.186820!
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 5528 bytes. [61][E0][E0][00] Presentation time: 1691910341.219820!
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 5471 bytes. [61][E1][00][01] Presentation time: 1691910341.253820!
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 6507 bytes. [61][E1][20][01] Presentation time: 1691910341.286820!
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 5429 bytes. [61][E1][40][01] Presentation time: 1691910341.319820!
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 5625 bytes. [61][E1][60][01] Presentation time: 1691910341.353820!
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 5624 bytes. [61][E1][80][01] Presentation time: 1691910341.386820!
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 5527 bytes. [61][E1][A0][01] Presentation time: 1691910341.419820!
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 5653 bytes. [61][E1][C0][01] Presentation time: 1691910341.453820!
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 5589 bytes. [61][E1][E0][01] Presentation time: 1691910341.486820!
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 6460 bytes. [61][E2][00][02] Presentation time: 1691910341.519820!
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 5425 bytes. [61][E2][20][02] Presentation time: 1691910341.553820!
Stream "rtsp://127.0.0.1:8554/bm"; video/H264: Received 5428 bytes. [61][E2][40][02] Presentation time: 1691910341.586820!
The saved data can be checked with ffmpeg as shown below.
busyman@busyman:~/play/bmNVR/lib/LIVE555/live$ ffmpeg -i h264.dat
ffmpeg version 4.2.7-0ubuntu0.1 Copyright (c) 2000-2022 the FFmpeg developers
built with gcc 9 (Ubuntu 9.4.0-1ubuntu1~20.04.1)
configuration: --prefix=/usr --extra-version=0ubuntu0.1 --toolchain=hardened --libdir=/usr/lib/x86_64-linux-gnu --incdir=/usr/include/x86_64-linux-gnu --arch=amd64 --enable-gpl --disable-stripping --enable-avresample --disable-filter=resample --enable-avisynth --enable-gnutls --enable-ladspa --enable-libaom --enable-libass --enable-libbluray --enable-libbs2b --enable-libcaca --enable-libcdio --enable-libcodec2 --enable-libflite --enable-libfontconfig --enable-libfreetype --enable-libfribidi --enable-libgme --enable-libgsm --enable-libjack --enable-libmp3lame --enable-libmysofa --enable-libopenjpeg --enable-libopenmpt --enable-libopus --enable-libpulse --enable-librsvg --enable-librubberband --enable-libshine --enable-libsnappy --enable-libsoxr --enable-libspeex --enable-libssh --enable-libtheora --enable-libtwolame --enable-libvidstab --enable-libvorbis --enable-libvpx --enable-libwavpack --enable-libwebp --enable-libx265 --enable-libxml2 --enable-libxvid --enable-libzmq --enable-libzvbi --enable-lv2 --enable-omx --enable-openal --enable-opencl --enable-opengl --enable-sdl2 --enable-libdc1394 --enable-libdrm --enable-libiec61883 --enable-nvenc --enable-chromaprint --enable-frei0r --enable-libx264 --enable-shared
libavutil 56. 31.100 / 56. 31.100
libavcodec 58. 54.100 / 58. 54.100
libavformat 58. 29.100 / 58. 29.100
libavdevice 58. 8.100 / 58. 8.100
libavfilter 7. 57.100 / 7. 57.100
libavresample 4. 0. 0 / 4. 0. 0
libswscale 5. 5.100 / 5. 5.100
libswresample 3. 5.100 / 3. 5.100
libpostproc 55. 5.100 / 55. 5.100
Input #0, h264, from 'h264.dat':
Duration: N/A, bitrate: N/A
Stream #0:0: Video: h264 (Main), yuvj420p(pc, bt709, progressive), 1280x720, 25 fps, 25 tbr, 1200k tbn, 50 tbc
3. Converting to a transport stream
Rename the file created above to in.264, then convert it to a transport stream with testH264VideoToTransportStream.
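A possible way to do the rename, assuming h264.dat was written in the live/ directory as in the ffmpeg run above:

busyman@busyman:~/play/bmNVR/lib/LIVE555/live$ mv h264.dat in.264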
busyman@busyman:~/play/bmNVR/lib/LIVE555/live$ testProgs/testH264VideoToTransportStream
Beginning to read...
Done reading.
Wrote output file: "out.ts"
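For reference, the example itself is short: it chains a ByteStreamFileSource into an H264VideoStreamFramer, feeds that into an MPEG2TransportStreamFromESSource, and writes the result with a FileSink. Below is a rough sketch of that structure, reconstructed from the LIVE555 sources; testProgs/testH264VideoToTransportStream.cpp is the authoritative version and details may differ.

// Rough sketch of what testH264VideoToTransportStream does internally (not the shipped source).
#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"

static char eventLoopWatchVariable = 0;

static void afterPlaying(void* /*clientData*/) {
  eventLoopWatchVariable = 1;              // input fully read; stop the event loop
}

int main() {
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  // The Annex-B elementary stream we saved above, renamed to "in.264"
  ByteStreamFileSource* fileSource = ByteStreamFileSource::createNew(*env, "in.264");
  if (fileSource == NULL) {
    *env << "Unable to open file \"in.264\" as a byte-stream file source\n";
    return 1;
  }

  // Parse the byte stream into H.264 NAL units, keeping the start codes
  FramedSource* framer = H264VideoStreamFramer::createNew(*env, fileSource, True);

  // Multiplex the video into an MPEG transport stream (mpegVersion 5 = H.264)
  MPEG2TransportStreamFromESSource* tsFrames = MPEG2TransportStreamFromESSource::createNew(*env);
  tsFrames->addNewVideoSource(framer, 5);

  // Write the resulting 188-byte TS packets to "out.ts"
  MediaSink* outSink = FileSink::createNew(*env, "out.ts");
  outSink->startPlaying(*tsFrames, afterPlaying, NULL);

  env->taskScheduler().doEventLoop(&eventLoopWatchVariable);
  return 0;
}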
Let's check out.ts with ffmpeg as well.
busyman@busyman:~/play/bmNVR/lib/LIVE555/live$ ffmpeg -i out.ts
ffmpeg version 4.2.7-0ubuntu0.1 Copyright (c) 2000-2022 the FFmpeg developers
built with gcc 9 (Ubuntu 9.4.0-1ubuntu1~20.04.1)
configuration: --prefix=/usr --extra-version=0ubuntu0.1 --toolchain=hardened --libdir=/usr/lib/x86_64-linux-gnu --incdir=/usr/include/x86_64-linux-gnu --arch=amd64 --enable-gpl --disable-stripping --enable-avresample --disable-filter=resample --enable-avisynth --enable-gnutls --enable-ladspa --enable-libaom --enable-libass --enable-libbluray --enable-libbs2b --enable-libcaca --enable-libcdio --enable-libcodec2 --enable-libflite --enable-libfontconfig --enable-libfreetype --enable-libfribidi --enable-libgme --enable-libgsm --enable-libjack --enable-libmp3lame --enable-libmysofa --enable-libopenjpeg --enable-libopenmpt --enable-libopus --enable-libpulse --enable-librsvg --enable-librubberband --enable-libshine --enable-libsnappy --enable-libsoxr --enable-libspeex --enable-libssh --enable-libtheora --enable-libtwolame --enable-libvidstab --enable-libvorbis --enable-libvpx --enable-libwavpack --enable-libwebp --enable-libx265 --enable-libxml2 --enable-libxvid --enable-libzmq --enable-libzvbi --enable-lv2 --enable-omx --enable-openal --enable-opencl --enable-opengl --enable-sdl2 --enable-libdc1394 --enable-libdrm --enable-libiec61883 --enable-nvenc --enable-chromaprint --enable-frei0r --enable-libx264 --enable-shared
libavutil 56. 31.100 / 56. 31.100
libavcodec 58. 54.100 / 58. 54.100
libavformat 58. 29.100 / 58. 29.100
libavdevice 58. 8.100 / 58. 8.100
libavfilter 7. 57.100 / 7. 57.100
libavresample 4. 0. 0 / 4. 0. 0
libswscale 5. 5.100 / 5. 5.100
libswresample 3. 5.100 / 3. 5.100
libpostproc 55. 5.100 / 55. 5.100
Input #0, mpegts, from 'out.ts':
Duration: 00:00:03.77, start: 77345.391311, bitrate: 1693 kb/s
Program 1
Stream #0:0[0xe0]: Video: h264 (Main) ([27][0][0][0] / 0x001B), yuvj420p(pc, bt709, progressive), 1280x720, 30 fps, 30 tbr, 90k tbn, 180k tbc
If everything up to this point works, the basic functions of an NVR have been verified. The fact that two example programs are enough to implement an NVR's core features shows how powerful open source can be.
2023.08.30 - [Play/Miscellaneous] - [bmNVR] Building LIVE555 (ubuntu 20.04)