diff --git a/liveMedia/EBMLNumber.hh b/liveMedia/EBMLNumber.hh
index 8d59cb6..ddbabf8 100644
--- a/liveMedia/EBMLNumber.hh
+++ b/liveMedia/EBMLNumber.hh
@@ -124,6 +124,8 @@ public:
 #define MATROSKA_ID_SEEK_PRE_ROLL 0x56BB
 #define MATROSKA_ID_CODEC_DELAY 0x56AA
 #define MATROSKA_ID_DISCARD_PADDING 0x75A2
+#define MATROSKA_ID_COLOR_SPACE 0x2EB524
+#define MATROSKA_ID_PRIMARIES 0x55BB
 
 class EBMLId: public EBMLNumber {
 public:
diff --git a/liveMedia/Makefile.tail b/liveMedia/Makefile.tail
index e085153..cea85d5 100644
--- a/liveMedia/Makefile.tail
+++ b/liveMedia/Makefile.tail
@@ -23,7 +23,7 @@ DV_SINK_OBJS = DVVideoRTPSink.$(OBJ)
 AC3_SINK_OBJS = AC3AudioRTPSink.$(OBJ)
 
 MISC_SOURCE_OBJS = MediaSource.$(OBJ) FramedSource.$(OBJ) FramedFileSource.$(OBJ) FramedFilter.$(OBJ) ByteStreamFileSource.$(OBJ) ByteStreamMultiFileSource.$(OBJ) ByteStreamMemoryBufferSource.$(OBJ) BasicUDPSource.$(OBJ) DeviceSource.$(OBJ) AudioInputDevice.$(OBJ) WAVAudioFileSource.$(OBJ) $(MPEG_SOURCE_OBJS) $(H263_SOURCE_OBJS) $(AC3_SOURCE_OBJS) $(DV_SOURCE_OBJS) JPEGVideoSource.$(OBJ) AMRAudioSource.$(OBJ) AMRAudioFileSource.$(OBJ) InputFile.$(OBJ) StreamReplicator.$(OBJ)
-MISC_SINK_OBJS = MediaSink.$(OBJ) FileSink.$(OBJ) BasicUDPSink.$(OBJ) AMRAudioFileSink.$(OBJ) H264or5VideoFileSink.$(OBJ) H264VideoFileSink.$(OBJ) H265VideoFileSink.$(OBJ) OggFileSink.$(OBJ) $(MPEG_SINK_OBJS) $(H263_SINK_OBJS) $(H264_OR_5_SINK_OBJS) $(DV_SINK_OBJS) $(AC3_SINK_OBJS) VorbisAudioRTPSink.$(OBJ) TheoraVideoRTPSink.$(OBJ) VP8VideoRTPSink.$(OBJ) VP9VideoRTPSink.$(OBJ) GSMAudioRTPSink.$(OBJ) JPEGVideoRTPSink.$(OBJ) SimpleRTPSink.$(OBJ) AMRAudioRTPSink.$(OBJ) T140TextRTPSink.$(OBJ) TCPStreamSink.$(OBJ) OutputFile.$(OBJ)
+MISC_SINK_OBJS = MediaSink.$(OBJ) FileSink.$(OBJ) BasicUDPSink.$(OBJ) AMRAudioFileSink.$(OBJ) H264or5VideoFileSink.$(OBJ) H264VideoFileSink.$(OBJ) H265VideoFileSink.$(OBJ) OggFileSink.$(OBJ) $(MPEG_SINK_OBJS) $(H263_SINK_OBJS) $(H264_OR_5_SINK_OBJS) $(DV_SINK_OBJS) $(AC3_SINK_OBJS) VorbisAudioRTPSink.$(OBJ) TheoraVideoRTPSink.$(OBJ) VP8VideoRTPSink.$(OBJ) VP9VideoRTPSink.$(OBJ) GSMAudioRTPSink.$(OBJ) JPEGVideoRTPSink.$(OBJ) SimpleRTPSink.$(OBJ) AMRAudioRTPSink.$(OBJ) T140TextRTPSink.$(OBJ) TCPStreamSink.$(OBJ) OutputFile.$(OBJ) RawVideoRTPSink.$(OBJ)
 MISC_FILTER_OBJS = uLawAudioFilter.$(OBJ)
 TRANSPORT_STREAM_TRICK_PLAY_OBJS = MPEG2IndexFromTransportStream.$(OBJ) MPEG2TransportStreamIndexFile.$(OBJ) MPEG2TransportStreamTrickModeFilter.$(OBJ)
 
@@ -264,6 +264,8 @@ VorbisAudioRTPSink.$(CPP):	include/VorbisAudioRTPSink.hh include/Base64.hh inclu
 include/VorbisAudioRTPSink.hh:	include/AudioRTPSink.hh
 TheoraVideoRTPSink.$(CPP):	include/TheoraVideoRTPSink.hh include/Base64.hh include/VorbisAudioRTPSource.hh include/VorbisAudioRTPSink.hh
 include/TheoraVideoRTPSink.hh:	include/VideoRTPSink.hh
+RawVideoRTPSink.$(CPP):	include/RawVideoRTPSink.hh
+include/RawVideoRTPSink.hh:	include/VideoRTPSink.hh
 VP8VideoRTPSink.$(CPP):		include/VP8VideoRTPSink.hh
 include/VP8VideoRTPSink.hh:	include/VideoRTPSink.hh
 VP9VideoRTPSink.$(CPP):		include/VP9VideoRTPSink.hh
diff --git a/liveMedia/MatroskaFile.cpp b/liveMedia/MatroskaFile.cpp
index 0650398..4c54a03 100644
--- a/liveMedia/MatroskaFile.cpp
+++ b/liveMedia/MatroskaFile.cpp
@@ -33,6 +33,7 @@ along with this library; if not, write to the Free Software Foundation, Inc.,
 #include <VP8VideoRTPSink.hh>
 #include <VP9VideoRTPSink.hh>
 #include <TheoraVideoRTPSink.hh>
+#include <RawVideoRTPSink.hh>
 #include <T140TextRTPSink.hh>
 
 ////////// CuePoint definition //////////
@@ -437,6 +438,9 @@ RTPSink* MatroskaFile
       } while (0);
 
       delete[] identificationHeader; delete[] commentHeader; delete[] setupHeader;
+    } else if (strcmp(track->mimeType, "video/RAW") == 0) {
+      result = RawVideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic, 
+                                          track->pixelHeight, track->pixelWidth, track->bitDepth, track->colorSampling, track->colorimetry);
     } else if (strcmp(track->mimeType, "video/H264") == 0) {
       // Use our track's 'Codec Private' data: Bytes 5 and beyond contain SPS and PPSs:
       u_int8_t* SPS = NULL; unsigned SPSSize = 0;
@@ -668,7 +672,7 @@ MatroskaTrack::MatroskaTrack()
     codecPrivateSize(0), codecPrivate(NULL),
     codecPrivateUsesH264FormatForH265(False), codecIsOpus(False),
     headerStrippedBytesSize(0), headerStrippedBytes(NULL),
-    subframeSizeSize(0) {
+    subframeSizeSize(0), pixelHeight(0), pixelWidth(0), bitDepth(8), colorSampling(""), colorimetry("BT709-2") /*Matroska default value for Primaries */{
 }
 
 MatroskaTrack::~MatroskaTrack() {
diff --git a/liveMedia/MatroskaFileParser.cpp b/liveMedia/MatroskaFileParser.cpp
index 366cfb8..7d0abe2 100644
--- a/liveMedia/MatroskaFileParser.cpp
+++ b/liveMedia/MatroskaFileParser.cpp
@@ -460,6 +460,10 @@ Boolean MatroskaFileParser::parseTrack() {
 	      track->mimeType = "video/THEORA";
 	    } else if (strncmp(codecID, "S_TEXT", 6) == 0) {
 	      track->mimeType = "text/T140";
+	    } else if (strncmp(codecID, "V_MJPEG", 7) == 0) {
+	      track->mimeType = "video/JPEG";
+	    } else if (strncmp(codecID, "V_UNCOMPRESSED", 14) == 0) {
+	      track->mimeType = "video/RAW";
 	    }
 	  } else {
 	    delete[] codecID;
@@ -525,6 +529,7 @@ Boolean MatroskaFileParser::parseTrack() {
 #ifdef DEBUG
 	  fprintf(stderr, "\tPixel Width %d\n", pixelWidth);
 #endif
+      if (track != NULL) track->pixelWidth = pixelWidth;
 	}
 	break;
       }
@@ -534,6 +539,7 @@ Boolean MatroskaFileParser::parseTrack() {
 #ifdef DEBUG
 	  fprintf(stderr, "\tPixel Height %d\n", pixelHeight);
 #endif
+      if (track != NULL) track->pixelHeight = pixelHeight;
 	}
 	break;
       }
@@ -604,6 +610,7 @@ Boolean MatroskaFileParser::parseTrack() {
 #ifdef DEBUG
 	  fprintf(stderr, "\tBit Depth %d\n", bitDepth);
 #endif
+	  if (track != NULL) track->bitDepth = bitDepth;
 	}
 	break;
       }
@@ -653,6 +660,70 @@ Boolean MatroskaFileParser::parseTrack() {
 	if (track != NULL) track->isEnabled = False;
 	// Fall through to...
       }
+      case MATROSKA_ID_COLOR_SPACE: {
+	u_int8_t* colourSpace;
+	if (parseEBMLVal_binary(size, colourSpace)) {
+	  // A FourCC identifying the uncompressed pixel format (exactly 4 bytes):
+	  unsigned colourSpaceSize = (unsigned)size.val();
+#ifdef DEBUG
+	  fprintf(stderr, "\tColor space : %02x %02x %02x %02x\n", colourSpace[0], colourSpace[1], colourSpace[2], colourSpace[3]);
+#endif
+	  if (track != NULL && colourSpaceSize == 4) {
+	    // Convert the FourCC to a "sampling" value (RFC 4175).
+	    // (The values assigned below are string literals, so the buffer itself isn't retained.)
+	    if (strncmp((const char*)colourSpace, "I420", 4) == 0 ||
+		strncmp((const char*)colourSpace, "IYUV", 4) == 0) {
+	      track->colorSampling = "YCbCr-4:2:0";
+	    } else if (strncmp((const char*)colourSpace, "YUY2", 4) == 0 ||
+		       strncmp((const char*)colourSpace, "UYVY", 4) == 0) {
+	      track->colorSampling = "YCbCr-4:2:2";
+	    } else if (strncmp((const char*)colourSpace, "AYUV", 4) == 0) {
+	      track->colorSampling = "YCbCr-4:4:4";
+	    } else if (strncmp((const char*)colourSpace, "Y41P", 4) == 0 ||
+		       strncmp((const char*)colourSpace, "Y41T", 4) == 0) {
+	      track->colorSampling = "YCbCr-4:1:1";
+	    } else if (strncmp((const char*)colourSpace, "RGBA", 4) == 0) {
+	      track->colorSampling = "RGBA";
+	    } else if (strncmp((const char*)colourSpace, "BGRA", 4) == 0) {
+	      track->colorSampling = "BGRA";
+	    }
+	  }
+	  // Fix: this buffer was previously leaked whenever it was actually used
+	  // (it was "delete[]"d only in the non-matching branch); always free it:
+	  delete[] colourSpace;
+	}
+	break;
+      }
+      case MATROSKA_ID_PRIMARIES: {
+	unsigned primaries;
+	if (parseEBMLVal_unsigned(size, primaries)) {
+#ifdef DEBUG
+	  fprintf(stderr, "\tPrimaries %u\n", primaries);
+#endif
+	  if (track != NULL) {
+	    // Map the Matroska "Primaries" code onto a RFC 4175 "colorimetry" string:
+	    switch (primaries) {
+	      case 1: track->colorimetry = "BT709-2"; break; // ITU-R BT.709
+	      case 7: track->colorimetry = "SMPTE240M"; break; // SMPTE 240M
+	      case 2: // Unspecified
+	      case 3: // Reserved
+	      case 4: // ITU-R BT.470M
+	      case 5: // ITU-R BT.470BG
+	      case 6: // SMPTE 170M
+	      case 8: // FILM
+	      case 9: // ITU-R BT.2020
+	      default:
+#ifdef DEBUG
+		fprintf(stderr, "\tUnsupported color primaries %u\n", primaries);
+#endif
+		break; // leave "colorimetry" at its "BT709-2" default
+	    }
+	  }
+	}
+	// Fix: this case previously fell through into the "default:" handler below,
+	// whose skipHeader() re-skipped data that had already been parsed:
+	break;
+      }
       default: { // We don't process this header, so just skip over it:
 	skipHeader(size);
 	break;
diff --git a/liveMedia/RawVideoRTPSink.cpp b/liveMedia/RawVideoRTPSink.cpp
new file mode 100644
index 0000000..a51486e
--- /dev/null
+++ b/liveMedia/RawVideoRTPSink.cpp
@@ -0,0 +1,325 @@
+/**********
+This library is free software; you can redistribute it and/or modify it under
+the terms of the GNU Lesser General Public License as published by the
+Free Software Foundation; either version 3 of the License, or (at your
+option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)
+
+This library is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for
+more details.
+
+You should have received a copy of the GNU Lesser General Public License
+along with this library; if not, write to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301  USA
+**********/
+// "liveMedia"
+// Copyright (c) 1996-2018 Live Networks, Inc.  All rights reserved.
+// RTP sink for Raw video
+// Implementation
+
+#include "RawVideoRTPSink.hh"
+
+RawVideoRTPSink* RawVideoRTPSink
+::createNew(UsageEnvironment& env, Groupsock* RTPgs, u_int8_t rtpPayloadFormat,
+        unsigned height, unsigned width, unsigned depth,
+        char const* sampling, char const* colorimetry) {
+  return new RawVideoRTPSink(env, RTPgs,
+                             rtpPayloadFormat,
+                             height, width, depth,
+                             sampling, colorimetry);
+}
+
+RawVideoRTPSink
+::RawVideoRTPSink(UsageEnvironment& env, Groupsock* RTPgs, u_int8_t rtpPayloadFormat,
+                  unsigned height, unsigned width, unsigned depth,
+                  char const* sampling, char const* colorimetry)
+  : VideoRTPSink(env, RTPgs, rtpPayloadFormat, 90000, "RAW"),
+    fFmtpSDPLine(NULL), fSampling(NULL), fWidth(width), fHeight(height),
+    fDepth(depth), fColorimetry(NULL), fLineindex(0) {
+
+  // Construct our "a=fmtp:" SDP line from the format parameters (RFC 4175):
+  unsigned fmtpSDPLineMaxSize = 200;// more than enough space
+  fFmtpSDPLine = new char[fmtpSDPLineMaxSize];
+  snprintf(fFmtpSDPLine, fmtpSDPLineMaxSize, // "snprintf" guards against overflow
+           "a=fmtp:%d sampling=%s;width=%u;height=%u;depth=%u;colorimetry=%s\r\n",
+           rtpPayloadType(), sampling, width, height, depth, colorimetry);
+
+  // Keep our own NUL-terminated copies of the two string parameters:
+  fSampling = new char[strlen(sampling) + 1];
+  strcpy(fSampling, sampling);
+
+  fColorimetry = new char[strlen(colorimetry) + 1];
+  strcpy(fColorimetry, colorimetry);
+  setFrameParameters();
+}
+
+RawVideoRTPSink::~RawVideoRTPSink() {
+  delete[] fFmtpSDPLine;
+  delete[] fSampling;
+  delete[] fColorimetry;}
+
+char const* RawVideoRTPSink::auxSDPLine() {
+  return fFmtpSDPLine;
+}
+
+void RawVideoRTPSink
+::doSpecialFrameHandling(unsigned fragmentationOffset,
+             unsigned char* frameStart,
+             unsigned numBytesInFrame,
+             struct timeval framePresentationTime,
+             unsigned numRemainingBytes) {
+
+  unsigned * lenghts = NULL;
+  unsigned * offsets= NULL;
+  unsigned nbLines = getNbLineInPacket(fragmentationOffset, lenghts, offsets);
+  unsigned specialHeaderSize = 2 + (6 * nbLines);
+  u_int8_t* specialHeader = new u_int8_t[specialHeaderSize];
+
+  //Extended Sequence Number (not used actually)
+  specialHeader[0] = 0;
+  specialHeader[1] = 0;
+
+  for (unsigned i = 0; i < nbLines; i++) {
+    // detection of new line incrementation
+    if ((offsets[i] == 0) && fragmentationOffset != 0) {
+      fLineindex = fLineindex + fFrameParameters.scanLineIterationStep;
+    }
+
+    // Set length
+    specialHeader[2 + (i * 6) + 0] = lenghts[i] >> 8;
+    specialHeader[2 + (i * 6) + 1] = (u_int8_t)lenghts[i];
+
+    // Field Identification (false for us)
+    bool fieldIdent = false;
+
+    // set line index
+    specialHeader[2 + (i * 6) + 2] = ((fLineindex >> 8) & 0x7F) | (fieldIdent << 7);
+    specialHeader[2 + (i * 6) + 3] = (u_int8_t)fLineindex;
+
+    //set Continuation bit
+    bool continuationBit = (i < nbLines - 1) ? true : false;
+
+    //set offset
+    specialHeader[2 + (i * 6) + 4] = ((offsets[i] >> 8) & 0x7F) | (continuationBit << 7);
+    specialHeader[2 + (i * 6) + 5] = (u_int8_t)offsets[i];
+  }
+
+  setSpecialHeaderBytes(specialHeader, specialHeaderSize);
+  
+  if (numRemainingBytes == 0) {
+    // This packet contains the last (or only) fragment of the frame.
+    // Set the RTP 'M' ('marker') bit:
+    setMarkerBit();
+    //reset line index
+    fLineindex = 0;
+  }
+
+  // Also set the RTP timestamp:
+  setTimestamp(framePresentationTime);
+
+  delete [] specialHeader;
+  delete [] lenghts;
+  delete [] offsets;
+}
+
+Boolean RawVideoRTPSink::frameCanAppearAfterPacketStart(unsigned char const* /*frameStart*/,
+                               unsigned /*numBytesInFrame*/) const {
+  // Only one frame per packet:
+  return False;
+}
+
+unsigned RawVideoRTPSink::specialHeaderSize() const {
+  unsigned * lenghts = NULL;
+  unsigned * offsets= NULL;
+  unsigned nbLines = getNbLineInPacket(curFragmentationOffset(), lenghts, offsets);
+  delete[] lenghts;
+  delete[] offsets;
+  return 2 + (6 * nbLines);
+}
+
+unsigned RawVideoRTPSink::getNbLineInPacket(unsigned fragOffset, unsigned * &lenghts, unsigned * &offsets) const
+{
+  // Returns the number of (partial) scan lines in the next packet; caller must "delete[]" both arrays.
+  unsigned rtpHeaderSize = 12;
+  unsigned specialHeaderSize = 2; // Extended Sequence Nb
+  unsigned packetMaxSize = ourMaxPacketSize();
+  unsigned nbLines = 0;
+  unsigned remainingSizeInPacket;
+
+  if (fragOffset >= fFrameParameters.frameSize) {
+    envir() << "RawVideoRTPSink::getNbLineInPacket(): bad fragOffset " << fragOffset << "\n";
+    return 0;
+  }
+  unsigned const maxLines = 100;
+  unsigned lengthArray[maxLines] = {0};
+  unsigned offsetArray[maxLines] = {0};
+  unsigned positionInDataLine = 0;
+  unsigned lineOffset = (fragOffset % fFrameParameters.scanLineSize);
+
+  unsigned remaingLineSize = fFrameParameters.scanLineSize - (fragOffset % fFrameParameters.scanLineSize);
+  do {
+    // Sanity checks: stop if another 6-byte line header no longer fits, or if we
+    // would overflow the fixed-size arrays (possible with very short scan lines):
+    if (nbLines >= maxLines ||
+        packetMaxSize - specialHeaderSize - rtpHeaderSize - 6 < positionInDataLine) {
+      break;
+    }
+
+    //add one line
+    nbLines ++;
+    specialHeaderSize += 6;
+
+    remainingSizeInPacket = packetMaxSize - specialHeaderSize - rtpHeaderSize - positionInDataLine;
+    remainingSizeInPacket -= remainingSizeInPacket % fFrameParameters.pGroupSize; // use only multiple of pgroup
+    lengthArray[nbLines-1] = min(remaingLineSize, remainingSizeInPacket);
+    offsetArray[nbLines-1] = lineOffset * fFrameParameters.scanLineIterationStep / fFrameParameters.pGroupSize;
+    if (remaingLineSize >= remainingSizeInPacket) {
+      break; // this line (or the rest of it) fills the packet
+    }
+    if (fragOffset + remaingLineSize >= fFrameParameters.frameSize) {
+      break; //last packet
+    }
+
+    remaingLineSize = fFrameParameters.scanLineSize;
+    positionInDataLine += lengthArray[nbLines-1];
+    lineOffset = 0;
+  } while (1);
+  lenghts = new unsigned[nbLines];
+  offsets = new unsigned[nbLines];
+  for (unsigned i = 0; i < nbLines; i++) { lenghts[i] = lengthArray[i]; offsets[i] = offsetArray[i]; }
+  return nbLines;
+}
+
+unsigned RawVideoRTPSink::computeOverflowForNewFrame(unsigned newFrameSize) const {
+  unsigned initialOverflow = MultiFramedRTPSink::computeOverflowForNewFrame(newFrameSize);
+
+  // Adjust (increase) this overflow to be a multiple of the pgroup value
+  unsigned numFrameBytesUsed = newFrameSize - initialOverflow;
+  initialOverflow += numFrameBytesUsed % fFrameParameters.pGroupSize;
+
+  return initialOverflow;
+}
+
+void RawVideoRTPSink::setFrameParameters() {
+  fFrameParameters.scanLineIterationStep = 1;
+  if ((strncmp("RGB", fSampling, strlen(fSampling)) == 0) || (strncmp("BGR", fSampling, strlen(fSampling)) == 0)) {
+    switch (fDepth) {
+      case 8:
+        fFrameParameters.pGroupSize = 3;
+        fFrameParameters.nbOfPixelInPGroup = 1;
+        break;
+      case 10:
+        fFrameParameters.pGroupSize = 15;
+        fFrameParameters.nbOfPixelInPGroup = 4;
+        break;
+      case 12:
+        fFrameParameters.pGroupSize = 9;
+        fFrameParameters.nbOfPixelInPGroup = 2;
+        break;
+      case 16:
+        fFrameParameters.pGroupSize = 6;
+        fFrameParameters.nbOfPixelInPGroup = 1;
+        break;
+      default:
+        break;
+    }
+  }
+  else if ((strncmp("RGBA", fSampling, strlen(fSampling)) == 0) || (strncmp("BGRA", fSampling, strlen(fSampling)) == 0)) {
+    switch (fDepth) {
+      case 8:
+        fFrameParameters.pGroupSize = 4;
+        break;
+      case 10:
+        fFrameParameters.pGroupSize = 5;
+        break;
+      case 12:
+        fFrameParameters.pGroupSize = 6;
+        break;
+      case 16:
+        fFrameParameters.pGroupSize = 8;
+        break;
+      default:
+        break;
+    }
+    fFrameParameters.nbOfPixelInPGroup = 1;
+  } else if (strncmp("YCbCr-4:4:4", fSampling, strlen(fSampling)) == 0) {
+    switch (fDepth) {
+      case 8:
+        fFrameParameters.pGroupSize = 3;
+        fFrameParameters.nbOfPixelInPGroup = 1;
+        break;
+      case 10:
+        fFrameParameters.pGroupSize = 15;
+        fFrameParameters.nbOfPixelInPGroup = 4;
+        break;
+      case 12:
+        fFrameParameters.pGroupSize = 9;
+        fFrameParameters.nbOfPixelInPGroup = 2;
+        break;
+      case 16:
+        fFrameParameters.pGroupSize = 6;
+        fFrameParameters.nbOfPixelInPGroup = 1;
+        break;
+      default:
+        break;
+    }
+  } else if (strncmp("YCbCr-4:2:2", fSampling, strlen(fSampling)) == 0) {
+    switch (fDepth) {
+      case 8:
+        fFrameParameters.pGroupSize = 4;
+        break;
+      case 10:
+        fFrameParameters.pGroupSize = 5;
+        break;
+      case 12:
+        fFrameParameters.pGroupSize = 6;
+        break;
+      case 16:
+        fFrameParameters.pGroupSize = 8;
+        break;
+      default:
+        break;
+    }
+    fFrameParameters.nbOfPixelInPGroup = 2;
+  } else if (strncmp("YCbCr-4:1:1", fSampling, strlen(fSampling)) == 0) {
+    switch (fDepth) {
+      case 8:
+        fFrameParameters.pGroupSize = 6;
+        break;
+      case 10:
+        fFrameParameters.pGroupSize = 15;
+        break;
+      case 12:
+        fFrameParameters.pGroupSize = 9;
+        break;
+      case 16:
+        fFrameParameters.pGroupSize = 12;
+        break;
+      default:
+        break;
+    }
+    fFrameParameters.nbOfPixelInPGroup = 4;
+  } else if (strncmp("YCbCr-4:2:0", fSampling, strlen(fSampling)) == 0) {
+    switch (fDepth) {
+      case 8:
+        fFrameParameters.pGroupSize = 6;
+        break;
+      case 10:
+        fFrameParameters.pGroupSize = 15;
+        break;
+      case 12:
+        fFrameParameters.pGroupSize = 9;
+        break;
+      case 16:
+        fFrameParameters.pGroupSize = 12;
+        break;
+      default:
+        break;
+    }
+    fFrameParameters.nbOfPixelInPGroup = 4;
+    fFrameParameters.scanLineIterationStep = 2;
+  }
+  fFrameParameters.frameSize = fHeight * fWidth * fFrameParameters.pGroupSize / fFrameParameters.nbOfPixelInPGroup;
+  fFrameParameters.scanLineSize  = fWidth * fFrameParameters.pGroupSize / fFrameParameters.nbOfPixelInPGroup * fFrameParameters.scanLineIterationStep;
+}
diff --git a/liveMedia/RawVideoRTPSource.cpp b/liveMedia/RawVideoRTPSource.cpp
index c429211..e584c43 100644
--- a/liveMedia/RawVideoRTPSource.cpp
+++ b/liveMedia/RawVideoRTPSource.cpp
@@ -65,7 +65,7 @@ RawVideoRTPSource
 		    unsigned char rtpPayloadFormat,
                     unsigned rtpTimestampFrequency)
   : MultiFramedRTPSource(env, RTPgs, rtpPayloadFormat, rtpTimestampFrequency,
-			 new BufferedPacketFactory),
+			 new RawVideoBufferedPacketFactory),
     fNumLines(0), fNextLine(0), fLineHeaders(NULL) {
 }
 
@@ -176,3 +176,7 @@ void RawVideoBufferedPacket::getNextEnclosedFrameParameters(unsigned char*& /*fr
   frameSize = fOurSource->fLineHeaders[fOurSource->fNextLine++].length;
 }
 
+BufferedPacket* RawVideoBufferedPacketFactory
+::createNewPacket(MultiFramedRTPSource* ourSource) {
+  return new RawVideoBufferedPacket((RawVideoRTPSource*)ourSource);
+}
diff --git a/liveMedia/include/MatroskaFile.hh b/liveMedia/include/MatroskaFile.hh
index 3812a8d..61d1eed 100644
--- a/liveMedia/include/MatroskaFile.hh
+++ b/liveMedia/include/MatroskaFile.hh
@@ -131,6 +131,11 @@ public:
   Boolean codecIsOpus; // a hack for Opus audio
   unsigned headerStrippedBytesSize;
   u_int8_t* headerStrippedBytes;
+  char const* colorSampling;
+  char const* colorimetry;
+  unsigned pixelWidth;
+  unsigned pixelHeight;
+  unsigned bitDepth;
   unsigned subframeSizeSize; // 0 means: frames do not have subframes (the default behavior)
   Boolean haveSubframes() const { return subframeSizeSize > 0; }
 };
diff --git a/liveMedia/include/RawVideoRTPSink.hh b/liveMedia/include/RawVideoRTPSink.hh
new file mode 100644
index 0000000..b83a577
--- /dev/null
+++ b/liveMedia/include/RawVideoRTPSink.hh
@@ -0,0 +1,85 @@
+/**********
+This library is free software; you can redistribute it and/or modify it under
+the terms of the GNU Lesser General Public License as published by the
+Free Software Foundation; either version 3 of the License, or (at your
+option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)
+
+This library is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for
+more details.
+
+You should have received a copy of the GNU Lesser General Public License
+along with this library; if not, write to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301  USA
+**********/
+// "liveMedia"
+// Copyright (c) 1996-2018 Live Networks, Inc.  All rights reserved.
+// RTP sink for Raw video
+// C++ header
+
+#ifndef _RAW_VIDEO_RTP_SINK_HH
+#define _RAW_VIDEO_RTP_SINK_HH
+
+#ifndef _VIDEO_RTP_SINK_HH
+#include "VideoRTPSink.hh"
+#endif
+
+////////// FrameParameters //////////
+
+struct FrameParameters {
+  u_int16_t pGroupSize;
+  u_int16_t nbOfPixelInPGroup;
+  u_int32_t scanLineSize ;
+  u_int32_t frameSize;
+  u_int16_t scanLineIterationStep;
+};
+
+
+class RawVideoRTPSink: public VideoRTPSink {
+public:
+  static RawVideoRTPSink*
+  createNew(UsageEnvironment& env, Groupsock* RTPgs, u_int8_t rtpPayloadFormat,
+        // The following headers provide the 'configuration' information, for the SDP description:
+        unsigned height, unsigned width, unsigned depth,
+        char const* sampling, char const* colorimetry = "BT709-2");
+
+protected:
+  RawVideoRTPSink(UsageEnvironment& env, Groupsock* RTPgs,
+                  u_int8_t rtpPayloadFormat,
+                  unsigned height, unsigned width, unsigned depth,
+                  char const* sampling, char const* colorimetry = "BT709-2");
+  // called only by createNew()
+  
+  virtual ~RawVideoRTPSink();
+  
+private: // redefined virtual functions:
+  virtual char const* auxSDPLine(); // for the "a=fmtp:" SDP line
+  
+  virtual void doSpecialFrameHandling(unsigned fragmentationOffset,
+                      unsigned char* frameStart,
+                      unsigned numBytesInFrame,
+                      struct timeval framePresentationTime,
+                      unsigned numRemainingBytes);
+  virtual Boolean frameCanAppearAfterPacketStart(unsigned char const* frameStart,
+                         unsigned numBytesInFrame) const;
+  virtual unsigned specialHeaderSize() const;
+  virtual unsigned computeOverflowForNewFrame(unsigned newFrameSize) const;
+  
+private:
+  char* fFmtpSDPLine;
+  char* fSampling;
+  unsigned fWidth;
+  unsigned fHeight;
+  unsigned fDepth;
+  char* fColorimetry;
+  unsigned fLineindex;
+  FrameParameters fFrameParameters;
+
+  unsigned getNbLineInPacket(unsigned fragOffset, unsigned * &lenghts, unsigned * &offsets) const;
+  //  return the number of lines, their lengths and offsets from the fragmentation offset of the whole frame.
+  // call delete[] on lenghts and offsets after use of the function
+  void setFrameParameters();
+};
+
+#endif
diff --git a/liveMedia/include/liveMedia.hh b/liveMedia/include/liveMedia.hh
index d1fa0b3..2c8230e 100644
--- a/liveMedia/include/liveMedia.hh
+++ b/liveMedia/include/liveMedia.hh
@@ -92,6 +92,7 @@ along with this library; if not, write to the Free Software Foundation, Inc.,
 #include "VP8VideoRTPSink.hh"
 #include "VP9VideoRTPSink.hh"
 #include "MPEG4GenericRTPSink.hh"
+#include "RawVideoRTPSink.hh"
 #include "MPEG1or2VideoStreamDiscreteFramer.hh"
 #include "MPEG4VideoStreamDiscreteFramer.hh"
 #include "DeviceSource.hh"
