Move solution and projects to src

Authored by TSR Berry on 2023-04-08 01:22:00 +02:00; committed by Mary
parent cd124bda58
commit cee7121058
3466 changed files with 55 additions and 55 deletions

View file

@@ -0,0 +1,175 @@
using Ryujinx.Common.Logging;
using Ryujinx.Graphics.Nvdec.FFmpeg.Native;
using System;
using System.Runtime.InteropServices;
namespace Ryujinx.Graphics.Nvdec.FFmpeg
{
unsafe class FFmpegContext : IDisposable
{
private unsafe delegate int AVCodec_decode(AVCodecContext* avctx, void* outdata, int* got_frame_ptr, AVPacket* avpkt);
private readonly AVCodec_decode _decodeFrame;
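// Kept in a static field so the GC does not collect the delegate while native code holds its function pointer.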
private static readonly FFmpegApi.av_log_set_callback_callback _logFunc;
private readonly AVCodec* _codec;
private AVPacket* _packet;
private AVCodecContext* _context;
public FFmpegContext(AVCodecID codecId)
{
_codec = FFmpegApi.avcodec_find_decoder(codecId);
if (_codec == null)
{
Logger.Error?.PrintMsg(LogClass.FFmpeg, $"Codec wasn't found. Make sure you have the {codecId} codec present in your FFmpeg installation.");
return;
}
_context = FFmpegApi.avcodec_alloc_context3(_codec);
if (_context == null)
{
Logger.Error?.PrintMsg(LogClass.FFmpeg, "Codec context couldn't be allocated.");
return;
}
if (FFmpegApi.avcodec_open2(_context, _codec, null) != 0)
{
Logger.Error?.PrintMsg(LogClass.FFmpeg, "Codec couldn't be opened.");
return;
}
_packet = FFmpegApi.av_packet_alloc();
if (_packet == null)
{
Logger.Error?.PrintMsg(LogClass.FFmpeg, "Packet couldn't be allocated.");
return;
}
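// avcodec_version() packs the version as (major << 16) | (minor << 8) | micro.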
int avCodecRawVersion = FFmpegApi.avcodec_version();
int avCodecMajorVersion = avCodecRawVersion >> 16;
int avCodecMinorVersion = (avCodecRawVersion >> 8) & 0xFF;
// libavcodec 59.24 changed AVCodec: the private API moved out and the codec callback moved into a union.
if (avCodecMajorVersion > 59 || (avCodecMajorVersion == 59 && avCodecMinorVersion > 24))
{
_decodeFrame = Marshal.GetDelegateForFunctionPointer<AVCodec_decode>(((FFCodec<AVCodec>*)_codec)->CodecCallback);
}
// libavcodec 59.x changed the AVCodec private API layout.
else if (avCodecMajorVersion == 59)
{
_decodeFrame = Marshal.GetDelegateForFunctionPointer<AVCodec_decode>(((FFCodecLegacy<AVCodec501>*)_codec)->Decode);
}
// libavcodec 58.x and lower
else
{
_decodeFrame = Marshal.GetDelegateForFunctionPointer<AVCodec_decode>(((FFCodecLegacy<AVCodec>*)_codec)->Decode);
}
}
static FFmpegContext()
{
_logFunc = Log;
// Redirect log output.
FFmpegApi.av_log_set_level(AVLog.MaxOffset);
FFmpegApi.av_log_set_callback(_logFunc);
}
private static void Log(void* ptr, AVLog level, string format, byte* vl)
{
if (level > FFmpegApi.av_log_get_level())
{
return;
}
int lineSize = 1024;
byte* lineBuffer = stackalloc byte[lineSize];
int printPrefix = 1;
FFmpegApi.av_log_format_line(ptr, level, format, vl, lineBuffer, lineSize, &printPrefix);
string line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer).Trim();
switch (level)
{
case AVLog.Panic:
case AVLog.Fatal:
case AVLog.Error:
Logger.Error?.Print(LogClass.FFmpeg, line);
break;
case AVLog.Warning:
Logger.Warning?.Print(LogClass.FFmpeg, line);
break;
case AVLog.Info:
Logger.Info?.Print(LogClass.FFmpeg, line);
break;
case AVLog.Verbose:
case AVLog.Debug:
Logger.Debug?.Print(LogClass.FFmpeg, line);
break;
case AVLog.Trace:
Logger.Trace?.Print(LogClass.FFmpeg, line);
break;
}
}
public int DecodeFrame(Surface output, ReadOnlySpan<byte> bitstream)
{
FFmpegApi.av_frame_unref(output.Frame);
int result;
int gotFrame;
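// Pin the managed bitstream so the native decoder can read it in place.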
fixed (byte* ptr = bitstream)
{
_packet->Data = ptr;
_packet->Size = bitstream.Length;
result = _decodeFrame(_context, output.Frame, &gotFrame, _packet);
}
if (gotFrame == 0)
{
FFmpegApi.av_frame_unref(output.Frame);
// If the frame was not delivered, it was probably delayed.
// Get the next delayed frame by passing a 0 length packet.
_packet->Data = null;
_packet->Size = 0;
result = _decodeFrame(_context, output.Frame, &gotFrame, _packet);
// Set HasBFrames to 0 because all delayed frames have been consumed.
// This prevents the decoder from trying to return a delayed frame next time.
_context->HasBFrames = 0;
}
FFmpegApi.av_packet_unref(_packet);
if (gotFrame == 0)
{
FFmpegApi.av_frame_unref(output.Frame);
return -1;
}
return result < 0 ? result : 0;
}
public void Dispose()
{
fixed (AVPacket** ppPacket = &_packet)
{
FFmpegApi.av_packet_free(ppPacket);
}
FFmpegApi.avcodec_close(_context);
fixed (AVCodecContext** ppContext = &_context)
{
FFmpegApi.avcodec_free_context(ppContext);
}
}
}
}
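
For orientation, a minimal sketch (not part of this commit) of how this context is driven; annexBBitstream is a placeholder for real NAL data, and the actual callers are the H264 and Vp8 decoders later in this diff:

var surface = new Surface(1280, 720); // allocates the AVFrame that will hold the decoded picture
var context = new FFmpegContext(AVCodecID.AV_CODEC_ID_H264);
byte[] annexBBitstream = new byte[0]; // placeholder: Annex B NAL units (SPS/PPS + slices)
if (context.DecodeFrame(surface, annexBBitstream) == 0)
{
// surface.YPlane / surface.UPlane / surface.VPlane now describe the decoded planes.
}
context.Dispose();
surface.Dispose();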

View file

@@ -0,0 +1,56 @@
using Ryujinx.Graphics.Nvdec.FFmpeg.Native;
using Ryujinx.Graphics.Video;
using System;
namespace Ryujinx.Graphics.Nvdec.FFmpeg.H264
{
public sealed class Decoder : IH264Decoder
{
public bool IsHardwareAccelerated => false;
private const int WorkBufferSize = 0x200;
private readonly byte[] _workBuffer = new byte[WorkBufferSize];
private FFmpegContext _context = new FFmpegContext(AVCodecID.AV_CODEC_ID_H264);
private int _oldOutputWidth;
private int _oldOutputHeight;
public ISurface CreateSurface(int width, int height)
{
return new Surface(width, height);
}
public bool Decode(ref H264PictureInfo pictureInfo, ISurface output, ReadOnlySpan<byte> bitstream)
{
Surface outSurf = (Surface)output;
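// Recreate the decoder whenever the requested output size changes so stale decoder state is discarded.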
if (outSurf.RequestedWidth != _oldOutputWidth ||
outSurf.RequestedHeight != _oldOutputHeight)
{
_context.Dispose();
_context = new FFmpegContext(AVCodecID.AV_CODEC_ID_H264);
_oldOutputWidth = outSurf.RequestedWidth;
_oldOutputHeight = outSurf.RequestedHeight;
}
Span<byte> bs = Prepend(bitstream, SpsAndPpsReconstruction.Reconstruct(ref pictureInfo, _workBuffer));
return _context.DecodeFrame(outSurf, bs) == 0;
}
private static byte[] Prepend(ReadOnlySpan<byte> data, ReadOnlySpan<byte> prep)
{
byte[] output = new byte[data.Length + prep.Length];
prep.CopyTo(output);
data.CopyTo(new Span<byte>(output).Slice(prep.Length));
return output;
}
public void Dispose() => _context.Dispose();
}
}

View file

@@ -0,0 +1,121 @@
using System;
using System.Numerics;
namespace Ryujinx.Graphics.Nvdec.FFmpeg.H264
{
struct H264BitStreamWriter
{
private const int BufferSize = 8;
private readonly byte[] _workBuffer;
private int _offset;
private int _buffer;
private int _bufferPos;
public H264BitStreamWriter(byte[] workBuffer)
{
_workBuffer = workBuffer;
_offset = 0;
_buffer = 0;
_bufferPos = 0;
}
public void WriteBit(bool value)
{
WriteBits(value ? 1 : 0, 1);
}
public void WriteBits(int value, int valueSize)
{
int valuePos = 0;
int remaining = valueSize;
while (remaining > 0)
{
int copySize = remaining;
int free = GetFreeBufferBits();
if (copySize > free)
{
copySize = free;
}
int mask = (1 << copySize) - 1;
int srcShift = (valueSize - valuePos) - copySize;
int dstShift = (BufferSize - _bufferPos) - copySize;
_buffer |= ((value >> srcShift) & mask) << dstShift;
valuePos += copySize;
_bufferPos += copySize;
remaining -= copySize;
}
}
private int GetFreeBufferBits()
{
if (_bufferPos == BufferSize)
{
Flush();
}
return BufferSize - _bufferPos;
}
public void Flush()
{
if (_bufferPos != 0)
{
_workBuffer[_offset++] = (byte)_buffer;
_buffer = 0;
_bufferPos = 0;
}
}
public void End()
{
WriteBit(true);
Flush();
}
public Span<byte> AsSpan()
{
return new Span<byte>(_workBuffer).Slice(0, _offset);
}
public void WriteU(uint value, int valueSize) => WriteBits((int)value, valueSize);
public void WriteSe(int value) => WriteExpGolombCodedInt(value);
public void WriteUe(uint value) => WriteExpGolombCodedUInt(value);
private void WriteExpGolombCodedInt(int value)
{
int sign = value <= 0 ? 0 : 1;
if (value < 0)
{
value = -value;
}
value = (value << 1) - sign;
WriteExpGolombCodedUInt((uint)value);
}
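// Exp-Golomb ue(v): for codeNum N, size = floor(log2(N + 1)) + 1; write (size - 1) zero bits and a 1,
// then (N + 1 - 2^(size - 1)) in (size - 1) bits. E.g. N = 3: size = 3, "001" then "00" -> "00100".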
private void WriteExpGolombCodedUInt(uint value)
{
int size = 32 - BitOperations.LeadingZeroCount(value + 1);
WriteBits(1, size);
value -= (1u << (size - 1)) - 1;
WriteBits((int)value, size - 1);
}
}
}

View file

@@ -0,0 +1,159 @@
using Ryujinx.Common.Memory;
using Ryujinx.Graphics.Video;
using System;
namespace Ryujinx.Graphics.Nvdec.FFmpeg.H264
{
static class SpsAndPpsReconstruction
{
public static Span<byte> Reconstruct(ref H264PictureInfo pictureInfo, byte[] workBuffer)
{
H264BitStreamWriter writer = new H264BitStreamWriter(workBuffer);
// Sequence Parameter Set.
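// Annex B start code prefix (0x000001), then the NAL header: forbidden_zero_bit = 0, nal_ref_idc = 3, nal_unit_type = 7 (SPS).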
writer.WriteU(1, 24);
writer.WriteU(0, 1);
writer.WriteU(3, 2);
writer.WriteU(7, 5);
writer.WriteU(100, 8); // Profile idc
writer.WriteU(0, 8); // Reserved
writer.WriteU(31, 8); // Level idc
writer.WriteUe(0); // Seq parameter set id
writer.WriteUe(pictureInfo.ChromaFormatIdc);
if (pictureInfo.ChromaFormatIdc == 3)
{
writer.WriteBit(false); // Separate colour plane flag
}
writer.WriteUe(0); // Bit depth luma minus 8
writer.WriteUe(0); // Bit depth chroma minus 8
writer.WriteBit(pictureInfo.QpprimeYZeroTransformBypassFlag);
writer.WriteBit(false); // Scaling matrix present flag
writer.WriteUe(pictureInfo.Log2MaxFrameNumMinus4);
writer.WriteUe(pictureInfo.PicOrderCntType);
if (pictureInfo.PicOrderCntType == 0)
{
writer.WriteUe(pictureInfo.Log2MaxPicOrderCntLsbMinus4);
}
else if (pictureInfo.PicOrderCntType == 1)
{
writer.WriteBit(pictureInfo.DeltaPicOrderAlwaysZeroFlag);
writer.WriteSe(0); // Offset for non-ref pic
writer.WriteSe(0); // Offset for top to bottom field
writer.WriteUe(0); // Num ref frames in pic order cnt cycle
}
writer.WriteUe(16); // Max num ref frames
writer.WriteBit(false); // Gaps in frame num value allowed flag
writer.WriteUe(pictureInfo.PicWidthInMbsMinus1);
writer.WriteUe(pictureInfo.PicHeightInMapUnitsMinus1);
writer.WriteBit(pictureInfo.FrameMbsOnlyFlag);
if (!pictureInfo.FrameMbsOnlyFlag)
{
writer.WriteBit(pictureInfo.MbAdaptiveFrameFieldFlag);
}
writer.WriteBit(pictureInfo.Direct8x8InferenceFlag);
writer.WriteBit(false); // Frame cropping flag
writer.WriteBit(false); // VUI parameter present flag
writer.End();
// Picture Parameter Set.
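// Annex B start code prefix, then the NAL header with nal_unit_type = 8 (PPS).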
writer.WriteU(1, 24);
writer.WriteU(0, 1);
writer.WriteU(3, 2);
writer.WriteU(8, 5);
writer.WriteUe(0); // Pic parameter set id
writer.WriteUe(0); // Seq parameter set id
writer.WriteBit(pictureInfo.EntropyCodingModeFlag);
writer.WriteBit(pictureInfo.PicOrderPresentFlag);
writer.WriteUe(0); // Num slice groups minus 1
writer.WriteUe(pictureInfo.NumRefIdxL0ActiveMinus1);
writer.WriteUe(pictureInfo.NumRefIdxL1ActiveMinus1);
writer.WriteBit(pictureInfo.WeightedPredFlag);
writer.WriteU(pictureInfo.WeightedBipredIdc, 2);
writer.WriteSe(pictureInfo.PicInitQpMinus26);
writer.WriteSe(0); // Pic init qs minus 26
writer.WriteSe(pictureInfo.ChromaQpIndexOffset);
writer.WriteBit(pictureInfo.DeblockingFilterControlPresentFlag);
writer.WriteBit(pictureInfo.ConstrainedIntraPredFlag);
writer.WriteBit(pictureInfo.RedundantPicCntPresentFlag);
writer.WriteBit(pictureInfo.Transform8x8ModeFlag);
writer.WriteBit(pictureInfo.ScalingMatrixPresent);
if (pictureInfo.ScalingMatrixPresent)
{
for (int index = 0; index < 6; index++)
{
writer.WriteBit(true);
WriteScalingList(ref writer, pictureInfo.ScalingLists4x4[index]);
}
if (pictureInfo.Transform8x8ModeFlag)
{
for (int index = 0; index < 2; index++)
{
writer.WriteBit(true);
WriteScalingList(ref writer, pictureInfo.ScalingLists8x8[index]);
}
}
}
writer.WriteSe(pictureInfo.SecondChromaQpIndexOffset);
writer.End();
return writer.AsSpan();
}
// ZigZag LUTs from libavcodec.
private static ReadOnlySpan<byte> ZigZagDirect => new byte[]
{
0, 1, 8, 16, 9, 2, 3, 10,
17, 24, 32, 25, 18, 11, 4, 5,
12, 19, 26, 33, 40, 48, 41, 34,
27, 20, 13, 6, 7, 14, 21, 28,
35, 42, 49, 56, 57, 50, 43, 36,
29, 22, 15, 23, 30, 37, 44, 51,
58, 59, 52, 45, 38, 31, 39, 46,
53, 60, 61, 54, 47, 55, 62, 63
};
private static ReadOnlySpan<byte> ZigZagScan => new byte[]
{
0 + 0 * 4, 1 + 0 * 4, 0 + 1 * 4, 0 + 2 * 4,
1 + 1 * 4, 2 + 0 * 4, 3 + 0 * 4, 2 + 1 * 4,
1 + 2 * 4, 0 + 3 * 4, 1 + 3 * 4, 2 + 2 * 4,
3 + 1 * 4, 3 + 2 * 4, 2 + 3 * 4, 3 + 3 * 4
};
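// Writes the scaling_list() syntax: one delta_scale (value - lastScale) per coefficient, visited in zig-zag order.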
private static void WriteScalingList(ref H264BitStreamWriter writer, IArray<byte> list)
{
ReadOnlySpan<byte> scan = list.Length == 16 ? ZigZagScan : ZigZagDirect;
int lastScale = 8;
for (int index = 0; index < list.Length; index++)
{
byte value = list[scan[index]];
int deltaScale = value - lastScale;
writer.WriteSe(deltaScale);
lastScale = value;
}
}
}
}

View file

@@ -0,0 +1,26 @@
using System;
namespace Ryujinx.Graphics.Nvdec.FFmpeg.Native
{
struct AVCodec
{
#pragma warning disable CS0649
public unsafe byte* Name;
public unsafe byte* LongName;
public int Type;
public AVCodecID Id;
public int Capabilities;
public byte MaxLowRes;
public unsafe AVRational* SupportedFramerates;
public IntPtr PixFmts;
public IntPtr SupportedSamplerates;
public IntPtr SampleFmts;
// Deprecated
public unsafe ulong* ChannelLayouts;
public unsafe IntPtr PrivClass;
public IntPtr Profiles;
public unsafe byte* WrapperName;
public IntPtr ChLayouts;
#pragma warning restore CS0649
}
}

View file

@@ -0,0 +1,25 @@
using System;
namespace Ryujinx.Graphics.Nvdec.FFmpeg.Native
{
struct AVCodec501
{
#pragma warning disable CS0649
public unsafe byte* Name;
public unsafe byte* LongName;
public int Type;
public AVCodecID Id;
public int Capabilities;
public byte MaxLowRes;
public unsafe AVRational* SupportedFramerates;
public IntPtr PixFmts;
public IntPtr SupportedSamplerates;
public IntPtr SampleFmts;
// Deprecated
public unsafe ulong* ChannelLayouts;
public unsafe IntPtr PrivClass;
public IntPtr Profiles;
public unsafe byte* WrapperName;
#pragma warning restore CS0649
}
}

View file

@@ -0,0 +1,171 @@
using Ryujinx.Common.Memory;
using System;
namespace Ryujinx.Graphics.Nvdec.FFmpeg.Native
{
struct AVCodecContext
{
#pragma warning disable CS0649
public unsafe IntPtr AvClass;
public int LogLevelOffset;
public int CodecType;
public unsafe AVCodec* Codec;
public AVCodecID CodecId;
public uint CodecTag;
public IntPtr PrivData;
public IntPtr Internal;
public IntPtr Opaque;
public long BitRate;
public int BitRateTolerance;
public int GlobalQuality;
public int CompressionLevel;
public int Flags;
public int Flags2;
public IntPtr ExtraData;
public int ExtraDataSize;
public AVRational TimeBase;
public int TicksPerFrame;
public int Delay;
public int Width;
public int Height;
public int CodedWidth;
public int CodedHeight;
public int GopSize;
public int PixFmt;
public IntPtr DrawHorizBand;
public IntPtr GetFormat;
public int MaxBFrames;
public float BQuantFactor;
public float BQuantOffset;
public int HasBFrames;
public float IQuantFactor;
public float IQuantOffset;
public float LumiMasking;
public float TemporalCplxMasking;
public float SpatialCplxMasking;
public float PMasking;
public float DarkMasking;
public int SliceCount;
public IntPtr SliceOffset;
public AVRational SampleAspectRatio;
public int MeCmp;
public int MeSubCmp;
public int MbCmp;
public int IldctCmp;
public int DiaSize;
public int LastPredictorCount;
public int MePreCmp;
public int PreDiaSize;
public int MeSubpelQuality;
public int MeRange;
public int SliceFlags;
public int MbDecision;
public IntPtr IntraMatrix;
public IntPtr InterMatrix;
public int IntraDcPrecision;
public int SkipTop;
public int SkipBottom;
public int MbLmin;
public int MbLmax;
public int BidirRefine;
public int KeyintMin;
public int Refs;
public int Mv0Threshold;
public int ColorPrimaries;
public int ColorPrc;
public int Colorspace;
public int ColorRange;
public int ChromaSampleLocation;
public int Slices;
public int FieldOrder;
public int SampleRate;
public int Channels;
public int SampleFmt;
public int FrameSize;
public int FrameNumber;
public int BlockAlign;
public int CutOff;
public ulong ChannelLayout;
public ulong RequestChannelLayout;
public int AudioServiceType;
public int RequestSampleFmt;
public IntPtr GetBuffer2;
public float QCompress;
public float QBlur;
public int QMin;
public int QMax;
public int MaxQdiff;
public int RcBufferSize;
public int RcOverrideCount;
public IntPtr RcOverride;
public long RcMaxRate;
public long RcMinRate;
public float RcMaxAvailableVbvUse;
public float RcMinVbvOverflowUse;
public int RcInitialBufferOccupancy;
public int Trellis;
public IntPtr StatsOut;
public IntPtr StatsIn;
public int WorkaroundBugs;
public int StrictStdCompliance;
public int ErrorConcealment;
public int Debug;
public int ErrRecognition;
public long ReorderedOpaque;
public IntPtr HwAccel;
public IntPtr HwAccelContext;
public Array8<ulong> Error;
public int DctAlgo;
public int IdctAlgo;
public int BitsPerCodedSample;
public int BitsPerRawSample;
public int LowRes;
public int ThreadCount;
public int ThreadType;
public int ActiveThreadType;
public int ThreadSafeCallbacks;
public IntPtr Execute;
public IntPtr Execute2;
public int NsseWeight;
public int Profile;
public int Level;
public int SkipLoopFilter;
public int SkipIdct;
public int SkipFrame;
public IntPtr SubtitleHeader;
public int SubtitleHeaderSize;
public int InitialPadding;
public AVRational Framerate;
public int SwPixFmt;
public AVRational PktTimebase;
public IntPtr CodecDescriptor;
public long PtsCorrectionNumFaultyPts;
public long PtsCorrectionNumFaultyDts;
public long PtsCorrectionLastPts;
public long PtsCorrectionLastDts;
public IntPtr SubCharenc;
public int SubCharencMode;
public int SkipAlpha;
public int SeekPreroll;
public int DebugMv;
public IntPtr ChromaIntraMatrix;
public IntPtr DumpSeparator;
public IntPtr CodecWhitelist;
public uint Properties;
public IntPtr CodedSideData;
public int NbCodedSideData;
public IntPtr HwFramesCtx;
public int SubTextFormat;
public int TrailingPadding;
public long MaxPixels;
public IntPtr HwDeviceCtx;
public int HwAccelFlags;
public int ApplyCropping;
public int ExtraHwFrames;
public int DiscardDamagedPercentage;
public long MaxSamples;
public int ExportSideData;
public IntPtr GetEncodeBuffer;
#pragma warning restore CS0649
}
}

View file

@@ -0,0 +1,8 @@
namespace Ryujinx.Graphics.Nvdec.FFmpeg.Native
{
enum AVCodecID
{
AV_CODEC_ID_H264 = 27,
AV_CODEC_ID_VP8 = 139,
}
}

View file

@@ -0,0 +1,37 @@
using Ryujinx.Common.Memory;
using System;
namespace Ryujinx.Graphics.Nvdec.FFmpeg.Native
{
struct AVFrame
{
#pragma warning disable CS0649
public Array8<IntPtr> Data;
public Array8<int> LineSize;
public IntPtr ExtendedData;
public int Width;
public int Height;
public int NumSamples;
public int Format;
public int KeyFrame;
public int PictureType;
public AVRational SampleAspectRatio;
public long Pts;
public long PktDts;
public AVRational TimeBase;
public int CodedPictureNumber;
public int DisplayPictureNumber;
public int Quality;
public IntPtr Opaque;
public int RepeatPicture;
public int InterlacedFrame;
public int TopFieldFirst;
public int PaletteHasChanged;
public long ReorderedOpaque;
public int SampleRate;
public ulong ChannelLayout;
#pragma warning restore CS0649
// NOTE: More fields follow, but their layout changes between versions and we don't need them. This is safe because we only manipulate this struct behind a reference.
}
}

View file

@@ -0,0 +1,15 @@
namespace Ryujinx.Graphics.Nvdec.FFmpeg.Native
{
enum AVLog
{
Panic = 0,
Fatal = 8,
Error = 16,
Warning = 24,
Info = 32,
Verbose = 40,
Debug = 48,
Trace = 56,
MaxOffset = 64
}
}

View file

@@ -0,0 +1,26 @@
using System;
using AVBufferRef = System.IntPtr;
namespace Ryujinx.Graphics.Nvdec.FFmpeg.Native
{
struct AVPacket
{
#pragma warning disable CS0649
public unsafe AVBufferRef *Buf;
public long Pts;
public long Dts;
public unsafe byte* Data;
public int Size;
public int StreamIndex;
public int Flags;
public IntPtr SizeData;
public int SizeDataElems;
public long Duration;
public long Position;
public IntPtr Opaque;
public unsafe AVBufferRef *OpaqueRef;
public AVRational TimeBase;
#pragma warning restore CS0649
}
}

View file

@@ -0,0 +1,8 @@
namespace Ryujinx.Graphics.Nvdec.FFmpeg.Native
{
public struct AVRational
{
public int Numerator;
public int Denominator;
}
}

View file

@@ -0,0 +1,21 @@
using System;
namespace Ryujinx.Graphics.Nvdec.FFmpeg.Native
{
struct FFCodec<T> where T: struct
{
#pragma warning disable CS0649
public T Base;
public int CapsInternalOrCbType;
public int PrivDataSize;
public IntPtr UpdateThreadContext;
public IntPtr UpdateThreadContextForUser;
public IntPtr Defaults;
public IntPtr InitStaticData;
public IntPtr Init;
public IntPtr CodecCallback;
#pragma warning restore CS0649
// NOTE: More fields follow, but their layout changes between versions and we don't need them. This is safe because we only manipulate this struct behind a reference.
}
}

View file

@@ -0,0 +1,23 @@
using System;
namespace Ryujinx.Graphics.Nvdec.FFmpeg.Native
{
struct FFCodecLegacy<T> where T: struct
{
#pragma warning disable CS0649
public T Base;
public uint CapsInternalOrCbType;
public int PrivDataSize;
public IntPtr UpdateThreadContext;
public IntPtr UpdateThreadContextForUser;
public IntPtr Defaults;
public IntPtr InitStaticData;
public IntPtr Init;
public IntPtr EncodeSub;
public IntPtr Encode2;
public IntPtr Decode;
#pragma warning restore CS0649
// NOTE: More fields follow, but their layout changes between versions and we don't need them. This is safe because we only manipulate this struct behind a reference.
}
}

View file

@@ -0,0 +1,129 @@
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Runtime.InteropServices;
namespace Ryujinx.Graphics.Nvdec.FFmpeg.Native
{
static partial class FFmpegApi
{
public const string AvCodecLibraryName = "avcodec";
public const string AvUtilLibraryName = "avutil";
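// Accepted major versions: avcodec 58-59 and avutil 56-57 (FFmpeg 4.x and 5.x).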
private static readonly Dictionary<string, (int, int)> _librariesWhitelist = new Dictionary<string, (int, int)>
{
{ AvCodecLibraryName, (58, 59) },
{ AvUtilLibraryName, (56, 57) }
};
private static string FormatLibraryNameForCurrentOs(string libraryName, int version)
{
if (OperatingSystem.IsWindows())
{
return $"{libraryName}-{version}.dll";
}
else if (OperatingSystem.IsLinux())
{
return $"lib{libraryName}.so.{version}";
}
else if (OperatingSystem.IsMacOS())
{
return $"lib{libraryName}.{version}.dylib";
}
else
{
throw new NotImplementedException($"Unsupported OS for FFmpeg: {RuntimeInformation.RuntimeIdentifier}");
}
}
private static bool TryLoadWhitelistedLibrary(string libraryName, Assembly assembly, DllImportSearchPath? searchPath, out IntPtr handle)
{
handle = IntPtr.Zero;
if (_librariesWhitelist.TryGetValue(libraryName, out var value))
{
(int minVersion, int maxVersion) = value;
for (int version = maxVersion; version >= minVersion; version--)
{
if (NativeLibrary.TryLoad(FormatLibraryNameForCurrentOs(libraryName, version), assembly, searchPath, out handle))
{
return true;
}
}
}
return false;
}
static FFmpegApi()
{
NativeLibrary.SetDllImportResolver(typeof(FFmpegApi).Assembly, (name, assembly, path) =>
{
IntPtr handle;
if (name == AvUtilLibraryName && TryLoadWhitelistedLibrary(AvUtilLibraryName, assembly, path, out handle))
{
return handle;
}
else if (name == AvCodecLibraryName && TryLoadWhitelistedLibrary(AvCodecLibraryName, assembly, path, out handle))
{
return handle;
}
return IntPtr.Zero;
});
}
public unsafe delegate void av_log_set_callback_callback(void* a0, AVLog level, [MarshalAs(UnmanagedType.LPUTF8Str)] string a2, byte* a3);
[LibraryImport(AvUtilLibraryName)]
internal static unsafe partial AVFrame* av_frame_alloc();
[LibraryImport(AvUtilLibraryName)]
internal static unsafe partial void av_frame_unref(AVFrame* frame);
[LibraryImport(AvUtilLibraryName)]
internal static unsafe partial void av_free(AVFrame* frame);
[LibraryImport(AvUtilLibraryName)]
internal static unsafe partial void av_log_set_level(AVLog level);
[LibraryImport(AvUtilLibraryName)]
internal static unsafe partial void av_log_set_callback(av_log_set_callback_callback callback);
[LibraryImport(AvUtilLibraryName)]
internal static unsafe partial AVLog av_log_get_level();
[LibraryImport(AvUtilLibraryName)]
internal static unsafe partial void av_log_format_line(void* ptr, AVLog level, [MarshalAs(UnmanagedType.LPUTF8Str)] string fmt, byte* vl, byte* line, int lineSize, int* printPrefix);
[LibraryImport(AvCodecLibraryName)]
internal static unsafe partial AVCodec* avcodec_find_decoder(AVCodecID id);
[LibraryImport(AvCodecLibraryName)]
internal static unsafe partial AVCodecContext* avcodec_alloc_context3(AVCodec* codec);
[LibraryImport(AvCodecLibraryName)]
internal static unsafe partial int avcodec_open2(AVCodecContext* avctx, AVCodec* codec, void **options);
[LibraryImport(AvCodecLibraryName)]
internal static unsafe partial int avcodec_close(AVCodecContext* avctx);
[LibraryImport(AvCodecLibraryName)]
internal static unsafe partial void avcodec_free_context(AVCodecContext** avctx);
[LibraryImport(AvCodecLibraryName)]
internal static unsafe partial AVPacket* av_packet_alloc();
[LibraryImport(AvCodecLibraryName)]
internal static unsafe partial void av_packet_unref(AVPacket* pkt);
[LibraryImport(AvCodecLibraryName)]
internal static unsafe partial void av_packet_free(AVPacket** pkt);
[LibraryImport(AvCodecLibraryName)]
internal static unsafe partial int avcodec_version();
}
}

View file

@@ -0,0 +1,13 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net7.0</TargetFramework>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\Ryujinx.Common\Ryujinx.Common.csproj" />
<ProjectReference Include="..\Ryujinx.Graphics.Video\Ryujinx.Graphics.Video.csproj" />
</ItemGroup>
</Project>

View file

@@ -0,0 +1,41 @@
using Ryujinx.Graphics.Nvdec.FFmpeg.Native;
using Ryujinx.Graphics.Video;
using System;
namespace Ryujinx.Graphics.Nvdec.FFmpeg
{
unsafe class Surface : ISurface
{
public AVFrame* Frame { get; }
public int RequestedWidth { get; }
public int RequestedHeight { get; }
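// The plane accessors below assume planar YUV 4:2:0 output (Data[0] = Y, Data[1] = U, Data[2] = V).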
public Plane YPlane => new Plane((IntPtr)Frame->Data[0], Stride * Height);
public Plane UPlane => new Plane((IntPtr)Frame->Data[1], UvStride * UvHeight);
public Plane VPlane => new Plane((IntPtr)Frame->Data[2], UvStride * UvHeight);
public FrameField Field => Frame->InterlacedFrame != 0 ? FrameField.Interlaced : FrameField.Progressive;
public int Width => Frame->Width;
public int Height => Frame->Height;
public int Stride => Frame->LineSize[0];
public int UvWidth => (Width + 1) >> 1;
public int UvHeight => (Height + 1) >> 1;
public int UvStride => Frame->LineSize[1];
public Surface(int width, int height)
{
RequestedWidth = width;
RequestedHeight = height;
Frame = FFmpegApi.av_frame_alloc();
}
public void Dispose()
{
FFmpegApi.av_frame_unref(Frame);
FFmpegApi.av_free(Frame);
}
}
}

View file

@@ -0,0 +1,53 @@
using Ryujinx.Graphics.Nvdec.FFmpeg.Native;
using Ryujinx.Graphics.Video;
using System;
namespace Ryujinx.Graphics.Nvdec.FFmpeg.Vp8
{
public sealed class Decoder : IDecoder
{
public bool IsHardwareAccelerated => false;
private readonly FFmpegContext _context = new FFmpegContext(AVCodecID.AV_CODEC_ID_VP8);
public ISurface CreateSurface(int width, int height)
{
return new Surface(width, height);
}
public bool Decode(ref Vp8PictureInfo pictureInfo, ISurface output, ReadOnlySpan<byte> bitstream)
{
Surface outSurf = (Surface)output;
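// Rebuild the VP8 uncompressed data chunk (RFC 6386): a 3-byte frame tag, plus the
// 0x9D012A start code and 14-bit dimensions (7 more bytes) for key frames.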
int uncompHeaderSize = pictureInfo.KeyFrame ? 10 : 3;
byte[] frame = new byte[bitstream.Length + uncompHeaderSize];
uint firstPartSizeShifted = pictureInfo.FirstPartSize << 5;
frame[0] = (byte)(pictureInfo.KeyFrame ? 0 : 1);
frame[0] |= (byte)((pictureInfo.Version & 7) << 1);
frame[0] |= 1 << 4;
frame[0] |= (byte)firstPartSizeShifted;
frame[1] |= (byte)(firstPartSizeShifted >> 8);
frame[2] |= (byte)(firstPartSizeShifted >> 16);
if (pictureInfo.KeyFrame)
{
frame[3] = 0x9d;
frame[4] = 0x01;
frame[5] = 0x2a;
frame[6] = (byte)pictureInfo.FrameWidth;
frame[7] = (byte)((pictureInfo.FrameWidth >> 8) & 0x3F);
frame[8] = (byte)pictureInfo.FrameHeight;
frame[9] = (byte)((pictureInfo.FrameHeight >> 8) & 0x3F);
}
bitstream.CopyTo(new Span<byte>(frame).Slice(uncompHeaderSize));
return _context.DecodeFrame(outSurf, frame) == 0;
}
public void Dispose() => _context.Dispose();
}
}