Mirror of https://github.com/Laex/Delphi-OpenCV.git, synced 2024-11-15 07:45:53 +01:00
Migrate to FFmpeg Delphi/Pascal Headers by http://www.delphiffmpeg.com
Signed-off-by: Laentir Valetov <laex@bk.ru>
This commit is contained in:
parent df2f17675f
commit aa8ce8ae96
12 .gitignore vendored
@@ -8,6 +8,12 @@ ipch/
lib/
/resource/result/*.*
CVAutoInstaller/
/source/ffmpeg/examples
/source/ffmpeg/headers

#ResourceFiles
/resource/stereosample/Aloe/*.png
/resource/stereosample/Aloe/dmin.txt

#Files
*.exe
@@ -50,8 +56,4 @@ CVAutoInstaller/
*.filters
*.user
!opencv_classes*.dll
!opencv_classes*.dll

#ResourceFiles
/resource/stereosample/Aloe/*.png
/resource/stereosample/Aloe/dmin.txt
!opencv_classes*.dll
@@ -53,12 +53,13 @@ Unzip it to a convenient directory to get the following directory structure<br>
<samples>
<source>
```
Download the [FFmpeg Delphi/Pascal Headers][6] and extract to <PROJECT_ROOT>\source\ffmpeg<br>
Add the search paths for the project modules in the Delphi IDE (Tools - Options - Delphi Options - Library - Library path):
```
<PROJECT_ROOT>\source
<PROJECT_ROOT>\source\classes
<PROJECT_ROOT>\source\component
<PROJECT_ROOT>\source\ffmpeg
<PROJECT_ROOT>\source\ffmpeg\headers
<PROJECT_ROOT>\source\opengl
<PROJECT_ROOT>\source\sdl
<PROJECT_ROOT>\source\sdl2
@@ -90,7 +91,7 @@ Examples of the use of video processing algorithms using VCL.Forms
```
Examples of using the FFMPEG library header files are in:
```
<PROJECT_ROOT>\samples\FFMpeg\FFMPEG.groupproj
<PROJECT_ROOT>\source\ffmpeg\examples
```
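As a quick sanity check after the search paths are added, a minimal console program along the following lines should compile once the headers are visible. This is only a sketch: the `libavcodec` unit name is taken from the updated uses clause later in this commit, and it assumes the binding declares the standard `avcodec_version` routine.
```
program CheckFFmpegHeaders;

{$APPTYPE CONSOLE}

uses
  libavcodec; // unit name as introduced by this commit

var
  v: Cardinal;
begin
  // avcodec_version packs the major/minor/micro version into one integer
  v := avcodec_version();
  Writeln('libavcodec version: ', v shr 16, '.', (v shr 8) and $FF, '.', v and $FF);
end.
```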
Examples of using the components
```
@@ -107,4 +108,5 @@ Examples of using the components
[2]: https://www.microsoft.com/en-us/download/details.aspx?id=48145
[3]: https://www.libsdl.org/index.php
[4]: https://github.com/opencv/opencv/releases/tag/2.4.13.2
[5]: http://ffmpeg.zeranoe.com/builds/
[5]: http://ffmpeg.zeranoe.com/builds/
[6]: http://www.delphiffmpeg.com/headers/
@@ -4,7 +4,7 @@ interface

uses
  Winapi.Windows, Winapi.Messages, System.SysUtils, System.Variants, System.Classes, Vcl.Graphics,
  Vcl.Controls, Vcl.Forms, Vcl.Dialogs, ffm.libavcodec.avcodec, ocv.comp.FFMSource,
  Vcl.Controls, Vcl.Forms, Vcl.Dialogs, libavcodec, ocv.comp.FFMSource,
  ocv.comp.Types, ocv.comp.Source, ocv.comp.View, Vcl.StdCtrls, Vcl.ExtCtrls;

type
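The rest of the migration follows the same pattern: units that still reference the old ffm.* headers swap them for the corresponding unit from the new package. Only the ffm.libavcodec.avcodec to libavcodec rename is confirmed by this diff; the mapping sketched below for other units (used by the removed examples further down) is an assumption about the delphiffmpeg unit names.
```
uses
  // confirmed by this commit:
  libavcodec,   // was ffm.libavcodec.avcodec
  // assumed by analogy, not shown in this diff:
  libavformat;  // was ffm.avformat
```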
@@ -1,96 +0,0 @@
|
||||
<Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<PropertyGroup>
|
||||
<ProjectGuid>{494E9258-6938-4D18-843D-93EBD2FC9965}</ProjectGuid>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<Projects Include="ffmpeg_sample_player\ffmpeg_sample_player.dproj">
|
||||
<Dependencies/>
|
||||
</Projects>
|
||||
<Projects Include="filtering_video\filtering_video.dproj">
|
||||
<Dependencies/>
|
||||
</Projects>
|
||||
<Projects Include="metadata\metadata.dproj">
|
||||
<Dependencies/>
|
||||
</Projects>
|
||||
<Projects Include="scaling_video\scaling_video.dproj">
|
||||
<Dependencies/>
|
||||
</Projects>
|
||||
<Projects Include="VideoEncoder\ffmVideoEncoder.dproj">
|
||||
<Dependencies/>
|
||||
</Projects>
|
||||
<Projects Include="ffm_SDL2_VCL_player\ffm_SDL2_VCL_player.dproj">
|
||||
<Dependencies/>
|
||||
</Projects>
|
||||
</ItemGroup>
|
||||
<ProjectExtensions>
|
||||
<Borland.Personality>Default.Personality.12</Borland.Personality>
|
||||
<Borland.ProjectType/>
|
||||
<BorlandProject>
|
||||
<Default.Personality/>
|
||||
</BorlandProject>
|
||||
</ProjectExtensions>
|
||||
<Target Name="ffmpeg_sample_player">
|
||||
<MSBuild Projects="ffmpeg_sample_player\ffmpeg_sample_player.dproj"/>
|
||||
</Target>
|
||||
<Target Name="ffmpeg_sample_player:Clean">
|
||||
<MSBuild Projects="ffmpeg_sample_player\ffmpeg_sample_player.dproj" Targets="Clean"/>
|
||||
</Target>
|
||||
<Target Name="ffmpeg_sample_player:Make">
|
||||
<MSBuild Projects="ffmpeg_sample_player\ffmpeg_sample_player.dproj" Targets="Make"/>
|
||||
</Target>
|
||||
<Target Name="filtering_video">
|
||||
<MSBuild Projects="filtering_video\filtering_video.dproj"/>
|
||||
</Target>
|
||||
<Target Name="filtering_video:Clean">
|
||||
<MSBuild Projects="filtering_video\filtering_video.dproj" Targets="Clean"/>
|
||||
</Target>
|
||||
<Target Name="filtering_video:Make">
|
||||
<MSBuild Projects="filtering_video\filtering_video.dproj" Targets="Make"/>
|
||||
</Target>
|
||||
<Target Name="metadata">
|
||||
<MSBuild Projects="metadata\metadata.dproj"/>
|
||||
</Target>
|
||||
<Target Name="metadata:Clean">
|
||||
<MSBuild Projects="metadata\metadata.dproj" Targets="Clean"/>
|
||||
</Target>
|
||||
<Target Name="metadata:Make">
|
||||
<MSBuild Projects="metadata\metadata.dproj" Targets="Make"/>
|
||||
</Target>
|
||||
<Target Name="scaling_video">
|
||||
<MSBuild Projects="scaling_video\scaling_video.dproj"/>
|
||||
</Target>
|
||||
<Target Name="scaling_video:Clean">
|
||||
<MSBuild Projects="scaling_video\scaling_video.dproj" Targets="Clean"/>
|
||||
</Target>
|
||||
<Target Name="scaling_video:Make">
|
||||
<MSBuild Projects="scaling_video\scaling_video.dproj" Targets="Make"/>
|
||||
</Target>
|
||||
<Target Name="ffmVideoEncoder">
|
||||
<MSBuild Projects="VideoEncoder\ffmVideoEncoder.dproj"/>
|
||||
</Target>
|
||||
<Target Name="ffmVideoEncoder:Clean">
|
||||
<MSBuild Projects="VideoEncoder\ffmVideoEncoder.dproj" Targets="Clean"/>
|
||||
</Target>
|
||||
<Target Name="ffmVideoEncoder:Make">
|
||||
<MSBuild Projects="VideoEncoder\ffmVideoEncoder.dproj" Targets="Make"/>
|
||||
</Target>
|
||||
<Target Name="ffm_SDL2_VCL_player">
|
||||
<MSBuild Projects="ffm_SDL2_VCL_player\ffm_SDL2_VCL_player.dproj"/>
|
||||
</Target>
|
||||
<Target Name="ffm_SDL2_VCL_player:Clean">
|
||||
<MSBuild Projects="ffm_SDL2_VCL_player\ffm_SDL2_VCL_player.dproj" Targets="Clean"/>
|
||||
</Target>
|
||||
<Target Name="ffm_SDL2_VCL_player:Make">
|
||||
<MSBuild Projects="ffm_SDL2_VCL_player\ffm_SDL2_VCL_player.dproj" Targets="Make"/>
|
||||
</Target>
|
||||
<Target Name="Build">
|
||||
<CallTarget Targets="ffmpeg_sample_player;filtering_video;metadata;scaling_video;ffmVideoEncoder;ffm_SDL2_VCL_player"/>
|
||||
</Target>
|
||||
<Target Name="Clean">
|
||||
<CallTarget Targets="ffmpeg_sample_player:Clean;filtering_video:Clean;metadata:Clean;scaling_video:Clean;ffmVideoEncoder:Clean;ffm_SDL2_VCL_player:Clean"/>
|
||||
</Target>
|
||||
<Target Name="Make">
|
||||
<CallTarget Targets="ffmpeg_sample_player:Make;filtering_video:Make;metadata:Make;scaling_video:Make;ffmVideoEncoder:Make;ffm_SDL2_VCL_player:Make"/>
|
||||
</Target>
|
||||
<Import Project="$(BDS)\Bin\CodeGear.Group.Targets" Condition="Exists('$(BDS)\Bin\CodeGear.Group.Targets')"/>
|
||||
</Project>
|
@@ -1,124 +0,0 @@
program ffmVideoEncoder;

{$APPTYPE CONSOLE}
{$POINTERMATH ON}
{$R *.res}

uses
  System.SysUtils,
  ffm.cls.videoencoder,
  ffm.libavcodec.avcodec,
  ffm.pixfmt,
  ffm.mem,
  ffm.frame;

Var
  seed: Single = 1.0;

// Create test video frame
procedure CreateFrame(const buffer: PByte; const w, h, bytespan: Integer);
Var
  wxh: Integer;
  i, j: Integer;
  line: PByte;
begin
  wxh := w * h;
  for i := 0 to h - 1 do
  begin
    line := @buffer[i * bytespan];
    for j := 0 to w - 1 do
    begin
      // RGB
      line[0] := Trunc(255 * sin((i / wxh * seed) * 3.14));
      line[1] := Trunc(255 * cos((j / wxh * seed) * 3.14));
      line[2] := Trunc(255 * sin(((i + j) / wxh * seed) * 3.14));
      line := line + 3;
    end;
  end;
  seed := seed + 2.2;
end;

Var
  shift: Single = 0.0;
  seconds: Single = 0.0;

  minNu: Single = 3.14 / (44100.0) * 700.0;
  maxNu: Single = 3.14 / (44100.0) * 1500.0;
  speedNu: Single = 3.14 / (44100.0) * 10.0;

  varNu: Single = 3.14 / (44100.0) * 700.0;

// Create test audio block (signed 16-bit sine sweep); the oscillator state
// lives in the globals above so the tone is continuous across calls
procedure CreateSample(const buffer: PByte; const sampleCount: Integer);
Var
  i: Integer;
begin
  if (varNu > maxNu) then
    varNu := minNu;

  varNu := varNu + speedNu;

  for i := 0 to sampleCount - 1 do
  begin
    seconds := seconds + 1.0 / 44100.0;
    // the buffer holds S16 samples, so write 16-bit words rather than bytes
    PSmallInt(buffer)[i] := Trunc(sin(i * varNu + shift) * $4FFF);
  end;
  shift := shift + varNu * sampleCount;
end;

const
  W_VIDEO = 640;
  H_VIDEO = 480;
  FILE_NAME = 'c:\temp\1.avi';
  FRAME_COUNT = 150;
  CONTAINER = 'auto';

Var
  encoder: TFFMVideoEncoder;
  w, h: Integer;
  frame: pAVFrame;
  nSampleSize: Integer;
  sample: PByte;
  bufferImgSize: Integer;
  buffer: PByte;
  i: Integer;

begin
  try
    encoder := TFFMVideoEncoder.Create;

    if encoder.InitFile(FILE_NAME, CONTAINER, W_VIDEO, H_VIDEO) then
    begin
      w := W_VIDEO;
      h := H_VIDEO;
      frame := av_frame_alloc(); // avcodec_alloc_frame() is deprecated
      nSampleSize := 2 * 44100 div 25; // 1/25 sec of audio * sample size (S16)
      sample := AllocMem(nSampleSize);
      // Create frame
      bufferImgSize := avpicture_get_size(AV_PIX_FMT_BGR24, w, h);
      buffer := av_mallocz(bufferImgSize);
      avpicture_fill(pAVPicture(frame), buffer, AV_PIX_FMT_BGR24, w, h);

      for i := 0 to FRAME_COUNT - 1 do
      begin
        CreateFrame(frame^.data[0], w, h, frame^.linesize[0]);
        CreateSample(sample, nSampleSize div 2);
        if not encoder.AddFrame(frame, sample, nSampleSize) then
          Writeln('Cannot write frame');
      end;

      encoder.Finish();
      av_free(frame^.data[0]);
      av_free(frame);
      FreeMem(sample);
      sample := nil;
    end
    else
      Writeln('Cannot open file ' + FILE_NAME);
  except
    on E: Exception do
      Writeln(E.ClassName, ': ', E.Message);
  end;

end.
@@ -1,578 +0,0 @@
|
||||
<Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<PropertyGroup>
|
||||
<ProjectGuid>{8A61BB0C-02D9-43F4-9C83-FBB6205E7FA1}</ProjectGuid>
|
||||
<ProjectVersion>18.2</ProjectVersion>
|
||||
<FrameworkType>None</FrameworkType>
|
||||
<MainSource>ffmVideoEncoder.dpr</MainSource>
|
||||
<Base>True</Base>
|
||||
<Config Condition="'$(Config)'==''">Debug</Config>
|
||||
<Platform Condition="'$(Platform)'==''">Win32</Platform>
|
||||
<TargetedPlatforms>1</TargetedPlatforms>
|
||||
<AppType>Console</AppType>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Config)'=='Base' or '$(Base)'!=''">
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='Android' and '$(Base)'=='true') or '$(Base_Android)'!=''">
|
||||
<Base_Android>true</Base_Android>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSDevice32' and '$(Base)'=='true') or '$(Base_iOSDevice32)'!=''">
|
||||
<Base_iOSDevice32>true</Base_iOSDevice32>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSDevice64' and '$(Base)'=='true') or '$(Base_iOSDevice64)'!=''">
|
||||
<Base_iOSDevice64>true</Base_iOSDevice64>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSSimulator' and '$(Base)'=='true') or '$(Base_iOSSimulator)'!=''">
|
||||
<Base_iOSSimulator>true</Base_iOSSimulator>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='OSX32' and '$(Base)'=='true') or '$(Base_OSX32)'!=''">
|
||||
<Base_OSX32>true</Base_OSX32>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='Win32' and '$(Base)'=='true') or '$(Base_Win32)'!=''">
|
||||
<Base_Win32>true</Base_Win32>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='Win64' and '$(Base)'=='true') or '$(Base_Win64)'!=''">
|
||||
<Base_Win64>true</Base_Win64>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Config)'=='Debug' or '$(Cfg_1)'!=''">
|
||||
<Cfg_1>true</Cfg_1>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='Win32' and '$(Cfg_1)'=='true') or '$(Cfg_1_Win32)'!=''">
|
||||
<Cfg_1_Win32>true</Cfg_1_Win32>
|
||||
<CfgParent>Cfg_1</CfgParent>
|
||||
<Cfg_1>true</Cfg_1>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Config)'=='Release' or '$(Cfg_2)'!=''">
|
||||
<Cfg_2>true</Cfg_2>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base)'!=''">
|
||||
<Manifest_File>None</Manifest_File>
|
||||
<VerInfo_Keys>CompanyName=;FileDescription=;FileVersion=1.0.0.0;InternalName=;LegalCopyright=;LegalTrademarks=;OriginalFilename=;ProductName=;ProductVersion=1.0.0.0;Comments=</VerInfo_Keys>
|
||||
<VerInfo_Locale>1049</VerInfo_Locale>
|
||||
<DCC_Namespace>System;Xml;Data;Datasnap;Web;Soap;$(DCC_Namespace)</DCC_Namespace>
|
||||
<SanitizedProjectName>ffmVideoEncoder</SanitizedProjectName>
|
||||
<DCC_DcuOutput>.\$(Platform)\$(Config)</DCC_DcuOutput>
|
||||
<DCC_ExeOutput>..\..\..\bin\$(Platform)</DCC_ExeOutput>
|
||||
<DCC_E>false</DCC_E>
|
||||
<DCC_N>false</DCC_N>
|
||||
<DCC_S>false</DCC_S>
|
||||
<DCC_F>false</DCC_F>
|
||||
<DCC_K>false</DCC_K>
|
||||
<Icon_MainIcon>$(BDS)\bin\delphi_PROJECTICON.ico</Icon_MainIcon>
|
||||
<Icns_MainIcns>$(BDS)\bin\delphi_PROJECTICNS.icns</Icns_MainIcns>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_Android)'!=''">
|
||||
<EnabledSysJars>android-support-v4.dex.jar;apk-expansion.dex.jar;cloud-messaging.dex.jar;fmx.dex.jar;google-analytics-v2.dex.jar;google-play-billing.dex.jar;google-play-licensing.dex.jar;google-play-services.dex.jar</EnabledSysJars>
|
||||
<Android_LauncherIcon48>$(BDS)\bin\Artwork\Android\FM_LauncherIcon_48x48.png</Android_LauncherIcon48>
|
||||
<Android_LauncherIcon96>$(BDS)\bin\Artwork\Android\FM_LauncherIcon_96x96.png</Android_LauncherIcon96>
|
||||
<Android_LauncherIcon72>$(BDS)\bin\Artwork\Android\FM_LauncherIcon_72x72.png</Android_LauncherIcon72>
|
||||
<DCC_UsePackage>DBXInterBaseDriver;DataSnapCommon;DbxCommonDriver;dbxcds;CustomIPTransport;dsnap;IndyIPServer;IndyCore;CloudService;FmxTeeUI;FireDACIBDriver;dsnapxml;OpenCV200;bindcompfmx;RESTBackendComponents;dbrtl;FireDACCommon;bindcomp;xmlrtl;ibxpress;FireDACCommonDriver;bindengine;soaprtl;FMXTee;inet;soapmidas;dxPSDBTeeChartRS19;RESTComponents;dbexpress;IndyIPClient;FireDACSqliteDriver;autoupgrXE5;FireDACDSDriver;DBXSqliteDriver;fmx;IndySystem;tethering;DataSnapClient;DataSnapProviderClient;fmxFireDAC;IndyIPCommon;DataSnapFireDAC;FireDACDBXDriver;soapserver;dxPSTeeChartRS19;rtl;DbxClientDriver;DataSnapNativeClient;IndyProtocols;bindcompdbx;FireDAC;$(DCC_UsePackage)</DCC_UsePackage>
|
||||
<Android_LauncherIcon144>$(BDS)\bin\Artwork\Android\FM_LauncherIcon_144x144.png</Android_LauncherIcon144>
|
||||
<Android_LauncherIcon36>$(BDS)\bin\Artwork\Android\FM_LauncherIcon_36x36.png</Android_LauncherIcon36>
|
||||
<Android_SplashImage426>$(BDS)\bin\Artwork\Android\FM_SplashImage_426x320.png</Android_SplashImage426>
|
||||
<Android_SplashImage470>$(BDS)\bin\Artwork\Android\FM_SplashImage_470x320.png</Android_SplashImage470>
|
||||
<Android_SplashImage640>$(BDS)\bin\Artwork\Android\FM_SplashImage_640x480.png</Android_SplashImage640>
|
||||
<Android_SplashImage960>$(BDS)\bin\Artwork\Android\FM_SplashImage_960x720.png</Android_SplashImage960>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_iOSDevice32)'!=''">
|
||||
<VerInfo_UIDeviceFamily>iPhoneAndiPad</VerInfo_UIDeviceFamily>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base_iOSDevice>true</Base_iOSDevice>
|
||||
<VerInfo_BundleId>$(MSBuildProjectName)</VerInfo_BundleId>
|
||||
<VerInfo_Keys>CFBundleName=$(MSBuildProjectName);CFBundleDevelopmentRegion=en;CFBundleDisplayName=$(MSBuildProjectName);CFBundleIdentifier=$(MSBuildProjectName);CFBundleInfoDictionaryVersion=7.1;CFBundleVersion=1.0.0.0;CFBundlePackageType=APPL;CFBundleSignature=????;LSRequiresIPhoneOS=true;CFBundleAllowMixedLocalizations=YES;CFBundleExecutable=$(MSBuildProjectName);UIDeviceFamily=iPhone & iPad;CFBundleResourceSpecification=ResourceRules.plist;NSLocationAlwaysUsageDescription=The reason for accessing the location information of the user;NSLocationWhenInUseUsageDescription=The reason for accessing the location information of the user;FMLocalNotificationPermission=false</VerInfo_Keys>
|
||||
<DCC_UsePackage>DBXInterBaseDriver;DataSnapCommon;DbxCommonDriver;dbxcds;CustomIPTransport;dsnap;IndyIPServer;IndyCore;CloudService;FmxTeeUI;FireDACIBDriver;dsnapxml;OpenCV200;bindcompfmx;RESTBackendComponents;dbrtl;FireDACCommon;bindcomp;xmlrtl;ibxpress;FireDACCommonDriver;bindengine;soaprtl;FMXTee;inet;soapmidas;RESTComponents;dbexpress;IndyIPClient;FireDACSqliteDriver;FireDACDSDriver;DBXSqliteDriver;fmx;IndySystem;tethering;DataSnapClient;DataSnapProviderClient;fmxFireDAC;fmxase;IndyIPCommon;DataSnapFireDAC;FireDACDBXDriver;soapserver;rtl;DbxClientDriver;DataSnapNativeClient;IndyProtocols;bindcompdbx;FireDAC;$(DCC_UsePackage);$(DCC_UsePackage)</DCC_UsePackage>
|
||||
<Base>true</Base>
|
||||
<BT_BuildType>Debug</BT_BuildType>
|
||||
<VerInfo_IncludeVerInfo>true</VerInfo_IncludeVerInfo>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_iOSDevice64)'!=''">
|
||||
<VerInfo_UIDeviceFamily>iPhoneAndiPad</VerInfo_UIDeviceFamily>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base_iOSDevice>true</Base_iOSDevice>
|
||||
<VerInfo_BundleId>$(MSBuildProjectName)</VerInfo_BundleId>
|
||||
<VerInfo_Keys>CFBundleName=$(MSBuildProjectName);CFBundleDevelopmentRegion=en;CFBundleDisplayName=$(MSBuildProjectName);CFBundleIdentifier=$(MSBuildProjectName);CFBundleInfoDictionaryVersion=7.1;CFBundleVersion=1.0.0.0;CFBundlePackageType=APPL;CFBundleSignature=????;LSRequiresIPhoneOS=true;CFBundleAllowMixedLocalizations=YES;CFBundleExecutable=$(MSBuildProjectName);UIDeviceFamily=iPhone & iPad;CFBundleResourceSpecification=ResourceRules.plist;NSLocationAlwaysUsageDescription=The reason for accessing the location information of the user;NSLocationWhenInUseUsageDescription=The reason for accessing the location information of the user;FMLocalNotificationPermission=false</VerInfo_Keys>
|
||||
<DCC_UsePackage>DBXInterBaseDriver;DataSnapCommon;DbxCommonDriver;dbxcds;CustomIPTransport;dsnap;IndyIPServer;IndyCore;CloudService;FmxTeeUI;FireDACIBDriver;dsnapxml;OpenCV200;bindcompfmx;RESTBackendComponents;dbrtl;FireDACCommon;bindcomp;xmlrtl;ibxpress;FireDACCommonDriver;bindengine;soaprtl;FMXTee;inet;soapmidas;RESTComponents;dbexpress;IndyIPClient;FireDACSqliteDriver;FireDACDSDriver;DBXSqliteDriver;fmx;IndySystem;tethering;DataSnapClient;DataSnapProviderClient;fmxFireDAC;fmxase;IndyIPCommon;DataSnapFireDAC;FireDACDBXDriver;soapserver;rtl;DbxClientDriver;DataSnapNativeClient;IndyProtocols;bindcompdbx;FireDAC;$(DCC_UsePackage);$(DCC_UsePackage)</DCC_UsePackage>
|
||||
<Base>true</Base>
|
||||
<BT_BuildType>Debug</BT_BuildType>
|
||||
<VerInfo_IncludeVerInfo>true</VerInfo_IncludeVerInfo>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_iOSSimulator)'!=''">
|
||||
<DCC_UsePackage>DBXInterBaseDriver;DataSnapCommon;DbxCommonDriver;dbxcds;CustomIPTransport;dsnap;IndyIPServer;IndyCore;CloudService;FmxTeeUI;FireDACIBDriver;dsnapxml;OpenCV200;bindcompfmx;RESTBackendComponents;dbrtl;FireDACCommon;bindcomp;xmlrtl;ibxpress;FireDACCommonDriver;bindengine;soaprtl;FMXTee;inet;soapmidas;RESTComponents;dbexpress;IndyIPClient;FireDACSqliteDriver;FireDACDSDriver;DBXSqliteDriver;fmx;IndySystem;tethering;DataSnapClient;DataSnapProviderClient;fmxFireDAC;fmxase;IndyIPCommon;DataSnapFireDAC;FireDACDBXDriver;soapserver;rtl;DbxClientDriver;DataSnapNativeClient;IndyProtocols;bindcompdbx;FireDAC;$(DCC_UsePackage)</DCC_UsePackage>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_OSX32)'!=''">
|
||||
<DCC_ConsoleTarget>true</DCC_ConsoleTarget>
|
||||
<DCC_UsePackage>FireDACPgDriver;DBXInterBaseDriver;DataSnapServer;DataSnapCommon;DbxCommonDriver;dbxcds;CustomIPTransport;dsnap;IndyIPServer;IndyCore;CloudService;FmxTeeUI;FireDACIBDriver;dsnapxml;FireDACDb2Driver;bindcompfmx;FireDACODBCDriver;RESTBackendComponents;dbrtl;FireDACCommon;bindcomp;inetdb;xmlrtl;ibxpress;FireDACCommonDriver;bindengine;soaprtl;FMXTee;FireDACMSSQLDriver;DBXInformixDriver;DataSnapServerMidas;DBXFirebirdDriver;inet;FireDACMySQLDriver;soapmidas;DBXSybaseASADriver;RESTComponents;dbexpress;IndyIPClient;FireDACSqliteDriver;FireDACDSDriver;DBXSqliteDriver;fmx;IndySystem;tethering;DataSnapClient;DataSnapProviderClient;fmxFireDAC;DBXOracleDriver;fmxase;IndyIPCommon;DataSnapFireDAC;FireDACDBXDriver;soapserver;inetdbxpress;FireDACInfxDriver;FireDACASADriver;rtl;DbxClientDriver;DataSnapNativeClient;IndyProtocols;DBXMySQLDriver;bindcompdbx;FireDACADSDriver;FireDAC;fmxobj;FireDACOracleDriver;fmxdae;FireDACMSAccDriver;DataSnapIndy10ServerTransport;$(DCC_UsePackage)</DCC_UsePackage>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_Win32)'!=''">
|
||||
<DCC_Namespace>Winapi;System.Win;Data.Win;Datasnap.Win;Web.Win;Soap.Win;Xml.Win;Bde;$(DCC_Namespace)</DCC_Namespace>
|
||||
<VerInfo_Locale>1033</VerInfo_Locale>
|
||||
<DCC_ConsoleTarget>true</DCC_ConsoleTarget>
|
||||
<DCC_UsePackage>dxSkinOffice2007BlackRS19;JvGlobus;JvMM;JvManagedThreads;dxSkinLiquidSkyRS19;cxBarEditItemRS19;OverbyteIcsDXE5Run;FireDACPgDriver;dxWizardControlRS19;dxPScxCommonRS19;tmswizdXE5;dxThemeRS19;JvCrypt;XiButtonXE4;cxGridRS19;dxPScxExtCommonRS19;DBXInterBaseDriver;DataSnapServer;DataSnapCommon;cxSchedulerRS19;JvNet;JvDotNetCtrls;DbxCommonDriver;vclimg;dbxcds;dxFlowChartRS19;DatasnapConnectorsFreePascal;JvXPCtrls;dxdbtrRS19;dxSkinSpringTimeRS19;vcldb;dxdborRS19;dxDockingRS19;dxSkinsdxDLPainterRS19;cxSpreadSheetRS19;dxtrmdRS19;dxSpellCheckerRS19;CustomIPTransport;dxTileControlRS19;dsnap;IndyIPServer;dxPSCoreRS19;dxSkinFoggyRS19;IndyCore;cxSchedulerGridRS19;cxPivotGridOLAPRS19;dxSkinStardustRS19;CloudService;FmxTeeUI;FireDACIBDriver;dxSkinXmas2008BlueRS19;JvDB;JvRuntimeDesign;dxSkinValentineRS19;fsIBX20;dsnapxml;dxPScxSchedulerLnkRS19;dxSkinDarkSideRS19;FireDACDb2Driver;dxSkinLondonLiquidSkyRS19;JclDeveloperTools;dxBarExtDBItemsRS19;dxTabbedMDIRS19;OpenCV200;dxSkinOffice2013WhiteRS19;dxSkinSharpRS19;bindcompfmx;dxSkinBlueprintRS19;dxSkinOffice2007PinkRS19;frx20;vcldbx;cxExportRS19;FireDACODBCDriver;RESTBackendComponents;dxSkinCoffeeRS19;dbrtl;FireDACCommon;bindcomp;inetdb;dxSkinBlueRS19;JvPluginSystem;dxServerModeRS19;DBXOdbcDriver;JvCmp;vclFireDAC;dxSkinMoneyTwinsRS19;cxPivotGridChartRS19;xmlrtl;dxSkiniMaginaryRS19;ibxpress;JvTimeFramework;dxSkinOffice2007GreenRS19;FireDACCommonDriver;bindengine;vclactnband;soaprtl;FMXTee;dxRibbonRS19;bindcompvcl;dxADOServerModeRS19;Jcl;vclie;dxPSdxLCLnkRS19;dxSkinBlackRS19;dxSkinOffice2010BlackRS19;dxSkinSevenClassicRS19;FireDACMSSQLDriver;DBXInformixDriver;Intraweb;DataSnapServerMidas;dsnapcon;DBXFirebirdDriver;dxSkinsdxNavBarPainterRS19;inet;dxPSdxFCLnkRS19;dxSkinscxSchedulerPainterRS19;JvPascalInterpreter;FireDACMySQLDriver;soapmidas;vclx;dxPSPrVwRibbonRS19;dxPSDBTeeChartRS19;DBXSybaseASADriver;RESTComponents;dxSkinLilianRS19;dxSkinscxPCPainterRS19;dbexpress;IndyIPClient;JvBDE;tmsdXE5;cxSchedulerTreeBrowserRS19;dxPScxSSLnkRS19;dxPScxPivotGridLnkRS19;dxSkinSharpPlusRS19;FireDACSqliteDriver;autoupgrXE5;FireDACDSDriver;ZComponent;DBXSqliteDriver;dxPSdxDBTVLnkRS19;dxSkinOffice2007BlueRS19;cxDataRS19;cxLibraryRS19;fmx;JvDlgs;IndySystem;RVButtonXE6;TeeDB;tethering;dxPsPrVwAdvRS19;dxSkinHighContrastRS19;inetdbbde;vclib;DataSnapClient;DataSnapProviderClient;DBXSybaseASEDriver;dxmdsRS19;dxSkinOffice2010SilverRS19;dxSkinsdxBarPainterRS19;fsBDE20;MetropolisUILiveTile;dxPSdxOCLnkRS19;vcldsnap;fmxFireDAC;DBXDb2Driver;dxSkinDevExpressDarkStyleRS19;DBXOracleDriver;dxBarDBNavRS19;JvCore;vclribbon;dxSkinSilverRS19;dxSkinVS2010RS19;fmxase;vcl;dxPSdxDBOCLnkRS19;DBXMSSQLDriver;IndyIPCommon;CodeSiteExpressPkg;dxBarRS19;cxTreeListdxBarPopupMenuRS19;DataSnapFireDAC;FireDACDBXDriver;JvAppFrm;soapserver;dxFireDACServerModeRS19;inetdbxpress;fsTee20;frxTee20;cxEditorsRS19;dxSkinMcSkinRS19;FireDACInfxDriver;JvDocking;adortl;dxSkinOffice2007SilverRS19;frxDB20;JvWizards;FireDACASADriver;dxSkinSevenRS19;JvHMI;dxDBXServerModeRS19;dxLayoutControlRS19;dxPSTeeChartRS19;dxSkinWhiteprintRS19;cxPageControlRS19;fsADO20;JvBands;ZDbc;rtl;dcldxSkinsCoreRS19;DbxClientDriver;ZPlain;dxPScxGridLnkRS19;Tee;cxPageControldxBarPopupMenuRS19;cxVerticalGridRS19;JclContainers;CPortLibDXE;JvSystem;DataSnapNativeClient;svnui;dxSkinsdxRibbonPainterRS19;dxSkinSummer2008RS19;cxPivotGridRS19;dxComnRS19;IndyProtocols;DBXMySQLDriver;dxSkinTheAsphaltWorldRS19;JvControls;tmsxlsdXE5;dxPSLnksRS19;viTimeLineDPK;bindcompdbx;TeeUI;JvJans;JvPrintPreview;JvPageComps;dxSkinDarkRoomRS19;JvStdCtrls;JvCustom;dxSkinPu
mpkinRS19;dxBarExtItemsRS19;FireDACADSDriver;vcltouch;ZCore;dxNavBarRS19;VclSmp;FireDAC;VCLRESTComponents;dxGDIPlusRS19;DataSnapConnectors;dxCoreRS19;dxPScxVGridLnkRS19;fsDB20;dxPScxTLLnkRS19;dxSkinsCoreRS19;fmxobj;dxSkinGlassOceansRS19;JclVcl;ZParseSql;dxPScxPCProdRS19;fs20;svn;tmsexdXE5;dxSkinOffice2010BlueRS19;FireDACOracleDriver;fmxdae;dxorgcRS19;bdertl;cxTreeListRS19;FireDACMSAccDriver;DataSnapIndy10ServerTransport;dxSkinDevExpressStyleRS19;dxSkinCaramelRS19;$(DCC_UsePackage)</DCC_UsePackage>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_Win64)'!=''">
|
||||
<DCC_ConsoleTarget>true</DCC_ConsoleTarget>
|
||||
<DCC_UsePackage>dxSkinOffice2007BlackRS19;dxSkinLiquidSkyRS19;cxBarEditItemRS19;OverbyteIcsDXE5Run;FireDACPgDriver;dxWizardControlRS19;dxPScxCommonRS19;dxThemeRS19;cxGridRS19;dxPScxExtCommonRS19;DBXInterBaseDriver;DataSnapServer;DataSnapCommon;cxSchedulerRS19;DbxCommonDriver;vclimg;dbxcds;dxFlowChartRS19;DatasnapConnectorsFreePascal;dxdbtrRS19;dxSkinSpringTimeRS19;vcldb;dxdborRS19;dxDockingRS19;dxSkinsdxDLPainterRS19;cxSpreadSheetRS19;dxtrmdRS19;dxSpellCheckerRS19;CustomIPTransport;dxTileControlRS19;dsnap;IndyIPServer;dxPSCoreRS19;dxSkinFoggyRS19;IndyCore;cxSchedulerGridRS19;cxPivotGridOLAPRS19;dxSkinStardustRS19;CloudService;FmxTeeUI;FireDACIBDriver;dxSkinXmas2008BlueRS19;dxSkinValentineRS19;dsnapxml;dxPScxSchedulerLnkRS19;dxSkinDarkSideRS19;FireDACDb2Driver;dxSkinLondonLiquidSkyRS19;dxBarExtDBItemsRS19;dxTabbedMDIRS19;OpenCV200;dxSkinOffice2013WhiteRS19;dxSkinSharpRS19;bindcompfmx;dxSkinBlueprintRS19;dxSkinOffice2007PinkRS19;cxExportRS19;FireDACODBCDriver;RESTBackendComponents;dxSkinCoffeeRS19;dbrtl;FireDACCommon;bindcomp;inetdb;dxSkinBlueRS19;dxServerModeRS19;DBXOdbcDriver;vclFireDAC;dxSkinMoneyTwinsRS19;cxPivotGridChartRS19;xmlrtl;dxSkiniMaginaryRS19;ibxpress;dxSkinOffice2007GreenRS19;FireDACCommonDriver;bindengine;vclactnband;soaprtl;FMXTee;dxRibbonRS19;bindcompvcl;dxADOServerModeRS19;vclie;dxPSdxLCLnkRS19;dxSkinBlackRS19;dxSkinOffice2010BlackRS19;dxSkinSevenClassicRS19;FireDACMSSQLDriver;DBXInformixDriver;Intraweb;DataSnapServerMidas;dsnapcon;DBXFirebirdDriver;dxSkinsdxNavBarPainterRS19;inet;dxPSdxFCLnkRS19;dxSkinscxSchedulerPainterRS19;FireDACMySQLDriver;soapmidas;vclx;dxPSPrVwRibbonRS19;DBXSybaseASADriver;RESTComponents;dxSkinLilianRS19;dxSkinscxPCPainterRS19;dbexpress;IndyIPClient;tmsdXE5;cxSchedulerTreeBrowserRS19;dxPScxSSLnkRS19;dxPScxPivotGridLnkRS19;dxSkinSharpPlusRS19;FireDACSqliteDriver;FireDACDSDriver;ZComponent;DBXSqliteDriver;dxPSdxDBTVLnkRS19;dxSkinOffice2007BlueRS19;cxDataRS19;cxLibraryRS19;fmx;IndySystem;TeeDB;tethering;dxPsPrVwAdvRS19;dxSkinHighContrastRS19;vclib;DataSnapClient;DataSnapProviderClient;DBXSybaseASEDriver;dxmdsRS19;dxSkinOffice2010SilverRS19;dxSkinsdxBarPainterRS19;MetropolisUILiveTile;dxPSdxOCLnkRS19;vcldsnap;fmxFireDAC;DBXDb2Driver;dxSkinDevExpressDarkStyleRS19;DBXOracleDriver;dxBarDBNavRS19;vclribbon;dxSkinSilverRS19;dxSkinVS2010RS19;fmxase;vcl;dxPSdxDBOCLnkRS19;DBXMSSQLDriver;IndyIPCommon;dxBarRS19;cxTreeListdxBarPopupMenuRS19;DataSnapFireDAC;FireDACDBXDriver;soapserver;dxFireDACServerModeRS19;inetdbxpress;cxEditorsRS19;dxSkinMcSkinRS19;FireDACInfxDriver;adortl;dxSkinOffice2007SilverRS19;FireDACASADriver;dxSkinSevenRS19;dxDBXServerModeRS19;dxLayoutControlRS19;dxSkinWhiteprintRS19;cxPageControlRS19;ZDbc;rtl;dcldxSkinsCoreRS19;DbxClientDriver;ZPlain;dxPScxGridLnkRS19;Tee;cxPageControldxBarPopupMenuRS19;cxVerticalGridRS19;DataSnapNativeClient;dxSkinsdxRibbonPainterRS19;dxSkinSummer2008RS19;cxPivotGridRS19;dxComnRS19;IndyProtocols;DBXMySQLDriver;dxSkinTheAsphaltWorldRS19;tmsxlsdXE5;dxPSLnksRS19;bindcompdbx;TeeUI;dxSkinDarkRoomRS19;dxSkinPumpkinRS19;dxBarExtItemsRS19;FireDACADSDriver;vcltouch;ZCore;dxNavBarRS19;VclSmp;FireDAC;VCLRESTComponents;dxGDIPlusRS19;DataSnapConnectors;dxCoreRS19;dxPScxVGridLnkRS19;dxPScxTLLnkRS19;dxSkinsCoreRS19;fmxobj;dxSkinGlassOceansRS19;ZParseSql;dxPScxPCProdRS19;tmsexdXE5;dxSkinOffice2010BlueRS19;FireDACOracleDriver;fmxdae;dxorgcRS19;cxTreeListRS19;FireDACMSAccDriver;DataSnapIndy10ServerTransport;dxSkinDevExpressStyleRS19;dxSkinCaramelRS19;$(DCC_UsePackage)</DCC_UsePackage>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_1)'!=''">
|
||||
<DCC_Define>DEBUG;$(DCC_Define)</DCC_Define>
|
||||
<DCC_DebugDCUs>true</DCC_DebugDCUs>
|
||||
<DCC_Optimize>false</DCC_Optimize>
|
||||
<DCC_GenerateStackFrames>true</DCC_GenerateStackFrames>
|
||||
<DCC_DebugInfoInExe>true</DCC_DebugInfoInExe>
|
||||
<DCC_RemoteDebug>true</DCC_RemoteDebug>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_1_Win32)'!=''">
|
||||
<Manifest_File>None</Manifest_File>
|
||||
<VerInfo_Locale>1033</VerInfo_Locale>
|
||||
<DCC_RemoteDebug>false</DCC_RemoteDebug>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_2)'!=''">
|
||||
<DCC_LocalDebugSymbols>false</DCC_LocalDebugSymbols>
|
||||
<DCC_Define>RELEASE;$(DCC_Define)</DCC_Define>
|
||||
<DCC_SymbolReferenceInfo>0</DCC_SymbolReferenceInfo>
|
||||
<DCC_DebugInformation>0</DCC_DebugInformation>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<DelphiCompile Include="$(MainSource)">
|
||||
<MainSource>MainSource</MainSource>
|
||||
</DelphiCompile>
|
||||
<BuildConfiguration Include="Release">
|
||||
<Key>Cfg_2</Key>
|
||||
<CfgParent>Base</CfgParent>
|
||||
</BuildConfiguration>
|
||||
<BuildConfiguration Include="Base">
|
||||
<Key>Base</Key>
|
||||
</BuildConfiguration>
|
||||
<BuildConfiguration Include="Debug">
|
||||
<Key>Cfg_1</Key>
|
||||
<CfgParent>Base</CfgParent>
|
||||
</BuildConfiguration>
|
||||
</ItemGroup>
|
||||
<ProjectExtensions>
|
||||
<Borland.Personality>Delphi.Personality.12</Borland.Personality>
|
||||
<Borland.ProjectType/>
|
||||
<BorlandProject>
|
||||
<Delphi.Personality>
|
||||
<Source>
|
||||
<Source Name="MainSource">ffmVideoEncoder.dpr</Source>
|
||||
</Source>
|
||||
<Excluded_Packages>
|
||||
<Excluded_Packages Name="$(BDSBIN)\dcloffice2k200.bpl">Microsoft Office 2000 Sample Automation Server Wrapper Components</Excluded_Packages>
|
||||
<Excluded_Packages Name="$(BDSBIN)\dclofficexp200.bpl">Microsoft Office XP Sample Automation Server Wrapper Components</Excluded_Packages>
|
||||
</Excluded_Packages>
|
||||
</Delphi.Personality>
|
||||
<Deployment Version="3">
|
||||
<DeployFile LocalName="$(BDS)\Redist\iossimulator\libPCRE.dylib" Class="DependencyModule">
|
||||
<Platform Name="iOSSimulator">
|
||||
<Overwrite>true</Overwrite>
|
||||
</Platform>
|
||||
</DeployFile>
|
||||
<DeployFile LocalName="$(BDS)\Redist\osx32\libcgunwind.1.0.dylib" Class="DependencyModule">
|
||||
<Platform Name="OSX32">
|
||||
<Overwrite>true</Overwrite>
|
||||
</Platform>
|
||||
</DeployFile>
|
||||
<DeployFile LocalName="$(BDS)\Redist\iossimulator\libcgunwind.1.0.dylib" Class="DependencyModule">
|
||||
<Platform Name="iOSSimulator">
|
||||
<Overwrite>true</Overwrite>
|
||||
</Platform>
|
||||
</DeployFile>
|
||||
<DeployFile LocalName="$(BDS)\Redist\osx32\libcgsqlite3.dylib" Class="DependencyModule">
|
||||
<Platform Name="OSX32">
|
||||
<Overwrite>true</Overwrite>
|
||||
</Platform>
|
||||
</DeployFile>
|
||||
<DeployClass Name="AdditionalDebugSymbols">
|
||||
<Platform Name="OSX32">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="Win32">
|
||||
<RemoteDir>Contents\MacOS</RemoteDir>
|
||||
<Operation>0</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="AndroidClassesDexFile">
|
||||
<Platform Name="Android">
|
||||
<RemoteDir>classes</RemoteDir>
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="AndroidGDBServer">
|
||||
<Platform Name="Android">
|
||||
<RemoteDir>library\lib\armeabi-v7a</RemoteDir>
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="AndroidLibnativeArmeabiFile">
|
||||
<Platform Name="Android">
|
||||
<RemoteDir>library\lib\armeabi</RemoteDir>
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="AndroidLibnativeMipsFile">
|
||||
<Platform Name="Android">
|
||||
<RemoteDir>library\lib\mips</RemoteDir>
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="AndroidServiceOutput">
|
||||
<Platform Name="Android">
|
||||
<RemoteDir>library\lib\armeabi-v7a</RemoteDir>
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="AndroidSplashImageDef">
|
||||
<Platform Name="Android">
|
||||
<RemoteDir>res\drawable</RemoteDir>
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="AndroidSplashStyles">
|
||||
<Platform Name="Android">
|
||||
<RemoteDir>res\values</RemoteDir>
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="Android_DefaultAppIcon">
|
||||
<Platform Name="Android">
|
||||
<RemoteDir>res\drawable</RemoteDir>
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="Android_LauncherIcon144">
|
||||
<Platform Name="Android">
|
||||
<RemoteDir>res\drawable-xxhdpi</RemoteDir>
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="Android_LauncherIcon36">
|
||||
<Platform Name="Android">
|
||||
<RemoteDir>res\drawable-ldpi</RemoteDir>
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="Android_LauncherIcon48">
|
||||
<Platform Name="Android">
|
||||
<RemoteDir>res\drawable-mdpi</RemoteDir>
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="Android_LauncherIcon72">
|
||||
<Platform Name="Android">
|
||||
<RemoteDir>res\drawable-hdpi</RemoteDir>
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="Android_LauncherIcon96">
|
||||
<Platform Name="Android">
|
||||
<RemoteDir>res\drawable-xhdpi</RemoteDir>
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="Android_SplashImage426">
|
||||
<Platform Name="Android">
|
||||
<RemoteDir>res\drawable-small</RemoteDir>
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="Android_SplashImage470">
|
||||
<Platform Name="Android">
|
||||
<RemoteDir>res\drawable-normal</RemoteDir>
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="Android_SplashImage640">
|
||||
<Platform Name="Android">
|
||||
<RemoteDir>res\drawable-large</RemoteDir>
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="Android_SplashImage960">
|
||||
<Platform Name="Android">
|
||||
<RemoteDir>res\drawable-xlarge</RemoteDir>
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="DebugSymbols">
|
||||
<Platform Name="iOSSimulator">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="OSX32">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="Win32">
|
||||
<Operation>0</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="DependencyFramework">
|
||||
<Platform Name="OSX32">
|
||||
<Operation>1</Operation>
|
||||
<Extensions>.framework</Extensions>
|
||||
</Platform>
|
||||
<Platform Name="Win32">
|
||||
<Operation>0</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="DependencyModule">
|
||||
<Platform Name="OSX32">
|
||||
<Operation>1</Operation>
|
||||
<Extensions>.dylib</Extensions>
|
||||
</Platform>
|
||||
<Platform Name="Win32">
|
||||
<Operation>0</Operation>
|
||||
<Extensions>.dll;.bpl</Extensions>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Required="true" Name="DependencyPackage">
|
||||
<Platform Name="iOSDevice32">
|
||||
<Operation>1</Operation>
|
||||
<Extensions>.dylib</Extensions>
|
||||
</Platform>
|
||||
<Platform Name="iOSDevice64">
|
||||
<Operation>1</Operation>
|
||||
<Extensions>.dylib</Extensions>
|
||||
</Platform>
|
||||
<Platform Name="iOSSimulator">
|
||||
<Operation>1</Operation>
|
||||
<Extensions>.dylib</Extensions>
|
||||
</Platform>
|
||||
<Platform Name="OSX32">
|
||||
<Operation>1</Operation>
|
||||
<Extensions>.dylib</Extensions>
|
||||
</Platform>
|
||||
<Platform Name="Win32">
|
||||
<Operation>0</Operation>
|
||||
<Extensions>.bpl</Extensions>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="File">
|
||||
<Platform Name="Android">
|
||||
<Operation>0</Operation>
|
||||
</Platform>
|
||||
<Platform Name="iOSDevice32">
|
||||
<Operation>0</Operation>
|
||||
</Platform>
|
||||
<Platform Name="iOSDevice64">
|
||||
<Operation>0</Operation>
|
||||
</Platform>
|
||||
<Platform Name="iOSSimulator">
|
||||
<Operation>0</Operation>
|
||||
</Platform>
|
||||
<Platform Name="OSX32">
|
||||
<Operation>0</Operation>
|
||||
</Platform>
|
||||
<Platform Name="Win32">
|
||||
<Operation>0</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="iPad_Launch1024">
|
||||
<Platform Name="iOSDevice32">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="iOSDevice64">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="iOSSimulator">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="iPad_Launch1536">
|
||||
<Platform Name="iOSDevice32">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="iOSDevice64">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="iOSSimulator">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="iPad_Launch2048">
|
||||
<Platform Name="iOSDevice32">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="iOSDevice64">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="iOSSimulator">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="iPad_Launch768">
|
||||
<Platform Name="iOSDevice32">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="iOSDevice64">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="iOSSimulator">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="iPhone_Launch320">
|
||||
<Platform Name="iOSDevice32">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="iOSDevice64">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="iOSSimulator">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="iPhone_Launch640">
|
||||
<Platform Name="iOSDevice32">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="iOSDevice64">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="iOSSimulator">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="iPhone_Launch640x1136">
|
||||
<Platform Name="iOSDevice32">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="iOSDevice64">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="iOSSimulator">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="ProjectAndroidManifest">
|
||||
<Platform Name="Android">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="ProjectiOSDeviceDebug">
|
||||
<Platform Name="iOSDevice32">
|
||||
<RemoteDir>..\$(PROJECTNAME).app.dSYM\Contents\Resources\DWARF</RemoteDir>
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="iOSDevice64">
|
||||
<RemoteDir>..\$(PROJECTNAME).app.dSYM\Contents\Resources\DWARF</RemoteDir>
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="ProjectiOSDeviceResourceRules"/>
|
||||
<DeployClass Name="ProjectiOSEntitlements"/>
|
||||
<DeployClass Name="ProjectiOSInfoPList"/>
|
||||
<DeployClass Name="ProjectiOSResource">
|
||||
<Platform Name="iOSDevice32">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="iOSDevice64">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="iOSSimulator">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="ProjectOSXEntitlements"/>
|
||||
<DeployClass Name="ProjectOSXInfoPList"/>
|
||||
<DeployClass Name="ProjectOSXResource">
|
||||
<Platform Name="OSX32">
|
||||
<RemoteDir>Contents\Resources</RemoteDir>
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Required="true" Name="ProjectOutput">
|
||||
<Platform Name="Android">
|
||||
<RemoteDir>library\lib\armeabi-v7a</RemoteDir>
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="iOSDevice32">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="iOSDevice64">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="iOSSimulator">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="Linux64">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="OSX32">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="Win32">
|
||||
<Operation>0</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="ProjectUWPManifest">
|
||||
<Platform Name="Win32">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="Win64">
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="UWP_DelphiLogo150">
|
||||
<Platform Name="Win32">
|
||||
<RemoteDir>Assets</RemoteDir>
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="Win64">
|
||||
<RemoteDir>Assets</RemoteDir>
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<DeployClass Name="UWP_DelphiLogo44">
|
||||
<Platform Name="Win32">
|
||||
<RemoteDir>Assets</RemoteDir>
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
<Platform Name="Win64">
|
||||
<RemoteDir>Assets</RemoteDir>
|
||||
<Operation>1</Operation>
|
||||
</Platform>
|
||||
</DeployClass>
|
||||
<ProjectRoot Platform="Win64" Name="$(PROJECTNAME)"/>
|
||||
<ProjectRoot Platform="iOSDevice64" Name="$(PROJECTNAME).app"/>
|
||||
<ProjectRoot Platform="iOSDevice32" Name="$(PROJECTNAME).app"/>
|
||||
<ProjectRoot Platform="Win32" Name="$(PROJECTNAME)"/>
|
||||
<ProjectRoot Platform="Linux64" Name="$(PROJECTNAME)"/>
|
||||
<ProjectRoot Platform="OSX32" Name="$(PROJECTNAME)"/>
|
||||
<ProjectRoot Platform="Android" Name="$(PROJECTNAME)"/>
|
||||
<ProjectRoot Platform="iOSSimulator" Name="$(PROJECTNAME).app"/>
|
||||
</Deployment>
|
||||
<Platforms>
|
||||
<Platform value="Android">False</Platform>
|
||||
<Platform value="iOSDevice32">False</Platform>
|
||||
<Platform value="iOSDevice64">False</Platform>
|
||||
<Platform value="iOSSimulator">False</Platform>
|
||||
<Platform value="Linux64">False</Platform>
|
||||
<Platform value="OSX32">False</Platform>
|
||||
<Platform value="Win32">True</Platform>
|
||||
<Platform value="Win64">False</Platform>
|
||||
</Platforms>
|
||||
</BorlandProject>
|
||||
<ProjectFileVersion>12</ProjectFileVersion>
|
||||
</ProjectExtensions>
|
||||
<Import Project="$(BDS)\Bin\CodeGear.Delphi.Targets" Condition="Exists('$(BDS)\Bin\CodeGear.Delphi.Targets')"/>
|
||||
<Import Project="$(APPDATA)\Embarcadero\$(BDSAPPDATABASEDIR)\$(PRODUCTVERSION)\UserTools.proj" Condition="Exists('$(APPDATA)\Embarcadero\$(BDSAPPDATABASEDIR)\$(PRODUCTVERSION)\UserTools.proj')"/>
|
||||
<Import Project="$(MSBuildProjectName).deployproj" Condition="Exists('$(MSBuildProjectName).deployproj')"/>
|
||||
</Project>
|
Binary file not shown.
@@ -1,707 +0,0 @@
|
||||
(*
|
||||
* Copyright (c) 2001 Fabrice Bellard
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
|
||||
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
* THE SOFTWARE.
|
||||
*)
|
||||
(* *
|
||||
* @file
|
||||
* libavcodec API use example.
|
||||
*
|
||||
* @example decoding_encoding.c
|
||||
* Note that libavcodec only handles codecs (mpeg, mpeg4, etc...),
|
||||
* not file formats (avi, vob, mp4, mov, mkv, mxf, flv, mpegts, mpegps, etc...). See library 'libavformat' for the
|
||||
* format handling
|
||||
*)
|
||||
|
||||
program decoding_encoding;
|
||||
|
||||
{$APPTYPE CONSOLE}
|
||||
{$POINTERMATH ON}
|
||||
{$R *.res}
|
||||
|
||||
uses
|
||||
System.SysUtils,
|
||||
System.Math,
|
||||
ffm.libavcodec.avcodec,
|
||||
ffm.pixfmt,
|
||||
ffm.avformat,
|
||||
ffm.samplefmt,
|
||||
ffm.swscale,
|
||||
ffm.avio,
|
||||
ffm.frame,
|
||||
ffm.channel_layout,
|
||||
ffm.ctypes,
|
||||
ffm.mem,
|
||||
ffm.mathematics,
|
||||
ffm.avutil,
|
||||
ffm.opt,
|
||||
ffm.imgutils,
|
||||
ffm.log;
|
||||
|
||||
Const
|
||||
INBUF_SIZE = 4096;
|
||||
AUDIO_INBUF_SIZE = 20480;
|
||||
AUDIO_REFILL_THRESH = 4096;
|
||||
|
||||
(* check that a given sample format is supported by the encoder *)
|
||||
function check_sample_fmt(const codec: pAVCodec; const sample_fmt: TAVSampleFormat): Boolean;
|
||||
Var
|
||||
p: pAVSampleFormat;
|
||||
begin
|
||||
p := codec^.sample_fmts;
|
||||
while (p^ <> AV_SAMPLE_FMT_NONE) do
|
||||
begin
|
||||
if (p^ = sample_fmt) then
|
||||
Exit(True);
|
||||
inc(p);
|
||||
end;
|
||||
Result := False;
|
||||
end;
|
||||
|
||||
(* just pick the highest supported samplerate *)
|
||||
function select_sample_rate(const codec: pAVCodec): Integer;
|
||||
Var
|
||||
p: pInteger;
|
||||
best_samplerate: Integer;
|
||||
begin
|
||||
best_samplerate := 0;
|
||||
if not Assigned(codec^.supported_samplerates) then
|
||||
Exit(44100);
|
||||
p := codec^.supported_samplerates;
|
||||
while (p^ <> 0) do
|
||||
begin
|
||||
best_samplerate := MAX(p^, best_samplerate);
|
||||
inc(p);
|
||||
end;
|
||||
Result := best_samplerate;
|
||||
end;
|
||||
|
||||
(* select layout with the highest channel count *)
|
||||
function select_channel_layout(codec: pAVCodec): Integer;
|
||||
Var
|
||||
p: PUInt64_t;
|
||||
best_ch_layout: UInt64;
|
||||
best_nb_channels: Integer;
|
||||
nb_channels: Integer;
|
||||
begin
|
||||
best_ch_layout := 0;
|
||||
best_nb_channels := 0;
|
||||
if not Assigned(codec^.channel_layouts) then
|
||||
Exit(AV_CH_LAYOUT_STEREO);
|
||||
p := codec^.channel_layouts;
|
||||
while (p^ <> 0) do
|
||||
begin
|
||||
nb_channels := av_get_channel_layout_nb_channels(p^);
|
||||
if (nb_channels > best_nb_channels) then
|
||||
begin
|
||||
best_ch_layout := p^;
|
||||
best_nb_channels := nb_channels;
|
||||
end;
|
||||
inc(p);
|
||||
end;
|
||||
Result := best_ch_layout;
|
||||
end;
|
||||
|
||||
(*
|
||||
* Audio encoding example
|
||||
*)
|
||||
procedure audio_encode_example(const filename: String);
|
||||
Var
|
||||
codec: pAVCodec;
|
||||
c: pAVCodecContext;
|
||||
frame: pAVFrame;
|
||||
pkt: TAVPacket;
|
||||
i, j, k, ret, got_output: Integer;
|
||||
buffer_size: Integer;
|
||||
f: File;
|
||||
samples: ^Int16;
|
||||
t, tincr: Single;
|
||||
begin
|
||||
c := nil;
|
||||
WriteLn('Encode audio file ', filename);
|
||||
(* find the MP2 encoder *)
|
||||
codec := avcodec_find_encoder(AV_CODEC_ID_MP2);
|
||||
if not Assigned(codec) then
|
||||
begin
|
||||
WriteLn('Codec not found');
|
||||
Exit;
|
||||
end;
|
||||
c := avcodec_alloc_context3(codec);
|
||||
if not Assigned(c) then
|
||||
begin
|
||||
WriteLn('Could not allocate audio codec context');
|
||||
Exit;
|
||||
end;
|
||||
(* put sample parameters *)
|
||||
c^.bit_rate := 64000;
|
||||
(* check that the encoder supports s16 pcm input *)
|
||||
c^.sample_fmt := AV_SAMPLE_FMT_S16;
|
||||
if not check_sample_fmt(codec, c^.sample_fmt) then
|
||||
begin
|
||||
WriteLn('Encoder does not support sample format ', av_get_sample_fmt_name(c^.sample_fmt));
|
||||
Exit;
|
||||
end;
|
||||
(* select other audio parameters supported by the encoder *)
|
||||
c^.sample_rate := select_sample_rate(codec);
|
||||
c^.channel_layout := select_channel_layout(codec);
|
||||
c^.channels := av_get_channel_layout_nb_channels(c^.channel_layout);
|
||||
(* open it *)
|
||||
if (avcodec_open2(c, codec, nil) < 0) then
|
||||
begin
|
||||
WriteLn('Could not open codec');
|
||||
Exit;
|
||||
end;
|
||||
AssignFile(f, filename);
|
||||
try
|
||||
Rewrite(f, 1);
|
||||
except
|
||||
WriteLn('Could not open ', filename);
|
||||
Exit;
|
||||
end;
|
||||
(* frame containing input raw audio *)
|
||||
frame := av_frame_alloc();
|
||||
if not Assigned(frame) then
|
||||
begin
|
||||
WriteLn('Could not allocate audio frame');
|
||||
Exit;
|
||||
end;
|
||||
frame^.nb_samples := c^.frame_size;
|
||||
frame^.format := Integer(c^.sample_fmt);
|
||||
frame^.channel_layout := c^.channel_layout;
|
||||
(* the codec gives us the frame size, in samples,
|
||||
* we calculate the size of the samples buffer in bytes *)
|
||||
buffer_size := av_samples_get_buffer_size(nil, c^.channels, c^.frame_size, c^.sample_fmt, 0);
|
||||
if (buffer_size < 0) then
|
||||
begin
|
||||
WriteLn('Could not get sample buffer size');
|
||||
Exit;
|
||||
end;
|
||||
samples := av_malloc(buffer_size);
|
||||
if not Assigned(samples) then
|
||||
begin
|
||||
WriteLn('Could not allocate ', buffer_size, ' bytes for samples buffer');
|
||||
Exit;
|
||||
end;
|
||||
(* setup the data pointers in the AVFrame *)
|
||||
ret := avcodec_fill_audio_frame(frame, c^.channels, c^.sample_fmt, pByte(samples), buffer_size, 0);
|
||||
if (ret < 0) then
|
||||
begin
|
||||
WriteLn('Could not setup audio frame');
|
||||
Exit;
|
||||
end;
|
||||
(* encode a single tone sound *)
|
||||
t := 0;
|
||||
tincr := 2 * M_PI * 440.0 / c^.sample_rate;
|
||||
for i := 0 to 199 do
|
||||
begin
|
||||
av_init_packet(@pkt);
|
||||
pkt.data := nil; // packet data will be allocated by the encoder
|
||||
pkt.size := 0;
|
||||
for j := 0 to c^.frame_size - 1 do
|
||||
begin
|
||||
samples[2 * j] := Trunc((sin(t) * 10000));
|
||||
for k := 1 to c^.channels - 1 do
|
||||
samples[2 * j + k] := samples[2 * j];
|
||||
t := t + tincr;
|
||||
end;
|
||||
(* encode the samples *)
|
||||
ret := avcodec_encode_audio2(c, @pkt, frame, got_output);
|
||||
if (ret < 0) then
|
||||
begin
|
||||
WriteLn('Error encoding audio frame');
|
||||
Exit;
|
||||
end;
|
||||
if (got_output <> 0) then
|
||||
begin
|
||||
BlockWrite(f, pkt.data^, pkt.size);
|
||||
av_free_packet(pkt);
|
||||
end;
|
||||
end;
|
||||
(* get the delayed frames *)
|
||||
got_output := 1;
|
||||
while got_output <> 0 do
|
||||
// ; i++)
|
||||
begin
|
||||
ret := avcodec_encode_audio2(c, @pkt, nil, got_output);
|
||||
if (ret < 0) then
|
||||
begin
|
||||
WriteLn('Error encoding frame');
|
||||
Exit;
|
||||
end;
|
||||
if (got_output <> 0) then
|
||||
begin
|
||||
BlockWrite(f, pkt.data^, pkt.size);
|
||||
av_free_packet(pkt);
|
||||
end;
|
||||
inc(i);
|
||||
end;
|
||||
Close(f);
|
||||
av_freep(samples);
|
||||
av_frame_free(frame);
|
||||
avcodec_close(c);
|
||||
av_free(c);
|
||||
end;
|
||||
|
||||
(*
|
||||
* Audio decoding.
|
||||
*)
|
||||
procedure audio_decode_example(const outfilename: String; const filename: String);
|
||||
Var
|
||||
codec: pAVCodec;
|
||||
c: pAVCodecContext;
|
||||
len: Integer;
|
||||
f, outfile: File;
|
||||
inbuf: array [0 .. AUDIO_INBUF_SIZE + FF_INPUT_BUFFER_PADDING_SIZE - 1] of byte;
|
||||
avpkt: TAVPacket;
|
||||
decoded_frame: pAVFrame;
|
||||
got_frame: Integer;
|
||||
data_size: Integer;
|
||||
begin
|
||||
c := nil;
|
||||
decoded_frame := nil;
|
||||
av_init_packet(@avpkt);
|
||||
WriteLn('Decode audio file ', filename, ' to ', outfilename);
|
||||
(* find the mpeg audio decoder *)
|
||||
codec := avcodec_find_decoder(AV_CODEC_ID_MP2);
|
||||
if not Assigned(codec) then
|
||||
begin
|
||||
WriteLn('Codec not found');
|
||||
Exit;
|
||||
end;
|
||||
c := avcodec_alloc_context3(codec);
|
||||
if not Assigned(c) then
|
||||
begin
|
||||
WriteLn('Could not allocate audio codec context');
|
||||
Exit;
|
||||
end;
|
||||
(* open it *)
|
||||
if (avcodec_open2(c, codec, nil) < 0) then
|
||||
begin
|
||||
WriteLn('Could not open codec');
|
||||
Exit;
|
||||
end;
|
||||
AssignFile(f, filename);
|
||||
try
|
||||
Reset(f, 1);
|
||||
except
|
||||
WriteLn('Could not open ', filename);
|
||||
Exit;
|
||||
end;
|
||||
AssignFile(outfile, outfilename);
|
||||
try
|
||||
Rewrite(outfile, 1);
|
||||
except
|
||||
av_free(c);
|
||||
Exit;
|
||||
end;
|
||||
(* decode until eof *)
|
||||
avpkt.data := @inbuf;
|
||||
BlockRead(f, inbuf, AUDIO_INBUF_SIZE, avpkt.size);
|
||||
while (avpkt.size > 0) do
|
||||
begin
|
||||
got_frame := 0;
|
||||
if not Assigned(decoded_frame) then
|
||||
begin
|
||||
decoded_frame := av_frame_alloc();
|
||||
if not Assigned(decoded_frame) then
|
||||
begin
|
||||
WriteLn('Could not allocate audio frame');
|
||||
Exit;
|
||||
end;
|
||||
end;
|
||||
len := avcodec_decode_audio4(c, decoded_frame, got_frame, @avpkt);
|
||||
if (len < 0) then
|
||||
begin
|
||||
WriteLn('Error while decoding');
|
||||
Exit;
|
||||
end;
|
||||
if (got_frame <> 0) then
|
||||
begin
|
||||
(* if a frame has been decoded, output it *)
|
||||
data_size := av_samples_get_buffer_size(nil, c^.channels, decoded_frame^.nb_samples, c^.sample_fmt, 1);
|
||||
if (data_size < 0) then
|
||||
begin
|
||||
(* This should not occur, checking just for paranoia *)
|
||||
WriteLn('Failed to calculate data size');
|
||||
Exit;
|
||||
end;
|
||||
BlockWrite(outfile, decoded_frame^.data[0]^, data_size);
|
||||
end;
|
||||
avpkt.size := avpkt.size - len;
|
||||
avpkt.data := avpkt.data + len;
|
||||
avpkt.dts := AV_NOPTS_VALUE;
|
||||
avpkt.pts := AV_NOPTS_VALUE;
|
||||
if (avpkt.size < AUDIO_REFILL_THRESH) then
|
||||
begin
|
||||
(* Refill the input buffer, to avoid trying to decode
|
||||
* incomplete frames. Instead of this, one could also use
|
||||
* a parser, or use a proper container format through
|
||||
* libavformat. *)
|
||||
Move(avpkt.data^, inbuf, avpkt.size);
|
||||
avpkt.data := @inbuf;
|
||||
BlockRead(f, avpkt.data[avpkt.size], AUDIO_INBUF_SIZE - avpkt.size, len);
|
||||
if (len > 0) then
|
||||
avpkt.size := avpkt.size + len;
|
||||
end;
|
||||
end;
|
||||
Close(outfile);
|
||||
Close(f);
|
||||
avcodec_close(c);
|
||||
av_free(c);
|
||||
av_frame_free(decoded_frame);
|
||||
end;
|
||||
|
||||
(*
|
||||
* Video encoding example
|
||||
*)
|
||||
procedure video_encode_example(const filename: String; codec_id: TAVCodecID);
|
||||
Var
|
||||
codec: pAVCodec;
|
||||
c: pAVCodecContext;
|
||||
i, ret, x, y, got_output: Integer;
|
||||
f: File;
|
||||
frame: pAVFrame;
|
||||
pkt: TAVPacket;
|
||||
endcode: array [0 .. 3] of byte;
|
||||
begin
|
||||
c := nil;
|
||||
endcode[0] := 0;
|
||||
endcode[1] := 0;
|
||||
endcode[2] := 1;
|
||||
endcode[3] := $B7;
|
||||
WriteLn('Encode video file ', filename);
|
||||
(* find the mpeg1 video encoder *)
|
||||
codec := avcodec_find_encoder(codec_id);
|
||||
if not Assigned(codec) then
|
||||
begin
|
||||
WriteLn('Codec not found');
|
||||
Exit;
|
||||
end;
|
||||
c := avcodec_alloc_context3(codec);
|
||||
if not Assigned(c) then
|
||||
begin
|
||||
WriteLn('Could not allocate video codec context');
|
||||
Exit;
|
||||
end;
|
||||
(* put sample parameters *)
|
||||
c^.bit_rate := 400000;
|
||||
(* resolution must be a multiple of two *)
|
||||
c^.width := 352;
|
||||
c^.height := 288;
|
||||
(* frames per second *)
|
||||
c^.time_base.num := 1;
|
||||
c^.time_base.den := 25;
|
||||
(* emit one intra frame every ten frames
|
||||
* check frame pict_type before passing frame
|
||||
* to encoder, if frame^.pict_type is AV_PICTURE_TYPE_I
|
||||
* then gop_size is ignored and the output of encoder
|
||||
* will always be I frame irrespective to gop_size
|
||||
*)
|
||||
c^.gop_size := 10;
|
||||
c^.max_b_frames := 1;
|
||||
c^.pix_fmt := AV_PIX_FMT_YUV420P;
|
||||
if (codec_id = AV_CODEC_ID_H264) then
|
||||
av_opt_set(c^.priv_data, 'preset', 'slow', 0);
|
||||
(* open it *)
|
||||
if (avcodec_open2(c, codec, nil) < 0) then
|
||||
begin
|
||||
WriteLn('Could not open codec');
|
||||
Exit;
|
||||
end;
|
||||
AssignFile(f, filename);
|
||||
try
|
||||
Rewrite(f, 1);
|
||||
except
|
||||
WriteLn('Could not open ', filename);
|
||||
Exit;
|
||||
end;
|
||||
frame := av_frame_alloc();
|
||||
if not Assigned(frame) then
|
||||
begin
|
||||
WriteLn('Could not allocate video frame');
|
||||
Exit;
|
||||
end;
|
||||
frame^.format := Integer(c^.pix_fmt);
|
||||
frame^.width := c^.width;
|
||||
frame^.height := c^.height;
|
||||
(* the image can be allocated by any means and av_image_alloc() is
|
||||
* just the most convenient way if av_malloc() is to be used *)
|
||||
ret := av_image_alloc(frame^.data, frame^.linesize, c^.width, c^.height, c^.pix_fmt, 32);
|
||||
if (ret < 0) then
|
||||
begin
|
||||
WriteLn('Could not allocate raw picture buffer');
|
||||
Exit;
|
||||
end;
|
||||
// got_output:=0;
|
||||
(* encode 1 second of video *)
|
||||
for i := 0 to 24 do
|
||||
begin
|
||||
av_init_packet(@pkt);
|
||||
pkt.data := nil; // packet data will be allocated by the encoder
|
||||
pkt.size := 0;
|
||||
(* prepare a dummy image *)
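// A synthetic test pattern: the Y plane gets a diagonal gradient and the
// chroma planes slower gradients, all offset by the frame index i, so the
// encoder sees a smoothly moving image rather than a static one.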
|
||||
|
||||
(* Y *)
|
||||
for y := 0 to c^.height - 1 do
|
||||
for x := 0 to c^.width - 1 do
|
||||
frame^.data[0][y * frame^.linesize[0] + x] := x + y + i * 3;
|
||||
|
||||
(* Cb and Cr *)
|
||||
for y := 0 to (c^.height div 2) - 1 do
|
||||
for x := 0 to (c^.width div 2) - 1 do
|
||||
begin
|
||||
frame^.data[1][y * frame^.linesize[1] + x] := 128 + y + i * 2;
|
||||
frame^.data[2][y * frame^.linesize[2] + x] := 64 + x + i * 5;
|
||||
end;
|
||||
|
||||
frame^.pts := i;
|
||||
(* encode the image *)
|
||||
ret := avcodec_encode_video2(c, @pkt, frame, got_output);
|
||||
if (ret < 0) then
|
||||
begin
|
||||
WriteLn('Error encoding frame');
|
||||
Exit;
|
||||
end;
|
||||
if (got_output <> 0) then
|
||||
begin
|
||||
WriteLn(format('Write frame %3d (size=%5d)', [i, pkt.size]));
|
||||
BlockWrite(f, pkt.data^, pkt.size);
|
||||
av_free_packet(pkt);
|
||||
end;
|
||||
end;
|
||||
(* get the delayed frames *)
|
||||
got_output := 1;
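// Passing nil instead of a frame switches the encoder into flush mode: it
// keeps returning the packets it buffered for B-frame reordering until
// got_output comes back as 0.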
|
||||
While got_output <> 0 do
|
||||
begin
|
||||
ret := avcodec_encode_video2(c, @pkt, nil, got_output);
|
||||
if (ret < 0) then
|
||||
begin
|
||||
WriteLn('Error encoding frame');
|
||||
Exit;
|
||||
end;
|
||||
if (got_output <> 0) then
|
||||
begin
|
||||
WriteLn(format('Write frame %3d (size=%5d)', [i, pkt.size]));
|
||||
BlockWrite(f, pkt.data^, pkt.size);
|
||||
av_free_packet(pkt);
|
||||
end;
|
||||
inc(i);
|
||||
end;
|
||||
(* add sequence end code to have a real mpeg file *)
|
||||
BlockWrite(f, endcode, sizeof(endcode));
|
||||
Close(f);
|
||||
// avcodec_close(c);
|
||||
av_free(c);
|
||||
// av_freep(frame^.data[0]);
|
||||
av_frame_free(frame);
|
||||
end;
|
||||
|
||||
(*
|
||||
* Video decoding example
|
||||
*)
|
||||
procedure pgm_save(buf: pByte; wrap, xsize, ysize: Integer; filename: String);
|
||||
Var
|
||||
f: TextFile;
|
||||
fb: File;
|
||||
i: Integer;
|
||||
begin
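// Write a binary PGM (P5) image: the ASCII header goes out first through a
// TextFile, then the file is reopened untyped, positioned at its end, and
// the pixel rows (xsize bytes each, source stride "wrap") are appended.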
|
||||
AssignFile(f, filename);
|
||||
Rewrite(f);
|
||||
WriteLn(f, format('P5' + #13#10 + '%d %d' + #13#10 + '%d', [xsize, ysize, 255]));
|
||||
Close(f);
|
||||
AssignFile(fb, filename);
|
||||
Reset(fb, 1);
|
||||
Seek(fb, FileSize(fb));
|
||||
for i := 0 to ysize - 1 do
|
||||
BlockWrite(fb, buf[i * wrap], xsize);
|
||||
Close(fb);
|
||||
end;
|
||||
|
||||
function decode_write_frame(const outfilename: String; avctx: pAVCodecContext; frame: pAVFrame;
|
||||
Var frame_count: Integer; pkt: pAVPacket; last: Integer): Integer;
|
||||
Var
|
||||
len, got_frame: Integer;
|
||||
buf: array [0 .. 1023] of AnsiChar;
|
||||
|
||||
begin
|
||||
len := avcodec_decode_video2(avctx, frame, got_frame, pkt);
|
||||
if (len < 0) then
|
||||
begin
|
||||
WriteLn('Error while decoding frame ', frame_count);
|
||||
Exit(len);
|
||||
end;
|
||||
if (got_frame <> 0) then
|
||||
begin
|
||||
if last <> 0 then
|
||||
WriteLn(format('Saving last frame %3d', [frame_count]))
|
||||
else
|
||||
WriteLn(format('Saving frame %3d', [frame_count]));
|
||||
(* the picture is allocated by the decoder, no need to free it *)
|
||||
pgm_save(frame^.data[0], frame^.linesize[0], avctx^.width, avctx^.height, format(outfilename, [frame_count]));
|
||||
inc(frame_count);
|
||||
end;
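// The decoder may consume only part of the packet, so advance the data
// pointer and shrink the size by the number of bytes used (len); the caller
// keeps calling until the whole packet has been fed in.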
|
||||
if Assigned(pkt^.data) then
|
||||
begin
|
||||
pkt^.size := pkt^.size - len;
|
||||
pkt^.data := pkt^.data + len;
|
||||
end;
|
||||
Result := 0;
|
||||
end;
|
||||
|
||||
procedure video_decode_example(const outfilename: String; const filename: String);
|
||||
Var
|
||||
codec: pAVCodec;
|
||||
c: pAVCodecContext;
|
||||
frame_count: Integer;
|
||||
f: File;
|
||||
frame: pAVFrame;
|
||||
inbuf: array [0 .. INBUF_SIZE + FF_INPUT_BUFFER_PADDING_SIZE - 1] of byte;
|
||||
avpkt: TAVPacket;
|
||||
begin
|
||||
c := nil;
|
||||
av_init_packet(@avpkt);
|
||||
(* set end of buffer to 0 (this ensures that no overreading happens for damaged mpeg streams) *)
|
||||
FillChar(inbuf[INBUF_SIZE], FF_INPUT_BUFFER_PADDING_SIZE, 0);
|
||||
WriteLn(format('Decode video file %s to %s', [filename, outfilename]));
|
||||
(* find the mpeg1 video decoder *)
|
||||
codec := avcodec_find_decoder(AV_CODEC_ID_MPEG1VIDEO);
|
||||
if not Assigned(codec) then
|
||||
begin
|
||||
WriteLn('Codec not found');
|
||||
Exit;
|
||||
end;
|
||||
c := avcodec_alloc_context3(codec);
|
||||
if not Assigned(c) then
|
||||
begin
|
||||
WriteLn('Could not allocate video codec context');
|
||||
Exit;
|
||||
end;
|
||||
if (codec^.capabilities and CODEC_CAP_TRUNCATED) <> 0 then
|
||||
c^.flags := c^.flags or CODEC_FLAG_TRUNCATED; (* we do not send complete frames *)
|
||||
(* For some codecs, such as msmpeg4 and mpeg4, width and height
|
||||
MUST be initialized there because this information is not
|
||||
available in the bitstream. *)
|
||||
(* open it *)
|
||||
if (avcodec_open2(c, codec, nil) < 0) then
|
||||
begin
|
||||
WriteLn('Could not open codec');
|
||||
Exit;
|
||||
end;
|
||||
AssignFile(f, filename);
|
||||
try
|
||||
Reset(f, 1);
|
||||
except
|
||||
WriteLn('Could not open ', filename);
|
||||
Exit;
|
||||
end;
|
||||
frame := av_frame_alloc();
|
||||
if not Assigned(frame) then
|
||||
begin
|
||||
WriteLn('Could not allocate video frame');
|
||||
Exit;
|
||||
end;
|
||||
frame_count := 0;
|
||||
While True do
|
||||
begin
|
||||
BlockRead(f, inbuf, INBUF_SIZE, avpkt.size);
|
||||
if (avpkt.size = 0) then
|
||||
break;
|
||||
(* NOTE1: some codecs are stream based (mpegvideo, mpegaudio)
|
||||
and this is the only method to use them because you cannot
|
||||
know the compressed data size before analysing it.
|
||||
BUT some other codecs (msmpeg4, mpeg4) are inherently frame
|
||||
based, so you must call them with all the data for one
|
||||
frame exactly. You must also initialize 'width' and
|
||||
'height' before initializing them. *)
|
||||
(* NOTE2: some codecs allow the raw parameters (frame size,
|
||||
sample rate) to be changed at any frame. We handle this, so
|
||||
you should also take care of it *)
|
||||
(* here, we use a stream based decoder (mpeg1video), so we
|
||||
feed decoder and see if it could decode a frame *)
|
||||
avpkt.data := @inbuf;
|
||||
while (avpkt.size > 0) do
|
||||
if (decode_write_frame(outfilename, c, frame, frame_count, @avpkt, 0) < 0) then
|
||||
Exit;
|
||||
end;
|
||||
(* some codecs, such as MPEG, transmit the I and P frame with a
|
||||
latency of one frame. You must do the following to have a
|
||||
chance to get the last frame of the video *)
|
||||
avpkt.data := nil;
|
||||
avpkt.size := 0;
|
||||
decode_write_frame(outfilename, c, frame, frame_count, @avpkt, 1);
|
||||
Close(f);
|
||||
avcodec_close(c);
|
||||
av_free(c);
|
||||
av_frame_free(frame);
|
||||
end;
|
||||
|
||||
procedure avlog(ptr: Pointer; level: Integer; fmt: PAnsiChar; vl: pva_list); cdecl;
|
||||
Var
|
||||
line: array [0 .. 1023] of AnsiChar;
|
||||
print_prefix: Integer;
|
||||
A:AnsiString;
|
||||
begin
|
||||
print_prefix := 1;
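// av_log_format_line renders the FFmpeg log message (fmt plus its va_list)
// into the local buffer; the trimmed result is then echoed to the console.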
|
||||
av_log_format_line(ptr, level, fmt, vl, @line, sizeof(line), print_prefix);
|
||||
A:=Trim(AnsiString(line));
|
||||
Writeln(A);
|
||||
end;
|
||||
|
||||
Var
|
||||
output_type: String;
|
||||
|
||||
begin
|
||||
try
|
||||
av_log_set_callback(avlog);
|
||||
(* register all the codecs *)
|
||||
avcodec_register_all();
|
||||
if ParamCount = 0 then
|
||||
begin
|
||||
WriteLn('usage: ' + ExtractFileName(ParamStr(0)) + ' output_type' + #13#10 +
|
||||
'API example program to decode/encode a media stream with libavcodec.' + #13#10 +
|
||||
'This program generates a synthetic stream and encodes it to a file' + #13#10 +
|
||||
'named test.h264, test.mp2 or test.mpg depending on output_type.' + #13#10 +
|
||||
'The encoded stream is then decoded and written to a raw data output.' + #13#10 +
|
||||
'output_type must be chosen between "h264", "mp2", "mpg"');
|
||||
Halt;
|
||||
end;
|
||||
output_type := ParamStr(1);
|
||||
if (SameText(output_type, 'h264')) then
|
||||
video_encode_example('test.h264', AV_CODEC_ID_H264)
|
||||
else if (SameText(output_type, 'mp2')) then
|
||||
begin
|
||||
audio_encode_example('test.mp2');
|
||||
audio_decode_example('test.sw', 'test.mp2');
|
||||
end
|
||||
else if (SameText(output_type, 'mpg')) then
|
||||
begin
|
||||
video_encode_example('test.mpg', AV_CODEC_ID_MPEG1VIDEO);
|
||||
video_decode_example('test%02d.pgm', 'test.mpg');
|
||||
end
|
||||
else
|
||||
begin
|
||||
WriteLn(format('Invalid output type "%s", choose between "h264", "mp2", or "mpg"', [output_type]));
|
||||
Halt;
|
||||
end;
|
||||
except
|
||||
on E: Exception do
|
||||
WriteLn(E.ClassName, ': ', E.Message);
|
||||
end;
|
||||
|
||||
end.
|
@ -1,122 +0,0 @@
|
||||
<Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<PropertyGroup>
|
||||
<ProjectGuid>{CE0FA7DD-82F4-40E1-868E-89238AA4468B}</ProjectGuid>
|
||||
<MainSource>decoding_encoding.dpr</MainSource>
|
||||
<Base>True</Base>
|
||||
<Config Condition="'$(Config)'==''">Debug</Config>
|
||||
<TargetedPlatforms>1</TargetedPlatforms>
|
||||
<AppType>Console</AppType>
|
||||
<FrameworkType>None</FrameworkType>
|
||||
<ProjectVersion>15.4</ProjectVersion>
|
||||
<Platform Condition="'$(Platform)'==''">Win32</Platform>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Config)'=='Base' or '$(Base)'!=''">
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='Win32' and '$(Base)'=='true') or '$(Base_Win32)'!=''">
|
||||
<Base_Win32>true</Base_Win32>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Config)'=='Release' or '$(Cfg_1)'!=''">
|
||||
<Cfg_1>true</Cfg_1>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='Win32' and '$(Cfg_1)'=='true') or '$(Cfg_1_Win32)'!=''">
|
||||
<Cfg_1_Win32>true</Cfg_1_Win32>
|
||||
<CfgParent>Cfg_1</CfgParent>
|
||||
<Cfg_1>true</Cfg_1>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Config)'=='Debug' or '$(Cfg_2)'!=''">
|
||||
<Cfg_2>true</Cfg_2>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='Win32' and '$(Cfg_2)'=='true') or '$(Cfg_2_Win32)'!=''">
|
||||
<Cfg_2_Win32>true</Cfg_2_Win32>
|
||||
<CfgParent>Cfg_2</CfgParent>
|
||||
<Cfg_2>true</Cfg_2>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base)'!=''">
|
||||
<DCC_ExeOutput>..\..\..\bin\$(Platform)</DCC_ExeOutput>
|
||||
<Manifest_File>None</Manifest_File>
|
||||
<DCC_DcuOutput>.\$(Platform)\$(Config)</DCC_DcuOutput>
|
||||
<SanitizedProjectName>decoding_encoding</SanitizedProjectName>
|
||||
<DCC_K>false</DCC_K>
|
||||
<VerInfo_Locale>1049</VerInfo_Locale>
|
||||
<DCC_N>false</DCC_N>
|
||||
<VerInfo_Keys>CompanyName=;FileDescription=;FileVersion=1.0.0.0;InternalName=;LegalCopyright=;LegalTrademarks=;OriginalFilename=;ProductName=;ProductVersion=1.0.0.0;Comments=;CFBundleName=</VerInfo_Keys>
|
||||
<DCC_E>false</DCC_E>
|
||||
<DCC_S>false</DCC_S>
|
||||
<DCC_ImageBase>00400000</DCC_ImageBase>
|
||||
<DCC_Namespace>System;Xml;Data;Datasnap;Web;Soap;$(DCC_Namespace)</DCC_Namespace>
|
||||
<DCC_F>false</DCC_F>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_Win32)'!=''">
|
||||
<VerInfo_Locale>1033</VerInfo_Locale>
|
||||
<DCC_Namespace>Winapi;System.Win;Data.Win;Datasnap.Win;Web.Win;Soap.Win;Xml.Win;Bde;$(DCC_Namespace)</DCC_Namespace>
|
||||
<VerInfo_Keys>CompanyName=;FileDescription=;FileVersion=1.0.0.0;InternalName=;LegalCopyright=;LegalTrademarks=;OriginalFilename=;ProductName=;ProductVersion=1.0.0.0;Comments=</VerInfo_Keys>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_1)'!=''">
|
||||
<DCC_DebugInformation>0</DCC_DebugInformation>
|
||||
<DCC_Define>RELEASE;$(DCC_Define)</DCC_Define>
|
||||
<DCC_SymbolReferenceInfo>0</DCC_SymbolReferenceInfo>
|
||||
<DCC_LocalDebugSymbols>false</DCC_LocalDebugSymbols>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_1_Win32)'!=''">
|
||||
<VerInfo_Locale>1033</VerInfo_Locale>
|
||||
<VerInfo_Keys>CompanyName=;FileDescription=;FileVersion=1.0.0.0;InternalName=;LegalCopyright=;LegalTrademarks=;OriginalFilename=;ProductName=;ProductVersion=1.0.0.0;Comments=</VerInfo_Keys>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_2)'!=''">
|
||||
<DCC_Define>DEBUG;$(DCC_Define)</DCC_Define>
|
||||
<DCC_Optimize>false</DCC_Optimize>
|
||||
<DCC_GenerateStackFrames>true</DCC_GenerateStackFrames>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_2_Win32)'!=''">
|
||||
<Debugger_RunParams>h264</Debugger_RunParams>
|
||||
<VerInfo_Locale>1033</VerInfo_Locale>
|
||||
<VerInfo_Keys>CompanyName=;FileDescription=;FileVersion=1.0.0.0;InternalName=;LegalCopyright=;LegalTrademarks=;OriginalFilename=;ProductName=;ProductVersion=1.0.0.0;Comments=</VerInfo_Keys>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<DelphiCompile Include="$(MainSource)">
|
||||
<MainSource>MainSource</MainSource>
|
||||
</DelphiCompile>
|
||||
<BuildConfiguration Include="Debug">
|
||||
<Key>Cfg_2</Key>
|
||||
<CfgParent>Base</CfgParent>
|
||||
</BuildConfiguration>
|
||||
<BuildConfiguration Include="Base">
|
||||
<Key>Base</Key>
|
||||
</BuildConfiguration>
|
||||
<BuildConfiguration Include="Release">
|
||||
<Key>Cfg_1</Key>
|
||||
<CfgParent>Base</CfgParent>
|
||||
</BuildConfiguration>
|
||||
</ItemGroup>
|
||||
<ProjectExtensions>
|
||||
<Borland.Personality>Delphi.Personality.12</Borland.Personality>
|
||||
<Borland.ProjectType/>
|
||||
<BorlandProject>
|
||||
<Delphi.Personality>
|
||||
<Source>
|
||||
<Source Name="MainSource">decoding_encoding.dpr</Source>
|
||||
</Source>
|
||||
<Excluded_Packages>
|
||||
<Excluded_Packages Name="$(BDSBIN)\dcloffice2k200.bpl">Microsoft Office 2000 Sample Automation Server Wrapper Components</Excluded_Packages>
|
||||
<Excluded_Packages Name="$(BDSBIN)\dclofficexp200.bpl">Microsoft Office XP Sample Automation Server Wrapper Components</Excluded_Packages>
|
||||
</Excluded_Packages>
|
||||
</Delphi.Personality>
|
||||
<Platforms>
|
||||
<Platform value="OSX32">False</Platform>
|
||||
<Platform value="Win32">True</Platform>
|
||||
<Platform value="Win64">False</Platform>
|
||||
</Platforms>
|
||||
</BorlandProject>
|
||||
<ProjectFileVersion>12</ProjectFileVersion>
|
||||
</ProjectExtensions>
|
||||
<Import Project="$(BDS)\Bin\CodeGear.Delphi.Targets" Condition="Exists('$(BDS)\Bin\CodeGear.Delphi.Targets')"/>
|
||||
<Import Project="$(APPDATA)\Embarcadero\$(BDSAPPDATABASEDIR)\$(PRODUCTVERSION)\UserTools.proj" Condition="Exists('$(APPDATA)\Embarcadero\$(BDSAPPDATABASEDIR)\$(PRODUCTVERSION)\UserTools.proj')"/>
|
||||
</Project>
|
Binary file not shown.
@ -1,14 +0,0 @@
|
||||
program ffm_SDL2_VCL_player;
|
||||
|
||||
uses
|
||||
Vcl.Forms,
|
||||
uMainForm in 'uMainForm.pas' {MainForm};
|
||||
|
||||
{$R *.res}
|
||||
|
||||
begin
|
||||
Application.Initialize;
|
||||
Application.MainFormOnTaskbar := True;
|
||||
Application.CreateForm(TMainForm, MainForm);
|
||||
Application.Run;
|
||||
end.
|
@ -1,138 +0,0 @@
|
||||
<Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<PropertyGroup>
|
||||
<ProjectGuid>{400A9D75-6BD4-4E27-A2F0-25E5DB6EBC97}</ProjectGuid>
|
||||
<ProjectVersion>18.2</ProjectVersion>
|
||||
<FrameworkType>VCL</FrameworkType>
|
||||
<MainSource>ffm_SDL2_VCL_player.dpr</MainSource>
|
||||
<Base>True</Base>
|
||||
<Config Condition="'$(Config)'==''">Debug</Config>
|
||||
<Platform Condition="'$(Platform)'==''">Win32</Platform>
|
||||
<TargetedPlatforms>1</TargetedPlatforms>
|
||||
<AppType>Application</AppType>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Config)'=='Base' or '$(Base)'!=''">
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='Win32' and '$(Base)'=='true') or '$(Base_Win32)'!=''">
|
||||
<Base_Win32>true</Base_Win32>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='Win64' and '$(Base)'=='true') or '$(Base_Win64)'!=''">
|
||||
<Base_Win64>true</Base_Win64>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Config)'=='Debug' or '$(Cfg_1)'!=''">
|
||||
<Cfg_1>true</Cfg_1>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='Win32' and '$(Cfg_1)'=='true') or '$(Cfg_1_Win32)'!=''">
|
||||
<Cfg_1_Win32>true</Cfg_1_Win32>
|
||||
<CfgParent>Cfg_1</CfgParent>
|
||||
<Cfg_1>true</Cfg_1>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Config)'=='Release' or '$(Cfg_2)'!=''">
|
||||
<Cfg_2>true</Cfg_2>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base)'!=''">
|
||||
<DCC_Namespace>System;Xml;Data;Datasnap;Web;Soap;Vcl;Vcl.Imaging;Vcl.Touch;Vcl.Samples;Vcl.Shell;$(DCC_Namespace)</DCC_Namespace>
|
||||
<Icon_MainIcon>$(BDS)\bin\delphi_PROJECTICON.ico</Icon_MainIcon>
|
||||
<VerInfo_Locale>1049</VerInfo_Locale>
|
||||
<VerInfo_Keys>CompanyName=;FileDescription=;FileVersion=1.0.0.0;InternalName=;LegalCopyright=;LegalTrademarks=;OriginalFilename=;ProductName=;ProductVersion=1.0.0.0;Comments=</VerInfo_Keys>
|
||||
<SanitizedProjectName>ffm_SDL2_VCL_player</SanitizedProjectName>
|
||||
<Manifest_File>$(BDS)\bin\default_app.manifest</Manifest_File>
|
||||
<DCC_DcuOutput>.\$(Platform)\$(Config)</DCC_DcuOutput>
|
||||
<DCC_ExeOutput>..\..\..\bin\$(Platform)</DCC_ExeOutput>
|
||||
<DCC_E>false</DCC_E>
|
||||
<DCC_N>false</DCC_N>
|
||||
<DCC_S>false</DCC_S>
|
||||
<DCC_F>false</DCC_F>
|
||||
<DCC_K>false</DCC_K>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_Win32)'!=''">
|
||||
<Manifest_File>$(BDS)\bin\default_app.manifest</Manifest_File>
|
||||
<DCC_UsePackage>dxSkinOffice2007BlackRS19;JvGlobus;JvMM;JvManagedThreads;dxSkinLiquidSkyRS19;cxBarEditItemRS19;OverbyteIcsDXE5Run;FireDACPgDriver;dxWizardControlRS19;JvCrypt;dxPScxCommonRS19;dxThemeRS19;tmswizdXE5;XiButtonXE4;cxGridRS19;dxPScxExtCommonRS19;DBXInterBaseDriver;DataSnapServer;DataSnapCommon;JvNet;cxSchedulerRS19;JvDotNetCtrls;DbxCommonDriver;vclimg;dbxcds;dxFlowChartRS19;DatasnapConnectorsFreePascal;JvXPCtrls;dxdbtrRS19;dxSkinSpringTimeRS19;vcldb;dxdborRS19;dxDockingRS19;dxSkinsdxDLPainterRS19;cxSpreadSheetRS19;dxtrmdRS19;dxSpellCheckerRS19;CustomIPTransport;dxTileControlRS19;dsnap;IndyIPServer;dxPSCoreRS19;dxSkinFoggyRS19;IndyCore;cxSchedulerGridRS19;cxPivotGridOLAPRS19;dxSkinStardustRS19;CloudService;FmxTeeUI;FireDACIBDriver;dxSkinXmas2008BlueRS19;JvDB;JvRuntimeDesign;dxSkinValentineRS19;dsnapxml;dxPScxSchedulerLnkRS19;JclDeveloperTools;FireDACDb2Driver;dxSkinDarkSideRS19;dxSkinLondonLiquidSkyRS19;dxBarExtDBItemsRS19;dxTabbedMDIRS19;OpenCV200;dxSkinOffice2013WhiteRS19;dxSkinSharpRS19;bindcompfmx;dxSkinBlueprintRS19;dxSkinOffice2007PinkRS19;vcldbx;cxExportRS19;FireDACODBCDriver;RESTBackendComponents;dxSkinCoffeeRS19;dbrtl;FireDACCommon;bindcomp;inetdb;JvPluginSystem;dxSkinBlueRS19;dxServerModeRS19;DBXOdbcDriver;JvCmp;vclFireDAC;dxSkinMoneyTwinsRS19;JvTimeFramework;xmlrtl;cxPivotGridChartRS19;ibxpress;dxSkiniMaginaryRS19;dxSkinOffice2007GreenRS19;FireDACCommonDriver;bindengine;vclactnband;soaprtl;FMXTee;dxRibbonRS19;bindcompvcl;dxADOServerModeRS19;Jcl;vclie;dxPSdxLCLnkRS19;dxSkinBlackRS19;dxSkinOffice2010BlackRS19;dxSkinSevenClassicRS19;FireDACMSSQLDriver;DBXInformixDriver;Intraweb;DataSnapServerMidas;dsnapcon;DBXFirebirdDriver;dxSkinscxSchedulerPainterRS19;inet;dxPSdxFCLnkRS19;dxSkinsdxNavBarPainterRS19;JvPascalInterpreter;FireDACMySQLDriver;soapmidas;vclx;dxPSPrVwRibbonRS19;dxPSDBTeeChartRS19;DBXSybaseASADriver;RESTComponents;dxSkinLilianRS19;dxSkinscxPCPainterRS19;dbexpress;JvBDE;IndyIPClient;dxSkinSharpPlusRS19;cxSchedulerTreeBrowserRS19;dxPScxSSLnkRS19;dxPScxPivotGridLnkRS19;tmsdXE5;FireDACSqliteDriver;FireDACDSDriver;ZComponent;DBXSqliteDriver;dxPSdxDBTVLnkRS19;dxSkinOffice2007BlueRS19;cxDataRS19;cxLibraryRS19;fmx;JvDlgs;IndySystem;TeeDB;tethering;dxPsPrVwAdvRS19;dxSkinHighContrastRS19;inetdbbde;vclib;DataSnapClient;DataSnapProviderClient;DBXSybaseASEDriver;dxmdsRS19;dxSkinOffice2010SilverRS19;dxSkinsdxBarPainterRS19;MetropolisUILiveTile;dxPSdxOCLnkRS19;vcldsnap;fmxFireDAC;DBXDb2Driver;dxSkinDevExpressDarkStyleRS19;DBXOracleDriver;dxBarDBNavRS19;JvCore;vclribbon;dxSkinSilverRS19;dxSkinVS2010RS19;fmxase;vcl;dxPSdxDBOCLnkRS19;DBXMSSQLDriver;IndyIPCommon;CodeSiteExpressPkg;cxTreeListdxBarPopupMenuRS19;dxBarRS19;DataSnapFireDAC;FireDACDBXDriver;JvAppFrm;soapserver;dxFireDACServerModeRS19;inetdbxpress;pkgAdobe;cxEditorsRS19;dxSkinMcSkinRS19;FireDACInfxDriver;JvDocking;adortl;dxSkinOffice2007SilverRS19;JvWizards;FireDACASADriver;dxSkinSevenRS19;JvHMI;dxDBXServerModeRS19;dxLayoutControlRS19;dxPSTeeChartRS19;dxSkinWhiteprintRS19;JvBands;cxPageControlRS19;ZDbc;rtl;dcldxSkinsCoreRS19;DbxClientDriver;dxPScxGridLnkRS19;ZPlain;Tee;JclContainers;cxVerticalGridRS19;cxPageControldxBarPopupMenuRS19;CPortLibDXE;JvSystem;DataSnapNativeClient;svnui;dxSkinsdxRibbonPainterRS19;JvControls;cxPivotGridRS19;dxComnRS19;IndyProtocols;DBXMySQLDriver;dxSkinSummer2008RS19;dxSkinTheAsphaltWorldRS19;viTimeLineDPK;dxPSLnksRS19;tmsxlsdXE5;bindcompdbx;TeeUI;JvJans;JvPrintPreview;JvPageComps;dxSkinDarkRoomRS19;JvStdCtrls;JvCustom;dxSkinPumpkinRS19;dxBarExtItemsRS19;FireDACADSDriver;vcltouch;dxNavBarRS19;ZCo
re;VclSmp;FireDAC;VCLRESTComponents;dxGDIPlusRS19;DataSnapConnectors;dxCoreRS19;dxPScxVGridLnkRS19;dxPScxTLLnkRS19;dxSkinsCoreRS19;fmxobj;dxSkinGlassOceansRS19;JclVcl;ZParseSql;dxPScxPCProdRS19;svn;dxSkinOffice2010BlueRS19;tmsexdXE5;FireDACOracleDriver;fmxdae;dxorgcRS19;bdertl;cxTreeListRS19;FireDACMSAccDriver;DataSnapIndy10ServerTransport;dxSkinDevExpressStyleRS19;dxSkinCaramelRS19;$(DCC_UsePackage)</DCC_UsePackage>
|
||||
<VerInfo_IncludeVerInfo>true</VerInfo_IncludeVerInfo>
|
||||
<VerInfo_Keys>CompanyName=;FileVersion=1.0.0.0;InternalName=;LegalCopyright=;LegalTrademarks=;OriginalFilename=;ProductVersion=1.0.0.0;Comments=;ProgramID=com.embarcadero.$(MSBuildProjectName);FileDescription=$(MSBuildProjectName);ProductName=$(MSBuildProjectName)</VerInfo_Keys>
|
||||
<VerInfo_Locale>1033</VerInfo_Locale>
|
||||
<DCC_Namespace>Winapi;System.Win;Data.Win;Datasnap.Win;Web.Win;Soap.Win;Xml.Win;Bde;$(DCC_Namespace)</DCC_Namespace>
|
||||
<AppEnableRuntimeThemes>true</AppEnableRuntimeThemes>
|
||||
<UWP_DelphiLogo44>$(BDS)\bin\Artwork\Windows\UWP\delphi_UwpDefault_44.png</UWP_DelphiLogo44>
|
||||
<UWP_DelphiLogo150>$(BDS)\bin\Artwork\Windows\UWP\delphi_UwpDefault_150.png</UWP_DelphiLogo150>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_Win64)'!=''">
|
||||
<DCC_UsePackage>dxSkinOffice2007BlackRS19;dxSkinLiquidSkyRS19;cxBarEditItemRS19;OverbyteIcsDXE5Run;FireDACPgDriver;DBXInterBaseDriver;DataSnapServer;DataSnapCommon;cxSchedulerRS19;DbxCommonDriver;vclimg;dbxcds;dxFlowChartRS19;DatasnapConnectorsFreePascal;dxdbtrRS19;dxSkinSpringTimeRS19;vcldb;dxdborRS19;dxDockingRS19;dxSkinsdxDLPainterRS19;cxSpreadSheetRS19;dxtrmdRS19;dxSpellCheckerRS19;CustomIPTransport;dxTileControlRS19;dsnap;IndyIPServer;dxSkinFoggyRS19;IndyCore;cxSchedulerGridRS19;cxPivotGridOLAPRS19;dxSkinStardustRS19;CloudService;FmxTeeUI;FireDACIBDriver;dxSkinXmas2008BlueRS19;dxSkinValentineRS19;dsnapxml;dxPScxSchedulerLnkRS19;FireDACDb2Driver;dxSkinDarkSideRS19;dxSkinLondonLiquidSkyRS19;dxBarExtDBItemsRS19;dxTabbedMDIRS19;OpenCV200;dxSkinOffice2013WhiteRS19;dxSkinSharpRS19;bindcompfmx;dxSkinBlueprintRS19;dxSkinOffice2007PinkRS19;cxExportRS19;FireDACODBCDriver;RESTBackendComponents;dxSkinCoffeeRS19;dbrtl;FireDACCommon;bindcomp;inetdb;dxSkinBlueRS19;dxServerModeRS19;DBXOdbcDriver;vclFireDAC;dxSkinMoneyTwinsRS19;xmlrtl;cxPivotGridChartRS19;ibxpress;dxSkiniMaginaryRS19;dxSkinOffice2007GreenRS19;FireDACCommonDriver;bindengine;vclactnband;soaprtl;FMXTee;bindcompvcl;dxADOServerModeRS19;vclie;dxSkinBlackRS19;dxSkinOffice2010BlackRS19;dxSkinSevenClassicRS19;FireDACMSSQLDriver;DBXInformixDriver;Intraweb;DataSnapServerMidas;dsnapcon;DBXFirebirdDriver;dxSkinscxSchedulerPainterRS19;inet;dxPSdxFCLnkRS19;dxSkinsdxNavBarPainterRS19;FireDACMySQLDriver;soapmidas;vclx;dxPSPrVwRibbonRS19;DBXSybaseASADriver;RESTComponents;dxSkinLilianRS19;dxSkinscxPCPainterRS19;dbexpress;IndyIPClient;dxSkinSharpPlusRS19;cxSchedulerTreeBrowserRS19;dxPScxSSLnkRS19;dxPScxPivotGridLnkRS19;tmsdXE5;FireDACSqliteDriver;FireDACDSDriver;ZComponent;DBXSqliteDriver;dxPSdxDBTVLnkRS19;dxSkinOffice2007BlueRS19;cxDataRS19;fmx;IndySystem;TeeDB;tethering;dxPsPrVwAdvRS19;dxSkinHighContrastRS19;vclib;DataSnapClient;DataSnapProviderClient;DBXSybaseASEDriver;dxmdsRS19;dxSkinOffice2010SilverRS19;dxSkinsdxBarPainterRS19;MetropolisUILiveTile;dxPSdxOCLnkRS19;vcldsnap;fmxFireDAC;DBXDb2Driver;dxSkinDevExpressDarkStyleRS19;DBXOracleDriver;dxBarDBNavRS19;vclribbon;dxSkinSilverRS19;dxSkinVS2010RS19;fmxase;vcl;DBXMSSQLDriver;IndyIPCommon;cxTreeListdxBarPopupMenuRS19;dxBarRS19;DataSnapFireDAC;FireDACDBXDriver;soapserver;dxFireDACServerModeRS19;inetdbxpress;cxEditorsRS19;dxSkinMcSkinRS19;FireDACInfxDriver;adortl;dxSkinOffice2007SilverRS19;FireDACASADriver;dxSkinSevenRS19;dxDBXServerModeRS19;dxLayoutControlRS19;dxSkinWhiteprintRS19;ZDbc;rtl;dcldxSkinsCoreRS19;DbxClientDriver;ZPlain;Tee;cxVerticalGridRS19;cxPageControldxBarPopupMenuRS19;DataSnapNativeClient;dxSkinsdxRibbonPainterRS19;cxPivotGridRS19;dxComnRS19;IndyProtocols;DBXMySQLDriver;dxSkinSummer2008RS19;dxSkinTheAsphaltWorldRS19;tmsxlsdXE5;bindcompdbx;TeeUI;dxSkinDarkRoomRS19;dxSkinPumpkinRS19;dxBarExtItemsRS19;FireDACADSDriver;vcltouch;ZCore;VclSmp;FireDAC;VCLRESTComponents;dxGDIPlusRS19;DataSnapConnectors;dxPScxVGridLnkRS19;dxPScxTLLnkRS19;dxSkinsCoreRS19;fmxobj;dxSkinGlassOceansRS19;ZParseSql;dxPScxPCProdRS19;dxSkinOffice2010BlueRS19;tmsexdXE5;FireDACOracleDriver;fmxdae;cxTreeListRS19;FireDACMSAccDriver;DataSnapIndy10ServerTransport;dxSkinDevExpressStyleRS19;dxSkinCaramelRS19;$(DCC_UsePackage)</DCC_UsePackage>
|
||||
<UWP_DelphiLogo44>$(BDS)\bin\Artwork\Windows\UWP\delphi_UwpDefault_44.png</UWP_DelphiLogo44>
|
||||
<UWP_DelphiLogo150>$(BDS)\bin\Artwork\Windows\UWP\delphi_UwpDefault_150.png</UWP_DelphiLogo150>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_1)'!=''">
|
||||
<DCC_Define>DEBUG;$(DCC_Define)</DCC_Define>
|
||||
<DCC_DebugDCUs>true</DCC_DebugDCUs>
|
||||
<DCC_Optimize>false</DCC_Optimize>
|
||||
<DCC_GenerateStackFrames>true</DCC_GenerateStackFrames>
|
||||
<DCC_DebugInfoInExe>true</DCC_DebugInfoInExe>
|
||||
<DCC_RemoteDebug>true</DCC_RemoteDebug>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_1_Win32)'!=''">
|
||||
<VerInfo_IncludeVerInfo>true</VerInfo_IncludeVerInfo>
|
||||
<VerInfo_Locale>1033</VerInfo_Locale>
|
||||
<VerInfo_Keys>CompanyName=;FileVersion=1.0.0.0;InternalName=;LegalCopyright=;LegalTrademarks=;OriginalFilename=;ProductVersion=1.0.0.0;Comments=;LastCompiledTime=31.07.2014 3:17:54;ProgramID=com.embarcadero.$(MSBuildProjectName);FileDescription=$(MSBuildProjectName);ProductName=$(MSBuildProjectName)</VerInfo_Keys>
|
||||
<DCC_RemoteDebug>false</DCC_RemoteDebug>
|
||||
<BT_BuildType>Debug</BT_BuildType>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_2)'!=''">
|
||||
<DCC_LocalDebugSymbols>false</DCC_LocalDebugSymbols>
|
||||
<DCC_Define>RELEASE;$(DCC_Define)</DCC_Define>
|
||||
<DCC_SymbolReferenceInfo>0</DCC_SymbolReferenceInfo>
|
||||
<DCC_DebugInformation>0</DCC_DebugInformation>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<DelphiCompile Include="$(MainSource)">
|
||||
<MainSource>MainSource</MainSource>
|
||||
</DelphiCompile>
|
||||
<DCCReference Include="uMainForm.pas">
|
||||
<Form>MainForm</Form>
|
||||
<FormType>dfm</FormType>
|
||||
</DCCReference>
|
||||
<BuildConfiguration Include="Release">
|
||||
<Key>Cfg_2</Key>
|
||||
<CfgParent>Base</CfgParent>
|
||||
</BuildConfiguration>
|
||||
<BuildConfiguration Include="Base">
|
||||
<Key>Base</Key>
|
||||
</BuildConfiguration>
|
||||
<BuildConfiguration Include="Debug">
|
||||
<Key>Cfg_1</Key>
|
||||
<CfgParent>Base</CfgParent>
|
||||
</BuildConfiguration>
|
||||
</ItemGroup>
|
||||
<ProjectExtensions>
|
||||
<Borland.Personality>Delphi.Personality.12</Borland.Personality>
|
||||
<Borland.ProjectType/>
|
||||
<BorlandProject>
|
||||
<Delphi.Personality>
|
||||
<Source>
|
||||
<Source Name="MainSource">ffm_SDL2_VCL_player.dpr</Source>
|
||||
</Source>
|
||||
<Excluded_Packages>
|
||||
<Excluded_Packages Name="$(BDSBIN)\dcloffice2k200.bpl">Microsoft Office 2000 Sample Automation Server Wrapper Components</Excluded_Packages>
|
||||
<Excluded_Packages Name="$(BDSBIN)\dclofficexp200.bpl">Microsoft Office XP Sample Automation Server Wrapper Components</Excluded_Packages>
|
||||
</Excluded_Packages>
|
||||
</Delphi.Personality>
|
||||
<Deployment Version="2"/>
|
||||
<Platforms>
|
||||
<Platform value="Win32">True</Platform>
|
||||
<Platform value="Win64">False</Platform>
|
||||
</Platforms>
|
||||
</BorlandProject>
|
||||
<ProjectFileVersion>12</ProjectFileVersion>
|
||||
</ProjectExtensions>
|
||||
<Import Project="$(BDS)\Bin\CodeGear.Delphi.Targets" Condition="Exists('$(BDS)\Bin\CodeGear.Delphi.Targets')"/>
|
||||
<Import Project="$(APPDATA)\Embarcadero\$(BDSAPPDATABASEDIR)\$(PRODUCTVERSION)\UserTools.proj" Condition="Exists('$(APPDATA)\Embarcadero\$(BDSAPPDATABASEDIR)\$(PRODUCTVERSION)\UserTools.proj')"/>
|
||||
<Import Project="$(MSBuildProjectName).deployproj" Condition="Exists('$(MSBuildProjectName).deployproj')"/>
|
||||
</Project>
|
Binary file not shown.
@ -1,20 +0,0 @@
|
||||
object MainForm: TMainForm
|
||||
Left = 0
|
||||
Top = 0
|
||||
Caption = 'MainForm'
|
||||
ClientHeight = 437
|
||||
ClientWidth = 589
|
||||
Color = clBtnFace
|
||||
Font.Charset = DEFAULT_CHARSET
|
||||
Font.Color = clWindowText
|
||||
Font.Height = -11
|
||||
Font.Name = 'Tahoma'
|
||||
Font.Style = []
|
||||
OldCreateOrder = False
|
||||
OnActivate = FormActivate
|
||||
OnCreate = FormCreate
|
||||
OnDestroy = FormDestroy
|
||||
OnResize = FormResize
|
||||
PixelsPerInch = 96
|
||||
TextHeight = 13
|
||||
end
|
@ -1,175 +0,0 @@
|
||||
unit uMainForm;
|
||||
|
||||
interface
|
||||
|
||||
uses
|
||||
Winapi.Windows, Winapi.Messages, System.SysUtils, System.Variants, System.Classes, Vcl.Graphics,
|
||||
Vcl.Controls, Vcl.Forms, Vcl.Dialogs, Vcl.ExtCtrls;
|
||||
|
||||
Const
|
||||
WM_PLAYVIDEO = WM_USER + 100;
|
||||
|
||||
type
|
||||
TMainForm = class(TForm)
|
||||
procedure FormCreate(Sender: TObject);
|
||||
procedure FormActivate(Sender: TObject);
|
||||
procedure FormResize(Sender: TObject);
|
||||
procedure FormDestroy(Sender: TObject);
|
||||
private
|
||||
procedure WMPlayVideo(var Message: TMessage); message WM_PLAYVIDEO;
|
||||
procedure Decode;
|
||||
public
|
||||
end;
|
||||
|
||||
var
|
||||
MainForm: TMainForm;
|
||||
|
||||
implementation
|
||||
|
||||
{$R *.dfm}
|
||||
|
||||
Uses
|
||||
SDL2,
|
||||
ffm.avformat,
|
||||
ffm.avutil,
|
||||
ffm.frame,
|
||||
ffm.swscale,
|
||||
ffm.libavcodec.avcodec,
|
||||
uResourcePaths;
|
||||
|
||||
const
|
||||
std_filename = cResourceMedia + 'trailer.avi';
|
||||
pixel_format = SDL_PIXELFORMAT_YV12;
|
||||
|
||||
Var
|
||||
sdlWnd: PSDL_Window;
|
||||
renderer: PSDL_Renderer;
|
||||
MooseTexture: pSDL_Texture;
|
||||
|
||||
format_context: pAVFormatContext = nil;
|
||||
video_stream: Integer;
|
||||
codec_context: pAVCodecContext;
|
||||
img_convert_context: pSwsContext;
|
||||
codec: pAVCodec;
|
||||
pFrame, frame: pAVFrame;
|
||||
packet: TAVPacket;
|
||||
frame_finished: Integer;
|
||||
ImgBufferSize: Integer;
|
||||
ImgBuffer: PByte;
|
||||
pict: TAVPicture;
|
||||
|
||||
procedure TMainForm.FormActivate(Sender: TObject);
|
||||
begin
|
||||
PostMessage(Handle, WM_PLAYVIDEO, 0, 0);
|
||||
end;
|
||||
|
||||
procedure TMainForm.FormCreate(Sender: TObject);
|
||||
Var
|
||||
err: Integer;
|
||||
begin
|
||||
if (SDL_Init(SDL_INIT_VIDEO or SDL_INIT_NOPARACHUTE) < 0) then
|
||||
FatalAppExit(0, PChar('Couldn''t initialize SDL: ' + SDL_GetError()));
|
||||
// SDL_InitSubSystem(SDL_INIT_VIDEO);
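// SDL_CreateWindowFrom wraps the existing VCL form handle, so SDL renders
// directly onto this form's client area instead of opening its own window.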
|
||||
sdlWnd := SDL_CreateWindowFrom(Pointer(Handle));
|
||||
renderer := SDL_CreateRenderer(sdlWnd, -1, SDL_RENDERER_ACCELERATED or SDL_RENDERER_PRESENTVSYNC);
|
||||
if not Assigned(renderer) then
|
||||
FatalAppExit(0, PChar('SDL_CreateRenderer Error: ' + SDL_GetError()));
|
||||
|
||||
// Register all available file formats and codecs
|
||||
av_register_all();
|
||||
avformat_network_init();
|
||||
// Open video file
|
||||
Assert(avformat_open_input(format_context, std_filename, nil, nil) >= 0);
|
||||
// Retrieve stream information
|
||||
Assert(avformat_find_stream_info(format_context, nil) >= 0);
|
||||
// Dump information about file onto standard error
|
||||
av_dump_format(format_context, 0, std_filename, 0);
|
||||
// Find the first video stream
|
||||
for video_stream := 0 to format_context^.nb_streams - 1 do
|
||||
if (format_context^.streams[video_stream]^.codec^.codec_type = AVMEDIA_TYPE_VIDEO) then
|
||||
break;
|
||||
Assert(video_stream < format_context^.nb_streams);
|
||||
codec_context := format_context^.streams[video_stream]^.codec;
|
||||
codec := avcodec_find_decoder(codec_context^.codec_id);
|
||||
Assert(avcodec_open2(codec_context, codec, nil) >= 0);
|
||||
|
||||
SDL_SetRenderDrawColor(renderer, 0, 0, 0, 255); // Set default colour to black.
|
||||
Assert(SDL_RenderClear(renderer) >= 0);
|
||||
|
||||
MooseTexture := SDL_CreateTexture(renderer, pixel_format, Integer(SDL_TEXTUREACCESS_STREAMING), codec_context^.width,
|
||||
codec_context^.height);
|
||||
Assert(Assigned(MooseTexture));
|
||||
|
||||
img_convert_context := sws_getCachedContext(nil, codec_context^.width, codec_context^.height, codec_context^.pix_fmt,
|
||||
codec_context^.width, codec_context^.height, codec_context^.pix_fmt, SWS_BICUBIC, nil, nil, nil);
|
||||
Assert(Assigned(img_convert_context));
|
||||
|
||||
pFrame := av_frame_alloc();//avcodec_alloc_frame();
|
||||
|
||||
frame := av_frame_alloc();//avcodec_alloc_frame();
|
||||
ImgBufferSize := avpicture_get_size(codec_context^.pix_fmt, codec_context^.width, codec_context^.height);
|
||||
ImgBuffer := AllocMem(ImgBufferSize);
|
||||
avpicture_fill(pAVPicture(frame), ImgBuffer, codec_context^.pix_fmt, codec_context^.width, codec_context^.height);
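// "frame" is backed by the contiguous ImgBuffer, so the picture produced by
// sws_scale can later be handed to SDL_UpdateTexture in a single call.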
|
||||
end;
|
||||
|
||||
procedure TMainForm.FormDestroy(Sender: TObject);
|
||||
begin
|
||||
{ TODO: destroy ffmpeg objects }
|
||||
SDL_DestroyRenderer(renderer);
|
||||
SDL_DestroyWindow(sdlWnd);
|
||||
SDL_Quit();
|
||||
end;
|
||||
|
||||
procedure TMainForm.FormResize(Sender: TObject);
|
||||
begin
|
||||
SDL_RenderSetViewport(renderer, nil);
|
||||
end;
|
||||
|
||||
procedure TMainForm.WMPlayVideo(var Message: TMessage);
|
||||
begin
|
||||
Decode;
|
||||
end;
|
||||
|
||||
procedure TMainForm.Decode;
|
||||
begin
|
||||
frame_finished := 1;
|
||||
repeat
|
||||
if av_read_frame(format_context, packet) >= 0 then
|
||||
begin
|
||||
if (packet.stream_index = video_stream) then
|
||||
begin
|
||||
// Video stream packet
|
||||
avcodec_decode_video2(codec_context, pFrame, frame_finished, @packet);
|
||||
|
||||
if (frame_finished <> 0) then
|
||||
begin
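// SDL's YV12 texture stores the planes as Y, V, U, while the usual planar
// YUV420P frame stores Y, U, V, so the second and third plane pointers (and
// their strides) are swapped before sws_scale writes into pict.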
|
||||
pict.data[0] := frame^.data[0];
|
||||
pict.data[1] := frame^.data[2];
|
||||
pict.data[2] := frame^.data[1];
|
||||
|
||||
pict.linesize[0] := frame^.linesize[0];
|
||||
pict.linesize[1] := frame^.linesize[2];
|
||||
pict.linesize[2] := frame^.linesize[1];
|
||||
|
||||
sws_scale(img_convert_context, @pFrame^.data, @pFrame^.linesize, 0, codec_context^.height, @pict.data,
|
||||
@pict.linesize);
|
||||
|
||||
SDL_UpdateTexture(MooseTexture, nil, ImgBuffer, codec_context^.width * SDL_BYTESPERPIXEL(pixel_format));
|
||||
|
||||
SDL_RenderClear(renderer);
|
||||
|
||||
SDL_RenderCopy(renderer, MooseTexture, nil, nil);
|
||||
SDL_RenderPresent(renderer);
|
||||
|
||||
av_frame_unref(pFrame);
|
||||
// avcodec_get_frame_defaults(pFrame);
|
||||
end;
|
||||
end;
|
||||
av_free_packet(packet);
|
||||
end;
|
||||
Application.ProcessMessages;
|
||||
until frame_finished <> 0;
|
||||
PostMessage(Handle, WM_PLAYVIDEO, 0, 0);
|
||||
end;
|
||||
|
||||
end.
|
@ -1,196 +0,0 @@
|
||||
program ffmpeg_sample_player;
|
||||
|
||||
{ .$APPTYPE CONSOLE }
|
||||
{$R *.res}
|
||||
{$i ffmpeg.inc}
|
||||
|
||||
uses
|
||||
Winapi.Windows,
|
||||
System.SysUtils,
|
||||
System.Classes,
|
||||
SDL,
|
||||
ffm.lib,
|
||||
ffm.ctypes,
|
||||
ffm.avformat,
|
||||
ffm.avio,
|
||||
ffm.avutil,
|
||||
ffm.buffer,
|
||||
ffm.dict,
|
||||
ffm.frame,
|
||||
ffm.log,
|
||||
ffm.opt,
|
||||
ffm.pixfmt,
|
||||
ffm.rational,
|
||||
ffm.samplefmt,
|
||||
ffm.parseutils,
|
||||
ffm.swscale,
|
||||
ffm.pixdesc,
|
||||
ffm.imgutils,
|
||||
ffm.mem,
|
||||
ffm.error,
|
||||
ffm.avfilter,
|
||||
ffm.buffersink,
|
||||
ffm.mathematics,
|
||||
ffm.libavcodec.avcodec,
|
||||
ffm.buffersrc,
|
||||
ffm.errno,
|
||||
uResourcePaths;
|
||||
|
||||
var
|
||||
err: Integer;
|
||||
filename: AnsiString;
|
||||
format_context: pAVFormatContext = nil;
|
||||
video_stream: Integer;
|
||||
codec_context: pAVCodecContext;
|
||||
codec: pAVCodec;
|
||||
screen: pSDL_Surface;
|
||||
bmp: pSDL_Overlay;
|
||||
img_convert_context: pSwsContext;
|
||||
frame: pAVFrame;
|
||||
packet: TAVPacket;
|
||||
frame_finished: Integer;
|
||||
pict: TAVPicture;
|
||||
rect: TSDL_Rect;
|
||||
event: TSDL_Event;
|
||||
|
||||
const
|
||||
std_filename = cResourceMedia + 'trailer.avi';
|
||||
|
||||
begin
|
||||
try
|
||||
if (ParamCount < 1) then
|
||||
filename := std_filename
|
||||
else
|
||||
filename := ParamStr(1);
|
||||
|
||||
// Register all available file formats and codecs
|
||||
av_register_all();
|
||||
avformat_network_init();
|
||||
|
||||
// Init SDL with video support
|
||||
err := SDL_Init(SDL_INIT_VIDEO);
|
||||
if (err < 0) then
|
||||
begin
|
||||
WriteLn(Format('Unable to init SDL: %s', [SDL_GetError()]));
|
||||
Halt(1);
|
||||
end;
|
||||
|
||||
// Open video file
|
||||
err := avformat_open_input(format_context, PAnsiChar(filename), nil, nil);
|
||||
if (err < 0) then
|
||||
begin
|
||||
WriteLn('ffmpeg: Unable to open input file');
|
||||
Halt(1);
|
||||
end;
|
||||
|
||||
// Retrieve stream information
|
||||
err := avformat_find_stream_info(format_context, nil);
|
||||
if (err < 0) then
|
||||
begin
|
||||
WriteLn('ffmpeg: Unable to find stream info');
|
||||
Halt(1);
|
||||
end;
|
||||
|
||||
// Dump information about file onto standard error
|
||||
av_dump_format(format_context, 0, PAnsiChar(filename), 0);
|
||||
|
||||
// Find the first video stream
|
||||
for video_stream := 0 to format_context^.nb_streams - 1 do
|
||||
if (format_context^.streams[video_stream]^.codec^.codec_type = AVMEDIA_TYPE_VIDEO) then
|
||||
break;
|
||||
if (video_stream = format_context^.nb_streams) then
|
||||
begin
|
||||
WriteLn('ffmpeg: Unable to find video stream');
|
||||
Halt(1);
|
||||
end;
|
||||
|
||||
codec_context := format_context^.streams[video_stream]^.codec;
|
||||
codec := avcodec_find_decoder(codec_context^.codec_id);
|
||||
err := avcodec_open2(codec_context, codec, nil);
|
||||
if (err < 0) then
|
||||
begin
|
||||
WriteLn('ffmpeg: Unable to open codec');
|
||||
Halt(1);
|
||||
end;
|
||||
|
||||
screen := SDL_SetVideoMode(codec_context^.width, codec_context^.height, 0, 0 { SDL_FULLSCREEN } );
|
||||
if (screen = nil) then
|
||||
begin
|
||||
WriteLn('Couldn''t set video mode');
|
||||
Halt(1);
|
||||
end;
|
||||
|
||||
bmp := SDL_CreateYUVOverlay(codec_context^.width, codec_context^.height, SDL_YV12_OVERLAY, screen);
|
||||
|
||||
img_convert_context := sws_getCachedContext(nil, codec_context^.width, codec_context^.height, codec_context^.pix_fmt, codec_context^.width, codec_context^.height, AV_PIX_FMT_YUV420P, SWS_BICUBIC,
|
||||
nil, nil, nil);
|
||||
if (img_convert_context = nil) then
|
||||
begin
|
||||
WriteLn('Cannot initialize the conversion context');
|
||||
Halt(1);
|
||||
end;
|
||||
|
||||
frame := av_frame_alloc();//avcodec_alloc_frame();
|
||||
while (av_read_frame(format_context, packet) >= 0) do
|
||||
begin
|
||||
if (packet.stream_index = video_stream) then
|
||||
begin
|
||||
// Video stream packet
|
||||
avcodec_decode_video2(codec_context, frame, frame_finished, @packet);
|
||||
|
||||
if (frame_finished <> 0) then
|
||||
begin
|
||||
SDL_LockYUVOverlay(bmp);
|
||||
|
||||
// Convert frame to YV12 pixel format for display in SDL overlay
|
||||
pByte(pict.data[0]) := pByte(bmp^.pixels[0]);
|
||||
pByte(pict.data[1]) := pByte(bmp^.pixels[2]); // it's because YV12
|
||||
pByte(pict.data[2]) := pByte(bmp^.pixels[1]);
|
||||
|
||||
pict.linesize[0] := bmp^.pitches[0];
|
||||
pict.linesize[1] := bmp^.pitches[2];
|
||||
pict.linesize[2] := bmp^.pitches[1];
|
||||
|
||||
sws_scale(img_convert_context, @frame^.data, @frame^.linesize, 0, codec_context^.height, @pict.data, @pict.linesize);
|
||||
|
||||
SDL_UnlockYUVOverlay(bmp);
|
||||
|
||||
rect.x := 0;
|
||||
rect.y := 0;
|
||||
rect.w := codec_context^.width;
|
||||
rect.h := codec_context^.height;
|
||||
SDL_DisplayYUVOverlay(bmp, @rect);
|
||||
end;
|
||||
end;
|
||||
|
||||
// Free the packet that was allocated by av_read_frame
|
||||
av_free_packet(packet);
|
||||
|
||||
// Handling SDL events there
|
||||
if SDL_PollEvent(@event) <> 0 then
|
||||
if (event.type_ = SDL_QUITEV) then
|
||||
break;
|
||||
end;
|
||||
|
||||
sws_freeContext(img_convert_context);
|
||||
|
||||
// Free the YUV frame
|
||||
av_free(frame);
|
||||
|
||||
// Close the codec
|
||||
avcodec_close(codec_context);
|
||||
|
||||
// Close the video file
|
||||
avformat_close_input(format_context);
|
||||
|
||||
avformat_network_deinit;
|
||||
|
||||
// Quit SDL
|
||||
SDL_Quit();
|
||||
|
||||
except
|
||||
on E: Exception do
|
||||
WriteLn(E.ClassName, ': ', E.Message);
|
||||
end;
|
||||
|
||||
end.
|
@ -1,254 +0,0 @@
|
||||
<Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<PropertyGroup>
|
||||
<ProjectGuid>{036F28BC-83B0-471A-B04B-5E5296551153}</ProjectGuid>
|
||||
<MainSource>ffmpeg_sample_player.dpr</MainSource>
|
||||
<Base>True</Base>
|
||||
<Config Condition="'$(Config)'==''">Release</Config>
|
||||
<TargetedPlatforms>1</TargetedPlatforms>
|
||||
<AppType>Console</AppType>
|
||||
<FrameworkType>None</FrameworkType>
|
||||
<ProjectVersion>18.2</ProjectVersion>
|
||||
<Platform Condition="'$(Platform)'==''">Win32</Platform>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Config)'=='Base' or '$(Base)'!=''">
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='Android' and '$(Base)'=='true') or '$(Base_Android)'!=''">
|
||||
<Base_Android>true</Base_Android>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSDevice32' and '$(Base)'=='true') or '$(Base_iOSDevice32)'!=''">
|
||||
<Base_iOSDevice32>true</Base_iOSDevice32>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSDevice64' and '$(Base)'=='true') or '$(Base_iOSDevice64)'!=''">
|
||||
<Base_iOSDevice64>true</Base_iOSDevice64>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSSimulator' and '$(Base)'=='true') or '$(Base_iOSSimulator)'!=''">
|
||||
<Base_iOSSimulator>true</Base_iOSSimulator>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='Win32' and '$(Base)'=='true') or '$(Base_Win32)'!=''">
|
||||
<Base_Win32>true</Base_Win32>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Config)'=='Release' or '$(Cfg_1)'!=''">
|
||||
<Cfg_1>true</Cfg_1>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='Win32' and '$(Cfg_1)'=='true') or '$(Cfg_1_Win32)'!=''">
|
||||
<Cfg_1_Win32>true</Cfg_1_Win32>
|
||||
<CfgParent>Cfg_1</CfgParent>
|
||||
<Cfg_1>true</Cfg_1>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Config)'=='Debug' or '$(Cfg_2)'!=''">
|
||||
<Cfg_2>true</Cfg_2>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSDevice32' and '$(Cfg_2)'=='true') or '$(Cfg_2_iOSDevice32)'!=''">
|
||||
<Cfg_2_iOSDevice32>true</Cfg_2_iOSDevice32>
|
||||
<CfgParent>Cfg_2</CfgParent>
|
||||
<Cfg_2>true</Cfg_2>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSDevice64' and '$(Cfg_2)'=='true') or '$(Cfg_2_iOSDevice64)'!=''">
|
||||
<Cfg_2_iOSDevice64>true</Cfg_2_iOSDevice64>
|
||||
<CfgParent>Cfg_2</CfgParent>
|
||||
<Cfg_2>true</Cfg_2>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSSimulator' and '$(Cfg_2)'=='true') or '$(Cfg_2_iOSSimulator)'!=''">
|
||||
<Cfg_2_iOSSimulator>true</Cfg_2_iOSSimulator>
|
||||
<CfgParent>Cfg_2</CfgParent>
|
||||
<Cfg_2>true</Cfg_2>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='Win32' and '$(Cfg_2)'=='true') or '$(Cfg_2_Win32)'!=''">
|
||||
<Cfg_2_Win32>true</Cfg_2_Win32>
|
||||
<CfgParent>Cfg_2</CfgParent>
|
||||
<Cfg_2>true</Cfg_2>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base)'!=''">
|
||||
<DCC_ExeOutput>..\..\..\bin\$(Platform)</DCC_ExeOutput>
|
||||
<DCC_DcuOutput>.\$(Platform)\$(Config)</DCC_DcuOutput>
|
||||
<Manifest_File>None</Manifest_File>
|
||||
<DCC_E>false</DCC_E>
|
||||
<VerInfo_Locale>1049</VerInfo_Locale>
|
||||
<DCC_Namespace>System;Xml;Data;Datasnap;Web;Soap;$(DCC_Namespace)</DCC_Namespace>
|
||||
<DCC_N>false</DCC_N>
|
||||
<DCC_ImageBase>00400000</DCC_ImageBase>
|
||||
<DCC_K>false</DCC_K>
|
||||
<DCC_S>false</DCC_S>
|
||||
<DCC_F>false</DCC_F>
|
||||
<VerInfo_Keys>CompanyName=;FileDescription=;FileVersion=1.0.0.0;InternalName=;LegalCopyright=;LegalTrademarks=;OriginalFilename=;ProductName=;ProductVersion=1.0.0.0;Comments=;CFBundleName=</VerInfo_Keys>
|
||||
<SanitizedProjectName>ffmpeg_sample_player</SanitizedProjectName>
|
||||
<Icon_MainIcon>$(BDS)\bin\delphi_PROJECTICON.ico</Icon_MainIcon>
|
||||
<Icns_MainIcns>$(BDS)\bin\delphi_PROJECTICNS.icns</Icns_MainIcns>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_Android)'!=''">
|
||||
<VerInfo_Keys>package=com.embarcadero.$(MSBuildProjectName);label=$(MSBuildProjectName);versionCode=1;versionName=1.0.0;persistent=False;restoreAnyVersion=False;installLocation=auto;largeHeap=False;theme=TitleBar;hardwareAccelerated=true;apiKey=</VerInfo_Keys>
|
||||
<BT_BuildType>Debug</BT_BuildType>
|
||||
<Android_LauncherIcon36>$(BDS)\bin\Artwork\Android\FM_LauncherIcon_36x36.png</Android_LauncherIcon36>
|
||||
<Android_LauncherIcon48>$(BDS)\bin\Artwork\Android\FM_LauncherIcon_48x48.png</Android_LauncherIcon48>
|
||||
<Android_LauncherIcon72>$(BDS)\bin\Artwork\Android\FM_LauncherIcon_72x72.png</Android_LauncherIcon72>
|
||||
<Android_LauncherIcon96>$(BDS)\bin\Artwork\Android\FM_LauncherIcon_96x96.png</Android_LauncherIcon96>
|
||||
<Android_LauncherIcon144>$(BDS)\bin\Artwork\Android\FM_LauncherIcon_144x144.png</Android_LauncherIcon144>
|
||||
<Android_SplashImage426>$(BDS)\bin\Artwork\Android\FM_SplashImage_426x320.png</Android_SplashImage426>
|
||||
<Android_SplashImage470>$(BDS)\bin\Artwork\Android\FM_SplashImage_470x320.png</Android_SplashImage470>
|
||||
<Android_SplashImage640>$(BDS)\bin\Artwork\Android\FM_SplashImage_640x480.png</Android_SplashImage640>
|
||||
<Android_SplashImage960>$(BDS)\bin\Artwork\Android\FM_SplashImage_960x720.png</Android_SplashImage960>
|
||||
<AUP_ACCESS_COARSE_LOCATION>true</AUP_ACCESS_COARSE_LOCATION>
|
||||
<AUP_ACCESS_FINE_LOCATION>true</AUP_ACCESS_FINE_LOCATION>
|
||||
<AUP_CALL_PHONE>true</AUP_CALL_PHONE>
|
||||
<AUP_CAMERA>true</AUP_CAMERA>
|
||||
<AUP_INTERNET>true</AUP_INTERNET>
|
||||
<AUP_READ_CALENDAR>true</AUP_READ_CALENDAR>
|
||||
<AUP_READ_EXTERNAL_STORAGE>true</AUP_READ_EXTERNAL_STORAGE>
|
||||
<AUP_WRITE_CALENDAR>true</AUP_WRITE_CALENDAR>
|
||||
<AUP_WRITE_EXTERNAL_STORAGE>true</AUP_WRITE_EXTERNAL_STORAGE>
|
||||
<AUP_READ_PHONE_STATE>true</AUP_READ_PHONE_STATE>
|
||||
<EnabledSysJars>android-support-v4.dex.jar;cloud-messaging.dex.jar;fmx.dex.jar;google-analytics-v2.dex.jar;google-play-billing.dex.jar;google-play-licensing.dex.jar;google-play-services.dex.jar</EnabledSysJars>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_iOSDevice32)'!=''">
|
||||
<VerInfo_Keys>CFBundleName=$(MSBuildProjectName);CFBundleDevelopmentRegion=en;CFBundleDisplayName=$(MSBuildProjectName);CFBundleIdentifier=$(MSBuildProjectName);CFBundleInfoDictionaryVersion=7.1;CFBundleVersion=1.0.0.0;CFBundlePackageType=APPL;CFBundleSignature=????;LSRequiresIPhoneOS=true;CFBundleAllowMixedLocalizations=YES;CFBundleExecutable=$(MSBuildProjectName);UIDeviceFamily=iPhone & iPad;CFBundleResourceSpecification=ResourceRules.plist;NSLocationAlwaysUsageDescription=The reason for accessing the location information of the user;NSLocationWhenInUseUsageDescription=The reason for accessing the location information of the user;FMLocalNotificationPermission=false;UIBackgroundModes=;NSContactsUsageDescription=The reason for accessing the contacts;NSPhotoLibraryUsageDescription=The reason for accessing the photo library;NSCameraUsageDescription=The reason for accessing the camera</VerInfo_Keys>
|
||||
<VerInfo_UIDeviceFamily>iPhoneAndiPad</VerInfo_UIDeviceFamily>
|
||||
<VerInfo_IncludeVerInfo>true</VerInfo_IncludeVerInfo>
|
||||
<BT_BuildType>Debug</BT_BuildType>
|
||||
<VerInfo_BundleId>$(MSBuildProjectName)</VerInfo_BundleId>
|
||||
<iPhone_AppIcon60>$(BDS)\bin\Artwork\iOS\iPhone\FM_ApplicationIcon_60x60.png</iPhone_AppIcon60>
|
||||
<iPhone_AppIcon120>$(BDS)\bin\Artwork\iOS\iPhone\FM_ApplicationIcon_120x120.png</iPhone_AppIcon120>
|
||||
<iPhone_Spotlight40>$(BDS)\bin\Artwork\iOS\iPhone\FM_SpotlightSearchIcon_40x40.png</iPhone_Spotlight40>
|
||||
<iPhone_Spotlight80>$(BDS)\bin\Artwork\iOS\iPhone\FM_SpotlightSearchIcon_80x80.png</iPhone_Spotlight80>
|
||||
<iPad_SpotLight40>$(BDS)\bin\Artwork\iOS\iPad\FM_SpotlightSearchIcon_40x40.png</iPad_SpotLight40>
|
||||
<iPad_SpotLight80>$(BDS)\bin\Artwork\iOS\iPad\FM_SpotlightSearchIcon_80x80.png</iPad_SpotLight80>
|
||||
<iPad_AppIcon76>$(BDS)\bin\Artwork\iOS\iPad\FM_ApplicationIcon_76x76.png</iPad_AppIcon76>
|
||||
<iPad_AppIcon152>$(BDS)\bin\Artwork\iOS\iPad\FM_ApplicationIcon_152x152.png</iPad_AppIcon152>
|
||||
<iPad_Launch768x1024>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImagePortrait_768x1024.png</iPad_Launch768x1024>
|
||||
<iPad_Launch1024x768>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImageLandscape_1024x768.png</iPad_Launch1024x768>
|
||||
<iPad_Launch1536x2048>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImagePortrait_1536x2048.png</iPad_Launch1536x2048>
|
||||
<iPad_Launch2048x1536>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImageLandscape_2048x1536.png</iPad_Launch2048x1536>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_iOSDevice64)'!=''">
|
||||
<VerInfo_Keys>CFBundleName=$(MSBuildProjectName);CFBundleDevelopmentRegion=en;CFBundleDisplayName=$(MSBuildProjectName);CFBundleIdentifier=$(MSBuildProjectName);CFBundleInfoDictionaryVersion=7.1;CFBundleVersion=1.0.0.0;CFBundlePackageType=APPL;CFBundleSignature=????;LSRequiresIPhoneOS=true;CFBundleAllowMixedLocalizations=YES;CFBundleExecutable=$(MSBuildProjectName);UIDeviceFamily=iPhone & iPad;CFBundleResourceSpecification=ResourceRules.plist;NSLocationAlwaysUsageDescription=The reason for accessing the location information of the user;NSLocationWhenInUseUsageDescription=The reason for accessing the location information of the user;FMLocalNotificationPermission=false;UIBackgroundModes=;NSContactsUsageDescription=The reason for accessing the contacts;NSPhotoLibraryUsageDescription=The reason for accessing the photo library;NSCameraUsageDescription=The reason for accessing the camera</VerInfo_Keys>
|
||||
<VerInfo_UIDeviceFamily>iPhoneAndiPad</VerInfo_UIDeviceFamily>
|
||||
<VerInfo_IncludeVerInfo>true</VerInfo_IncludeVerInfo>
|
||||
<BT_BuildType>Debug</BT_BuildType>
|
||||
<VerInfo_BundleId>$(MSBuildProjectName)</VerInfo_BundleId>
|
||||
<iPhone_AppIcon60>$(BDS)\bin\Artwork\iOS\iPhone\FM_ApplicationIcon_60x60.png</iPhone_AppIcon60>
|
||||
<iPhone_AppIcon120>$(BDS)\bin\Artwork\iOS\iPhone\FM_ApplicationIcon_120x120.png</iPhone_AppIcon120>
|
||||
<iPhone_Spotlight40>$(BDS)\bin\Artwork\iOS\iPhone\FM_SpotlightSearchIcon_40x40.png</iPhone_Spotlight40>
|
||||
<iPhone_Spotlight80>$(BDS)\bin\Artwork\iOS\iPhone\FM_SpotlightSearchIcon_80x80.png</iPhone_Spotlight80>
|
||||
<iPad_SpotLight40>$(BDS)\bin\Artwork\iOS\iPad\FM_SpotlightSearchIcon_40x40.png</iPad_SpotLight40>
|
||||
<iPad_SpotLight80>$(BDS)\bin\Artwork\iOS\iPad\FM_SpotlightSearchIcon_80x80.png</iPad_SpotLight80>
|
||||
<iPad_AppIcon76>$(BDS)\bin\Artwork\iOS\iPad\FM_ApplicationIcon_76x76.png</iPad_AppIcon76>
|
||||
<iPad_AppIcon152>$(BDS)\bin\Artwork\iOS\iPad\FM_ApplicationIcon_152x152.png</iPad_AppIcon152>
|
||||
<iPad_Launch768x1024>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImagePortrait_768x1024.png</iPad_Launch768x1024>
|
||||
<iPad_Launch1024x768>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImageLandscape_1024x768.png</iPad_Launch1024x768>
|
||||
<iPad_Launch1536x2048>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImagePortrait_1536x2048.png</iPad_Launch1536x2048>
|
||||
<iPad_Launch2048x1536>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImageLandscape_2048x1536.png</iPad_Launch2048x1536>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_iOSSimulator)'!=''">
|
||||
<VerInfo_Keys>CFBundleName=$(MSBuildProjectName);CFBundleDevelopmentRegion=en;CFBundleDisplayName=$(MSBuildProjectName);CFBundleIdentifier=$(MSBuildProjectName);CFBundleInfoDictionaryVersion=7.1;CFBundleVersion=1.0.0.0;CFBundlePackageType=APPL;CFBundleSignature=????;LSRequiresIPhoneOS=true;CFBundleAllowMixedLocalizations=YES;CFBundleExecutable=$(MSBuildProjectName);UIDeviceFamily=iPhone & iPad;CFBundleResourceSpecification=ResourceRules.plist;NSLocationAlwaysUsageDescription=The reason for accessing the location information of the user;NSLocationWhenInUseUsageDescription=The reason for accessing the location information of the user;FMLocalNotificationPermission=false;UIBackgroundModes=;NSContactsUsageDescription=The reason for accessing the contacts;NSPhotoLibraryUsageDescription=The reason for accessing the photo library;NSCameraUsageDescription=The reason for accessing the camera</VerInfo_Keys>
|
||||
<VerInfo_UIDeviceFamily>iPhoneAndiPad</VerInfo_UIDeviceFamily>
|
||||
<VerInfo_IncludeVerInfo>true</VerInfo_IncludeVerInfo>
|
||||
<iPhone_AppIcon60>$(BDS)\bin\Artwork\iOS\iPhone\FM_ApplicationIcon_60x60.png</iPhone_AppIcon60>
|
||||
<iPhone_AppIcon120>$(BDS)\bin\Artwork\iOS\iPhone\FM_ApplicationIcon_120x120.png</iPhone_AppIcon120>
|
||||
<iPhone_Spotlight40>$(BDS)\bin\Artwork\iOS\iPhone\FM_SpotlightSearchIcon_40x40.png</iPhone_Spotlight40>
|
||||
<iPhone_Spotlight80>$(BDS)\bin\Artwork\iOS\iPhone\FM_SpotlightSearchIcon_80x80.png</iPhone_Spotlight80>
|
||||
<iPad_SpotLight40>$(BDS)\bin\Artwork\iOS\iPad\FM_SpotlightSearchIcon_40x40.png</iPad_SpotLight40>
|
||||
<iPad_SpotLight80>$(BDS)\bin\Artwork\iOS\iPad\FM_SpotlightSearchIcon_80x80.png</iPad_SpotLight80>
|
||||
<iPad_AppIcon76>$(BDS)\bin\Artwork\iOS\iPad\FM_ApplicationIcon_76x76.png</iPad_AppIcon76>
|
||||
<iPad_AppIcon152>$(BDS)\bin\Artwork\iOS\iPad\FM_ApplicationIcon_152x152.png</iPad_AppIcon152>
|
||||
<iPad_Launch768x1024>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImagePortrait_768x1024.png</iPad_Launch768x1024>
|
||||
<iPad_Launch1024x768>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImageLandscape_1024x768.png</iPad_Launch1024x768>
|
||||
<iPad_Launch1536x2048>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImagePortrait_1536x2048.png</iPad_Launch1536x2048>
|
||||
<iPad_Launch2048x1536>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImageLandscape_2048x1536.png</iPad_Launch2048x1536>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_Win32)'!=''">
|
||||
<DCC_Namespace>Winapi;System.Win;Data.Win;Datasnap.Win;Web.Win;Soap.Win;Xml.Win;Bde;$(DCC_Namespace)</DCC_Namespace>
|
||||
<VerInfo_Locale>1033</VerInfo_Locale>
|
||||
<VerInfo_Keys>CompanyName=;FileVersion=1.0.0.0;InternalName=;LegalCopyright=;LegalTrademarks=;OriginalFilename=;ProductVersion=1.0.0.0;Comments=;ProgramID=com.embarcadero.$(MSBuildProjectName);FileDescription=$(MSBuildProjectName);ProductName=$(MSBuildProjectName)</VerInfo_Keys>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_1)'!=''">
|
||||
<DCC_Define>RELEASE;$(DCC_Define)</DCC_Define>
|
||||
<DCC_DebugInformation>0</DCC_DebugInformation>
|
||||
<DCC_LocalDebugSymbols>false</DCC_LocalDebugSymbols>
|
||||
<DCC_SymbolReferenceInfo>0</DCC_SymbolReferenceInfo>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_1_Win32)'!=''">
|
||||
<VerInfo_Keys>CompanyName=;FileVersion=1.0.0.0;InternalName=;LegalCopyright=;LegalTrademarks=;OriginalFilename=;ProductVersion=1.0.0.0;Comments=;ProgramID=com.embarcadero.$(MSBuildProjectName);FileDescription=$(MSBuildProjectName);ProductName=$(MSBuildProjectName)</VerInfo_Keys>
|
||||
<VerInfo_Locale>1033</VerInfo_Locale>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_2)'!=''">
|
||||
<DCC_GenerateStackFrames>true</DCC_GenerateStackFrames>
|
||||
<DCC_Define>DEBUG;$(DCC_Define)</DCC_Define>
|
||||
<DCC_Optimize>false</DCC_Optimize>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_2_iOSDevice32)'!=''">
|
||||
<DCC_RemoteDebug>true</DCC_RemoteDebug>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_2_iOSDevice64)'!=''">
|
||||
<DCC_RemoteDebug>true</DCC_RemoteDebug>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_2_iOSSimulator)'!=''">
|
||||
<DCC_RemoteDebug>true</DCC_RemoteDebug>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_2_Win32)'!=''">
|
||||
<VerInfo_Locale>1033</VerInfo_Locale>
|
||||
<VerInfo_Keys>CompanyName=;FileVersion=1.0.0.0;InternalName=;LegalCopyright=;LegalTrademarks=;OriginalFilename=;ProductVersion=1.0.0.0;Comments=;ProgramID=com.embarcadero.$(MSBuildProjectName);FileDescription=$(MSBuildProjectName);ProductName=$(MSBuildProjectName)</VerInfo_Keys>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<DelphiCompile Include="$(MainSource)">
|
||||
<MainSource>MainSource</MainSource>
|
||||
</DelphiCompile>
|
||||
<BuildConfiguration Include="Debug">
|
||||
<Key>Cfg_2</Key>
|
||||
<CfgParent>Base</CfgParent>
|
||||
</BuildConfiguration>
|
||||
<BuildConfiguration Include="Base">
|
||||
<Key>Base</Key>
|
||||
</BuildConfiguration>
|
||||
<BuildConfiguration Include="Release">
|
||||
<Key>Cfg_1</Key>
|
||||
<CfgParent>Base</CfgParent>
|
||||
</BuildConfiguration>
|
||||
</ItemGroup>
|
||||
<ProjectExtensions>
|
||||
<Borland.Personality>Delphi.Personality.12</Borland.Personality>
|
||||
<Borland.ProjectType/>
|
||||
<BorlandProject>
|
||||
<Delphi.Personality>
|
||||
<Source>
|
||||
<Source Name="MainSource">ffmpeg_sample_player.dpr</Source>
|
||||
</Source>
|
||||
<Excluded_Packages>
|
||||
<Excluded_Packages Name="$(BDSBIN)\dcloffice2k200.bpl">Microsoft Office 2000 Sample Automation Server Wrapper Components</Excluded_Packages>
|
||||
<Excluded_Packages Name="$(BDSBIN)\dclofficexp200.bpl">Microsoft Office XP Sample Automation Server Wrapper Components</Excluded_Packages>
|
||||
</Excluded_Packages>
|
||||
</Delphi.Personality>
|
||||
<Platforms>
|
||||
<Platform value="Android">False</Platform>
|
||||
<Platform value="iOSDevice32">False</Platform>
|
||||
<Platform value="iOSDevice64">False</Platform>
|
||||
<Platform value="iOSSimulator">False</Platform>
|
||||
<Platform value="Linux64">False</Platform>
|
||||
<Platform value="OSX32">False</Platform>
|
||||
<Platform value="Win32">True</Platform>
|
||||
<Platform value="Win64">False</Platform>
|
||||
</Platforms>
|
||||
</BorlandProject>
|
||||
<ProjectFileVersion>12</ProjectFileVersion>
|
||||
</ProjectExtensions>
|
||||
<Import Project="$(BDS)\Bin\CodeGear.Delphi.Targets" Condition="Exists('$(BDS)\Bin\CodeGear.Delphi.Targets')"/>
|
||||
<Import Project="$(APPDATA)\Embarcadero\$(BDSAPPDATABASEDIR)\$(PRODUCTVERSION)\UserTools.proj" Condition="Exists('$(APPDATA)\Embarcadero\$(BDSAPPDATABASEDIR)\$(PRODUCTVERSION)\UserTools.proj')"/>
|
||||
</Project>
|
Binary file not shown.
@ -1,304 +0,0 @@
|
||||
program filtering_video;
|
||||
|
||||
{$APPTYPE CONSOLE}
|
||||
{$R *.res}
|
||||
{$i ffmpeg.inc}
|
||||
|
||||
uses
|
||||
Winapi.Windows,
|
||||
System.SysUtils,
|
||||
ffm.lib,
|
||||
ffm.ctypes,
|
||||
ffm.avformat,
|
||||
ffm.avio,
|
||||
ffm.avutil,
|
||||
ffm.buffer,
|
||||
ffm.dict,
|
||||
ffm.frame,
|
||||
ffm.log,
|
||||
ffm.opt,
|
||||
ffm.pixfmt,
|
||||
ffm.rational,
|
||||
ffm.samplefmt,
|
||||
ffm.parseutils,
|
||||
ffm.swscale,
|
||||
ffm.pixdesc,
|
||||
ffm.imgutils,
|
||||
ffm.mem,
|
||||
ffm.error,
|
||||
ffm.avfilter,
|
||||
ffm.buffersink,
|
||||
ffm.mathematics,
|
||||
ffm.libavcodec.avcodec,
|
||||
ffm.buffersrc,
|
||||
ffm.errno,
|
||||
uResourcePaths;
|
||||
|
||||
const
|
||||
_XOPEN_SOURCE = 600; (* for usleep *)
|
||||
std_filename = cResourceMedia + 'trailer.avi';
|
||||
|
||||
Var
|
||||
filter_descr: pAnsiChar = 'scale=78:24';
|
||||
fmt_ctx: pAVFormatContext = nil;
|
||||
dec_ctx: pAVCodecContext = nil;
|
||||
buffersink_ctx: pAVFilterContext = nil;
|
||||
buffersrc_ctx: pAVFilterContext = nil;
|
||||
filter_graph: pAVFilterGraph = nil;
|
||||
video_stream_index: Integer = -1;
|
||||
last_pts: int64_t = AV_NOPTS_VALUE;
|
||||
|
||||
function open_input_file(const filename: pAnsiChar): Integer;
|
||||
Var
|
||||
ret: Integer;
|
||||
dec: pAVCodec;
|
||||
begin
|
||||
ret := avformat_open_input(fmt_ctx, filename, nil, nil);
|
||||
if (ret < 0) then
|
||||
begin
|
||||
// av_log(nil, AV_LOG_ERROR, 'Cannot open input file\n');
|
||||
Exit(ret);
|
||||
end;
|
||||
|
||||
ret := avformat_find_stream_info(fmt_ctx, nil);
|
||||
if (ret < 0) then
|
||||
begin
|
||||
// av_log(nil, AV_LOG_ERROR, 'Cannot find stream information\n');
|
||||
Exit(ret);
|
||||
end;
|
||||
|
||||
(* select the video stream *)
|
||||
ret := av_find_best_stream(fmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, dec, 0);
|
||||
if (ret < 0) then
|
||||
begin
|
||||
// av_log(nil, AV_LOG_ERROR, 'Cannot find a video stream in the input file\n');
|
||||
Exit(ret);
|
||||
end;
|
||||
video_stream_index := ret;
|
||||
dec_ctx := fmt_ctx^.streams[video_stream_index]^.codec;
|
||||
|
||||
(* init the video decoder *)
|
||||
ret := avcodec_open2(dec_ctx, dec, nil);
|
||||
if (ret < 0) then
|
||||
begin
|
||||
// av_log(nil, AV_LOG_ERROR, 'Cannot open video decoder\n');
|
||||
Exit(ret);
|
||||
end;
|
||||
|
||||
Result := 0;
|
||||
end;
|
||||
|
||||
function init_filters(const filters_descr: pAnsiChar): Integer;
|
||||
Var
|
||||
args: AnsiString;
|
||||
ret: Integer;
|
||||
buffersrc: pAVFilter;
|
||||
buffersink: pAVFilter;
|
||||
outputs: pAVFilterInOut;
|
||||
inputs: pAVFilterInOut;
|
||||
pix_fmts: array [0 .. 1] of TAVPixelFormat;
|
||||
buffersink_params: pAVBufferSinkParams;
|
||||
begin
|
||||
buffersrc := avfilter_get_by_name('buffer');
|
||||
buffersink := avfilter_get_by_name('buffersink');
|
||||
outputs := avfilter_inout_alloc();
|
||||
inputs := avfilter_inout_alloc();
|
||||
pix_fmts[0] := AV_PIX_FMT_GRAY8;
|
||||
pix_fmts[1] := AV_PIX_FMT_NONE;
|
||||
|
||||
filter_graph := avfilter_graph_alloc();
|
||||
|
||||
(* buffer video source: the decoded frames from the decoder will be inserted here. *)
|
||||
args := Format('video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d', [ //
|
||||
dec_ctx^.width, //
|
||||
dec_ctx^.height, //
|
||||
Integer(dec_ctx^.pix_fmt), //
|
||||
dec_ctx^.time_base.num, //
|
||||
dec_ctx^.time_base.den, //
|
||||
dec_ctx^.sample_aspect_ratio.num, //
|
||||
dec_ctx^.sample_aspect_ratio.den //
|
||||
]);
|
||||
|
||||
ret := avfilter_graph_create_filter(buffersrc_ctx, buffersrc, 'in', pAnsiChar(args), nil, filter_graph);
|
||||
if (ret < 0) then
|
||||
begin
|
||||
// av_log(nil, AV_LOG_ERROR, 'Cannot create buffer source\n');
|
||||
Exit(ret);
|
||||
end;
|
||||
|
||||
(* buffer video sink: to terminate the filter chain. *)
|
||||
buffersink_params := av_buffersink_params_alloc();
|
||||
buffersink_params^.pixel_fmts := @pix_fmts;
|
||||
ret := avfilter_graph_create_filter(buffersink_ctx, buffersink, 'out', nil, buffersink_params, filter_graph);
|
||||
av_free(buffersink_params);
|
||||
if (ret < 0) then
|
||||
begin
|
||||
// av_log(nil, AV_LOG_ERROR, 'Cannot create buffer sink\n');
|
||||
Exit(ret);
|
||||
end;
|
||||
|
||||
(* Endpoints for the filter graph. *)
|
||||
outputs^.name := av_strdup('in');
|
||||
outputs^.filter_ctx := buffersrc_ctx;
|
||||
outputs^.pad_idx := 0;
|
||||
outputs^.next := nil;
|
||||
|
||||
inputs^.name := av_strdup('out');
|
||||
inputs^.filter_ctx := buffersink_ctx;
|
||||
inputs^.pad_idx := 0;
|
||||
inputs^.next := nil;
|
||||
|
||||
ret := avfilter_graph_parse_ptr(filter_graph, filters_descr, inputs, outputs, nil);
|
||||
if (ret < 0) then
|
||||
Exit(ret);
|
||||
|
||||
ret := avfilter_graph_config(filter_graph, nil);
|
||||
if (ret < 0) then
|
||||
Exit(ret);
|
||||
Result := 0;
|
||||
end;
|
||||
|
||||
procedure display_frame(const vframe: pAVFrame; time_base: TAVRational);
|
||||
Const
|
||||
ds: array[0..4] of char = ' .-+#';
|
||||
Var
|
||||
x, y: Integer;
|
||||
p0, p: pByte;
|
||||
delay: int64;
|
||||
hConsole:THandle;
|
||||
coordScreen:TCOORD;
|
||||
begin
|
||||
if (vframe^.pts <> AV_NOPTS_VALUE) then
|
||||
begin
|
||||
if (last_pts <> AV_NOPTS_VALUE) then
|
||||
begin
|
||||
(* sleep roughly the right amount of time;
|
||||
* usleep is in microseconds, just like AV_TIME_BASE. *)
|
||||
delay := av_rescale_q(vframe^.pts - last_pts, time_base, AV_TIME_BASE_Q);
|
||||
if (delay > 0) and (delay < 1000000) then
sleep(delay div 1000); // delay is in AV_TIME_BASE units (microseconds); Delphi's Sleep expects milliseconds
|
||||
end;
|
||||
last_pts := vframe^.pts;
|
||||
end;
|
||||
|
||||
(* Trivial ASCII grayscale display. *)
|
||||
p0 := vframe^.data[0];
|
||||
// Write(#33);
|
||||
hConsole := GetStdHandle(STD_OUTPUT_HANDLE);
|
||||
FillChar(coordScreen,SizeOf(coordScreen),0);
|
||||
SetConsoleCursorPosition(hConsole, coordScreen);
|
||||
for y := 0 to vframe^.height - 1 do
|
||||
begin
|
||||
p := p0;
|
||||
for x := 0 to vframe^.width - 1 do
|
||||
begin
|
||||
Write(ds[p^ div 52]);
|
||||
Inc(p);
|
||||
end;
|
||||
Writeln;
|
||||
p0 := p0 + vframe^.linesize[0];
|
||||
end;
|
||||
end;
|
||||
|
||||
Var
|
||||
ret: Integer;
|
||||
packet: TAVPacket;
|
||||
vframe: pAVFrame = nil;
|
||||
filt_frame: pAVFrame = nil;
|
||||
got_frame: Integer;
|
||||
filename: AnsiString;
|
||||
buf: array [0 .. 1023] of ansichar;
|
||||
|
||||
begin
|
||||
try
|
||||
vframe := av_frame_alloc();
|
||||
filt_frame := av_frame_alloc();
|
||||
|
||||
if (not Assigned(vframe)) or (not Assigned(filt_frame)) then
|
||||
begin
|
||||
Writeln('Could not allocate frame');
|
||||
Halt(1);
|
||||
end;
|
||||
if (ParamCount < 1) then
|
||||
filename := std_filename
|
||||
else
|
||||
filename := ParamStr(1);
|
||||
|
||||
avcodec_register_all();
|
||||
av_register_all();
|
||||
avfilter_register_all();
|
||||
avformat_network_init;
|
||||
try
|
||||
ret := open_input_file(pAnsiChar(filename));
|
||||
if (ret < 0) then
|
||||
Halt(1);
|
||||
ret := init_filters(pAnsiChar(filter_descr));
|
||||
if (ret < 0) then
|
||||
Halt(1);
|
||||
|
||||
(* read all packets *)
|
||||
while True do
|
||||
begin
|
||||
ret := av_read_frame(fmt_ctx, packet);
|
||||
if (ret < 0) then
|
||||
break;
|
||||
|
||||
if (packet.stream_index = video_stream_index) then
|
||||
begin
|
||||
// avcodec_get_frame_defaults(vframe);
|
||||
got_frame := 0;
|
||||
ret := avcodec_decode_video2(dec_ctx, vframe, got_frame, @packet);
|
||||
if (ret < 0) then
|
||||
begin
|
||||
// av_log(nil, AV_LOG_ERROR, 'Error decoding video');
|
||||
break;
|
||||
end;
|
||||
|
||||
if got_frame <> 0 then
|
||||
begin
|
||||
vframe^.pts := av_frame_get_best_effort_timestamp(vframe);
|
||||
|
||||
(* push the decoded frame into the filtergraph *)
|
||||
if (av_buffersrc_add_frame_flags(buffersrc_ctx, vframe, AV_BUFFERSRC_FLAG_KEEP_REF) < 0) then
|
||||
begin
|
||||
// av_log(nil, AV_LOG_ERROR, 'Error while feeding the filtergraph\n');
|
||||
break;
|
||||
end;
|
||||
|
||||
(* pull filtered frames from the filtergraph *)
|
||||
while True do
|
||||
begin
|
||||
ret := av_buffersink_get_frame(buffersink_ctx, filt_frame);
|
||||
if (ret = AVERROR(EAGAIN)) or (ret = AVERROR_EOF) then
|
||||
break;
|
||||
if (ret < 0) then
|
||||
Halt(1);
|
||||
display_frame(filt_frame, buffersink_ctx^.inputs[0]^.time_base);
|
||||
av_frame_unref(filt_frame);
|
||||
end;
|
||||
end;
|
||||
end;
|
||||
av_free_packet(packet);
|
||||
end;
|
||||
finally
|
||||
avformat_network_deinit;
|
||||
avfilter_graph_free(filter_graph);
|
||||
if Assigned(dec_ctx) then
|
||||
avcodec_close(dec_ctx);
|
||||
avformat_close_input(fmt_ctx);
|
||||
av_frame_free(vframe);
|
||||
av_frame_free(filt_frame);
|
||||
|
||||
if (ret < 0) and (ret <> AVERROR_EOF) then
|
||||
begin
|
||||
av_strerror(ret, buf, sizeof(buf));
|
||||
Writeln('Error occurred: ', buf);
|
||||
// Halt(1);
|
||||
end;
|
||||
end;
|
||||
except
|
||||
on E: Exception do
|
||||
Writeln(E.ClassName, ': ', E.Message);
|
||||
end;
|
||||
|
||||
end.
|
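Note on the deleted example above: it relies on the old decode calls (avcodec_decode_video2, av_free_packet), which newer FFmpeg versions deprecate in favour of the send/receive API. A minimal sketch of that replacement loop follows; avcodec_send_packet, avcodec_receive_frame and av_packet_unref exist in the FFmpeg C API, but their exact Pascal signatures are assumed here, not taken from this commit.

// Sketch only (assumed Pascal bindings for the FFmpeg >= 3.1 send/receive API).
while av_read_frame(fmt_ctx, packet) >= 0 do
begin
  if packet.stream_index = video_stream_index then
    if avcodec_send_packet(dec_ctx, @packet) >= 0 then
      while avcodec_receive_frame(dec_ctx, vframe) >= 0 do
      begin
        // feed the decoded frame into the filter graph exactly as in the example
        av_buffersrc_add_frame_flags(buffersrc_ctx, vframe, AV_BUFFERSRC_FLAG_KEEP_REF);
        av_frame_unref(vframe);
      end;
  av_packet_unref(packet); // replaces the deprecated av_free_packet
end;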
@ -1,244 +0,0 @@
|
||||
<Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<PropertyGroup>
|
||||
<ProjectGuid>{79D5337D-614C-4BD3-8188-3A98A34FF744}</ProjectGuid>
|
||||
<MainSource>filtering_video.dpr</MainSource>
|
||||
<Base>True</Base>
|
||||
<Config Condition="'$(Config)'==''">Release</Config>
|
||||
<TargetedPlatforms>1</TargetedPlatforms>
|
||||
<AppType>Console</AppType>
|
||||
<FrameworkType>None</FrameworkType>
|
||||
<ProjectVersion>18.2</ProjectVersion>
|
||||
<Platform Condition="'$(Platform)'==''">Win32</Platform>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Config)'=='Base' or '$(Base)'!=''">
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='Android' and '$(Base)'=='true') or '$(Base_Android)'!=''">
|
||||
<Base_Android>true</Base_Android>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSDevice32' and '$(Base)'=='true') or '$(Base_iOSDevice32)'!=''">
|
||||
<Base_iOSDevice32>true</Base_iOSDevice32>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSDevice64' and '$(Base)'=='true') or '$(Base_iOSDevice64)'!=''">
|
||||
<Base_iOSDevice64>true</Base_iOSDevice64>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSSimulator' and '$(Base)'=='true') or '$(Base_iOSSimulator)'!=''">
|
||||
<Base_iOSSimulator>true</Base_iOSSimulator>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='Win32' and '$(Base)'=='true') or '$(Base_Win32)'!=''">
|
||||
<Base_Win32>true</Base_Win32>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Config)'=='Release' or '$(Cfg_1)'!=''">
|
||||
<Cfg_1>true</Cfg_1>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='Win32' and '$(Cfg_1)'=='true') or '$(Cfg_1_Win32)'!=''">
|
||||
<Cfg_1_Win32>true</Cfg_1_Win32>
|
||||
<CfgParent>Cfg_1</CfgParent>
|
||||
<Cfg_1>true</Cfg_1>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Config)'=='Debug' or '$(Cfg_2)'!=''">
|
||||
<Cfg_2>true</Cfg_2>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSDevice32' and '$(Cfg_2)'=='true') or '$(Cfg_2_iOSDevice32)'!=''">
|
||||
<Cfg_2_iOSDevice32>true</Cfg_2_iOSDevice32>
|
||||
<CfgParent>Cfg_2</CfgParent>
|
||||
<Cfg_2>true</Cfg_2>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSDevice64' and '$(Cfg_2)'=='true') or '$(Cfg_2_iOSDevice64)'!=''">
|
||||
<Cfg_2_iOSDevice64>true</Cfg_2_iOSDevice64>
|
||||
<CfgParent>Cfg_2</CfgParent>
|
||||
<Cfg_2>true</Cfg_2>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSSimulator' and '$(Cfg_2)'=='true') or '$(Cfg_2_iOSSimulator)'!=''">
|
||||
<Cfg_2_iOSSimulator>true</Cfg_2_iOSSimulator>
|
||||
<CfgParent>Cfg_2</CfgParent>
|
||||
<Cfg_2>true</Cfg_2>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base)'!=''">
|
||||
<DCC_E>false</DCC_E>
|
||||
<VerInfo_Locale>1049</VerInfo_Locale>
|
||||
<DCC_Namespace>System;Xml;Data;Datasnap;Web;Soap;$(DCC_Namespace)</DCC_Namespace>
|
||||
<DCC_N>false</DCC_N>
|
||||
<DCC_ImageBase>00400000</DCC_ImageBase>
|
||||
<DCC_K>false</DCC_K>
|
||||
<DCC_S>false</DCC_S>
|
||||
<DCC_F>false</DCC_F>
|
||||
<VerInfo_Keys>CompanyName=;FileDescription=;FileVersion=1.0.0.0;InternalName=;LegalCopyright=;LegalTrademarks=;OriginalFilename=;ProductName=;ProductVersion=1.0.0.0;Comments=;CFBundleName=</VerInfo_Keys>
|
||||
<SanitizedProjectName>filtering_video</SanitizedProjectName>
|
||||
<Icon_MainIcon>$(BDS)\bin\delphi_PROJECTICON.ico</Icon_MainIcon>
|
||||
<Icns_MainIcns>$(BDS)\bin\delphi_PROJECTICNS.icns</Icns_MainIcns>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_Android)'!=''">
|
||||
<VerInfo_Keys>package=com.embarcadero.$(MSBuildProjectName);label=$(MSBuildProjectName);versionCode=1;versionName=1.0.0;persistent=False;restoreAnyVersion=False;installLocation=auto;largeHeap=False;theme=TitleBar;hardwareAccelerated=true;apiKey=</VerInfo_Keys>
|
||||
<BT_BuildType>Debug</BT_BuildType>
|
||||
<Android_LauncherIcon36>$(BDS)\bin\Artwork\Android\FM_LauncherIcon_36x36.png</Android_LauncherIcon36>
|
||||
<Android_LauncherIcon48>$(BDS)\bin\Artwork\Android\FM_LauncherIcon_48x48.png</Android_LauncherIcon48>
|
||||
<Android_LauncherIcon72>$(BDS)\bin\Artwork\Android\FM_LauncherIcon_72x72.png</Android_LauncherIcon72>
|
||||
<Android_LauncherIcon96>$(BDS)\bin\Artwork\Android\FM_LauncherIcon_96x96.png</Android_LauncherIcon96>
|
||||
<Android_LauncherIcon144>$(BDS)\bin\Artwork\Android\FM_LauncherIcon_144x144.png</Android_LauncherIcon144>
|
||||
<Android_SplashImage426>$(BDS)\bin\Artwork\Android\FM_SplashImage_426x320.png</Android_SplashImage426>
|
||||
<Android_SplashImage470>$(BDS)\bin\Artwork\Android\FM_SplashImage_470x320.png</Android_SplashImage470>
|
||||
<Android_SplashImage640>$(BDS)\bin\Artwork\Android\FM_SplashImage_640x480.png</Android_SplashImage640>
|
||||
<Android_SplashImage960>$(BDS)\bin\Artwork\Android\FM_SplashImage_960x720.png</Android_SplashImage960>
|
||||
<AUP_ACCESS_COARSE_LOCATION>true</AUP_ACCESS_COARSE_LOCATION>
|
||||
<AUP_ACCESS_FINE_LOCATION>true</AUP_ACCESS_FINE_LOCATION>
|
||||
<AUP_CALL_PHONE>true</AUP_CALL_PHONE>
|
||||
<AUP_CAMERA>true</AUP_CAMERA>
|
||||
<AUP_INTERNET>true</AUP_INTERNET>
|
||||
<AUP_READ_CALENDAR>true</AUP_READ_CALENDAR>
|
||||
<AUP_READ_EXTERNAL_STORAGE>true</AUP_READ_EXTERNAL_STORAGE>
|
||||
<AUP_WRITE_CALENDAR>true</AUP_WRITE_CALENDAR>
|
||||
<AUP_WRITE_EXTERNAL_STORAGE>true</AUP_WRITE_EXTERNAL_STORAGE>
|
||||
<AUP_READ_PHONE_STATE>true</AUP_READ_PHONE_STATE>
|
||||
<EnabledSysJars>android-support-v4.dex.jar;cloud-messaging.dex.jar;fmx.dex.jar;google-analytics-v2.dex.jar;google-play-billing.dex.jar;google-play-licensing.dex.jar;google-play-services.dex.jar</EnabledSysJars>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_iOSDevice32)'!=''">
|
||||
<VerInfo_Keys>CFBundleName=$(MSBuildProjectName);CFBundleDevelopmentRegion=en;CFBundleDisplayName=$(MSBuildProjectName);CFBundleIdentifier=$(MSBuildProjectName);CFBundleInfoDictionaryVersion=7.1;CFBundleVersion=1.0.0.0;CFBundlePackageType=APPL;CFBundleSignature=????;LSRequiresIPhoneOS=true;CFBundleAllowMixedLocalizations=YES;CFBundleExecutable=$(MSBuildProjectName);UIDeviceFamily=iPhone & iPad;CFBundleResourceSpecification=ResourceRules.plist;NSLocationAlwaysUsageDescription=The reason for accessing the location information of the user;NSLocationWhenInUseUsageDescription=The reason for accessing the location information of the user;FMLocalNotificationPermission=false;UIBackgroundModes=;NSContactsUsageDescription=The reason for accessing the contacts;NSPhotoLibraryUsageDescription=The reason for accessing the photo library;NSCameraUsageDescription=The reason for accessing the camera</VerInfo_Keys>
|
||||
<VerInfo_UIDeviceFamily>iPhoneAndiPad</VerInfo_UIDeviceFamily>
|
||||
<VerInfo_IncludeVerInfo>true</VerInfo_IncludeVerInfo>
|
||||
<BT_BuildType>Debug</BT_BuildType>
|
||||
<VerInfo_BundleId>$(MSBuildProjectName)</VerInfo_BundleId>
|
||||
<iPhone_AppIcon60>$(BDS)\bin\Artwork\iOS\iPhone\FM_ApplicationIcon_60x60.png</iPhone_AppIcon60>
|
||||
<iPhone_AppIcon120>$(BDS)\bin\Artwork\iOS\iPhone\FM_ApplicationIcon_120x120.png</iPhone_AppIcon120>
|
||||
<iPhone_Spotlight40>$(BDS)\bin\Artwork\iOS\iPhone\FM_SpotlightSearchIcon_40x40.png</iPhone_Spotlight40>
|
||||
<iPhone_Spotlight80>$(BDS)\bin\Artwork\iOS\iPhone\FM_SpotlightSearchIcon_80x80.png</iPhone_Spotlight80>
|
||||
<iPad_SpotLight40>$(BDS)\bin\Artwork\iOS\iPad\FM_SpotlightSearchIcon_40x40.png</iPad_SpotLight40>
|
||||
<iPad_SpotLight80>$(BDS)\bin\Artwork\iOS\iPad\FM_SpotlightSearchIcon_80x80.png</iPad_SpotLight80>
|
||||
<iPad_AppIcon76>$(BDS)\bin\Artwork\iOS\iPad\FM_ApplicationIcon_76x76.png</iPad_AppIcon76>
|
||||
<iPad_AppIcon152>$(BDS)\bin\Artwork\iOS\iPad\FM_ApplicationIcon_152x152.png</iPad_AppIcon152>
|
||||
<iPad_Launch768x1024>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImagePortrait_768x1024.png</iPad_Launch768x1024>
|
||||
<iPad_Launch1024x768>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImageLandscape_1024x768.png</iPad_Launch1024x768>
|
||||
<iPad_Launch1536x2048>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImagePortrait_1536x2048.png</iPad_Launch1536x2048>
|
||||
<iPad_Launch2048x1536>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImageLandscape_2048x1536.png</iPad_Launch2048x1536>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_iOSDevice64)'!=''">
|
||||
<VerInfo_Keys>CFBundleName=$(MSBuildProjectName);CFBundleDevelopmentRegion=en;CFBundleDisplayName=$(MSBuildProjectName);CFBundleIdentifier=$(MSBuildProjectName);CFBundleInfoDictionaryVersion=7.1;CFBundleVersion=1.0.0.0;CFBundlePackageType=APPL;CFBundleSignature=????;LSRequiresIPhoneOS=true;CFBundleAllowMixedLocalizations=YES;CFBundleExecutable=$(MSBuildProjectName);UIDeviceFamily=iPhone & iPad;CFBundleResourceSpecification=ResourceRules.plist;NSLocationAlwaysUsageDescription=The reason for accessing the location information of the user;NSLocationWhenInUseUsageDescription=The reason for accessing the location information of the user;FMLocalNotificationPermission=false;UIBackgroundModes=;NSContactsUsageDescription=The reason for accessing the contacts;NSPhotoLibraryUsageDescription=The reason for accessing the photo library;NSCameraUsageDescription=The reason for accessing the camera</VerInfo_Keys>
|
||||
<VerInfo_UIDeviceFamily>iPhoneAndiPad</VerInfo_UIDeviceFamily>
|
||||
<VerInfo_IncludeVerInfo>true</VerInfo_IncludeVerInfo>
|
||||
<BT_BuildType>Debug</BT_BuildType>
|
||||
<VerInfo_BundleId>$(MSBuildProjectName)</VerInfo_BundleId>
|
||||
<iPhone_AppIcon60>$(BDS)\bin\Artwork\iOS\iPhone\FM_ApplicationIcon_60x60.png</iPhone_AppIcon60>
|
||||
<iPhone_AppIcon120>$(BDS)\bin\Artwork\iOS\iPhone\FM_ApplicationIcon_120x120.png</iPhone_AppIcon120>
|
||||
<iPhone_Spotlight40>$(BDS)\bin\Artwork\iOS\iPhone\FM_SpotlightSearchIcon_40x40.png</iPhone_Spotlight40>
|
||||
<iPhone_Spotlight80>$(BDS)\bin\Artwork\iOS\iPhone\FM_SpotlightSearchIcon_80x80.png</iPhone_Spotlight80>
|
||||
<iPad_SpotLight40>$(BDS)\bin\Artwork\iOS\iPad\FM_SpotlightSearchIcon_40x40.png</iPad_SpotLight40>
|
||||
<iPad_SpotLight80>$(BDS)\bin\Artwork\iOS\iPad\FM_SpotlightSearchIcon_80x80.png</iPad_SpotLight80>
|
||||
<iPad_AppIcon76>$(BDS)\bin\Artwork\iOS\iPad\FM_ApplicationIcon_76x76.png</iPad_AppIcon76>
|
||||
<iPad_AppIcon152>$(BDS)\bin\Artwork\iOS\iPad\FM_ApplicationIcon_152x152.png</iPad_AppIcon152>
|
||||
<iPad_Launch768x1024>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImagePortrait_768x1024.png</iPad_Launch768x1024>
|
||||
<iPad_Launch1024x768>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImageLandscape_1024x768.png</iPad_Launch1024x768>
|
||||
<iPad_Launch1536x2048>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImagePortrait_1536x2048.png</iPad_Launch1536x2048>
|
||||
<iPad_Launch2048x1536>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImageLandscape_2048x1536.png</iPad_Launch2048x1536>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_iOSSimulator)'!=''">
|
||||
<VerInfo_Keys>CFBundleName=$(MSBuildProjectName);CFBundleDevelopmentRegion=en;CFBundleDisplayName=$(MSBuildProjectName);CFBundleIdentifier=$(MSBuildProjectName);CFBundleInfoDictionaryVersion=7.1;CFBundleVersion=1.0.0.0;CFBundlePackageType=APPL;CFBundleSignature=????;LSRequiresIPhoneOS=true;CFBundleAllowMixedLocalizations=YES;CFBundleExecutable=$(MSBuildProjectName);UIDeviceFamily=iPhone & iPad;CFBundleResourceSpecification=ResourceRules.plist;NSLocationAlwaysUsageDescription=The reason for accessing the location information of the user;NSLocationWhenInUseUsageDescription=The reason for accessing the location information of the user;FMLocalNotificationPermission=false;UIBackgroundModes=;NSContactsUsageDescription=The reason for accessing the contacts;NSPhotoLibraryUsageDescription=The reason for accessing the photo library;NSCameraUsageDescription=The reason for accessing the camera</VerInfo_Keys>
|
||||
<VerInfo_UIDeviceFamily>iPhoneAndiPad</VerInfo_UIDeviceFamily>
|
||||
<VerInfo_IncludeVerInfo>true</VerInfo_IncludeVerInfo>
|
||||
<iPhone_AppIcon60>$(BDS)\bin\Artwork\iOS\iPhone\FM_ApplicationIcon_60x60.png</iPhone_AppIcon60>
|
||||
<iPhone_AppIcon120>$(BDS)\bin\Artwork\iOS\iPhone\FM_ApplicationIcon_120x120.png</iPhone_AppIcon120>
|
||||
<iPhone_Spotlight40>$(BDS)\bin\Artwork\iOS\iPhone\FM_SpotlightSearchIcon_40x40.png</iPhone_Spotlight40>
|
||||
<iPhone_Spotlight80>$(BDS)\bin\Artwork\iOS\iPhone\FM_SpotlightSearchIcon_80x80.png</iPhone_Spotlight80>
|
||||
<iPad_SpotLight40>$(BDS)\bin\Artwork\iOS\iPad\FM_SpotlightSearchIcon_40x40.png</iPad_SpotLight40>
|
||||
<iPad_SpotLight80>$(BDS)\bin\Artwork\iOS\iPad\FM_SpotlightSearchIcon_80x80.png</iPad_SpotLight80>
|
||||
<iPad_AppIcon76>$(BDS)\bin\Artwork\iOS\iPad\FM_ApplicationIcon_76x76.png</iPad_AppIcon76>
|
||||
<iPad_AppIcon152>$(BDS)\bin\Artwork\iOS\iPad\FM_ApplicationIcon_152x152.png</iPad_AppIcon152>
|
||||
<iPad_Launch768x1024>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImagePortrait_768x1024.png</iPad_Launch768x1024>
|
||||
<iPad_Launch1024x768>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImageLandscape_1024x768.png</iPad_Launch1024x768>
|
||||
<iPad_Launch1536x2048>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImagePortrait_1536x2048.png</iPad_Launch1536x2048>
|
||||
<iPad_Launch2048x1536>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImageLandscape_2048x1536.png</iPad_Launch2048x1536>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_Win32)'!=''">
|
||||
<DCC_Namespace>Winapi;System.Win;Data.Win;Datasnap.Win;Web.Win;Soap.Win;Xml.Win;Bde;$(DCC_Namespace)</DCC_Namespace>
|
||||
<VerInfo_Locale>1033</VerInfo_Locale>
|
||||
<VerInfo_Keys>CompanyName=;FileVersion=1.0.0.0;InternalName=;LegalCopyright=;LegalTrademarks=;OriginalFilename=;ProductVersion=1.0.0.0;Comments=;ProgramID=com.embarcadero.$(MSBuildProjectName);FileDescription=$(MSBuildProjectName);ProductName=$(MSBuildProjectName)</VerInfo_Keys>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_1)'!=''">
|
||||
<DCC_Define>RELEASE;$(DCC_Define)</DCC_Define>
|
||||
<DCC_DebugInformation>0</DCC_DebugInformation>
|
||||
<DCC_LocalDebugSymbols>false</DCC_LocalDebugSymbols>
|
||||
<DCC_SymbolReferenceInfo>0</DCC_SymbolReferenceInfo>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_1_Win32)'!=''">
|
||||
<DCC_ExeOutput>..\..\..\bin\$(Platform)</DCC_ExeOutput>
|
||||
<Manifest_File>None</Manifest_File>
|
||||
<DCC_DcuOutput>.\$(Platform)\$(Config)</DCC_DcuOutput>
|
||||
<VerInfo_Keys>CompanyName=;FileVersion=1.0.0.0;InternalName=;LegalCopyright=;LegalTrademarks=;OriginalFilename=;ProductVersion=1.0.0.0;Comments=;ProgramID=com.embarcadero.$(MSBuildProjectName);FileDescription=$(MSBuildProjectName);ProductName=$(MSBuildProjectName)</VerInfo_Keys>
|
||||
<VerInfo_Locale>1033</VerInfo_Locale>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_2)'!=''">
|
||||
<DCC_GenerateStackFrames>true</DCC_GenerateStackFrames>
|
||||
<DCC_Define>DEBUG;$(DCC_Define)</DCC_Define>
|
||||
<DCC_Optimize>false</DCC_Optimize>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_2_iOSDevice32)'!=''">
|
||||
<DCC_RemoteDebug>true</DCC_RemoteDebug>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_2_iOSDevice64)'!=''">
|
||||
<DCC_RemoteDebug>true</DCC_RemoteDebug>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_2_iOSSimulator)'!=''">
|
||||
<DCC_RemoteDebug>true</DCC_RemoteDebug>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<DelphiCompile Include="$(MainSource)">
|
||||
<MainSource>MainSource</MainSource>
|
||||
</DelphiCompile>
|
||||
<BuildConfiguration Include="Debug">
|
||||
<Key>Cfg_2</Key>
|
||||
<CfgParent>Base</CfgParent>
|
||||
</BuildConfiguration>
|
||||
<BuildConfiguration Include="Base">
|
||||
<Key>Base</Key>
|
||||
</BuildConfiguration>
|
||||
<BuildConfiguration Include="Release">
|
||||
<Key>Cfg_1</Key>
|
||||
<CfgParent>Base</CfgParent>
|
||||
</BuildConfiguration>
|
||||
</ItemGroup>
|
||||
<ProjectExtensions>
|
||||
<Borland.Personality>Delphi.Personality.12</Borland.Personality>
|
||||
<Borland.ProjectType/>
|
||||
<BorlandProject>
|
||||
<Delphi.Personality>
|
||||
<Source>
|
||||
<Source Name="MainSource">filtering_video.dpr</Source>
|
||||
</Source>
|
||||
<Excluded_Packages>
|
||||
<Excluded_Packages Name="$(BDSBIN)\dcloffice2k200.bpl">Microsoft Office 2000 Sample Automation Server Wrapper Components</Excluded_Packages>
|
||||
<Excluded_Packages Name="$(BDSBIN)\dclofficexp200.bpl">Microsoft Office XP Sample Automation Server Wrapper Components</Excluded_Packages>
|
||||
</Excluded_Packages>
|
||||
</Delphi.Personality>
|
||||
<Platforms>
|
||||
<Platform value="Android">False</Platform>
|
||||
<Platform value="iOSDevice32">False</Platform>
|
||||
<Platform value="iOSDevice64">False</Platform>
|
||||
<Platform value="iOSSimulator">False</Platform>
|
||||
<Platform value="Linux64">False</Platform>
|
||||
<Platform value="OSX32">False</Platform>
|
||||
<Platform value="Win32">True</Platform>
|
||||
<Platform value="Win64">False</Platform>
|
||||
</Platforms>
|
||||
</BorlandProject>
|
||||
<ProjectFileVersion>12</ProjectFileVersion>
|
||||
</ProjectExtensions>
|
||||
<Import Project="$(BDS)\Bin\CodeGear.Delphi.Targets" Condition="Exists('$(BDS)\Bin\CodeGear.Delphi.Targets')"/>
|
||||
<Import Project="$(APPDATA)\Embarcadero\$(BDSAPPDATABASEDIR)\$(PRODUCTVERSION)\UserTools.proj" Condition="Exists('$(APPDATA)\Embarcadero\$(BDSAPPDATABASEDIR)\$(PRODUCTVERSION)\UserTools.proj')"/>
|
||||
</Project>
|
Binary file not shown.
@ -1,66 +0,0 @@
|
||||
program metadata;

{$APPTYPE CONSOLE}
{$R *.res}
{$i ffmpeg.inc}

uses
  System.SysUtils,
  ffm.lib,
  ffm.libavcodec.avcodec,
  ffm.ctypes,
  ffm.avformat,
  ffm.avio,
  ffm.avutil,
  ffm.buffer,
  ffm.dict,
  ffm.frame,
  ffm.log,
  ffm.opt,
  ffm.pixfmt,
  ffm.rational,
  ffm.samplefmt,
  ffm.parseutils,
  ffm.swscale,
  ffm.pixdesc,
  ffm.imgutils,
  ffm.mem,
  ffm.error,
  uResourcePaths;

Var
  fmt_ctx: pAVFormatContext = nil;
  tag: pAVDictionaryEntry = Nil;
  ret: Integer;
  inp: AnsiString;

const
  std_filename = cResourceMedia + 'trailer.avi';

begin
  try
    Writeln(Format('usage: %s <input_file>'#13#10 + 'example program to demonstrate the use of the libavformat metadata API.'#13#10,
      [ExtractFileName(ParamStr(0))]));
    if (ParamCount < 1) then
      inp := std_filename
    else
      inp := ParamStr(1);
    av_register_all();
    ret := avformat_open_input(fmt_ctx, PAnsiChar(inp), nil, nil);
    if ret < 0 then
      Halt(ret);

    tag := av_dict_get(fmt_ctx^.metadata, '', tag, AV_DICT_IGNORE_SUFFIX);
    while Assigned(tag) do
    begin
      Writeln(Format('%s = %s'#13#10, [tag^.key, tag^.value]));
      tag := av_dict_get(fmt_ctx^.metadata, '', tag, AV_DICT_IGNORE_SUFFIX);
    end;

    avformat_close_input(fmt_ctx);
  except
    on E: Exception do
      Writeln(E.ClassName, ': ', E.Message);
  end;

end.
|
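The loop above walks only the container-level dictionary; the same av_dict_get idiom applies to each stream as well. A minimal sketch, assuming the streams array and its metadata field are exposed as in the FFmpeg C API (an extra local i: Integer would be needed):

// Sketch: enumerate per-stream metadata with the same av_dict_get pattern.
for i := 0 to fmt_ctx^.nb_streams - 1 do
begin
  tag := av_dict_get(fmt_ctx^.streams[i]^.metadata, '', nil, AV_DICT_IGNORE_SUFFIX);
  while Assigned(tag) do
  begin
    Writeln(Format('stream %d: %s = %s', [i, tag^.key, tag^.value]));
    tag := av_dict_get(fmt_ctx^.streams[i]^.metadata, '', tag, AV_DICT_IGNORE_SUFFIX);
  end;
end;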
@ -1,244 +0,0 @@
|
||||
<Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<PropertyGroup>
|
||||
<ProjectGuid>{D042FDC4-2AC2-458D-A0D8-0DFA6C296945}</ProjectGuid>
|
||||
<MainSource>metadata.dpr</MainSource>
|
||||
<Base>True</Base>
|
||||
<Config Condition="'$(Config)'==''">Release</Config>
|
||||
<TargetedPlatforms>1</TargetedPlatforms>
|
||||
<AppType>Console</AppType>
|
||||
<FrameworkType>None</FrameworkType>
|
||||
<ProjectVersion>18.2</ProjectVersion>
|
||||
<Platform Condition="'$(Platform)'==''">Win32</Platform>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Config)'=='Base' or '$(Base)'!=''">
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='Android' and '$(Base)'=='true') or '$(Base_Android)'!=''">
|
||||
<Base_Android>true</Base_Android>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSDevice32' and '$(Base)'=='true') or '$(Base_iOSDevice32)'!=''">
|
||||
<Base_iOSDevice32>true</Base_iOSDevice32>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSDevice64' and '$(Base)'=='true') or '$(Base_iOSDevice64)'!=''">
|
||||
<Base_iOSDevice64>true</Base_iOSDevice64>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSSimulator' and '$(Base)'=='true') or '$(Base_iOSSimulator)'!=''">
|
||||
<Base_iOSSimulator>true</Base_iOSSimulator>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='Win32' and '$(Base)'=='true') or '$(Base_Win32)'!=''">
|
||||
<Base_Win32>true</Base_Win32>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Config)'=='Release' or '$(Cfg_1)'!=''">
|
||||
<Cfg_1>true</Cfg_1>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='Win32' and '$(Cfg_1)'=='true') or '$(Cfg_1_Win32)'!=''">
|
||||
<Cfg_1_Win32>true</Cfg_1_Win32>
|
||||
<CfgParent>Cfg_1</CfgParent>
|
||||
<Cfg_1>true</Cfg_1>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Config)'=='Debug' or '$(Cfg_2)'!=''">
|
||||
<Cfg_2>true</Cfg_2>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSDevice32' and '$(Cfg_2)'=='true') or '$(Cfg_2_iOSDevice32)'!=''">
|
||||
<Cfg_2_iOSDevice32>true</Cfg_2_iOSDevice32>
|
||||
<CfgParent>Cfg_2</CfgParent>
|
||||
<Cfg_2>true</Cfg_2>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSDevice64' and '$(Cfg_2)'=='true') or '$(Cfg_2_iOSDevice64)'!=''">
|
||||
<Cfg_2_iOSDevice64>true</Cfg_2_iOSDevice64>
|
||||
<CfgParent>Cfg_2</CfgParent>
|
||||
<Cfg_2>true</Cfg_2>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSSimulator' and '$(Cfg_2)'=='true') or '$(Cfg_2_iOSSimulator)'!=''">
|
||||
<Cfg_2_iOSSimulator>true</Cfg_2_iOSSimulator>
|
||||
<CfgParent>Cfg_2</CfgParent>
|
||||
<Cfg_2>true</Cfg_2>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base)'!=''">
|
||||
<DCC_E>false</DCC_E>
|
||||
<VerInfo_Locale>1049</VerInfo_Locale>
|
||||
<DCC_Namespace>System;Xml;Data;Datasnap;Web;Soap;$(DCC_Namespace)</DCC_Namespace>
|
||||
<DCC_N>false</DCC_N>
|
||||
<DCC_ImageBase>00400000</DCC_ImageBase>
|
||||
<DCC_K>false</DCC_K>
|
||||
<DCC_S>false</DCC_S>
|
||||
<DCC_F>false</DCC_F>
|
||||
<VerInfo_Keys>CompanyName=;FileDescription=;FileVersion=1.0.0.0;InternalName=;LegalCopyright=;LegalTrademarks=;OriginalFilename=;ProductName=;ProductVersion=1.0.0.0;Comments=;CFBundleName=</VerInfo_Keys>
|
||||
<SanitizedProjectName>metadata</SanitizedProjectName>
|
||||
<Icon_MainIcon>$(BDS)\bin\delphi_PROJECTICON.ico</Icon_MainIcon>
|
||||
<Icns_MainIcns>$(BDS)\bin\delphi_PROJECTICNS.icns</Icns_MainIcns>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_Android)'!=''">
|
||||
<VerInfo_Keys>package=com.embarcadero.$(MSBuildProjectName);label=$(MSBuildProjectName);versionCode=1;versionName=1.0.0;persistent=False;restoreAnyVersion=False;installLocation=auto;largeHeap=False;theme=TitleBar;hardwareAccelerated=true;apiKey=</VerInfo_Keys>
|
||||
<BT_BuildType>Debug</BT_BuildType>
|
||||
<Android_LauncherIcon36>$(BDS)\bin\Artwork\Android\FM_LauncherIcon_36x36.png</Android_LauncherIcon36>
|
||||
<Android_LauncherIcon48>$(BDS)\bin\Artwork\Android\FM_LauncherIcon_48x48.png</Android_LauncherIcon48>
|
||||
<Android_LauncherIcon72>$(BDS)\bin\Artwork\Android\FM_LauncherIcon_72x72.png</Android_LauncherIcon72>
|
||||
<Android_LauncherIcon96>$(BDS)\bin\Artwork\Android\FM_LauncherIcon_96x96.png</Android_LauncherIcon96>
|
||||
<Android_LauncherIcon144>$(BDS)\bin\Artwork\Android\FM_LauncherIcon_144x144.png</Android_LauncherIcon144>
|
||||
<Android_SplashImage426>$(BDS)\bin\Artwork\Android\FM_SplashImage_426x320.png</Android_SplashImage426>
|
||||
<Android_SplashImage470>$(BDS)\bin\Artwork\Android\FM_SplashImage_470x320.png</Android_SplashImage470>
|
||||
<Android_SplashImage640>$(BDS)\bin\Artwork\Android\FM_SplashImage_640x480.png</Android_SplashImage640>
|
||||
<Android_SplashImage960>$(BDS)\bin\Artwork\Android\FM_SplashImage_960x720.png</Android_SplashImage960>
|
||||
<AUP_ACCESS_COARSE_LOCATION>true</AUP_ACCESS_COARSE_LOCATION>
|
||||
<AUP_ACCESS_FINE_LOCATION>true</AUP_ACCESS_FINE_LOCATION>
|
||||
<AUP_CALL_PHONE>true</AUP_CALL_PHONE>
|
||||
<AUP_CAMERA>true</AUP_CAMERA>
|
||||
<AUP_INTERNET>true</AUP_INTERNET>
|
||||
<AUP_READ_CALENDAR>true</AUP_READ_CALENDAR>
|
||||
<AUP_READ_EXTERNAL_STORAGE>true</AUP_READ_EXTERNAL_STORAGE>
|
||||
<AUP_WRITE_CALENDAR>true</AUP_WRITE_CALENDAR>
|
||||
<AUP_WRITE_EXTERNAL_STORAGE>true</AUP_WRITE_EXTERNAL_STORAGE>
|
||||
<AUP_READ_PHONE_STATE>true</AUP_READ_PHONE_STATE>
|
||||
<EnabledSysJars>android-support-v4.dex.jar;cloud-messaging.dex.jar;fmx.dex.jar;google-analytics-v2.dex.jar;google-play-billing.dex.jar;google-play-licensing.dex.jar;google-play-services.dex.jar</EnabledSysJars>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_iOSDevice32)'!=''">
|
||||
<VerInfo_Keys>CFBundleName=$(MSBuildProjectName);CFBundleDevelopmentRegion=en;CFBundleDisplayName=$(MSBuildProjectName);CFBundleIdentifier=$(MSBuildProjectName);CFBundleInfoDictionaryVersion=7.1;CFBundleVersion=1.0.0.0;CFBundlePackageType=APPL;CFBundleSignature=????;LSRequiresIPhoneOS=true;CFBundleAllowMixedLocalizations=YES;CFBundleExecutable=$(MSBuildProjectName);UIDeviceFamily=iPhone & iPad;CFBundleResourceSpecification=ResourceRules.plist;NSLocationAlwaysUsageDescription=The reason for accessing the location information of the user;NSLocationWhenInUseUsageDescription=The reason for accessing the location information of the user;FMLocalNotificationPermission=false;UIBackgroundModes=;NSContactsUsageDescription=The reason for accessing the contacts;NSPhotoLibraryUsageDescription=The reason for accessing the photo library;NSCameraUsageDescription=The reason for accessing the camera</VerInfo_Keys>
|
||||
<VerInfo_UIDeviceFamily>iPhoneAndiPad</VerInfo_UIDeviceFamily>
|
||||
<VerInfo_IncludeVerInfo>true</VerInfo_IncludeVerInfo>
|
||||
<BT_BuildType>Debug</BT_BuildType>
|
||||
<VerInfo_BundleId>$(MSBuildProjectName)</VerInfo_BundleId>
|
||||
<iPhone_AppIcon60>$(BDS)\bin\Artwork\iOS\iPhone\FM_ApplicationIcon_60x60.png</iPhone_AppIcon60>
|
||||
<iPhone_AppIcon120>$(BDS)\bin\Artwork\iOS\iPhone\FM_ApplicationIcon_120x120.png</iPhone_AppIcon120>
|
||||
<iPhone_Spotlight40>$(BDS)\bin\Artwork\iOS\iPhone\FM_SpotlightSearchIcon_40x40.png</iPhone_Spotlight40>
|
||||
<iPhone_Spotlight80>$(BDS)\bin\Artwork\iOS\iPhone\FM_SpotlightSearchIcon_80x80.png</iPhone_Spotlight80>
|
||||
<iPad_SpotLight40>$(BDS)\bin\Artwork\iOS\iPad\FM_SpotlightSearchIcon_40x40.png</iPad_SpotLight40>
|
||||
<iPad_SpotLight80>$(BDS)\bin\Artwork\iOS\iPad\FM_SpotlightSearchIcon_80x80.png</iPad_SpotLight80>
|
||||
<iPad_AppIcon76>$(BDS)\bin\Artwork\iOS\iPad\FM_ApplicationIcon_76x76.png</iPad_AppIcon76>
|
||||
<iPad_AppIcon152>$(BDS)\bin\Artwork\iOS\iPad\FM_ApplicationIcon_152x152.png</iPad_AppIcon152>
|
||||
<iPad_Launch768x1024>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImagePortrait_768x1024.png</iPad_Launch768x1024>
|
||||
<iPad_Launch1024x768>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImageLandscape_1024x768.png</iPad_Launch1024x768>
|
||||
<iPad_Launch1536x2048>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImagePortrait_1536x2048.png</iPad_Launch1536x2048>
|
||||
<iPad_Launch2048x1536>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImageLandscape_2048x1536.png</iPad_Launch2048x1536>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_iOSDevice64)'!=''">
|
||||
<VerInfo_Keys>CFBundleName=$(MSBuildProjectName);CFBundleDevelopmentRegion=en;CFBundleDisplayName=$(MSBuildProjectName);CFBundleIdentifier=$(MSBuildProjectName);CFBundleInfoDictionaryVersion=7.1;CFBundleVersion=1.0.0.0;CFBundlePackageType=APPL;CFBundleSignature=????;LSRequiresIPhoneOS=true;CFBundleAllowMixedLocalizations=YES;CFBundleExecutable=$(MSBuildProjectName);UIDeviceFamily=iPhone & iPad;CFBundleResourceSpecification=ResourceRules.plist;NSLocationAlwaysUsageDescription=The reason for accessing the location information of the user;NSLocationWhenInUseUsageDescription=The reason for accessing the location information of the user;FMLocalNotificationPermission=false;UIBackgroundModes=;NSContactsUsageDescription=The reason for accessing the contacts;NSPhotoLibraryUsageDescription=The reason for accessing the photo library;NSCameraUsageDescription=The reason for accessing the camera</VerInfo_Keys>
|
||||
<VerInfo_UIDeviceFamily>iPhoneAndiPad</VerInfo_UIDeviceFamily>
|
||||
<VerInfo_IncludeVerInfo>true</VerInfo_IncludeVerInfo>
|
||||
<BT_BuildType>Debug</BT_BuildType>
|
||||
<VerInfo_BundleId>$(MSBuildProjectName)</VerInfo_BundleId>
|
||||
<iPhone_AppIcon60>$(BDS)\bin\Artwork\iOS\iPhone\FM_ApplicationIcon_60x60.png</iPhone_AppIcon60>
|
||||
<iPhone_AppIcon120>$(BDS)\bin\Artwork\iOS\iPhone\FM_ApplicationIcon_120x120.png</iPhone_AppIcon120>
|
||||
<iPhone_Spotlight40>$(BDS)\bin\Artwork\iOS\iPhone\FM_SpotlightSearchIcon_40x40.png</iPhone_Spotlight40>
|
||||
<iPhone_Spotlight80>$(BDS)\bin\Artwork\iOS\iPhone\FM_SpotlightSearchIcon_80x80.png</iPhone_Spotlight80>
|
||||
<iPad_SpotLight40>$(BDS)\bin\Artwork\iOS\iPad\FM_SpotlightSearchIcon_40x40.png</iPad_SpotLight40>
|
||||
<iPad_SpotLight80>$(BDS)\bin\Artwork\iOS\iPad\FM_SpotlightSearchIcon_80x80.png</iPad_SpotLight80>
|
||||
<iPad_AppIcon76>$(BDS)\bin\Artwork\iOS\iPad\FM_ApplicationIcon_76x76.png</iPad_AppIcon76>
|
||||
<iPad_AppIcon152>$(BDS)\bin\Artwork\iOS\iPad\FM_ApplicationIcon_152x152.png</iPad_AppIcon152>
|
||||
<iPad_Launch768x1024>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImagePortrait_768x1024.png</iPad_Launch768x1024>
|
||||
<iPad_Launch1024x768>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImageLandscape_1024x768.png</iPad_Launch1024x768>
|
||||
<iPad_Launch1536x2048>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImagePortrait_1536x2048.png</iPad_Launch1536x2048>
|
||||
<iPad_Launch2048x1536>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImageLandscape_2048x1536.png</iPad_Launch2048x1536>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_iOSSimulator)'!=''">
|
||||
<VerInfo_Keys>CFBundleName=$(MSBuildProjectName);CFBundleDevelopmentRegion=en;CFBundleDisplayName=$(MSBuildProjectName);CFBundleIdentifier=$(MSBuildProjectName);CFBundleInfoDictionaryVersion=7.1;CFBundleVersion=1.0.0.0;CFBundlePackageType=APPL;CFBundleSignature=????;LSRequiresIPhoneOS=true;CFBundleAllowMixedLocalizations=YES;CFBundleExecutable=$(MSBuildProjectName);UIDeviceFamily=iPhone & iPad;CFBundleResourceSpecification=ResourceRules.plist;NSLocationAlwaysUsageDescription=The reason for accessing the location information of the user;NSLocationWhenInUseUsageDescription=The reason for accessing the location information of the user;FMLocalNotificationPermission=false;UIBackgroundModes=;NSContactsUsageDescription=The reason for accessing the contacts;NSPhotoLibraryUsageDescription=The reason for accessing the photo library;NSCameraUsageDescription=The reason for accessing the camera</VerInfo_Keys>
|
||||
<VerInfo_UIDeviceFamily>iPhoneAndiPad</VerInfo_UIDeviceFamily>
|
||||
<VerInfo_IncludeVerInfo>true</VerInfo_IncludeVerInfo>
|
||||
<iPhone_AppIcon60>$(BDS)\bin\Artwork\iOS\iPhone\FM_ApplicationIcon_60x60.png</iPhone_AppIcon60>
|
||||
<iPhone_AppIcon120>$(BDS)\bin\Artwork\iOS\iPhone\FM_ApplicationIcon_120x120.png</iPhone_AppIcon120>
|
||||
<iPhone_Spotlight40>$(BDS)\bin\Artwork\iOS\iPhone\FM_SpotlightSearchIcon_40x40.png</iPhone_Spotlight40>
|
||||
<iPhone_Spotlight80>$(BDS)\bin\Artwork\iOS\iPhone\FM_SpotlightSearchIcon_80x80.png</iPhone_Spotlight80>
|
||||
<iPad_SpotLight40>$(BDS)\bin\Artwork\iOS\iPad\FM_SpotlightSearchIcon_40x40.png</iPad_SpotLight40>
|
||||
<iPad_SpotLight80>$(BDS)\bin\Artwork\iOS\iPad\FM_SpotlightSearchIcon_80x80.png</iPad_SpotLight80>
|
||||
<iPad_AppIcon76>$(BDS)\bin\Artwork\iOS\iPad\FM_ApplicationIcon_76x76.png</iPad_AppIcon76>
|
||||
<iPad_AppIcon152>$(BDS)\bin\Artwork\iOS\iPad\FM_ApplicationIcon_152x152.png</iPad_AppIcon152>
|
||||
<iPad_Launch768x1024>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImagePortrait_768x1024.png</iPad_Launch768x1024>
|
||||
<iPad_Launch1024x768>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImageLandscape_1024x768.png</iPad_Launch1024x768>
|
||||
<iPad_Launch1536x2048>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImagePortrait_1536x2048.png</iPad_Launch1536x2048>
|
||||
<iPad_Launch2048x1536>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImageLandscape_2048x1536.png</iPad_Launch2048x1536>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_Win32)'!=''">
|
||||
<DCC_Namespace>Winapi;System.Win;Data.Win;Datasnap.Win;Web.Win;Soap.Win;Xml.Win;Bde;$(DCC_Namespace)</DCC_Namespace>
|
||||
<VerInfo_Locale>1033</VerInfo_Locale>
|
||||
<VerInfo_Keys>CompanyName=;FileVersion=1.0.0.0;InternalName=;LegalCopyright=;LegalTrademarks=;OriginalFilename=;ProductVersion=1.0.0.0;Comments=;ProgramID=com.embarcadero.$(MSBuildProjectName);FileDescription=$(MSBuildProjectName);ProductName=$(MSBuildProjectName)</VerInfo_Keys>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_1)'!=''">
|
||||
<DCC_Define>RELEASE;$(DCC_Define)</DCC_Define>
|
||||
<DCC_DebugInformation>0</DCC_DebugInformation>
|
||||
<DCC_LocalDebugSymbols>false</DCC_LocalDebugSymbols>
|
||||
<DCC_SymbolReferenceInfo>0</DCC_SymbolReferenceInfo>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_1_Win32)'!=''">
|
||||
<DCC_DcuOutput>.\$(Platform)\$(Config)</DCC_DcuOutput>
|
||||
<Manifest_File>None</Manifest_File>
|
||||
<DCC_ExeOutput>..\..\..\bin\$(Platform)</DCC_ExeOutput>
|
||||
<VerInfo_Keys>CompanyName=;FileVersion=1.0.0.0;InternalName=;LegalCopyright=;LegalTrademarks=;OriginalFilename=;ProductVersion=1.0.0.0;Comments=;ProgramID=com.embarcadero.$(MSBuildProjectName);FileDescription=$(MSBuildProjectName);ProductName=$(MSBuildProjectName)</VerInfo_Keys>
|
||||
<VerInfo_Locale>1033</VerInfo_Locale>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_2)'!=''">
|
||||
<DCC_GenerateStackFrames>true</DCC_GenerateStackFrames>
|
||||
<DCC_Define>DEBUG;$(DCC_Define)</DCC_Define>
|
||||
<DCC_Optimize>false</DCC_Optimize>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_2_iOSDevice32)'!=''">
|
||||
<DCC_RemoteDebug>true</DCC_RemoteDebug>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_2_iOSDevice64)'!=''">
|
||||
<DCC_RemoteDebug>true</DCC_RemoteDebug>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_2_iOSSimulator)'!=''">
|
||||
<DCC_RemoteDebug>true</DCC_RemoteDebug>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<DelphiCompile Include="$(MainSource)">
|
||||
<MainSource>MainSource</MainSource>
|
||||
</DelphiCompile>
|
||||
<BuildConfiguration Include="Debug">
|
||||
<Key>Cfg_2</Key>
|
||||
<CfgParent>Base</CfgParent>
|
||||
</BuildConfiguration>
|
||||
<BuildConfiguration Include="Base">
|
||||
<Key>Base</Key>
|
||||
</BuildConfiguration>
|
||||
<BuildConfiguration Include="Release">
|
||||
<Key>Cfg_1</Key>
|
||||
<CfgParent>Base</CfgParent>
|
||||
</BuildConfiguration>
|
||||
</ItemGroup>
|
||||
<ProjectExtensions>
|
||||
<Borland.Personality>Delphi.Personality.12</Borland.Personality>
|
||||
<Borland.ProjectType/>
|
||||
<BorlandProject>
|
||||
<Delphi.Personality>
|
||||
<Source>
|
||||
<Source Name="MainSource">metadata.dpr</Source>
|
||||
</Source>
|
||||
<Excluded_Packages>
|
||||
<Excluded_Packages Name="$(BDSBIN)\dcloffice2k200.bpl">Microsoft Office 2000 Sample Automation Server Wrapper Components</Excluded_Packages>
|
||||
<Excluded_Packages Name="$(BDSBIN)\dclofficexp200.bpl">Microsoft Office XP Sample Automation Server Wrapper Components</Excluded_Packages>
|
||||
</Excluded_Packages>
|
||||
</Delphi.Personality>
|
||||
<Platforms>
|
||||
<Platform value="Android">False</Platform>
|
||||
<Platform value="iOSDevice32">False</Platform>
|
||||
<Platform value="iOSDevice64">False</Platform>
|
||||
<Platform value="iOSSimulator">False</Platform>
|
||||
<Platform value="Linux64">False</Platform>
|
||||
<Platform value="OSX32">False</Platform>
|
||||
<Platform value="Win32">True</Platform>
|
||||
<Platform value="Win64">False</Platform>
|
||||
</Platforms>
|
||||
</BorlandProject>
|
||||
<ProjectFileVersion>12</ProjectFileVersion>
|
||||
</ProjectExtensions>
|
||||
<Import Project="$(BDS)\Bin\CodeGear.Delphi.Targets" Condition="Exists('$(BDS)\Bin\CodeGear.Delphi.Targets')"/>
|
||||
<Import Project="$(APPDATA)\Embarcadero\$(BDSAPPDATABASEDIR)\$(PRODUCTVERSION)\UserTools.proj" Condition="Exists('$(APPDATA)\Embarcadero\$(BDSAPPDATABASEDIR)\$(PRODUCTVERSION)\UserTools.proj')"/>
|
||||
</Project>
|
Binary file not shown.
@ -1,168 +0,0 @@
|
||||
program scaling_video;
|
||||
|
||||
{$APPTYPE CONSOLE}
|
||||
{$R *.res}
|
||||
{$i ffmpeg.inc}
|
||||
|
||||
uses
|
||||
System.SysUtils,
|
||||
System.Classes,
|
||||
Winapi.Windows,
|
||||
ffm.lib,
|
||||
ffm.libavcodec.avcodec,
|
||||
ffm.ctypes,
|
||||
ffm.avformat,
|
||||
ffm.avio,
|
||||
ffm.avutil,
|
||||
ffm.buffer,
|
||||
ffm.dict,
|
||||
ffm.frame,
|
||||
ffm.log,
|
||||
ffm.opt,
|
||||
ffm.pixfmt,
|
||||
ffm.rational,
|
||||
ffm.samplefmt,
|
||||
ffm.parseutils,
|
||||
ffm.swscale,
|
||||
ffm.pixdesc,
|
||||
ffm.imgutils,
|
||||
ffm.mem,
|
||||
ffm.error,
|
||||
uResourcePaths;
|
||||
|
||||
Var
|
||||
src_data: TAVFrameByteArray;//TPointers;
|
||||
dst_data: TAVFrameByteArray;//TPointers;
|
||||
src_linesize: TLinesizes;
|
||||
dst_linesize: TLinesizes;
|
  src_w: Integer = 320;
  src_h: Integer = 240;
  dst_w, dst_h: Integer;
  src_pix_fmt: TAVPixelFormat = AV_PIX_FMT_YUV420P;
  dst_pix_fmt: TAVPixelFormat = AV_PIX_FMT_RGB24;
  dst_size: AnsiString;
  dst_filename: AnsiString;
  // dst_file: File;
  dst_stream: TMemoryStream;
  dst_bufsize: Integer;
  sws_ctx: pSwsContext = nil;
  i, ret: Integer;

const
  in_filename = cResourceMedia + 'trailer.avi';
  out_filename = cResourceResultDefault + 'trailer-out.avi';

procedure fill_yuv_image(data: TAVFrameByteArray {TPointers}; linesize: TLinesizes; width: Integer; height: Integer; frame_index: Integer);
Var
  x, y: Integer;
begin
  (* Y *)
  for y := 0 to height - 1 do
    for x := 0 to width - 1 do
      data[0][y * linesize[0] + x] := x + y + frame_index * 3;

  (* Cb and Cr *)
  for y := 0 to height div 2 - 1 do
  begin
    for x := 0 to width div 2 - 1 do
    begin
      data[1][y * linesize[1] + x] := 128 + y + frame_index * 2;
      data[2][y * linesize[2] + x] := 64 + x + frame_index * 5;
    end;
  end;
end;

Var
  Buf: array [0 .. 511] of AnsiChar;
  r: Integer;

begin
  try
    if (ParamCount <> 2) then
    begin
      WriteLn(Format('Usage: %s output_file output_size' + #13#10 + 'API example program to show how to scale an image with libswscale.' +
        #13#10 + 'This program generates a series of pictures, rescales them to the given ' +
        'output_size and saves them to an output file named output_file.' + #13#10, [ExtractFileName(ParamStr(0))]));
    end;
    if (ParamCount < 2) then
    begin
      if FileExists(in_filename) then
        if CopyFileEx(PChar(in_filename), PChar(out_filename), nil, nil, nil, COPY_FILE_RESTARTABLE) then
        begin
          dst_filename := out_filename;
          dst_size := '320x240';
        end
        else
          Halt(1);
    end
    else
    begin
      dst_filename := ParamStr(1);
      dst_size := ParamStr(2);
    end;

    if av_parse_video_size(dst_w, dst_h, PAnsiChar(dst_size)) < 0 then
    begin
      WriteLn(Format('Invalid size %s, must be in the form WxH or a valid size abbreviation', [dst_size]));
      Halt(1);
    end;

    (* create scaling context *)
    sws_ctx := sws_getContext(src_w, src_h, src_pix_fmt, dst_w, dst_h, dst_pix_fmt, SWS_BILINEAR, Nil, Nil, Nil);
    if not Assigned(sws_ctx) then
    begin
      WriteLn(Format('Impossible to create scale context for the conversion fmt:%s s:%dx%d -> fmt:%s s:%dx%d' + #13#10,
        [av_get_pix_fmt_name(src_pix_fmt), src_w, src_h, av_get_pix_fmt_name(dst_pix_fmt), dst_w, dst_h]));
      Halt(1);
      // ret := AVERROR(EINVAL);
      // goto
      // end;
    end;

    (* allocate source and destination image buffers *)
    ret := av_image_alloc(src_data, src_linesize, src_w, src_h, src_pix_fmt, 16);
    // av_strerror(ret,@buf,SizeOf(buf));
    if (ret < 0) then
    begin
      WriteLn('Could not allocate source image');
      Halt(1);
    end;

    (* buffer is going to be written to rawvideo file, no alignment *)
    ret := av_image_alloc(dst_data, dst_linesize, dst_w, dst_h, dst_pix_fmt, 1);
    if (ret < 0) then
    begin
      WriteLn('Could not allocate destination image');
      Halt(1);
    end;
    dst_bufsize := ret;

    dst_stream := TMemoryStream.Create;

    for i := 0 to 99 do
    begin
      (* generate synthetic video *)
      fill_yuv_image(src_data, src_linesize, src_w, src_h, i);

      (* convert to destination format *)
      sws_scale(sws_ctx, @src_data, @src_linesize, 0, src_h, @dst_data, @dst_linesize);

      (* write scaled image to file *)
      dst_stream.Write((@dst_data[0][0])^, dst_bufsize);
    end;

    dst_stream.SaveToFile(dst_filename);
    dst_stream.Free;

    WriteLn(Format('Scaling succeeded. Play the output file with the command:' + #13#10 +
      'ffplay -f rawvideo -pixel_format %s -video_size %dx%d %s', [av_get_pix_fmt_name(dst_pix_fmt), dst_w, dst_h, dst_filename]));

    av_freep(@src_data[0]);
    av_freep(@dst_data[0]);
    sws_freeContext(sws_ctx);

  except
    on E: Exception do
      WriteLn(E.ClassName, ': ', E.Message);
  end;

end.
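A quick sanity check on the numbers above (a sketch using the example's own variables, not part of the committed file): with AV_PIX_FMT_RGB24 and an alignment of 1, av_image_alloc reports a buffer of exactly dst_w * dst_h * 3 bytes, so the raw output file should contain 100 frames of that size, which is what the printed ffplay command expects.

```pascal
// Sketch only: verify the RGB24 buffer size reported by av_image_alloc.
// RGB24 is a packed format with 3 bytes per pixel, and alignment 1 adds no row padding.
Assert(dst_bufsize = dst_w * dst_h * 3,
  'unexpected RGB24 buffer size reported by av_image_alloc');
```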
@ -1,254 +0,0 @@
|
||||
<Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<PropertyGroup>
|
||||
<ProjectGuid>{48D2DD59-2918-4148-8EF9-6F12C0233241}</ProjectGuid>
|
||||
<MainSource>scaling_video.dpr</MainSource>
|
||||
<Base>True</Base>
|
||||
<Config Condition="'$(Config)'==''">Release</Config>
|
||||
<TargetedPlatforms>1</TargetedPlatforms>
|
||||
<AppType>Console</AppType>
|
||||
<FrameworkType>None</FrameworkType>
|
||||
<ProjectVersion>18.2</ProjectVersion>
|
||||
<Platform Condition="'$(Platform)'==''">Win32</Platform>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Config)'=='Base' or '$(Base)'!=''">
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='Android' and '$(Base)'=='true') or '$(Base_Android)'!=''">
|
||||
<Base_Android>true</Base_Android>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSDevice32' and '$(Base)'=='true') or '$(Base_iOSDevice32)'!=''">
|
||||
<Base_iOSDevice32>true</Base_iOSDevice32>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSDevice64' and '$(Base)'=='true') or '$(Base_iOSDevice64)'!=''">
|
||||
<Base_iOSDevice64>true</Base_iOSDevice64>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSSimulator' and '$(Base)'=='true') or '$(Base_iOSSimulator)'!=''">
|
||||
<Base_iOSSimulator>true</Base_iOSSimulator>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='Win32' and '$(Base)'=='true') or '$(Base_Win32)'!=''">
|
||||
<Base_Win32>true</Base_Win32>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Config)'=='Release' or '$(Cfg_1)'!=''">
|
||||
<Cfg_1>true</Cfg_1>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='Win32' and '$(Cfg_1)'=='true') or '$(Cfg_1_Win32)'!=''">
|
||||
<Cfg_1_Win32>true</Cfg_1_Win32>
|
||||
<CfgParent>Cfg_1</CfgParent>
|
||||
<Cfg_1>true</Cfg_1>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Config)'=='Debug' or '$(Cfg_2)'!=''">
|
||||
<Cfg_2>true</Cfg_2>
|
||||
<CfgParent>Base</CfgParent>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSDevice32' and '$(Cfg_2)'=='true') or '$(Cfg_2_iOSDevice32)'!=''">
|
||||
<Cfg_2_iOSDevice32>true</Cfg_2_iOSDevice32>
|
||||
<CfgParent>Cfg_2</CfgParent>
|
||||
<Cfg_2>true</Cfg_2>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSDevice64' and '$(Cfg_2)'=='true') or '$(Cfg_2_iOSDevice64)'!=''">
|
||||
<Cfg_2_iOSDevice64>true</Cfg_2_iOSDevice64>
|
||||
<CfgParent>Cfg_2</CfgParent>
|
||||
<Cfg_2>true</Cfg_2>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='iOSSimulator' and '$(Cfg_2)'=='true') or '$(Cfg_2_iOSSimulator)'!=''">
|
||||
<Cfg_2_iOSSimulator>true</Cfg_2_iOSSimulator>
|
||||
<CfgParent>Cfg_2</CfgParent>
|
||||
<Cfg_2>true</Cfg_2>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="('$(Platform)'=='Win32' and '$(Cfg_2)'=='true') or '$(Cfg_2_Win32)'!=''">
|
||||
<Cfg_2_Win32>true</Cfg_2_Win32>
|
||||
<CfgParent>Cfg_2</CfgParent>
|
||||
<Cfg_2>true</Cfg_2>
|
||||
<Base>true</Base>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base)'!=''">
|
||||
<DCC_E>false</DCC_E>
|
||||
<VerInfo_Locale>1049</VerInfo_Locale>
|
||||
<DCC_Namespace>System;Xml;Data;Datasnap;Web;Soap;$(DCC_Namespace)</DCC_Namespace>
|
||||
<DCC_N>false</DCC_N>
|
||||
<DCC_ImageBase>00400000</DCC_ImageBase>
|
||||
<DCC_K>false</DCC_K>
|
||||
<DCC_S>false</DCC_S>
|
||||
<DCC_F>false</DCC_F>
|
||||
<VerInfo_Keys>CompanyName=;FileDescription=;FileVersion=1.0.0.0;InternalName=;LegalCopyright=;LegalTrademarks=;OriginalFilename=;ProductName=;ProductVersion=1.0.0.0;Comments=;CFBundleName=</VerInfo_Keys>
|
||||
<SanitizedProjectName>scaling_video</SanitizedProjectName>
|
||||
<Icon_MainIcon>$(BDS)\bin\delphi_PROJECTICON.ico</Icon_MainIcon>
|
||||
<Icns_MainIcns>$(BDS)\bin\delphi_PROJECTICNS.icns</Icns_MainIcns>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_Android)'!=''">
|
||||
<VerInfo_Keys>package=com.embarcadero.$(MSBuildProjectName);label=$(MSBuildProjectName);versionCode=1;versionName=1.0.0;persistent=False;restoreAnyVersion=False;installLocation=auto;largeHeap=False;theme=TitleBar;hardwareAccelerated=true;apiKey=</VerInfo_Keys>
|
||||
<BT_BuildType>Debug</BT_BuildType>
|
||||
<Android_LauncherIcon36>$(BDS)\bin\Artwork\Android\FM_LauncherIcon_36x36.png</Android_LauncherIcon36>
|
||||
<Android_LauncherIcon48>$(BDS)\bin\Artwork\Android\FM_LauncherIcon_48x48.png</Android_LauncherIcon48>
|
||||
<Android_LauncherIcon72>$(BDS)\bin\Artwork\Android\FM_LauncherIcon_72x72.png</Android_LauncherIcon72>
|
||||
<Android_LauncherIcon96>$(BDS)\bin\Artwork\Android\FM_LauncherIcon_96x96.png</Android_LauncherIcon96>
|
||||
<Android_LauncherIcon144>$(BDS)\bin\Artwork\Android\FM_LauncherIcon_144x144.png</Android_LauncherIcon144>
|
||||
<Android_SplashImage426>$(BDS)\bin\Artwork\Android\FM_SplashImage_426x320.png</Android_SplashImage426>
|
||||
<Android_SplashImage470>$(BDS)\bin\Artwork\Android\FM_SplashImage_470x320.png</Android_SplashImage470>
|
||||
<Android_SplashImage640>$(BDS)\bin\Artwork\Android\FM_SplashImage_640x480.png</Android_SplashImage640>
|
||||
<Android_SplashImage960>$(BDS)\bin\Artwork\Android\FM_SplashImage_960x720.png</Android_SplashImage960>
|
||||
<AUP_ACCESS_COARSE_LOCATION>true</AUP_ACCESS_COARSE_LOCATION>
|
||||
<AUP_ACCESS_FINE_LOCATION>true</AUP_ACCESS_FINE_LOCATION>
|
||||
<AUP_CALL_PHONE>true</AUP_CALL_PHONE>
|
||||
<AUP_CAMERA>true</AUP_CAMERA>
|
||||
<AUP_INTERNET>true</AUP_INTERNET>
|
||||
<AUP_READ_CALENDAR>true</AUP_READ_CALENDAR>
|
||||
<AUP_READ_EXTERNAL_STORAGE>true</AUP_READ_EXTERNAL_STORAGE>
|
||||
<AUP_WRITE_CALENDAR>true</AUP_WRITE_CALENDAR>
|
||||
<AUP_WRITE_EXTERNAL_STORAGE>true</AUP_WRITE_EXTERNAL_STORAGE>
|
||||
<AUP_READ_PHONE_STATE>true</AUP_READ_PHONE_STATE>
|
||||
<EnabledSysJars>android-support-v4.dex.jar;cloud-messaging.dex.jar;fmx.dex.jar;google-analytics-v2.dex.jar;google-play-billing.dex.jar;google-play-licensing.dex.jar;google-play-services.dex.jar</EnabledSysJars>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_iOSDevice32)'!=''">
|
||||
<VerInfo_Keys>CFBundleName=$(MSBuildProjectName);CFBundleDevelopmentRegion=en;CFBundleDisplayName=$(MSBuildProjectName);CFBundleIdentifier=$(MSBuildProjectName);CFBundleInfoDictionaryVersion=7.1;CFBundleVersion=1.0.0.0;CFBundlePackageType=APPL;CFBundleSignature=????;LSRequiresIPhoneOS=true;CFBundleAllowMixedLocalizations=YES;CFBundleExecutable=$(MSBuildProjectName);UIDeviceFamily=iPhone & iPad;CFBundleResourceSpecification=ResourceRules.plist;NSLocationAlwaysUsageDescription=The reason for accessing the location information of the user;NSLocationWhenInUseUsageDescription=The reason for accessing the location information of the user;FMLocalNotificationPermission=false;UIBackgroundModes=;NSContactsUsageDescription=The reason for accessing the contacts;NSPhotoLibraryUsageDescription=The reason for accessing the photo library;NSCameraUsageDescription=The reason for accessing the camera</VerInfo_Keys>
|
||||
<VerInfo_UIDeviceFamily>iPhoneAndiPad</VerInfo_UIDeviceFamily>
|
||||
<VerInfo_IncludeVerInfo>true</VerInfo_IncludeVerInfo>
|
||||
<BT_BuildType>Debug</BT_BuildType>
|
||||
<VerInfo_BundleId>$(MSBuildProjectName)</VerInfo_BundleId>
|
||||
<iPhone_AppIcon60>$(BDS)\bin\Artwork\iOS\iPhone\FM_ApplicationIcon_60x60.png</iPhone_AppIcon60>
|
||||
<iPhone_AppIcon120>$(BDS)\bin\Artwork\iOS\iPhone\FM_ApplicationIcon_120x120.png</iPhone_AppIcon120>
|
||||
<iPhone_Spotlight40>$(BDS)\bin\Artwork\iOS\iPhone\FM_SpotlightSearchIcon_40x40.png</iPhone_Spotlight40>
|
||||
<iPhone_Spotlight80>$(BDS)\bin\Artwork\iOS\iPhone\FM_SpotlightSearchIcon_80x80.png</iPhone_Spotlight80>
|
||||
<iPad_SpotLight40>$(BDS)\bin\Artwork\iOS\iPad\FM_SpotlightSearchIcon_40x40.png</iPad_SpotLight40>
|
||||
<iPad_SpotLight80>$(BDS)\bin\Artwork\iOS\iPad\FM_SpotlightSearchIcon_80x80.png</iPad_SpotLight80>
|
||||
<iPad_AppIcon76>$(BDS)\bin\Artwork\iOS\iPad\FM_ApplicationIcon_76x76.png</iPad_AppIcon76>
|
||||
<iPad_AppIcon152>$(BDS)\bin\Artwork\iOS\iPad\FM_ApplicationIcon_152x152.png</iPad_AppIcon152>
|
||||
<iPad_Launch768x1024>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImagePortrait_768x1024.png</iPad_Launch768x1024>
|
||||
<iPad_Launch1024x768>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImageLandscape_1024x768.png</iPad_Launch1024x768>
|
||||
<iPad_Launch1536x2048>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImagePortrait_1536x2048.png</iPad_Launch1536x2048>
|
||||
<iPad_Launch2048x1536>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImageLandscape_2048x1536.png</iPad_Launch2048x1536>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_iOSDevice64)'!=''">
|
||||
<VerInfo_Keys>CFBundleName=$(MSBuildProjectName);CFBundleDevelopmentRegion=en;CFBundleDisplayName=$(MSBuildProjectName);CFBundleIdentifier=$(MSBuildProjectName);CFBundleInfoDictionaryVersion=7.1;CFBundleVersion=1.0.0.0;CFBundlePackageType=APPL;CFBundleSignature=????;LSRequiresIPhoneOS=true;CFBundleAllowMixedLocalizations=YES;CFBundleExecutable=$(MSBuildProjectName);UIDeviceFamily=iPhone & iPad;CFBundleResourceSpecification=ResourceRules.plist;NSLocationAlwaysUsageDescription=The reason for accessing the location information of the user;NSLocationWhenInUseUsageDescription=The reason for accessing the location information of the user;FMLocalNotificationPermission=false;UIBackgroundModes=;NSContactsUsageDescription=The reason for accessing the contacts;NSPhotoLibraryUsageDescription=The reason for accessing the photo library;NSCameraUsageDescription=The reason for accessing the camera</VerInfo_Keys>
|
||||
<VerInfo_UIDeviceFamily>iPhoneAndiPad</VerInfo_UIDeviceFamily>
|
||||
<VerInfo_IncludeVerInfo>true</VerInfo_IncludeVerInfo>
|
||||
<BT_BuildType>Debug</BT_BuildType>
|
||||
<VerInfo_BundleId>$(MSBuildProjectName)</VerInfo_BundleId>
|
||||
<iPhone_AppIcon60>$(BDS)\bin\Artwork\iOS\iPhone\FM_ApplicationIcon_60x60.png</iPhone_AppIcon60>
|
||||
<iPhone_AppIcon120>$(BDS)\bin\Artwork\iOS\iPhone\FM_ApplicationIcon_120x120.png</iPhone_AppIcon120>
|
||||
<iPhone_Spotlight40>$(BDS)\bin\Artwork\iOS\iPhone\FM_SpotlightSearchIcon_40x40.png</iPhone_Spotlight40>
|
||||
<iPhone_Spotlight80>$(BDS)\bin\Artwork\iOS\iPhone\FM_SpotlightSearchIcon_80x80.png</iPhone_Spotlight80>
|
||||
<iPad_SpotLight40>$(BDS)\bin\Artwork\iOS\iPad\FM_SpotlightSearchIcon_40x40.png</iPad_SpotLight40>
|
||||
<iPad_SpotLight80>$(BDS)\bin\Artwork\iOS\iPad\FM_SpotlightSearchIcon_80x80.png</iPad_SpotLight80>
|
||||
<iPad_AppIcon76>$(BDS)\bin\Artwork\iOS\iPad\FM_ApplicationIcon_76x76.png</iPad_AppIcon76>
|
||||
<iPad_AppIcon152>$(BDS)\bin\Artwork\iOS\iPad\FM_ApplicationIcon_152x152.png</iPad_AppIcon152>
|
||||
<iPad_Launch768x1024>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImagePortrait_768x1024.png</iPad_Launch768x1024>
|
||||
<iPad_Launch1024x768>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImageLandscape_1024x768.png</iPad_Launch1024x768>
|
||||
<iPad_Launch1536x2048>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImagePortrait_1536x2048.png</iPad_Launch1536x2048>
|
||||
<iPad_Launch2048x1536>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImageLandscape_2048x1536.png</iPad_Launch2048x1536>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_iOSSimulator)'!=''">
|
||||
<VerInfo_Keys>CFBundleName=$(MSBuildProjectName);CFBundleDevelopmentRegion=en;CFBundleDisplayName=$(MSBuildProjectName);CFBundleIdentifier=$(MSBuildProjectName);CFBundleInfoDictionaryVersion=7.1;CFBundleVersion=1.0.0.0;CFBundlePackageType=APPL;CFBundleSignature=????;LSRequiresIPhoneOS=true;CFBundleAllowMixedLocalizations=YES;CFBundleExecutable=$(MSBuildProjectName);UIDeviceFamily=iPhone & iPad;CFBundleResourceSpecification=ResourceRules.plist;NSLocationAlwaysUsageDescription=The reason for accessing the location information of the user;NSLocationWhenInUseUsageDescription=The reason for accessing the location information of the user;FMLocalNotificationPermission=false;UIBackgroundModes=;NSContactsUsageDescription=The reason for accessing the contacts;NSPhotoLibraryUsageDescription=The reason for accessing the photo library;NSCameraUsageDescription=The reason for accessing the camera</VerInfo_Keys>
|
||||
<VerInfo_UIDeviceFamily>iPhoneAndiPad</VerInfo_UIDeviceFamily>
|
||||
<VerInfo_IncludeVerInfo>true</VerInfo_IncludeVerInfo>
|
||||
<iPhone_AppIcon60>$(BDS)\bin\Artwork\iOS\iPhone\FM_ApplicationIcon_60x60.png</iPhone_AppIcon60>
|
||||
<iPhone_AppIcon120>$(BDS)\bin\Artwork\iOS\iPhone\FM_ApplicationIcon_120x120.png</iPhone_AppIcon120>
|
||||
<iPhone_Spotlight40>$(BDS)\bin\Artwork\iOS\iPhone\FM_SpotlightSearchIcon_40x40.png</iPhone_Spotlight40>
|
||||
<iPhone_Spotlight80>$(BDS)\bin\Artwork\iOS\iPhone\FM_SpotlightSearchIcon_80x80.png</iPhone_Spotlight80>
|
||||
<iPad_SpotLight40>$(BDS)\bin\Artwork\iOS\iPad\FM_SpotlightSearchIcon_40x40.png</iPad_SpotLight40>
|
||||
<iPad_SpotLight80>$(BDS)\bin\Artwork\iOS\iPad\FM_SpotlightSearchIcon_80x80.png</iPad_SpotLight80>
|
||||
<iPad_AppIcon76>$(BDS)\bin\Artwork\iOS\iPad\FM_ApplicationIcon_76x76.png</iPad_AppIcon76>
|
||||
<iPad_AppIcon152>$(BDS)\bin\Artwork\iOS\iPad\FM_ApplicationIcon_152x152.png</iPad_AppIcon152>
|
||||
<iPad_Launch768x1024>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImagePortrait_768x1024.png</iPad_Launch768x1024>
|
||||
<iPad_Launch1024x768>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImageLandscape_1024x768.png</iPad_Launch1024x768>
|
||||
<iPad_Launch1536x2048>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImagePortrait_1536x2048.png</iPad_Launch1536x2048>
|
||||
<iPad_Launch2048x1536>$(BDS)\bin\Artwork\iOS\iPad\FM_LaunchImageLandscape_2048x1536.png</iPad_Launch2048x1536>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Base_Win32)'!=''">
|
||||
<DCC_DcuOutput>.\$(Platform)\$(Config)</DCC_DcuOutput>
|
||||
<DCC_ExeOutput>..\..\..\bin\$(Platform)</DCC_ExeOutput>
|
||||
<Manifest_File>None</Manifest_File>
|
||||
<DCC_Namespace>Winapi;System.Win;Data.Win;Datasnap.Win;Web.Win;Soap.Win;Xml.Win;Bde;$(DCC_Namespace)</DCC_Namespace>
|
||||
<VerInfo_Locale>1033</VerInfo_Locale>
|
||||
<VerInfo_Keys>CompanyName=;FileVersion=1.0.0.0;InternalName=;LegalCopyright=;LegalTrademarks=;OriginalFilename=;ProductVersion=1.0.0.0;Comments=;ProgramID=com.embarcadero.$(MSBuildProjectName);FileDescription=$(MSBuildProjectName);ProductName=$(MSBuildProjectName)</VerInfo_Keys>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_1)'!=''">
|
||||
<DCC_Define>RELEASE;$(DCC_Define)</DCC_Define>
|
||||
<DCC_DebugInformation>0</DCC_DebugInformation>
|
||||
<DCC_LocalDebugSymbols>false</DCC_LocalDebugSymbols>
|
||||
<DCC_SymbolReferenceInfo>0</DCC_SymbolReferenceInfo>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_1_Win32)'!=''">
|
||||
<VerInfo_Keys>CompanyName=;FileVersion=1.0.0.0;InternalName=;LegalCopyright=;LegalTrademarks=;OriginalFilename=;ProductVersion=1.0.0.0;Comments=;ProgramID=com.embarcadero.$(MSBuildProjectName);FileDescription=$(MSBuildProjectName);ProductName=$(MSBuildProjectName)</VerInfo_Keys>
|
||||
<VerInfo_Locale>1033</VerInfo_Locale>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_2)'!=''">
|
||||
<DCC_GenerateStackFrames>true</DCC_GenerateStackFrames>
|
||||
<DCC_Define>DEBUG;$(DCC_Define)</DCC_Define>
|
||||
<DCC_Optimize>false</DCC_Optimize>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_2_iOSDevice32)'!=''">
|
||||
<DCC_RemoteDebug>true</DCC_RemoteDebug>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_2_iOSDevice64)'!=''">
|
||||
<DCC_RemoteDebug>true</DCC_RemoteDebug>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_2_iOSSimulator)'!=''">
|
||||
<DCC_RemoteDebug>true</DCC_RemoteDebug>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Cfg_2_Win32)'!=''">
|
||||
<VerInfo_Locale>1033</VerInfo_Locale>
|
||||
<VerInfo_Keys>CompanyName=;FileVersion=1.0.0.0;InternalName=;LegalCopyright=;LegalTrademarks=;OriginalFilename=;ProductVersion=1.0.0.0;Comments=;ProgramID=com.embarcadero.$(MSBuildProjectName);FileDescription=$(MSBuildProjectName);ProductName=$(MSBuildProjectName)</VerInfo_Keys>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<DelphiCompile Include="$(MainSource)">
|
||||
<MainSource>MainSource</MainSource>
|
||||
</DelphiCompile>
|
||||
<BuildConfiguration Include="Debug">
|
||||
<Key>Cfg_2</Key>
|
||||
<CfgParent>Base</CfgParent>
|
||||
</BuildConfiguration>
|
||||
<BuildConfiguration Include="Base">
|
||||
<Key>Base</Key>
|
||||
</BuildConfiguration>
|
||||
<BuildConfiguration Include="Release">
|
||||
<Key>Cfg_1</Key>
|
||||
<CfgParent>Base</CfgParent>
|
||||
</BuildConfiguration>
|
||||
</ItemGroup>
|
||||
<ProjectExtensions>
|
||||
<Borland.Personality>Delphi.Personality.12</Borland.Personality>
|
||||
<Borland.ProjectType/>
|
||||
<BorlandProject>
|
||||
<Delphi.Personality>
|
||||
<Source>
|
||||
<Source Name="MainSource">scaling_video.dpr</Source>
|
||||
</Source>
|
||||
<Excluded_Packages>
|
||||
<Excluded_Packages Name="$(BDSBIN)\dcloffice2k210.bpl">Microsoft Office 2000 Sample Automation Server Wrapper Components</Excluded_Packages>
|
||||
<Excluded_Packages Name="$(BDSBIN)\dclofficexp210.bpl">Microsoft Office XP Sample Automation Server Wrapper Components</Excluded_Packages>
|
||||
</Excluded_Packages>
|
||||
</Delphi.Personality>
|
||||
<Platforms>
|
||||
<Platform value="Android">False</Platform>
|
||||
<Platform value="iOSDevice32">False</Platform>
|
||||
<Platform value="iOSDevice64">False</Platform>
|
||||
<Platform value="iOSSimulator">False</Platform>
|
||||
<Platform value="Linux64">False</Platform>
|
||||
<Platform value="OSX32">False</Platform>
|
||||
<Platform value="Win32">True</Platform>
|
||||
<Platform value="Win64">False</Platform>
|
||||
</Platforms>
|
||||
</BorlandProject>
|
||||
<ProjectFileVersion>12</ProjectFileVersion>
|
||||
</ProjectExtensions>
|
||||
<Import Project="$(BDS)\Bin\CodeGear.Delphi.Targets" Condition="Exists('$(BDS)\Bin\CodeGear.Delphi.Targets')"/>
|
||||
<Import Project="$(APPDATA)\Embarcadero\$(BDSAPPDATABASEDIR)\$(PRODUCTVERSION)\UserTools.proj" Condition="Exists('$(APPDATA)\Embarcadero\$(BDSAPPDATABASEDIR)\$(PRODUCTVERSION)\UserTools.proj')"/>
|
||||
</Project>
|
Binary file not shown.
@ -3,9 +3,6 @@
|
||||
<ProjectGuid>{B8FFAD51-22D7-426C-B047-EC116C5C089E}</ProjectGuid>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<Projects Include="ch7_ex7_3_expanded.dproj">
|
||||
<Dependencies/>
|
||||
</Projects>
|
||||
<Projects Include="ch8_ex8_3.dproj">
|
||||
<Dependencies/>
|
||||
</Projects>
|
||||
@ -35,15 +32,6 @@
|
||||
<Default.Personality/>
|
||||
</BorlandProject>
|
||||
</ProjectExtensions>
|
||||
<Target Name="ch7_ex7_3_expanded">
|
||||
<MSBuild Projects="ch7_ex7_3_expanded.dproj"/>
|
||||
</Target>
|
||||
<Target Name="ch7_ex7_3_expanded:Clean">
|
||||
<MSBuild Projects="ch7_ex7_3_expanded.dproj" Targets="Clean"/>
|
||||
</Target>
|
||||
<Target Name="ch7_ex7_3_expanded:Make">
|
||||
<MSBuild Projects="ch7_ex7_3_expanded.dproj" Targets="Make"/>
|
||||
</Target>
|
||||
<Target Name="ch8_ex8_3">
|
||||
<MSBuild Projects="ch8_ex8_3.dproj"/>
|
||||
</Target>
|
||||
@ -108,13 +96,13 @@
|
||||
<MSBuild Projects="ch13_ex13_1.dproj" Targets="Make"/>
|
||||
</Target>
|
||||
<Target Name="Build">
|
||||
<CallTarget Targets="ch7_ex7_3_expanded;ch8_ex8_3;ch9_watershed;ch10_ex10_1;ch10_ex10_1b_Horn_Schunck;ch10_ex10_2;ch10_motempl;ch13_ex13_1"/>
|
||||
<CallTarget Targets="ch8_ex8_3;ch9_watershed;ch10_ex10_1;ch10_ex10_1b_Horn_Schunck;ch10_ex10_2;ch10_motempl;ch13_ex13_1"/>
|
||||
</Target>
|
||||
<Target Name="Clean">
|
||||
<CallTarget Targets="ch7_ex7_3_expanded:Clean;ch8_ex8_3:Clean;ch9_watershed:Clean;ch10_ex10_1:Clean;ch10_ex10_1b_Horn_Schunck:Clean;ch10_ex10_2:Clean;ch10_motempl:Clean;ch13_ex13_1:Clean"/>
|
||||
<CallTarget Targets="ch8_ex8_3:Clean;ch9_watershed:Clean;ch10_ex10_1:Clean;ch10_ex10_1b_Horn_Schunck:Clean;ch10_ex10_2:Clean;ch10_motempl:Clean;ch13_ex13_1:Clean"/>
|
||||
</Target>
|
||||
<Target Name="Make">
|
||||
<CallTarget Targets="ch7_ex7_3_expanded:Make;ch8_ex8_3:Make;ch9_watershed:Make;ch10_ex10_1:Make;ch10_ex10_1b_Horn_Schunck:Make;ch10_ex10_2:Make;ch10_motempl:Make;ch13_ex13_1:Make"/>
|
||||
<CallTarget Targets="ch8_ex8_3:Make;ch9_watershed:Make;ch10_ex10_1:Make;ch10_ex10_1b_Horn_Schunck:Make;ch10_ex10_2:Make;ch10_motempl:Make;ch13_ex13_1:Make"/>
|
||||
</Target>
|
||||
<Import Project="$(BDS)\Bin\CodeGear.Group.Targets" Condition="Exists('$(BDS)\Bin\CodeGear.Group.Targets')"/>
|
||||
</Project>
|
||||
|
Binary file not shown.
@ -154,6 +154,12 @@
|
||||
<Platform value="Win64">False</Platform>
|
||||
</Platforms>
|
||||
<Deployment Version="3">
|
||||
<DeployFile LocalName="C:\Users\Public\Documents\Embarcadero\Studio\19.0\Bpl\dclCommonOpenCV250.bpl" Configuration="Release" Class="ProjectOutput">
|
||||
<Platform Name="Win32">
|
||||
<RemoteName>dclCommonOpenCV250.bpl</RemoteName>
|
||||
<Overwrite>true</Overwrite>
|
||||
</Platform>
|
||||
</DeployFile>
|
||||
<DeployFile LocalName="$(BDS)\Redist\osx32\libcgunwind.1.0.dylib" Class="DependencyModule">
|
||||
<Platform Name="OSX32">
|
||||
<Overwrite>true</Overwrite>
|
||||
@ -185,12 +191,6 @@
|
||||
<Overwrite>true</Overwrite>
|
||||
</Platform>
|
||||
</DeployFile>
|
||||
<DeployFile LocalName="C:\Users\Public\Documents\Embarcadero\Studio\19.0\Bpl\dclCommonOpenCV250.bpl" Configuration="Release" Class="ProjectOutput">
|
||||
<Platform Name="Win32">
|
||||
<RemoteName>dclCommonOpenCV250.bpl</RemoteName>
|
||||
<Overwrite>true</Overwrite>
|
||||
</Platform>
|
||||
</DeployFile>
|
||||
<DeployClass Name="AdditionalDebugSymbols">
|
||||
<Platform Name="OSX32">
|
||||
<Operation>1</Operation>
|
||||
|
@ -1,7 +1,7 @@
|
||||
package dclFMXOpenCV250;
|
||||
|
||||
{$R *.res}
|
||||
{$R dclFMXOpenCV.res}
|
||||
{$R 'dclFMXOpenCV.res'}
|
||||
{$IFDEF IMPLICITBUILDING This IFDEF should not be used by users}
|
||||
{$ALIGN 8}
|
||||
{$ASSERTIONS ON}
|
||||
@ -37,6 +37,7 @@ requires
|
||||
|
||||
contains
|
||||
ocv.comp.RegisterFMX in '..\ocv.comp.RegisterFMX.pas',
|
||||
ocv.comp.ViewFMX in '..\ocv.comp.ViewFMX.pas';
|
||||
ocv.comp.ViewFMX in '..\ocv.comp.ViewFMX.pas',
|
||||
ocv.fmxutils in '..\..\ocv.fmxutils.pas';
|
||||
|
||||
end.
|
||||
|
@ -183,6 +183,7 @@
|
||||
<DCCReference Include="dclCommonOpenCV250.dcp"/>
|
||||
<DCCReference Include="..\ocv.comp.RegisterFMX.pas"/>
|
||||
<DCCReference Include="..\ocv.comp.ViewFMX.pas"/>
|
||||
<DCCReference Include="..\..\ocv.fmxutils.pas"/>
|
||||
<RcCompile Include="..\dclFMXOpenCV.rc">
|
||||
<Form>dclFMXOpenCV.res</Form>
|
||||
</RcCompile>
|
||||
@ -222,14 +223,13 @@
|
||||
<Platform value="Win64">False</Platform>
|
||||
</Platforms>
|
||||
<Deployment Version="3">
|
||||
<DeployFile LocalName="C:\Users\Public\Documents\Embarcadero\Studio\15.0\Bpl\dclFMXOpenCV250.bpl" Configuration="Debug" Class="ProjectOutput">
|
||||
<Platform Name="Win32">
|
||||
<RemoteName>dclFMXOpenCV250.bpl</RemoteName>
|
||||
<DeployFile LocalName="$(BDS)\Redist\osx32\libcgunwind.1.0.dylib" Class="DependencyModule">
|
||||
<Platform Name="OSX32">
|
||||
<Overwrite>true</Overwrite>
|
||||
</Platform>
|
||||
</DeployFile>
|
||||
<DeployFile LocalName="$(BDS)\Redist\osx32\libcgunwind.1.0.dylib" Class="DependencyModule">
|
||||
<Platform Name="OSX32">
|
||||
<DeployFile LocalName="$(BDS)\Redist\iossimulator\libPCRE.dylib" Class="DependencyModule">
|
||||
<Platform Name="iOSSimulator">
|
||||
<Overwrite>true</Overwrite>
|
||||
</Platform>
|
||||
</DeployFile>
|
||||
@ -239,11 +239,6 @@
|
||||
<Overwrite>true</Overwrite>
|
||||
</Platform>
|
||||
</DeployFile>
|
||||
<DeployFile LocalName="$(BDS)\Redist\iossimulator\libPCRE.dylib" Class="DependencyModule">
|
||||
<Platform Name="iOSSimulator">
|
||||
<Overwrite>true</Overwrite>
|
||||
</Platform>
|
||||
</DeployFile>
|
||||
<DeployFile LocalName="$(BDS)\Redist\iossim32\libcgunwind.1.0.dylib" Class="DependencyModule">
|
||||
<Platform Name="iOSSimulator">
|
||||
<Overwrite>true</Overwrite>
|
||||
@ -259,6 +254,12 @@
|
||||
<Overwrite>true</Overwrite>
|
||||
</Platform>
|
||||
</DeployFile>
|
||||
<DeployFile LocalName="C:\Users\Public\Documents\Embarcadero\Studio\15.0\Bpl\dclFMXOpenCV250.bpl" Configuration="Debug" Class="ProjectOutput">
|
||||
<Platform Name="Win32">
|
||||
<RemoteName>dclFMXOpenCV250.bpl</RemoteName>
|
||||
<Overwrite>true</Overwrite>
|
||||
</Platform>
|
||||
</DeployFile>
|
||||
<DeployClass Name="AdditionalDebugSymbols">
|
||||
<Platform Name="OSX32">
|
||||
<Operation>1</Operation>
|
||||
|
@ -152,12 +152,6 @@
|
||||
<Platform value="Win64">False</Platform>
|
||||
</Platforms>
|
||||
<Deployment Version="3">
|
||||
<DeployFile LocalName="C:\Users\Public\Documents\Embarcadero\Studio\19.0\Bpl\dclVCLOpenCV250.bpl" Configuration="Release" Class="ProjectOutput">
|
||||
<Platform Name="Win32">
|
||||
<RemoteName>dclVCLOpenCV250.bpl</RemoteName>
|
||||
<Overwrite>true</Overwrite>
|
||||
</Platform>
|
||||
</DeployFile>
|
||||
<DeployFile LocalName="$(BDS)\Redist\osx32\libcgunwind.1.0.dylib" Class="DependencyModule">
|
||||
<Platform Name="OSX32">
|
||||
<Overwrite>true</Overwrite>
|
||||
@ -189,6 +183,12 @@
|
||||
<Overwrite>true</Overwrite>
|
||||
</Platform>
|
||||
</DeployFile>
|
||||
<DeployFile LocalName="C:\Users\Public\Documents\Embarcadero\Studio\19.0\Bpl\dclVCLOpenCV250.bpl" Configuration="Release" Class="ProjectOutput">
|
||||
<Platform Name="Win32">
|
||||
<RemoteName>dclVCLOpenCV250.bpl</RemoteName>
|
||||
<Overwrite>true</Overwrite>
|
||||
</Platform>
|
||||
</DeployFile>
|
||||
<DeployClass Name="AdditionalDebugSymbols">
|
||||
<Platform Name="OSX32">
|
||||
<Operation>1</Operation>
|
||||
|
@ -41,7 +41,7 @@ Uses
|
||||
SyncObjs,
|
||||
{$ENDIF}
|
||||
ocv.comp.Source,
|
||||
ffm.libavcodec.avcodec;
|
||||
libavcodec;
|
||||
|
||||
Type
|
||||
TOnNotifyFFMpegPacket = procedure(Sender: TObject; const packet: TAVPacket; const isKeyFrame: Boolean) of object;
|
||||
@ -88,12 +88,12 @@ Uses
|
||||
ocv.core_c,
|
||||
ocv.core.types_c,
|
||||
ocv.comp.Types,
|
||||
ffm.avformat,
|
||||
ffm.dict,
|
||||
ffm.avutil,
|
||||
ffm.frame,
|
||||
ffm.swscale,
|
||||
ffm.pixfmt;
|
||||
libavformat,
|
||||
libavutil_dict,
|
||||
libavutil,
|
||||
libavutil_frame,
|
||||
libswscale,
|
||||
libavutil_pixfmt;
|
||||
|
||||
Type
|
||||
TocvFFMpegIPCamSourceThread = class(TocvCustomSourceThread)
|
||||
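The hunk above is the core of the migration for this unit: each ffm.* unit of the old bindings is replaced by the corresponding unit of the new FFmpeg Delphi/Pascal headers. A side-by-side uses clause (a sketch for orientation only, inferred from the removed and added lines, not part of the patch):

```pascal
uses
  // old Delphi-OpenCV FFmpeg bindings  ->  new DelphiFFmpeg headers
  // ffm.libavcodec.avcodec             ->  libavcodec
  // ffm.avformat                       ->  libavformat
  // ffm.avutil                         ->  libavutil
  // ffm.dict                           ->  libavutil_dict
  // ffm.frame                          ->  libavutil_frame
  // ffm.pixfmt                         ->  libavutil_pixfmt
  // ffm.swscale                        ->  libswscale
  libavcodec, libavformat, libavutil, libavutil_dict,
  libavutil_frame, libavutil_pixfmt, libswscale;
```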
@ -246,7 +246,7 @@ Var
|
||||
end;
|
||||
if Assigned(pFormatCtx) then
|
||||
begin
|
||||
avformat_close_input(pFormatCtx);
|
||||
avformat_close_input(@pFormatCtx);
|
||||
pFormatCtx := nil;
|
||||
end;
|
||||
if Assigned(iplframe) then
|
||||
@ -256,12 +256,12 @@ Var
|
||||
end;
|
||||
if Assigned(frame) then
|
||||
begin
|
||||
av_frame_free(frame);
|
||||
av_frame_free(@frame);
|
||||
frame := nil;
|
||||
end;
|
||||
if Assigned(optionsDict) then
|
||||
begin
|
||||
av_dict_free(optionsDict);
|
||||
av_dict_free(@optionsDict);
|
||||
optionsDict := nil;
|
||||
end;
|
||||
end;
|
||||
@ -298,14 +298,14 @@ begin
|
||||
|
||||
DoNotyfy(ffocvTryConnect);
|
||||
|
||||
av_dict_set(optionsDict, 'rtsp_transport', 'tcp', 0);
|
||||
av_dict_set(optionsDict, 'rtsp_flags', 'prefer_tcp', 0);
|
||||
av_dict_set(optionsDict, 'allowed_media_types', 'video', 0);
|
||||
av_dict_set(optionsDict, 'reorder_queue_size', '10', 0);
|
||||
av_dict_set(optionsDict, 'max_delay', '500000', 0);
|
||||
av_dict_set(optionsDict, 'stimeout', '1000000', 0);
|
||||
av_dict_set(@optionsDict, 'rtsp_transport', 'tcp', 0);
|
||||
av_dict_set(@optionsDict, 'rtsp_flags', 'prefer_tcp', 0);
|
||||
av_dict_set(@optionsDict, 'allowed_media_types', 'video', 0);
|
||||
av_dict_set(@optionsDict, 'reorder_queue_size', '10', 0);
|
||||
av_dict_set(@optionsDict, 'max_delay', '500000', 0);
|
||||
av_dict_set(@optionsDict, 'stimeout', '1000000', 0);
|
||||
|
||||
ret := avformat_open_input(pFormatCtx, PAnsiChar(FIPCamURL), nil, @optionsDict); // pFormatCtx
|
||||
ret := avformat_open_input(@pFormatCtx, PAnsiChar(FIPCamURL), nil, @optionsDict); // pFormatCtx
|
||||
if ret < 0 then
|
||||
begin
|
||||
DoNotyfy(ffocvErrorGetStream);
|
||||
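Besides the unit renames, the calling convention changes throughout this file: where the old bindings declared dictionary, format-context, frame and packet arguments as var parameters, the new headers mirror the C API and take explicit pointer arguments, so every call site now passes @variable (av_dict_set, av_dict_free, avformat_open_input, avformat_close_input, av_frame_free, av_read_frame, av_free_packet, and the frame_finished output of avcodec_decode_video2). A minimal before/after sketch with illustrative names (the exact pointer type names are an assumption, not taken from the patch):

```pascal
// Sketch only - illustrative locals, not part of the patch.
procedure OpenWithNewHeaders;
var
  optionsDict: pAVDictionary;
  ctx: pAVFormatContext;
begin
  optionsDict := nil;
  ctx := nil;

  // Old ffm.* bindings declared these arguments as var parameters:
  //   av_dict_set(optionsDict, 'rtsp_transport', 'tcp', 0);
  //   avformat_open_input(ctx, 'rtsp://host/stream', nil, @optionsDict);

  // New DelphiFFmpeg headers follow the C signatures, so the address is passed explicitly:
  av_dict_set(@optionsDict, 'rtsp_transport', 'tcp', 0);
  if avformat_open_input(@ctx, 'rtsp://host/stream', nil, @optionsDict) >= 0 then
  begin
    // ... av_read_frame(ctx, @packet), decode, etc. ...
    avformat_close_input(@ctx); // closes the input and nils ctx
  end;
  av_dict_free(@optionsDict);
end;
```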
@ -313,7 +313,7 @@ begin
|
||||
Continue;
|
||||
end;
|
||||
|
||||
av_dict_free(optionsDict);
|
||||
av_dict_free(@optionsDict);
|
||||
optionsDict := nil;
|
||||
if avformat_find_stream_info(pFormatCtx, nil) < 0 then
|
||||
begin
|
||||
@ -368,8 +368,8 @@ begin
|
||||
Continue;
|
||||
end;
|
||||
|
||||
img_convert_context := sws_getCachedContext(nil, pCodecCtx^.Width, pCodecCtx^.Height, pCodecCtx^.pix_fmt, pCodecCtx^.Width,
|
||||
pCodecCtx^.Height, AV_PIX_FMT_BGR24, SWS_BILINEAR, nil, nil, nil);
|
||||
img_convert_context := sws_getCachedContext(nil, pCodecCtx^.Width, pCodecCtx^.Height, Integer(pCodecCtx^.pix_fmt), pCodecCtx^.Width,
|
||||
pCodecCtx^.Height, Integer(AV_PIX_FMT_BGR24), SWS_BILINEAR, nil, nil, nil);
|
||||
if (img_convert_context = nil) then
|
||||
begin
|
||||
DoNotyfy(ffocvErrorGetStream);
|
||||
@ -385,13 +385,13 @@ begin
|
||||
DoNotyfy(ffocvConnected);
|
||||
|
||||
while (not Terminated) and (FSuspendEvent.WaitFor(0) = wrSignaled) and (not FisReconnect) do
|
||||
if av_read_frame(pFormatCtx, packet) >= 0 then
|
||||
if av_read_frame(pFormatCtx, @packet) >= 0 then
|
||||
begin
|
||||
if (packet.stream_index = videoStream) then
|
||||
begin
|
||||
FOwner.DoNotifyPacket(packet, (packet.flags and AV_PKT_FLAG_KEY) <> 0);
|
||||
// Video stream packet
|
||||
avcodec_decode_video2(pCodecCtx, frame, frame_finished, @packet);
|
||||
avcodec_decode_video2(pCodecCtx, frame, @frame_finished, @packet);
|
||||
if (frame_finished <> 0) then
|
||||
begin
|
||||
sws_scale(img_convert_context, @frame^.data, @frame^.linesize, 0, pCodecCtx^.Height, @iplframe^.imageData, @linesize);
|
||||
@ -409,7 +409,7 @@ begin
|
||||
FisReconnect := True;
|
||||
Break;
|
||||
end;
|
||||
av_free_packet(packet);
|
||||
av_free_packet(@packet);
|
||||
end;
|
||||
|
||||
if (not Terminated) and FisReconnect and (FReconnectDelay > 0) and (FSuspendEvent.WaitFor(0) = wrSignaled) then
|
||||
|
@ -129,6 +129,7 @@ implementation
|
||||
|
||||
uses
|
||||
System.UITypes,
|
||||
ocv.core.types_c,
|
||||
ocv.fmxutils;
|
||||
|
||||
{$IFNDEF DELPHIXE5_UP}
|
||||
|
22
source/ffmpeg/LICENSE.txt
Normal file
@ -0,0 +1,22 @@
FFmpeg Delphi/Pascal Headers and Examples License Agreement

A modified part of FFVCL - Delphi FFmpeg VCL Components.
Copyright (c) 2008-2016 DelphiFFmpeg.com
All rights reserved.
http://www.DelphiFFmpeg.com

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
   notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
   notice, this list of conditions and the following disclaimer in the
   documentation and/or other materials provided with the distribution.

This source code is provided "as is" by DelphiFFmpeg.com without
warranty of any kind, either expressed or implied, including but not
limited to the implied warranties of merchantability and/or fitness
for a particular purpose.

Please also notice the License agreement of FFmpeg libraries.
30
source/ffmpeg/Readme.txt
Normal file
@ -0,0 +1,30 @@
FFmpeg Delphi/Pascal Headers and Examples

Ported from FFmpeg 3.0.2

Tested on Delphi 6 to Delphi 10.1 Berlin and FPC 2.6.4 (Win32 only)

*********************************************************************

FFmpeg Delphi/Pascal Headers and Examples License Agreement

A modified part of FFVCL - Delphi FFmpeg VCL Components.
Copyright (c) 2008-2016 DelphiFFmpeg.com
All rights reserved.
http://www.DelphiFFmpeg.com

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
   notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
   notice, this list of conditions and the following disclaimer in the
   documentation and/or other materials provided with the distribution.

This source code is provided "as is" by DelphiFFmpeg.com without
warranty of any kind, either expressed or implied, including but not
limited to the implied warranties of merchantability and/or fitness
for a particular purpose.

Please also notice the License agreement of FFmpeg libraries.
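Since the headers are ported from FFmpeg 3.0.2, a quick way to confirm that the DLLs found at run time match the translation is to print the library versions. A minimal console sketch (it assumes the units export av_register_all and the *_version functions exactly as the C API does, and that matching FFmpeg 3.0.2 DLLs are on the search path; neither is stated in this commit):

```pascal
program ffmpeg_headers_check;

{$APPTYPE CONSOLE}

uses
  SysUtils, libavcodec, libavformat, libavutil;

// FFmpeg packs versions as (major shl 16) or (minor shl 8) or micro.
procedure PrintVersion(const Name: string; V: Cardinal);
begin
  WriteLn(Format('%-10s %d.%d.%d', [Name, V shr 16, (V shr 8) and $FF, V and $FF]));
end;

begin
  av_register_all;                       // register muxers/demuxers (FFmpeg 3.x API)
  PrintVersion('avutil', avutil_version);
  PrintVersion('avcodec', avcodec_version);
  PrintVersion('avformat', avformat_version);
end.
```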
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -1,506 +0,0 @@
|
||||
unit ffm.avio;
|
||||
|
||||
{$i ffmpeg.inc}
|
||||
|
||||
interface
|
||||
|
||||
uses
|
||||
ffm.log, ffm.ctypes;
|
||||
|
||||
(*
|
||||
// * copyright (c) 2001 Fabrice Bellard
|
||||
// *
|
||||
// * This file is part of ffm.
|
||||
// *
|
||||
// * FFmpeg is free software; you can redistribute it and/or
|
||||
// * modify it under the terms of the GNU Lesser General Public
|
||||
// * License as published by the Free Software Foundation; either
|
||||
// * version 2.1 of the License, or (at your option) any later version.
|
||||
// *
|
||||
// * FFmpeg is distributed in the hope that it will be useful,
|
||||
// * but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
// * Lesser General Public License for more details.
|
||||
// *
|
||||
// * You should have received a copy of the GNU Lesser General Public
|
||||
// * License along with FFmpeg; if not, write to the Free Software
|
||||
// * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
*)
|
||||
// #ifndef AVFORMAT_AVIO_H
|
||||
// #define AVFORMAT_AVIO_H
|
||||
//
|
||||
(*
|
||||
// * @file
|
||||
// * @ingroup lavf_io
|
||||
// * Buffered I/O operations
|
||||
*)
|
||||
//
|
||||
// #include <stdint.h>
|
||||
//
|
||||
// #include "libavutil/common.h"
|
||||
// #include "libavutil/dict.h"
|
||||
// #include "libavutil/log.h"
|
||||
//
|
||||
// #include "libavformat/version.h"
|
||||
//
|
||||
//
|
||||
// #define AVIO_SEEKABLE_NORMAL 0x0001 /**< Seeking works like for a local file */
|
||||
//
|
||||
(*
|
||||
* Callback for checking whether to abort blocking functions.
|
||||
* AVERROR_EXIT is returned in this case by the interrupted
|
||||
* function. During blocking operations, callback is called with
|
||||
* opaque as parameter. If the callback returns 1, the
|
||||
* blocking operation will be aborted.
|
||||
*
|
||||
* No members can be added to this struct without a major bump, if
|
||||
* new elements have been added after this struct in AVFormatContext
|
||||
* or AVIOContext.
|
||||
*)
|
||||
|
||||
Type
|
||||
TAVIOInterruptCB = record
|
||||
callback: function(param: Pointer): integer; cdecl;
|
||||
opaque: Pointer;
|
||||
end;
|
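The comment above describes the abort mechanism; for reference, a small sketch of a callback that matches this record, using a hypothetical global abort flag (illustrative only, not part of the removed unit):

```pascal
var
  GAbortRequested: Boolean = False; // hypothetical flag, set from the owner/UI thread

// Matches TAVIOInterruptCB.callback above; returning 1 makes the blocked
// libav* call fail with AVERROR_EXIT.
function InterruptCB(param: Pointer): Integer; cdecl;
begin
  if GAbortRequested then
    Result := 1
  else
    Result := 0;
end;

// Usage sketch: fill the record before opening the input.
//   cb.callback := InterruptCB;
//   cb.opaque   := nil;
```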
||||
|
||||
(*
|
||||
// * Bytestream IO Context.
|
||||
// * New fields can be added to the end with minor version bumps.
|
||||
// * Removal, reordering and changes to existing fields require a major
|
||||
// * version bump.
|
||||
// * sizeof(AVIOContext) must not be used outside libav*.
|
||||
// *
|
||||
// * @note None of the function pointers in AVIOContext should be called
|
||||
// * directly, they should only be set by the client application
|
||||
// * when implementing custom I/O. Normally these are set to the
|
||||
// * function pointers specified in avio_alloc_context()
|
||||
*)
|
||||
|
||||
pAVIOContext = ^TAVIOContext;
|
||||
|
||||
TAVIOContext = {packed} record
|
||||
(*
|
||||
* A class for private options.
|
||||
*
|
||||
* If this AVIOContext is created by avio_open2(), av_class is set and
|
||||
* passes the options down to protocols.
|
||||
*
|
||||
* If this AVIOContext is manually allocated, then av_class may be set by
|
||||
* the caller.
|
||||
*
|
||||
* warning -- this field can be NULL, be sure to not pass this AVIOContext
|
||||
* to any av_opt_* functions in that case.
|
||||
*)
|
||||
av_class: pAVClass;
|
||||
buffer: pByte; // **< Start of the buffer. */
|
||||
buffer_size: integer; // **< Maximum buffer size */
|
||||
buf_ptr: pByte; // **< Current position in the buffer */
|
||||
buf_end: pByte; // **< End of the data, may be less than
|
||||
// buffer+buffer_size if the read function returned
|
||||
// less data than requested, e.g. for streams where
|
||||
// no more data has been received yet. */
|
||||
opaque: Pointer; // **< A private pointer, passed to the read/write/seek/...
|
||||
// functions. */
|
||||
// int (*read_packet)(void *opaque, uint8_t *buf, int buf_size);
|
||||
read_packet: function(opaque: Pointer; buf: pByte; buf_size: integer): integer; cdecl;
|
||||
// int (*write_packet)(void *opaque, uint8_t *buf, int buf_size);
|
||||
write_packet: function(opaque: Pointer; buf: pByte; buf_size: integer): integer; cdecl;
|
||||
// int64_t (*seek)(void *opaque, int64_t offset, int whence);
|
||||
seek: function(opaque: Pointer; offset: int64_t; whence: integer): int64_t; cdecl;
|
||||
pos: int64_t; // **< position in the file of the current buffer */
|
||||
must_flush: integer; // **< true if the next seek should flush */
|
||||
eof_reached: integer; // **< true if eof reached */
|
||||
write_flag: integer; // **< true if open for writing */
|
||||
max_packet_size: integer;
|
||||
checksum: culong;
|
||||
checksum_ptr: pByte;
|
||||
// unsigned long (*update_checksum)(unsigned long checksum, const uint8_t *buf, unsigned int size);
|
||||
update_checksum: function(checksum: culong; const buf: pByte; size: Cardinal): Cardinal; cdecl;
|
||||
error: integer; // **< contains the error code or 0 if no error happened */
|
||||
(*
|
||||
* Pause or resume playback for network streaming protocols - e.g. MMS.
|
||||
*)
|
||||
// int (*read_pause)(void *opaque, int pause);
|
||||
read_pause: function(opaque: Pointer; pause: cint): cint; cdecl;
|
||||
(*
|
||||
* Seek to a given timestamp in stream with the specified stream_index.
|
||||
* Needed for some network streaming protocols which don't support seeking
|
||||
* to byte position.
|
||||
*)
|
||||
// int64_t (*read_seek)(void *opaque, int stream_index, int64_t timestamp, int flags);
|
||||
read_seek: function(opaque: Pointer; stream_index: cint; timestamp: cint64; flags: cint): cint64; cdecl;
|
||||
(*
|
||||
* A combination of AVIO_SEEKABLE_ flags or 0 when the stream is not seekable.
|
||||
*)
|
||||
seekable: integer;
|
||||
(*
|
||||
* max filesize, used to limit allocations
|
||||
* This field is internal to libavformat and access from outside is not allowed.
|
||||
*)
|
||||
maxsize: int64_t;
|
||||
(*
|
||||
* avio_read and avio_write should if possible be satisfied directly
|
||||
* instead of going through a buffer, and avio_seek will always
|
||||
* call the underlying seek function directly.
|
||||
*)
|
||||
direct: integer;
|
||||
(*
|
||||
* Bytes read statistic
|
||||
* This field is internal to libavformat and access from outside is not allowed.
|
||||
*)
|
||||
bytes_read: int64_t;
|
||||
(*
|
||||
* seek statistic
|
||||
* This field is internal to libavformat and access from outside is not allowed.
|
||||
*)
|
||||
seek_count: integer;
|
||||
(*
|
||||
* writeout statistic
|
||||
* This field is internal to libavformat and access from outside is not allowed.
|
||||
*)
|
||||
writeout_count: integer;
|
||||
end;
|
||||
|
||||
/// * unbuffered I/O */
|
||||
//
|
||||
(*
|
||||
// * Return AVIO_FLAG_* access flags corresponding to the access permissions
|
||||
// * of the resource in url, or a negative value corresponding to an
|
||||
// * AVERROR code in case of failure. The returned access flags are
|
||||
// * masked by the value in flags.
|
||||
// *
|
||||
// * @note This function is intrinsically unsafe, in the sense that the
|
||||
// * checked resource may change its existence or permission status from
|
||||
// * one call to another. Thus you should not trust the returned value,
|
||||
// * unless you are sure that no other processes are accessing the
|
||||
// * checked resource.
|
||||
*)
|
||||
// int avio_check(const char *url, int flags);
|
||||
//
|
||||
(*
|
||||
// * Allocate and initialize an AVIOContext for buffered I/O. It must be later
|
||||
// * freed with av_free().
|
||||
// *
|
||||
// * @param buffer Memory block for input/output operations via AVIOContext.
|
||||
// * The buffer must be allocated with av_malloc() and friends.
|
||||
// * @param buffer_size The buffer size is very important for performance.
|
||||
// * For protocols with fixed blocksize it should be set to this blocksize.
|
||||
// * For others a typical size is a cache page, e.g. 4kb.
|
||||
// * @param write_flag Set to 1 if the buffer should be writable, 0 otherwise.
|
||||
// * @param opaque An opaque pointer to user-specific data.
|
||||
// * @param read_packet A function for refilling the buffer, may be NULL.
|
||||
// * @param write_packet A function for writing the buffer contents, may be NULL.
|
||||
// * The function may not change the input buffers content.
|
||||
// * @param seek A function for seeking to specified byte position, may be NULL.
|
||||
// *
|
||||
// * @return Allocated AVIOContext or NULL on failure.
|
||||
*)
|
||||
// AVIOContext *avio_alloc_context(
|
||||
// unsigned char *buffer,
|
||||
// int buffer_size,
|
||||
// int write_flag,
|
||||
// void *opaque,
|
||||
// int (*read_packet)(void *opaque, uint8_t *buf, int buf_size),
|
||||
// int (*write_packet)(void *opaque, uint8_t *buf, int buf_size),
|
||||
// int64_t (*seek)(void *opaque, int64_t offset, int whence));
|
||||
//
|
||||
// void avio_w8(AVIOContext *s, int b);
|
||||
// void avio_write(AVIOContext *s, const unsigned char *buf, int size);
|
||||
// void avio_wl64(AVIOContext *s, uint64_t val);
|
||||
// void avio_wb64(AVIOContext *s, uint64_t val);
|
||||
// void avio_wl32(AVIOContext *s, unsigned int val);
|
||||
// void avio_wb32(AVIOContext *s, unsigned int val);
|
||||
// void avio_wl24(AVIOContext *s, unsigned int val);
|
||||
// void avio_wb24(AVIOContext *s, unsigned int val);
|
||||
// void avio_wl16(AVIOContext *s, unsigned int val);
|
||||
// void avio_wb16(AVIOContext *s, unsigned int val);
|
||||
//
|
||||
(*
|
||||
// * Write a NULL-terminated string.
|
||||
// * @return number of bytes written.
|
||||
*)
|
||||
// int avio_put_str(AVIOContext *s, const char *str);
|
||||
//
|
||||
(*
|
||||
// * Convert an UTF-8 string to UTF-16LE and write it.
|
||||
// * @return number of bytes written.
|
||||
*)
|
||||
// int avio_put_str16le(AVIOContext *s, const char *str);
|
||||
//
|
||||
(*
|
||||
// * Passing this as the "whence" parameter to a seek function causes it to
|
||||
// * return the filesize without seeking anywhere. Supporting this is optional.
|
||||
// * If it is not supported then the seek function will return <0.
|
||||
*)
|
||||
// #define AVSEEK_SIZE 0x10000
|
||||
//
|
||||
(*
|
||||
// * Oring this flag as into the "whence" parameter to a seek function causes it to
|
||||
// * seek by any means (like reopening and linear reading) or other normally unreasonable
|
||||
// * means that can be extremely slow.
|
||||
// * This may be ignored by the seek code.
|
||||
*)
|
||||
// #define AVSEEK_FORCE 0x20000
|
||||
//
|
||||
(*
|
||||
// * fseek() equivalent for AVIOContext.
|
||||
// * @return new position or AVERROR.
|
||||
*)
|
||||
// int64_t avio_seek(AVIOContext *s, int64_t offset, int whence);
|
||||
//
|
||||
(*
|
||||
// * Skip given number of bytes forward
|
||||
// * @return new position or AVERROR.
|
||||
*)
|
||||
// int64_t avio_skip(AVIOContext *s, int64_t offset);
|
||||
//
|
||||
(*
|
||||
// * ftell() equivalent for AVIOContext.
|
||||
// * @return position or AVERROR.
|
||||
*)
|
||||
// static av_always_inline int64_t avio_tell(AVIOContext *s)
|
||||
// {
|
||||
// return avio_seek(s, 0, SEEK_CUR);
|
||||
// }
|
||||
//
|
||||
(*
|
||||
// * Get the filesize.
|
||||
// * @return filesize or AVERROR
|
||||
*)
|
||||
// int64_t avio_size(AVIOContext *s);
|
||||
//
|
||||
(*
|
||||
// * feof() equivalent for AVIOContext.
|
||||
// * @return non zero if and only if end of file
|
||||
*)
|
||||
// int url_feof(AVIOContext *s);
|
||||
//
|
||||
(* @warning currently size is limited *)
|
||||
// int avio_printf(AVIOContext *s, const char *fmt, ...) av_printf_format(2, 3);
|
||||
//
|
||||
(*
|
||||
// * Force flushing of buffered data to the output s.
|
||||
// *
|
||||
// * Force the buffered data to be immediately written to the output,
|
||||
// * without to wait to fill the internal buffer.
|
||||
*)
|
||||
// void avio_flush(AVIOContext *s);
|
||||
//
|
||||
(*
|
||||
// * Read size bytes from AVIOContext into buf.
|
||||
// * @return number of bytes read or AVERROR
|
||||
*)
|
||||
// int avio_read(AVIOContext *s, unsigned char *buf, int size);
|
||||
//
|
||||
(*
|
||||
// * @name Functions for reading from AVIOContext
|
||||
// * @{
|
||||
// *
|
||||
// * @note return 0 if EOF, so you cannot use it if EOF handling is
|
||||
// * necessary
|
||||
*)
|
||||
// int avio_r8 (AVIOContext *s);
|
||||
// unsigned int avio_rl16(AVIOContext *s);
|
||||
// unsigned int avio_rl24(AVIOContext *s);
|
||||
// unsigned int avio_rl32(AVIOContext *s);
|
||||
// uint64_t avio_rl64(AVIOContext *s);
|
||||
// unsigned int avio_rb16(AVIOContext *s);
|
||||
// unsigned int avio_rb24(AVIOContext *s);
|
||||
// unsigned int avio_rb32(AVIOContext *s);
|
||||
// uint64_t avio_rb64(AVIOContext *s);
|
||||
(*
|
||||
// * @}
|
||||
*)
|
||||
//
|
||||
(*
|
||||
// * Read a string from pb into buf. The reading will terminate when either
|
||||
// * a NULL character was encountered, maxlen bytes have been read, or nothing
|
||||
// * more can be read from pb. The result is guaranteed to be NULL-terminated, it
|
||||
// * will be truncated if buf is too small.
|
||||
// * Note that the string is not interpreted or validated in any way, it
|
||||
// * might get truncated in the middle of a sequence for multi-byte encodings.
|
||||
// *
|
||||
// * @return number of bytes read (is always <= maxlen).
|
||||
// * If reading ends on EOF or error, the return value will be one more than
|
||||
// * bytes actually read.
|
||||
*)
|
||||
// int avio_get_str(AVIOContext *pb, int maxlen, char *buf, int buflen);
|
||||
//
|
||||
(*
|
||||
// * Read a UTF-16 string from pb and convert it to UTF-8.
|
||||
// * The reading will terminate when either a null or invalid character was
|
||||
// * encountered or maxlen bytes have been read.
|
||||
// * @return number of bytes read (is always <= maxlen)
|
||||
*)
|
||||
// int avio_get_str16le(AVIOContext *pb, int maxlen, char *buf, int buflen);
|
||||
// int avio_get_str16be(AVIOContext *pb, int maxlen, char *buf, int buflen);
|
||||
//
|
||||
//
|
||||
(*
|
||||
// * @name URL open modes
|
||||
// * The flags argument to avio_open must be one of the following
|
||||
// * constants, optionally ORed with other flags.
|
||||
// * @{
|
||||
*)
|
||||
const
|
||||
AVIO_FLAG_READ = 1; // read-only
|
||||
AVIO_FLAG_WRITE = 2; // write-only
|
||||
AVIO_FLAG_READ_WRITE = AVIO_FLAG_READ or AVIO_FLAG_WRITE; // read-write pseudo flag
|
||||
|
||||
(*
|
||||
// * @}
|
||||
*)
|
||||
//
|
||||
(*
|
||||
* Use non-blocking mode.
|
||||
* If this flag is set, operations on the context will return
|
||||
* AVERROR(EAGAIN) if they can not be performed immediately.
|
||||
* If this flag is not set, operations on the context will never return
|
||||
* AVERROR(EAGAIN).
|
||||
* Note that this flag does not affect the opening/connecting of the
|
||||
* context. Connecting a protocol will always block if necessary (e.g. on
|
||||
* network protocols) but never hang (e.g. on busy devices).
|
||||
* Warning: non-blocking protocols is work-in-progress; this flag may be
|
||||
* silently ignored.
|
||||
*)
|
||||
const
|
||||
AVIO_FLAG_NONBLOCK = 8;
|
||||
|
||||
(*
|
||||
* Use direct mode.
|
||||
* avio_read and avio_write should if possible be satisfied directly
|
||||
* instead of going through a buffer, and avio_seek will always
|
||||
* call the underlying seek function directly.
|
||||
*)
|
||||
AVIO_FLAG_DIRECT = $8000;
|
||||
|
||||
(*
|
||||
// * Create and initialize a AVIOContext for accessing the
|
||||
// * resource indicated by url.
|
||||
// * @note When the resource indicated by url has been opened in
|
||||
// * read+write mode, the AVIOContext can be used only for writing.
|
||||
// *
|
||||
// * @param s Used to return the pointer to the created AVIOContext.
|
||||
// * In case of failure the pointed to value is set to NULL.
|
||||
// * @param flags flags which control how the resource indicated by url
|
||||
// * is to be opened
|
||||
// * @return >= 0 in case of success, a negative value corresponding to an
|
||||
// * AVERROR code in case of failure
|
||||
*)
|
||||
// int avio_open(AVIOContext **s, const char *url, int flags);
|
||||
function avio_open(Var s: pAVIOContext; const url: pAnsiChar; flags: integer): integer; cdecl;
|
||||
|
||||
(*
|
||||
// * Create and initialize a AVIOContext for accessing the
|
||||
// * resource indicated by url.
|
||||
// * @note When the resource indicated by url has been opened in
|
||||
// * read+write mode, the AVIOContext can be used only for writing.
|
||||
// *
|
||||
// * @param s Used to return the pointer to the created AVIOContext.
|
||||
// * In case of failure the pointed to value is set to NULL.
|
||||
// * @param flags flags which control how the resource indicated by url
|
||||
// * is to be opened
|
||||
// * @param int_cb an interrupt callback to be used at the protocols level
|
||||
// * @param options A dictionary filled with protocol-private options. On return
|
||||
// * this parameter will be destroyed and replaced with a dict containing options
|
||||
// * that were not found. May be NULL.
|
||||
// * @return >= 0 in case of success, a negative value corresponding to an
|
||||
// * AVERROR code in case of failure
|
||||
*)
|
||||
// int avio_open2(AVIOContext **s, const char *url, int flags,
|
||||
// const AVIOInterruptCB *int_cb, AVDictionary **options);
|
||||
//
|
||||
(*
|
||||
// * Close the resource accessed by the AVIOContext s and free it.
|
||||
// * This function can only be used if s was opened by avio_open().
|
||||
// *
|
||||
// * The internal buffer is automatically flushed before closing the
|
||||
// * resource.
|
||||
// *
|
||||
// * @return 0 on success, an AVERROR < 0 on error.
|
||||
// * @see avio_closep
|
||||
*)
|
||||
// int avio_close(AVIOContext *s);
|
||||
function avio_close(s: pAVIOContext): integer;
|
||||
(*
|
||||
// * Close the resource accessed by the AVIOContext *s, free it
|
||||
// * and set the pointer pointing to it to NULL.
|
||||
// * This function can only be used if s was opened by avio_open().
|
||||
// *
|
||||
// * The internal buffer is automatically flushed before closing the
|
||||
// * resource.
|
||||
// *
|
||||
// * @return 0 on success, an AVERROR < 0 on error.
|
||||
// * @see avio_close
|
||||
*)
|
||||
// int avio_closep(AVIOContext **s);
|
||||
//
|
||||
//
|
||||
(*
|
||||
// * Open a write only memory stream.
|
||||
// *
|
||||
// * @param s new IO context
|
||||
// * @return zero if no error.
|
||||
*)
|
||||
// int avio_open_dyn_buf(AVIOContext **s);
|
||||
//
|
||||
(*
|
||||
// * Return the written size and a pointer to the buffer. The buffer
|
||||
// * must be freed with av_free().
|
||||
// * Padding of FF_INPUT_BUFFER_PADDING_SIZE is added to the buffer.
|
||||
// *
|
||||
// * @param s IO context
|
||||
// * @param pbuffer pointer to a byte buffer
|
||||
// * @return the length of the byte buffer
|
||||
*)
|
||||
// int avio_close_dyn_buf(AVIOContext *s, uint8_t **pbuffer);
|
||||
//
|
||||
(*
|
||||
// * Iterate through names of available protocols.
|
||||
// *
|
||||
// * @param opaque A private pointer representing current protocol.
|
||||
// * It must be a pointer to NULL on first iteration and will
|
||||
// * be updated by successive calls to avio_enum_protocols.
|
||||
// * @param output If set to 1, iterate over output protocols,
|
||||
// * otherwise over input protocols.
|
||||
// *
|
||||
// * @return A static string containing the name of current protocol or NULL
|
||||
*)
|
||||
// const char *avio_enum_protocols(void **opaque, int output);
|
||||
//
|
||||
(*
|
||||
// * Pause and resume playing - only meaningful if using a network streaming
|
||||
// * protocol (e.g. MMS).
|
||||
// * @param pause 1 for pause, 0 for resume
|
||||
*)
|
||||
// int avio_pause(AVIOContext *h, int pause);
|
||||
//
|
||||
(*
|
||||
// * Seek to a given timestamp relative to some component stream.
|
||||
// * Only meaningful if using a network streaming protocol (e.g. MMS.).
|
||||
// * @param stream_index The stream index that the timestamp is relative to.
|
||||
// * If stream_index is (-1) the timestamp should be in AV_TIME_BASE
|
||||
// * units from the beginning of the presentation.
|
||||
// * If a stream_index >= 0 is used and the protocol does not support
|
||||
// * seeking based on component streams, the call will fail.
|
||||
// * @param timestamp timestamp in AVStream.time_base units
|
||||
// * or if there is no stream specified then in AV_TIME_BASE units.
|
||||
// * @param flags Optional combination of AVSEEK_FLAG_BACKWARD, AVSEEK_FLAG_BYTE
|
||||
// * and AVSEEK_FLAG_ANY. The protocol may silently ignore
|
||||
// * AVSEEK_FLAG_BACKWARD and AVSEEK_FLAG_ANY, but AVSEEK_FLAG_BYTE will
|
||||
// * fail if used and not supported.
|
||||
// * @return >= 0 on success
|
||||
// * @see AVInputFormat::read_seek
|
||||
*)
|
||||
// int64_t avio_seek_time(AVIOContext *h, int stream_index,
|
||||
// int64_t timestamp, int flags);
|
||||
|
||||
implementation
|
||||
|
||||
uses ffm.lib;
|
||||
|
||||
function avio_open; external avformat_dll;
|
||||
function avio_close; external avformat_dll;
|
||||
|
||||
end.
|
@ -1,255 +0,0 @@
|
||||
unit ffm.avutil;
|
||||
|
||||
{$i ffmpeg.inc}
|
||||
|
||||
interface
|
||||
|
||||
uses
|
||||
ffm.rational, ffm.ctypes;
|
||||
|
||||
(*
|
||||
* copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
|
||||
*
|
||||
* This file is part of ffm.
|
||||
*
|
||||
* FFmpeg is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU Lesser General Public
|
||||
* License as published by the Free Software Foundation; either
|
||||
* version 2.1 of the License, or (at your option) any later version.
|
||||
*
|
||||
* FFmpeg is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public
|
||||
* License along with FFmpeg; if not, write to the Free Software
|
||||
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
*)
|
||||
|
||||
(*
|
||||
* @mainpage
|
||||
*
|
||||
* @section ffmpeg_intro Introduction
|
||||
*
|
||||
* This document describes the usage of the different libraries
|
||||
* provided by FFmpeg.
|
||||
*
|
||||
* @li @ref libavc "libavcodec" encoding/decoding library
|
||||
* @li @ref lavfi "libavfilter" graph-based frame editing library
|
||||
* @li @ref libavf "libavformat" I/O and muxing/demuxing library
|
||||
* @li @ref lavd "libavdevice" special devices muxing/demuxing library
|
||||
* @li @ref lavu "libavutil" common utility library
|
||||
* @li @ref lswr "libswresample" audio resampling, format conversion and mixing
|
||||
* @li @ref lpp "libpostproc" post processing library
|
||||
* @li @ref lsws "libswscale" color conversion and scaling library
|
||||
*
|
||||
* @section ffmpeg_versioning Versioning and compatibility
|
||||
*
|
||||
* Each of the FFmpeg libraries contains a version.h header, which defines a
|
||||
* major, minor and micro version number with the
|
||||
* <em>LIBRARYNAME_VERSION_{MAJOR,MINOR,MICRO}</em> macros. The major version
|
||||
* number is incremented with backward incompatible changes - e.g. removing
|
||||
* parts of the public API, reordering public struct members, etc. The minor
|
||||
* version number is incremented for backward compatible API changes or major
|
||||
* new features - e.g. adding a new public function or a new decoder. The micro
|
||||
* version number is incremented for smaller changes that a calling program
|
||||
* might still want to check for - e.g. changing behavior in a previously
|
||||
* unspecified situation.
|
||||
*
|
||||
* FFmpeg guarantees backward API and ABI compatibility for each library as long
|
||||
* as its major version number is unchanged. This means that no public symbols
|
||||
* will be removed or renamed. Types and names of the public struct members and
|
||||
* values of public macros and enums will remain the same (unless they were
|
||||
* explicitly declared as not part of the public API). Documented behavior will
|
||||
* not change.
|
||||
*
|
||||
* In other words, any correct program that works with a given FFmpeg snapshot
|
||||
* should work just as well without any changes with any later snapshot with the
|
||||
* same major versions. This applies to both rebuilding the program against new
|
||||
* FFmpeg versions or to replacing the dynamic FFmpeg libraries that a program
|
||||
* links against.
|
||||
*
|
||||
* However, new public symbols may be added and new members may be appended to
|
||||
* public structs whose size is not part of public ABI (most public structs in
|
||||
* FFmpeg). New macros and enum values may be added. Behavior in undocumented
|
||||
* situations may change slightly (and be documented). All those are accompanied
|
||||
* by an entry in doc/APIchanges and incrementing either the minor or micro
|
||||
* version number.
|
||||
*)
|
||||
|
||||
(*
|
||||
* Return the LIBAVUTIL_VERSION_INT constant.
|
||||
*)
|
||||
// unsigned avutil_version(void);
|
||||
//
|
||||
(*
|
||||
* Return the libavutil build-time configuration.
|
||||
*)
|
||||
// const char *avutil_configuration(void);
|
||||
//
|
||||
(*
|
||||
* Return the libavutil license.
|
||||
*)
|
||||
// const char *avutil_license(void);
|
||||
//
|
||||
(*
|
||||
* @addtogroup lavu_media Media Type
|
||||
* @brief Media Type
|
||||
*)
|
||||
Type
|
||||
TAVMediaType = ( //
|
||||
AVMEDIA_TYPE_UNKNOWN = -1,
|
||||
/// < Usually treated as AVMEDIA_TYPE_DATA
|
||||
AVMEDIA_TYPE_VIDEO, //
|
||||
AVMEDIA_TYPE_AUDIO, //
|
||||
AVMEDIA_TYPE_DATA, //
|
||||
/// < Opaque data information usually continuous
|
||||
AVMEDIA_TYPE_SUBTITLE, //
|
||||
AVMEDIA_TYPE_ATTACHMENT, //
|
||||
/// < Opaque data information usually sparse
|
||||
AVMEDIA_TYPE_NB);
|
||||
|
||||
//
|
||||
(*
|
||||
* Return a string describing the media_type enum, NULL if media_type
|
||||
* is unknown.
|
||||
*)
|
||||
// const char *av_get_media_type_string(enum AVMediaType media_type);
|
||||
//
|
||||
(*
|
||||
* @defgroup lavu_const Constants
|
||||
* @{
|
||||
*
|
||||
* @defgroup lavu_enc Encoding specific
|
||||
*
|
||||
* @note those definitions should move to avcodec
|
||||
* @{
|
||||
*)
|
||||
const
|
||||
FF_LAMBDA_SHIFT = 7;
|
||||
FF_LAMBDA_SCALE = 1 shl FF_LAMBDA_SHIFT;
|
||||
FF_QP2LAMBDA = 118;
|
||||
/// < factor to convert from H.263 QP to lambda
|
||||
FF_LAMBDA_MAX = (256 * 128 - 1);
|
||||
|
||||
FF_QUALITY_SCALE = FF_LAMBDA_SCALE; // FIXME maybe remove
|
||||
|
||||
(*
|
||||
// * @}
|
||||
// * @defgroup lavu_time Timestamp specific
|
||||
// *
|
||||
// * FFmpeg internal timebase and timestamp definitions
|
||||
// *
|
||||
// * @{
|
||||
*)
|
||||
|
||||
(*
|
||||
* @brief Undefined timestamp value
|
||||
*
|
||||
* Usually reported by demuxers that work on containers that do not provide
|
||||
* either pts or dts.
|
||||
*)
|
||||
AV_NOPTS_VALUE = $8000000000000000;
|
||||
|
||||
(*
|
||||
* Internal time base represented as integer
|
||||
*)
|
||||
AV_TIME_BASE = 1000000;
|
||||
|
||||
(*
|
||||
* Internal time base represented as fractional value
|
||||
*)
|
||||
AV_TIME_BASE_Q: TAVRational = (num: 1; den: AV_TIME_BASE);
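(*
 * Usage sketch (not part of the original header): converting a timestamp given
 * in AV_TIME_BASE units (microseconds) into seconds. DurationUs is a
 * hypothetical int64 value, e.g. AVFormatContext.duration.
 *
 *   DurationSec := DurationUs / AV_TIME_BASE;     // floating-point seconds
 *   WholeSec    := DurationUs div AV_TIME_BASE;   // whole seconds
 *   RemainderUs := DurationUs mod AV_TIME_BASE;   // leftover microseconds
 *)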
|
||||
|
||||
//
|
||||
(*
|
||||
// * @}
|
||||
// * @}
|
||||
// * @defgroup lavu_picture Image related
|
||||
// *
|
||||
// * AVPicture types, pixel formats and basic image planes manipulation.
|
||||
// *
|
||||
// * @{
|
||||
*)
|
||||
Type
|
||||
pAVPictureType = ^TAVPictureType;
|
||||
TAVPictureType = ( //
|
||||
AV_PICTURE_TYPE_NONE = 0,
|
||||
/// < Undefined
|
||||
AV_PICTURE_TYPE_I,
|
||||
/// < Intra
|
||||
AV_PICTURE_TYPE_P,
|
||||
/// < Predicted
|
||||
AV_PICTURE_TYPE_B,
|
||||
/// < Bi-dir predicted
|
||||
AV_PICTURE_TYPE_S,
|
||||
/// < S(GMC)-VOP MPEG4
|
||||
AV_PICTURE_TYPE_SI,
|
||||
/// < Switching Intra
|
||||
AV_PICTURE_TYPE_SP,
|
||||
/// < Switching Predicted
|
||||
AV_PICTURE_TYPE_BI
|
||||
/// < BI type
|
||||
);
|
||||
(*
|
||||
* Return a single letter to describe the given picture type
|
||||
* pict_type.
|
||||
*
|
||||
* @param[in] pict_type the picture type @return a single character
|
||||
* representing the picture type, '?' if pict_type is unknown
|
||||
*)
|
||||
// char av_get_picture_type_char(enum AVPictureType pict_type);
|
||||
|
||||
// #include "common.h"
|
||||
// #include "error.h"
|
||||
// #include "version.h"
|
||||
// #include "mathematics.h"
|
||||
// #include "rational.h"
|
||||
// #include "intfloat_readwrite.h"
|
||||
// #include "log.h"
|
||||
// #include "pixfmt.h"
|
||||
//
|
||||
(*
|
||||
// * Return x default pointer in case p is NULL.
|
||||
*)
|
||||
// static inline void *av_x_if_null(const void *p, const void *x)
|
||||
// {
|
||||
// return (void *)(intptr_t)(p ? p : x);
|
||||
// }
|
||||
//
|
||||
(*
|
||||
* Compute the length of an integer list.
|
||||
*
|
||||
* @param elsize size in bytes of each list element (only 1, 2, 4 or 8)
|
||||
* @param term list terminator (usually 0 or -1)
|
||||
* @param list pointer to the list
|
||||
* @return length of the list, in elements, not counting the terminator
|
||||
*)
|
||||
// unsigned av_int_list_length_for_size(unsigned elsize,
|
||||
// const void *list, uint64_t term) av_pure;
|
||||
function av_int_list_length_for_size(elsize: Cardinal; const list: Pointer; term: uint64_t): Cardinal; cdecl;
|
||||
(*
|
||||
// * Compute the length of an integer list.
|
||||
// *
|
||||
// * @param term list terminator (usually 0 or -1)
|
||||
// * @param list pointer to the list
|
||||
// * @return length of the list, in elements, not counting the terminator
|
||||
*)
|
||||
// #define av_int_list_length(list, term) \
|
||||
// av_int_list_length_for_size(sizeof(*(list)), list, term)
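(*
 * Usage sketch for the binding declared above (assumption: the list really is
 * terminated by the given value). Fmts is a hypothetical local array terminated
 * by 0; the terminator is not counted in the result.
 *
 *   var
 *     Fmts: array [0 .. 3] of Integer;
 *     Count: Cardinal;
 *   begin
 *     Fmts[0] := 1; Fmts[1] := 2; Fmts[2] := 5; Fmts[3] := 0;  // 0-terminated
 *     Count := av_int_list_length_for_size(SizeOf(Integer), @Fmts[0], 0);
 *     // Count = 3
 *   end;
 *)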
|
||||
//
|
||||
(*
|
||||
// * Open a file using a UTF-8 filename.
|
||||
// * The API of this function matches POSIX fopen(), errors are returned through
|
||||
// * errno.
|
||||
*)
|
||||
// FILE *av_fopen_utf8(const char *path, const char *mode);
|
||||
|
||||
implementation
|
||||
|
||||
uses ffm.lib;
|
||||
|
||||
function av_int_list_length_for_size; external avutil_dll;
|
||||
|
||||
end.
|
@ -1,285 +0,0 @@
|
||||
unit ffm.buffer;
|
||||
|
||||
{$i ffmpeg.inc}
|
||||
|
||||
interface
|
||||
|
||||
/// *
|
||||
// * This file is part of FFmpeg.
|
||||
// *
|
||||
// * FFmpeg is free software; you can redistribute it and/or
|
||||
// * modify it under the terms of the GNU Lesser General Public
|
||||
// * License as published by the Free Software Foundation; either
|
||||
// * version 2.1 of the License, or (at your option) any later version.
|
||||
// *
|
||||
// * FFmpeg is distributed in the hope that it will be useful,
|
||||
// * but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
// * Lesser General Public License for more details.
|
||||
// *
|
||||
// * You should have received a copy of the GNU Lesser General Public
|
||||
// * License along with FFmpeg; if not, write to the Free Software
|
||||
// * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
// */
|
||||
//
|
||||
/// **
|
||||
// * @file
|
||||
// * @ingroup lavu_buffer
|
||||
// * refcounted data buffer API
|
||||
// */
|
||||
//
|
||||
// #ifndef AVUTIL_BUFFER_H
|
||||
// #define AVUTIL_BUFFER_H
|
||||
//
|
||||
// #include <stdint.h>
|
||||
//
|
||||
/// **
|
||||
// * @defgroup lavu_buffer AVBuffer
|
||||
// * @ingroup lavu_data
|
||||
// *
|
||||
// * @{
|
||||
// * AVBuffer is an API for reference-counted data buffers.
|
||||
// *
|
||||
// * There are two core objects in this API -- AVBuffer and AVBufferRef. AVBuffer
|
||||
// * represents the data buffer itself; it is opaque and not meant to be accessed
|
||||
// * by the caller directly, but only through AVBufferRef. However, the caller may
|
||||
// * e.g. compare two AVBuffer pointers to check whether two different references
|
||||
// * are describing the same data buffer. AVBufferRef represents a single
|
||||
// * reference to an AVBuffer and it is the object that may be manipulated by the
|
||||
// * caller directly.
|
||||
// *
|
||||
// * There are two functions provided for creating a new AVBuffer with a single
|
||||
// * reference -- av_buffer_alloc() to just allocate a new buffer, and
|
||||
// * av_buffer_create() to wrap an existing array in an AVBuffer. From an existing
|
||||
// * reference, additional references may be created with av_buffer_ref().
|
||||
// * Use av_buffer_unref() to free a reference (this will automatically free the
|
||||
// * data once all the references are freed).
|
||||
// *
|
||||
// * The convention throughout this API and the rest of FFmpeg is such that the
|
||||
// * buffer is considered writable if there exists only one reference to it (and
|
||||
// * it has not been marked as read-only). The av_buffer_is_writable() function is
|
||||
// * provided to check whether this is true and av_buffer_make_writable() will
|
||||
// * automatically create a new writable buffer when necessary.
|
||||
// * Of course nothing prevents the calling code from violating this convention,
|
||||
// * however that is safe only when all the existing references are under its
|
||||
// * control.
|
||||
// *
|
||||
// * @note Referencing and unreferencing the buffers is thread-safe and thus
|
||||
// * may be done from multiple threads simultaneously without any need for
|
||||
// * additional locking.
|
||||
// *
|
||||
// * @note Two different references to the same buffer can point to different
|
||||
// * parts of the buffer (i.e. their AVBufferRef.data will not be equal).
|
||||
// */
|
||||
//
|
||||
/// **
|
||||
// * A reference counted buffer type. It is opaque and is meant to be used through
|
||||
// * references (AVBufferRef).
|
||||
// */
|
||||
Type
|
||||
pAVBuffer = Pointer;
|
||||
|
||||
(*
|
||||
* A reference to a data buffer.
|
||||
*
|
||||
* The size of this struct is not a part of the public ABI and it is not meant
|
||||
* to be allocated directly.
|
||||
*)
|
||||
pAVBufferRef = ^TAVBufferRef;
|
||||
ppAVBufferRef = ^pAVBufferRef;
|
||||
|
||||
TAVBufferRef = {packed} record
|
||||
buffer: pAVBuffer;
|
||||
(*
|
||||
* The data buffer. It is considered writable if and only if
|
||||
* this is the only reference to the buffer, in which case
|
||||
* av_buffer_is_writable() returns 1.
|
||||
*)
|
||||
data: pByte;
|
||||
(*
|
||||
* Size of data in bytes.
|
||||
*)
|
||||
size: Integer;
|
||||
end;
|
||||
|
||||
/// **
|
||||
// * Allocate an AVBuffer of the given size using av_malloc().
|
||||
// *
|
||||
// * @return an AVBufferRef of given size or NULL when out of memory
|
||||
// */
|
||||
// AVBufferRef *av_buffer_alloc(int size);
|
||||
//
|
||||
/// **
|
||||
// * Same as av_buffer_alloc(), except the returned buffer will be initialized
|
||||
// * to zero.
|
||||
// */
|
||||
// AVBufferRef *av_buffer_allocz(int size);
|
||||
//
|
||||
/// **
|
||||
// * Always treat the buffer as read-only, even when it has only one
|
||||
// * reference.
|
||||
// */
|
||||
// #define AV_BUFFER_FLAG_READONLY (1 << 0)
|
||||
//
|
||||
/// **
|
||||
// * Create an AVBuffer from an existing array.
|
||||
// *
|
||||
// * If this function is successful, data is owned by the AVBuffer. The caller may
|
||||
// * only access data through the returned AVBufferRef and references derived from
|
||||
// * it.
|
||||
// * If this function fails, data is left untouched.
|
||||
// * @param data data array
|
||||
// * @param size size of data in bytes
|
||||
// * @param free a callback for freeing this buffer's data
|
||||
// * @param opaque parameter to be got for processing or passed to free
|
||||
// * @param flags a combination of AV_BUFFER_FLAG_*
|
||||
// *
|
||||
// * @return an AVBufferRef referring to data on success, NULL on failure.
|
||||
// */
|
||||
// AVBufferRef *av_buffer_create(uint8_t *data, int size,
|
||||
// void (*free)(void *opaque, uint8_t *data),
|
||||
// void *opaque, int flags);
|
||||
//
|
||||
/// **
|
||||
// * Default free callback, which calls av_free() on the buffer data.
|
||||
// * This function is meant to be passed to av_buffer_create(), not called
|
||||
// * directly.
|
||||
// */
|
||||
// void av_buffer_default_free(void *opaque, uint8_t *data);
|
||||
//
|
||||
/// **
|
||||
// * Create a new reference to an AVBuffer.
|
||||
// *
|
||||
// * @return a new AVBufferRef referring to the same AVBuffer as buf or NULL on
|
||||
// * failure.
|
||||
// */
|
||||
// AVBufferRef *av_buffer_ref(AVBufferRef *buf);
|
||||
//
|
||||
/// **
|
||||
// * Free a given reference and automatically free the buffer if there are no more
|
||||
// * references to it.
|
||||
// *
|
||||
// * @param buf the reference to be freed. The pointer is set to NULL on return.
|
||||
// */
|
||||
// void av_buffer_unref(AVBufferRef **buf);
|
||||
//
|
||||
/// **
|
||||
// * @return 1 if the caller may write to the data referred to by buf (which is
|
||||
// * true if and only if buf is the only reference to the underlying AVBuffer).
|
||||
// * Return 0 otherwise.
|
||||
// * A positive answer is valid until av_buffer_ref() is called on buf.
|
||||
// */
|
||||
// int av_buffer_is_writable(const AVBufferRef *buf);
|
||||
//
|
||||
/// **
|
||||
// * @return the opaque parameter set by av_buffer_create.
|
||||
// */
|
||||
// void *av_buffer_get_opaque(const AVBufferRef *buf);
|
||||
//
|
||||
// int av_buffer_get_ref_count(const AVBufferRef *buf);
|
||||
//
|
||||
/// **
|
||||
// * Create a writable reference from a given buffer reference, avoiding data copy
|
||||
// * if possible.
|
||||
// *
|
||||
// * @param buf buffer reference to make writable. On success, buf is either left
|
||||
// * untouched, or it is unreferenced and a new writable AVBufferRef is
|
||||
// * written in its place. On failure, buf is left untouched.
|
||||
// * @return 0 on success, a negative AVERROR on failure.
|
||||
// */
|
||||
// int av_buffer_make_writable(AVBufferRef **buf);
|
||||
//
|
||||
/// **
|
||||
// * Reallocate a given buffer.
|
||||
// *
|
||||
// * @param buf a buffer reference to reallocate. On success, buf will be
|
||||
// * unreferenced and a new reference with the required size will be
|
||||
// * written in its place. On failure buf will be left untouched. *buf
|
||||
// * may be NULL, then a new buffer is allocated.
|
||||
// * @param size required new buffer size.
|
||||
// * @return 0 on success, a negative AVERROR on failure.
|
||||
// *
|
||||
// * @note the buffer is actually reallocated with av_realloc() only if it was
|
||||
// * initially allocated through av_buffer_realloc(NULL) and there is only one
|
||||
// * reference to it (i.e. the one passed to this function). In all other cases
|
||||
// * a new buffer is allocated and the data is copied.
|
||||
// */
|
||||
// int av_buffer_realloc(AVBufferRef **buf, int size);
|
||||
//
|
||||
/// **
|
||||
// * @}
|
||||
// */
|
||||
//
|
||||
/// **
|
||||
// * @defgroup lavu_bufferpool AVBufferPool
|
||||
// * @ingroup lavu_data
|
||||
// *
|
||||
// * @{
|
||||
// * AVBufferPool is an API for a lock-free thread-safe pool of AVBuffers.
|
||||
// *
|
||||
// * Frequently allocating and freeing large buffers may be slow. AVBufferPool is
|
||||
// * meant to solve this in cases when the caller needs a set of buffers of the
|
||||
// * same size (the most obvious use case being buffers for raw video or audio
|
||||
// * frames).
|
||||
// *
|
||||
// * At the beginning, the user must call av_buffer_pool_init() to create the
|
||||
// * buffer pool. Then whenever a buffer is needed, call av_buffer_pool_get() to
|
||||
// * get a reference to a new buffer, similar to av_buffer_alloc(). This new
|
||||
// * reference works in all aspects the same way as the one created by
|
||||
// * av_buffer_alloc(). However, when the last reference to this buffer is
|
||||
// * unreferenced, it is returned to the pool instead of being freed and will be
|
||||
// * reused for subsequent av_buffer_pool_get() calls.
|
||||
// *
|
||||
// * When the caller is done with the pool and no longer needs to allocate any new
|
||||
// * buffers, av_buffer_pool_uninit() must be called to mark the pool as freeable.
|
||||
// * Once all the buffers are released, it will automatically be freed.
|
||||
// *
|
||||
// * Allocating and releasing buffers with this API is thread-safe as long as
|
||||
// * either the default alloc callback is used, or the user-supplied one is
|
||||
// * thread-safe.
|
||||
// */
|
||||
//
|
||||
/// **
|
||||
// * The buffer pool. This structure is opaque and not meant to be accessed
|
||||
// * directly. It is allocated with av_buffer_pool_init() and freed with
|
||||
// * av_buffer_pool_uninit().
|
||||
// */
|
||||
// typedef struct AVBufferPool AVBufferPool;
|
||||
//
|
||||
/// **
|
||||
// * Allocate and initialize a buffer pool.
|
||||
// *
|
||||
// * @param size size of each buffer in this pool
|
||||
// * @param alloc a function that will be used to allocate new buffers when the
|
||||
// * pool is empty. May be NULL, then the default allocator will be used
|
||||
// * (av_buffer_alloc()).
|
||||
// * @return newly created buffer pool on success, NULL on error.
|
||||
// */
|
||||
// AVBufferPool *av_buffer_pool_init(int size, AVBufferRef* (*alloc)(int size));
|
||||
//
|
||||
/// **
|
||||
// * Mark the pool as being available for freeing. It will actually be freed only
|
||||
// * once all the allocated buffers associated with the pool are released. Thus it
|
||||
// * is safe to call this function while some of the allocated buffers are still
|
||||
// * in use.
|
||||
// *
|
||||
// * @param pool pointer to the pool to be freed. It will be set to NULL.
|
||||
// * @see av_buffer_pool_can_uninit()
|
||||
// */
|
||||
// void av_buffer_pool_uninit(AVBufferPool **pool);
|
||||
//
|
||||
/// **
|
||||
// * Allocate a new AVBuffer, reusing an old buffer from the pool when available.
|
||||
// * This function may be called simultaneously from multiple threads.
|
||||
// *
|
||||
// * @return a reference to the new buffer on success, NULL on error.
|
||||
// */
|
||||
// AVBufferRef *av_buffer_pool_get(AVBufferPool *pool);
|
||||
//
|
||||
/// **
|
||||
// * @}
|
||||
// */
|
||||
|
||||
implementation
|
||||
|
||||
end.
|
@ -1,199 +0,0 @@
|
||||
(*
|
||||
* This file is part of FFmpeg.
|
||||
*
|
||||
* FFmpeg is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU Lesser General Public
|
||||
* License as published by the Free Software Foundation; either
|
||||
* version 2.1 of the License, or (at your option) any later version.
|
||||
*
|
||||
* FFmpeg is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public
|
||||
* License along with FFmpeg; if not, write to the Free Software
|
||||
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
*)
|
||||
unit ffm.buffersink;
|
||||
|
||||
{$i ffmpeg.inc}
|
||||
|
||||
interface
|
||||
|
||||
uses
|
||||
ffm.pixfmt, ffm.avfilter, ffm.frame;
|
||||
|
||||
// #if FF_API_AVFILTERBUFFER
|
||||
// (*
|
||||
// * Get an audio/video buffer data from buffer_sink and put it in bufref.
|
||||
// *
|
||||
// * This function works with both audio and video buffer sinks.
|
||||
// *
|
||||
// * @param buffer_sink pointer to a buffersink or abuffersink context
|
||||
// * @param flags a combination of AV_BUFFERSINK_FLAG_* flags
|
||||
// * @return >= 0 in case of success, a negative AVERROR code in case of
|
||||
// * failure
|
||||
// *)
|
||||
// attribute_deprecated
|
||||
// int av_buffersink_get_buffer_ref(AVFilterContext *buffer_sink,
|
||||
// AVFilterBufferRef **bufref, int flags);
|
||||
//
|
||||
// (*
|
||||
// * Get the number of immediately available frames.
|
||||
// *)
|
||||
// attribute_deprecated
|
||||
// int av_buffersink_poll_frame(AVFilterContext *ctx);
|
||||
//
|
||||
// (*
|
||||
// * Get a buffer with filtered data from sink and put it in buf.
|
||||
// *
|
||||
// * @param ctx pointer to a context of a buffersink or abuffersink AVFilter.
|
||||
// * @param buf pointer to the buffer will be written here if buf is non-NULL. buf
|
||||
// * must be freed by the caller using avfilter_unref_buffer().
|
||||
// * Buf may also be NULL to query whether a buffer is ready to be
|
||||
// * output.
|
||||
// *
|
||||
// * @return >= 0 in case of success, a negative AVERROR code in case of
|
||||
// * failure.
|
||||
// *)
|
||||
// attribute_deprecated
|
||||
// int av_buffersink_read(AVFilterContext *ctx, AVFilterBufferRef **buf);
|
||||
//
|
||||
// (*
|
||||
// * Same as av_buffersink_read, but with the ability to specify the number of
|
||||
// * samples read. This function is less efficient than av_buffersink_read(),
|
||||
// * because it copies the data around.
|
||||
// *
|
||||
// * @param ctx pointer to a context of the abuffersink AVFilter.
|
||||
// * @param buf pointer to the buffer will be written here if buf is non-NULL. buf
|
||||
// * must be freed by the caller using avfilter_unref_buffer(). buf
|
||||
// * will contain exactly nb_samples audio samples, except at the end
|
||||
// * of stream, when it can contain less than nb_samples.
|
||||
// * Buf may also be NULL to query whether a buffer is ready to be
|
||||
// * output.
|
||||
// *
|
||||
// * @warning do not mix this function with av_buffersink_read(). Use only one or
|
||||
// * the other with a single sink, not both.
|
||||
// *)
|
||||
// attribute_deprecated
|
||||
// int av_buffersink_read_samples(AVFilterContext *ctx, AVFilterBufferRef **buf,
|
||||
// int nb_samples);
|
||||
// #endif
|
||||
//
|
||||
// (*
|
||||
// * Get a frame with filtered data from sink and put it in frame.
|
||||
// *
|
||||
// * @param ctx pointer to a buffersink or abuffersink filter context.
|
||||
// * @param frame pointer to an allocated frame that will be filled with data.
|
||||
// * The data must be freed using av_frame_unref() / av_frame_free()
|
||||
// * @param flags a combination of AV_BUFFERSINK_FLAG_* flags
|
||||
// *
|
||||
// * @return >= 0 in for success, a negative AVERROR code for failure.
|
||||
// *)
|
||||
// int av_buffersink_get_frame_flags(AVFilterContext *ctx, AVFrame *frame, int flags);
|
||||
function av_buffersink_get_frame_flags(ctx: pAVFilterContext; frame: pAVFrame; flags: Integer): Integer; cdecl;
|
||||
|
||||
//
|
||||
// (*
|
||||
// * Tell av_buffersink_get_buffer_ref() to read video/samples buffer
|
||||
// * reference, but not remove it from the buffer. This is useful if you
|
||||
// * need only to read a video/samples buffer without consuming it.
|
||||
// *)
|
||||
// #define AV_BUFFERSINK_FLAG_PEEK 1
|
||||
//
|
||||
// (*
|
||||
// * Tell av_buffersink_get_buffer_ref() not to request a frame from its input.
|
||||
// * If a frame is already buffered, it is read (and removed from the buffer),
|
||||
// * but if no frame is present, return AVERROR(EAGAIN).
|
||||
// *)
|
||||
// #define AV_BUFFERSINK_FLAG_NO_REQUEST 2
|
||||
Type
|
||||
(*
|
||||
* Struct to use for initializing a buffersink context.
|
||||
*)
|
||||
pAVBufferSinkParams = ^TAVBufferSinkParams;
|
||||
|
||||
TAVBufferSinkParams = {packed} record
|
||||
pixel_fmts: pAVPixelFormat;
|
||||
/// < list of allowed pixel formats, terminated by AV_PIX_FMT_NONE
|
||||
end;
|
||||
|
||||
(*
|
||||
* Create an AVBufferSinkParams structure.
|
||||
*
|
||||
* Must be freed with av_free().
|
||||
*)
|
||||
// AVBufferSinkParams *av_buffersink_params_alloc(void);
|
||||
function av_buffersink_params_alloc(): pAVBufferSinkParams; cdecl;
|
||||
|
||||
// (*
|
||||
// * Struct to use for initializing an abuffersink context.
|
||||
// *)
|
||||
// typedef struct {
|
||||
// const enum AVSampleFormat *sample_fmts; ///< list of allowed sample formats, terminated by AV_SAMPLE_FMT_NONE
|
||||
// const int64_t *channel_layouts; ///< list of allowed channel layouts, terminated by -1
|
||||
// const int *channel_counts; ///< list of allowed channel counts, terminated by -1
|
||||
// int all_channel_counts; ///< if not 0, accept any channel count or layout
|
||||
// int *sample_rates; ///< list of allowed sample rates, terminated by -1
|
||||
// } AVABufferSinkParams;
|
||||
//
|
||||
// (*
|
||||
// * Create an AVABufferSinkParams structure.
|
||||
// *
|
||||
// * Must be freed with av_free().
|
||||
// *)
|
||||
// AVABufferSinkParams *av_abuffersink_params_alloc(void);
|
||||
//
|
||||
// (*
|
||||
// * Set the frame size for an audio buffer sink.
|
||||
// *
|
||||
// * All calls to av_buffersink_get_buffer_ref will return a buffer with
|
||||
// * exactly the specified number of samples, or AVERROR(EAGAIN) if there is
|
||||
// * not enough. The last buffer at EOF will be padded with 0.
|
||||
// *)
|
||||
// void av_buffersink_set_frame_size(AVFilterContext *ctx, unsigned frame_size);
|
||||
//
|
||||
// (*
|
||||
// * Get the frame rate of the input.
|
||||
// *)
|
||||
// AVRational av_buffersink_get_frame_rate(AVFilterContext *ctx);
|
||||
//
|
||||
// (*
|
||||
// * Get a frame with filtered data from sink and put it in frame.
|
||||
// *
|
||||
// * @param ctx pointer to a context of a buffersink or abuffersink AVFilter.
|
||||
// * @param frame pointer to an allocated frame that will be filled with data.
|
||||
// * The data must be freed using av_frame_unref() / av_frame_free()
|
||||
// *
|
||||
// * @return >= 0 in case of success, a negative AVERROR code in case of
|
||||
// * failure.
|
||||
// *)
|
||||
// int av_buffersink_get_frame(AVFilterContext *ctx, AVFrame *frame);
|
||||
function av_buffersink_get_frame(ctx:pAVFilterContext; frame:pAVFrame):integer;cdecl;
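(*
 * Usage sketch: draining a buffersink inside a filtering loop. BuffersinkCtx and
 * Frame are assumed to have been created elsewhere (see ffm.avfilter and
 * ffm.frame); AVERROR(EAGAIN) / AVERROR_EOF handling is omitted for brevity.
 *
 *   while av_buffersink_get_frame(BuffersinkCtx, Frame) >= 0 do
 *   begin
 *     // ... consume Frame ...
 *     av_frame_unref(Frame);   // assumed to be bound in ffm.frame
 *   end;
 *)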
|
||||
//
|
||||
// (*
|
||||
// * Same as av_buffersink_get_frame(), but with the ability to specify the number
|
||||
// * of samples read. This function is less efficient than
|
||||
// * av_buffersink_get_frame(), because it copies the data around.
|
||||
// *
|
||||
// * @param ctx pointer to a context of the abuffersink AVFilter.
|
||||
// * @param frame pointer to an allocated frame that will be filled with data.
|
||||
// * The data must be freed using av_frame_unref() / av_frame_free()
|
||||
// * frame will contain exactly nb_samples audio samples, except at
|
||||
// * the end of stream, when it can contain less than nb_samples.
|
||||
// *
|
||||
// * @warning do not mix this function with av_buffersink_get_frame(). Use only one or
|
||||
// * the other with a single sink, not both.
|
||||
// *)
|
||||
// int av_buffersink_get_samples(AVFilterContext *ctx, AVFrame *frame, int nb_samples);
|
||||
|
||||
implementation
|
||||
|
||||
uses ffm.lib;
|
||||
|
||||
function av_buffersink_params_alloc; external avfilter_dll;
|
||||
function av_buffersink_get_frame_flags; external avfilter_dll;
|
||||
function av_buffersink_get_frame; external avfilter_dll;
|
||||
|
||||
end.
|
@ -1,148 +0,0 @@
|
||||
(*
|
||||
*
|
||||
* This file is part of FFmpeg.
|
||||
*
|
||||
* FFmpeg is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU Lesser General Public
|
||||
* License as published by the Free Software Foundation; either
|
||||
* version 2.1 of the License, or (at your option) any later version.
|
||||
*
|
||||
* FFmpeg is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public
|
||||
* License along with FFmpeg; if not, write to the Free Software
|
||||
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
*)
|
||||
|
||||
unit ffm.buffersrc;
|
||||
|
||||
{$i ffmpeg.inc}
|
||||
|
||||
interface
|
||||
|
||||
uses
|
||||
ffm.frame, ffm.avfilter;
|
||||
|
||||
const
|
||||
(*
|
||||
* Do not check for format changes.
|
||||
*)
|
||||
AV_BUFFERSRC_FLAG_NO_CHECK_FORMAT = 1;
|
||||
|
||||
{$IFDEF FF_API_AVFILTERBUFFER}
|
||||
(*
|
||||
* Ignored
|
||||
*)
|
||||
AV_BUFFERSRC_FLAG_NO_COPY = 2;
|
||||
{$ENDIF}
|
||||
(*
|
||||
* Immediately push the frame to the output.
|
||||
*)
|
||||
AV_BUFFERSRC_FLAG_PUSH = 4;
|
||||
|
||||
(*
|
||||
* Keep a reference to the frame.
|
||||
* If the frame is reference-counted, create a new reference; otherwise
|
||||
* copy the frame data.
|
||||
*)
|
||||
AV_BUFFERSRC_FLAG_KEEP_REF = 8;
|
||||
|
||||
(*
|
||||
* Add buffer data in picref to buffer_src.
|
||||
*
|
||||
* @param buffer_src pointer to a buffer source context
|
||||
* @param picref a buffer reference, or NULL to mark EOF
|
||||
* @param flags a combination of AV_BUFFERSRC_FLAG_*
|
||||
* @return >= 0 in case of success, a negative AVERROR code
|
||||
* in case of failure
|
||||
*)
|
||||
// int av_buffersrc_add_ref(AVFilterContext *buffer_src,
|
||||
// AVFilterBufferRef *picref, int flags);
|
||||
|
||||
(*
|
||||
* Get the number of failed requests.
|
||||
*
|
||||
* A failed request is when the request_frame method is called while no
|
||||
* frame is present in the buffer.
|
||||
* The number is reset when a frame is added.
|
||||
*)
|
||||
// unsigned av_buffersrc_get_nb_failed_requests(AVFilterContext *buffer_src);
|
||||
|
||||
// #if FF_API_AVFILTERBUFFER
|
||||
(*
|
||||
* Add a buffer to the filtergraph s.
|
||||
*
|
||||
* @param buf buffer containing frame data to be passed down the filtergraph.
|
||||
* This function will take ownership of buf, the user must not free it.
|
||||
* A NULL buf signals EOF -- i.e. no more frames will be sent to this filter.
|
||||
*
|
||||
* @deprecated use av_buffersrc_write_frame() or av_buffersrc_add_frame()
|
||||
*)
|
||||
// attribute_deprecated
|
||||
// int av_buffersrc_buffer(AVFilterContext *s, AVFilterBufferRef *buf);
|
||||
// #endif
|
||||
|
||||
(*
|
||||
* Add a frame to the buffer source.
|
||||
*
|
||||
* @param s an instance of the buffersrc filter.
|
||||
* @param frame frame to be added. If the frame is reference counted, this
|
||||
* function will make a new reference to it. Otherwise the frame data will be
|
||||
* copied.
|
||||
*
|
||||
* @return 0 on success, a negative AVERROR on error
|
||||
*
|
||||
* This function is equivalent to av_buffersrc_add_frame_flags() with the
|
||||
* AV_BUFFERSRC_FLAG_KEEP_REF flag.
|
||||
*)
|
||||
// int av_buffersrc_write_frame(AVFilterContext *s, const AVFrame *frame);
|
||||
|
||||
(*
|
||||
* Add a frame to the buffer source.
|
||||
*
|
||||
* @param s an instance of the buffersrc filter.
|
||||
* @param frame frame to be added. If the frame is reference counted, this
|
||||
* function will take ownership of the reference(s) and reset the frame.
|
||||
* Otherwise the frame data will be copied. If this function returns an error,
|
||||
* the input frame is not touched.
|
||||
*
|
||||
* @return 0 on success, a negative AVERROR on error.
|
||||
*
|
||||
* @note the difference between this function and av_buffersrc_write_frame() is
|
||||
* that av_buffersrc_write_frame() creates a new reference to the input frame,
|
||||
* while this function takes ownership of the reference passed to it.
|
||||
*
|
||||
* This function is equivalent to av_buffersrc_add_frame_flags() without the
|
||||
* AV_BUFFERSRC_FLAG_KEEP_REF flag.
|
||||
*)
|
||||
// int av_buffersrc_add_frame(AVFilterContext *ctx, AVFrame *frame);
|
||||
|
||||
(*
|
||||
* Add a frame to the buffer source.
|
||||
*
|
||||
* By default, if the frame is reference-counted, this function will take
|
||||
* ownership of the reference(s) and reset the frame. This can be controlled
|
||||
* using the flags.
|
||||
*
|
||||
* If this function returns an error, the input frame is not touched.
|
||||
*
|
||||
* @param buffer_src pointer to a buffer source context
|
||||
* @param frame a frame, or NULL to mark EOF
|
||||
* @param flags a combination of AV_BUFFERSRC_FLAG_*
|
||||
* @return >= 0 in case of success, a negative AVERROR code
|
||||
* in case of failure
|
||||
*)
|
||||
// int av_buffersrc_add_frame_flags(AVFilterContext *buffer_src,
|
||||
// AVFrame *frame, int flags);
|
||||
function av_buffersrc_add_frame_flags(buffer_src: pAVFilterContext; frame: pAVFrame; flags: Integer): Integer; cdecl;
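(*
 * Usage sketch: feeding frames into the buffer source and signalling EOF.
 * BuffersrcCtx and Frame are assumed to be set up elsewhere; error handling is
 * reduced to a single check.
 *
 *   if av_buffersrc_add_frame_flags(BuffersrcCtx, Frame, AV_BUFFERSRC_FLAG_KEEP_REF) < 0 then
 *     // handle the error ...
 *   ;
 *   // when no more input is available:
 *   av_buffersrc_add_frame_flags(BuffersrcCtx, nil, 0);  // nil frame marks EOF
 *)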
|
||||
|
||||
implementation
|
||||
|
||||
uses ffm.lib;
|
||||
|
||||
function av_buffersrc_add_frame_flags; external avfilter_dll;
|
||||
|
||||
end.
|
@ -1,234 +0,0 @@
|
||||
(*
|
||||
* Copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
|
||||
* Copyright (c) 2008 Peter Ross
|
||||
*
|
||||
* This file is part of FFmpeg.
|
||||
*
|
||||
* FFmpeg is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU Lesser General Public
|
||||
* License as published by the Free Software Foundation; either
|
||||
* version 2.1 of the License, or (at your option) any later version.
|
||||
*
|
||||
* FFmpeg is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public
|
||||
* License along with FFmpeg; if not, write to the Free Software
|
||||
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
*)
|
||||
|
||||
unit ffm.channel_layout;
|
||||
|
||||
{$i ffmpeg.inc}
|
||||
|
||||
interface
|
||||
|
||||
Uses
|
||||
ffm.ctypes;
|
||||
|
||||
(* *
|
||||
* @file
|
||||
* audio channel layout utility functions
|
||||
*)
|
||||
|
||||
(* *
|
||||
* @addtogroup lavu_audio
|
||||
* @{
|
||||
*)
|
||||
|
||||
(* *
|
||||
* @defgroup channel_masks Audio channel masks
|
||||
*
|
||||
* A channel layout is a 64-bits integer with a bit set for every channel.
|
||||
* The number of bits set must be equal to the number of channels.
|
||||
* The value 0 means that the channel layout is not known.
|
||||
* @note this data structure is not powerful enough to handle channels
|
||||
* combinations that have the same channel multiple times, such as
|
||||
* dual-mono.
|
||||
*
|
||||
* @{
|
||||
*)
|
||||
Const
|
||||
AV_CH_FRONT_LEFT = $00000001;
|
||||
AV_CH_FRONT_RIGHT = $00000002;
|
||||
AV_CH_FRONT_CENTER = $00000004;
|
||||
AV_CH_LOW_FREQUENCY = $00000008;
|
||||
AV_CH_BACK_LEFT = $00000010;
|
||||
AV_CH_BACK_RIGHT = $00000020;
|
||||
AV_CH_FRONT_LEFT_OF_CENTER = $00000040;
|
||||
AV_CH_FRONT_RIGHT_OF_CENTER = $00000080;
|
||||
AV_CH_BACK_CENTER = $00000100;
|
||||
AV_CH_SIDE_LEFT = $00000200;
|
||||
AV_CH_SIDE_RIGHT = $00000400;
|
||||
AV_CH_TOP_CENTER = $00000800;
|
||||
AV_CH_TOP_FRONT_LEFT = $00001000;
|
||||
AV_CH_TOP_FRONT_CENTER = $00002000;
|
||||
AV_CH_TOP_FRONT_RIGHT = $00004000;
|
||||
AV_CH_TOP_BACK_LEFT = $00008000;
|
||||
AV_CH_TOP_BACK_CENTER = $00010000;
|
||||
AV_CH_TOP_BACK_RIGHT = $00020000;
|
||||
AV_CH_STEREO_LEFT = $20000000;
|
||||
/// < Stereo downmix.
|
||||
AV_CH_STEREO_RIGHT = $40000000;
|
||||
/// < See AV_CH_STEREO_LEFT.
|
||||
AV_CH_WIDE_LEFT = $0000000080000000;
|
||||
AV_CH_WIDE_RIGHT = $0000000100000000;
|
||||
AV_CH_SURROUND_DIRECT_LEFT = $0000000200000000;
|
||||
AV_CH_SURROUND_DIRECT_RIGHT = $0000000400000000;
|
||||
AV_CH_LOW_FREQUENCY_2 = $0000000800000000;
|
||||
|
||||
(* * Channel mask value used for AVCodecContext.request_channel_layout
|
||||
to indicate that the user requests the channel order of the decoder output
|
||||
to be the native codec channel order. *)
|
||||
AV_CH_LAYOUT_NATIVE = $8000000000000000;
|
||||
|
||||
(* *
|
||||
* @}
|
||||
* @defgroup channel_mask_c Audio channel convenience macros
|
||||
* @{
|
||||
* *)
|
||||
AV_CH_LAYOUT_MONO = (AV_CH_FRONT_CENTER);
|
||||
AV_CH_LAYOUT_STEREO = (AV_CH_FRONT_LEFT or AV_CH_FRONT_RIGHT);
|
||||
AV_CH_LAYOUT_2POINT1 = (AV_CH_LAYOUT_STEREO or AV_CH_LOW_FREQUENCY);
|
||||
AV_CH_LAYOUT_2_1 = (AV_CH_LAYOUT_STEREO or AV_CH_BACK_CENTER);
|
||||
AV_CH_LAYOUT_SURROUND = (AV_CH_LAYOUT_STEREO or AV_CH_FRONT_CENTER);
|
||||
AV_CH_LAYOUT_3POINT1 = (AV_CH_LAYOUT_SURROUND or AV_CH_LOW_FREQUENCY);
|
||||
AV_CH_LAYOUT_4POINT0 = (AV_CH_LAYOUT_SURROUND or AV_CH_BACK_CENTER);
|
||||
AV_CH_LAYOUT_4POINT1 = (AV_CH_LAYOUT_4POINT0 or AV_CH_LOW_FREQUENCY);
|
||||
AV_CH_LAYOUT_2_2 = (AV_CH_LAYOUT_STEREO or AV_CH_SIDE_LEFT or AV_CH_SIDE_RIGHT);
|
||||
AV_CH_LAYOUT_QUAD = (AV_CH_LAYOUT_STEREO or AV_CH_BACK_LEFT or AV_CH_BACK_RIGHT);
|
||||
AV_CH_LAYOUT_5POINT0 = (AV_CH_LAYOUT_SURROUND or AV_CH_SIDE_LEFT or AV_CH_SIDE_RIGHT);
|
||||
AV_CH_LAYOUT_5POINT1 = (AV_CH_LAYOUT_5POINT0 or AV_CH_LOW_FREQUENCY);
|
||||
AV_CH_LAYOUT_5POINT0_BACK = (AV_CH_LAYOUT_SURROUND or AV_CH_BACK_LEFT or AV_CH_BACK_RIGHT);
|
||||
AV_CH_LAYOUT_5POINT1_BACK = (AV_CH_LAYOUT_5POINT0_BACK or AV_CH_LOW_FREQUENCY);
|
||||
AV_CH_LAYOUT_6POINT0 = (AV_CH_LAYOUT_5POINT0 or AV_CH_BACK_CENTER);
|
||||
AV_CH_LAYOUT_6POINT0_FRONT = (AV_CH_LAYOUT_2_2 or AV_CH_FRONT_LEFT_OF_CENTER or AV_CH_FRONT_RIGHT_OF_CENTER);
|
||||
AV_CH_LAYOUT_HEXAGONAL = (AV_CH_LAYOUT_5POINT0_BACK or AV_CH_BACK_CENTER);
|
||||
AV_CH_LAYOUT_6POINT1 = (AV_CH_LAYOUT_5POINT1 or AV_CH_BACK_CENTER);
|
||||
AV_CH_LAYOUT_6POINT1_BACK = (AV_CH_LAYOUT_5POINT1_BACK or AV_CH_BACK_CENTER);
|
||||
AV_CH_LAYOUT_6POINT1_FRONT = (AV_CH_LAYOUT_6POINT0_FRONT or AV_CH_LOW_FREQUENCY);
|
||||
AV_CH_LAYOUT_7POINT0 = (AV_CH_LAYOUT_5POINT0 or AV_CH_BACK_LEFT or AV_CH_BACK_RIGHT);
|
||||
AV_CH_LAYOUT_7POINT0_FRONT = (AV_CH_LAYOUT_5POINT0 or AV_CH_FRONT_LEFT_OF_CENTER or AV_CH_FRONT_RIGHT_OF_CENTER);
|
||||
AV_CH_LAYOUT_7POINT1 = (AV_CH_LAYOUT_5POINT1 or AV_CH_BACK_LEFT or AV_CH_BACK_RIGHT);
|
||||
AV_CH_LAYOUT_7POINT1_WIDE = (AV_CH_LAYOUT_5POINT1 or AV_CH_FRONT_LEFT_OF_CENTER or AV_CH_FRONT_RIGHT_OF_CENTER);
|
||||
AV_CH_LAYOUT_7POINT1_WIDE_BACK = (AV_CH_LAYOUT_5POINT1_BACK or AV_CH_FRONT_LEFT_OF_CENTER or
|
||||
AV_CH_FRONT_RIGHT_OF_CENTER);
|
||||
AV_CH_LAYOUT_OCTAGONAL = (AV_CH_LAYOUT_5POINT0 or AV_CH_BACK_LEFT or AV_CH_BACK_CENTER or AV_CH_BACK_RIGHT);
|
||||
AV_CH_LAYOUT_STEREO_DOWNMIX = (AV_CH_STEREO_LEFT or AV_CH_STEREO_RIGHT);
|
||||
|
||||
Type
|
||||
TAVMatrixEncoding = ( //
|
||||
AV_MATRIX_ENCODING_NONE, //
|
||||
AV_MATRIX_ENCODING_DOLBY, //
|
||||
AV_MATRIX_ENCODING_DPLII, //
|
||||
AV_MATRIX_ENCODING_DPLIIX, //
|
||||
AV_MATRIX_ENCODING_DPLIIZ, //
|
||||
AV_MATRIX_ENCODING_DOLBYEX, //
|
||||
AV_MATRIX_ENCODING_DOLBYHEADPHONE, //
|
||||
AV_MATRIX_ENCODING_NB);
|
||||
|
||||
(* *
|
||||
* @}
|
||||
*)
|
||||
|
||||
(* *
|
||||
* Return a channel layout id that matches name, or 0 if no match is found.
|
||||
*
|
||||
* name can be one or several of the following notations,
|
||||
* separated by '+' or '|':
|
||||
* - the name of a usual channel layout (mono, stereo, 4.0, quad, 5.0,
|
||||
* 5.0(side), 5.1, 5.1(side), 7.1, 7.1(wide), downmix);
|
||||
* - the name of a single channel (FL, FR, FC, LFE, BL, BR, FLC, FRC, BC,
|
||||
* SL, SR, TC, TFL, TFC, TFR, TBL, TBC, TBR, DL, DR);
|
||||
* - a number of channels, in decimal, optionally followed by 'c', yielding
|
||||
* the default channel layout for that number of channels (@see
|
||||
* av_get_default_channel_layout);
|
||||
* - a channel layout mask, in hexadecimal starting with "$" (see the
|
||||
* AV_CH_* macros).
|
||||
*
|
||||
* @warning Starting from the next major bump the trailing character
|
||||
* 'c' to specify a number of channels will be required, while a
|
||||
* channel layout mask could also be specified as a decimal number
|
||||
* (if and only if not followed by "c").
|
||||
*
|
||||
* Example: "stereo+FC" = "2c+FC" = "2c+1c" = "$7"
|
||||
*)
|
||||
// uint64_t av_get_channel_layout(const char *name);
|
||||
|
||||
(* *
|
||||
* Return a description of a channel layout.
|
||||
* If nb_channels is <= 0, it is guessed from the channel_layout.
|
||||
*
|
||||
* @param buf put here the string containing the channel layout
|
||||
* @param buf_size size in bytes of the buffer
|
||||
*)
|
||||
// void av_get_channel_layout_string(char *buf, int buf_size, int nb_channels, uint64_t channel_layout);
|
||||
|
||||
// struct AVBPrint;
|
||||
(* *
|
||||
* Append a description of a channel layout to a bprint buffer.
|
||||
*)
|
||||
// void av_bprint_channel_layout(struct AVBPrint *bp, int nb_channels, uint64_t channel_layout);
|
||||
|
||||
(* *
|
||||
* Return the number of channels in the channel layout.
|
||||
*)
|
||||
// int av_get_channel_layout_nb_channels(uint64_t channel_layout);
|
||||
function av_get_channel_layout_nb_channels(channel_layout: uint64_t): Integer; cdecl;
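(*
 * Usage sketch: the AV_CH_LAYOUT_* masks above can be passed straight to
 * av_get_channel_layout_nb_channels, e.g.
 *
 *   NbChannels := av_get_channel_layout_nb_channels(AV_CH_LAYOUT_5POINT1);
 *   // NbChannels = 6 (FL, FR, FC, LFE, SL, SR)
 *)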
|
||||
|
||||
(* *
|
||||
* Return default channel layout for a given number of channels.
|
||||
*)
|
||||
// int64_t av_get_default_channel_layout(int nb_channels);
|
||||
|
||||
(* *
|
||||
* Get the index of a channel in channel_layout.
|
||||
*
|
||||
* @param channel a channel layout describing exactly one channel which must be
|
||||
* present in channel_layout.
|
||||
*
|
||||
* @return index of channel in channel_layout on success, a negative AVERROR
|
||||
* on error.
|
||||
*)
|
||||
// int av_get_channel_layout_channel_index(uint64_t channel_layout,uint64_t channel);
|
||||
|
||||
(* *
|
||||
* Get the channel with the given index in channel_layout.
|
||||
*)
|
||||
// uint64_t av_channel_layout_extract_channel(uint64_t channel_layout, int index);
|
||||
|
||||
(* *
|
||||
* Get the name of a given channel.
|
||||
*
|
||||
* @return channel name on success, NULL on error.
|
||||
*)
|
||||
// const char *av_get_channel_name(uint64_t channel);
|
||||
|
||||
(* *
|
||||
* Get the description of a given channel.
|
||||
*
|
||||
* @param channel a channel layout with a single channel
|
||||
* @return channel description on success, NULL on error
|
||||
*)
|
||||
// const char *av_get_channel_description(uint64_t channel);
|
||||
|
||||
(* *
|
||||
* Get the value and name of a standard channel layout.
|
||||
*
|
||||
* @param[in] index index in an internal list, starting at 0
|
||||
* @param[out] layout channel layout mask
|
||||
* @param[out] name name of the layout
|
||||
* @return 0 if the layout exists,
|
||||
* <0 if index is beyond the limits
|
||||
*)
|
||||
// int av_get_standard_channel_layout(unsigned index, uint64_t *layout, const char **name);
|
||||
|
||||
implementation
|
||||
|
||||
uses ffm.lib;
|
||||
|
||||
function av_get_channel_layout_nb_channels; external avutil_dll;
|
||||
|
||||
end.
|
@ -1,687 +0,0 @@
|
||||
unit ffm.cls.videoencoder;
|
||||
|
||||
{$I OpenCV.inc}
|
||||
{$POINTERMATH ON}
|
||||
|
||||
interface
|
||||
|
||||
Uses
|
||||
ffm.libavcodec.avcodec,
|
||||
ffm.pixfmt,
|
||||
ffm.avformat,
|
||||
ffm.swscale,
|
||||
ffm.avio,
|
||||
ffm.frame;
|
||||
|
||||
Type
|
||||
TFFMVideoEncoder = class
|
||||
private const
|
||||
MAX_AUDIO_PACKET_SIZE = (128 * 1024);
|
||||
private
|
||||
// output file name
|
||||
outputFilename: AnsiString;
|
||||
// output format.
|
||||
pOutFormat: pAVOutputFormat;
|
||||
// format context
|
||||
pFormatContext: pAVFormatContext;
|
||||
// video stream context
|
||||
pVideoStream: pAVStream;
|
||||
// audio streams context
|
||||
pAudioStream: pAVStream;
|
||||
// image conversion context
|
||||
pImgConvertCtx: pSwsContext;
|
||||
// encode buffer and size
|
||||
pVideoEncodeBuffer: pByte;
|
||||
nSizeVideoEncodeBuffer: Integer;
|
||||
// audio buffer and size
|
||||
pAudioEncodeBuffer: pByte;
|
||||
nSizeAudioEncodeBuffer: Integer;
|
||||
// count of samples
|
||||
audioInputSampleSize: Integer;
|
||||
// current picture
|
||||
pCurrentPicture: pAVFrame;
|
||||
|
||||
// audio buffer. Holds the samples left over from the previous audio frame.
|
||||
audioBuffer: pByte;
|
||||
nAudioBufferSize: Integer;
|
||||
nAudioBufferSizeCurrent: Integer;
|
||||
W_VIDEO: Integer;
|
||||
H_VIDEO: Integer;
|
||||
fbit_rate: Integer;
|
||||
ftime_base_den: Integer;
|
||||
function flush_encoder: Integer;
|
||||
protected
|
||||
// Add video stream
|
||||
function AddVideoStream(const pContext: pAVFormatContext; const codec_id: TAVCodecID): pAVStream;
|
||||
// Open Video Stream
|
||||
function OpenVideo(const oc: pAVFormatContext; const pStream: pAVStream): boolean;
|
||||
// Allocate memory
|
||||
function CreateFFmpegPicture(const pix_fmt: TAVPixelFormat; const nWidth, nHeight: Integer): pAVFrame;
|
||||
// Close video stream
|
||||
procedure CloseVideo(const pContext: pAVFormatContext; const pStream: pAVStream);
|
||||
// Add audio stream
|
||||
function AddAudioStream(const pContext: pAVFormatContext; const codec_id: TAVCodecID): pAVStream;
|
||||
// Open audio stream
|
||||
function OpenAudio(const pContext: pAVFormatContext; const pStream: pAVStream): boolean;
|
||||
// close audio stream
|
||||
procedure CloseAudio(const pContext: pAVFormatContext; const pStream: pAVStream);
|
||||
// Add video frame
|
||||
function AddVideoFrame(const pOutputFrame: pAVFrame; const pVideoCodec: pAVCodecContext): boolean;
|
||||
// Add audio samples
|
||||
function AddAudioSample(const pFormatContext: pAVFormatContext; const pStream: pAVStream; const soundBuffer: pByte;
|
||||
const soundBufferSize: Integer): boolean;
|
||||
// Free resources.
|
||||
procedure Free;
|
||||
function NeedConvert(const framepixfmt: TAVPixelFormat): boolean;
|
||||
public
|
||||
constructor Create;
|
||||
destructor Destroy; override;
|
||||
// init output file
|
||||
function InitFile(const inputFile: AnsiString; const container: AnsiString; const AW_VIDEO: Integer = 320;
|
||||
const AH_VIDEO: Integer = 200): boolean;
|
||||
// Set video params
|
||||
procedure SetVideoParams(const atime_base_den: Integer = 25; const abit_rate: Integer = 2000000);
|
||||
// Add video and audio data
|
||||
function AddFrame(const frame: pAVFrame; const soundBuffer: pByte; const soundBufferSize: Integer;
|
||||
const framepixfmt: TAVPixelFormat = AV_PIX_FMT_RGB24): boolean;
|
||||
// end of output
|
||||
function Finish: boolean;
|
||||
end;
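(*
 * Usage sketch (a plausible call order derived from the public interface; the
 * file name, container string and the GetNextFrame routine are hypothetical):
 *
 *   Encoder := TFFMVideoEncoder.Create;
 *   Encoder.SetVideoParams(25, 2000000);                    // 25 fps time base, 2 Mbit/s
 *   if Encoder.InitFile('out.avi', 'avi', 320, 200) then
 *   begin
 *     while GetNextFrame(Frame) do
 *       Encoder.AddFrame(Frame, nil, 0, AV_PIX_FMT_RGB24);  // video only, no audio
 *     Encoder.Finish;
 *   end;
 *   FreeAndNil(Encoder);
 *)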
|
||||
|
||||
implementation
|
||||
|
||||
Uses
|
||||
{$IFDEF MSWINDOWS}
|
||||
Winapi.Windows,
|
||||
{$ENDIF MSWINDOWS}
|
||||
System.SysUtils,
|
||||
System.AnsiStrings,
|
||||
System.Math,
|
||||
ffm.mem,
|
||||
ffm.avutil,
|
||||
ffm.samplefmt,
|
||||
ffm.mathematics;
|
||||
|
||||
{ TFFMVideoEncoder }
|
||||
|
||||
function TFFMVideoEncoder.AddAudioSample(const pFormatContext: pAVFormatContext; const pStream: pAVStream; const soundBuffer: pByte;
|
||||
const soundBufferSize: Integer): boolean;
|
||||
Var
|
||||
pCodecCxt: pAVCodecContext;
|
||||
packSizeInSize: Integer; // DWORD;
|
||||
nCountSamples: Integer;
|
||||
nCurrentSize: Integer;
|
||||
// nWriteSamples: Integer;
|
||||
pSoundBuffer: pByte;
|
||||
pAudioFrame: pAVFrame;
|
||||
// nBufferShift: Integer;
|
||||
nCurrentBufferSize: Integer;
|
||||
pkt: TAVPacket;
|
||||
nOutputSize, error: Integer;
|
||||
begin
|
||||
|
||||
// pCodecCxt := nil;
|
||||
Result := true;
|
||||
|
||||
pCodecCxt := pStream^.codec;
|
||||
|
||||
// Size of packet in bytes.
|
||||
// FORMAT s16
|
||||
packSizeInSize := soundBufferSize;
|
||||
|
||||
nCountSamples := soundBufferSize div av_get_bytes_per_sample(AV_SAMPLE_FMT_S16);
|
||||
|
||||
// Append the current audio data to the remainder from the previous call.
|
||||
CopyMemory(@audioBuffer[nAudioBufferSizeCurrent], soundBuffer, soundBufferSize);
|
||||
nAudioBufferSizeCurrent := nAudioBufferSizeCurrent + soundBufferSize;
|
||||
|
||||
nCurrentSize := nAudioBufferSizeCurrent;
|
||||
// nWriteSamples := 0;
|
||||
pSoundBuffer := audioBuffer;
|
||||
|
||||
while (nCurrentSize >= packSizeInSize) do
|
||||
begin
|
||||
pAudioFrame := nil;
|
||||
|
||||
pAudioFrame := av_frame_alloc();//avcodec_alloc_frame();
|
||||
|
||||
// Audio frame should be equal to or smaller than pCodecCxt^.frame_size.
|
||||
pAudioFrame^.nb_samples := min(pCodecCxt^.frame_size div av_get_bytes_per_sample(AV_SAMPLE_FMT_S16), nCountSamples);
|
||||
// nBufferShift := nWriteSamples * av_get_bytes_per_sample(AV_SAMPLE_FMT_S16);
|
||||
nCurrentBufferSize := pAudioFrame^.nb_samples * av_get_bytes_per_sample(AV_SAMPLE_FMT_S16);
|
||||
|
||||
if avcodec_fill_audio_frame(pAudioFrame, 1, AV_SAMPLE_FMT_S16, pSoundBuffer, nCurrentBufferSize, 1) <> 0 then
|
||||
begin
|
||||
Result := false;
|
||||
break;
|
||||
end;
|
||||
|
||||
av_init_packet(@pkt);
|
||||
|
||||
pkt.flags := pkt.flags or AV_PKT_FLAG_KEY;
|
||||
pkt.stream_index := pStream^.index;
|
||||
pkt.data := pAudioEncodeBuffer;
|
||||
pkt.size := nSizeAudioEncodeBuffer;
|
||||
|
||||
nOutputSize := 0;
|
||||
// Create encoded packet from audio frame.
|
||||
error := avcodec_encode_audio2(pCodecCxt, @pkt, pAudioFrame, nOutputSize);
|
||||
|
||||
if (error = 0) and (nOutputSize > 0) then
|
||||
begin
|
||||
if Assigned(pCodecCxt^.coded_frame) and (pCodecCxt^.coded_frame^.pts <> AV_NOPTS_VALUE) then
|
||||
pkt.pts := av_rescale_q(pCodecCxt^.coded_frame^.pts, pCodecCxt^.time_base, pStream^.time_base);
|
||||
|
||||
pkt.stream_index := pStream^.index;
|
||||
|
||||
// Write the compressed frame in the media file.
|
||||
if (av_interleaved_write_frame(pFormatContext, @pkt) <> 0) then
|
||||
begin
|
||||
Result := false;
|
||||
break;
|
||||
end;
|
||||
end;
|
||||
|
||||
nCurrentSize := nCurrentSize - nCurrentBufferSize;
|
||||
pSoundBuffer := pSoundBuffer + nCurrentBufferSize;
|
||||
|
||||
// nWriteSamples := nWriteSamples + pAudioFrame^.nb_samples;
|
||||
avcodec_free_frame(pAudioFrame);
|
||||
end;
|
||||
|
||||
// save excess
|
||||
CopyMemory(audioBuffer, @audioBuffer[nAudioBufferSizeCurrent - nCurrentSize], nCurrentSize);
|
||||
nAudioBufferSizeCurrent := nCurrentSize;
|
||||
end;
|
||||
|
||||
function TFFMVideoEncoder.AddAudioStream(const pContext: pAVFormatContext; const codec_id: TAVCodecID): pAVStream;
|
||||
Var
|
||||
pCodecCxt: pAVCodecContext;
|
||||
begin
|
||||
// pCodecCxt := nil;
|
||||
// Result := nil;
|
||||
// Try create stream.
|
||||
Result := avformat_new_stream(pContext, nil);
|
||||
if not Assigned(Result) then
|
||||
begin
|
||||
// printf("Cannot add new audio stream\n");
|
||||
Exit(nil);
|
||||
end;
|
||||
// Codec.
|
||||
pCodecCxt := Result^.codec;
|
||||
pCodecCxt^.codec_id := codec_id;
|
||||
pCodecCxt^.codec_type := AVMEDIA_TYPE_AUDIO;
|
||||
// Set format
|
||||
pCodecCxt^.bit_rate := 128000;
|
||||
pCodecCxt^.sample_rate := 44100;
|
||||
pCodecCxt^.channels := 1; // mono
|
||||
pCodecCxt^.sample_fmt := AV_SAMPLE_FMT_S16;
|
||||
|
||||
nSizeAudioEncodeBuffer := 4 * MAX_AUDIO_PACKET_SIZE;
|
||||
if not Assigned(pAudioEncodeBuffer) then
|
||||
pAudioEncodeBuffer := av_malloc(nSizeAudioEncodeBuffer);
|
||||
|
||||
// Some formats want stream headers to be separate.
|
||||
if (pContext^.oformat^.flags and AVFMT_GLOBALHEADER) <> 0 then
|
||||
pCodecCxt^.flags := pCodecCxt^.flags or CODEC_FLAG_GLOBAL_HEADER;
|
||||
end;
|
||||
|
||||
function TFFMVideoEncoder.AddFrame(const frame: pAVFrame; const soundBuffer: pByte; const soundBufferSize: Integer;
|
||||
const framepixfmt: TAVPixelFormat): boolean;
|
||||
Var
|
||||
// nOutputSize: Integer;
|
||||
pVideoCodec: pAVCodecContext;
|
||||
begin
|
||||
Result := true;
|
||||
// nOutputSize := 0;
|
||||
// pVideoCodec := nil;
|
||||
|
||||
if Assigned(pVideoStream) and Assigned(frame) and Assigned(frame^.data[0]) then
|
||||
begin
|
||||
pVideoCodec := pVideoStream^.codec;
|
||||
|
||||
if NeedConvert(framepixfmt) then
|
||||
begin
|
||||
// RGB to YUV420P.
|
||||
if not Assigned(pImgConvertCtx) then
|
||||
begin
|
||||
pImgConvertCtx := sws_getContext(pVideoCodec^.width, pVideoCodec^.height, framepixfmt, pVideoCodec^.width, pVideoCodec^.height,
|
||||
pVideoCodec^.pix_fmt, SWS_BICUBLIN, nil, nil, nil);
|
||||
end;
|
||||
|
||||
// Allocate picture.
|
||||
pCurrentPicture := CreateFFmpegPicture(pVideoCodec^.pix_fmt, pVideoCodec^.width, pVideoCodec^.height);
|
||||
|
||||
// if NeedConvert(framepixfmt) and Assigned(pImgConvertCtx) then
|
||||
// begin
|
||||
// Convert RGB to YUV.
|
||||
sws_scale(pImgConvertCtx, @frame^.data, @frame^.linesize, 0, pVideoCodec^.height, @pCurrentPicture^.data, @pCurrentPicture^.linesize);
|
||||
// end;
|
||||
|
||||
Result := AddVideoFrame(pCurrentPicture, pVideoStream^.codec);
|
||||
|
||||
// Free temp frame
|
||||
av_free(pCurrentPicture^.data[0]);
|
||||
av_free(pCurrentPicture);
|
||||
pCurrentPicture := nil;
|
||||
end
|
||||
else
|
||||
Result := AddVideoFrame(frame, pVideoStream^.codec);
|
||||
end;
|
||||
|
||||
// Add sound
|
||||
if Assigned(soundBuffer) and (soundBufferSize > 0) then
|
||||
Result := AddAudioSample(pFormatContext, pAudioStream, soundBuffer, soundBufferSize);
|
||||
end;
|
||||
|
||||
function TFFMVideoEncoder.AddVideoFrame(const pOutputFrame: pAVFrame; const pVideoCodec: pAVCodecContext): boolean;
|
||||
Var
|
||||
pkt: TAVPacket;
|
||||
packet: TAVPacket;
|
||||
nOutputSize: Integer;
|
||||
error: Integer;
|
||||
begin
|
||||
|
||||
// Result := false;
|
||||
|
||||
if (pFormatContext^.oformat^.flags and AVFMT_RAWPICTURE) <> 0 then
|
||||
begin
|
||||
// Raw video case. The API will change slightly in the near
|
||||
// future for that.
|
||||
av_init_packet(@pkt);
|
||||
|
||||
pkt.flags := pkt.flags or AV_PKT_FLAG_KEY;
|
||||
pkt.stream_index := pVideoStream^.index;
|
||||
pkt.data := pByte(pOutputFrame);
|
||||
pkt.size := sizeof(TAVPicture);
|
||||
|
||||
av_interleaved_write_frame(pFormatContext, @pkt);
|
||||
Result := true;
|
||||
end
|
||||
else
|
||||
begin
|
||||
// Encode
|
||||
av_init_packet(@packet);
|
||||
packet.data := pVideoEncodeBuffer;
|
||||
packet.size := nSizeVideoEncodeBuffer;
|
||||
|
||||
nOutputSize := 0;
|
||||
// Encode frame to packet.
|
||||
error := avcodec_encode_video2(pVideoCodec, @packet, pOutputFrame, nOutputSize);
|
||||
if (error = 0) and (nOutputSize > 0) then
|
||||
begin
|
||||
// AVPacket pkt;
|
||||
av_init_packet(@pkt);
|
||||
if Assigned(pVideoCodec^.coded_frame) and (pVideoCodec^.coded_frame^.pts <> AV_NOPTS_VALUE) then
|
||||
pkt.pts := av_rescale_q(pVideoCodec^.coded_frame^.pts, pVideoCodec^.time_base, pVideoStream^.time_base);
|
||||
|
||||
if Assigned(pVideoCodec^.coded_frame) and (pVideoCodec^.coded_frame^.key_frame <> 0) then
|
||||
pkt.flags := pkt.flags or AV_PKT_FLAG_KEY;
|
||||
pkt.stream_index := pVideoStream^.index;
|
||||
pkt.data := pVideoEncodeBuffer;
|
||||
pkt.size := packet.size;
|
||||
|
||||
// Write packet with frame.
|
||||
Result := av_interleaved_write_frame(pFormatContext, @pkt) = 0;
|
||||
// Result := av_write_frame(pFormatContext, @pkt) = 0;
|
||||
end
|
||||
else
|
||||
Result := false;
|
||||
end;
|
||||
end;
|
||||
|
||||
function TFFMVideoEncoder.AddVideoStream(const pContext: pAVFormatContext; const codec_id: TAVCodecID): pAVStream;
|
||||
Var
|
||||
pCodecCxt: pAVCodecContext;
|
||||
begin
|
||||
// pCodecCxt := nil;
|
||||
// Result := nil;
|
||||
|
||||
Result := avformat_new_stream(pContext, nil);
|
||||
if not Assigned(Result) then
|
||||
begin
|
||||
// printf("Cannot add new vidoe stream\n");
|
||||
Exit(nil);
|
||||
end;
|
||||
|
||||
pCodecCxt := Result^.codec;
|
||||
pCodecCxt^.codec_id := codec_id;
|
||||
pCodecCxt^.codec_type := AVMEDIA_TYPE_VIDEO;
|
||||
pCodecCxt^.frame_number := 0;
|
||||
// Put sample parameters.
|
||||
pCodecCxt^.bit_rate := fbit_rate; // 2000000;
|
||||
// Resolution must be a multiple of two.
|
||||
pCodecCxt^.width := W_VIDEO;
|
||||
pCodecCxt^.height := H_VIDEO;
|
||||
(* time base: this is the fundamental unit of time (in seconds) in terms
|
||||
of which frame timestamps are represented. for fixed-fps content,
|
||||
timebase should be 1/framerate and timestamp increments should be
|
||||
identically 1. *)
|
||||
pCodecCxt^.time_base.den := ftime_base_den; // 25;
|
||||
pCodecCxt^.time_base.num := 1;
|
||||
pCodecCxt^.gop_size := 12; // emit one intra frame every twelve frames at most
|
||||
|
||||
pCodecCxt^.pix_fmt := AV_PIX_FMT_YUV420P;
|
||||
if (pCodecCxt^.codec_id = AV_CODEC_ID_MPEG2VIDEO) then
|
||||
begin
|
||||
// Just for testing, we also add B frames
|
||||
pCodecCxt^.max_b_frames := 2;
|
||||
end;
|
||||
if (pCodecCxt^.codec_id = AV_CODEC_ID_MPEG1VIDEO) then
|
||||
begin
|
||||
(* Needed to avoid using macroblocks
|
||||
in which some coeffs overflow. This does not happen with normal video, it just happens here as the motion of
|
||||
the chroma plane does not match the luma plane. *)
|
||||
pCodecCxt^.mb_decision := 2;
|
||||
end;
|
||||
|
||||
// Some formats want stream headers to be separate.
|
||||
if (pContext^.oformat^.flags and AVFMT_GLOBALHEADER) <> 0 then
|
||||
begin
|
||||
pCodecCxt^.flags := pCodecCxt^.flags or CODEC_FLAG_GLOBAL_HEADER;
|
||||
end;
|
||||
end;
|
||||
|
||||
procedure TFFMVideoEncoder.CloseAudio(const pContext: pAVFormatContext; const pStream: pAVStream);
|
||||
begin
|
||||
avcodec_close(pStream^.codec);
|
||||
if Assigned(pAudioEncodeBuffer) then
|
||||
begin
|
||||
av_free(pAudioEncodeBuffer);
|
||||
pAudioEncodeBuffer := Nil;
|
||||
end;
|
||||
nSizeAudioEncodeBuffer := 0;
|
||||
end;
|
||||
|
||||
procedure TFFMVideoEncoder.CloseVideo(const pContext: pAVFormatContext; const pStream: pAVStream);
|
||||
begin
|
||||
avcodec_close(pStream^.codec);
|
||||
if Assigned(pCurrentPicture) then
|
||||
begin
|
||||
if Assigned(pCurrentPicture^.data[0]) then
|
||||
begin
|
||||
av_free(pCurrentPicture^.data[0]);
|
||||
pCurrentPicture^.data[0] := nil;
|
||||
end;
|
||||
av_free(pCurrentPicture);
|
||||
pCurrentPicture := nil;
|
||||
end;
|
||||
|
||||
if Assigned(pVideoEncodeBuffer) then
|
||||
begin
|
||||
av_free(pVideoEncodeBuffer);
|
||||
pVideoEncodeBuffer := nil;
|
||||
end;
|
||||
nSizeVideoEncodeBuffer := 0;
|
||||
end;
|
||||
|
||||
constructor TFFMVideoEncoder.Create;
|
||||
begin
|
||||
pOutFormat := nil;
|
||||
pFormatContext := nil;
|
||||
pVideoStream := nil;
|
||||
pImgConvertCtx := nil;
|
||||
pCurrentPicture := nil;
|
||||
pVideoEncodeBuffer := nil;
|
||||
nSizeVideoEncodeBuffer := 0;
|
||||
pAudioEncodeBuffer := nil;
|
||||
nSizeAudioEncodeBuffer := 0;
|
||||
nAudioBufferSize := 1024 * 1024 * 4;
|
||||
audioBuffer := AllocMem(nAudioBufferSize);
|
||||
nAudioBufferSizeCurrent := 0;
|
||||
SetVideoParams;
|
||||
end;
|
||||
|
||||
function TFFMVideoEncoder.CreateFFmpegPicture(const pix_fmt: TAVPixelFormat; const nWidth, nHeight: Integer): pAVFrame;
|
||||
Var
|
||||
picture_buf: pByte;
|
||||
size: Integer;
|
||||
begin
|
||||
// picture_buf := nil;
|
||||
Result := av_frame_alloc();//avcodec_alloc_frame();
|
||||
if not Assigned(Result) then
|
||||
begin
|
||||
// printf("Cannot create frame\n");
|
||||
Exit(nil);
|
||||
end;
|
||||
|
||||
size := avpicture_get_size(pix_fmt, nWidth, nHeight);
|
||||
|
||||
picture_buf := av_malloc(size);
|
||||
|
||||
if not Assigned(picture_buf) then
|
||||
begin
|
||||
av_free(Result);
|
||||
// printf("Cannot allocate buffer\n");
|
||||
Exit(nil);
|
||||
end;
|
||||
avpicture_fill(pAVPicture(Result), picture_buf, pix_fmt, nWidth, nHeight);
|
||||
end;
|
||||
|
||||
destructor TFFMVideoEncoder.Destroy;
|
||||
begin
|
||||
Finish;
|
||||
inherited;
|
||||
end;
|
||||
|
||||
function TFFMVideoEncoder.Finish: boolean;
|
||||
begin
|
||||
Result := true;
|
||||
// TODO: You may need to write the remaining audio samples from audioBuffer to the file before closing.
|
||||
if Assigned(pFormatContext) then
|
||||
begin
|
||||
// flush_encoder;
|
||||
av_write_trailer(pFormatContext);
|
||||
Free;
|
||||
end;
|
||||
|
||||
if Assigned(audioBuffer) then
|
||||
begin
|
||||
FreeMem(audioBuffer);
|
||||
audioBuffer := nil;
|
||||
end;
|
||||
end;
|
||||
|
||||
function TFFMVideoEncoder.flush_encoder: Integer;
|
||||
Var
|
||||
ret, got_output: Integer;
|
||||
pkt: TAVPacket;
|
||||
begin
|
||||
Result := 0;
|
||||
(* get the delayed frames *)
|
||||
av_init_packet(@pkt);
|
||||
got_output := 1;
|
||||
While got_output <> 0 do
|
||||
begin
|
||||
ret := avcodec_encode_video2(pVideoStream^.codec, @pkt, nil, got_output);
|
||||
if (ret < 0) then
|
||||
begin
|
||||
// WriteLn('Error encoding frame');
|
||||
Exit(ret);
|
||||
end;
|
||||
if (got_output <> 0) then
|
||||
begin
|
||||
// WriteLn(format('Write frame %3d (size=%5d)', [i, pkt.size]));
|
||||
// BlockWrite(f, pkt.data^, pkt.size);
|
||||
av_free_packet(pkt);
|
||||
end;
|
||||
// Inc(i);
|
||||
end;
|
||||
end;
|
||||
|
||||
procedure TFFMVideoEncoder.Free;
|
||||
Var
|
||||
i: Integer;
|
||||
begin
|
||||
if Assigned(pFormatContext) then
|
||||
begin
|
||||
// close video stream
|
||||
if Assigned(pVideoStream) then
|
||||
CloseVideo(pFormatContext, pVideoStream);
|
||||
|
||||
// close audio stream.
|
||||
if Assigned(pAudioStream) then
|
||||
CloseAudio(pFormatContext, pAudioStream);
|
||||
|
||||
// Free the streams.
|
||||
for i := 0 to pFormatContext^.nb_streams - 1 do
|
||||
begin
|
||||
av_freep(pFormatContext^.streams[i]^.codec);
|
||||
// av_freep(pFormatContext^.streams[i]);
|
||||
end;
|
||||
|
||||
if ((pFormatContext^.flags and AVFMT_NOFILE) = 0) and Assigned(pFormatContext^.pb) then
|
||||
avio_close(pFormatContext^.pb);
|
||||
|
||||
// Free the stream.
|
||||
av_free(pFormatContext);
|
||||
pFormatContext := nil;
|
||||
end;
|
||||
end;
|
||||
|
||||
function TFFMVideoEncoder.InitFile(const inputFile, container: AnsiString; const AW_VIDEO, AH_VIDEO: Integer): boolean;
|
||||
Var
|
||||
filename: PAnsiChar;
|
||||
begin
|
||||
Result := false;
|
||||
W_VIDEO := AW_VIDEO;
|
||||
H_VIDEO := AH_VIDEO;
|
||||
filename := PAnsiChar(inputFile);
|
||||
outputFilename := inputFile;
|
||||
|
||||
// Initialize libavcodec
|
||||
av_register_all();
|
||||
if System.AnsiStrings.SameText(container, 'auto') then
|
||||
// Create format
|
||||
pOutFormat := av_guess_format(nil, filename, nil)
|
||||
else
|
||||
// use the requested container
|
||||
pOutFormat := av_guess_format(PAnsiChar(container), nil, nil);
|
||||
|
||||
if Assigned(pOutFormat) then
|
||||
begin
|
||||
// allocate context
|
||||
pFormatContext := avformat_alloc_context();
|
||||
if Assigned(pFormatContext) then
|
||||
begin
|
||||
pFormatContext^.oformat := pOutFormat;
|
||||
CopyMemory(@pFormatContext^.filename, filename, min(System.AnsiStrings.strlen(filename), sizeof(pFormatContext^.filename)));
|
||||
|
||||
// Add video and audio stream
|
||||
pVideoStream := AddVideoStream(pFormatContext, pOutFormat^.video_codec);
|
||||
pAudioStream := AddAudioStream(pFormatContext, pOutFormat^.audio_codec);
|
||||
|
||||
// Set the output parameters (must be done even if no parameters).
|
||||
av_dump_format(pFormatContext, 0, filename, 1);
|
||||
|
||||
// Open Video and Audio stream
|
||||
Result := false;
|
||||
if Assigned(pVideoStream) then
|
||||
Result := OpenVideo(pFormatContext, pVideoStream);
|
||||
|
||||
if Assigned(pAudioStream) then
|
||||
Result := OpenAudio(pFormatContext, pAudioStream);
|
||||
|
||||
if Result and ((pOutFormat^.flags and AVFMT_NOFILE) = 0) and (avio_open(pFormatContext^.pb, filename, AVIO_FLAG_WRITE) < 0) then
|
||||
begin
|
||||
Result := false;
|
||||
// printf(" Cannot open file\ n ");
|
||||
end;
|
||||
|
||||
if Result then
|
||||
// Write file header.
|
||||
avformat_write_header(pFormatContext, nil);
|
||||
end;
|
||||
end;
|
||||
|
||||
if not Result then
|
||||
begin
|
||||
Free();
|
||||
// printf(" Cannot init file\ n ");
|
||||
end;
|
||||
end;
|
||||
|
||||
function TFFMVideoEncoder.NeedConvert(const framepixfmt: TAVPixelFormat): boolean;
|
||||
begin
|
||||
if Assigned(pVideoStream) and Assigned(pVideoStream^.codec) then
|
||||
Result := pVideoStream^.codec^.pix_fmt <> framepixfmt // AV_PIX_FMT_RGB24
|
||||
else
|
||||
Result := false;
|
||||
end;
|
||||
|
||||
function TFFMVideoEncoder.OpenAudio(const pContext: pAVFormatContext; const pStream: pAVStream): boolean;
|
||||
Var
|
||||
pCodecCxt: pAVCodecContext;
|
||||
pCodec: pAVCodec;
|
||||
begin
|
||||
// pCodecCxt := nil;
|
||||
// pCodec := nil;
|
||||
pCodecCxt := pStream^.codec;
|
||||
// Find the audio encoder.
|
||||
pCodec := avcodec_find_encoder(pCodecCxt^.codec_id);
|
||||
if not Assigned(pCodec) then
|
||||
begin
|
||||
// printf("Cannot open audio codec\n");
|
||||
Exit(false);
|
||||
end;
|
||||
// Open it.
|
||||
if (avcodec_open2(pCodecCxt, pCodec, nil) < 0) then
|
||||
begin
|
||||
// printf("Cannot open audio codec\n");
|
||||
Exit(false);
|
||||
end;
|
||||
|
||||
if (pCodecCxt^.frame_size <= 1) then
|
||||
begin
|
||||
// Ugly hack for PCM codecs (will be removed ASAP with new PCM
// support) to compute the input frame size in samples.
|
||||
audioInputSampleSize := nSizeAudioEncodeBuffer div pCodecCxt^.channels;
|
||||
case pStream^.codec^.codec_id of
|
||||
AV_CODEC_ID_PCM_S16LE, AV_CODEC_ID_PCM_S16BE, AV_CODEC_ID_PCM_U16LE, AV_CODEC_ID_PCM_U16BE:
|
||||
audioInputSampleSize := audioInputSampleSize shr 1;
|
||||
end;
|
||||
pCodecCxt^.frame_size := audioInputSampleSize;
|
||||
end
|
||||
else
|
||||
audioInputSampleSize := pCodecCxt^.frame_size;
|
||||
Result := true;
|
||||
end;
|
||||
|
||||
function TFFMVideoEncoder.OpenVideo(const oc: pAVFormatContext; const pStream: pAVStream): boolean;
|
||||
Var
|
||||
pCodec: pAVCodec;
|
||||
pContext: pAVCodecContext;
|
||||
begin
|
||||
pContext := pStream^.codec;
|
||||
// Find the video encoder.
|
||||
pCodec := avcodec_find_encoder(pContext^.codec_id);
|
||||
if not Assigned(pCodec) then
|
||||
begin
|
||||
// printf("Cannot found video codec\n");
|
||||
Exit(false);
|
||||
end;
|
||||
|
||||
// Open the codec.
|
||||
if (avcodec_open2(pContext, pCodec, nil) < 0) then
|
||||
begin
|
||||
// printf("Cannot open video codec\n");
|
||||
Exit(false);
|
||||
end;
|
||||
|
||||
pVideoEncodeBuffer := nil;
|
||||
if (pFormatContext^.oformat^.flags and AVFMT_RAWPICTURE) = 0 then
|
||||
begin
|
||||
// allocate output buffer
|
||||
nSizeVideoEncodeBuffer := 10000000;
|
||||
pVideoEncodeBuffer := av_malloc(nSizeVideoEncodeBuffer);
|
||||
end;
|
||||
Result := true;
|
||||
end;
|
||||
|
||||
procedure TFFMVideoEncoder.SetVideoParams(const atime_base_den, abit_rate: Integer);
|
||||
begin
|
||||
fbit_rate := abit_rate;
|
||||
ftime_base_den := atime_base_den;
|
||||
end;
|
||||
|
||||
end.
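// ---------------------------------------------------------------------------
// Hedged usage sketch (not part of the original unit): one plausible way to
// drive TFFMVideoEncoder, based only on the methods shown above
// (SetVideoParams, InitFile, Finish). The unit name in the uses clause, the
// output file name, and the per-frame method name AddFrame are assumptions.
(*
program EncodeDemo;
{$APPTYPE CONSOLE}
uses
  System.SysUtils, ffm.VideoEncoder; // unit name assumed

var
  Encoder: TFFMVideoEncoder;
begin
  Encoder := TFFMVideoEncoder.Create;
  try
    Encoder.SetVideoParams(25, 2000000);        // 25 fps time base, ~2 Mbit/s
    if Encoder.InitFile('out.avi', 'auto', 640, 480) then
    begin
      // ... call Encoder.AddFrame(...) once per captured frame here ...
      Encoder.Finish;                           // write trailer, close output
    end;
  finally
    // TFFMVideoEncoder hides TObject.Free with its own Free procedure (see
    // above), so FreeAndNil is used to make sure the destructor runs.
    FreeAndNil(Encoder);
  end;
end.
*)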
|
@ -1,74 +0,0 @@
|
||||
{
|
||||
This file is part of the Free Pascal run time library.
|
||||
Copyright (c) 2004 by Marco van de Voort, member of the
|
||||
Free Pascal development team
|
||||
|
||||
Implements C types for in header conversions
|
||||
|
||||
See the file COPYING.FPC, included in this distribution,
|
||||
for details about the copyright.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
|
||||
|
||||
**********************************************************************}
|
||||
|
||||
unit ffm.ctypes;
|
||||
|
||||
interface
|
||||
|
||||
type
|
||||
qword = int64; // Keep h2pas "uses ctypes" headers working with delphi.
|
||||
|
||||
{ the following type definitions are compiler dependent }
|
||||
{ and system dependent }
|
||||
|
||||
cint8 = shortint; pcint8 = ^cint8;
|
||||
cuint8 = byte; pcuint8 = ^cuint8;
|
||||
cchar = cint8; pcchar = ^cchar;
|
||||
cschar = cint8; pcschar = ^cschar;
|
||||
cuchar = cuint8; pcuchar = ^cuchar;
|
||||
|
||||
cint16 = smallint; pcint16 = ^cint16;
|
||||
cuint16 = word; pcuint16 = ^cuint16;
|
||||
cshort = cint16; pcshort = ^cshort;
|
||||
csshort = cint16; pcsshort = ^csshort;
|
||||
cushort = cuint16; pcushort = ^cushort;
|
||||
|
||||
cint32 = longint; pcint32 = ^cint32;
|
||||
cuint32 = longword; pcuint32 = ^cuint32;
|
||||
cint = cint32; pcint = ^cint; { minimum range is : 32-bit }
|
||||
csint = cint32; pcsint = ^csint; { minimum range is : 32-bit }
|
||||
cuint = cuint32; pcuint = ^cuint; { minimum range is : 32-bit }
|
||||
csigned = cint; pcsigned = ^csigned;
|
||||
cunsigned = cuint; pcunsigned = ^cunsigned;
|
||||
|
||||
cint64 = int64; pcint64 = ^cint64;
|
||||
cuint64 = qword; pcuint64 = ^cuint64;
|
||||
uint64_t = cuint64; puint64_t = ^uint64_t;
|
||||
clonglong = cint64; pclonglong = ^clonglong;
|
||||
cslonglong = cint64; pcslonglong = ^cslonglong;
|
||||
culonglong = cuint64; pculonglong = ^culonglong;
|
||||
|
||||
cbool = longbool; pcbool = ^cbool;
|
||||
|
||||
{$if defined(cpu64) and not(defined(win64) and defined(cpux86_64))}
|
||||
clong = int64; pclong = ^clong;
|
||||
cslong = int64; pcslong = ^cslong;
|
||||
culong = qword; pculong = ^culong;
|
||||
{$else}
|
||||
clong = longint; pclong = ^clong;
|
||||
cslong = longint; pcslong = ^cslong;
|
||||
culong = cardinal; pculong = ^culong;
|
||||
int64_t = Int64; pint64_t = ^int64_t;
|
||||
{$endif}
|
||||
|
||||
cfloat = single; pcfloat = ^cfloat;
|
||||
cdouble = double; pcdouble = ^cdouble;
|
||||
clongdouble = extended; pclongdouble = ^clongdouble;
|
||||
|
||||
implementation
|
||||
|
||||
end.
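// Hedged illustration (not part of the original file): how the aliases above
// are typically used when translating a C prototype for a header import. The
// function name and DLL name below are hypothetical.
(*
  // C:      int32_t sum_bytes(const uint8_t *buf, unsigned int len);
  // Pascal: translation using the aliases declared above.
  function sum_bytes(const buf: pcuint8; len: cuint): cint32; cdecl;
    external 'example.dll';
*)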
|
@ -1,169 +0,0 @@
|
||||
unit ffm.dict;
|
||||
|
||||
{$i ffmpeg.inc}
|
||||
|
||||
interface
|
||||
|
||||
(*
|
||||
*
|
||||
* This file is part of ffm.
|
||||
*
|
||||
* FFmpeg is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU Lesser General Public
|
||||
* License as published by the Free Software Foundation; either
|
||||
* version 2.1 of the License, or (at your option) any later version.
|
||||
*
|
||||
* FFmpeg is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public
|
||||
* License along with FFmpeg; if not, write to the Free Software
|
||||
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
*)
|
||||
//
|
||||
(*
|
||||
* @file
|
||||
* Public dictionary API.
|
||||
* @deprecated
|
||||
* AVDictionary is provided for compatibility with libav. It is both in
|
||||
* implementation as well as API inefficient. It does not scale and is
|
||||
* extremely slow with large dictionaries.
|
||||
* It is recommended that new code uses our tree container from tree.c/h
|
||||
* where applicable, which uses AVL trees to achieve O(log n) performance.
|
||||
*)
|
||||
|
||||
(*
|
||||
* @addtogroup lavu_dict AVDictionary
|
||||
* @ingroup lavu_data
|
||||
*
|
||||
* @brief Simple key:value store
|
||||
*
|
||||
* @{
|
||||
* Dictionaries are used for storing key:value pairs. To create
|
||||
* an AVDictionary, simply pass an address of a NULL pointer to
|
||||
* av_dict_set(). NULL can be used as an empty dictionary wherever
|
||||
* a pointer to an AVDictionary is required.
|
||||
* Use av_dict_get() to retrieve an entry or iterate over all
|
||||
* entries and finally av_dict_free() to free the dictionary
|
||||
* and all its contents.
|
||||
*
|
||||
* @code
|
||||
* AVDictionary *d = NULL; // "create" an empty dictionary
|
||||
* av_dict_set(&d, "foo", "bar", 0); // add an entry
|
||||
*
|
||||
* char *k = av_strdup("key"); // if your strings are already allocated,
|
||||
* char *v = av_strdup("value"); // you can avoid copying them like this
|
||||
* av_dict_set(&d, k, v, AV_DICT_DONT_STRDUP_KEY | AV_DICT_DONT_STRDUP_VAL);
|
||||
*
|
||||
* AVDictionaryEntry *t = NULL;
|
||||
* while (t = av_dict_get(d, "", t, AV_DICT_IGNORE_SUFFIX)) {
|
||||
* <....> // iterate over all entries in d
|
||||
* }
|
||||
*
|
||||
* av_dict_free(&d);
|
||||
* @endcode
|
||||
*
|
||||
*)
|
||||
const
|
||||
AV_DICT_MATCH_CASE = 1;
|
||||
AV_DICT_IGNORE_SUFFIX = 2;
|
||||
AV_DICT_DONT_STRDUP_KEY = 4; // **< Take ownership of a key that's been
|
||||
// allocated with av_malloc() and children. */
|
||||
AV_DICT_DONT_STRDUP_VAL = 8; // **< Take ownership of a value that's been
|
||||
// allocated with av_malloc() and children. */
|
||||
AV_DICT_DONT_OVERWRITE = 16;
|
||||
/// < Don't overwrite existing entries.
|
||||
AV_DICT_APPEND = 32; // **< If the entry already exists, append to it. Note that no
|
||||
// delimiter is added, the strings are simply concatenated. */
|
||||
|
||||
type
|
||||
pAVDictionaryEntry = ^TAVDictionaryEntry;
|
||||
|
||||
TAVDictionaryEntry = { packed } record
|
||||
key: pAnsiChar;
|
||||
value: pAnsiChar;
|
||||
end;
|
||||
|
||||
pAVDictionary = ^TAVDictionary;
|
||||
ppAVDictionary = ^pAVDictionary;
|
||||
|
||||
TAVDictionary = { packed } record
|
||||
end;
|
||||
|
||||
(*
|
||||
* Get a dictionary entry with matching key.
|
||||
*
|
||||
* @param prev Set to the previous matching element to find the next.
|
||||
* If set to NULL the first matching element is returned.
|
||||
* @param flags Allows case as well as suffix-insensitive comparisons.
|
||||
* @return Found entry or NULL, changing key or value leads to undefined behavior.
|
||||
*)
|
||||
// AVDictionaryEntry * av_dict_get(AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags);
|
||||
function av_dict_get(m: pAVDictionary; const key: pAnsiChar; const prev: pAVDictionaryEntry; flags: Integer): pAVDictionaryEntry; cdecl;
|
||||
|
||||
(*
|
||||
* Get number of entries in dictionary.
|
||||
*
|
||||
* @param m dictionary
|
||||
* @return number of entries in dictionary
|
||||
*)
|
||||
// int av_dict_count(const AVDictionary *m);
|
||||
|
||||
(*
|
||||
* Set the given entry in *pm, overwriting an existing entry.
|
||||
*
|
||||
* @param pm pointer to a pointer to a dictionary struct. If *pm is NULL
|
||||
* a dictionary struct is allocated and put in *pm.
|
||||
* @param key entry key to add to *pm (will be av_strduped depending on flags)
|
||||
* @param value entry value to add to *pm (will be av_strduped depending on flags).
|
||||
* Passing a NULL value will cause an existing entry to be deleted.
|
||||
* @return >= 0 on success otherwise an error code <0
|
||||
*)
|
||||
// int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags);
|
||||
function av_dict_set(Var pm: pAVDictionary; const key: pAnsiChar; const value: pAnsiChar; flags: Integer): Integer; cdecl;
|
||||
|
||||
(*
|
||||
* Parse the key/value pairs list and add to a dictionary.
|
||||
*
|
||||
* @param key_val_sep a 0-terminated list of characters used to separate
|
||||
* key from value
|
||||
* @param pairs_sep a 0-terminated list of characters used to separate
|
||||
* two pairs from each other
|
||||
* @param flags flags to use when adding to dictionary.
|
||||
* AV_DICT_DONT_STRDUP_KEY and AV_DICT_DONT_STRDUP_VAL
|
||||
* are ignored since the key/value tokens will always
|
||||
* be duplicated.
|
||||
* @return 0 on success, negative AVERROR code on failure
|
||||
*)
|
||||
// int av_dict_parse_string(AVDictionary **pm, const char *str,
|
||||
// const char *key_val_sep, const char *pairs_sep,
|
||||
// int flags);
|
||||
//
|
||||
(*
|
||||
* Copy entries from one AVDictionary struct into another.
|
||||
* @param dst pointer to a pointer to a AVDictionary struct. If *dst is NULL,
|
||||
* this function will allocate a struct for you and put it in *dst
|
||||
* @param src pointer to source AVDictionary struct
|
||||
* @param flags flags to use when setting entries in *dst
|
||||
* @note metadata is read using the AV_DICT_IGNORE_SUFFIX flag
|
||||
*)
|
||||
// void av_dict_copy(AVDictionary **dst, AVDictionary *src, int flags);
|
||||
//
|
||||
(*
|
||||
* Free all the memory allocated for an AVDictionary struct
|
||||
* and all keys and values.
|
||||
*)
|
||||
// void av_dict_free(AVDictionary **m);
|
||||
procedure av_dict_free(Var m: pAVDictionary); cdecl;
|
||||
|
||||
implementation
|
||||
|
||||
uses ffm.lib;
|
||||
|
||||
function av_dict_get; external avutil_dll;
|
||||
procedure av_dict_free; external avutil_dll;
|
||||
function av_dict_set; external avutil_dll;
|
||||
|
||||
end.
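// Hedged sketch (not part of the original file): a Pascal rendering of the C
// @code usage example from the header comment above, using only the imports
// declared in this unit. The key/value literals are illustrative.
(*
procedure DictionaryDemo;
var
  d: pAVDictionary;
  t: pAVDictionaryEntry;
begin
  d := nil;                                   // "create" an empty dictionary
  av_dict_set(d, 'foo', 'bar', 0);            // add an entry

  t := av_dict_get(d, '', nil, AV_DICT_IGNORE_SUFFIX);
  while t <> nil do
  begin
    // ... iterate over all entries: t^.key / t^.value ...
    t := av_dict_get(d, '', t, AV_DICT_IGNORE_SUFFIX);
  end;

  av_dict_free(d);                            // frees keys, values and d
end;
*)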
|
@ -1,123 +0,0 @@
|
||||
unit ffm.errno;
|
||||
|
||||
interface
|
||||
|
||||
Const
|
||||
|
||||
EPERM = 1; // Operation not permitted
|
||||
ENOENT = 2; // No such file or directory
|
||||
ESRCH = 3; // No such process
|
||||
EINTR = 4; // Interrupted system call
|
||||
EIO = 5; // Input/output error
|
||||
ENXIO = 6; // Device not configured
|
||||
E2BIG = 7; // Argument list too long
|
||||
ENOEXEC = 8; // Exec format error
|
||||
EBADF = 9; // Bad file number
|
||||
ECHILD = 10; // No spawned processes
|
||||
EAGAIN = 11; // Resource temporarily unavailable
|
||||
ENOMEM = 12; // Cannot allocate memory
|
||||
EACCES = 13; // Access denied
|
||||
EFAULT = 14; // Bad address
|
||||
ENOTBLK = 15; // Not block device
|
||||
EBUSY = 16; // Device busy
|
||||
EEXIST = 17; // File exist
|
||||
EXDEV = 18; // Cross-device link
|
||||
ENODEV = 19; // Operation not supported by device
|
||||
ENOTDIR = 20; // Not a directory
|
||||
EISDIR = 21; // Is a directory
|
||||
EINVAL = 22; // Invalid argument
|
||||
ENFILE = 23; // Too many open files in system
|
||||
EMFILE = 24; // Too many files open
|
||||
ENOTTY = 25; // Inappropriate ioctl for device
|
||||
ETXTBSY = 26; // Text file busy
|
||||
EFBIG = 27; // File too large
|
||||
ENOSPC = 28; // No space left on device
|
||||
ESPIPE = 29; // Illegal seek
|
||||
EROFS = 30; // Read-only file system
|
||||
EMLINK = 31; // Too many links
|
||||
EPIPE = 32; // Broken pipe
|
||||
EDOM = 33; // Numerical arg out of domain
|
||||
ERANGE = 34; // Result too large
|
||||
EUCLEAN = 35; // Structure needs cleaning
|
||||
EDEADLK = 36; // Resource deadlock avoided
|
||||
EUNKNOWN = 37; // Unknown error
|
||||
ENAMETOOLONG = 38; // File name too long
|
||||
ENOLCK = 39; // No locks available
|
||||
ENOSYS = 40; // Function not implemented
|
||||
ENOTEMPTY = 41; // Directory not empty
|
||||
EILSEQ = 42; // Invalid multibyte sequence
|
||||
|
||||
//
|
||||
// Sockets errors
|
||||
//
|
||||
|
||||
EWOULDBLOCK = 45; // Operation would block
|
||||
EINPROGRESS = 46; // Operation now in progress
|
||||
EALREADY = 47; // Operation already in progress
|
||||
ENOTSOCK = 48; // Socket operation on nonsocket
|
||||
EDESTADDRREQ = 49; // Destination address required
|
||||
EMSGSIZE = 50; // Message too long
|
||||
EPROTOTYPE = 51; // Protocol wrong type for socket
|
||||
ENOPROTOOPT = 52; // Bad protocol option
|
||||
EPROTONOSUPPORT = 53; // Protocol not supported
|
||||
ESOCKTNOSUPPORT = 54; // Socket type not supported
|
||||
EOPNOTSUPP = 55; // Operation not supported
|
||||
EPFNOSUPPORT = 56; // Protocol family not supported
|
||||
EAFNOSUPPORT = 57; // Address family not supported
|
||||
EADDRINUSE = 58; // Address already in use
|
||||
EADDRNOTAVAIL = 59; // Cannot assign requested address
|
||||
ENETDOWN = 60; // Network is down
|
||||
ENETUNREACH = 61; // Network is unreachable
|
||||
ENETRESET = 62; // Network dropped connection on reset
|
||||
ECONNABORTED = 63; // Connection aborted
|
||||
ECONNRESET = 64; // Connection reset by peer
|
||||
ENOBUFS = 65; // No buffer space available
|
||||
EISCONN = 66; // Socket is already connected
|
||||
ENOTCONN = 67; // Socket is not connected
|
||||
ESHUTDOWN = 68; // Cannot send after socket shutdown
|
||||
ETOOMANYREFS = 69; // Too many references
|
||||
ETIMEDOUT = 70; // Operation timed out
|
||||
ECONNREFUSED = 71; // Connection refused
|
||||
ELOOP = 72; // Cannot translate name
|
||||
EWSNAMETOOLONG = 73; // Name component or name was too long
|
||||
EHOSTDOWN = 74; // Host is down
|
||||
EHOSTUNREACH = 75; // No route to host
|
||||
EWSNOTEMPTY = 76; // Cannot remove a directory that is not empty
|
||||
EPROCLIM = 77; // Too many processes
|
||||
EUSERS = 78; // Ran out of quota
|
||||
EDQUOT = 79; // Ran out of disk quota
|
||||
ESTALE = 80; // File handle reference is no longer available
|
||||
EREMOTE = 81; // Item is not available locally
|
||||
|
||||
//
|
||||
// Resolver errors
|
||||
//
|
||||
|
||||
EHOSTNOTFOUND = 82; // Host not found
|
||||
ETRYAGAIN = 83; // Nonauthoritative host not found
|
||||
ENORECOVERY = 84; // A nonrecoverable error occurred
|
||||
ENODATA = 85; // Valid name, no data record of requested type
|
||||
|
||||
//
|
||||
// Misc. error codes
|
||||
//
|
||||
|
||||
EPROTO = 86; // Protocol error
|
||||
ECHKSUM = 87; // Checksum error
|
||||
EBADSLT = 88; // Invalid slot
|
||||
EREMOTEIO = 89; // Remote I/O error
|
||||
|
||||
//
|
||||
// Error code aliases
|
||||
//
|
||||
|
||||
ETIMEOUT = ETIMEDOUT;
|
||||
EBUF = ENOBUFS;
|
||||
EROUTE = ENETUNREACH;
|
||||
ECONN = ENOTCONN;
|
||||
ERST = ECONNRESET;
|
||||
EABORT = ECONNABORTED;
|
||||
|
||||
implementation
|
||||
|
||||
end.
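// Hedged note (not part of the original file): FFmpeg reports POSIX-style
// failures as negative error codes, so these constants are normally compared
// against a negated return value. The call below is hypothetical.
(*
  ret := SomeFFmpegCall;               // hypothetical call returning Integer
  if ret = -EAGAIN then
    ; // no output available yet - feed more input and try again
*)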
|
@ -1,187 +0,0 @@
|
||||
(*
|
||||
* This file is part of ffm.
|
||||
*
|
||||
* FFmpeg is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU Lesser General Public
|
||||
* License as published by the Free Software Foundation; either
|
||||
* version 2.1 of the License, or (at your option) any later version.
|
||||
*
|
||||
* FFmpeg is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public
|
||||
* License along with FFmpeg; if not, write to the Free Software
|
||||
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
*)
|
||||
|
||||
unit ffm.error;
|
||||
|
||||
{$i ffmpeg.inc}
|
||||
|
||||
interface
|
||||
|
||||
uses ffm.errno;
|
||||
|
||||
(* error handling *)
|
||||
{$IF EDOM > 0}
|
||||
function AVERROR(e: integer): integer; inline;
|
||||
/// < Returns a negative error code from a POSIX error code, to return from library functions.
|
||||
function AVUNERROR(e: integer): integer; inline;
|
||||
/// < Returns a POSIX error code from a library function error return value.
|
||||
{$ELSE}
|
||||
(* Some platforms have E* and errno already negated. *)
|
||||
(* On such platforms the C macros are identity mappings: AVERROR(e) = (e) and AVUNERROR(e) = (e). *)
|
||||
{$ENDIF}
|
||||
// #define FFERRTAG(a, b, c, d) (-(int)MKTAG(a, b, c, d))
|
||||
|
||||
Const
|
||||
AVERROR_BSF_NOT_FOUND = -(ord($F8) or (ord('B') shl 8) or (ord('S') shl 16) or (ord('F') shl 24));
|
||||
/// < Bitstream filter not found
|
||||
AVERROR_BUG = -(ord('B') or (ord('U') shl 8) or (ord('G') shl 16) or (ord('!') shl 24));
|
||||
/// < Internal bug, also see AVERROR_BUG2
|
||||
AVERROR_BUFFER_TOO_SMALL = -(ord('B') or (ord('U') shl 8) or (ord('F') shl 16) or (ord('S') shl 24));
|
||||
/// < Buffer too small
|
||||
AVERROR_DECODER_NOT_FOUND = -(ord($F8) or (ord('D') shl 8) or (ord('E') shl 16) or (ord('C') shl 24));
|
||||
/// < Decoder not found
|
||||
AVERROR_DEMUXER_NOT_FOUND = -(ord($F8) or (ord('D') shl 8) or (ord('E') shl 16) or (ord('M') shl 24));
|
||||
/// < Demuxer not found
|
||||
AVERROR_ENCODER_NOT_FOUND = -(ord($F8) or (ord('E') shl 8) or (ord('N') shl 16) or (ord('C') shl 24));
|
||||
/// < Encoder not found
|
||||
AVERROR_EOF = -(ord('E') or (ord('O') shl 8) or (ord('F') shl 16) or (ord(' ') shl 24));
|
||||
/// < End of file
|
||||
AVERROR_EXIT = -(ord('E') or (ord('X') shl 8) or (ord('I') shl 16) or (ord('T') shl 24));
|
||||
/// < Immediate exit was requested; the called function should not be restarted
|
||||
AVERROR_EXTERNAL = -(ord('E') or (ord('X') shl 8) or (ord('T') shl 16) or (ord(' ') shl 24));
|
||||
/// < Generic error in an external library
|
||||
AVERROR_FILTER_NOT_FOUND = -(ord($F8) or (ord('F') shl 8) or (ord('I') shl 16) or (ord('L') shl 24));
|
||||
/// < Filter not found
|
||||
AVERROR_INVALIDDATA = -(ord('I') or (ord('N') shl 8) or (ord('D') shl 16) or (ord('A') shl 24));
|
||||
/// < Invalid data found when processing input
|
||||
AVERROR_MUXER_NOT_FOUND = -(ord($F8) or (ord('M') shl 8) or (ord('U') shl 16) or (ord('X') shl 24));
|
||||
/// < Muxer not found
|
||||
AVERROR_OPTION_NOT_FOUND = -(ord($F8) or (ord('O') shl 8) or (ord('P') shl 16) or (ord('T') shl 24));
|
||||
/// < Option not found
|
||||
AVERROR_PATCHWELCOME = -(ord('P') or (ord('A') shl 8) or (ord('W') shl 16) or (ord('E') shl 24));
|
||||
/// < Not yet implemented in FFmpeg, patches welcome
|
||||
AVERROR_PROTOCOL_NOT_FOUND = -(ord($F8) or (ord('P') shl 8) or (ord('R') shl 16) or (ord('O') shl 24));
|
||||
/// < Protocol not found
|
||||
AVERROR_STREAM_NOT_FOUND = -(ord($F8) or (ord('S') shl 8) or (ord('T') shl 16) or (ord('R') shl 24));
|
||||
/// < Stream not found
|
||||
(*
|
||||
* This is semantically identical to AVERROR_BUG
|
||||
* it has been introduced in Libav after our AVERROR_BUG and with a modified value.
|
||||
*)
|
||||
AVERROR_BUG2 = -(ord('B') or (ord('U') shl 8) or (ord('G') shl 16) or (ord(' ') shl 24));
|
||||
AVERROR_UNKNOWN = -(ord('U') or (ord('N') shl 8) or (ord('K') shl 16) or (ord('N') shl 24));
|
||||
/// < Unknown error, typically from an external library
|
||||
AVERROR_EXPERIMENTAL = -($2BB2AFA8);
|
||||
/// < Requested feature is flagged experimental. Set strict_std_compliance if you really want to use it.
|
||||
|
||||
AV_ERROR_MAX_STRING_SIZE = 64;
|
||||
|
||||
(*
|
||||
* Put a description of the AVERROR code errnum in errbuf.
|
||||
* In case of failure the global variable errno is set to indicate the
|
||||
* error. Even in case of failure av_strerror() will print a generic
|
||||
* error message indicating the errnum provided to errbuf.
|
||||
*
|
||||
* @param errnum error code to describe
|
||||
* @param errbuf buffer to which description is written
|
||||
* @param errbuf_size the size in bytes of errbuf
|
||||
* @return 0 on success, a negative value if a description for errnum
|
||||
* cannot be found
|
||||
*)
|
||||
// int av_strerror(int errnum, char *errbuf, size_t errbuf_size);
|
||||
function av_strerror(errnum: integer; errbuf: pAnsiChar; errbuf_size: integer): integer; cdecl;
|
||||
|
||||
(*
|
||||
* Fill the provided buffer with a string containing an error string
|
||||
* corresponding to the AVERROR code errnum.
|
||||
*
|
||||
* @param errbuf a buffer
|
||||
* @param errbuf_size size in bytes of errbuf
|
||||
* @param errnum error code to describe
|
||||
* @return the buffer in input, filled with the error description
|
||||
* @see av_strerror()
|
||||
*)
|
||||
// static inline char *av_make_error_string(char *errbuf, size_t errbuf_size, int errnum)
|
||||
{
|
||||
av_strerror(errnum, errbuf, errbuf_size);
|
||||
return errbuf;
|
||||
}
|
||||
|
||||
function av_make_error_string(errbuf: pAnsiChar; errbuf_size: integer; errnum: integer): pAnsiChar; inline;
|
||||
|
||||
(*
|
||||
* Convenience macro, the return value should be used only directly in
|
||||
* function arguments but never stand-alone.
|
||||
*)
|
||||
// #define av_err2str(errnum) \
|
||||
// av_make_error_string((char[AV_ERROR_MAX_STRING_SIZE]){0}, AV_ERROR_MAX_STRING_SIZE, errnum)
|
||||
function av_err2str(errnum: integer): pAnsiChar; inline;
|
||||
|
||||
{
|
||||
DumpErrorCodes - Error Code : AVERROR_BSF_NOT_FOUND = -1179861752
|
||||
DumpErrorCodes - Error Code : AVERROR_BUG = -558323010
|
||||
DumpErrorCodes - Error Code : AVERROR_BUFFER_TOO_SMALL = -1397118274
|
||||
DumpErrorCodes - Error Code : AVERROR_DECODER_NOT_FOUND = -1128613112
|
||||
DumpErrorCodes - Error Code : AVERROR_DEMUXER_NOT_FOUND = -1296385272
|
||||
DumpErrorCodes - Error Code : AVERROR_ENCODER_NOT_FOUND = -1129203192
|
||||
DumpErrorCodes - Error Code : AVERROR_EOF = -541478725
|
||||
DumpErrorCodes - Error Code : AVERROR_EXIT = -1414092869
|
||||
DumpErrorCodes - Error Code : AVERROR_EXTERNAL = -542398533
|
||||
DumpErrorCodes - Error Code : AVERROR_FILTER_NOT_FOUND = -1279870712
|
||||
DumpErrorCodes - Error Code : AVERROR_INVALIDDATA = -1094995529
|
||||
DumpErrorCodes - Error Code : AVERROR_MUXER_NOT_FOUND = -1481985528
|
||||
DumpErrorCodes - Error Code : AVERROR_OPTION_NOT_FOUND = -1414549496
|
||||
DumpErrorCodes - Error Code : AVERROR_PATCHWELCOME = -1163346256
|
||||
DumpErrorCodes - Error Code : AVERROR_PROTOCOL_NOT_FOUND = -1330794744
|
||||
DumpErrorCodes - Error Code : AVERROR_STREAM_NOT_FOUND = -1381258232
|
||||
DumpErrorCodes - Error Code : AVERROR_BUG2 = -541545794
|
||||
DumpErrorCodes - Error Code : AVERROR_UNKNOWN = -1313558101
|
||||
DumpErrorCodes - Error Code : AVERROR_EXPERIMENTAL = -733130664
|
||||
DumpErrorCodes - Error Code : AVERROR_INPUT_CHANGED = -1668179713
|
||||
DumpErrorCodes - Error Code : AVERROR_OUTPUT_CHANGED = -1668179714
|
||||
DumpErrorCodes - Error Code : AVERROR_HTTP_BAD_REQUEST = -808465656
|
||||
DumpErrorCodes - Error Code : AVERROR_HTTP_UNAUTHORIZED = -825242872
|
||||
DumpErrorCodes - Error Code : AVERROR_HTTP_FORBIDDEN = -858797304
|
||||
DumpErrorCodes - Error Code : AVERROR_HTTP_NOT_FOUND = -875574520
|
||||
DumpErrorCodes - Error Code : AVERROR_HTTP_OTHER_4XX = -1482175736
|
||||
DumpErrorCodes - Error Code : AVERROR_HTTP_SERVER_ERROR = -1482175992
|
||||
}
|
||||
|
||||
implementation
|
||||
|
||||
uses ffm.lib;
|
||||
|
||||
function AVERROR(e: integer): integer; inline;
|
||||
begin
|
||||
Result := -e;
|
||||
end;
|
||||
|
||||
function AVUNERROR(e: integer): integer; inline;
|
||||
begin
|
||||
Result := -e;
|
||||
end;
|
||||
|
||||
function av_make_error_string(errbuf: pAnsiChar; errbuf_size: integer; errnum: integer): pAnsiChar; inline;
|
||||
begin
|
||||
av_strerror(errnum, errbuf, errbuf_size);
|
||||
Result := errbuf;
|
||||
end;
|
||||
|
||||
function av_err2str(errnum: integer): pAnsiChar; inline;
|
||||
Var
|
||||
buf: array [0 .. AV_ERROR_MAX_STRING_SIZE - 1] of AnsiChar;
|
||||
begin
|
||||
FillChar(buf, SizeOf(buf), 0);
|
||||
av_make_error_string(buf, AV_ERROR_MAX_STRING_SIZE, errnum);
|
||||
Result := buf;
|
||||
end;
|
||||
|
||||
function av_strerror; external avutil_dll;
|
||||
|
||||
end.
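// Hedged usage sketch (not part of the original file): reporting a failure
// from an FFmpeg call with the helpers declared above. Only AVERROR_EOF and
// av_err2str come from this unit; the call itself is hypothetical.
(*
  ret := ReadNextPacket;                     // hypothetical, returns Integer
  if ret < 0 then
    if ret = AVERROR_EOF then
      Writeln('end of stream reached')
    else
      Writeln('error: ', string(av_err2str(ret)));
*)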
|
||||
|
@ -1,673 +0,0 @@
|
||||
(*
|
||||
*
|
||||
* This file is part of ffm.
|
||||
*
|
||||
* FFmpeg is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU Lesser General Public
|
||||
* License as published by the Free Software Foundation; either
|
||||
* version 2.1 of the License, or (at your option) any later version.
|
||||
*
|
||||
* FFmpeg is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public
|
||||
* License along with FFmpeg; if not, write to the Free Software
|
||||
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
*)
|
||||
|
||||
unit ffm.frame;
|
||||
|
||||
{$i ffmpeg.inc}
|
||||
|
||||
interface
|
||||
|
||||
uses
|
||||
ffm.ctypes, ffm.buffer, ffm.dict, ffm.avutil, ffm.rational;
|
||||
|
||||
Type
|
||||
pAVColorSpace = ^TAVColorSpace;
|
||||
TAVColorSpace = ( //
|
||||
AVCOL_SPC_RGB = 0, //
|
||||
AVCOL_SPC_BT709 = 1, // < also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / SMPTE RP177 Annex B
|
||||
AVCOL_SPC_UNSPECIFIED = 2, //
|
||||
AVCOL_SPC_FCC = 4, //
|
||||
AVCOL_SPC_BT470BG = 5, // < also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM / IEC 61966-2-4 xvYCC601
|
||||
AVCOL_SPC_SMPTE170M = 6, // < also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC / functionally identical to above
|
||||
AVCOL_SPC_SMPTE240M = 7, //
|
||||
AVCOL_SPC_YCOCG = 8, // < Used by Dirac / VC-2 and H.264 FRext, see ITU-T SG16
|
||||
AVCOL_SPC_BT2020_NCL = 9, // < ITU-R BT2020 non-constant luminance system
|
||||
AVCOL_SPC_BT2020_CL = 10, // < ITU-R BT2020 constant luminance system
|
||||
AVCOL_SPC_NB // < Not part of ABI
|
||||
);
|
||||
|
||||
Const
|
||||
AVCOL_SPC_YCGCO = AVCOL_SPC_YCOCG;
|
||||
|
||||
(*
|
||||
* The frame data may be corrupted, e.g. due to decoding errors.
|
||||
*)
|
||||
AV_FRAME_FLAG_CORRUPT = (1 shl 0);
|
||||
FF_DECODE_ERROR_INVALID_BITSTREAM = 1;
|
||||
FF_DECODE_ERROR_MISSING_REFERENCE = 2;
|
||||
|
||||
Type
|
||||
pAVColorRange = ^TAVColorRange;
|
||||
TAVColorRange = ( //
|
||||
AVCOL_RANGE_UNSPECIFIED = 0, //
|
||||
AVCOL_RANGE_MPEG = 1, // < the normal 219*2^(n-8) "MPEG" YUV ranges
|
||||
AVCOL_RANGE_JPEG = 2, // < the normal 2^n-1 "JPEG" YUV ranges
|
||||
AVCOL_RANGE_NB // < Not part of ABI
|
||||
);
|
||||
|
||||
pAVFrameSideDataType = ^TAVFrameSideDataType;
|
||||
TAVFrameSideDataType = (
|
||||
(*
|
||||
* The data is the AVPanScan struct defined in libavcodec.
|
||||
*)
|
||||
AV_FRAME_DATA_PANSCAN //
|
||||
);
|
||||
|
||||
pAVFrameSideData = ^TAVFrameSideData;
|
||||
ppAVFrameSideData = ^pAVFrameSideData;
|
||||
|
||||
TAVFrameSideData = {packed} record
|
||||
_type: TAVFrameSideDataType;
|
||||
data: pByte;
|
||||
size: Integer;
|
||||
metadata: pAVDictionary;
|
||||
end;
|
||||
|
||||
(*
|
||||
* This structure describes decoded (raw) audio or video data.
|
||||
*
|
||||
* AVFrame must be allocated using av_frame_alloc(). Note that this only
|
||||
* allocates the AVFrame itself, the buffers for the data must be managed
|
||||
* through other means (see below).
|
||||
* AVFrame must be freed with av_frame_free().
|
||||
*
|
||||
* AVFrame is typically allocated once and then reused multiple times to hold
|
||||
* different data (e.g. a single AVFrame to hold frames received from a
|
||||
* decoder). In such a case, av_frame_unref() will free any references held by
|
||||
* the frame and reset it to its original clean state before it
|
||||
* is reused again.
|
||||
*
|
||||
* The data described by an AVFrame is usually reference counted through the
|
||||
* AVBuffer API. The underlying buffer references are stored in AVFrame.buf /
|
||||
* AVFrame.extended_buf. An AVFrame is considered to be reference counted if at
|
||||
* least one reference is set, i.e. if AVFrame.buf[0] != NULL. In such a case,
|
||||
* every single data plane must be contained in one of the buffers in
|
||||
* AVFrame.buf or AVFrame.extended_buf.
|
||||
* There may be a single buffer for all the data, or one separate buffer for
|
||||
* each plane, or anything in between.
|
||||
*
|
||||
* sizeof(AVFrame) is not a part of the public ABI, so new fields may be added
|
||||
* to the end with a minor bump.
|
||||
* Similarly fields that are marked as to be only accessed by
|
||||
* av_opt_ptr() can be reordered. This allows 2 forks to add fields
|
||||
* without breaking compatibility with each other.
|
||||
*)
|
||||
const
|
||||
AV_NUM_DATA_POINTERS = 8;
|
||||
|
||||
Type
|
||||
pAVNDPArray = ^TAVNDPArray;
|
||||
TAVNDPArray = array [0 .. AV_NUM_DATA_POINTERS - 1] of Integer;
|
||||
|
||||
pAVFrameByteArray = ^TAVFrameByteArray;
|
||||
TAVFrameByteArray = array [0 .. AV_NUM_DATA_POINTERS - 1] of pByte;
|
||||
|
||||
pAVFrameInt64Array = ^TAVFrameInt64Array;
|
||||
TAVFrameInt64Array = array [0 .. AV_NUM_DATA_POINTERS - 1] of uint64;
|
||||
|
||||
pAVFrame = ^TAVFrame;
|
||||
ppAVFrame = ^pAVFrame;
|
||||
|
||||
puint8 = ^uint8;
|
||||
ppuint8 = ^puint8;
|
||||
|
||||
pMotion_val = ^TMotion_val;
|
||||
TMotion_val = array [0 .. 1] of int16;
|
||||
|
||||
pRef_index = ^TRef_index;
|
||||
TRef_index = array [0 .. 1] of int8;
|
||||
|
||||
pAVBufferRefArray = ^TAVBufferRefArray;
|
||||
TAVBufferRefArray = array [0 .. AV_NUM_DATA_POINTERS - 1] of pAVBufferRef;
|
||||
|
||||
TLinesizes = array [0 .. AV_NUM_DATA_POINTERS - 1] of Integer;
|
||||
pLinesizes = ^TLinesizes;
|
||||
|
||||
TAVFrame = {packed} record
|
||||
(*
|
||||
* pointer to the picture/channel planes.
|
||||
* This might be different from the first allocated byte
|
||||
*
|
||||
* Some decoders access areas outside 0,0 - width,height, please
|
||||
* see avcodec_align_dimensions2(). Some filters and swscale can read
|
||||
* up to 16 bytes beyond the planes, if these filters are to be used,
|
||||
* then 16 extra bytes must be allocated.
|
||||
*)
|
||||
data: TAVFrameByteArray;
|
||||
(*
|
||||
* For video, size in bytes of each picture line.
|
||||
* For audio, size in bytes of each plane.
|
||||
*
|
||||
* For audio, only linesize[0] may be set. For planar audio, each channel
|
||||
* plane must be the same size.
|
||||
*
|
||||
* For video the linesizes should be multiples of the CPU's alignment
* preference; this is 16 or 32 for modern desktop CPUs.
* Some code requires such alignment, other code can be slower without
* correct alignment, and for yet other code it makes no difference.
|
||||
*
|
||||
* @note The linesize may be larger than the size of usable data -- there
|
||||
* may be extra padding present for performance reasons.
|
||||
*)
|
||||
linesize: TLinesizes;
|
||||
(*
|
||||
* pointers to the data planes/channels.
|
||||
*
|
||||
* For video, this should simply point to data[].
|
||||
*
|
||||
* For planar audio, each channel has a separate data pointer, and
|
||||
* linesize[0] contains the size of each channel buffer.
|
||||
* For packed audio, there is just one data pointer, and linesize[0]
|
||||
* contains the total size of the buffer for all channels.
|
||||
*
|
||||
* Note: Both data and extended_data should always be set in a valid frame,
|
||||
* but for planar audio with more channels than can fit in data,
|
||||
* extended_data must be used in order to access all channels.
|
||||
*)
|
||||
extended_data: ppuint8;
|
||||
(*
|
||||
* width and height of the video frame
|
||||
*)
|
||||
width, height: Integer;
|
||||
(*
|
||||
* number of audio samples (per channel) described by this frame
|
||||
*)
|
||||
nb_samples: Integer;
|
||||
|
||||
(*
|
||||
* format of the frame, -1 if unknown or unset
|
||||
* Values correspond to enum AVPixelFormat for video frames,
|
||||
* enum AVSampleFormat for audio)
|
||||
*)
|
||||
format: Integer;
|
||||
(*
|
||||
* 1 -> keyframe, 0-> not
|
||||
*)
|
||||
key_frame: Integer;
|
||||
(*
|
||||
* Picture type of the frame.
|
||||
*)
|
||||
pict_type: TAVPictureType;
|
||||
|
||||
{$IFDEF FF_API_AVFRAME_LAVC}
|
||||
// attribute_deprecated
|
||||
base: TAVFrameByteArray;
|
||||
{$ENDIF}
|
||||
(*
|
||||
* Sample aspect ratio for the video frame, 0/1 if unknown/unspecified.
|
||||
*)
|
||||
sample_aspect_ratio: TAVRational;
|
||||
(*
|
||||
* Presentation timestamp in time_base units (time when frame should be shown to user).
|
||||
*)
|
||||
pts: int64_t;
|
||||
(*
|
||||
* PTS copied from the AVPacket that was decoded to produce this frame.
|
||||
*)
|
||||
pkt_pts: int64_t;
|
||||
(*
|
||||
* DTS copied from the AVPacket that triggered returning this frame. (if frame threading isn't used)
|
||||
* This is also the Presentation time of this AVFrame calculated from
|
||||
* only AVPacket.dts values without pts values.
|
||||
*)
|
||||
pkt_dts: int64_t;
|
||||
(*
|
||||
* picture number in bitstream order
|
||||
*)
|
||||
coded_picture_number: Integer;
|
||||
(*
|
||||
* picture number in display order
|
||||
*)
|
||||
display_picture_number: Integer;
|
||||
(*
|
||||
* quality (between 1 (good) and FF_LAMBDA_MAX (bad))
|
||||
*)
|
||||
quality: Integer;
|
||||
{$IFDEF FF_API_AVFRAME_LAVC}
|
||||
// attribute_deprecated
|
||||
reference: Integer;
|
||||
(*
|
||||
* QP table *)
|
||||
// attribute_deprecated
|
||||
qscale_table: pint8_t;
|
||||
(*
|
||||
* QP store stride
|
||||
*)
|
||||
// attribute_deprecated
|
||||
qstride: Integer;
|
||||
// attribute_deprecated
|
||||
qscale_type: Integer;
|
||||
(*
|
||||
* mbskip_table[mb]>=1 if MB didn't change
|
||||
* stride= mb_width = (width+15)>>4
|
||||
*)
|
||||
// attribute_deprecated
|
||||
mbskip_table: puint8_t;
|
||||
(*
|
||||
* motion vector table
|
||||
* @code
|
||||
* example:
|
||||
* int mv_sample_log2= 4 - motion_subsample_log2;
|
||||
* int mb_width= (width+15)>>4;
|
||||
* int mv_stride= (mb_width << mv_sample_log2) + 1;
|
||||
* motion_val[direction][x + y*mv_stride][0->mv_x, 1->mv_y];
|
||||
* @endcode
|
||||
*)
|
||||
// attribute_deprecated
|
||||
// int16_t (*motion_val[2])[2];
|
||||
motion_val: array [0 .. 1] of pMotion_val;
|
||||
(*
|
||||
* macroblock type table
|
||||
* mb_type_base + mb_width + 2
|
||||
*)
|
||||
// attribute_deprecated
|
||||
mb_type: puint32_t;
|
||||
(*
|
||||
* DCT coefficients
|
||||
*)
|
||||
// attribute_deprecated
|
||||
dct_coeff: pshort;
|
||||
(*
|
||||
* motion reference frame index
|
||||
* the order in which these are stored can depend on the codec.
|
||||
*)
|
||||
// attribute_deprecated
|
||||
// int8_t *ref_index[2];
|
||||
ref_index: pRef_index;
|
||||
{$ENDIF}
|
||||
(*
|
||||
* for some private data of the user
|
||||
*)
|
||||
opaque: Pointer;
|
||||
(*
|
||||
* error
|
||||
*)
|
||||
error: array [0 .. AV_NUM_DATA_POINTERS - 1] of uint64;
|
||||
{$IFDEF FF_API_AVFRAME_LAVC}
|
||||
// attribute_deprecated
|
||||
_type: Integer;
|
||||
{$ENDIF}
|
||||
(*
|
||||
* When decoding, this signals how much the picture must be delayed.
|
||||
* extra_delay = repeat_pict / (2*fps)
|
||||
*)
|
||||
repeat_pict: Integer;
|
||||
(*
|
||||
* The content of the picture is interlaced.
|
||||
*)
|
||||
interlaced_frame: Integer;
|
||||
(*
|
||||
* If the content is interlaced, is top field displayed first.
|
||||
*)
|
||||
top_field_first: Integer;
|
||||
(*
|
||||
* Tell user application that palette has changed from previous frame.
|
||||
*)
|
||||
palette_has_changed: Integer;
|
||||
|
||||
{$IFDEF FF_API_AVFRAME_LAVC}
|
||||
// attribute_deprecated
|
||||
buffer_hints: Integer;
|
||||
(*
|
||||
* Pan scan.
|
||||
*)
|
||||
// attribute_deprecated
|
||||
pan_scan: pAVPanScan;
|
||||
{$ENDIF}
|
||||
(*
|
||||
* reordered opaque 64bit (generally an integer or a double precision float
|
||||
* PTS but can be anything).
|
||||
* The user sets AVCodecContext.reordered_opaque to represent the input at
|
||||
* that time,
|
||||
* the decoder reorders values as needed and sets AVFrame.reordered_opaque
|
||||
* to exactly one of the values provided by the user through AVCodecContext.reordered_opaque
|
||||
* @deprecated in favor of pkt_pts
|
||||
*)
|
||||
reordered_opaque: int64_t;
|
||||
{$IFDEF FF_API_AVFRAME_LAVC}
|
||||
(*
|
||||
* @deprecated this field is unused
|
||||
*)
|
||||
// attribute_deprecated
|
||||
hwaccel_picture_private: Pointer;
|
||||
// attribute_deprecated
|
||||
owner: pAVCodecContext;
|
||||
// attribute_deprecated
|
||||
thread_opaque: Pointer;
|
||||
(*
|
||||
* log2 of the size of the block which a single vector in motion_val represents:
|
||||
* (4->16x16, 3->8x8, 2-> 4x4, 1-> 2x2)
|
||||
*)
|
||||
// attribute_deprecated
|
||||
motion_subsample_log2: uint8_t;
|
||||
{$ENDIF}
|
||||
(*
|
||||
* Sample rate of the audio data.
|
||||
*)
|
||||
sample_rate: Integer;
|
||||
(*
|
||||
* Channel layout of the audio data.
|
||||
*)
|
||||
channel_layout: uint64_t;
|
||||
//
|
||||
(*
|
||||
* AVBuffer references backing the data for this frame. If all elements of
|
||||
* this array are NULL, then this frame is not reference counted.
|
||||
*
|
||||
* There may be at most one AVBuffer per data plane, so for video this array
|
||||
* always contains all the references. For planar audio with more than
|
||||
* AV_NUM_DATA_POINTERS channels, there may be more buffers than can fit in
|
||||
* this array. Then the extra AVBufferRef pointers are stored in the
|
||||
* extended_buf array.
|
||||
*)
|
||||
buf: TAVBufferRefArray;
|
||||
(*
|
||||
* For planar audio which requires more than AV_NUM_DATA_POINTERS
|
||||
* AVBufferRef pointers, this array will hold all the references which
|
||||
* cannot fit into AVFrame.buf.
|
||||
*
|
||||
* Note that this is different from AVFrame.extended_data, which always
|
||||
* contains all the pointers. This array only contains the extra pointers,
|
||||
* which cannot fit into AVFrame.buf.
|
||||
*
|
||||
* This array is always allocated using av_malloc() by whoever constructs
|
||||
* the frame. It is freed in av_frame_unref().
|
||||
*)
|
||||
extended_buf: ppAVBufferRef;
|
||||
(*
|
||||
* Number of elements in extended_buf.
|
||||
*)
|
||||
nb_extended_buf: Integer;
|
||||
side_data: ppAVFrameSideData;
|
||||
nb_side_data: Integer;
|
||||
(*
|
||||
* Frame flags, a combination of AV_FRAME_FLAG_*
|
||||
*)
|
||||
flags: Integer;
|
||||
(*
|
||||
* frame timestamp estimated using various heuristics, in stream time base
|
||||
* Code outside libavcodec should access this field using:
|
||||
* av_frame_get_best_effort_timestamp(frame)
|
||||
* - encoding: unused
|
||||
* - decoding: set by libavcodec, read by user.
|
||||
*)
|
||||
best_effort_timestamp: int64_t;
|
||||
(*
|
||||
* reordered pos from the last AVPacket that has been input into the decoder
|
||||
* Code outside libavcodec should access this field using:
|
||||
* av_frame_get_pkt_pos(frame)
|
||||
* - encoding: unused
|
||||
* - decoding: Read by user.
|
||||
*)
|
||||
pkt_pos: int64_t;
|
||||
(*
|
||||
* duration of the corresponding packet, expressed in
|
||||
* AVStream->time_base units, 0 if unknown.
|
||||
* Code outside libavcodec should access this field using:
|
||||
* av_frame_get_pkt_duration(frame)
|
||||
* - encoding: unused
|
||||
* - decoding: Read by user.
|
||||
*)
|
||||
pkt_duration: int64_t;
|
||||
(*
|
||||
* metadata.
|
||||
* Code outside libavcodec should access this field using:
|
||||
* av_frame_get_metadata(frame)
|
||||
* - encoding: Set by user.
|
||||
* - decoding: Set by libavcodec.
|
||||
*)
|
||||
metadata: pAVDictionary;
|
||||
(*
|
||||
* decode error flags of the frame, set to a combination of
|
||||
* FF_DECODE_ERROR_xxx flags if the decoder produced a frame, but there
|
||||
* were errors during the decoding.
|
||||
* Code outside libavcodec should access this field using:
|
||||
* av_frame_get_decode_error_flags(frame)
|
||||
* - encoding: unused
|
||||
* - decoding: set by libavcodec, read by user.
|
||||
*)
|
||||
decode_error_flags: Integer;
|
||||
(*
|
||||
* number of audio channels, only used for audio.
|
||||
* Code outside libavcodec should access this field using:
|
||||
* av_frame_get_channels(frame)
|
||||
* - encoding: unused
|
||||
* - decoding: Read by user.
|
||||
*)
|
||||
channels: Integer;
|
||||
(*
|
||||
* size of the corresponding packet containing the compressed
|
||||
* frame. It must be accessed using av_frame_get_pkt_size() and
|
||||
* av_frame_set_pkt_size().
|
||||
* It is set to a negative value if unknown.
|
||||
* - encoding: unused
|
||||
* - decoding: set by libavcodec, read by user.
|
||||
*)
|
||||
pkt_size: Integer;
|
||||
(*
|
||||
* YUV colorspace type.
|
||||
* It must be accessed using av_frame_get_colorspace() and
|
||||
* av_frame_set_colorspace().
|
||||
* - encoding: Set by user
|
||||
* - decoding: Set by libavcodec
|
||||
*)
|
||||
colorspace: TAVColorSpace;
|
||||
(*
|
||||
* MPEG vs JPEG YUV range.
|
||||
* It must be accessed using av_frame_get_color_range() and
|
||||
* av_frame_set_color_range().
|
||||
* - encoding: Set by user
|
||||
* - decoding: Set by libavcodec
|
||||
*)
|
||||
color_range: TAVColorRange;
|
||||
(*
|
||||
* Not to be accessed directly from outside libavutil
|
||||
*)
|
||||
qp_table_buf: pAVBufferRef;
|
||||
end;
|
||||
|
||||
(*
|
||||
// * Accessors for some AVFrame fields.
|
||||
// * The position of these field in the structure is not part of the ABI,
|
||||
// * they should not be accessed directly outside libavcodec.
|
||||
*)
|
||||
// int64_t av_frame_get_best_effort_timestamp(const AVFrame *frame);
|
||||
function av_frame_get_best_effort_timestamp(const frame: pAVFrame): int64; cdecl;
|
||||
// void av_frame_set_best_effort_timestamp(AVFrame *frame, int64_t val);
|
||||
// int64_t av_frame_get_pkt_duration (const AVFrame *frame);
|
||||
// void av_frame_set_pkt_duration (AVFrame *frame, int64_t val);
|
||||
// int64_t av_frame_get_pkt_pos (const AVFrame *frame);
|
||||
// void av_frame_set_pkt_pos (AVFrame *frame, int64_t val);
|
||||
// int64_t av_frame_get_channel_layout (const AVFrame *frame);
|
||||
// void av_frame_set_channel_layout (AVFrame *frame, int64_t val);
|
||||
// int av_frame_get_channels (const AVFrame *frame);
|
||||
// void av_frame_set_channels (AVFrame *frame, int val);
|
||||
// int av_frame_get_sample_rate (const AVFrame *frame);
|
||||
// void av_frame_set_sample_rate (AVFrame *frame, int val);
|
||||
// AVDictionary *av_frame_get_metadata (const AVFrame *frame);
|
||||
// void av_frame_set_metadata (AVFrame *frame, AVDictionary *val);
|
||||
// int av_frame_get_decode_error_flags (const AVFrame *frame);
|
||||
// void av_frame_set_decode_error_flags (AVFrame *frame, int val);
|
||||
// int av_frame_get_pkt_size(const AVFrame *frame);
|
||||
// void av_frame_set_pkt_size(AVFrame *frame, int val);
|
||||
// AVDictionary **avpriv_frame_get_metadatap(AVFrame *frame);
|
||||
// int8_t *av_frame_get_qp_table(AVFrame *f, int *stride, int *type);
|
||||
// int av_frame_set_qp_table(AVFrame *f, AVBufferRef *buf, int stride, int type);
|
||||
// enum AVColorSpace av_frame_get_colorspace(const AVFrame *frame);
|
||||
// void av_frame_set_colorspace(AVFrame *frame, enum AVColorSpace val);
|
||||
// enum AVColorRange av_frame_get_color_range(const AVFrame *frame);
|
||||
// void av_frame_set_color_range(AVFrame *frame, enum AVColorRange val);
|
||||
//
|
||||
(*
|
||||
// * Get the name of a colorspace.
|
||||
// * @return a static string identifying the colorspace; can be NULL.
|
||||
*)
|
||||
// const char *av_get_colorspace_name(enum AVColorSpace val);
|
||||
//
|
||||
(*
|
||||
// * Allocate an AVFrame and set its fields to default values. The resulting
|
||||
// * struct must be freed using av_frame_free().
|
||||
// *
|
||||
// * @return An AVFrame filled with default values or NULL on failure.
|
||||
// *
|
||||
// * @note this only allocates the AVFrame itself, not the data buffers. Those
|
||||
// * must be allocated through other means, e.g. with av_frame_get_buffer() or
|
||||
// * manually.
|
||||
*)
|
||||
// AVFrame *av_frame_alloc(void);
|
||||
function av_frame_alloc(): pAVFrame; cdecl;
|
||||
//
|
||||
(*
|
||||
// * Free the frame and any dynamically allocated objects in it,
|
||||
// * e.g. extended_data. If the frame is reference counted, it will be
|
||||
// * unreferenced first.
|
||||
// *
|
||||
// * @param frame frame to be freed. The pointer will be set to NULL.
|
||||
*)
|
||||
// void av_frame_free(AVFrame **frame);
|
||||
procedure av_frame_free(Var frame: pAVFrame); cdecl;
|
||||
(*
|
||||
// * Setup a new reference to the data described by a given frame.
|
||||
// *
|
||||
// * Copy frame properties from src to dst and create a new reference for each
|
||||
// * AVBufferRef from src.
|
||||
// *
|
||||
// * If src is not reference counted, new buffers are allocated and the data is
|
||||
// * copied.
|
||||
// *
|
||||
// * @return 0 on success, a negative AVERROR on error
|
||||
*)
|
||||
// int av_frame_ref(AVFrame *dst, const AVFrame *src);
|
||||
//
|
||||
(*
|
||||
// * Create a new frame that references the same data as src.
|
||||
// *
|
||||
// * This is a shortcut for av_frame_alloc()+av_frame_ref().
|
||||
// *
|
||||
// * @return newly created AVFrame on success, NULL on error.
|
||||
*)
|
||||
// AVFrame *av_frame_clone(const AVFrame *src);
|
||||
//
|
||||
(*
|
||||
// * Unreference all the buffers referenced by frame and reset the frame fields.
|
||||
*)
|
||||
// void av_frame_unref(AVFrame *frame);
|
||||
procedure av_frame_unref(frame: pAVFrame); cdecl;
|
||||
|
||||
(*
|
||||
// * Move everything contained in src to dst and reset src.
|
||||
*)
|
||||
// void av_frame_move_ref(AVFrame *dst, AVFrame *src);
|
||||
//
|
||||
(*
|
||||
// * Allocate new buffer(s) for audio or video data.
|
||||
// *
|
||||
// * The following fields must be set on frame before calling this function:
|
||||
// * - format (pixel format for video, sample format for audio)
|
||||
// * - width and height for video
|
||||
// * - nb_samples and channel_layout for audio
|
||||
// *
|
||||
// * This function will fill AVFrame.data and AVFrame.buf arrays and, if
|
||||
// * necessary, allocate and fill AVFrame.extended_data and AVFrame.extended_buf.
|
||||
// * For planar formats, one buffer will be allocated for each plane.
|
||||
// *
|
||||
// * @param frame frame in which to store the new buffers.
|
||||
// * @param align required buffer size alignment
|
||||
// *
|
||||
// * @return 0 on success, a negative AVERROR on error.
|
||||
*)
|
||||
// int av_frame_get_buffer(AVFrame *frame, int align);
|
||||
//
|
||||
(*
|
||||
// * Check if the frame data is writable.
|
||||
// *
|
||||
// * @return A positive value if the frame data is writable (which is true if and
|
||||
// * only if each of the underlying buffers has only one reference, namely the one
|
||||
// * stored in this frame). Return 0 otherwise.
|
||||
// *
|
||||
// * If 1 is returned the answer is valid until av_buffer_ref() is called on any
|
||||
// * of the underlying AVBufferRefs (e.g. through av_frame_ref() or directly).
|
||||
// *
|
||||
// * @see av_frame_make_writable(), av_buffer_is_writable()
|
||||
*)
|
||||
// int av_frame_is_writable(AVFrame *frame);
|
||||
//
|
||||
(*
|
||||
// * Ensure that the frame data is writable, avoiding data copy if possible.
|
||||
// *
|
||||
// * Do nothing if the frame is writable, allocate new buffers and copy the data
|
||||
// * if it is not.
|
||||
// *
|
||||
// * @return 0 on success, a negative AVERROR on error.
|
||||
// *
|
||||
// * @see av_frame_is_writable(), av_buffer_is_writable(),
|
||||
// * av_buffer_make_writable()
|
||||
*)
|
||||
// int av_frame_make_writable(AVFrame *frame);
|
||||
//
|
||||
(*
|
||||
// * Copy only "metadata" fields from src to dst.
|
||||
// *
|
||||
// * Metadata for the purpose of this function are those fields that do not affect
|
||||
// * the data layout in the buffers. E.g. pts, sample rate (for audio) or sample
|
||||
// * aspect ratio (for video), but not width/height or channel layout.
|
||||
// * Side data is also copied.
|
||||
*)
|
||||
// int av_frame_copy_props(AVFrame *dst, const AVFrame *src);
|
||||
//
|
||||
(*
|
||||
// * Get the buffer reference a given data plane is stored in.
|
||||
// *
|
||||
// * @param plane index of the data plane of interest in frame->extended_data.
|
||||
// *
|
||||
// * @return the buffer reference that contains the plane or NULL if the input
|
||||
// * frame is not valid.
|
||||
*)
|
||||
// AVBufferRef *av_frame_get_plane_buffer(AVFrame *frame, int plane);
|
||||
//
|
||||
(*
|
||||
// * Add a new side data to a frame.
|
||||
// *
|
||||
// * @param frame a frame to which the side data should be added
|
||||
// * @param type type of the added side data
|
||||
// * @param size size of the side data
|
||||
// *
|
||||
// * @return newly added side data on success, NULL on error
|
||||
*)
|
||||
// AVFrameSideData *av_frame_new_side_data(AVFrame *frame,
|
||||
// enum AVFrameSideDataType type,
|
||||
// int size);
|
||||
//
|
||||
(*
|
||||
// * @return a pointer to the side data of a given type on success, NULL if there
|
||||
// * is no side data with such type in this frame.
|
||||
*)
|
||||
// AVFrameSideData *av_frame_get_side_data(const AVFrame *frame,
|
||||
// enum AVFrameSideDataType type);
|
||||
|
||||
implementation

uses ffm.lib;

function av_frame_alloc; external avutil_dll;
function av_frame_get_best_effort_timestamp; external avutil_dll;
procedure av_frame_unref; external avutil_dll;
procedure av_frame_free; external avutil_dll;

end.
|
@ -1,210 +0,0 @@
|
||||
unit ffm.imgutils;
|
||||
|
||||
{$i ffmpeg.inc}
|
||||
|
||||
interface
|
||||
|
||||
uses
|
||||
ffm.pixfmt, ffm.frame;
|
||||
|
||||
(*
|
||||
* This file is part of ffm.
|
||||
*
|
||||
* FFmpeg is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU Lesser General Public
|
||||
* License as published by the Free Software Foundation; either
|
||||
* version 2.1 of the License, or (at your option) any later version.
|
||||
*
|
||||
* FFmpeg is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public
|
||||
* License along with FFmpeg; if not, write to the Free Software
|
||||
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
*)
|
||||
|
||||
(*
|
||||
* Compute the max pixel step for each plane of an image with a
|
||||
* format described by pixdesc.
|
||||
*
|
||||
* The pixel step is the distance in bytes between the first byte of
|
||||
* the group of bytes which describe a pixel component and the first
|
||||
* byte of the successive group in the same plane for the same
|
||||
* component.
|
||||
*
|
||||
* @param max_pixsteps an array which is filled with the max pixel step
|
||||
* for each plane. Since a plane may contain different pixel
|
||||
* components, the computed max_pixsteps[plane] is relative to the
|
||||
* component in the plane with the max pixel step.
|
||||
* @param max_pixstep_comps an array which is filled with the component
|
||||
* for each plane which has the max pixel step. May be NULL.
|
||||
*)
|
||||
// void av_image_fill_max_pixsteps(int max_pixsteps[4], int max_pixstep_comps[4],
|
||||
// const AVPixFmtDescriptor *pixdesc);
|
||||
|
||||
(*
|
||||
* Compute the size of an image line with format pix_fmt and width
|
||||
* width for the plane plane.
|
||||
*
|
||||
* @return the computed size in bytes
|
||||
*)
|
||||
// int av_image_get_linesize(enum AVPixelFormat pix_fmt, int width, int plane);
|
||||
|
||||
(*
|
||||
* Fill plane linesizes for an image with pixel format pix_fmt and
|
||||
* width width.
|
||||
*
|
||||
* @param linesizes array to be filled with the linesize for each plane
|
||||
* @return >= 0 in case of success, a negative error code otherwise
|
||||
*)
|
||||
// int av_image_fill_linesizes(int linesizes[4], enum AVPixelFormat pix_fmt, int width);
|
||||
|
||||
(*
|
||||
* Fill plane data pointers for an image with pixel format pix_fmt and
|
||||
* height height.
|
||||
*
|
||||
* @param data pointers array to be filled with the pointer for each image plane
|
||||
* @param ptr the pointer to a buffer which will contain the image
|
||||
* @param linesizes the array containing the linesize for each
|
||||
* plane, should be filled by av_image_fill_linesizes()
|
||||
* @return the size in bytes required for the image buffer, a negative
|
||||
* error code in case of failure
|
||||
*)
|
||||
// int av_image_fill_pointers(uint8_t *data[4], enum AVPixelFormat pix_fmt, int height,
|
||||
// uint8_t *ptr, const int linesizes[4]);
|
||||
|
||||
(*
|
||||
* Allocate an image with size w and h and pixel format pix_fmt, and
|
||||
* fill pointers and linesizes accordingly.
|
||||
* The allocated image buffer has to be freed by using
|
||||
* av_freep(&pointers[0]).
|
||||
*
|
||||
* @param align the value to use for buffer size alignment
|
||||
* @return the size in bytes required for the image buffer, a negative
|
||||
* error code in case of failure
|
||||
*)
|
||||
// int av_image_alloc(uint8_t *pointers[4], int linesizes[4],
|
||||
// int w, int h, enum AVPixelFormat pix_fmt, int align);
|
||||
// Type
|
||||
// TPointers = array [0 .. 3] of pByte;
|
||||
// pPointers = ^TPointers;
|
||||
// TLinesizes = array [0 .. 3] of integer;
|
||||
// pLinesizes = ^TLinesizes;
|
||||
|
||||
function av_image_alloc(Var pointers: TAVFrameByteArray; linesizes: TLinesizes; w: integer; h: integer;
|
||||
pix_fmt: TAVPixelFormat; align: integer): integer; cdecl;
|
||||
// function av_image_alloc(pointers: pPointers; linesizes: TLinesizes; w: integer; h: integer; pix_fmt: TAVPixelFormat;
|
||||
// align: integer): integer; overload; cdecl;
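// Usage sketch (added for illustration, not part of the original unit):
// allocating a 32-byte aligned RGB24 image buffer and releasing it again.
// AV_PIX_FMT_RGB24 is assumed to be a member of TAVPixelFormat from
// ffm.pixfmt, and av_freep is assumed to come from ffm.mem; per the C API,
// Lines receives the byte width of each plane.
//
// var
//   Data: TAVFrameByteArray;
//   Lines: TLinesizes;
//   Size: integer;
// begin
//   Size := av_image_alloc(Data, Lines, 640, 480, AV_PIX_FMT_RGB24, 32);
//   if Size >= 0 then
//   try
//     // Lines[0] bytes per row, Size bytes in total, pixels start at Data[0]
//   finally
//     av_freep(@Data[0]); // frees the single buffer allocated above
//   end;
// end;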
|
||||
|
||||
(*
|
||||
* Copy image plane from src to dst.
|
||||
* That is, copy "height" number of lines of "bytewidth" bytes each.
|
||||
* The first byte of each successive line is separated by *_linesize
|
||||
* bytes.
|
||||
*
|
||||
* bytewidth must be contained by both absolute values of dst_linesize
|
||||
* and src_linesize, otherwise the function behavior is undefined.
|
||||
*
|
||||
* @param dst_linesize linesize for the image plane in dst
|
||||
* @param src_linesize linesize for the image plane in src
|
||||
*)
|
||||
// void av_image_copy_plane(uint8_t *dst, int dst_linesize,
|
||||
// const uint8_t *src, int src_linesize,
|
||||
// int bytewidth, int height);
|
||||
|
||||
(*
|
||||
* Copy image in src_data to dst_data.
|
||||
*
|
||||
* @param dst_linesizes linesizes for the image in dst_data
|
||||
* @param src_linesizes linesizes for the image in src_data
|
||||
*)
|
||||
// void av_image_copy(uint8_t *dst_data[4], int dst_linesizes[4],
|
||||
// const uint8_t *src_data[4], const int src_linesizes[4],
|
||||
// enum AVPixelFormat pix_fmt, int width, int height);
|
||||
|
||||
(*
|
||||
* Setup the data pointers and linesizes based on the specified image
|
||||
* parameters and the provided array.
|
||||
*
|
||||
* The fields of the given image are filled in by using the src
|
||||
* address which points to the image data buffer. Depending on the
|
||||
* specified pixel format, one or multiple image data pointers and
|
||||
* line sizes will be set. If a planar format is specified, several
|
||||
* pointers will be set pointing to the different picture planes and
|
||||
* the line sizes of the different planes will be stored in the
|
||||
* lines_sizes array. Call with src == NULL to get the required
|
||||
* size for the src buffer.
|
||||
*
|
||||
* To allocate the buffer and fill in the dst_data and dst_linesize in
|
||||
* one call, use av_image_alloc().
|
||||
*
|
||||
* @param dst_data data pointers to be filled in
|
||||
* @param dst_linesizes linesizes for the image in dst_data to be filled in
|
||||
* @param src buffer which will contain or contains the actual image data, can be NULL
|
||||
* @param pix_fmt the pixel format of the image
|
||||
* @param width the width of the image in pixels
|
||||
* @param height the height of the image in pixels
|
||||
* @param align the value used in src for linesize alignment
|
||||
* @return the size in bytes required for src, a negative error code
|
||||
* in case of failure
|
||||
*)
|
||||
// int av_image_fill_arrays(uint8_t *dst_data[4], int dst_linesize[4],
|
||||
// const uint8_t *src,
|
||||
// enum AVPixelFormat pix_fmt, int width, int height, int align);
|
||||
|
||||
(*
|
||||
* Return the size in bytes of the amount of data required to store an
|
||||
* image with the given parameters.
|
||||
*
|
||||
* @param[in] align the assumed linesize alignment
|
||||
*)
|
||||
// int av_image_get_buffer_size(enum AVPixelFormat pix_fmt, int width, int height, int align);
|
||||
|
||||
(*
|
||||
* Copy image data from an image into a buffer.
|
||||
*
|
||||
* av_image_get_buffer_size() can be used to compute the required size
|
||||
* for the buffer to fill.
|
||||
*
|
||||
* @param dst a buffer into which picture data will be copied
|
||||
* @param dst_size the size in bytes of dst
|
||||
* @param src_data pointers containing the source image data
|
||||
* @param src_linesizes linesizes for the image in src_data
|
||||
* @param pix_fmt the pixel format of the source image
|
||||
* @param width the width of the source image in pixels
|
||||
* @param height the height of the source image in pixels
|
||||
* @param align the assumed linesize alignment for dst
|
||||
* @return the number of bytes written to dst, or a negative value
|
||||
* (error code) on error
|
||||
*)
|
||||
// int av_image_copy_to_buffer(uint8_t *dst, int dst_size,
|
||||
// const uint8_t * const src_data[4], const int src_linesize[4],
|
||||
// enum AVPixelFormat pix_fmt, int width, int height, int align);
|
||||
|
||||
(*
|
||||
* Check if the given dimension of an image is valid, meaning that all
|
||||
* bytes of the image can be addressed with a signed int.
|
||||
*
|
||||
* @param w the width of the picture
|
||||
* @param h the height of the picture
|
||||
* @param log_offset the offset to sum to the log level for logging with log_ctx
|
||||
* @param log_ctx the parent logging context, it may be NULL
|
||||
* @return >= 0 if valid, a negative error code otherwise
|
||||
*)
|
||||
// int av_image_check_size(unsigned int w, unsigned int h, int log_offset, void *log_ctx);
|
||||
|
||||
// int avpriv_set_systematic_pal2(uint32_t pal[256], enum AVPixelFormat pix_fmt);
|
||||
|
||||
implementation
|
||||
|
||||
uses ffm.lib;
|
||||
|
||||
function av_image_alloc; external avutil_dll;
|
||||
// function av_image_alloc(Var pointers: TPointers; linesizes: TLinesizes; w: integer; h: integer; pix_fmt: TAVPixelFormat;
|
||||
// align: integer): integer; overload; cdecl; external avutil_dll name 'av_image_alloc';
|
||||
// function av_image_alloc(pointers: pPointers; linesizes: TLinesizes; w: integer; h: integer; pix_fmt: TAVPixelFormat;
|
||||
// align: integer): integer; overload; cdecl; external avutil_dll name 'av_image_alloc';
|
||||
|
||||
end.
|
@ -1,19 +0,0 @@
|
||||
unit ffm.lib;

{$i ffmpeg.inc}

interface

const
  avcodec_dll = 'avcodec-57.dll';
  avdevice_dll = 'avdevice-57.dll';
  avfilter_dll = 'avfilter-6.dll';
  avformat_dll = 'avformat-57.dll';
  avutil_dll = 'avutil-55.dll';
  postproc_dll = 'postproc-54.dll';
  swresample_dll = 'swresample-2.dll';
  swscale_dll = 'swscale-4.dll';

implementation

end.
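// Usage sketch (added for illustration, not part of the original unit):
// these names are link-time constants, so each FFmpeg DLL must be present
// when a unit that binds functions with "external" is loaded. A simple
// runtime probe using only Winapi calls:
//
// uses Winapi.Windows;
//
// function AvUtilAvailable: Boolean;
// var
//   H: HMODULE;
// begin
//   H := LoadLibrary(avutil_dll); // 'avutil-55.dll'
//   Result := H <> 0;
//   if Result then
//     FreeLibrary(H);
// end;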
|
File diff suppressed because it is too large
@ -1,309 +0,0 @@
|
||||
(*
|
||||
// * copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
|
||||
// *
|
||||
// * This file is part of ffm.
|
||||
// *
|
||||
// * FFmpeg is free software; you can redistribute it and/or
|
||||
// * modify it under the terms of the GNU Lesser General Public
|
||||
// * License as published by the Free Software Foundation; either
|
||||
// * version 2.1 of the License, or (at your option) any later version.
|
||||
// *
|
||||
// * FFmpeg is distributed in the hope that it will be useful,
|
||||
// * but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
// * Lesser General Public License for more details.
|
||||
// *
|
||||
// * You should have received a copy of the GNU Lesser General Public
|
||||
// * License along with FFmpeg; if not, write to the Free Software
|
||||
// * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
*)
|
||||
unit ffm.log;
|
||||
|
||||
{$i ffmpeg.inc}
|
||||
|
||||
interface
|
||||
|
||||
uses
|
||||
ffm.opt;
|
||||
|
||||
Type
|
||||
|
||||
PVA_LIST = ^VA_LIST;
|
||||
VA_LIST = array [0 .. 0] of Pointer;
|
||||
|
||||
TAVClassCategory = (AV_CLASS_CATEGORY_NA = 0, AV_CLASS_CATEGORY_INPUT, AV_CLASS_CATEGORY_OUTPUT,
|
||||
AV_CLASS_CATEGORY_MUXER, AV_CLASS_CATEGORY_DEMUXER, AV_CLASS_CATEGORY_ENCODER, AV_CLASS_CATEGORY_DECODER,
|
||||
AV_CLASS_CATEGORY_FILTER, AV_CLASS_CATEGORY_BITSTREAM_FILTER, AV_CLASS_CATEGORY_SWSCALER,
|
||||
AV_CLASS_CATEGORY_SWRESAMPLER, AV_CLASS_CATEGORY_NB);
|
||||
|
||||
// struct AVOptionRanges;
|
||||
|
||||
(*
|
||||
// * Describe the class of an AVClass context structure. That is an
|
||||
// * arbitrary struct of which the first field is a pointer to an
|
||||
// * AVClass struct (e.g. AVCodecContext, AVFormatContext etc.).
|
||||
*)
|
||||
pAVClass = ^TAVClass;
|
||||
|
||||
TItem_name = function(ctx: Pointer): PAnsiChar; cdecl;
|
||||
TChild_next = procedure(obj: Pointer; prev: Pointer); cdecl;
|
||||
TChild_class_next = function(prev: pAVClass): pAVClass; cdecl;
|
||||
TGet_category = function(ctx: Pointer): TAVClassCategory; cdecl;
|
||||
TQuery_ranges = function(av_ranges: ppAVOptionRanges; obj: Pointer; key: PAnsiChar; flag: Integer): Integer; cdecl;
|
||||
|
||||
TAVClass = record
|
||||
(*
|
||||
* The name of the class; usually it is the same name as the
|
||||
* context structure type to which the AVClass is associated.
|
||||
*)
|
||||
class_name: PAnsiChar;
|
||||
(*
|
||||
* A pointer to a function which returns the name of a context
|
||||
* instance ctx associated with the class.
|
||||
*)
|
||||
// const char* (*item_name)(void* ctx);
|
||||
item_name: TItem_name;
|
||||
(*
|
||||
* a pointer to the first option specified in the class if any or NULL
|
||||
*
|
||||
* @see av_set_default_options()
|
||||
*)
|
||||
option: pAVOption;
|
||||
(*
|
||||
* LIBAVUTIL_VERSION with which this structure was created.
|
||||
* This is used to allow fields to be added without requiring major
|
||||
* version bumps everywhere.
|
||||
*)
|
||||
version: Integer;
|
||||
(*
|
||||
* Offset in the structure where log_level_offset is stored.
|
||||
* 0 means there is no such variable
|
||||
*)
|
||||
log_level_offset_offset: Integer;
|
||||
(*
|
||||
* Offset in the structure where a pointer to the parent context for
|
||||
* logging is stored. For example a decoder could pass its AVCodecContext
|
||||
* to eval as such a parent context, which an av_log() implementation
|
||||
* could then leverage to display the parent context.
|
||||
* The offset can be NULL.
|
||||
*)
|
||||
parent_log_context_offset: Integer;
|
||||
(*
|
||||
* Return next AVOptions-enabled child or NULL
|
||||
*)
|
||||
// void* (*child_next)(void *obj, void *prev);
|
||||
child_next: TChild_next;
|
||||
(*
|
||||
* Return an AVClass corresponding to the next potential
|
||||
* AVOptions-enabled child.
|
||||
*
|
||||
* The difference between child_next and this is that
|
||||
* child_next iterates over _already existing_ objects, while
|
||||
* child_class_next iterates over _all possible_ children.
|
||||
*)
|
||||
// const struct AVClass* (*child_class_next)(const struct AVClass *prev);
|
||||
child_class_next: TChild_class_next;
|
||||
(*
|
||||
* Category used for visualization (like color)
|
||||
* This is only set if the category is equal for all objects using this class.
|
||||
* available since version (51 << 16 | 56 << 8 | 100)
|
||||
*)
|
||||
category: TAVClassCategory;
|
||||
(*
|
||||
* Callback to return the category.
|
||||
* available since version (51 << 16 | 59 << 8 | 100)
|
||||
*)
|
||||
// AVClassCategory (*get_category)(void* ctx);
|
||||
get_category: TGet_category;
|
||||
(*
|
||||
// * Callback to return the supported/allowed ranges.
|
||||
// * available since version (52.12)
|
||||
*)
|
||||
// int (*query_ranges)(struct AVOptionRanges **, void *obj, const char *key, int flags);
|
||||
query_ranges: TQuery_ranges;
|
||||
end;
|
||||
|
||||
const
|
||||
(* Print no output. *)
|
||||
AV_LOG_QUIET = -8;
|
||||
(* Something went really wrong and we will crash now. *)
|
||||
AV_LOG_PANIC = 0;
|
||||
(*
|
||||
* Something went wrong and recovery is not possible.
|
||||
* For example, no header was found for a format which depends
|
||||
* on headers or an illegal combination of parameters is used.
|
||||
*)
|
||||
AV_LOG_FATAL = 8;
|
||||
(*
|
||||
* Something went wrong and cannot losslessly be recovered.
|
||||
* However, not all future data is affected.
|
||||
*)
|
||||
AV_LOG_ERROR = 16;
|
||||
(*
|
||||
* Something somehow does not look correct. This may or may not
|
||||
* lead to problems. An example would be the use of '-vstrict -2'.
|
||||
*)
|
||||
AV_LOG_WARNING = 24;
|
||||
(*
|
||||
* Standard information.
|
||||
*)
|
||||
AV_LOG_INFO = 32;
|
||||
(*
|
||||
* Detailed information.
|
||||
*)
|
||||
AV_LOG_VERBOSE = 40;
|
||||
(*
|
||||
* Stuff which is only useful for libav* developers.
|
||||
*)
|
||||
AV_LOG_DEBUG = 48;
|
||||
AV_LOG_MAX_OFFSET = (AV_LOG_DEBUG - AV_LOG_QUIET);
|
||||
(*
|
||||
* Send the specified message to the log if the level is less than or equal
|
||||
* to the current av_log_level. By default, all logging messages are sent to
|
||||
* stderr. This behavior can be altered by setting a different logging callback
|
||||
* function.
|
||||
* @see av_log_set_callback
|
||||
*
|
||||
* @param avcl A pointer to an arbitrary struct of which the first field is a
|
||||
* pointer to an AVClass struct.
|
||||
* @param level The importance level of the message expressed using a @ref
|
||||
* lavu_log_constants "Logging Constant".
|
||||
* @param fmt The format string (printf-compatible) that specifies how
|
||||
* subsequent arguments are converted to output.
|
||||
*)
|
||||
|
||||
// void av_log(void *avcl, int level, const char *fmt, ...) av_printf_format(3, 4);
|
||||
|
||||
(*
|
||||
* Send the specified message to the log if the level is less than or equal
|
||||
* to the current av_log_level. By default, all logging messages are sent to
|
||||
* stderr. This behavior can be altered by setting a different logging callback
|
||||
* function.
|
||||
* @see av_log_set_callback
|
||||
*
|
||||
* @param avcl A pointer to an arbitrary struct of which the first field is a
|
||||
* pointer to an AVClass struct.
|
||||
* @param level The importance level of the message expressed using a @ref
|
||||
* lavu_log_constants "Logging Constant".
|
||||
* @param fmt The format string (printf-compatible) that specifies how
|
||||
* subsequent arguments are converted to output.
|
||||
* @param vl The arguments referenced by the format string.
|
||||
*)
|
||||
// void av_vlog(void *avcl, int level, const char *fmt, va_list vl);
|
||||
(*
|
||||
* Get the current log level
|
||||
*
|
||||
* @see lavu_log_constants
|
||||
*
|
||||
* @return Current log level
|
||||
*)
|
||||
// int av_log_get_level(void);
|
||||
function av_log_get_level: Integer; cdecl;
|
||||
|
||||
(*
|
||||
* Set the log level
|
||||
*
|
||||
* @see lavu_log_constants
|
||||
*
|
||||
* @param level Logging level
|
||||
*)
|
||||
// void av_log_set_level(int level);
|
||||
procedure av_log_set_level(level: Integer); cdecl;
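// Usage sketch (added for illustration, not part of the original header):
// quieting FFmpeg logging from a Delphi host before opening any input.
// AV_LOG_ERROR is declared above; AV_LOG_SKIP_REPEATED and av_log_set_flags
// are declared further down in this unit.
//
// begin
//   av_log_set_level(AV_LOG_ERROR);          // report real errors only
//   av_log_set_flags(AV_LOG_SKIP_REPEATED);  // collapse repeated messages
// end;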
|
||||
|
||||
(*
|
||||
* Set the logging callback
|
||||
*
|
||||
* @note The callback must be thread safe, even if the application does not use
|
||||
* threads itself as some codecs are multithreaded.
|
||||
*
|
||||
* @see av_log_default_callback
|
||||
*
|
||||
* @param callback A logging function with a compatible signature.
|
||||
*)
|
||||
// void av_log_set_callback(void (*callback)(void*, int, const char*, va_list));
|
||||
Type
|
||||
Tav_log_set_callback = procedure(ptr: Pointer; level: Integer; fmt: PAnsiChar; vl: PVA_LIST); cdecl varargs;
|
||||
|
||||
procedure av_log_set_callback(callbackproc: Tav_log_set_callback); cdecl;
|
||||
|
||||
(*
|
||||
* Default logging callback
|
||||
*
|
||||
* It prints the message to stderr, optionally colorizing it.
|
||||
*
|
||||
* @param avcl A pointer to an arbitrary struct of which the first field is a
|
||||
* pointer to an AVClass struct.
|
||||
* @param level The importance level of the message expressed using a @ref
|
||||
* lavu_log_constants "Logging Constant".
|
||||
* @param fmt The format string (printf-compatible) that specifies how
|
||||
* subsequent arguments are converted to output.
|
||||
* @param ap The arguments referenced by the format string.
|
||||
*)
|
||||
// void av_log_default_callback(void* ptr, int level, const char* fmt, va_list vl);
|
||||
|
||||
(*
|
||||
* Return the context name
|
||||
*
|
||||
* @param ctx The AVClass context
|
||||
*
|
||||
* @return The AVClass class_name
|
||||
*)
|
||||
// const char* av_default_item_name(void* ctx);
|
||||
function av_default_item_name(clx: Pointer): PAnsiChar; cdecl;
|
||||
|
||||
// AVClassCategory av_default_get_category(void *ptr);
|
||||
function av_default_get_category(ptr: Pointer): TAVClassCategory; cdecl;
|
||||
|
||||
(*
|
||||
* Format a line of log the same way as the default callback.
|
||||
* @param line buffer to receive the formatted line
|
||||
* @param line_size size of the buffer
|
||||
* @param print_prefix used to store whether the prefix must be printed;
|
||||
* must point to a persistent integer initially set to 1
|
||||
*)
|
||||
// void av_log_format_line(void *ptr, int level, const char *fmt, va_list vl,
|
||||
// char *line, int line_size, int *print_prefix);
|
||||
procedure av_log_format_line(ptr: Pointer; level: Integer; const fmt: PAnsiChar; vl: PVA_LIST; line: PAnsiChar;
|
||||
line_size: Integer; Var print_prefix: Integer); cdecl;
|
||||
|
||||
(*
|
||||
* av_dlog macros
|
||||
* Useful to print debug messages that shouldn't get compiled in normally.
|
||||
*)
|
||||
// #ifdef DEBUG
|
||||
// # define av_dlog(pctx, ...) av_log(pctx, AV_LOG_DEBUG, __VA_ARGS__)
|
||||
// #else
|
||||
// # define av_dlog(pctx, ...) do { if (0) av_log(pctx, AV_LOG_DEBUG, __VA_ARGS__); } while (0)
|
||||
// #endif
|
||||
|
||||
(*
|
||||
* Skip repeated messages, this requires the user app to use av_log() instead of
|
||||
* (f)printf as the 2 would otherwise interfere and lead to
|
||||
* "Last message repeated x times" messages below (f)printf messages with some
|
||||
* bad luck.
|
||||
* Also to receive the last, "last repeated" line if any, the user app must
|
||||
* call av_log(NULL, AV_LOG_QUIET, "%s", ""); at the end
|
||||
*)
|
||||
const
|
||||
AV_LOG_SKIP_REPEATED = 1;
|
||||
AV_LOG_PRINT_LEVEL = 2;
|
||||
|
||||
// void av_log_set_flags(int arg);
|
||||
procedure av_log_set_flags(arg: Integer); cdecl;
|
||||
// int av_log_get_flags(void);
|
||||
function av_log_get_flags: Integer; cdecl;

implementation

uses ffm.lib;

procedure av_log_set_flags; external avutil_dll;
function av_log_get_level; external avutil_dll;
procedure av_log_set_level; external avutil_dll;
function av_default_item_name; external avutil_dll;
function av_default_get_category; external avutil_dll;
function av_log_get_flags; external avutil_dll;
procedure av_log_set_callback; external avutil_dll;
procedure av_log_format_line; external avutil_dll;

end.
|
@ -1,169 +0,0 @@
|
||||
(*
|
||||
* copyright (c) 2005-2012 Michael Niedermayer <michaelni@gmx.at>
|
||||
*
|
||||
* This file is part of ffm.
|
||||
*
|
||||
* FFmpeg is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU Lesser General Public
|
||||
* License as published by the Free Software Foundation; either
|
||||
* version 2.1 of the License, or (at your option) any later version.
|
||||
*
|
||||
* FFmpeg is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public
|
||||
* License along with FFmpeg; if not, write to the Free Software
|
||||
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
*)
|
||||
|
||||
unit ffm.mathematics;
|
||||
|
||||
{$i ffmpeg.inc}
|
||||
|
||||
interface
|
||||
|
||||
uses
|
||||
ffm.rational;
|
||||
|
||||
{$IFNDEF M_E}
|
||||
|
||||
const
|
||||
M_E = 2.7182818284590452354; (* e *)
|
||||
{$ENDIF}
|
||||
{$IFNDEF M_LN2}
|
||||
|
||||
const
|
||||
M_LN2 = 0.69314718055994530942; (* log_e 2 *)
|
||||
{$ENDIF}
|
||||
{$IFNDEF M_LN10}
|
||||
|
||||
const
|
||||
M_LN10 = 2.30258509299404568402; (* log_e 10 *)
|
||||
{$ENDIF}
|
||||
{$IFNDEF M_LOG2_10}
|
||||
|
||||
const
|
||||
M_LOG2_10 = 3.32192809488736234787; (* log_2 10 *)
|
||||
{$ENDIF}
|
||||
{$IFNDEF M_PHI}
|
||||
|
||||
const
|
||||
M_PHI = 1.61803398874989484820; (* phi / golden ratio *)
|
||||
{$ENDIF}
|
||||
{$IFNDEF M_PI}
|
||||
|
||||
const
|
||||
M_PI = 3.14159265358979323846; (* pi *)
|
||||
{$ENDIF}
|
||||
{$IFNDEF M_SQRT1_2}
|
||||
|
||||
const
|
||||
M_SQRT1_2 = 0.70710678118654752440; (* 1/sqrt(2) *)
|
||||
{$ENDIF}
|
||||
{$IFNDEF M_SQRT2}
|
||||
|
||||
const
|
||||
M_SQRT2 = 1.41421356237309504880; (* sqrt(2) *)
|
||||
{$ENDIF}
|
||||
{$IFNDEF NAN}
|
||||
|
||||
const
|
||||
NAN = $7FC00000;
|
||||
{$ENDIF}
|
||||
{$IFNDEF INFINITY}
|
||||
|
||||
const
|
||||
INFINITY = $7F800000;
|
||||
{$ENDIF}
|
||||
|
||||
Type
|
||||
TAVRounding = (
  AV_ROUND_ZERO = 0,          ///< Round toward zero.
  AV_ROUND_INF = 1,           ///< Round away from zero.
  AV_ROUND_DOWN = 2,          ///< Round toward -infinity.
  AV_ROUND_UP = 3,            ///< Round toward +infinity.
  AV_ROUND_NEAR_INF = 5,      ///< Round to nearest and halfway cases away from zero.
  AV_ROUND_PASS_MINMAX = 8192 ///< Flag to pass INT64_MIN/MAX through instead of rescaling, this avoids special cases for AV_NOPTS_VALUE
);
|
||||
|
||||
(*
|
||||
* Return the greatest common divisor of a and b.
|
||||
* If both a and b are 0 or either or both are <0 then behavior is
|
||||
* undefined.
|
||||
*)
|
||||
// int64_t av_const av_gcd(int64_t a, int64_t b);
|
||||
|
||||
(*
|
||||
* Rescale a 64-bit integer with rounding to nearest.
|
||||
* A simple a*b/c isn't possible as it can overflow.
|
||||
*)
|
||||
// int64_t av_rescale(int64_t a, int64_t b, int64_t c) av_const;
|
||||
|
||||
(*
|
||||
* Rescale a 64-bit integer with specified rounding.
|
||||
* A simple a*b/c isn't possible as it can overflow.
|
||||
*
|
||||
* @return rescaled value a, or if AV_ROUND_PASS_MINMAX is set and a is
|
||||
* INT64_MIN or INT64_MAX then a is passed through unchanged.
|
||||
*)
|
||||
// int64_t av_rescale_rnd(int64_t a, int64_t b, int64_t c, enum AVRounding) av_const;
|
||||
|
||||
(*
|
||||
* Rescale a 64-bit integer by 2 rational numbers.
|
||||
*)
|
||||
// int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq) av_const;
|
||||
function av_rescale_q(a: int64; bq: TAVRational; cq: TAVRational): int64; cdecl;
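// Usage sketch (added for illustration, not part of the original header):
// converting a 90 kHz MPEG-TS timestamp to milliseconds without overflow.
// The num/den field names on TAVRational are assumed from ffm.rational.
//
// var
//   StreamTB, MilliTB: TAVRational;
//   Pts, PtsMs: int64;
// begin
//   StreamTB.num := 1; StreamTB.den := 90000;
//   MilliTB.num := 1;  MilliTB.den := 1000;
//   Pts := 4500000;                                // 50 s in 1/90000 units
//   PtsMs := av_rescale_q(Pts, StreamTB, MilliTB); // = 50000 ms
// end;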
|
||||
|
||||
(*
|
||||
* Rescale a 64-bit integer by 2 rational numbers with specified rounding.
|
||||
*
|
||||
* @return rescaled value a, or if AV_ROUND_PASS_MINMAX is set and a is
|
||||
* INT64_MIN or INT64_MAX then a is passed through unchanged.
|
||||
*)
|
||||
// int64_t av_rescale_q_rnd(int64_t a, AVRational bq, AVRational cq, enum AVRounding)av_const;
|
||||
|
||||
(*
|
||||
* Compare 2 timestamps each in its own timebases.
|
||||
* The result of the function is undefined if one of the timestamps
|
||||
* is outside the int64_t range when represented in the others timebase.
|
||||
* @return -1 if ts_a is before ts_b, 1 if ts_a is after ts_b or 0 if they represent the same position
|
||||
*)
|
||||
// int av_compare_ts(int64_t ts_a, AVRational tb_a, int64_t ts_b, AVRational tb_b);
|
||||
|
||||
(*
|
||||
* Compare 2 integers modulo mod.
|
||||
* That is we compare integers a and b for which only the least
|
||||
* significant log2(mod) bits are known.
|
||||
*
|
||||
* @param mod must be a power of 2
|
||||
* @return a negative value if a is smaller than b
|
||||
* a positive value if a is greater than b
|
||||
* 0 if a equals b
|
||||
*)
|
||||
// int64_t av_compare_mod(uint64_t a, uint64_t b, uint64_t mod);
|
||||
|
||||
(*
|
||||
* Rescale a timestamp while preserving known durations.
|
||||
*
|
||||
* @param in_ts Input timestamp
|
||||
* @param in_tb Input timebase
* @param fs_tb Duration and *last timebase
* @param duration duration till the next call
* @param out_tb Output timebase
|
||||
*)
|
||||
// int64_t av_rescale_delta(AVRational in_tb, int64_t in_ts, AVRational fs_tb, int duration, int64_t * last, AVRational out_tb);
|
||||
|
||||
implementation

uses ffm.lib;

function av_rescale_q; external avutil_dll;

end.
|
@ -1,359 +0,0 @@
|
||||
unit ffm.mem;
|
||||
|
||||
{$i ffmpeg.inc}
|
||||
|
||||
interface
|
||||
|
||||
(*
|
||||
* copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
|
||||
*
|
||||
* This file is part of ffm.
|
||||
*
|
||||
* FFmpeg is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU Lesser General Public
|
||||
* License as published by the Free Software Foundation; either
|
||||
* version 2.1 of the License, or (at your option) any later version.
|
||||
*
|
||||
* FFmpeg is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public
|
||||
* License along with FFmpeg; if not, write to the Free Software
|
||||
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
*)
|
||||
|
||||
// #if defined(__INTEL_COMPILER) && __INTEL_COMPILER < 1110 || defined(__SUNPRO_C)
|
||||
// #define DECLARE_ALIGNED(n,t,v) t __attribute__ ((aligned (n))) v
|
||||
// #define DECLARE_ASM_CONST(n,t,v) const t __attribute__ ((aligned (n))) v
|
||||
// #elif defined(__TI_COMPILER_VERSION__)
|
||||
// #define DECLARE_ALIGNED(n,t,v) \
|
||||
// AV_PRAGMA(DATA_ALIGN(v,n)) \
|
||||
// t __attribute__((aligned(n))) v
|
||||
// #define DECLARE_ASM_CONST(n,t,v) \
|
||||
// AV_PRAGMA(DATA_ALIGN(v,n)) \
|
||||
// static const t __attribute__((aligned(n))) v
|
||||
// #elif defined(__GNUC__)
|
||||
// #define DECLARE_ALIGNED(n,t,v) t __attribute__ ((aligned (n))) v
|
||||
// #define DECLARE_ASM_CONST(n,t,v) static const t av_used __attribute__ ((aligned (n))) v
|
||||
// #elif defined(_MSC_VER)
|
||||
// #define DECLARE_ALIGNED(n,t,v) __declspec(align(n)) t v
|
||||
// #define DECLARE_ASM_CONST(n,t,v) __declspec(align(n)) static const t v
|
||||
// #else
|
||||
// #define DECLARE_ALIGNED(n,t,v) t v
|
||||
// #define DECLARE_ASM_CONST(n,t,v) static const t v
|
||||
// #endif
|
||||
//
|
||||
// #if AV_GCC_VERSION_AT_LEAST(3,1)
|
||||
// #define av_malloc_attrib __attribute__((__malloc__))
|
||||
// #else
|
||||
// #define av_malloc_attrib
|
||||
// #endif
|
||||
//
|
||||
// #if AV_GCC_VERSION_AT_LEAST(4,3)
|
||||
// #define av_alloc_size(...) __attribute__((alloc_size(__VA_ARGS__)))
|
||||
// #else
|
||||
// #define av_alloc_size(...)
|
||||
// #endif
|
||||
|
||||
(*
|
||||
* Allocate a block of size bytes with alignment suitable for all
|
||||
* memory accesses (including vectors if available on the CPU).
|
||||
* @param size Size in bytes for the memory block to be allocated.
|
||||
* @return Pointer to the allocated block, NULL if the block cannot
|
||||
* be allocated.
|
||||
* @see av_mallocz()
|
||||
*)
|
||||
// void *av_malloc(size_t size) av_malloc_attrib av_alloc_size(1);
|
||||
function av_malloc(size: Cardinal): Pointer; cdecl;
|
||||
|
||||
(*
|
||||
* Allocate a block of size * nmemb bytes with av_malloc().
|
||||
* @param nmemb Number of elements
|
||||
* @param size Size of the single element
|
||||
* @return Pointer to the allocated block, NULL if the block cannot
|
||||
* be allocated.
|
||||
* @see av_malloc()
|
||||
*)
|
||||
// av_alloc_size(1, 2) static inline void *av_malloc_array(size_t nmemb, size_t size)
|
||||
{
|
||||
if (!size || nmemb >= INT_MAX / size)
|
||||
return NULL;
|
||||
return av_malloc(nmemb * size);
|
||||
}
|
||||
|
||||
(*
|
||||
* Allocate or reallocate a block of memory.
|
||||
* If ptr is NULL and size > 0, allocate a new block. If
|
||||
* size is zero, free the memory block pointed to by ptr.
|
||||
* @param ptr Pointer to a memory block already allocated with
|
||||
* av_realloc() or NULL.
|
||||
* @param size Size in bytes of the memory block to be allocated or
|
||||
* reallocated.
|
||||
* @return Pointer to a newly-reallocated block or NULL if the block
|
||||
* cannot be reallocated or the function is used to free the memory block.
|
||||
* @warning Pointers originating from the av_malloc() family of functions must
|
||||
* not be passed to av_realloc(). The former can be implemented using
|
||||
* memalign() (or other functions), and there is no guarantee that
|
||||
* pointers from such functions can be passed to realloc() at all.
|
||||
* The situation is undefined according to POSIX and may crash with
|
||||
* some libc implementations.
|
||||
* @see av_fast_realloc()
|
||||
*)
|
||||
// void *av_realloc(void *ptr, size_t size) av_alloc_size(2);
|
||||
|
||||
(*
|
||||
* Allocate or reallocate a block of memory.
|
||||
* This function does the same thing as av_realloc, except:
|
||||
* - It takes two arguments and checks the result of the multiplication for
|
||||
* integer overflow.
|
||||
* - It frees the input block in case of failure, thus avoiding the memory
|
||||
* leak with the classic "buf = realloc(buf); if (!buf) return -1;".
|
||||
*)
|
||||
// void *av_realloc_f(void *ptr, size_t nelem, size_t elsize);
|
||||
|
||||
(*
|
||||
* Allocate or reallocate a block of memory.
|
||||
* If *ptr is NULL and size > 0, allocate a new block. If
|
||||
* size is zero, free the memory block pointed to by ptr.
|
||||
* @param ptr Pointer to a pointer to a memory block already allocated
|
||||
* with av_realloc(), or pointer to a pointer to NULL.
|
||||
* The pointer is updated on success, or freed on failure.
|
||||
* @param size Size in bytes for the memory block to be allocated or
|
||||
* reallocated
|
||||
* @return Zero on success, an AVERROR error code on failure.
|
||||
* @warning Pointers originating from the av_malloc() family of functions must
|
||||
* not be passed to av_reallocp(). The former can be implemented using
|
||||
* memalign() (or other functions), and there is no guarantee that
|
||||
* pointers from such functions can be passed to realloc() at all.
|
||||
* The situation is undefined according to POSIX and may crash with
|
||||
* some libc implementations.
|
||||
*)
|
||||
// int av_reallocp(void *ptr, size_t size);
|
||||
|
||||
(*
|
||||
* Allocate or reallocate an array.
|
||||
* If ptr is NULL and nmemb > 0, allocate a new block. If
|
||||
* nmemb is zero, free the memory block pointed to by ptr.
|
||||
* @param ptr Pointer to a memory block already allocated with
|
||||
* av_realloc() or NULL.
|
||||
* @param nmemb Number of elements
|
||||
* @param size Size of the single element
|
||||
* @return Pointer to a newly-reallocated block or NULL if the block
|
||||
* cannot be reallocated or the function is used to free the memory block.
|
||||
* @warning Pointers originating from the av_malloc() family of functions must
|
||||
* not be passed to av_realloc(). The former can be implemented using
|
||||
* memalign() (or other functions), and there is no guarantee that
|
||||
* pointers from such functions can be passed to realloc() at all.
|
||||
* The situation is undefined according to POSIX and may crash with
|
||||
* some libc implementations.
|
||||
*)
|
||||
// av_alloc_size(2, 3) void *av_realloc_array(void *ptr, size_t nmemb, size_t size);
|
||||
|
||||
(*
|
||||
* Allocate or reallocate an array through a pointer to a pointer.
|
||||
* If *ptr is NULL and nmemb > 0, allocate a new block. If
|
||||
* nmemb is zero, free the memory block pointed to by ptr.
|
||||
* @param ptr Pointer to a pointer to a memory block already allocated
|
||||
* with av_realloc(), or pointer to a pointer to NULL.
|
||||
* The pointer is updated on success, or freed on failure.
|
||||
* @param nmemb Number of elements
|
||||
* @param size Size of the single element
|
||||
* @return Zero on success, an AVERROR error code on failure.
|
||||
* @warning Pointers originating from the av_malloc() family of functions must
|
||||
* not be passed to av_realloc(). The former can be implemented using
|
||||
* memalign() (or other functions), and there is no guarantee that
|
||||
* pointers from such functions can be passed to realloc() at all.
|
||||
* The situation is undefined according to POSIX and may crash with
|
||||
* some libc implementations.
|
||||
*)
|
||||
// av_alloc_size(2, 3) int av_reallocp_array(void *ptr, size_t nmemb, size_t size);
|
||||
|
||||
(*
|
||||
* Free a memory block which has been allocated with av_malloc(z)() or
|
||||
* av_realloc().
|
||||
* @param ptr Pointer to the memory block which should be freed.
|
||||
* @note ptr = NULL is explicitly allowed.
|
||||
* @note It is recommended that you use av_freep() instead.
|
||||
* @see av_freep()
|
||||
*)
|
||||
// void av_free(void *ptr);
|
||||
procedure av_free(ptr: Pointer); cdecl;
|
||||
|
||||
(*
|
||||
* Allocate a block of size bytes with alignment suitable for all
|
||||
* memory accesses (including vectors if available on the CPU) and
|
||||
* zero all the bytes of the block.
|
||||
* @param size Size in bytes for the memory block to be allocated.
|
||||
* @return Pointer to the allocated block, NULL if it cannot be allocated.
|
||||
* @see av_malloc()
|
||||
*)
|
||||
// void *av_mallocz(size_t size) av_malloc_attrib av_alloc_size(1);
|
||||
function av_mallocz(size: Cardinal): Pointer; cdecl;
|
||||
|
||||
(*
|
||||
* Allocate a block of nmemb * size bytes with alignment suitable for all
|
||||
* memory accesses (including vectors if available on the CPU) and
|
||||
* zero all the bytes of the block.
|
||||
* The allocation will fail if nmemb * size is greater than or equal
|
||||
* to INT_MAX.
|
||||
* @param nmemb
|
||||
* @param size
|
||||
* @return Pointer to the allocated block, NULL if it cannot be allocated.
|
||||
*)
|
||||
// void *av_calloc(size_t nmemb, size_t size) av_malloc_attrib;
|
||||
|
||||
(*
|
||||
* Allocate a block of size * nmemb bytes with av_mallocz().
|
||||
* @param nmemb Number of elements
|
||||
* @param size Size of the single element
|
||||
* @return Pointer to the allocated block, NULL if the block cannot
|
||||
* be allocated.
|
||||
* @see av_mallocz()
|
||||
* @see av_malloc_array()
|
||||
*)
|
||||
// av_alloc_size(1, 2) static inline void *av_mallocz_array(size_t nmemb, size_t size)
|
||||
{
|
||||
if (!size || nmemb >= INT_MAX / size)
|
||||
return NULL;
|
||||
return av_mallocz(nmemb * size);
|
||||
}
|
||||
|
||||
(*
|
||||
* Duplicate the string s.
|
||||
* @param s string to be duplicated
|
||||
* @return Pointer to a newly-allocated string containing a
|
||||
* copy of s or NULL if the string cannot be allocated.
|
||||
*)
|
||||
// char *av_strdup(const char *s) av_malloc_attrib;
|
||||
function av_strdup(const s: pAnsiChar): pAnsiChar; cdecl;
|
||||
|
||||
(*
|
||||
* Duplicate the buffer p.
|
||||
* @param p buffer to be duplicated
|
||||
* @return Pointer to a newly allocated buffer containing a
|
||||
* copy of p or NULL if the buffer cannot be allocated.
|
||||
*)
|
||||
// void *av_memdup(const void *p, size_t size);
|
||||
|
||||
(*
|
||||
* Free a memory block which has been allocated with av_malloc(z)() or
|
||||
* av_realloc() and set the pointer pointing to it to NULL.
|
||||
* @param ptr Pointer to the pointer to the memory block which should
|
||||
* be freed.
|
||||
* @see av_free()
|
||||
*)
|
||||
// void av_freep(void *ptr);
|
||||
procedure av_freep(ptr: Pointer); cdecl;
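// Usage sketch (added for illustration, not part of the original header):
// a zero-initialised scratch buffer allocated through avutil and released
// with av_freep above, which also resets the local pointer to nil.
//
// var
//   Buf: Pointer;
// begin
//   Buf := av_mallocz(4096);
//   if Assigned(Buf) then
//   try
//     // ... use 4096 zeroed bytes at Buf ...
//   finally
//     av_freep(@Buf); // Buf = nil afterwards
//   end;
// end;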
|
||||
|
||||
(*
|
||||
* Add an element to a dynamic array.
|
||||
*
|
||||
* The array to grow is supposed to be an array of pointers to
|
||||
* structures, and the element to add must be a pointer to an already
|
||||
* allocated structure.
|
||||
*
|
||||
* The array is reallocated when its size reaches powers of 2.
|
||||
* Therefore, the amortized cost of adding an element is constant.
|
||||
*
|
||||
* In case of success, the pointer to the array is updated in order to
|
||||
* point to the new grown array, and the number pointed to by nb_ptr
|
||||
* is incremented.
|
||||
* In case of failure, the array is freed, *tab_ptr is set to NULL and
|
||||
* *nb_ptr is set to 0.
|
||||
*
|
||||
* @param tab_ptr pointer to the array to grow
|
||||
* @param nb_ptr pointer to the number of elements in the array
|
||||
* @param elem element to add
|
||||
* @see av_dynarray2_add()
|
||||
*)
|
||||
// void av_dynarray_add(void *tab_ptr, int *nb_ptr, void *elem);
|
||||
|
||||
(*
|
||||
* Add an element of size elem_size to a dynamic array.
|
||||
*
|
||||
* The array is reallocated when its number of elements reaches powers of 2.
|
||||
* Therefore, the amortized cost of adding an element is constant.
|
||||
*
|
||||
* In case of success, the pointer to the array is updated in order to
|
||||
* point to the new grown array, and the number pointed to by nb_ptr
|
||||
* is incremented.
|
||||
* In case of failure, the array is freed, *tab_ptr is set to NULL and
|
||||
* *nb_ptr is set to 0.
|
||||
*
|
||||
* @param tab_ptr pointer to the array to grow
|
||||
* @param nb_ptr pointer to the number of elements in the array
|
||||
* @param elem_size size in bytes of the elements in the array
|
||||
* @param elem_data pointer to the data of the element to add. If NULL, the space of
|
||||
* the new added element is not filled.
|
||||
* @return pointer to the data of the element to copy in the new allocated space.
|
||||
* If NULL, the new allocated space is left uninitialized."
|
||||
* @see av_dynarray_add()
|
||||
*)
|
||||
// void *av_dynarray2_add(void **tab_ptr, int *nb_ptr, size_t elem_size,
|
||||
// const uint8_t *elem_data);
|
||||
|
||||
(*
|
||||
* Multiply two size_t values checking for overflow.
|
||||
* @return 0 if success, AVERROR(EINVAL) if overflow.
|
||||
*)
|
||||
// static inline int av_size_mult(size_t a, size_t b, size_t *r)
|
||||
{
|
||||
size_t t = a * b;
|
||||
(* Hack inspired from glibc: only try the division if nelem and elsize
|
||||
* are both greater than sqrt(SIZE_MAX). *)
|
||||
if ((a | b) >= ((size_t)1 << (sizeof(size_t) * 4)) && a && t / a != b)
|
||||
return AVERROR(EINVAL);
|
||||
*r = t;
|
||||
return 0;
|
||||
}
|
||||
|
||||
(*
|
||||
* Set the maximum size that may be allocated in one block.
|
||||
*)
|
||||
// void av_max_alloc(size_t max);
|
||||
|
||||
(*
|
||||
* deliberately overlapping memcpy implementation
|
||||
* @param dst destination buffer
|
||||
* @param back how many bytes back we start (the initial size of the overlapping window), must be > 0
|
||||
* @param cnt number of bytes to copy, must be >= 0
|
||||
*
|
||||
* cnt > back is valid, this will copy the bytes we just copied,
|
||||
* thus creating a repeating pattern with a period length of back.
|
||||
*)
|
||||
// void av_memcpy_backptr(uint8_t *dst, int back, int cnt);
|
||||
|
||||
(*
|
||||
* Reallocate the given block if it is not large enough, otherwise do nothing.
|
||||
*
|
||||
* @see av_realloc
|
||||
*)
|
||||
// void *av_fast_realloc(void *ptr, unsigned int *size, size_t min_size);
|
||||
|
||||
(*
|
||||
* Allocate a buffer, reusing the given one if large enough.
|
||||
*
|
||||
* Contrary to av_fast_realloc the current buffer contents might not be
|
||||
* preserved and on error the old buffer is freed, thus no special
|
||||
* handling to avoid memleaks is necessary.
|
||||
*
|
||||
* @param ptr pointer to pointer to already allocated buffer, overwritten with pointer to new buffer
|
||||
* @param size size of the buffer *ptr points to
|
||||
* @param min_size minimum size of *ptr buffer after returning, *ptr will be NULL and
|
||||
* *size 0 if an error occurred.
|
||||
*)
|
||||
// void av_fast_malloc(void *ptr, unsigned int *size, size_t min_size);
|
||||
|
||||
implementation

uses ffm.lib;

procedure av_free; external avutil_dll;
procedure av_freep; external avutil_dll;
function av_strdup; external avutil_dll;
function av_malloc; external avutil_dll;
function av_mallocz; external avutil_dll;

end.
|
@ -1,792 +0,0 @@
|
||||
unit ffm.opt;
|
||||
|
||||
{$i ffmpeg.inc}
|
||||
|
||||
interface
|
||||
|
||||
uses
|
||||
ffm.ctypes, ffm.rational;
|
||||
|
||||
/// *
|
||||
// * AVOptions
|
||||
// * copyright (c) 2005 Michael Niedermayer <michaelni@gmx.at>
|
||||
// *
|
||||
// * This file is part of ffm.
|
||||
// *
|
||||
// * FFmpeg is free software; you can redistribute it and/or
|
||||
// * modify it under the terms of the GNU Lesser General Public
|
||||
// * License as published by the Free Software Foundation; either
|
||||
// * version 2.1 of the License, or (at your option) any later version.
|
||||
// *
|
||||
// * FFmpeg is distributed in the hope that it will be useful,
|
||||
// * but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
// * Lesser General Public License for more details.
|
||||
// *
|
||||
// * You should have received a copy of the GNU Lesser General Public
|
||||
// * License along with FFmpeg; if not, write to the Free Software
|
||||
// * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
// */
|
||||
//
|
||||
/// **
|
||||
// * @file
|
||||
// * AVOptions
|
||||
// */
|
||||
//
|
||||
// #include "rational.h"
|
||||
// #include "avutil.h"
|
||||
// #include "dict.h"
|
||||
// #include "log.h"
|
||||
// #include "pixfmt.h"
|
||||
// #include "samplefmt.h"
|
||||
//
|
||||
/// **
|
||||
// * @defgroup avoptions AVOptions
|
||||
// * @ingroup lavu_data
|
||||
// * @{
|
||||
// * AVOptions provide a generic system to declare options on arbitrary structs
|
||||
// * ("objects"). An option can have a help text, a type and a range of possible
|
||||
// * values. Options may then be enumerated, read and written to.
|
||||
// *
|
||||
// * @section avoptions_implement Implementing AVOptions
|
||||
// * This section describes how to add AVOptions capabilities to a struct.
|
||||
// *
|
||||
// * All AVOptions-related information is stored in an AVClass. Therefore
|
||||
// * the first member of the struct should be a pointer to an AVClass describing it.
|
||||
// * The option field of the AVClass must be set to a NULL-terminated static array
|
||||
// * of AVOptions. Each AVOption must have a non-empty name, a type, a default
|
||||
// * value and for number-type AVOptions also a range of allowed values. It must
|
||||
// * also declare an offset in bytes from the start of the struct, where the field
|
||||
// * associated with this AVOption is located. Other fields in the AVOption struct
|
||||
// * should also be set when applicable, but are not required.
|
||||
// *
|
||||
// * The following example illustrates an AVOptions-enabled struct:
|
||||
// * @code
|
||||
// * typedef struct test_struct {
|
||||
// * AVClass *class;
|
||||
// * int int_opt;
|
||||
// * char *str_opt;
|
||||
// * uint8_t *bin_opt;
|
||||
// * int bin_len;
|
||||
// * } test_struct;
|
||||
// *
|
||||
// * static const AVOption test_options[] = {
|
||||
// * { "test_int", "This is a test option of int type.", offsetof(test_struct, int_opt),
|
||||
// * AV_OPT_TYPE_INT, { .i64 = -1 }, INT_MIN, INT_MAX },
|
||||
// * { "test_str", "This is a test option of string type.", offsetof(test_struct, str_opt),
|
||||
// * AV_OPT_TYPE_STRING },
|
||||
// * { "test_bin", "This is a test option of binary type.", offsetof(test_struct, bin_opt),
|
||||
// * AV_OPT_TYPE_BINARY },
|
||||
// * { NULL },
|
||||
// * };
|
||||
// *
|
||||
// * static const AVClass test_class = {
|
||||
// * .class_name = "test class",
|
||||
// * .item_name = av_default_item_name,
|
||||
// * .option = test_options,
|
||||
// * .version = LIBAVUTIL_VERSION_INT,
|
||||
// * };
|
||||
// * @endcode
|
||||
// *
|
||||
// * Next, when allocating your struct, you must ensure that the AVClass pointer
|
||||
// * is set to the correct value. Then, av_opt_set_defaults() can be called to
|
||||
// * initialize defaults. After that the struct is ready to be used with the
|
||||
// * AVOptions API.
|
||||
// *
|
||||
// * When cleaning up, you may use the av_opt_free() function to automatically
|
||||
// * free all the allocated string and binary options.
|
||||
// *
|
||||
// * Continuing with the above example:
|
||||
// *
|
||||
// * @code
|
||||
// * test_struct *alloc_test_struct(void)
|
||||
// * {
|
||||
// * test_struct *ret = av_malloc(sizeof(*ret));
|
||||
// * ret->class = &test_class;
|
||||
// * av_opt_set_defaults(ret);
|
||||
// * return ret;
|
||||
// * }
|
||||
// * void free_test_struct(test_struct **foo)
|
||||
// * {
|
||||
// * av_opt_free(*foo);
|
||||
// * av_freep(foo);
|
||||
// * }
|
||||
// * @endcode
|
||||
// *
|
||||
// * @subsection avoptions_implement_nesting Nesting
|
||||
// * It may happen that an AVOptions-enabled struct contains another
|
||||
// * AVOptions-enabled struct as a member (e.g. AVCodecContext in
|
||||
// * libavcodec exports generic options, while its priv_data field exports
|
||||
// * codec-specific options). In such a case, it is possible to set up the
|
||||
// * parent struct to export a child's options. To do that, simply
|
||||
// * implement AVClass.child_next() and AVClass.child_class_next() in the
|
||||
// * parent struct's AVClass.
|
||||
// * Assuming that the test_struct from above now also contains a
|
||||
// * child_struct field:
|
||||
// *
|
||||
// * @code
|
||||
// * typedef struct child_struct {
|
||||
// * AVClass *class;
|
||||
// * int flags_opt;
|
||||
// * } child_struct;
|
||||
// * static const AVOption child_opts[] = {
|
||||
// * { "test_flags", "This is a test option of flags type.",
|
||||
// * offsetof(child_struct, flags_opt), AV_OPT_TYPE_FLAGS, { .i64 = 0 }, INT_MIN, INT_MAX },
|
||||
// * { NULL },
|
||||
// * };
|
||||
// * static const AVClass child_class = {
|
||||
// * .class_name = "child class",
|
||||
// * .item_name = av_default_item_name,
|
||||
// * .option = child_opts,
|
||||
// * .version = LIBAVUTIL_VERSION_INT,
|
||||
// * };
|
||||
// *
|
||||
// * void *child_next(void *obj, void *prev)
|
||||
// * {
|
||||
// * test_struct *t = obj;
|
||||
// * if (!prev && t->child_struct)
|
||||
// * return t->child_struct;
|
||||
// * return NULL
|
||||
// * }
|
||||
// * const AVClass child_class_next(const AVClass *prev)
|
||||
// * {
|
||||
// * return prev ? NULL : &child_class;
|
||||
// * }
|
||||
// * @endcode
|
||||
// * Putting child_next() and child_class_next() as defined above into
|
||||
// * test_class will now make child_struct's options accessible through
|
||||
// * test_struct (again, proper setup as described above needs to be done on
|
||||
// * child_struct right after it is created).
|
||||
// *
|
||||
// * From the above example it might not be clear why both child_next()
|
||||
// * and child_class_next() are needed. The distinction is that child_next()
|
||||
// * iterates over actually existing objects, while child_class_next()
|
||||
// * iterates over all possible child classes. E.g. if an AVCodecContext
|
||||
// * was initialized to use a codec which has private options, then its
|
||||
// * child_next() will return AVCodecContext.priv_data and finish
|
||||
// * iterating. OTOH child_class_next() on AVCodecContext.av_class will
|
||||
// * iterate over all available codecs with private options.
|
||||
// *
|
||||
// * @subsection avoptions_implement_named_constants Named constants
|
||||
// * It is possible to create named constants for options. Simply set the unit
|
||||
// * field of the option the constants should apply to a string and
|
||||
// * create the constants themselves as options of type AV_OPT_TYPE_CONST
|
||||
// * with their unit field set to the same string.
|
||||
// * Their default_val field should contain the value of the named
|
||||
// * constant.
|
||||
// * For example, to add some named constants for the test_flags option
|
||||
// * above, put the following into the child_opts array:
|
||||
// * @code
|
||||
// * { "test_flags", "This is a test option of flags type.",
|
||||
// * offsetof(child_struct, flags_opt), AV_OPT_TYPE_FLAGS, { .i64 = 0 }, INT_MIN, INT_MAX, "test_unit" },
|
||||
// * { "flag1", "This is a flag with value 16", 0, AV_OPT_TYPE_CONST, { .i64 = 16 }, 0, 0, "test_unit" },
|
||||
// * @endcode
|
||||
// *
|
||||
// * @section avoptions_use Using AVOptions
// * This section deals with accessing options in an AVOptions-enabled struct.
// * Such structs in FFmpeg are e.g. AVCodecContext in libavcodec or
// * AVFormatContext in libavformat.
// *
// * @subsection avoptions_use_examine Examining AVOptions
// * The basic functions for examining options are av_opt_next(), which iterates
// * over all options defined for one object, and av_opt_find(), which searches
// * for an option with the given name.
// *
// * The situation is more complicated with nesting. An AVOptions-enabled struct
// * may have AVOptions-enabled children. Passing the AV_OPT_SEARCH_CHILDREN flag
// * to av_opt_find() will make the function search children recursively.
// *
// * For enumerating there are basically two cases. The first is when you want to
// * get all options that may potentially exist on the struct and its children
// * (e.g. when constructing documentation). In that case you should call
// * av_opt_child_class_next() recursively on the parent struct's AVClass. The
// * second case is when you have an already initialized struct with all its
// * children and you want to get all options that can actually be written to or
// * read from it. In that case you should call av_opt_child_next() recursively
// * (and av_opt_next() on each result).
// *
// * @subsection avoptions_use_get_set Reading and writing AVOptions
// * When setting options, you often have a string read directly from the
// * user. In such a case, simply passing it to av_opt_set() is enough. For
// * non-string type options, av_opt_set() will parse the string according to the
// * option type.
// *
// * Similarly av_opt_get() will read any option type and convert it to a string
// * which will be returned. Do not forget that the string is allocated, so you
// * have to free it with av_free().
// *
// * In some cases it may be more convenient to put all options into an
// * AVDictionary and call av_opt_set_dict() on it. A specific case of this
// * are the format/codec open functions in lavf/lavc, which take a dictionary
// * filled with options as a parameter. This makes it possible to set some
// * options that cannot be set otherwise, since e.g. the input file format is
// * not known before the file is actually opened.
// */
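
(*
 * A minimal Delphi usage sketch for the av_opt_set binding declared further
 * down in this unit. The context variable (Ctx) stands for any allocated,
 * AVOptions-enabled struct, e.g. a codec context from the avcodec unit; the
 * option name 'b' (bit rate) and the value are purely illustrative.
 *
 *   var
 *     Ctx: Pointer; // assumed: points to a struct whose first member is a pointer to an AVClass
 *     Ret: Integer;
 *   begin
 *     // non-string options are parsed from their string form, e.g. '500000'
 *     Ret := av_opt_set(Ctx, 'b', '500000', AV_OPT_SEARCH_CHILDREN);
 *     if Ret < 0 then
 *       WriteLn('av_opt_set failed: ', Ret);
 *   end;
 *)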
Type
  TAVOptionType = (
    { } AV_OPT_TYPE_FLAGS,
    { } AV_OPT_TYPE_INT,
    { } AV_OPT_TYPE_INT64,
    { } AV_OPT_TYPE_DOUBLE,
    { } AV_OPT_TYPE_FLOAT,
    { } AV_OPT_TYPE_STRING,
    { } AV_OPT_TYPE_RATIONAL,
    { } AV_OPT_TYPE_BINARY, // < offset must point to a pointer immediately followed by an int for the length
    { } AV_OPT_TYPE_CONST = 128,
    { } AV_OPT_TYPE_IMAGE_SIZE = $53495A45, // MKBETAG('S', 'I', 'Z', 'E'), // < offset must point to two consecutive integers
    { } AV_OPT_TYPE_PIXEL_FMT = $50464D54, // MKBETAG('P', 'F', 'M', 'T'),
    { } AV_OPT_TYPE_SAMPLE_FMT = $53464D54, // MKBETAG('S', 'F', 'M', 'T'),
    { } AV_OPT_TYPE_VIDEO_RATE = $56524154, // MKBETAG('V', 'R', 'A', 'T'), // < offset must point to AVRational
    { } AV_OPT_TYPE_DURATION = $44555220, // MKBETAG('D', 'U', 'R', ' '),
    { } AV_OPT_TYPE_COLOR = $434F4C52, // MKBETAG('C', 'O', 'L', 'R'),
    { } AV_OPT_TYPE_CHANNEL_LAYOUT = $43484C41 // MKBETAG('C', 'H', 'L', 'A'),
{$IFDEF FF_API_OLD_AVOPTIONS}
    ,
    { } FF_OPT_TYPE_FLAGS = 0,
    { } FF_OPT_TYPE_INT,
    { } FF_OPT_TYPE_INT64,
    { } FF_OPT_TYPE_DOUBLE,
    { } FF_OPT_TYPE_FLOAT,
    { } FF_OPT_TYPE_STRING,
    { } FF_OPT_TYPE_RATIONAL,
    { } FF_OPT_TYPE_BINARY, // < offset must point to a pointer immediately followed by an int for the length
    { } FF_OPT_TYPE_CONST = 128
{$ENDIF}
  );

Const
  AV_OPT_FLAG_ENCODING_PARAM = 1; /// < a generic parameter which can be set by the user for muxing or encoding
  AV_OPT_FLAG_DECODING_PARAM = 2; /// < a generic parameter which can be set by the user for demuxing or decoding
  AV_OPT_FLAG_METADATA = 4; /// < some data extracted or inserted into the file like title, comment, ...
  AV_OPT_FLAG_AUDIO_PARAM = 8;
  AV_OPT_FLAG_VIDEO_PARAM = 16;
  AV_OPT_FLAG_SUBTITLE_PARAM = 32;
  AV_OPT_FLAG_FILTERING_PARAM = (1 shl 16); /// < a generic parameter which can be set by the user for filtering

Type
  (*
   * AVOption
   *)
  pAVOption = ^TAVOption;

  TAVOption = { packed } record
    name: pAnsiChar;
    (*
     * short English help text
     * @todo What about other languages?
     *)
    help: pAnsiChar;
    (*
     * The offset relative to the context structure where the option
     * value is stored. It should be 0 for named constants.
     *)
    offset: Integer;
    _type: TAVOptionType;
    (*
     * the default value for scalar options
     *)
    default_val: record
      case cint of
        0: (i64: cint64);
        1: (dbl: cdouble);
        2: (str: pAnsiChar);
        (* TODO those are unused now *)
        3: (q: TAVRational);
    end;

    min: double; /// < minimum valid value for the option
    max: double; /// < maximum valid value for the option

    flags: Integer;
    // FIXME think about enc-audio, ... style flags
    (*
     * The logical unit to which the option belongs. Non-constant
     * options and corresponding named constants share the same
     * unit. May be NULL.
     *)
    _unit: pAnsiChar;
  end;

  (*
   * A single allowed range of values, or a single allowed value.
   *)
  pAVOptionRange = ^TAVOptionRange;
  ppAVOptionRange = ^pAVOptionRange;

  TAVOptionRange = { packed } record
    str: pAnsiChar;
    value_min, value_max: double; /// < For string ranges this represents the min/max length, for dimensions this represents the min/max pixel count
    component_min, component_max: double; /// < For string this represents the unicode range for chars, 0-127 limits to ASCII
    is_range: Integer; /// < if set to 1 the struct encodes a range, if set to 0 a single value
  end;

  (*
   * List of AVOptionRange structs
   *)
  pAVOptionRanges = ^TAVOptionRanges;
  ppAVOptionRanges = ^pAVOptionRanges;

  TAVOptionRanges = { packed } record
    range: ppAVOptionRange;
    nb_ranges: Integer;
  end;

//
|
||||
// #if FF_API_FIND_OPT
|
||||
(*
|
||||
// * Look for an option in obj. Look only for the options which
|
||||
// * have the flags set as specified in mask and flags (that is,
|
||||
// * for which it is the case that (opt->flags & mask) == flags).
|
||||
// *
|
||||
// * @param[in] obj a pointer to a struct whose first element is a
|
||||
// * pointer to an AVClass
|
||||
// * @param[in] name the name of the option to look for
|
||||
// * @param[in] unit the unit of the option to look for, or any if NULL
|
||||
// * @return a pointer to the option found, or NULL if no option
|
||||
// * has been found
|
||||
// *
|
||||
// * @deprecated use av_opt_find.
|
||||
*)
|
||||
// attribute_deprecated
|
||||
// const AVOption *av_find_opt(void *obj, const char *name, const char *unit, int mask, int flags);
|
||||
// #endif
|
||||
//
|
||||
// #if FF_API_OLD_AVOPTIONS
|
||||
(*
|
||||
// * Set the field of obj with the given name to value.
|
||||
// *
|
||||
// * @param[in] obj A struct whose first element is a pointer to an
|
||||
// * AVClass.
|
||||
// * @param[in] name the name of the field to set
|
||||
// * @param[in] val The value to set. If the field is not of a string
|
||||
// * type, then the given string is parsed.
|
||||
// * SI postfixes and some named scalars are supported.
|
||||
// * If the field is of a numeric type, it has to be a numeric or named
|
||||
// * scalar. Behavior with more than one scalar and +- infix operators
|
||||
// * is undefined.
|
||||
// * If the field is of a flags type, it has to be a sequence of numeric
|
||||
// * scalars or named flags separated by '+' or '-'. Prefixing a flag
|
||||
// * with '+' causes it to be set without affecting the other flags;
|
||||
// * similarly, '-' unsets a flag.
|
||||
// * @param[out] o_out if non-NULL put here a pointer to the AVOption
|
||||
// * found
|
||||
// * @param alloc this parameter is currently ignored
|
||||
// * @return 0 if the value has been set, or an AVERROR code in case of
|
||||
// * error:
|
||||
// * AVERROR_OPTION_NOT_FOUND if no matching option exists
|
||||
// * AVERROR(ERANGE) if the value is out of range
|
||||
// * AVERROR(EINVAL) if the value is not valid
|
||||
// * @deprecated use av_opt_set()
|
||||
*)
|
||||
// attribute_deprecated
|
||||
// int av_set_string3(void *obj, const char *name, const char *val, int alloc, const AVOption **o_out);
|
||||
//
|
||||
// attribute_deprecated const AVOption *av_set_double(void *obj, const char *name, double n);
|
||||
// attribute_deprecated const AVOption *av_set_q(void *obj, const char *name, AVRational n);
|
||||
// attribute_deprecated const AVOption *av_set_int(void *obj, const char *name, int64_t n);
|
||||
//
|
||||
// double av_get_double(void *obj, const char *name, const AVOption **o_out);
|
||||
// AVRational av_get_q(void *obj, const char *name, const AVOption **o_out);
|
||||
// int64_t av_get_int(void *obj, const char *name, const AVOption **o_out);
|
||||
// attribute_deprecated const char *av_get_string(void *obj, const char *name, const AVOption **o_out, char *buf, int buf_len);
|
||||
// attribute_deprecated const AVOption *av_next_option(void *obj, const AVOption *last);
|
||||
// #endif
|
||||
//
|
||||
(*
|
||||
// * Show the obj options.
|
||||
// *
|
||||
// * @param req_flags requested flags for the options to show. Show only the
|
||||
// * options for which it is opt->flags & req_flags.
|
||||
// * @param rej_flags rejected flags for the options to show. Show only the
|
||||
// * options for which it is !(opt->flags & req_flags).
|
||||
// * @param av_log_obj log context to use for showing the options
|
||||
*)
|
||||
// int av_opt_show2(void *obj, void *av_log_obj, int req_flags, int rej_flags);
|
||||
//
|
||||
(*
|
||||
// * Set the values of all AVOption fields to their default values.
|
||||
// *
|
||||
// * @param s an AVOption-enabled struct (its first member must be a pointer to AVClass)
|
||||
*)
|
||||
// void av_opt_set_defaults(void *s);
|
||||
//
|
||||
// #if FF_API_OLD_AVOPTIONS
|
||||
// attribute_deprecated
|
||||
// void av_opt_set_defaults2(void *s, int mask, int flags);
|
||||
// #endif
|
||||
//
|
||||
(*
|
||||
// * Parse the key/value pairs list in opts. For each key/value pair
|
||||
// * found, stores the value in the field in ctx that is named like the
|
||||
// * key. ctx must be an AVClass context, storing is done using
|
||||
// * AVOptions.
|
||||
// *
|
||||
// * @param opts options string to parse, may be NULL
|
||||
// * @param key_val_sep a 0-terminated list of characters used to
|
||||
// * separate key from value
|
||||
// * @param pairs_sep a 0-terminated list of characters used to separate
|
||||
// * two pairs from each other
|
||||
// * @return the number of successfully set key/value pairs, or a negative
|
||||
// * value corresponding to an AVERROR code in case of error:
|
||||
// * AVERROR(EINVAL) if opts cannot be parsed,
|
||||
// * the error code issued by av_set_string3() if a key/value pair
|
||||
// * cannot be set
|
||||
*)
|
||||
// int av_set_options_string(void *ctx, const char *opts,
|
||||
// const char *key_val_sep, const char *pairs_sep);
|
||||
//
|
||||
(*
|
||||
// * Parse the key-value pairs list in opts. For each key=value pair found,
|
||||
// * set the value of the corresponding option in ctx.
|
||||
// *
|
||||
// * @param ctx the AVClass object to set options on
|
||||
// * @param opts the options string, key-value pairs separated by a
|
||||
// * delimiter
|
||||
// * @param shorthand a NULL-terminated array of options names for shorthand
|
||||
// * notation: if the first field in opts has no key part,
|
||||
// * the key is taken from the first element of shorthand;
|
||||
// * then again for the second, etc., until either opts is
|
||||
// * finished, shorthand is finished or a named option is
|
||||
// * found; after that, all options must be named
|
||||
// * @param key_val_sep a 0-terminated list of characters used to separate
|
||||
// * key from value, for example '='
|
||||
// * @param pairs_sep a 0-terminated list of characters used to separate
|
||||
// * two pairs from each other, for example ':' or ','
|
||||
// * @return the number of successfully set key=value pairs, or a negative
|
||||
// * value corresponding to an AVERROR code in case of error:
|
||||
// * AVERROR(EINVAL) if opts cannot be parsed,
|
||||
// * the error code issued by av_set_string3() if a key/value pair
|
||||
// * cannot be set
|
||||
// *
|
||||
// * Options names must use only the following characters: a-z A-Z 0-9 - . / _
|
||||
// * Separators must use characters distinct from option names and from each
|
||||
// * other.
|
||||
*)
|
||||
// int av_opt_set_from_string(void *ctx, const char *opts,
|
||||
// const char *const *shorthand,
|
||||
// const char *key_val_sep, const char *pairs_sep);
|
||||
(*
|
||||
// * Free all string and binary options in obj.
|
||||
*)
|
||||
// void av_opt_free(void *obj);
|
||||
//
|
||||
/// **
|
||||
// * Check whether a particular flag is set in a flags field.
|
||||
// *
|
||||
// * @param field_name the name of the flag field option
|
||||
// * @param flag_name the name of the flag to check
|
||||
// * @return non-zero if the flag is set, zero if the flag isn't set,
|
||||
// * isn't of the right type, or the flags field doesn't exist.
|
||||
// */
|
||||
// int av_opt_flag_is_set(void *obj, const char *field_name, const char *flag_name);
|
||||
//
|
||||
/// **
|
||||
// * Set all the options from a given dictionary on an object.
|
||||
// *
|
||||
// * @param obj a struct whose first element is a pointer to AVClass
|
||||
// * @param options options to process. This dictionary will be freed and replaced
|
||||
// * by a new one containing all options not found in obj.
|
||||
// * Of course this new dictionary needs to be freed by caller
|
||||
// * with av_dict_free().
|
||||
// *
|
||||
// * @return 0 on success, a negative AVERROR if some option was found in obj,
|
||||
// * but could not be set.
|
||||
// *
|
||||
// * @see av_dict_copy()
|
||||
// */
|
||||
// int av_opt_set_dict(void *obj, struct AVDictionary **options);
|
||||
//
|
||||
/// **
|
||||
// * Extract a key-value pair from the beginning of a string.
|
||||
// *
|
||||
// * @param ropts pointer to the options string, will be updated to
|
||||
// * point to the rest of the string (one of the pairs_sep
|
||||
// * or the final NUL)
|
||||
// * @param key_val_sep a 0-terminated list of characters used to separate
|
||||
// * key from value, for example '='
|
||||
// * @param pairs_sep a 0-terminated list of characters used to separate
|
||||
// * two pairs from each other, for example ':' or ','
|
||||
// * @param flags flags; see the AV_OPT_FLAG_* values below
|
||||
// * @param rkey parsed key; must be freed using av_free()
|
||||
// * @param rval parsed value; must be freed using av_free()
|
||||
// *
|
||||
// * @return >=0 for success, or a negative value corresponding to an
|
||||
// * AVERROR code in case of error; in particular:
|
||||
// * AVERROR(EINVAL) if no key is present
|
||||
// *
|
||||
// */
|
||||
// int av_opt_get_key_value(const char **ropts,
|
||||
// const char *key_val_sep, const char *pairs_sep,
|
||||
// unsigned flags,
|
||||
// char **rkey, char **rval);
|
||||
//
|
||||
// enum {
|
||||
//
|
||||
// /**
|
||||
// * Accept to parse a value without a key; the key will then be returned
|
||||
// * as NULL.
|
||||
// */
|
||||
// AV_OPT_FLAG_IMPLICIT_KEY = 1,
|
||||
// };
|
||||
//
|
||||
/// **
|
||||
// * @defgroup opt_eval_funcs Evaluating option strings
|
||||
// * @{
|
||||
// * This group of functions can be used to evaluate option strings
|
||||
// * and get numbers out of them. They do the same thing as av_opt_set(),
|
||||
// * except the result is written into the caller-supplied pointer.
|
||||
// *
|
||||
// * @param obj a struct whose first element is a pointer to AVClass.
|
||||
// * @param o an option for which the string is to be evaluated.
|
||||
// * @param val string to be evaluated.
|
||||
// * @param *_out value of the string will be written here.
|
||||
// *
|
||||
// * @return 0 on success, a negative number on failure.
|
||||
// */
|
||||
// int av_opt_eval_flags (void *obj, const AVOption *o, const char *val, int *flags_out);
|
||||
// int av_opt_eval_int (void *obj, const AVOption *o, const char *val, int *int_out);
|
||||
// int av_opt_eval_int64 (void *obj, const AVOption *o, const char *val, int64_t *int64_out);
|
||||
// int av_opt_eval_float (void *obj, const AVOption *o, const char *val, float *float_out);
|
||||
// int av_opt_eval_double(void *obj, const AVOption *o, const char *val, double *double_out);
|
||||
// int av_opt_eval_q (void *obj, const AVOption *o, const char *val, AVRational *q_out);
|
||||
|
||||
const
  AV_OPT_SEARCH_CHILDREN = $0001; /// < Search in possible children of the given object first.

  (*
   * The obj passed to av_opt_find() is fake -- only a double pointer to AVClass
   * instead of a required pointer to a struct containing AVClass. This is
   * useful for searching for options without needing to allocate the corresponding
   * object.
   *)
  // #define AV_OPT_SEARCH_FAKE_OBJ 0x0002
  //
/// **
|
||||
// * Look for an option in an object. Consider only options which
|
||||
// * have all the specified flags set.
|
||||
// *
|
||||
// * @param[in] obj A pointer to a struct whose first element is a
|
||||
// * pointer to an AVClass.
|
||||
// * Alternatively a double pointer to an AVClass, if
|
||||
// * AV_OPT_SEARCH_FAKE_OBJ search flag is set.
|
||||
// * @param[in] name The name of the option to look for.
|
||||
// * @param[in] unit When searching for named constants, name of the unit
|
||||
// * it belongs to.
|
||||
// * @param opt_flags Find only options with all the specified flags set (AV_OPT_FLAG).
|
||||
// * @param search_flags A combination of AV_OPT_SEARCH_*.
|
||||
// *
|
||||
// * @return A pointer to the option found, or NULL if no option
|
||||
// * was found.
|
||||
// *
|
||||
// * @note Options found with AV_OPT_SEARCH_CHILDREN flag may not be settable
|
||||
// * directly with av_set_string3(). Use special calls which take an options
|
||||
// * AVDictionary (e.g. avformat_open_input()) to set options found with this
|
||||
// * flag.
|
||||
// */
|
||||
// const AVOption *av_opt_find(void *obj, const char *name, const char *unit,
|
||||
// int opt_flags, int search_flags);
|
||||
//
|
||||
/// **
|
||||
// * Look for an option in an object. Consider only options which
|
||||
// * have all the specified flags set.
|
||||
// *
|
||||
// * @param[in] obj A pointer to a struct whose first element is a
|
||||
// * pointer to an AVClass.
|
||||
// * Alternatively a double pointer to an AVClass, if
|
||||
// * AV_OPT_SEARCH_FAKE_OBJ search flag is set.
|
||||
// * @param[in] name The name of the option to look for.
|
||||
// * @param[in] unit When searching for named constants, name of the unit
|
||||
// * it belongs to.
|
||||
// * @param opt_flags Find only options with all the specified flags set (AV_OPT_FLAG).
|
||||
// * @param search_flags A combination of AV_OPT_SEARCH_*.
|
||||
// * @param[out] target_obj if non-NULL, an object to which the option belongs will be
|
||||
// * written here. It may be different from obj if AV_OPT_SEARCH_CHILDREN is present
|
||||
// * in search_flags. This parameter is ignored if search_flags contain
|
||||
// * AV_OPT_SEARCH_FAKE_OBJ.
|
||||
// *
|
||||
// * @return A pointer to the option found, or NULL if no option
|
||||
// * was found.
|
||||
// */
|
||||
// const AVOption *av_opt_find2(void *obj, const char *name, const char *unit,
|
||||
// int opt_flags, int search_flags, void **target_obj);
|
||||
//
|
||||
/// **
|
||||
// * Iterate over all AVOptions belonging to obj.
|
||||
// *
|
||||
// * @param obj an AVOptions-enabled struct or a double pointer to an
|
||||
// * AVClass describing it.
|
||||
// * @param prev result of the previous call to av_opt_next() on this object
|
||||
// * or NULL
|
||||
// * @return next AVOption or NULL
|
||||
// */
|
||||
// const AVOption *av_opt_next(void *obj, const AVOption *prev);
|
||||
//
|
||||
/// **
|
||||
// * Iterate over AVOptions-enabled children of obj.
|
||||
// *
|
||||
// * @param prev result of a previous call to this function or NULL
|
||||
// * @return next AVOptions-enabled child or NULL
|
||||
// */
|
||||
// void *av_opt_child_next(void *obj, void *prev);
|
||||
//
|
||||
/// **
|
||||
// * Iterate over potential AVOptions-enabled children of parent.
|
||||
// *
|
||||
// * @param prev result of a previous call to this function or NULL
|
||||
// * @return AVClass corresponding to next potential child or NULL
|
||||
// */
|
||||
// const AVClass *av_opt_child_class_next(const AVClass *parent, const AVClass *prev);
|
||||
//
|
||||
/// **
|
||||
// * @defgroup opt_set_funcs Option setting functions
|
||||
// * @{
|
||||
// * Those functions set the field of obj with the given name to value.
|
||||
// *
|
||||
// * @param[in] obj A struct whose first element is a pointer to an AVClass.
|
||||
// * @param[in] name the name of the field to set
|
||||
// * @param[in] val The value to set. In case of av_opt_set() if the field is not
|
||||
// * of a string type, then the given string is parsed.
|
||||
// * SI postfixes and some named scalars are supported.
|
||||
// * If the field is of a numeric type, it has to be a numeric or named
|
||||
// * scalar. Behavior with more than one scalar and +- infix operators
|
||||
// * is undefined.
|
||||
// * If the field is of a flags type, it has to be a sequence of numeric
|
||||
// * scalars or named flags separated by '+' or '-'. Prefixing a flag
|
||||
// * with '+' causes it to be set without affecting the other flags;
|
||||
// * similarly, '-' unsets a flag.
|
||||
// * @param search_flags flags passed to av_opt_find2. I.e. if AV_OPT_SEARCH_CHILDREN
|
||||
// * is passed here, then the option may be set on a child of obj.
|
||||
// *
|
||||
// * @return 0 if the value has been set, or an AVERROR code in case of
|
||||
// * error:
|
||||
// * AVERROR_OPTION_NOT_FOUND if no matching option exists
|
||||
// * AVERROR(ERANGE) if the value is out of range
|
||||
// * AVERROR(EINVAL) if the value is not valid
|
||||
// */
|
||||
// int av_opt_set (void *obj, const char *name, const char *val, int search_flags);
function av_opt_set(obj: Pointer; const name: pAnsiChar; const val: pAnsiChar; search_flags: Integer): Integer; cdecl;
// int av_opt_set_int (void *obj, const char *name, int64_t val, int search_flags);
// int av_opt_set_double(void *obj, const char *name, double val, int search_flags);
// int av_opt_set_q (void *obj, const char *name, AVRational val, int search_flags);
// int av_opt_set_bin (void *obj, const char *name, const uint8_t *val, int size, int search_flags);
function av_opt_set_bin(obj: Pointer; const name: pAnsiChar; const val: PByte; size: Integer;
  search_flags: Integer): Integer; cdecl;
// int av_opt_set_image_size(void *obj, const char *name, int w, int h, int search_flags);
// int av_opt_set_pixel_fmt (void *obj, const char *name, enum AVPixelFormat fmt, int search_flags);
// int av_opt_set_sample_fmt(void *obj, const char *name, enum AVSampleFormat fmt, int search_flags);
// int av_opt_set_video_rate(void *obj, const char *name, AVRational val, int search_flags);
// int av_opt_set_channel_layout(void *obj, const char *name, int64_t ch_layout, int search_flags);
//
/// **
// * Set a binary option to an integer list.
// *
// * @param obj    AVClass object to set options on
// * @param name   name of the binary option
// * @param val    pointer to an integer list (must have the correct type with
// *               regard to the contents of the list)
// * @param term   list terminator (usually 0 or -1)
// * @param flags  search flags
// */
// #define av_opt_set_int_list(obj, name, val, term, flags) \
//     (av_int_list_length(val, term) > INT_MAX / sizeof(*(val)) ? \
//      AVERROR(EINVAL) : \
//      av_opt_set_bin(obj, name, (const uint8_t *)(val), \
//                     av_int_list_length(val, term) * sizeof(*(val)), flags))
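(*
 * There is no direct Pascal translation of the av_opt_set_int_list macro in
 * this unit. A rough helper sketch built on the av_opt_set_bin binding above
 * could look like the following; the name SetIntListOption and the open-array
 * parameter are hypothetical, the list is assumed non-empty, and the caller
 * passes only the values themselves (no terminator element).
 *
 *   function SetIntListOption(obj: Pointer; const name: pAnsiChar;
 *     const vals: array of Integer; search_flags: Integer): Integer;
 *   begin
 *     // hand the raw bytes of the integer list to the binary option,
 *     // which is what the C macro ultimately does via av_opt_set_bin()
 *     Result := av_opt_set_bin(obj, name, PByte(@vals[0]),
 *       Length(vals) * SizeOf(Integer), search_flags);
 *   end;
 *)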
/// **
|
||||
// * @}
|
||||
// */
|
||||
//
|
||||
/// **
|
||||
// * @defgroup opt_get_funcs Option getting functions
|
||||
// * @{
|
||||
// * Those functions get a value of the option with the given name from an object.
|
||||
// *
|
||||
// * @param[in] obj a struct whose first element is a pointer to an AVClass.
|
||||
// * @param[in] name name of the option to get.
|
||||
// * @param[in] search_flags flags passed to av_opt_find2. I.e. if AV_OPT_SEARCH_CHILDREN
|
||||
// * is passed here, then the option may be found in a child of obj.
|
||||
// * @param[out] out_val value of the option will be written here
|
||||
// * @return >=0 on success, a negative error code otherwise
|
||||
// */
|
||||
/// **
|
||||
// * @note the returned string will be av_malloc()ed and must be av_free()ed by the caller
|
||||
// */
|
||||
// int av_opt_get (void *obj, const char *name, int search_flags, uint8_t **out_val);
|
||||
// int av_opt_get_int (void *obj, const char *name, int search_flags, int64_t *out_val);
|
||||
// int av_opt_get_double(void *obj, const char *name, int search_flags, double *out_val);
|
||||
// int av_opt_get_q (void *obj, const char *name, int search_flags, AVRational *out_val);
|
||||
// int av_opt_get_image_size(void *obj, const char *name, int search_flags, int *w_out, int *h_out);
|
||||
// int av_opt_get_pixel_fmt (void *obj, const char *name, int search_flags, enum AVPixelFormat *out_fmt);
|
||||
// int av_opt_get_sample_fmt(void *obj, const char *name, int search_flags, enum AVSampleFormat *out_fmt);
|
||||
// int av_opt_get_video_rate(void *obj, const char *name, int search_flags, AVRational *out_val);
|
||||
// int av_opt_get_channel_layout(void *obj, const char *name, int search_flags, int64_t *ch_layout);
|
||||
/// **
|
||||
// * @}
|
||||
// */
|
||||
/// **
|
||||
// * Gets a pointer to the requested field in a struct.
|
||||
// * This function allows accessing a struct even when its fields are moved or
|
||||
// * renamed since the application making the access has been compiled,
|
||||
// *
|
||||
// * @returns a pointer to the field, it can be cast to the correct type and read
|
||||
// * or written to.
|
||||
// */
|
||||
// void *av_opt_ptr(const AVClass *avclass, void *obj, const char *name);
|
||||
//
|
||||
/// **
|
||||
// * Free an AVOptionRanges struct and set it to NULL.
|
||||
// */
|
||||
// void av_opt_freep_ranges(AVOptionRanges **ranges);
|
||||
//
|
||||
/// **
|
||||
// * Get a list of allowed ranges for the given option.
|
||||
// *
|
||||
// * The returned list may depend on other fields in obj like for example profile.
|
||||
// *
|
||||
// * @param flags is a bitmask of flags, undefined flags should not be set and should be ignored
|
||||
// * AV_OPT_SEARCH_FAKE_OBJ indicates that the obj is a double pointer to a AVClass instead of a full instance
|
||||
// *
|
||||
// * The result must be freed with av_opt_freep_ranges.
|
||||
// *
|
||||
// * @return >= 0 on success, a negative errro code otherwise
|
||||
// */
|
||||
// int av_opt_query_ranges(AVOptionRanges **, void *obj, const char *key, int flags);
|
||||
//
|
||||
/// **
|
||||
// * Get a default list of allowed ranges for the given option.
|
||||
// *
|
||||
// * This list is constructed without using the AVClass.query_ranges() callback
|
||||
// * and can be used as fallback from within the callback.
|
||||
// *
|
||||
// * @param flags is a bitmask of flags, undefined flags should not be set and should be ignored
|
||||
// * AV_OPT_SEARCH_FAKE_OBJ indicates that the obj is a double pointer to a AVClass instead of a full instance
|
||||
// *
|
||||
// * The result must be freed with av_opt_free_ranges.
|
||||
// *
|
||||
// * @return >= 0 on success, a negative errro code otherwise
|
||||
// */
|
||||
// int av_opt_query_ranges_default(AVOptionRanges **, void *obj, const char *key, int flags);
|
||||
//
|
||||
/// **
|
||||
// * @}
|
||||
// */
|
||||
|
||||
implementation

uses ffm.lib;

function av_opt_set_bin; external avutil_dll;
function av_opt_set; external avutil_dll;

end.
@ -1,202 +0,0 @@

unit ffm.parseutils;

{$i ffmpeg.inc}

interface

uses
  ffm.rational;

(*
|
||||
* This file is part of ffm.
|
||||
*
|
||||
* FFmpeg is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU Lesser General Public
|
||||
* License as published by the Free Software Foundation; either
|
||||
* version 2.1 of the License, or (at your option) any later version.
|
||||
*
|
||||
* FFmpeg is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public
|
||||
* License along with FFmpeg; if not, write to the Free Software
|
||||
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
*)
|
||||
|
||||
(*
|
||||
* @file
|
||||
* misc parsing utilities
|
||||
*)
|
||||
|
||||
(*
|
||||
* Parse str and store the parsed ratio in q.
|
||||
*
|
||||
* Note that a ratio with infinite (1/0) or negative value is
|
||||
* considered valid, so you should check on the returned value if you
|
||||
* want to exclude those values.
|
||||
*
|
||||
* The undefined value can be expressed using the "0:0" string.
|
||||
*
|
||||
* @param[in,out] q pointer to the AVRational which will contain the ratio
|
||||
* @param[in] str the string to parse: it has to be a string in the format
|
||||
* num:den, a float number or an expression
|
||||
* @param[in] max the maximum allowed numerator and denominator
|
||||
* @param[in] log_offset log level offset which is applied to the log
|
||||
* level of log_ctx
|
||||
* @param[in] log_ctx parent logging context
|
||||
* @return >= 0 on success, a negative error code otherwise
|
||||
*)
|
||||
// int av_parse_ratio(AVRational * q, const char * str, int max, int log_offset, void * log_ctx);
function av_parse_ratio(p: pAVRational; const str: PAnsiString; max: Integer; log_offset: Integer;
  log_ctx: Pointer): Integer; cdecl;

// #define av_parse_ratio_quiet(rate, str, max)\
//   av_parse_ratio(rate, str, max, AV_LOG_MAX_OFFSET, NULL)
function av_parse_ratio_quiet(p: pAVRational; const str: PAnsiString; max: Integer): Integer; inline;

(*
 * Parse str and put in width_ptr and height_ptr the detected values.
 *
 * @param[in,out] width_ptr  pointer to the variable which will contain the detected
 *                           width value
 * @param[in,out] height_ptr pointer to the variable which will contain the detected
 *                           height value
 * @param[in] str the string to parse: it has to be a string in the format
 *                width x height or a valid video size abbreviation.
 * @return >= 0 on success, a negative error code otherwise
 *)
// int av_parse_video_size(int *width_ptr, int *height_ptr, const char *str);
function av_parse_video_size(Var width_ptr: Integer; Var height_ptr: Integer; const str: pAnsiChar): Integer; cdecl;

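(*
 * A minimal usage sketch for the av_parse_video_size binding above. The
 * string may be an explicit "WIDTHxHEIGHT" pair or one of the size
 * abbreviations known to libavutil; W and H are ordinary local variables.
 *
 *   var
 *     W, H: Integer;
 *   begin
 *     if av_parse_video_size(W, H, '1280x720') >= 0 then
 *       WriteLn('parsed size: ', W, 'x', H);
 *   end;
 *)
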
(*
|
||||
* Parse str and store the detected values in *rate.
|
||||
*
|
||||
* @param[in,out] rate pointer to the AVRational which will contain the detected
|
||||
* frame rate
|
||||
* @param[in] str the string to parse: it has to be a string in the format
|
||||
* rate_num / rate_den, a float number or a valid video rate abbreviation
|
||||
* @return >= 0 on success, a negative error code otherwise
|
||||
*)
|
||||
//int av_parse_video_rate(AVRational * rate, const char * str);
|
||||
|
||||
(*
|
||||
* Put the RGBA values that correspond to color_string in rgba_color.
|
||||
*
|
||||
* @param color_string a string specifying a color. It can be the name of
|
||||
* a color (case insensitive match) or a [0x|#]RRGGBB[AA] sequence,
|
||||
* possibly followed by "@" and a string representing the alpha
|
||||
* component.
|
||||
* The alpha component may be a string composed by "0x" followed by an
|
||||
* hexadecimal number or a decimal number between 0.0 and 1.0, which
|
||||
* represents the opacity value (0x00/0.0 means completely transparent,
|
||||
* 0xff/1.0 completely opaque).
|
||||
* If the alpha component is not specified then 0xff is assumed.
|
||||
* The string "random" will result in a random color.
|
||||
* @param slen length of the initial part of color_string containing the
|
||||
* color. It can be set to -1 if color_string is a null terminated string
|
||||
* containing nothing else than the color.
|
||||
* @return >= 0 in case of success, a negative value in case of
|
||||
* failure (for example if color_string cannot be parsed).
|
||||
*)
|
||||
//int av_parse_color(uint8_t * rgba_color, const char * color_string, int slen, void * log_ctx);
|
||||
|
||||
(*
|
||||
* Get the name of a color from the internal table of hard-coded named
|
||||
* colors.
|
||||
*
|
||||
* This function is meant to enumerate the color names recognized by
|
||||
* av_parse_color().
|
||||
*
|
||||
* @param color_idx index of the requested color, starting from 0
|
||||
* @param rgbp if not NULL, will point to a 3-elements array with the color value in RGB
|
||||
* @return the color name string or NULL if color_idx is not in the array
|
||||
*)
|
||||
//const char * av_get_known_color_name(int color_idx, const uint8_t * * rgb);
|
||||
|
||||
(*
|
||||
* Parse timestr and return in *time a corresponding number of
|
||||
* microseconds.
|
||||
*
|
||||
* @param timeval puts here the number of microseconds corresponding
|
||||
* to the string in timestr. If the string represents a duration, it
|
||||
* is the number of microseconds contained in the time interval. If
|
||||
* the string is a date, is the number of microseconds since 1st of
|
||||
* January, 1970 up to the time of the parsed date. If timestr cannot
|
||||
* be successfully parsed, set *time to INT64_MIN.
|
||||
|
||||
* @param timestr a string representing a date or a duration.
|
||||
* - If a date the syntax is:
|
||||
* @code
|
||||
* [{YYYY-MM-DD|YYYYMMDD}[T|t| ]]{{HH:MM:SS[.m...]]]}|{HHMMSS[.m...]]]}}[Z]
|
||||
* now
|
||||
* @endcode
|
||||
* If the value is "now" it takes the current time.
|
||||
* Time is local time unless Z is appended, in which case it is
|
||||
* interpreted as UTC.
|
||||
* If the year-month-day part is not specified it takes the current
|
||||
* year-month-day.
|
||||
* - If a duration the syntax is:
|
||||
* @code
|
||||
* [-][HH:]MM:SS[.m...]
|
||||
* [-]S+[.m...]
|
||||
* @endcode
|
||||
* @param duration flag which tells how to interpret timestr, if not
|
||||
* zero timestr is interpreted as a duration, otherwise as a date
|
||||
* @return >= 0 in case of success, a negative value corresponding to an
|
||||
* AVERROR code otherwise
|
||||
*)
|
||||
//int av_parse_time(int64_t * timeval, const char * timestr, int duration);
|
||||
|
||||
(*
|
||||
* Parse the input string p according to the format string fmt and
|
||||
* store its results in the structure dt.
|
||||
* This implementation supports only a subset of the formats supported
|
||||
* by the standard strptime().
|
||||
*
|
||||
* In particular it actually supports the parameters:
|
||||
* - %H: the hour as a decimal number, using a 24-hour clock, in the
|
||||
* range '00' through '23'
|
||||
* - %J: hours as a decimal number, in the range '0' through INT_MAX
|
||||
* - %M: the minute as a decimal number, using a 24-hour clock, in the
|
||||
* range '00' through '59'
|
||||
* - %S: the second as a decimal number, using a 24-hour clock, in the
|
||||
* range '00' through '59'
|
||||
* - %Y: the year as a decimal number, using the Gregorian calendar
|
||||
* - %m: the month as a decimal number, in the range '1' through '12'
|
||||
* - %d: the day of the month as a decimal number, in the range '1'
|
||||
* through '31'
|
||||
* - %%: a literal '%'
|
||||
*
|
||||
* @return a pointer to the first character not processed in this
|
||||
* function call, or NULL in case the function fails to match all of
|
||||
* the fmt string and therefore an error occurred
|
||||
*)
|
||||
//char * av_small_strptime(const char * p, const char * fmt, struct tm * dt);
|
||||
|
||||
(*
|
||||
* Attempt to find a specific tag in a URL.
|
||||
*
|
||||
* syntax: '?tag1=val1&tag2=val2...'. Little URL decoding is done.
|
||||
* Return 1 if found.
|
||||
*)
|
||||
//int av_find_info_tag(char * arg, int arg_size, const char * tag1, const char * info);
|
||||
|
||||
(*
|
||||
* Convert the decomposed UTC time in tm to a time_t value.
|
||||
*)
|
||||
//time_t av_timegm(struct tm * tm);
|
||||
|
||||
implementation

uses ffm.lib, ffm.log;

function av_parse_video_size; external avutil_dll;
function av_parse_ratio; external avutil_dll;

function av_parse_ratio_quiet;
begin
  Result := av_parse_ratio(p, str, max, AV_LOG_MAX_OFFSET, nil);
end;

end.
@ -1,304 +0,0 @@
|
||||
(*
|
||||
* pixel format descriptor
|
||||
* Copyright (c) 2009 Michael Niedermayer <michaelni@gmx.at>
|
||||
*
|
||||
* This file is part of ffm.
|
||||
*
|
||||
* FFmpeg is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU Lesser General Public
|
||||
* License as published by the Free Software Foundation; either
|
||||
* version 2.1 of the License, or (at your option) any later version.
|
||||
*
|
||||
* FFmpeg is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public
|
||||
* License along with FFmpeg; if not, write to the Free Software
|
||||
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
*)
|
||||
|
||||
unit ffm.pixdesc;
|
||||
|
||||
{$i ffmpeg.inc}
|
||||
|
||||
interface
|
||||
|
||||
uses
|
||||
ffm.pixfmt;
|
||||
|
||||
Type

  pAVComponentDescriptor = ^TAVComponentDescriptor;

  TAVComponentDescriptor = {packed} record
    // uint16_t plane :2; ///< which of the 4 planes contains the component
    (*
     * Number of elements between 2 horizontally consecutive pixels minus 1.
     * Elements are bits for bitstream formats, bytes otherwise.
     *)
    // uint16_t step_minus1 :3;
    (*
     * Number of elements before the component of the first pixel plus 1.
     * Elements are bits for bitstream formats, bytes otherwise.
     *)
    // uint16_t offset_plus1 :3;
    // uint16_t shift :3; ///< number of least significant bits that must be shifted away to get the value
    // uint16_t depth_minus1 :4; ///< number of bits in the component minus 1
    data: uint16;
  end;

  (*
   * Descriptor that unambiguously describes how the bits of a pixel are
   * stored in the up to 4 data planes of an image. It also stores the
   * subsampling factors and number of components.
   *
   * @note This is separate from the colorspace (RGB, YCbCr, YPbPr, JPEG-style YUV
   * and all the YUV variants); AVPixFmtDescriptor just stores how values
   * are stored, not what these values represent.
   *)
  pAVPixFmtDescriptor = ^TAVPixFmtDescriptor;

  TAVPixFmtDescriptor = {packed} record
    name: pAnsiChar;
    nb_components: uint8; /// < The number of components each pixel has, (1-4)
    (*
     * Amount to shift the luma width right to find the chroma width.
     * For YV12 this is 1 for example.
     * chroma_width = -((-luma_width) >> log2_chroma_w)
     * The note above is needed to ensure rounding up.
     * This value only refers to the chroma components.
     *)
    log2_chroma_w: uint8; /// < chroma_width = -((-luma_width )>>log2_chroma_w)
    (*
     * Amount to shift the luma height right to find the chroma height.
     * For YV12 this is 1 for example.
     * chroma_height = -((-luma_height) >> log2_chroma_h)
     * The note above is needed to ensure rounding up.
     * This value only refers to the chroma components.
     *)
    log2_chroma_h: uint8;
    flags: uint8;
    (*
     * Parameters that describe how pixels are packed.
     * If the format has 2 or 4 components, then alpha is last.
     * If the format has 1 or 2 components, then luma is 0.
     * If the format has 3 or 4 components,
     * if the RGB flag is set then 0 is red, 1 is green and 2 is blue;
     * otherwise 0 is luma, 1 is chroma-U and 2 is chroma-V.
     *)
    comp: array [0 .. 3] of TAVComponentDescriptor;
  end;

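(*
 * The chroma_width formula quoted above rounds up when the luma width is not
 * an exact multiple of the subsampling factor. A small illustrative helper
 * (hypothetical name); the C form -((-w) >> s) relies on arithmetic shifts of
 * negative values, so an equivalent rounding-up expression is used instead:
 *
 *   function ChromaWidth(LumaWidth: Integer; Log2ChromaW: Byte): Integer;
 *   begin
 *     Result := (LumaWidth + (1 shl Log2ChromaW) - 1) shr Log2ChromaW;
 *   end;
 *
 * For a 101-pixel-wide YV12 image (log2_chroma_w = 1) this yields 51.
 *)
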
const
|
||||
(*
|
||||
* Pixel format is big-endian.
|
||||
*)
|
||||
AV_PIX_FMT_FLAG_BE = (1 shl 0);
|
||||
(*
|
||||
* Pixel format has a palette in data[1], values are indexes in this palette.
|
||||
*)
|
||||
AV_PIX_FMT_FLAG_PAL = (1 shl 1);
|
||||
(*
|
||||
* All values of a component are bit-wise {packed} end to end.
|
||||
*)
|
||||
AV_PIX_FMT_FLAG_BITSTREAM = (1 shl 2);
|
||||
(*
|
||||
* Pixel format is an HW accelerated format.
|
||||
*)
|
||||
AV_PIX_FMT_FLAG_HWACCEL = (1 shl 3);
|
||||
(*
|
||||
* At least one pixel component is not in the first data plane.
|
||||
*)
|
||||
AV_PIX_FMT_FLAG_PLANAR = (1 shl 4);
|
||||
(*
|
||||
* The pixel format contains RGB-like data (as opposed to YUV/grayscale).
|
||||
*)
|
||||
AV_PIX_FMT_FLAG_RGB = (1 shl 5);
|
||||
(*
|
||||
* The pixel format is "pseudo-paletted". This means that FFmpeg treats it as
|
||||
* paletted internally, but the palette is generated by the decoder and is not
|
||||
* stored in the file.
|
||||
*)
|
||||
AV_PIX_FMT_FLAG_PSEUDOPAL = (1 shl 6);
|
||||
(*
|
||||
* The pixel format has an alpha channel.
|
||||
*)
|
||||
AV_PIX_FMT_FLAG_ALPHA = (1 shl 7);
|
||||
|
||||
{$IFDEF FF_API_PIX_FMT}
|
||||
(*
|
||||
* @deprecated use the AV_PIX_FMT_FLAG_* flags
|
||||
*)
|
||||
PIX_FMT_BE = AV_PIX_FMT_FLAG_BE;
|
||||
PIX_FMT_PAL = AV_PIX_FMT_FLAG_PAL;
|
||||
PIX_FMT_BITSTREAM = AV_PIX_FMT_FLAG_BITSTREAM;
|
||||
PIX_FMT_HWACCEL = AV_PIX_FMT_FLAG_HWACCEL;
|
||||
PIX_FMT_PLANAR = AV_PIX_FMT_FLAG_PLANAR;
|
||||
PIX_FMT_RGB = AV_PIX_FMT_FLAG_RGB;
|
||||
PIX_FMT_PSEUDOPAL = AV_PIX_FMT_FLAG_PSEUDOPAL;
|
||||
PIX_FMT_ALPHA = AV_PIX_FMT_FLAG_ALPHA;
|
||||
{$ENDIF}
|
||||
{$IFDEF FF_API_PIX_FMT_DESC}
|
||||
(*
|
||||
* The array of all the pixel format descriptors.
|
||||
*)
|
||||
extern attribute_deprecated
|
||||
|
||||
const
|
||||
AVPixFmtDescriptor av_pix_fmt_descriptors[];
|
||||
{$ENDIF}
|
||||
(*
|
||||
* Read a line from an image, and write the values of the
|
||||
* pixel format component c to dst.
|
||||
*
|
||||
* @param data the array containing the pointers to the planes of the image
|
||||
* @param linesize the array containing the linesizes of the image
|
||||
* @param desc the pixel format descriptor for the image
|
||||
* @param x the horizontal coordinate of the first pixel to read
|
||||
* @param y the vertical coordinate of the first pixel to read
|
||||
* @param w the width of the line to read, that is the number of
|
||||
* values to write to dst
|
||||
* @param read_pal_component if not zero and the format is a paletted
|
||||
* format writes the values corresponding to the palette
|
||||
* component c in data[1] to dst, rather than the palette indexes in
|
||||
* data[0]. The behavior is undefined if the format is not paletted.
|
||||
*)
|
||||
// void av_read_image_line(uint16_t *dst, const uint8_t *data[4], const int linesize[4],
|
||||
// const AVPixFmtDescriptor *desc, int x, int y, int c, int w, int read_pal_component);
|
||||
|
||||
(*
|
||||
* Write the values from src to the pixel format component c of an
|
||||
* image line.
|
||||
*
|
||||
* @param src array containing the values to write
|
||||
* @param data the array containing the pointers to the planes of the
|
||||
* image to write into. It is supposed to be zeroed.
|
||||
* @param linesize the array containing the linesizes of the image
|
||||
* @param desc the pixel format descriptor for the image
|
||||
* @param x the horizontal coordinate of the first pixel to write
|
||||
* @param y the vertical coordinate of the first pixel to write
|
||||
* @param w the width of the line to write, that is the number of
|
||||
* values to write to the image line
|
||||
*)
|
||||
// void av_write_image_line(const uint16_t *src, uint8_t *data[4], const int linesize[4],
|
||||
// const AVPixFmtDescriptor *desc, int x, int y, int c, int w);
|
||||
|
||||
(*
|
||||
* Return the pixel format corresponding to name.
|
||||
*
|
||||
* If there is no pixel format with name name, then looks for a
|
||||
* pixel format with the name corresponding to the native endian
|
||||
* format of name.
|
||||
* For example in a little-endian system, first looks for "gray16",
|
||||
* then for "gray16le".
|
||||
*
|
||||
* Finally if no pixel format has been found, returns AV_PIX_FMT_NONE.
|
||||
*)
|
||||
// enum AVPixelFormat av_get_pix_fmt(const char *name);
|
||||
|
||||
(*
|
||||
* Return the short name for a pixel format, NULL in case pix_fmt is
|
||||
* unknown.
|
||||
*
|
||||
* @see av_get_pix_fmt(), av_get_pix_fmt_string()
|
||||
*)
|
||||
// const char *av_get_pix_fmt_name(enum AVPixelFormat pix_fmt);
|
||||
function av_get_pix_fmt_name(pix_fmt: TAVPixelFormat): pAnsiChar; cdecl;
|
||||
|
||||
(*
|
||||
* Print in buf the string corresponding to the pixel format with
|
||||
* number pix_fmt, or a header if pix_fmt is negative.
|
||||
*
|
||||
* @param buf the buffer where to write the string
|
||||
* @param buf_size the size of buf
|
||||
* @param pix_fmt the number of the pixel format to print the
|
||||
* corresponding info string, or a negative value to print the
|
||||
* corresponding header.
|
||||
*)
|
||||
// char *av_get_pix_fmt_string (char *buf, int buf_size, enum AVPixelFormat pix_fmt);
|
||||
|
||||
(*
|
||||
* Return the number of bits per pixel used by the pixel format
|
||||
* described by pixdesc. Note that this is not the same as the number
|
||||
* of bits per sample.
|
||||
*
|
||||
* The returned number of bits refers to the number of bits actually
|
||||
* used for storing the pixel information, that is padding bits are
|
||||
* not counted.
|
||||
*)
|
||||
// int av_get_bits_per_pixel(const AVPixFmtDescriptor *pixdesc);
|
||||
|
||||
(*
|
||||
* Return the number of bits per pixel for the pixel format
|
||||
* described by pixdesc, including any padding or unused bits.
|
||||
*)
|
||||
// int av_get_padded_bits_per_pixel(const AVPixFmtDescriptor *pixdesc);
|
||||
|
||||
(*
|
||||
* @return a pixel format descriptor for provided pixel format or NULL if
|
||||
* this pixel format is unknown.
|
||||
*)
|
||||
// const AVPixFmtDescriptor *av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt);
|
||||
|
||||
(*
|
||||
* Iterate over all pixel format descriptors known to libavutil.
|
||||
*
|
||||
* @param prev previous descriptor. NULL to get the first descriptor.
|
||||
*
|
||||
* @return next descriptor or NULL after the last descriptor
|
||||
*)
|
||||
// const AVPixFmtDescriptor *av_pix_fmt_desc_next(const AVPixFmtDescriptor *prev);
|
||||
|
||||
(*
|
||||
* @return an AVPixelFormat id described by desc, or AV_PIX_FMT_NONE if desc
|
||||
* is not a valid pointer to a pixel format descriptor.
|
||||
*)
|
||||
// enum AVPixelFormat av_pix_fmt_desc_get_id(const AVPixFmtDescriptor *desc);
|
||||
|
||||
(*
|
||||
* Utility function to access log2_chroma_w log2_chroma_h from
|
||||
* the pixel format AVPixFmtDescriptor.
|
||||
*
|
||||
* See avcodec_get_chroma_sub_sample() for a function that asserts a
|
||||
* valid pixel format instead of returning an error code.
|
||||
* Its recommanded that you use avcodec_get_chroma_sub_sample unless
|
||||
* you do check the return code!
|
||||
*
|
||||
* @param[in] pix_fmt the pixel format
|
||||
* @param[out] h_shift store log2_chroma_w
|
||||
* @param[out] v_shift store log2_chroma_h
|
||||
*
|
||||
* @return 0 on success, AVERROR(ENOSYS) on invalid or unknown pixel format
|
||||
*)
|
||||
// int av_pix_fmt_get_chroma_sub_sample(enum AVPixelFormat pix_fmt,
|
||||
// int *h_shift, int *v_shift);
|
||||
|
||||
(*
|
||||
* @return number of planes in pix_fmt, a negative AVERROR if pix_fmt is not a
|
||||
* valid pixel format.
|
||||
*)
|
||||
// int av_pix_fmt_count_planes(enum AVPixelFormat pix_fmt);
|
||||
|
||||
// void ff_check_pixfmt_descriptors(void);
|
||||
|
||||
(*
|
||||
* Utility function to swap the endianness of a pixel format.
|
||||
*
|
||||
* @param[in] pix_fmt the pixel format
|
||||
*
|
||||
* @return pixel format with swapped endianness if it exists,
|
||||
* otherwise AV_PIX_FMT_NONE
|
||||
*)
|
||||
// enum AVPixelFormat av_pix_fmt_swap_endianness(enum AVPixelFormat pix_fmt);
|
||||
|
||||
implementation

uses ffm.lib;

function av_get_pix_fmt_name; external avutil_dll;

end.
@ -1,623 +0,0 @@
|
||||
unit ffm.pixfmt;
|
||||
|
||||
{$i ffmpeg.inc}
|
||||
|
||||
interface
|
||||
|
||||
/// *
|
||||
// * copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
|
||||
// *
|
||||
// * This file is part of ffm.
|
||||
// *
|
||||
// * FFmpeg is free software; you can redistribute it and/or
|
||||
// * modify it under the terms of the GNU Lesser General Public
|
||||
// * License as published by the Free Software Foundation; either
|
||||
// * version 2.1 of the License, or (at your option) any later version.
|
||||
// *
|
||||
// * FFmpeg is distributed in the hope that it will be useful,
|
||||
// * but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
// * Lesser General Public License for more details.
|
||||
// *
|
||||
// * You should have received a copy of the GNU Lesser General Public
|
||||
// * License along with FFmpeg; if not, write to the Free Software
|
||||
// * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
// */
|
||||
|
||||
Const
  AVPALETTE_SIZE = 1024;
  AVPALETTE_COUNT = 256;

(*
 * Pixel format.
 *
 * @note
 * AV_PIX_FMT_RGB32 is handled in an endian-specific manner. An RGBA
 * color is put together as:
 *  (A << 24) | (R << 16) | (G << 8) | B
 * This is stored as BGRA on little-endian CPU architectures and ARGB on
 * big-endian CPUs.
 *
 * @par
 * When the pixel format is palettized RGB (AV_PIX_FMT_PAL8), the palettized
 * image data is stored in AVFrame.data[0]. The palette is transported in
 * AVFrame.data[1], is 1024 bytes long (256 4-byte entries) and is
 * formatted the same as in AV_PIX_FMT_RGB32 described above (i.e., it is
 * also endian-specific). Note also that the individual RGB palette
 * components stored in AVFrame.data[1] should be in the range 0..255.
 * This is important as many custom PAL8 video codecs that were designed
 * to run on the IBM VGA graphics adapter use 6-bit palette components.
 *
 * @par
 * For all the 8bit per pixel formats, an RGB32 palette is in data[1] like
 * for pal8. This palette is filled in automatically by the function
 * allocating the picture.
 *
 * @note
 * Make sure that all newly added big-endian formats have (pix_fmt & 1) == 1
 * and that all newly added little-endian formats have (pix_fmt & 1) == 0.
 * This allows simpler detection of big vs little-endian.
 *)
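(*
 * A small sketch of the AV_PIX_FMT_RGB32 packing described above; the helper
 * name PackRGB32 is illustrative only.
 *
 *   function PackRGB32(A, R, G, B: Byte): Cardinal;
 *   begin
 *     Result := (Cardinal(A) shl 24) or (Cardinal(R) shl 16) or
 *               (Cardinal(G) shl 8) or Cardinal(B);
 *   end;
 *
 * On a little-endian CPU the resulting value is laid out in memory as
 * B, G, R, A (i.e. stored as BGRA), as the note above explains.
 *)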
Type
|
||||
|
||||
pAVPixelFormat = ^TAVPixelFormat;
|
||||
|
||||
TAVPixelFormat = ( //
|
||||
AV_PIX_FMT_NONE = -1, //
|
||||
AV_PIX_FMT_YUV420P,
|
||||
/// < planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
|
||||
AV_PIX_FMT_YUYV422,
|
||||
/// < packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
|
||||
AV_PIX_FMT_RGB24,
|
||||
/// < packed RGB 8:8:8, 24bpp, RGBRGB...
|
||||
AV_PIX_FMT_BGR24,
|
||||
/// < packed RGB 8:8:8, 24bpp, BGRBGR...
|
||||
AV_PIX_FMT_YUV422P,
|
||||
/// < planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
|
||||
AV_PIX_FMT_YUV444P,
|
||||
/// < planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
|
||||
AV_PIX_FMT_YUV410P,
|
||||
/// < planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples)
|
||||
AV_PIX_FMT_YUV411P,
|
||||
/// < planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
|
||||
AV_PIX_FMT_GRAY8,
|
||||
/// < Y , 8bpp
|
||||
AV_PIX_FMT_MONOWHITE,
|
||||
/// < Y , 1bpp, 0 is white, 1 is black, in each byte pixels are ordered from the msb to the lsb
|
||||
AV_PIX_FMT_MONOBLACK,
|
||||
/// < Y , 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb
|
||||
AV_PIX_FMT_PAL8,
|
||||
/// < 8 bit with PIX_FMT_RGB32 palette
|
||||
AV_PIX_FMT_YUVJ420P,
|
||||
/// < planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of PIX_FMT_YUV420P and setting color_range
|
||||
AV_PIX_FMT_YUVJ422P,
|
||||
/// < planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of PIX_FMT_YUV422P and setting color_range
|
||||
AV_PIX_FMT_YUVJ444P,
|
||||
/// < planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of PIX_FMT_YUV444P and setting color_range
|
||||
{$IFDEF FF_API_XVMC}
|
||||
AV_PIX_FMT_XVMC_MPEG2_MC,
|
||||
/// < XVideo Motion Acceleration via common packet passing
|
||||
AV_PIX_FMT_XVMC_MPEG2_IDCT,
|
||||
{$ENDIF}// * FF_API_XVMC */
|
||||
AV_PIX_FMT_UYVY422,
|
||||
/// < packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
|
||||
AV_PIX_FMT_UYYVYY411,
|
||||
/// < packed YUV 4:1:1, 12bpp, Cb Y0 Y1 Cr Y2 Y3
|
||||
AV_PIX_FMT_BGR8,
|
||||
/// < packed RGB 3:3:2, 8bpp, (msb)2B 3G 3R(lsb)
|
||||
AV_PIX_FMT_BGR4,
|
||||
/// < packed RGB 1:2:1 bitstream, 4bpp, (msb)1B 2G 1R(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits
|
||||
AV_PIX_FMT_BGR4_BYTE,
|
||||
/// < packed RGB 1:2:1, 8bpp, (msb)1B 2G 1R(lsb)
|
||||
AV_PIX_FMT_RGB8,
|
||||
/// < packed RGB 3:3:2, 8bpp, (msb)2R 3G 3B(lsb)
|
||||
AV_PIX_FMT_RGB4,
|
||||
/// < packed RGB 1:2:1 bitstream, 4bpp, (msb)1R 2G 1B(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits
|
||||
AV_PIX_FMT_RGB4_BYTE,
|
||||
/// < packed RGB 1:2:1, 8bpp, (msb)1R 2G 1B(lsb)
|
||||
AV_PIX_FMT_NV12,
|
||||
/// < planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V)
|
||||
AV_PIX_FMT_NV21,
|
||||
/// < as above, but U and V bytes are swapped
|
||||
|
||||
AV_PIX_FMT_ARGB,
|
||||
/// < packed ARGB 8:8:8:8, 32bpp, ARGBARGB...
|
||||
AV_PIX_FMT_RGBA,
|
||||
/// < packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
|
||||
AV_PIX_FMT_ABGR,
|
||||
/// < packed ABGR 8:8:8:8, 32bpp, ABGRABGR...
|
||||
AV_PIX_FMT_BGRA,
|
||||
/// < packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
|
||||
|
||||
AV_PIX_FMT_GRAY16BE,
|
||||
/// < Y , 16bpp, big-endian
|
||||
AV_PIX_FMT_GRAY16LE,
|
||||
/// < Y , 16bpp, little-endian
|
||||
AV_PIX_FMT_YUV440P,
|
||||
/// < planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
|
||||
AV_PIX_FMT_YUVJ440P,
|
||||
/// < planar YUV 4:4:0 full scale (JPEG), deprecated in favor of PIX_FMT_YUV440P and setting color_range
|
||||
AV_PIX_FMT_YUVA420P,
|
||||
/// < planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples)
|
||||
{$IFDEF FF_API_VDPAU}
|
||||
AV_PIX_FMT_VDPAU_H264,
|
||||
/// < H.264 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
|
||||
AV_PIX_FMT_VDPAU_MPEG1,
|
||||
/// < MPEG-1 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
|
||||
AV_PIX_FMT_VDPAU_MPEG2,
|
||||
/// < MPEG-2 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
|
||||
AV_PIX_FMT_VDPAU_WMV3,
|
||||
/// < WMV3 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
|
||||
AV_PIX_FMT_VDPAU_VC1,
|
||||
/// < VC-1 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
|
||||
{$ENDIF}
|
||||
AV_PIX_FMT_RGB48BE,
|
||||
/// < packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as big-endian
|
||||
AV_PIX_FMT_RGB48LE,
|
||||
/// < packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as little-endian
|
||||
|
||||
AV_PIX_FMT_RGB565BE,
|
||||
/// < packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), big-endian
|
||||
AV_PIX_FMT_RGB565LE,
|
||||
/// < packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), little-endian
|
||||
AV_PIX_FMT_RGB555BE,
|
||||
/// < packed RGB 5:5:5, 16bpp, (msb)1A 5R 5G 5B(lsb), big-endian, most significant bit to 0
|
||||
AV_PIX_FMT_RGB555LE,
|
||||
/// < packed RGB 5:5:5, 16bpp, (msb)1A 5R 5G 5B(lsb), little-endian, most significant bit to 0
|
||||
|
||||
AV_PIX_FMT_BGR565BE,
|
||||
/// < packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), big-endian
|
||||
AV_PIX_FMT_BGR565LE,
|
||||
/// < packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), little-endian
|
||||
AV_PIX_FMT_BGR555BE,
|
||||
/// < packed BGR 5:5:5, 16bpp, (msb)1A 5B 5G 5R(lsb), big-endian, most significant bit to 1
|
||||
AV_PIX_FMT_BGR555LE,
|
||||
/// < packed BGR 5:5:5, 16bpp, (msb)1A 5B 5G 5R(lsb), little-endian, most significant bit to 1
|
||||
|
||||
AV_PIX_FMT_VAAPI_MOCO,
|
||||
/// < HW acceleration through VA API at motion compensation entry-point, Picture.data[3] contains a vaapi_render_state struct which contains macroblocks as well as various fields extracted from headers
|
||||
AV_PIX_FMT_VAAPI_IDCT,
|
||||
/// < HW acceleration through VA API at IDCT entry-point, Picture.data[3] contains a vaapi_render_state struct which contains fields extracted from headers
|
||||
AV_PIX_FMT_VAAPI_VLD,
|
||||
/// < HW decoding through VA API, Picture.data[3] contains a vaapi_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
|
||||
|
||||
AV_PIX_FMT_YUV420P16LE,
|
||||
/// < planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
|
||||
AV_PIX_FMT_YUV420P16BE,
|
||||
/// < planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
|
||||
AV_PIX_FMT_YUV422P16LE,
|
||||
/// < planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
|
||||
AV_PIX_FMT_YUV422P16BE,
|
||||
/// < planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
|
||||
AV_PIX_FMT_YUV444P16LE,
|
||||
/// < planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
|
||||
AV_PIX_FMT_YUV444P16BE,
|
||||
/// < planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
|
||||
{$IFDEF FF_API_VDPAU}
|
||||
AV_PIX_FMT_VDPAU_MPEG4,
|
||||
/// < MPEG4 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
|
||||
{$ENDIF}
|
||||
AV_PIX_FMT_DXVA2_VLD,
|
||||
/// < HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer
|
||||
|
||||
AV_PIX_FMT_RGB444LE,
|
||||
/// < packed RGB 4:4:4, 16bpp, (msb)4A 4R 4G 4B(lsb), little-endian, most significant bits to 0
|
||||
AV_PIX_FMT_RGB444BE,
|
||||
/// < packed RGB 4:4:4, 16bpp, (msb)4A 4R 4G 4B(lsb), big-endian, most significant bits to 0
|
||||
AV_PIX_FMT_BGR444LE,
|
||||
/// < packed BGR 4:4:4, 16bpp, (msb)4A 4B 4G 4R(lsb), little-endian, most significant bits to 1
|
||||
AV_PIX_FMT_BGR444BE,
|
||||
/// < packed BGR 4:4:4, 16bpp, (msb)4A 4B 4G 4R(lsb), big-endian, most significant bits to 1
|
||||
AV_PIX_FMT_GRAY8A,
|
||||
/// < 8bit gray, 8bit alpha
|
||||
AV_PIX_FMT_BGR48BE,
|
||||
/// < packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as big-endian
|
||||
AV_PIX_FMT_BGR48LE,
|
||||
/// < packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as little-endian
|
||||
|
||||
(*
|
||||
* The following 12 formats have the disadvantage of needing 1 format for each bit depth.
|
||||
* Notice that each 9/10 bits sample is stored in 16 bits with extra padding.
|
||||
* If you want to support multiple bit depths, then using AV_PIX_FMT_YUV420P16* with the bpp stored separately is better.
|
||||
*)
|
||||
AV_PIX_FMT_YUV420P9BE,
|
||||
/// < planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
|
||||
AV_PIX_FMT_YUV420P9LE,
|
||||
/// < planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
|
||||
AV_PIX_FMT_YUV420P10BE,
|
||||
/// < planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
|
||||
AV_PIX_FMT_YUV420P10LE,
|
||||
/// < planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
|
||||
AV_PIX_FMT_YUV422P10BE,
|
||||
/// < planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
|
||||
AV_PIX_FMT_YUV422P10LE,
|
||||
/// < planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
|
||||
AV_PIX_FMT_YUV444P9BE,
|
||||
/// < planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
|
||||
AV_PIX_FMT_YUV444P9LE,
|
||||
/// < planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
|
||||
AV_PIX_FMT_YUV444P10BE,
|
||||
/// < planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
|
||||
AV_PIX_FMT_YUV444P10LE,
|
||||
/// < planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
|
||||
AV_PIX_FMT_YUV422P9BE,
|
||||
/// < planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
|
||||
AV_PIX_FMT_YUV422P9LE,
|
||||
/// < planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
|
||||
AV_PIX_FMT_VDA_VLD,
|
||||
/// < hardware decoding through VDA
|
||||
|
||||
{$IFDEF AV_PIX_FMT_ABI_GIT_MASTER}
|
||||
AV_PIX_FMT_RGBA64BE,
|
||||
/// < packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is stored as big-endian
|
||||
AV_PIX_FMT_RGBA64LE,
|
||||
/// < packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is stored as little-endian
|
||||
AV_PIX_FMT_BGRA64BE,
|
||||
/// < packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each R/G/B/A component is stored as big-endian
|
||||
AV_PIX_FMT_BGRA64LE,
|
||||
/// < packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each R/G/B/A component is stored as little-endian
|
||||
{$ENDIF}
|
||||
AV_PIX_FMT_GBRP,
|
||||
/// < planar GBR 4:4:4 24bpp
|
||||
AV_PIX_FMT_GBRP9BE,
|
||||
/// < planar GBR 4:4:4 27bpp, big-endian
|
||||
AV_PIX_FMT_GBRP9LE,
|
||||
/// < planar GBR 4:4:4 27bpp, little-endian
|
||||
AV_PIX_FMT_GBRP10BE,
|
||||
/// < planar GBR 4:4:4 30bpp, big-endian
|
||||
AV_PIX_FMT_GBRP10LE,
|
||||
/// < planar GBR 4:4:4 30bpp, little-endian
|
||||
AV_PIX_FMT_GBRP16BE,
|
||||
/// < planar GBR 4:4:4 48bpp, big-endian
|
||||
AV_PIX_FMT_GBRP16LE,
|
||||
/// < planar GBR 4:4:4 48bpp, little-endian
|
||||
|
||||
(*
|
||||
* duplicated pixel formats for compatibility with libav.
|
||||
* FFmpeg supports these formats since May 8 2012 and Jan 28 2012 (commits f9ca1ac7 and 143a5c55)
|
||||
* Libav added them Oct 12 2012 with incompatible values (commit 6d5600e85)
|
||||
*)
|
||||
AV_PIX_FMT_YUVA422P_LIBAV,
|
||||
/// < planar YUV 4:2:2 24bpp, (1 Cr & Cb sample per 2x1 Y & A samples)
|
||||
AV_PIX_FMT_YUVA444P_LIBAV,
|
||||
/// < planar YUV 4:4:4 32bpp, (1 Cr & Cb sample per 1x1 Y & A samples)
|
||||
|
||||
AV_PIX_FMT_YUVA420P9BE,
|
||||
/// < planar YUV 4:2:0 22.5bpp, (1 Cr & Cb sample per 2x2 Y & A samples), big-endian
|
||||
AV_PIX_FMT_YUVA420P9LE,
|
||||
/// < planar YUV 4:2:0 22.5bpp, (1 Cr & Cb sample per 2x2 Y & A samples), little-endian
|
||||
AV_PIX_FMT_YUVA422P9BE,
|
||||
/// < planar YUV 4:2:2 27bpp, (1 Cr & Cb sample per 2x1 Y & A samples), big-endian
|
||||
AV_PIX_FMT_YUVA422P9LE,
|
||||
/// < planar YUV 4:2:2 27bpp, (1 Cr & Cb sample per 2x1 Y & A samples), little-endian
|
||||
AV_PIX_FMT_YUVA444P9BE,
|
||||
/// < planar YUV 4:4:4 36bpp, (1 Cr & Cb sample per 1x1 Y & A samples), big-endian
|
||||
AV_PIX_FMT_YUVA444P9LE,
|
||||
/// < planar YUV 4:4:4 36bpp, (1 Cr & Cb sample per 1x1 Y & A samples), little-endian
|
||||
AV_PIX_FMT_YUVA420P10BE,
|
||||
/// < planar YUV 4:2:0 25bpp, (1 Cr & Cb sample per 2x2 Y & A samples, big-endian)
|
||||
AV_PIX_FMT_YUVA420P10LE,
|
||||
/// < planar YUV 4:2:0 25bpp, (1 Cr & Cb sample per 2x2 Y & A samples, little-endian)
|
||||
AV_PIX_FMT_YUVA422P10BE,
|
||||
/// < planar YUV 4:2:2 30bpp, (1 Cr & Cb sample per 2x1 Y & A samples, big-endian)
|
||||
AV_PIX_FMT_YUVA422P10LE,
|
||||
/// < planar YUV 4:2:2 30bpp, (1 Cr & Cb sample per 2x1 Y & A samples, little-endian)
|
||||
AV_PIX_FMT_YUVA444P10BE,
|
||||
/// < planar YUV 4:4:4 40bpp, (1 Cr & Cb sample per 1x1 Y & A samples, big-endian)
|
||||
AV_PIX_FMT_YUVA444P10LE,
|
||||
/// < planar YUV 4:4:4 40bpp, (1 Cr & Cb sample per 1x1 Y & A samples, little-endian)
|
||||
AV_PIX_FMT_YUVA420P16BE,
|
||||
/// < planar YUV 4:2:0 40bpp, (1 Cr & Cb sample per 2x2 Y & A samples, big-endian)
|
||||
AV_PIX_FMT_YUVA420P16LE,
|
||||
/// < planar YUV 4:2:0 40bpp, (1 Cr & Cb sample per 2x2 Y & A samples, little-endian)
|
||||
AV_PIX_FMT_YUVA422P16BE,
|
||||
/// < planar YUV 4:2:2 48bpp, (1 Cr & Cb sample per 2x1 Y & A samples, big-endian)
|
||||
AV_PIX_FMT_YUVA422P16LE,
|
||||
/// < planar YUV 4:2:2 48bpp, (1 Cr & Cb sample per 2x1 Y & A samples, little-endian)
|
||||
AV_PIX_FMT_YUVA444P16BE,
|
||||
/// < planar YUV 4:4:4 64bpp, (1 Cr & Cb sample per 1x1 Y & A samples, big-endian)
|
||||
AV_PIX_FMT_YUVA444P16LE,
|
||||
/// < planar YUV 4:4:4 64bpp, (1 Cr & Cb sample per 1x1 Y & A samples, little-endian)
|
||||
|
||||
AV_PIX_FMT_VDPAU,
|
||||
/// < HW acceleration through VDPAU, Picture.data[3] contains a VdpVideoSurface
|
||||
|
||||
AV_PIX_FMT_XYZ12LE,
|
||||
/// < packed XYZ 4:4:4, 36 bpp, (msb) 12X, 12Y, 12Z (lsb), the 2-byte value for each X/Y/Z is stored as little-endian, the 4 lower bits are set to 0
|
||||
AV_PIX_FMT_XYZ12BE,
|
||||
/// < packed XYZ 4:4:4, 36 bpp, (msb) 12X, 12Y, 12Z (lsb), the 2-byte value for each X/Y/Z is stored as big-endian, the 4 lower bits are set to 0
|
||||
AV_PIX_FMT_NV16,
|
||||
/// < interleaved chroma YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
|
||||
AV_PIX_FMT_NV20LE,
|
||||
/// < interleaved chroma YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
|
||||
AV_PIX_FMT_NV20BE,
|
||||
/// < interleaved chroma YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
|
||||
|
||||
{$IFNDEF AV_PIX_FMT_ABI_GIT_MASTER}
|
||||
AV_PIX_FMT_RGBA64BE = $123,
|
||||
/// < packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is stored as big-endian
|
||||
AV_PIX_FMT_RGBA64LE,
|
||||
/// < packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is stored as little-endian
|
||||
AV_PIX_FMT_BGRA64BE,
|
||||
/// < packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each R/G/B/A component is stored as big-endian
|
||||
AV_PIX_FMT_BGRA64LE,
|
||||
/// < packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each R/G/B/A component is stored as little-endian
|
||||
{$ENDIF}
|
||||
AV_PIX_FMT_0RGB = $123 + 4,
|
||||
/// < packed RGB 8:8:8, 32bpp, 0RGB0RGB...
|
||||
AV_PIX_FMT_RGB0,
|
||||
/// < packed RGB 8:8:8, 32bpp, RGB0RGB0...
|
||||
AV_PIX_FMT_0BGR,
|
||||
/// < packed BGR 8:8:8, 32bpp, 0BGR0BGR...
|
||||
AV_PIX_FMT_BGR0,
|
||||
/// < packed BGR 8:8:8, 32bpp, BGR0BGR0...
|
||||
AV_PIX_FMT_YUVA444P,
|
||||
/// < planar YUV 4:4:4 32bpp, (1 Cr & Cb sample per 1x1 Y & A samples)
|
||||
AV_PIX_FMT_YUVA422P,
|
||||
/// < planar YUV 4:2:2 24bpp, (1 Cr & Cb sample per 2x1 Y & A samples)
|
||||
|
||||
AV_PIX_FMT_YUV420P12BE,
|
||||
/// < planar YUV 4:2:0,18bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
|
||||
AV_PIX_FMT_YUV420P12LE,
|
||||
/// < planar YUV 4:2:0,18bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
|
||||
AV_PIX_FMT_YUV420P14BE,
|
||||
/// < planar YUV 4:2:0,21bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
|
||||
AV_PIX_FMT_YUV420P14LE,
|
||||
/// < planar YUV 4:2:0,21bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
|
||||
AV_PIX_FMT_YUV422P12BE,
|
||||
/// < planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
|
||||
AV_PIX_FMT_YUV422P12LE,
|
||||
/// < planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
|
||||
AV_PIX_FMT_YUV422P14BE,
|
||||
/// < planar YUV 4:2:2,28bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
|
||||
AV_PIX_FMT_YUV422P14LE,
|
||||
/// < planar YUV 4:2:2,28bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
|
||||
AV_PIX_FMT_YUV444P12BE,
|
||||
/// < planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
|
||||
AV_PIX_FMT_YUV444P12LE,
|
||||
/// < planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
|
||||
AV_PIX_FMT_YUV444P14BE,
|
||||
/// < planar YUV 4:4:4,42bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
|
||||
AV_PIX_FMT_YUV444P14LE,
|
||||
/// < planar YUV 4:4:4,42bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
|
||||
AV_PIX_FMT_GBRP12BE,
|
||||
/// < planar GBR 4:4:4 36bpp, big-endian
|
||||
AV_PIX_FMT_GBRP12LE,
|
||||
/// < planar GBR 4:4:4 36bpp, little-endian
|
||||
AV_PIX_FMT_GBRP14BE,
|
||||
/// < planar GBR 4:4:4 42bpp, big-endian
|
||||
AV_PIX_FMT_GBRP14LE,
|
||||
/// < planar GBR 4:4:4 42bpp, little-endian
|
||||
AV_PIX_FMT_GBRAP,
|
||||
/// < planar GBRA 4:4:4:4 32bpp
|
||||
AV_PIX_FMT_GBRAP16BE,
|
||||
/// < planar GBRA 4:4:4:4 64bpp, big-endian
|
||||
AV_PIX_FMT_GBRAP16LE,
|
||||
/// < planar GBRA 4:4:4:4 64bpp, little-endian
|
||||
AV_PIX_FMT_YUVJ411P,
|
||||
/// < planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples) full scale (JPEG), deprecated in favor of PIX_FMT_YUV411P and setting color_range
|
||||
|
||||
AV_PIX_FMT_BAYER_BGGR8,
|
||||
/// < bayer, BGBG..(odd line), GRGR..(even line), 8-bit samples */
|
||||
AV_PIX_FMT_BAYER_RGGB8,
|
||||
/// < bayer, RGRG..(odd line), GBGB..(even line), 8-bit samples */
|
||||
AV_PIX_FMT_BAYER_GBRG8,
|
||||
/// < bayer, GBGB..(odd line), RGRG..(even line), 8-bit samples */
|
||||
AV_PIX_FMT_BAYER_GRBG8,
|
||||
/// < bayer, GRGR..(odd line), BGBG..(even line), 8-bit samples */
|
||||
AV_PIX_FMT_BAYER_BGGR16LE,
|
||||
/// < bayer, BGBG..(odd line), GRGR..(even line), 16-bit samples, little-endian */
|
||||
AV_PIX_FMT_BAYER_BGGR16BE,
|
||||
/// < bayer, BGBG..(odd line), GRGR..(even line), 16-bit samples, big-endian */
|
||||
AV_PIX_FMT_BAYER_RGGB16LE,
|
||||
/// < bayer, RGRG..(odd line), GBGB..(even line), 16-bit samples, little-endian */
|
||||
AV_PIX_FMT_BAYER_RGGB16BE,
|
||||
/// < bayer, RGRG..(odd line), GBGB..(even line), 16-bit samples, big-endian */
|
||||
AV_PIX_FMT_BAYER_GBRG16LE,
|
||||
/// < bayer, GBGB..(odd line), RGRG..(even line), 16-bit samples, little-endian */
|
||||
AV_PIX_FMT_BAYER_GBRG16BE,
|
||||
/// < bayer, GBGB..(odd line), RGRG..(even line), 16-bit samples, big-endian */
|
||||
AV_PIX_FMT_BAYER_GRBG16LE,
|
||||
/// < bayer, GRGR..(odd line), BGBG..(even line), 16-bit samples, little-endian */
|
||||
AV_PIX_FMT_BAYER_GRBG16BE,
|
||||
/// < bayer, GRGR..(odd line), BGBG..(even line), 16-bit samples, big-endian */
|
||||
|
||||
    AV_PIX_FMT_NB
    /// < number of pixel formats, DO NOT USE THIS if you want to link with shared libav* because the number of formats might differ between versions
{$IFDEF FF_API_PIX_FMT}
    ,
    {$INCLUDE old_pix_fmts.inc}
{$ENDIF}
  );
|
||||
|
||||
const
|
||||
{$IFDEF AV_HAVE_INCOMPATIBLE_LIBAV_ABI}
|
||||
AV_PIX_FMT_YUVA422P = AV_PIX_FMT_YUVA422P_LIBAV;
|
||||
AV_PIX_FMT_YUVA444P = AV_PIX_FMT_YUVA444P_LIBAV;
|
||||
{$ENDIF}
|
||||
AV_PIX_FMT_Y400A = AV_PIX_FMT_GRAY8A;
|
||||
AV_PIX_FMT_GBR24P = AV_PIX_FMT_GBRP;
|
||||
|
||||
{$IFDEF AV_HAVE_BIGENDIAN}
|
||||
// #define AV_PIX_FMT_NE(be, le) AV_PIX_FMT_##be
|
||||
AV_PIX_FMT_RGB32 = AV_PIX_FMT_ARGB;
|
||||
AV_PIX_FMT_RGB32_1 = AV_PIX_FMT_RGBA;
|
||||
AV_PIX_FMT_BGR32 = AV_PIX_FMT_ABGR;
|
||||
AV_PIX_FMT_BGR32_1 = AV_PIX_FMT_BGRA;
|
||||
AV_PIX_FMT_0RGB32 = AV_PIX_FMT_0RGB;
|
||||
AV_PIX_FMT_0BGR32 = AV_PIX_FMT_0BGR;
|
||||
//
|
||||
AV_PIX_FMT_GRAY16 = AV_PIX_FMT_GRAY16BE;
|
||||
AV_PIX_FMT_RGB48 = AV_PIX_FMT_RGB48BE;
|
||||
AV_PIX_FMT_RGB565 = AV_PIX_FMT_RGB565BE;
|
||||
AV_PIX_FMT_RGB555 = AV_PIX_FMT_RGB555BE;
|
||||
AV_PIX_FMT_RGB444 = AV_PIX_FMT_RGB444BE;
|
||||
AV_PIX_FMT_BGR48 = AV_PIX_FMT_BGR48BE;
|
||||
AV_PIX_FMT_BGR565 = AV_PIX_FMT_BGR565BE;
|
||||
AV_PIX_FMT_BGR555 = AV_PIX_FMT_BGR555BE;
|
||||
AV_PIX_FMT_BGR444 = AV_PIX_FMT_BGR444BE;
|
||||
//
|
||||
AV_PIX_FMT_YUV420P9 = AV_PIX_FMT_YUV420P9BE;
|
||||
AV_PIX_FMT_YUV422P9 = AV_PIX_FMT_YUV422P9BE;
|
||||
AV_PIX_FMT_YUV444P9 = AV_PIX_FMT_YUV444P9BE;
|
||||
AV_PIX_FMT_YUV420P10 = AV_PIX_FMT_YUV420P10BE;
|
||||
AV_PIX_FMT_YUV422P10 = AV_PIX_FMT_YUV422P10BE;
|
||||
AV_PIX_FMT_YUV444P10 = AV_PIX_FMT_YUV444P10BE;
|
||||
AV_PIX_FMT_YUV420P12 = AV_PIX_FMT_YUV420P12BE;
|
||||
AV_PIX_FMT_YUV422P12 = AV_PIX_FMT_YUV422P12BE;
|
||||
AV_PIX_FMT_YUV444P12 = AV_PIX_FMT_YUV444P12BE;
|
||||
AV_PIX_FMT_YUV420P14 = AV_PIX_FMT_YUV420P14BE;
|
||||
AV_PIX_FMT_YUV422P14 = AV_PIX_FMT_YUV422P14BE;
|
||||
AV_PIX_FMT_YUV444P14 = AV_PIX_FMT_YUV444P14BE;
|
||||
AV_PIX_FMT_YUV420P16 = AV_PIX_FMT_YUV420P16BE;
|
||||
AV_PIX_FMT_YUV422P16 = AV_PIX_FMT_YUV422P16BE;
|
||||
AV_PIX_FMT_YUV444P16 = AV_PIX_FMT_YUV444P16BE;
|
||||
//
|
||||
AV_PIX_FMT_RGBA64 = AV_PIX_FMT_RGBA64BE;
|
||||
AV_PIX_FMT_BGRA64 = AV_PIX_FMT_BGRA64BE;
|
||||
AV_PIX_FMT_GBRP9 = AV_PIX_FMT_GBRP9BE;
|
||||
AV_PIX_FMT_GBRP10 = AV_PIX_FMT_GBRP10BE;
|
||||
AV_PIX_FMT_GBRP12 = AV_PIX_FMT_GBRP12BE;
|
||||
AV_PIX_FMT_GBRP14 = AV_PIX_FMT_GBRP14BE;
|
||||
AV_PIX_FMT_GBRP16 = AV_PIX_FMT_GBRP16BE;
|
||||
AV_PIX_FMT_GBRAP16 = AV_PIX_FMT_GBRAP16BE;
|
||||
//
|
||||
AV_PIX_FMT_BAYER_BGGR16 = AV_PIX_FMT_BAYER_BGGR16BE;
|
||||
AV_PIX_FMT_BAYER_RGGB16 = AV_PIX_FMT_BAYER_RGGB16BE;
|
||||
AV_PIX_FMT_BAYER_GBRG16 = AV_PIX_FMT_BAYER_GBRG16BE;
|
||||
AV_PIX_FMT_BAYER_GRBG16 = AV_PIX_FMT_BAYER_GRBG16BE;
|
||||
//
|
||||
//
|
||||
AV_PIX_FMT_YUVA420P9 = AV_PIX_FMT_YUVA420P9BE;
|
||||
AV_PIX_FMT_YUVA422P9 = AV_PIX_FMT_YUVA422P9BE;
|
||||
AV_PIX_FMT_YUVA444P9 = AV_PIX_FMT_YUVA444P9BE;
|
||||
AV_PIX_FMT_YUVA420P10 = AV_PIX_FMT_YUVA420P10BE;
|
||||
AV_PIX_FMT_YUVA422P10 = AV_PIX_FMT_YUVA422P10BE;
|
||||
AV_PIX_FMT_YUVA444P10 = AV_PIX_FMT_YUVA444P10BE;
|
||||
AV_PIX_FMT_YUVA420P16 = AV_PIX_FMT_YUVA420P16BE;
|
||||
AV_PIX_FMT_YUVA422P16 = AV_PIX_FMT_YUVA422P16BE;
|
||||
AV_PIX_FMT_YUVA444P16 = AV_PIX_FMT_YUVA444P16BE;
|
||||
//
|
||||
AV_PIX_FMT_XYZ12 = AV_PIX_FMT_XYZ12BE;
|
||||
AV_PIX_FMT_NV20 = AV_PIX_FMT_NV20BE;
|
||||
{$ELSE}
|
||||
// #define AV_PIX_FMT_NE(be, le) AV_PIX_FMT_##le
|
||||
AV_PIX_FMT_RGB32 = AV_PIX_FMT_BGRA;
|
||||
AV_PIX_FMT_RGB32_1 = AV_PIX_FMT_ABGR;
|
||||
AV_PIX_FMT_BGR32 = AV_PIX_FMT_RGBA;
|
||||
AV_PIX_FMT_BGR32_1 = AV_PIX_FMT_ARGB;
|
||||
AV_PIX_FMT_0RGB32 = AV_PIX_FMT_BGR0;
|
||||
AV_PIX_FMT_0BGR32 = AV_PIX_FMT_RGB0;
|
||||
//
|
||||
AV_PIX_FMT_GRAY16 = AV_PIX_FMT_GRAY16LE;
|
||||
AV_PIX_FMT_RGB48 = AV_PIX_FMT_RGB48LE;
|
||||
AV_PIX_FMT_RGB565 = AV_PIX_FMT_RGB565LE;
|
||||
AV_PIX_FMT_RGB555 = AV_PIX_FMT_RGB555LE;
|
||||
AV_PIX_FMT_RGB444 = AV_PIX_FMT_RGB444LE;
|
||||
AV_PIX_FMT_BGR48 = AV_PIX_FMT_BGR48LE;
|
||||
AV_PIX_FMT_BGR565 = AV_PIX_FMT_BGR565LE;
|
||||
AV_PIX_FMT_BGR555 = AV_PIX_FMT_BGR555LE;
|
||||
AV_PIX_FMT_BGR444 = AV_PIX_FMT_BGR444LE;
|
||||
//
|
||||
AV_PIX_FMT_YUV420P9 = AV_PIX_FMT_YUV420P9LE;
|
||||
AV_PIX_FMT_YUV422P9 = AV_PIX_FMT_YUV422P9LE;
|
||||
AV_PIX_FMT_YUV444P9 = AV_PIX_FMT_YUV444P9LE;
|
||||
AV_PIX_FMT_YUV420P10 = AV_PIX_FMT_YUV420P10LE;
|
||||
AV_PIX_FMT_YUV422P10 = AV_PIX_FMT_YUV422P10LE;
|
||||
AV_PIX_FMT_YUV444P10 = AV_PIX_FMT_YUV444P10LE;
|
||||
AV_PIX_FMT_YUV420P12 = AV_PIX_FMT_YUV420P12LE;
|
||||
AV_PIX_FMT_YUV422P12 = AV_PIX_FMT_YUV422P12LE;
|
||||
AV_PIX_FMT_YUV444P12 = AV_PIX_FMT_YUV444P12LE;
|
||||
AV_PIX_FMT_YUV420P14 = AV_PIX_FMT_YUV420P14LE;
|
||||
AV_PIX_FMT_YUV422P14 = AV_PIX_FMT_YUV422P14LE;
|
||||
AV_PIX_FMT_YUV444P14 = AV_PIX_FMT_YUV444P14LE;
|
||||
AV_PIX_FMT_YUV420P16 = AV_PIX_FMT_YUV420P16LE;
|
||||
AV_PIX_FMT_YUV422P16 = AV_PIX_FMT_YUV422P16LE;
|
||||
AV_PIX_FMT_YUV444P16 = AV_PIX_FMT_YUV444P16LE;
|
||||
//
|
||||
AV_PIX_FMT_RGBA64 = AV_PIX_FMT_RGBA64LE;
|
||||
AV_PIX_FMT_BGRA64 = AV_PIX_FMT_BGRA64LE;
|
||||
AV_PIX_FMT_GBRP9 = AV_PIX_FMT_GBRP9LE;
|
||||
AV_PIX_FMT_GBRP10 = AV_PIX_FMT_GBRP10LE;
|
||||
AV_PIX_FMT_GBRP12 = AV_PIX_FMT_GBRP12LE;
|
||||
AV_PIX_FMT_GBRP14 = AV_PIX_FMT_GBRP14LE;
|
||||
AV_PIX_FMT_GBRP16 = AV_PIX_FMT_GBRP16LE;
|
||||
AV_PIX_FMT_GBRAP16 = AV_PIX_FMT_GBRAP16LE;
|
||||
//
|
||||
AV_PIX_FMT_BAYER_BGGR16 = AV_PIX_FMT_BAYER_BGGR16LE;
|
||||
AV_PIX_FMT_BAYER_RGGB16 = AV_PIX_FMT_BAYER_RGGB16LE;
|
||||
AV_PIX_FMT_BAYER_GBRG16 = AV_PIX_FMT_BAYER_GBRG16LE;
|
||||
AV_PIX_FMT_BAYER_GRBG16 = AV_PIX_FMT_BAYER_GRBG16LE;
|
||||
//
|
||||
//
|
||||
AV_PIX_FMT_YUVA420P9 = AV_PIX_FMT_YUVA420P9LE;
|
||||
AV_PIX_FMT_YUVA422P9 = AV_PIX_FMT_YUVA422P9LE;
|
||||
AV_PIX_FMT_YUVA444P9 = AV_PIX_FMT_YUVA444P9LE;
|
||||
AV_PIX_FMT_YUVA420P10 = AV_PIX_FMT_YUVA420P10LE;
|
||||
AV_PIX_FMT_YUVA422P10 = AV_PIX_FMT_YUVA422P10LE;
|
||||
AV_PIX_FMT_YUVA444P10 = AV_PIX_FMT_YUVA444P10LE;
|
||||
AV_PIX_FMT_YUVA420P16 = AV_PIX_FMT_YUVA420P16LE;
|
||||
AV_PIX_FMT_YUVA422P16 = AV_PIX_FMT_YUVA422P16LE;
|
||||
AV_PIX_FMT_YUVA444P16 = AV_PIX_FMT_YUVA444P16LE;
|
||||
//
|
||||
AV_PIX_FMT_XYZ12 = AV_PIX_FMT_XYZ12LE;
|
||||
AV_PIX_FMT_NV20 = AV_PIX_FMT_NV20LE;
|
||||
{$ENDIF}
|
||||
{$IFDEF FF_API_PIX_FMT}
|
||||
|
||||
Type
|
||||
TPixelFormat = TAVPixelFormat;
|
||||
|
||||
Const
|
||||
//
|
||||
PIX_FMT_Y400A = AV_PIX_FMT_Y400A;
|
||||
PIX_FMT_GBR24P = AV_PIX_FMT_GBR24P;
|
||||
//
|
||||
// PIX_FMT_NE(be, le) AV_PIX_FMT_NE(be, le)
|
||||
//
|
||||
PIX_FMT_RGB32 = AV_PIX_FMT_RGB32;
|
||||
PIX_FMT_RGB32_1 = AV_PIX_FMT_RGB32_1;
|
||||
PIX_FMT_BGR32 = AV_PIX_FMT_BGR32;
|
||||
PIX_FMT_BGR32_1 = AV_PIX_FMT_BGR32_1;
|
||||
PIX_FMT_0RGB32 = AV_PIX_FMT_0RGB32;
|
||||
PIX_FMT_0BGR32 = AV_PIX_FMT_0BGR32;
|
||||
//
|
||||
PIX_FMT_GRAY16 = AV_PIX_FMT_GRAY16;
|
||||
PIX_FMT_RGB48 = AV_PIX_FMT_RGB48;
|
||||
PIX_FMT_RGB565 = AV_PIX_FMT_RGB565;
|
||||
PIX_FMT_RGB555 = AV_PIX_FMT_RGB555;
|
||||
PIX_FMT_RGB444 = AV_PIX_FMT_RGB444;
|
||||
PIX_FMT_BGR48 = AV_PIX_FMT_BGR48;
|
||||
PIX_FMT_BGR565 = AV_PIX_FMT_BGR565;
|
||||
PIX_FMT_BGR555 = AV_PIX_FMT_BGR555;
|
||||
PIX_FMT_BGR444 = AV_PIX_FMT_BGR444;
|
||||
//
|
||||
PIX_FMT_YUV420P9 = AV_PIX_FMT_YUV420P9;
|
||||
PIX_FMT_YUV422P9 = AV_PIX_FMT_YUV422P9;
|
||||
PIX_FMT_YUV444P9 = AV_PIX_FMT_YUV444P9;
|
||||
PIX_FMT_YUV420P10 = AV_PIX_FMT_YUV420P10;
|
||||
PIX_FMT_YUV422P10 = AV_PIX_FMT_YUV422P10;
|
||||
PIX_FMT_YUV444P10 = AV_PIX_FMT_YUV444P10;
|
||||
PIX_FMT_YUV420P12 = AV_PIX_FMT_YUV420P12;
|
||||
PIX_FMT_YUV422P12 = AV_PIX_FMT_YUV422P12;
|
||||
PIX_FMT_YUV444P12 = AV_PIX_FMT_YUV444P12;
|
||||
PIX_FMT_YUV420P14 = AV_PIX_FMT_YUV420P14;
|
||||
PIX_FMT_YUV422P14 = AV_PIX_FMT_YUV422P14;
|
||||
PIX_FMT_YUV444P14 = AV_PIX_FMT_YUV444P14;
|
||||
PIX_FMT_YUV420P16 = AV_PIX_FMT_YUV420P16;
|
||||
PIX_FMT_YUV422P16 = AV_PIX_FMT_YUV422P16;
|
||||
PIX_FMT_YUV444P16 = AV_PIX_FMT_YUV444P16;
|
||||
//
|
||||
PIX_FMT_RGBA64 = AV_PIX_FMT_RGBA64;
|
||||
PIX_FMT_BGRA64 = AV_PIX_FMT_BGRA64;
|
||||
PIX_FMT_GBRP9 = AV_PIX_FMT_GBRP9;
|
||||
PIX_FMT_GBRP10 = AV_PIX_FMT_GBRP10;
|
||||
PIX_FMT_GBRP12 = AV_PIX_FMT_GBRP12;
|
||||
PIX_FMT_GBRP14 = AV_PIX_FMT_GBRP14;
|
||||
PIX_FMT_GBRP16 = AV_PIX_FMT_GBRP16;
|
||||
{$ENDIF}
|
||||
|
||||
implementation
|
||||
|
||||
end.
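The unit above only declares the pixel-format enumeration and its endianness-neutral aliases. A minimal console sketch like the following (not part of the commit; the program name and the check are illustrative) shows how AV_PIX_FMT_RGB32 resolves on a little-endian versus a big-endian build:
```
program PixFmtAliasDemo;

{$APPTYPE CONSOLE}

uses
  ffm.pixfmt;

var
  fmt: TAVPixelFormat;
begin
  // AV_PIX_FMT_RGB32 is an alias: BGRA on little-endian builds,
  // ARGB when AV_HAVE_BIGENDIAN is defined (see the const block above).
  fmt := AV_PIX_FMT_RGB32;
  if fmt = AV_PIX_FMT_BGRA then
    Writeln('little-endian build: RGB32 maps to AV_PIX_FMT_BGRA')
  else
    Writeln('big-endian build: RGB32 maps to AV_PIX_FMT_ARGB');
end.
```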
|
@ -1,175 +0,0 @@
|
||||
unit ffm.rational;
|
||||
|
||||
{$i ffmpeg.inc}
|
||||
|
||||
interface
|
||||
|
||||
/// *
|
||||
// * rational numbers
|
||||
// * Copyright (c) 2003 Michael Niedermayer <michaelni@gmx.at>
|
||||
// *
|
||||
// * This file is part of ffm.
|
||||
// *
|
||||
// * FFmpeg is free software; you can redistribute it and/or
|
||||
// * modify it under the terms of the GNU Lesser General Public
|
||||
// * License as published by the Free Software Foundation; either
|
||||
// * version 2.1 of the License, or (at your option) any later version.
|
||||
// *
|
||||
// * FFmpeg is distributed in the hope that it will be useful,
|
||||
// * but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
// * Lesser General Public License for more details.
|
||||
// *
|
||||
// * You should have received a copy of the GNU Lesser General Public
|
||||
// * License along with FFmpeg; if not, write to the Free Software
|
||||
// * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
// */
|
||||
//
|
||||
/// **
|
||||
// * @file
|
||||
// * rational numbers
|
||||
// * @author Michael Niedermayer <michaelni@gmx.at>
|
||||
// */
|
||||
//
|
||||
// #ifndef AVUTIL_RATIONAL_H
|
||||
// #define AVUTIL_RATIONAL_H
|
||||
//
|
||||
// #include <stdint.h>
|
||||
// #include <limits.h>
|
||||
// #include "attributes.h"
|
||||
//
|
||||
/// **
|
||||
// * @addtogroup lavu_math
|
||||
// * @{
|
||||
// */
|
||||
type
  (*
   * rational number numerator/denominator
   *)
  pAVRational = ^TAVRational;

  TAVRational = { packed } record
    num: Integer; /// < numerator
    den: Integer; /// < denominator
  end;
|
||||
|
||||
/// **
|
||||
// * Compare two rationals.
|
||||
// * @param a first rational
|
||||
// * @param b second rational
|
||||
// * @return 0 if a==b, 1 if a>b, -1 if a<b, and INT_MIN if one of the
|
||||
// * values is of the form 0/0
|
||||
// */
|
||||
// static inline int av_cmp_q(AVRational a, AVRational b){
|
||||
// const int64_t tmp= a.num * (int64_t)b.den - b.num * (int64_t)a.den;
|
||||
//
|
||||
// if(tmp) return (int)((tmp ^ a.den ^ b.den)>>63)|1;
|
||||
// else if(b.den && a.den) return 0;
|
||||
// else if(a.num && b.num) return (a.num>>31) - (b.num>>31);
|
||||
// else return INT_MIN;
|
||||
// }
|
||||
//
|
||||
/// **
|
||||
// * Convert rational to double.
|
||||
// * @param a rational to convert
|
||||
// * @return (double) a
|
||||
// */
|
||||
// static inline double av_q2d(AVRational a){
|
||||
// return a.num / (double) a.den;
|
||||
// }
|
||||
//
|
||||
/// **
|
||||
// * Reduce a fraction.
|
||||
// * This is useful for framerate calculations.
|
||||
// * @param dst_num destination numerator
|
||||
// * @param dst_den destination denominator
|
||||
// * @param num source numerator
|
||||
// * @param den source denominator
|
||||
// * @param max the maximum allowed for dst_num & dst_den
|
||||
// * @return 1 if exact, 0 otherwise
|
||||
// */
|
||||
// int av_reduce(int *dst_num, int *dst_den, int64_t num, int64_t den, int64_t max);
|
||||
//
|
||||
/// **
|
||||
// * Multiply two rationals.
|
||||
// * @param b first rational
|
||||
// * @param c second rational
|
||||
// * @return b*c
|
||||
// */
|
||||
// AVRational av_mul_q(AVRational b, AVRational c) av_const;
|
||||
//
|
||||
/// **
|
||||
// * Divide one rational by another.
|
||||
// * @param b first rational
|
||||
// * @param c second rational
|
||||
// * @return b/c
|
||||
// */
|
||||
// AVRational av_div_q(AVRational b, AVRational c) av_const;
|
||||
//
|
||||
/// **
|
||||
// * Add two rationals.
|
||||
// * @param b first rational
|
||||
// * @param c second rational
|
||||
// * @return b+c
|
||||
// */
|
||||
// AVRational av_add_q(AVRational b, AVRational c) av_const;
|
||||
//
|
||||
/// **
|
||||
// * Subtract one rational from another.
|
||||
// * @param b first rational
|
||||
// * @param c second rational
|
||||
// * @return b-c
|
||||
// */
|
||||
// AVRational av_sub_q(AVRational b, AVRational c) av_const;
|
||||
//
|
||||
/// **
|
||||
// * Invert a rational.
|
||||
// * @param q value
|
||||
// * @return 1 / q
|
||||
// */
|
||||
// static av_always_inline AVRational av_inv_q(AVRational q)
|
||||
// {
|
||||
// AVRational r = { q.den, q.num };
|
||||
// return r;
|
||||
// }
|
||||
//
|
||||
/// **
|
||||
// * Convert a double precision floating point number to a rational.
|
||||
// * inf is expressed as {1,0} or {-1,0} depending on the sign.
|
||||
// *
|
||||
// * @param d double to convert
|
||||
// * @param max the maximum allowed numerator and denominator
|
||||
// * @return (AVRational) d
|
||||
// */
|
||||
// AVRational av_d2q(double d, int max) av_const;
|
||||
//
|
||||
/// **
|
||||
// * @return 1 if q1 is nearer to q than q2, -1 if q2 is nearer
|
||||
// * than q1, 0 if they have the same distance.
|
||||
// */
|
||||
// int av_nearer_q(AVRational q, AVRational q1, AVRational q2);
|
||||
//
|
||||
/// **
|
||||
// * Find the nearest value in q_list to q.
|
||||
// * @param q_list an array of rationals terminated by {0, 0}
|
||||
// * @return the index of the nearest value found in the array
|
||||
// */
|
||||
// int av_find_nearest_q_idx(AVRational q, const AVRational* q_list);
|
||||
//
|
||||
/// **
|
||||
// * @}
|
||||
// */
|
||||
|
||||
function AVRational(const num, den: Integer): TAVRational; inline;

implementation

function AVRational(const num, den: Integer): TAVRational;
begin
  Result.num := num;
  Result.den := den;
end;

end.
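As a quick illustration (not part of this unit; the program name is hypothetical), the commented C helper av_q2d above translates one-for-one into Pascal and pairs naturally with the AVRational() constructor:
```
program RationalDemo;

{$APPTYPE CONSOLE}

uses
  System.SysUtils, ffm.rational;

// Pascal counterpart of the commented C helper av_q2d: num / (double) den.
function av_q2d(const a: TAVRational): Double; inline;
begin
  Result := a.num / a.den;
end;

var
  tb: TAVRational;
begin
  tb := AVRational(1, 25); // a 25 fps time base
  Writeln(Format('1/25 as a double: %.4f', [av_q2d(tb)])); // prints 0.0400
end.
```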
|
@ -1,274 +0,0 @@
|
||||
(*
|
||||
* This file is part of ffm.
|
||||
*
|
||||
* FFmpeg is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU Lesser General Public
|
||||
* License as published by the Free Software Foundation; either
|
||||
* version 2.1 of the License, or (at your option) any later version.
|
||||
*
|
||||
* FFmpeg is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public
|
||||
* License along with FFmpeg; if not, write to the Free Software
|
||||
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
*)
|
||||
unit ffm.samplefmt;
|
||||
|
||||
{$i ffmpeg.inc}
|
||||
|
||||
interface
|
||||
|
||||
(*
|
||||
* Audio Sample Formats
|
||||
*
|
||||
* @par
|
||||
* The data described by the sample format is always in native-endian order.
|
||||
* Sample values can be expressed by native C types, hence the lack of a signed
|
||||
* 24-bit sample format even though it is a common raw audio data format.
|
||||
*
|
||||
* @par
|
||||
* The floating-point formats are based on full volume being in the range
|
||||
* [-1.0, 1.0]. Any values outside this range are beyond full volume level.
|
||||
*
|
||||
* @par
|
||||
* The data layout as used in av_samples_fill_arrays() and elsewhere in FFmpeg
|
||||
* (such as AVFrame in libavcodec) is as follows:
|
||||
*
|
||||
* For planar sample formats, each audio channel is in a separate data plane,
|
||||
* and linesize is the buffer size, in bytes, for a single plane. All data
|
||||
 * planes must be the same size. For packed sample formats, only the first data
|
||||
* plane is used, and samples for each channel are interleaved. In this case,
|
||||
* linesize is the buffer size, in bytes, for the 1 plane.
|
||||
*)
|
||||
Type
  pAVSampleFormat = ^TAVSampleFormat;
  TAVSampleFormat = ( //
    AV_SAMPLE_FMT_NONE = -1, //
    AV_SAMPLE_FMT_U8,   /// < unsigned 8 bits
    AV_SAMPLE_FMT_S16,  /// < signed 16 bits
    AV_SAMPLE_FMT_S32,  /// < signed 32 bits
    AV_SAMPLE_FMT_FLT,  /// < float
    AV_SAMPLE_FMT_DBL,  /// < double
    AV_SAMPLE_FMT_U8P,  /// < unsigned 8 bits, planar
    AV_SAMPLE_FMT_S16P, /// < signed 16 bits, planar
    AV_SAMPLE_FMT_S32P, /// < signed 32 bits, planar
    AV_SAMPLE_FMT_FLTP, /// < float, planar
    AV_SAMPLE_FMT_DBLP, /// < double, planar
    AV_SAMPLE_FMT_NB    /// < Number of sample formats. DO NOT USE if linking dynamically
  );
|
||||
|
||||
(*
|
||||
* Return the name of sample_fmt, or NULL if sample_fmt is not
|
||||
* recognized.
|
||||
*)
|
||||
// const char *av_get_sample_fmt_name(enum AVSampleFormat sample_fmt);
|
||||
function av_get_sample_fmt_name(sample_fmt: TAVSampleFormat): pAnsiChar; cdecl;
|
||||
|
||||
(*
|
||||
* Return a sample format corresponding to name, or AV_SAMPLE_FMT_NONE
|
||||
* on error.
|
||||
*)
|
||||
// enum AVSampleFormat av_get_sample_fmt(const char *name);
|
||||
|
||||
(*
  * Return the planar<->packed alternative form of the given sample format, or
  * AV_SAMPLE_FMT_NONE on error. If the passed sample_fmt is already in the
  * requested planar/packed format, the format returned is the same as the
  * input.
*)
// enum AVSampleFormat av_get_alt_sample_fmt(enum AVSampleFormat sample_fmt, int planar);
|
||||
|
||||
(*
  * Get the packed alternative form of the given sample format.
  *
  * If the passed sample_fmt is already in packed format, the format returned is
  * the same as the input.
  *
  * @return the packed alternative form of the given sample format or
  * AV_SAMPLE_FMT_NONE on error.
*)
// enum AVSampleFormat av_get_packed_sample_fmt(enum AVSampleFormat sample_fmt);
|
||||
|
||||
(*
|
||||
* Get the planar alternative form of the given sample format.
|
||||
*
|
||||
* If the passed sample_fmt is already in planar format, the format returned is
|
||||
* the same as the input.
|
||||
*
|
||||
* @return the planar alternative form of the given sample format or
|
||||
AV_SAMPLE_FMT_NONE on error.
|
||||
*)
|
||||
// enum AVSampleFormat av_get_planar_sample_fmt(enum AVSampleFormat sample_fmt);
|
||||
|
||||
(*
|
||||
* Generate a string corresponding to the sample format with
|
||||
* sample_fmt, or a header if sample_fmt is negative.
|
||||
*
|
||||
* @param buf the buffer where to write the string
|
||||
* @param buf_size the size of buf
|
||||
* @param sample_fmt the number of the sample format to print the
|
||||
* corresponding info string, or a negative value to print the
|
||||
* corresponding header.
|
||||
* @return the pointer to the filled buffer or NULL if sample_fmt is
|
||||
* unknown or in case of other errors
|
||||
*)
|
||||
// char *av_get_sample_fmt_string(char *buf, int buf_size, enum AVSampleFormat sample_fmt);
|
||||
|
||||
{$IFDEF FF_API_GET_BITS_PER_SAMPLE_FMT}
|
||||
(*
|
||||
* @deprecated Use av_get_bytes_per_sample() instead.
|
||||
*)
|
||||
// attribute_deprecated
|
||||
// int av_get_bits_per_sample_fmt(enum AVSampleFormat sample_fmt);
|
||||
{$ENDIF}
|
||||
(*
|
||||
* Return number of bytes per sample.
|
||||
*
|
||||
* @param sample_fmt the sample format
|
||||
* @return number of bytes per sample or zero if unknown for the given
|
||||
* sample format
|
||||
*)
|
||||
// int av_get_bytes_per_sample(enum AVSampleFormat sample_fmt);
|
||||
function av_get_bytes_per_sample(sample_fmt: TAVSampleFormat): integer; cdecl;
|
||||
|
||||
(*
|
||||
* Check if the sample format is planar.
|
||||
*
|
||||
* @param sample_fmt the sample format to inspect
|
||||
* @return 1 if the sample format is planar, 0 if it is interleaved
|
||||
*)
|
||||
// int av_sample_fmt_is_planar(enum AVSampleFormat sample_fmt);
|
||||
|
||||
(*
|
||||
* Get the required buffer size for the given audio parameters.
|
||||
*
|
||||
* @param[out] linesize calculated linesize, may be NULL
|
||||
* @param nb_channels the number of channels
|
||||
* @param nb_samples the number of samples in a single channel
|
||||
* @param sample_fmt the sample format
|
||||
* @param align buffer size alignment (0 = default, 1 = no alignment)
|
||||
* @return required buffer size, or negative error code on failure
|
||||
*)
|
||||
// int av_samples_get_buffer_size(int *linesize, int nb_channels, int nb_samples, enum AVSampleFormat sample_fmt, int align);
|
||||
function av_samples_get_buffer_size(linesize: pInteger; nb_channels: integer; nb_samples: integer;
|
||||
sample_fmt: TAVSampleFormat; align: integer): integer; cdecl;
|
||||
|
||||
(*
|
||||
* Fill plane data pointers and linesize for samples with sample
|
||||
* format sample_fmt.
|
||||
*
|
||||
* The audio_data array is filled with the pointers to the samples data planes:
|
||||
* for planar, set the start point of each channel's data within the buffer,
|
||||
 * for packed, set the start point of the entire buffer only.
|
||||
*
|
||||
* The value pointed to by linesize is set to the aligned size of each
|
||||
* channel's data buffer for planar layout, or to the aligned size of the
|
||||
 * buffer for all channels for packed layout.
|
||||
*
|
||||
* The buffer in buf must be big enough to contain all the samples
|
||||
* (use av_samples_get_buffer_size() to compute its minimum size),
|
||||
* otherwise the audio_data pointers will point to invalid data.
|
||||
*
|
||||
* @see enum AVSampleFormat
|
||||
* The documentation for AVSampleFormat describes the data layout.
|
||||
*
|
||||
* @param[out] audio_data array to be filled with the pointer for each channel
|
||||
* @param[out] linesize calculated linesize, may be NULL
|
||||
* @param buf the pointer to a buffer containing the samples
|
||||
* @param nb_channels the number of channels
|
||||
* @param nb_samples the number of samples in a single channel
|
||||
* @param sample_fmt the sample format
|
||||
* @param align buffer size alignment (0 = default, 1 = no alignment)
|
||||
* @return >=0 on success or a negative error code on failure
|
||||
* @todo return minimum size in bytes required for the buffer in case
|
||||
* of success at the next bump
|
||||
*)
|
||||
// int av_samples_fill_arrays(uint8_t **audio_data, int *linesize,
|
||||
// const uint8_t *buf,
|
||||
// int nb_channels, int nb_samples,
|
||||
// enum AVSampleFormat sample_fmt, int align);
|
||||
|
||||
(*
|
||||
* Allocate a samples buffer for nb_samples samples, and fill data pointers and
|
||||
* linesize accordingly.
|
||||
* The allocated samples buffer can be freed by using av_freep(&audio_data[0])
|
||||
* Allocated data will be initialized to silence.
|
||||
*
|
||||
* @see enum AVSampleFormat
|
||||
* The documentation for AVSampleFormat describes the data layout.
|
||||
*
|
||||
* @param[out] audio_data array to be filled with the pointer for each channel
|
||||
* @param[out] linesize aligned size for audio buffer(s), may be NULL
|
||||
* @param nb_channels number of audio channels
|
||||
* @param nb_samples number of samples per channel
|
||||
* @param align buffer size alignment (0 = default, 1 = no alignment)
|
||||
* @return >=0 on success or a negative error code on failure
|
||||
* @todo return the size of the allocated buffer in case of success at the next bump
|
||||
* @see av_samples_fill_arrays()
|
||||
* @see av_samples_alloc_array_and_samples()
|
||||
*)
|
||||
// int av_samples_alloc(uint8_t **audio_data, int *linesize, int nb_channels,
|
||||
// int nb_samples, enum AVSampleFormat sample_fmt, int align);
|
||||
|
||||
(*
|
||||
* Allocate a data pointers array, samples buffer for nb_samples
|
||||
* samples, and fill data pointers and linesize accordingly.
|
||||
*
|
||||
* This is the same as av_samples_alloc(), but also allocates the data
|
||||
* pointers array.
|
||||
*
|
||||
* @see av_samples_alloc()
|
||||
*)
|
||||
// int av_samples_alloc_array_and_samples(uint8_t ***audio_data, int *linesize, int nb_channels,
|
||||
// int nb_samples, enum AVSampleFormat sample_fmt, int align);
|
||||
|
||||
(*
|
||||
* Copy samples from src to dst.
|
||||
*
|
||||
* @param dst destination array of pointers to data planes
|
||||
* @param src source array of pointers to data planes
|
||||
* @param dst_offset offset in samples at which the data will be written to dst
|
||||
* @param src_offset offset in samples at which the data will be read from src
|
||||
* @param nb_samples number of samples to be copied
|
||||
* @param nb_channels number of audio channels
|
||||
* @param sample_fmt audio sample format
|
||||
*)
|
||||
// int av_samples_copy(uint8_t **dst, uint8_t * const *src, int dst_offset,
|
||||
// int src_offset, int nb_samples, int nb_channels,
|
||||
// enum AVSampleFormat sample_fmt);
|
||||
|
||||
(*
|
||||
* Fill an audio buffer with silence.
|
||||
*
|
||||
* @param audio_data array of pointers to data planes
|
||||
* @param offset offset in samples at which to start filling
|
||||
* @param nb_samples number of samples to fill
|
||||
* @param nb_channels number of audio channels
|
||||
* @param sample_fmt audio sample format
|
||||
*)
|
||||
// int av_samples_set_silence(uint8_t **audio_data, int offset, int nb_samples,
|
||||
// int nb_channels, enum AVSampleFormat sample_fmt);
|
||||
|
||||
implementation

uses ffm.lib;

function av_get_bytes_per_sample; external avutil_dll;
function av_get_sample_fmt_name; external avutil_dll;
function av_samples_get_buffer_size; external avutil_dll;

end.
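A minimal sketch of the three functions this unit actually binds (not part of the commit; the program name is hypothetical and it assumes the avutil DLL can be found at run time):
```
program SampleFmtDemo;

{$APPTYPE CONSOLE}

uses
  System.SysUtils, ffm.samplefmt;

var
  lineSize, bufSize: Integer;
begin
  // Bytes needed for 1024 interleaved 16-bit stereo samples, default alignment.
  // A negative result would indicate an error.
  bufSize := av_samples_get_buffer_size(@lineSize, 2, 1024, AV_SAMPLE_FMT_S16, 0);
  Writeln(Format('%s: %d bytes per sample, buffer size %d (linesize %d)',
    [string(av_get_sample_fmt_name(AV_SAMPLE_FMT_S16)),
     av_get_bytes_per_sample(AV_SAMPLE_FMT_S16), bufSize, lineSize]));
end.
```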
|
@ -1,309 +0,0 @@
|
||||
(*
|
||||
* Copyright (C) 2011-2013 Michael Niedermayer (michaelni@gmx.at)
|
||||
*
|
||||
* This file is part of libswresample
|
||||
*
|
||||
* libswresample is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU Lesser General Public
|
||||
* License as published by the Free Software Foundation; either
|
||||
* version 2.1 of the License, or (at your option) any later version.
|
||||
*
|
||||
* libswresample is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public
|
||||
* License along with libswresample; if not, write to the Free Software
|
||||
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
*)
|
||||
unit ffm.swresample;
|
||||
|
||||
{$i ffmpeg.inc}
|
||||
|
||||
interface
|
||||
(*
|
||||
* @defgroup lswr Libswresample
|
||||
* @{
|
||||
*
|
||||
* Libswresample (lswr) is a library that handles audio resampling, sample
|
||||
* format conversion and mixing.
|
||||
*
|
||||
* Interaction with lswr is done through SwrContext, which is
|
||||
* allocated with swr_alloc() or swr_alloc_set_opts(). It is opaque, so all parameters
|
||||
* must be set with the @ref avoptions API.
|
||||
*
|
||||
* For example the following code will setup conversion from planar float sample
|
||||
* format to interleaved signed 16-bit integer, downsampling from 48kHz to
|
||||
* 44.1kHz and downmixing from 5.1 channels to stereo (using the default mixing
|
||||
* matrix):
|
||||
* @code
|
||||
* SwrContext *swr = swr_alloc();
|
||||
* av_opt_set_channel_layout(swr, "in_channel_layout", AV_CH_LAYOUT_5POINT1, 0);
|
||||
* av_opt_set_channel_layout(swr, "out_channel_layout", AV_CH_LAYOUT_STEREO, 0);
|
||||
* av_opt_set_int(swr, "in_sample_rate", 48000, 0);
|
||||
* av_opt_set_int(swr, "out_sample_rate", 44100, 0);
|
||||
* av_opt_set_sample_fmt(swr, "in_sample_fmt", AV_SAMPLE_FMT_FLTP, 0);
|
||||
* av_opt_set_sample_fmt(swr, "out_sample_fmt", AV_SAMPLE_FMT_S16, 0);
|
||||
* @endcode
|
||||
*
|
||||
* Once all values have been set, it must be initialized with swr_init(). If
|
||||
* you need to change the conversion parameters, you can change the parameters
|
||||
* as described above, or by using swr_alloc_set_opts(), then call swr_init()
|
||||
* again.
|
||||
*
|
||||
* The conversion itself is done by repeatedly calling swr_convert().
|
||||
* Note that the samples may get buffered in swr if you provide insufficient
|
||||
* output space or if sample rate conversion is done, which requires "future"
|
||||
* samples. Samples that do not require future input can be retrieved at any
|
||||
* time by using swr_convert() (in_count can be set to 0).
|
||||
* At the end of conversion the resampling buffer can be flushed by calling
|
||||
* swr_convert() with NULL in and 0 in_count.
|
||||
*
|
||||
* The delay between input and output, can at any time be found by using
|
||||
* swr_get_delay().
|
||||
*
|
||||
* The following code demonstrates the conversion loop assuming the parameters
|
||||
* from above and caller-defined functions get_input() and handle_output():
|
||||
* @code
|
||||
* uint8_t **input;
|
||||
* int in_samples;
|
||||
*
|
||||
* while (get_input(&input, &in_samples)) {
|
||||
* uint8_t *output;
|
||||
* int out_samples = av_rescale_rnd(swr_get_delay(swr, 48000) +
|
||||
* in_samples, 44100, 48000, AV_ROUND_UP);
|
||||
* av_samples_alloc(&output, NULL, 2, out_samples,
|
||||
* AV_SAMPLE_FMT_S16, 0);
|
||||
* out_samples = swr_convert(swr, &output, out_samples,
|
||||
* input, in_samples);
|
||||
* handle_output(output, out_samples);
|
||||
* av_freep(&output);
|
||||
* }
|
||||
* @endcode
|
||||
*
|
||||
* When the conversion is finished, the conversion
|
||||
* context and everything associated with it must be freed with swr_free().
|
||||
* There will be no memory leak if the data is not completely flushed before
|
||||
* swr_free().
|
||||
*)
|
||||
|
||||
//#if LIBSWRESAMPLE_VERSION_MAJOR < 1
|
||||
//#define SWR_CH_MAX 32 ///< Maximum number of channels
|
||||
//#endif
|
||||
|
||||
//#define SWR_FLAG_RESAMPLE 1 ///< Force resampling even if equal sample rate
|
||||
//TODO use int resample ?
|
||||
//long term TODO can we enable this dynamically?
|
||||
|
||||
//enum SwrDitherType {
|
||||
// SWR_DITHER_NONE = 0,
|
||||
// SWR_DITHER_RECTANGULAR,
|
||||
// SWR_DITHER_TRIANGULAR,
|
||||
// SWR_DITHER_TRIANGULAR_HIGHPASS,
|
||||
//
|
||||
// SWR_DITHER_NS = 64, ///< not part of API/ABI
|
||||
// SWR_DITHER_NS_LIPSHITZ,
|
||||
// SWR_DITHER_NS_F_WEIGHTED,
|
||||
// SWR_DITHER_NS_MODIFIED_E_WEIGHTED,
|
||||
// SWR_DITHER_NS_IMPROVED_E_WEIGHTED,
|
||||
// SWR_DITHER_NS_SHIBATA,
|
||||
// SWR_DITHER_NS_LOW_SHIBATA,
|
||||
// SWR_DITHER_NS_HIGH_SHIBATA,
|
||||
// SWR_DITHER_NB, ///< not part of API/ABI
|
||||
//};
|
||||
//
|
||||
//(* Resampling Engines *)
|
||||
//enum SwrEngine {
|
||||
// SWR_ENGINE_SWR, (*< SW Resampler *)
|
||||
// SWR_ENGINE_SOXR, (*< SoX Resampler *)
|
||||
// SWR_ENGINE_NB, ///< not part of API/ABI
|
||||
//};
|
||||
//
|
||||
//(* Resampling Filter Types *)
|
||||
//enum SwrFilterType {
|
||||
// SWR_FILTER_TYPE_CUBIC, (*< Cubic *)
|
||||
// SWR_FILTER_TYPE_BLACKMAN_NUTTALL, (*< Blackman Nuttall Windowed Sinc *)
|
||||
// SWR_FILTER_TYPE_KAISER, (*< Kaiser Windowed Sinc *)
|
||||
//};
|
||||
Type
  // typedef struct SwrContext SwrContext;
  pSwrContext = ^TSwrContext;
  TSwrContext = record
  end;
|
||||
|
||||
(*
|
||||
* Get the AVClass for swrContext. It can be used in combination with
|
||||
* AV_OPT_SEARCH_FAKE_OBJ for examining options.
|
||||
*
|
||||
* @see av_opt_find().
|
||||
*)
|
||||
//const AVClass *swr_get_class(void);
|
||||
|
||||
(*
|
||||
* Allocate SwrContext.
|
||||
*
|
||||
* If you use this function you will need to set the parameters (manually or
|
||||
* with swr_alloc_set_opts()) before calling swr_init().
|
||||
*
|
||||
* @see swr_alloc_set_opts(), swr_init(), swr_free()
|
||||
* @return NULL on error, allocated context otherwise
|
||||
*)
|
||||
//struct SwrContext *swr_alloc(void);
|
||||
|
||||
(*
|
||||
* Initialize context after user parameters have been set.
|
||||
*
|
||||
* @return AVERROR error code in case of failure.
|
||||
*)
|
||||
//int swr_init(struct SwrContext *s);
|
||||
|
||||
(*
|
||||
* Allocate SwrContext if needed and set/reset common parameters.
|
||||
*
|
||||
* This function does not require s to be allocated with swr_alloc(). On the
|
||||
* other hand, swr_alloc() can use swr_alloc_set_opts() to set the parameters
|
||||
* on the allocated context.
|
||||
*
|
||||
* @param s Swr context, can be NULL
|
||||
* @param out_ch_layout output channel layout (AV_CH_LAYOUT_* )
|
||||
* @param out_sample_fmt output sample format (AV_SAMPLE_FMT_* ).
|
||||
* @param out_sample_rate output sample rate (frequency in Hz)
|
||||
* @param in_ch_layout input channel layout (AV_CH_LAYOUT_* )
|
||||
* @param in_sample_fmt input sample format (AV_SAMPLE_FMT_* ).
|
||||
* @param in_sample_rate input sample rate (frequency in Hz)
|
||||
* @param log_offset logging level offset
|
||||
* @param log_ctx parent logging context, can be NULL
|
||||
*
|
||||
* @see swr_init(), swr_free()
|
||||
* @return NULL on error, allocated context otherwise
|
||||
*)
|
||||
//struct SwrContext *swr_alloc_set_opts(struct SwrContext *s,
|
||||
// int64_t out_ch_layout, enum AVSampleFormat out_sample_fmt, int out_sample_rate,
|
||||
// int64_t in_ch_layout, enum AVSampleFormat in_sample_fmt, int in_sample_rate,
|
||||
// int log_offset, void *log_ctx);
|
||||
|
||||
(*
|
||||
* Free the given SwrContext and set the pointer to NULL.
|
||||
*)
|
||||
//void swr_free(struct SwrContext **s);
|
||||
|
||||
(*
|
||||
* Convert audio.
|
||||
*
|
||||
* in and in_count can be set to 0 to flush the last few samples out at the
|
||||
* end.
|
||||
*
|
||||
* If more input is provided than output space then the input will be buffered.
|
||||
* You can avoid this buffering by providing more output space than input.
|
||||
 * Conversion will run directly without copying whenever possible.
|
||||
*
|
||||
* @param s allocated Swr context, with parameters set
|
||||
* @param out output buffers, only the first one need be set in case of packed audio
|
||||
* @param out_count amount of space available for output in samples per channel
|
||||
* @param in input buffers, only the first one need to be set in case of packed audio
|
||||
* @param in_count number of input samples available in one channel
|
||||
*
|
||||
* @return number of samples output per channel, negative value on error
|
||||
*)
|
||||
//int swr_convert(struct SwrContext *s, uint8_t **out, int out_count,
|
||||
// const uint8_t **in , int in_count);
|
||||
function swr_convert(s: pSwrContext; var out_: PByte; out_count: Integer;
  const in_: PByte; in_count: Integer): Integer; cdecl;
|
||||
|
||||
(*
|
||||
* Convert the next timestamp from input to output
|
||||
* timestamps are in 1/(in_sample_rate * out_sample_rate) units.
|
||||
*
|
||||
* @note There are 2 slightly differently behaving modes.
|
||||
* First is when automatic timestamp compensation is not used, (min_compensation >= FLT_MAX)
|
||||
* in this case timestamps will be passed through with delays compensated
|
||||
* Second is when automatic timestamp compensation is used, (min_compensation < FLT_MAX)
|
||||
* in this case the output timestamps will match output sample numbers
|
||||
*
|
||||
* @param pts timestamp for the next input sample, INT64_MIN if unknown
|
||||
* @return the output timestamp for the next output sample
|
||||
*)
|
||||
//int64_t swr_next_pts(struct SwrContext *s, int64_t pts);
|
||||
|
||||
(*
|
||||
* Activate resampling compensation.
|
||||
*)
|
||||
//int swr_set_compensation(struct SwrContext *s, int sample_delta, int compensation_distance);
|
||||
|
||||
(*
|
||||
* Set a customized input channel mapping.
|
||||
*
|
||||
* @param s allocated Swr context, not yet initialized
|
||||
* @param channel_map customized input channel mapping (array of channel
|
||||
* indexes, -1 for a muted channel)
|
||||
* @return AVERROR error code in case of failure.
|
||||
*)
|
||||
//int swr_set_channel_mapping(struct SwrContext *s, const int *channel_map);
|
||||
|
||||
(*
|
||||
* Set a customized remix matrix.
|
||||
*
|
||||
* @param s allocated Swr context, not yet initialized
|
||||
* @param matrix remix coefficients; matrix[i + stride * o] is
|
||||
* the weight of input channel i in output channel o
|
||||
* @param stride offset between lines of the matrix
|
||||
* @return AVERROR error code in case of failure.
|
||||
*)
|
||||
//int swr_set_matrix(struct SwrContext *s, const double *matrix, int stride);
|
||||
|
||||
(*
|
||||
* Drops the specified number of output samples.
|
||||
*)
|
||||
//int swr_drop_output(struct SwrContext *s, int count);
|
||||
|
||||
(*
|
||||
* Injects the specified number of silence samples.
|
||||
*)
|
||||
//int swr_inject_silence(struct SwrContext *s, int count);
|
||||
|
||||
(*
|
||||
* Gets the delay the next input sample will experience relative to the next output sample.
|
||||
*
|
||||
* Swresample can buffer data if more input has been provided than available
|
||||
* output space, also converting between sample rates needs a delay.
|
||||
* This function returns the sum of all such delays.
|
||||
* The exact delay is not necessarily an integer value in either input or
|
||||
* output sample rate. Especially when downsampling by a large value, the
|
||||
* output sample rate may be a poor choice to represent the delay, similarly
|
||||
* for upsampling and the input sample rate.
|
||||
*
|
||||
* @param s swr context
|
||||
* @param base timebase in which the returned delay will be
|
||||
* if its set to 1 the returned delay is in seconds
|
||||
* if its set to 1000 the returned delay is in milli seconds
|
||||
* if its set to the input sample rate then the returned delay is in input samples
|
||||
* if its set to the output sample rate then the returned delay is in output samples
|
||||
* an exact rounding free delay can be found by using LCM(in_sample_rate, out_sample_rate)
|
||||
* @returns the delay in 1/base units.
|
||||
*)
|
||||
//int64_t swr_get_delay(struct SwrContext *s, int64_t base);
|
||||
|
||||
(*
|
||||
* Return the LIBSWRESAMPLE_VERSION_INT constant.
|
||||
*)
|
||||
//unsigned swresample_version(void);
|
||||
|
||||
(*
|
||||
* Return the swr build-time configuration.
|
||||
*)
|
||||
//const char *swresample_configuration(void);
|
||||
|
||||
(*
|
||||
* Return the swr license.
|
||||
*)
|
||||
//const char *swresample_license(void);
|
||||
|
||||
implementation

uses ffm.lib;

function swr_convert; external swscale_dll;

end.
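Since this unit only binds swr_convert, a hedged sketch of the flush step described in the comments above might look like the following (the unit name is hypothetical; the SwrContext is assumed to have been allocated and initialized elsewhere, e.g. through swr_alloc/swr_init bindings that are not part of this unit, and buffer handling is illustrative):
```
unit SwrFlushDemo; // hypothetical helper, not part of the commit

interface

uses
  ffm.swresample;

// Drain whatever samples are still buffered inside an initialized SwrContext.
procedure FlushResampler(ctx: pSwrContext; outBuf: PByte; outCapacity: Integer);

implementation

procedure FlushResampler(ctx: pSwrContext; outBuf: PByte; outCapacity: Integer);
var
  got: Integer;
begin
  // Passing nil input with in_count = 0 flushes the resampler,
  // as described in the swr_convert comments above.
  repeat
    got := swr_convert(ctx, outBuf, outCapacity, nil, 0);
    // got > 0: 'got' samples per channel were written to outBuf; consume them here.
  until got <= 0;
end;

end.
```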
|
@ -1,413 +0,0 @@
|
||||
unit ffm.swscale;
|
||||
|
||||
{$i ffmpeg.inc}
|
||||
|
||||
interface
|
||||
|
||||
uses
|
||||
ffm.pixfmt, ffm.ctypes;
|
||||
|
||||
(*
|
||||
* Copyright (C) 2001-2011 Michael Niedermayer <michaelni@gmx.at>
|
||||
*
|
||||
* This file is part of ffm.
|
||||
*
|
||||
* FFmpeg is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU Lesser General Public
|
||||
* License as published by the Free Software Foundation; either
|
||||
* version 2.1 of the License, or (at your option) any later version.
|
||||
*
|
||||
* FFmpeg is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public
|
||||
* License along with FFmpeg; if not, write to the Free Software
|
||||
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
*)
|
||||
|
||||
(*
|
||||
* @file
|
||||
* @ingroup lsws
|
||||
* external API header
|
||||
*)
|
||||
|
||||
(*
|
||||
* @defgroup lsws Libswscale
|
||||
* @{
|
||||
*)
|
||||
|
||||
(*
|
||||
* Return the LIBSWSCALE_VERSION_INT constant.
|
||||
*)
|
||||
// unsigned swscale_version(void);
|
||||
|
||||
(*
|
||||
* Return the libswscale build-time configuration.
|
||||
*)
|
||||
// const char *swscale_configuration(void);
|
||||
|
||||
(*
|
||||
* Return the libswscale license.
|
||||
*)
|
||||
// const char *swscale_license(void);
|
||||
|
||||
Const
|
||||
(* values for the flags, the stuff on the command line is different *)
|
||||
SWS_FAST_BILINEAR = 1;
|
||||
SWS_BILINEAR = 2;
|
||||
SWS_BICUBIC = 4;
|
||||
SWS_X = 8;
|
||||
SWS_POINT = $10;
|
||||
SWS_AREA = $20;
|
||||
SWS_BICUBLIN = $40;
|
||||
SWS_GAUSS = $80;
|
||||
SWS_SINC = $100;
|
||||
SWS_LANCZOS = $200;
|
||||
SWS_SPLINE = $400;
|
||||
|
||||
SWS_SRC_V_CHR_DROP_MASK = $30000;
|
||||
SWS_SRC_V_CHR_DROP_SHIFT = 16;
|
||||
|
||||
SWS_PARAM_DEFAULT = 123456;
|
||||
|
||||
SWS_PRINT_INFO = $1000;
|
||||
|
||||
  // the following 3 flags are not completely implemented
  // internal chrominance subsampling info
  SWS_FULL_CHR_H_INT = $2000;
  // input subsampling info
  SWS_FULL_CHR_H_INP = $4000;
  SWS_DIRECT_BGR = $8000;
  SWS_ACCURATE_RND = $40000;
  SWS_BITEXACT = $80000;
  SWS_ERROR_DIFFUSION = $800000;

{$IFDEF FF_API_SWS_CPU_CAPS}
  (*
  * CPU caps are autodetected now, those flags
  * are only provided for API compatibility.
  *)
  SWS_CPU_CAPS_MMX = $80000000;
  SWS_CPU_CAPS_MMXEXT = $20000000;
  SWS_CPU_CAPS_MMX2 = $20000000;
  SWS_CPU_CAPS_3DNOW = $40000000;
  SWS_CPU_CAPS_ALTIVEC = $10000000;
  SWS_CPU_CAPS_BFIN = $01000000;
  SWS_CPU_CAPS_SSE2 = $02000000;
{$ENDIF}
  SWS_MAX_REDUCE_CUTOFF = 0.002;

  SWS_CS_ITU709 = 1;
  SWS_CS_FCC = 4;
  SWS_CS_ITU601 = 5;
  SWS_CS_ITU624 = 5;
  SWS_CS_SMPTE170M = 5;
  SWS_CS_SMPTE240M = 7;
  SWS_CS_DEFAULT = 5;

(*
* Return a pointer to yuv<->rgb coefficients for the given colorspace
* suitable for sws_setColorspaceDetails().
*
* @param colorspace One of the SWS_CS_* macros. If invalid,
* SWS_CS_DEFAULT is used.
*)
// const int *sws_getCoefficients(int colorspace);
Type
  // when used for filters they must have an odd number of elements
  // coeffs cannot be shared between vectors
  pSwsVector = ^TSwsVector;

  TSwsVector = {packed} record
    coeff: pDouble;   ///< pointer to the list of coefficients
    length: Integer;  ///< number of coefficients in the vector
  end;

  // vectors can be shared
  pSwsFilter = ^TSwsFilter;

  TSwsFilter = {packed} record
    lumH: pSwsVector;
    lumV: pSwsVector;
    chrH: pSwsVector;
    chrV: pSwsVector;
  end;

  pSwsContext = ^TSwsContext;

  TSwsContext = {packed} record
  end;

(*
* Return a positive value if pix_fmt is a supported input format, 0
* otherwise.
*)
// int sws_isSupportedInput(enum AVPixelFormat pix_fmt);

(*
* Return a positive value if pix_fmt is a supported output format, 0
* otherwise.
*)
// int sws_isSupportedOutput(enum AVPixelFormat pix_fmt);

(*
* @param[in] pix_fmt the pixel format
* @return a positive value if an endianness conversion for pix_fmt is
* supported, 0 otherwise.
*)
// int sws_isSupportedEndiannessConversion(enum AVPixelFormat pix_fmt);

(*
* Allocate an empty SwsContext. This must be filled and passed to
* sws_init_context(). For filling see AVOptions, options.c and
* sws_setColorspaceDetails().
*)
// struct SwsContext *sws_alloc_context(void);

(*
* Initialize the swscaler context sws_context.
*
* @return zero or positive value on success, a negative value on
* error
*)
// int sws_init_context(struct SwsContext *sws_context, SwsFilter *srcFilter, SwsFilter *dstFilter);

(*
* Free the swscaler context swsContext.
* If swsContext is NULL, then does nothing.
*)
// void sws_freeContext(struct SwsContext *swsContext);
procedure sws_freeContext(swsContext: pSwsContext); cdecl;

{$IFDEF FF_API_SWS_GETCONTEXT}
(*
* Allocate and return an SwsContext. You need it to perform
* scaling/conversion operations using sws_scale().
*
* @param srcW the width of the source image
* @param srcH the height of the source image
* @param srcFormat the source image format
* @param dstW the width of the destination image
* @param dstH the height of the destination image
* @param dstFormat the destination image format
* @param flags specify which algorithm and options to use for rescaling
* @return a pointer to an allocated context, or NULL in case of error
* @note this function is to be removed after a saner alternative is
* written
* @deprecated Use sws_getCachedContext() instead.
*)
// struct SwsContext *sws_getContext(int srcW, int srcH, enum AVPixelFormat srcFormat,
// int dstW, int dstH, enum AVPixelFormat dstFormat,
// int flags, SwsFilter *srcFilter,
// SwsFilter *dstFilter, const double *param);
function sws_getContext(srcW: Integer; srcH: Integer; srcFormat: TAVPixelFormat; dstW: Integer; dstH: Integer; dstFormat: TAVPixelFormat;
  flags: Integer; srcFilter: pSwsFilter; dstFilter: pSwsFilter; const param: pDouble): pSwsContext; cdecl;
{$ENDIF}
(*
* Scale the image slice in srcSlice and put the resulting scaled
* slice in the image in dst. A slice is a sequence of consecutive
* rows in an image.
*
* Slices have to be provided in sequential order, either in
* top-bottom or bottom-top order. If slices are provided in
* non-sequential order the behavior of the function is undefined.
*
* @param c         the scaling context previously created with
*                  sws_getContext()
* @param srcSlice  the array containing the pointers to the planes of
*                  the source slice
* @param srcStride the array containing the strides for each plane of
*                  the source image
* @param srcSliceY the position in the source image of the slice to
*                  process, that is the number (counted starting from
*                  zero) in the image of the first row of the slice
* @param srcSliceH the height of the source slice, that is the number
*                  of rows in the slice
* @param dst       the array containing the pointers to the planes of
*                  the destination image
* @param dstStride the array containing the strides for each plane of
*                  the destination image
* @return          the height of the output slice
*)
// int sws_scale(struct SwsContext *c, const uint8_t *const srcSlice[],
// const int srcStride[], int srcSliceY, int srcSliceH,
// uint8_t *const dst[], const int dstStride[]);

Type
  TCintArray = array [0 .. 0] of integer;
  PCintArray = ^TCintArray;
  TPCuint8Array = array [0 .. 0] of pByte;
  PPCuint8Array = ^TPCuint8Array;

function sws_scale( //
  c: pSwsContext;                 // struct SwsContext *c
  const srcSlice: PPCuint8Array;  // const uint8_t *const srcSlice[]
  const srcStride: PCintArray;    // const int srcStride[]
  srcSliceY: integer;             // int srcSliceY
  srcSliceH: integer;             // int srcSliceH
  dst: PPCuint8Array;             // uint8_t *const dst[]
  const dstStride: PCintArray     // const int dstStride[]
  ): integer; cdecl;
{
int sws_scale(
  struct SwsContext *c,
  const uint8_t *const srcSlice[],
  const int srcStride[],
  int srcSliceY,
  int srcSliceH,
  uint8_t *const dst[],
  const int dstStride[]);
}
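(*
* Editor's sketch, not part of the original header: typical use of
* sws_getContext/sws_scale/sws_freeContext from Delphi to convert one frame.
* The local names and the AV_PIX_FMT_* members of TAVPixelFormat are assumed
* for illustration only.
*
*   var
*     ctx: pSwsContext;
*     srcData, dstData: array [0 .. 3] of pByte;
*     srcStride, dstStride: array [0 .. 3] of Integer;
*   begin
*     ctx := sws_getContext(W, H, AV_PIX_FMT_BGR24, W, H, AV_PIX_FMT_YUV420P,
*       SWS_BILINEAR, nil, nil, nil);
*     if Assigned(ctx) then
*     try
*       // srcData[0]/srcStride[0] describe the packed BGR24 buffer,
*       // dstData[0..2]/dstStride[0..2] the Y, U and V planes.
*       sws_scale(ctx, PPCuint8Array(@srcData), PCintArray(@srcStride), 0, H,
*         PPCuint8Array(@dstData), PCintArray(@dstStride));
*     finally
*       sws_freeContext(ctx);
*     end;
*   end;
*)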

(*
* @param dstRange flag indicating the white-black range of the output (1=jpeg / 0=mpeg)
* @param srcRange flag indicating the white-black range of the input (1=jpeg / 0=mpeg)
* @param table the yuv2rgb coefficients describing the output yuv space, normally ff_yuv2rgb_coeffs[x]
* @param inv_table the yuv2rgb coefficients describing the input yuv space, normally ff_yuv2rgb_coeffs[x]
* @param brightness 16.16 fixed point brightness correction
* @param contrast 16.16 fixed point contrast correction
* @param saturation 16.16 fixed point saturation correction
* @return -1 if not supported
*)
// int sws_setColorspaceDetails(struct SwsContext *c, const int inv_table[4],
// int srcRange, const int table[4], int dstRange,
// int brightness, int contrast, int saturation);

(*
* @return -1 if not supported
*)
// int sws_getColorspaceDetails(struct SwsContext *c, int **inv_table,
// int *srcRange, int **table, int *dstRange,
// int *brightness, int *contrast, int *saturation);

(*
* Allocate and return an uninitialized vector with length coefficients.
*)
// SwsVector *sws_allocVec(int length);

(*
* Return a normalized Gaussian curve used to filter stuff
* quality = 3 is high quality, lower is lower quality.
*)
// SwsVector *sws_getGaussianVec(double variance, double quality);

(*
* Allocate and return a vector with length coefficients, all
* with the same value c.
*)
// SwsVector *sws_getConstVec(double c, int length);

(*
* Allocate and return a vector with just one coefficient, with
* value 1.0.
*)
// SwsVector *sws_getIdentityVec(void);

(*
* Scale all the coefficients of a by the scalar value.
*)
// void sws_scaleVec(SwsVector *a, double scalar);

(*
* Scale all the coefficients of a so that their sum equals height.
*)
// void sws_normalizeVec(SwsVector *a, double height);
// void sws_convVec(SwsVector *a, SwsVector *b);
// void sws_addVec(SwsVector *a, SwsVector *b);
// void sws_subVec(SwsVector *a, SwsVector *b);
// void sws_shiftVec(SwsVector *a, int shift);

(*
* Allocate and return a clone of the vector a, that is a vector
* with the same coefficients as a.
*)
// SwsVector *sws_cloneVec(SwsVector *a);

(*
* Print with av_log() a textual representation of the vector a
* if log_level <= av_log_level.
*)
// void sws_printVec2(SwsVector *a, AVClass *log_ctx, int log_level);

// void sws_freeVec(SwsVector *a);

// SwsFilter *sws_getDefaultFilter(float lumaGBlur, float chromaGBlur,
// float lumaSharpen, float chromaSharpen,
// float chromaHShift, float chromaVShift,
// int verbose);
// void sws_freeFilter(SwsFilter *filter);

(*
* Check if context can be reused, otherwise reallocate a new one.
*
* If context is NULL, just calls sws_getContext() to get a new
* context. Otherwise, checks if the parameters are the ones already
* saved in context. If that is the case, returns the current
* context. Otherwise, frees context and gets a new context with
* the new parameters.
*
* Be warned that srcFilter and dstFilter are not checked, they
* are assumed to remain the same.
*)
// struct SwsContext *sws_getCachedContext(struct SwsContext *context,
// int srcW, int srcH, enum AVPixelFormat srcFormat,
// int dstW, int dstH, enum AVPixelFormat dstFormat,
// int flags, SwsFilter *srcFilter,
// SwsFilter *dstFilter, const double *param);

function sws_getCachedContext(context: pSwsContext;
  srcW: Integer; srcH: Integer; srcFormat: TAVPixelFormat;
  dstW: Integer; dstH: Integer; dstFormat: TAVPixelFormat;
  flags: Integer; srcFilter: pSwsFilter;
  dstFilter: pSwsFilter; const param: pDouble): pSwsContext; cdecl;
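(*
* Editor's sketch, not part of the original header: because the parameters are
* compared against the ones stored in the context, the cached variant can be
* called once per decoded frame without leaking contexts. The srcW/srcH/srcFmt
* and dst* names below are placeholders:
*
*   ctx := sws_getCachedContext(ctx, srcW, srcH, srcFmt,
*     dstW, dstH, dstFmt, SWS_FAST_BILINEAR, nil, nil, nil);
*)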

(*
* Convert an 8-bit paletted frame into a frame with a color depth of 32 bits.
*
* The output frame will have the same {packed} format as the palette.
*
* @param src        source frame buffer
* @param dst        destination frame buffer
* @param num_pixels number of pixels to convert
* @param palette    array with [256] entries, which must match color arrangement (RGB or BGR) of src
*)
// void sws_convertPalette8Topacked32(const uint8_t *src, uint8_t *dst, int num_pixels, const uint8_t *palette);

(*
* Convert an 8-bit paletted frame into a frame with a color depth of 24 bits.
*
* With the palette format "ABCD", the destination frame ends up with the format "ABC".
*
* @param src        source frame buffer
* @param dst        destination frame buffer
* @param num_pixels number of pixels to convert
* @param palette    array with [256] entries, which must match color arrangement (RGB or BGR) of src
*)
// void sws_convertPalette8Topacked24(const uint8_t *src, uint8_t *dst, int num_pixels, const uint8_t *palette);

(*
* Get the AVClass for swsContext. It can be used in combination with
* AV_OPT_SEARCH_FAKE_OBJ for examining options.
*
* @see av_opt_find().
*)
// const AVClass *sws_get_class(void);

implementation

uses ffm.lib;

{$IFDEF FF_API_SWS_GETCONTEXT}
function sws_getContext; external swscale_dll;
{$ENDIF}
function sws_scale; external swscale_dll;
procedure sws_freeContext; external swscale_dll;
function sws_getCachedContext; external swscale_dll;

end.
@ -1,46 +0,0 @@
(*
* Copyright (c) 2000-2003 Fabrice Bellard
*
* This file is part of ffm.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*)
unit ffm.time;

interface

(**
* Get the current time in microseconds.
*)
function av_gettime(): int64; cdecl;

(**
* Sleep for a period of time. Although the duration is expressed in
* microseconds, the actual delay may be rounded to the precision of the
* system timer.
*
* @param usec Number of microseconds to sleep.
* @return zero on success or (negative) error code.
*)
function av_usleep(usec: cardinal): Integer; cdecl;
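(*
* Editor's sketch, not part of the original unit: measuring an interval and
* sleeping with the two calls above; both operate in microseconds.
*
*   var
*     t0: Int64;
*   begin
*     t0 := av_gettime();
*     av_usleep(40000);  // ~40 ms
*     WriteLn('slept for ', av_gettime() - t0, ' us');
*   end;
*)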

implementation

uses ffm.lib;

function av_gettime; external avutil_dll;
function av_usleep; external avutil_dll;

end.
@ -1,18 +0,0 @@
//
{$POINTERMATH ON}
{$MINENUMSIZE 4} (* use 4-byte enums *)
{$WRITEABLECONST ON}
//
// swscale
{$DEFINE FF_API_SWS_GETCONTEXT}
// avfilter
{$DEFINE FF_API_AVFILTERBUFFER}
{$DEFINE FF_API_FOO_COUNT}
// libavcodec.avcodec
{$DEFINE FF_API_CODEC_ID}
{$DEFINE FF_API_XVMC}
{$DEFINE FF_API_DEBUG_MV}
{$DEFINE FF_API_THREAD_OPAQUE}
{$DEFINE FF_API_ERROR_RATE}
{$DEFINE FF_API_CODEC_PKT}
{$DEFINE FF_API_DESTRUCT_PACKET}
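(*
* Editor's note, not part of the original include: each FF_API_* define above
* only switches deprecated bindings on in the units that use {$i ffmpeg.inc},
* e.g. in ffm.swscale:
*
*   {$IFDEF FF_API_SWS_GETCONTEXT}
*   function sws_getContext(...): pSwsContext; cdecl;
*   {$ENDIF}
*
* Removing a define drops the corresponding declarations at compile time.
*)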
@ -1,391 +0,0 @@
(*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*)

(*
* This header exists to prevent new codec IDs from being accidentally added to
* the deprecated list.
* Do not include it directly. It will be removed on next major bump
*
* Do not add new items to this list. Use the AVCodecID enum instead.
*)
,
CODEC_ID_NONE = AV_CODEC_ID_NONE,

(* video codecs *)
CODEC_ID_MPEG1VIDEO,
CODEC_ID_MPEG2VIDEO, ///< preferred ID for MPEG-1/2 video decoding
CODEC_ID_MPEG2VIDEO_XVMC,
CODEC_ID_H261,
CODEC_ID_H263,
CODEC_ID_RV10,
CODEC_ID_RV20,
CODEC_ID_MJPEG,
CODEC_ID_MJPEGB,
CODEC_ID_LJPEG,
CODEC_ID_SP5X,
CODEC_ID_JPEGLS,
CODEC_ID_MPEG4,
CODEC_ID_RAWVIDEO,
CODEC_ID_MSMPEG4V1,
CODEC_ID_MSMPEG4V2,
CODEC_ID_MSMPEG4V3,
CODEC_ID_WMV1,
CODEC_ID_WMV2,
CODEC_ID_H263P,
CODEC_ID_H263I,
CODEC_ID_FLV1,
CODEC_ID_SVQ1,
CODEC_ID_SVQ3,
CODEC_ID_DVVIDEO,
CODEC_ID_HUFFYUV,
CODEC_ID_CYUV,
CODEC_ID_H264,
CODEC_ID_INDEO3,
CODEC_ID_VP3,
CODEC_ID_THEORA,
CODEC_ID_ASV1,
CODEC_ID_ASV2,
CODEC_ID_FFV1,
CODEC_ID_4XM,
CODEC_ID_VCR1,
CODEC_ID_CLJR,
CODEC_ID_MDEC,
CODEC_ID_ROQ,
CODEC_ID_INTERPLAY_VIDEO,
CODEC_ID_XAN_WC3,
CODEC_ID_XAN_WC4,
CODEC_ID_RPZA,
CODEC_ID_CINEPAK,
CODEC_ID_WS_VQA,
CODEC_ID_MSRLE,
CODEC_ID_MSVIDEO1,
CODEC_ID_IDCIN,
CODEC_ID_8BPS,
CODEC_ID_SMC,
CODEC_ID_FLIC,
CODEC_ID_TRUEMOTION1,
CODEC_ID_VMDVIDEO,
CODEC_ID_MSZH,
CODEC_ID_ZLIB,
CODEC_ID_QTRLE,
CODEC_ID_TSCC,
CODEC_ID_ULTI,
CODEC_ID_QDRAW,
CODEC_ID_VIXL,
CODEC_ID_QPEG,
CODEC_ID_PNG,
CODEC_ID_PPM,
CODEC_ID_PBM,
CODEC_ID_PGM,
CODEC_ID_PGMYUV,
CODEC_ID_PAM,
CODEC_ID_FFVHUFF,
CODEC_ID_RV30,
CODEC_ID_RV40,
CODEC_ID_VC1,
CODEC_ID_WMV3,
CODEC_ID_LOCO,
CODEC_ID_WNV1,
CODEC_ID_AASC,
CODEC_ID_INDEO2,
CODEC_ID_FRAPS,
CODEC_ID_TRUEMOTION2,
CODEC_ID_BMP,
CODEC_ID_CSCD,
CODEC_ID_MMVIDEO,
CODEC_ID_ZMBV,
CODEC_ID_AVS,
CODEC_ID_SMACKVIDEO,
CODEC_ID_NUV,
CODEC_ID_KMVC,
CODEC_ID_FLASHSV,
CODEC_ID_CAVS,
CODEC_ID_JPEG2000,
CODEC_ID_VMNC,
CODEC_ID_VP5,
CODEC_ID_VP6,
CODEC_ID_VP6F,
CODEC_ID_TARGA,
CODEC_ID_DSICINVIDEO,
CODEC_ID_TIERTEXSEQVIDEO,
CODEC_ID_TIFF,
CODEC_ID_GIF,
CODEC_ID_DXA,
CODEC_ID_DNXHD,
CODEC_ID_THP,
CODEC_ID_SGI,
CODEC_ID_C93,
CODEC_ID_BETHSOFTVID,
CODEC_ID_PTX,
CODEC_ID_TXD,
CODEC_ID_VP6A,
CODEC_ID_AMV,
CODEC_ID_VB,
CODEC_ID_PCX,
CODEC_ID_SUNRAST,
CODEC_ID_INDEO4,
CODEC_ID_INDEO5,
CODEC_ID_MIMIC,
CODEC_ID_RL2,
CODEC_ID_ESCAPE124,
CODEC_ID_DIRAC,
CODEC_ID_BFI,
CODEC_ID_CMV,
CODEC_ID_MOTIONPIXELS,
CODEC_ID_TGV,
CODEC_ID_TGQ,
CODEC_ID_TQI,
CODEC_ID_AURA,
CODEC_ID_AURA2,
CODEC_ID_V210X,
CODEC_ID_TMV,
CODEC_ID_V210,
CODEC_ID_DPX,
CODEC_ID_MAD,
CODEC_ID_FRWU,
CODEC_ID_FLASHSV2,
CODEC_ID_CDGRAPHICS,
CODEC_ID_R210,
CODEC_ID_ANM,
CODEC_ID_BINKVIDEO,
CODEC_ID_IFF_ILBM,
CODEC_ID_IFF_BYTERUN1,
CODEC_ID_KGV1,
CODEC_ID_YOP,
CODEC_ID_VP8,
CODEC_ID_PICTOR,
CODEC_ID_ANSI,
CODEC_ID_A64_MULTI,
CODEC_ID_A64_MULTI5,
CODEC_ID_R10K,
CODEC_ID_MXPEG,
CODEC_ID_LAGARITH,
CODEC_ID_PRORES,
CODEC_ID_JV,
CODEC_ID_DFA,
CODEC_ID_WMV3IMAGE,
CODEC_ID_VC1IMAGE,
CODEC_ID_UTVIDEO,
CODEC_ID_BMV_VIDEO,
CODEC_ID_VBLE,
CODEC_ID_DXTORY,
CODEC_ID_V410,
CODEC_ID_XWD,
CODEC_ID_CDXL,
CODEC_ID_XBM,
CODEC_ID_ZEROCODEC,
CODEC_ID_MSS1,
CODEC_ID_MSA1,
CODEC_ID_TSCC2,
CODEC_ID_MTS2,
CODEC_ID_CLLC,
CODEC_ID_Y41P = $59343150,//MKBETAG('Y','4','1','P'),
CODEC_ID_ESCAPE130 = $45313330,//MKBETAG('E','1','3','0'),
CODEC_ID_EXR = $30455852,//MKBETAG('0','E','X','R'),
CODEC_ID_AVRP = $41565250,//MKBETAG('A','V','R','P'),

CODEC_ID_G2M = $3047324D,//MKBETAG( 0 ,'G','2','M'),
CODEC_ID_AVUI = $41565549,//MKBETAG('A','V','U','I'),
CODEC_ID_AYUV = $41595556,//MKBETAG('A','Y','U','V'),
CODEC_ID_V308 = $56333038,//MKBETAG('V','3','0','8'),
CODEC_ID_V408 = $56343038,//MKBETAG('V','4','0','8'),
CODEC_ID_YUV4 = $59555634,//MKBETAG('Y','U','V','4'),
CODEC_ID_SANM = $53414E4D,//MKBETAG('S','A','N','M'),
CODEC_ID_PAF_VIDEO = $50414656,//MKBETAG('P','A','F','V'),
CODEC_ID_SNOW = AV_CODEC_ID_SNOW,

(* various PCM "codecs" *)
CODEC_ID_FIRST_AUDIO = $10000, ///< A dummy id pointing at the start of audio codecs
CODEC_ID_PCM_S16LE = $10000,
CODEC_ID_PCM_S16BE,
CODEC_ID_PCM_U16LE,
CODEC_ID_PCM_U16BE,
CODEC_ID_PCM_S8,
CODEC_ID_PCM_U8,
CODEC_ID_PCM_MULAW,
CODEC_ID_PCM_ALAW,
CODEC_ID_PCM_S32LE,
CODEC_ID_PCM_S32BE,
CODEC_ID_PCM_U32LE,
CODEC_ID_PCM_U32BE,
CODEC_ID_PCM_S24LE,
CODEC_ID_PCM_S24BE,
CODEC_ID_PCM_U24LE,
CODEC_ID_PCM_U24BE,
CODEC_ID_PCM_S24DAUD,
CODEC_ID_PCM_ZORK,
CODEC_ID_PCM_S16LE_PLANAR,
CODEC_ID_PCM_DVD,
CODEC_ID_PCM_F32BE,
CODEC_ID_PCM_F32LE,
CODEC_ID_PCM_F64BE,
CODEC_ID_PCM_F64LE,
CODEC_ID_PCM_BLURAY,
CODEC_ID_PCM_LXF,
CODEC_ID_S302M,
CODEC_ID_PCM_S8_PLANAR,

(* various ADPCM codecs *)
CODEC_ID_ADPCM_IMA_QT = $11000,
CODEC_ID_ADPCM_IMA_WAV,
CODEC_ID_ADPCM_IMA_DK3,
CODEC_ID_ADPCM_IMA_DK4,
CODEC_ID_ADPCM_IMA_WS,
CODEC_ID_ADPCM_IMA_SMJPEG,
CODEC_ID_ADPCM_MS,
CODEC_ID_ADPCM_4XM,
CODEC_ID_ADPCM_XA,
CODEC_ID_ADPCM_ADX,
CODEC_ID_ADPCM_EA,
CODEC_ID_ADPCM_G726,
CODEC_ID_ADPCM_CT,
CODEC_ID_ADPCM_SWF,
CODEC_ID_ADPCM_YAMAHA,
CODEC_ID_ADPCM_SBPRO_4,
CODEC_ID_ADPCM_SBPRO_3,
CODEC_ID_ADPCM_SBPRO_2,
CODEC_ID_ADPCM_THP,
CODEC_ID_ADPCM_IMA_AMV,
CODEC_ID_ADPCM_EA_R1,
CODEC_ID_ADPCM_EA_R3,
CODEC_ID_ADPCM_EA_R2,
CODEC_ID_ADPCM_IMA_EA_SEAD,
CODEC_ID_ADPCM_IMA_EA_EACS,
CODEC_ID_ADPCM_EA_XAS,
CODEC_ID_ADPCM_EA_MAXIS_XA,
CODEC_ID_ADPCM_IMA_ISS,
CODEC_ID_ADPCM_G722,
CODEC_ID_ADPCM_IMA_APC,
CODEC_ID_VIMA = $56494D41,//MKBETAG('V','I','M','A'),

(* AMR *)
CODEC_ID_AMR_NB = $12000,
CODEC_ID_AMR_WB,

(* RealAudio codecs*)
CODEC_ID_RA_144 = $13000,
CODEC_ID_RA_288,

(* various DPCM codecs *)
CODEC_ID_ROQ_DPCM = $14000,
CODEC_ID_INTERPLAY_DPCM,
CODEC_ID_XAN_DPCM,
CODEC_ID_SOL_DPCM,

(* audio codecs *)
CODEC_ID_MP2 = $15000,
CODEC_ID_MP3, ///< preferred ID for decoding MPEG audio layer 1, 2 or 3
CODEC_ID_AAC,
CODEC_ID_AC3,
CODEC_ID_DTS,
CODEC_ID_VORBIS,
CODEC_ID_DVAUDIO,
CODEC_ID_WMAV1,
CODEC_ID_WMAV2,
CODEC_ID_MACE3,
CODEC_ID_MACE6,
CODEC_ID_VMDAUDIO,
CODEC_ID_FLAC,
CODEC_ID_MP3ADU,
CODEC_ID_MP3ON4,
CODEC_ID_SHORTEN,
CODEC_ID_ALAC,
CODEC_ID_WESTWOOD_SND1,
CODEC_ID_GSM, ///< as in Berlin toast format
CODEC_ID_QDM2,
CODEC_ID_COOK,
CODEC_ID_TRUESPEECH,
CODEC_ID_TTA,
CODEC_ID_SMACKAUDIO,
CODEC_ID_QCELP,
CODEC_ID_WAVPACK,
CODEC_ID_DSICINAUDIO,
CODEC_ID_IMC,
CODEC_ID_MUSEPACK7,
CODEC_ID_MLP,
CODEC_ID_GSM_MS, (* as found in WAV *)
CODEC_ID_ATRAC3,
CODEC_ID_VOXWARE,
CODEC_ID_APE,
CODEC_ID_NELLYMOSER,
CODEC_ID_MUSEPACK8,
CODEC_ID_SPEEX,
CODEC_ID_WMAVOICE,
CODEC_ID_WMAPRO,
CODEC_ID_WMALOSSLESS,
CODEC_ID_ATRAC3P,
CODEC_ID_EAC3,
CODEC_ID_SIPR,
CODEC_ID_MP1,
CODEC_ID_TWINVQ,
CODEC_ID_TRUEHD,
CODEC_ID_MP4ALS,
CODEC_ID_ATRAC1,
CODEC_ID_BINKAUDIO_RDFT,
CODEC_ID_BINKAUDIO_DCT,
CODEC_ID_AAC_LATM,
CODEC_ID_QDMC,
CODEC_ID_CELT,
CODEC_ID_G723_1,
CODEC_ID_G729,
CODEC_ID_8SVX_EXP,
CODEC_ID_8SVX_FIB,
CODEC_ID_BMV_AUDIO,
CODEC_ID_RALF,
CODEC_ID_IAC,
CODEC_ID_ILBC,
CODEC_ID_FFWAVESYNTH = $46465753,//MKBETAG('F','F','W','S'),
CODEC_ID_SONIC = $534F4E43,//MKBETAG('S','O','N','C'),
CODEC_ID_SONIC_LS = $534F4E4C,//MKBETAG('S','O','N','L'),
CODEC_ID_PAF_AUDIO = $50414641,//MKBETAG('P','A','F','A'),
CODEC_ID_OPUS = $4F505553,//MKBETAG('O','P','U','S'),

(* subtitle codecs *)
CODEC_ID_FIRST_SUBTITLE = $17000, ///< A dummy ID pointing at the start of subtitle codecs.
CODEC_ID_DVD_SUBTITLE = $17000,
CODEC_ID_DVB_SUBTITLE,
CODEC_ID_TEXT, ///< raw UTF-8 text
CODEC_ID_XSUB,
CODEC_ID_SSA,
CODEC_ID_MOV_TEXT,
CODEC_ID_HDMV_PGS_SUBTITLE,
CODEC_ID_DVB_TELETEXT,
CODEC_ID_SRT,
CODEC_ID_MICRODVD = $6D445644,//MKBETAG('m','D','V','D'),
CODEC_ID_EIA_608 = $63363038,//MKBETAG('c','6','0','8'),
CODEC_ID_JACOSUB = $4A535542,//MKBETAG('J','S','U','B'),
CODEC_ID_SAMI = $53414D49,//MKBETAG('S','A','M','I'),
CODEC_ID_REALTEXT = $52545854,//MKBETAG('R','T','X','T'),
CODEC_ID_SUBVIEWER = $53756256,//MKBETAG('S','u','b','V'),

(* other specific kind of codecs (generally used for attachments) *)
CODEC_ID_FIRST_UNKNOWN = $18000, ///< A dummy ID pointing at the start of various fake codecs.
CODEC_ID_TTF = $18000,
CODEC_ID_BINTEXT = $42545854,//MKBETAG('B','T','X','T'),
CODEC_ID_XBIN = $5842494E,//MKBETAG('X','B','I','N'),
CODEC_ID_IDF = $30494446,//MKBETAG( 0 ,'I','D','F'),
CODEC_ID_OTF = $304F5446,//MKBETAG( 0 ,'O','T','F'),

CODEC_ID_PROBE = $19000, ///< codec_id is not known (like CODEC_ID_NONE) but lavf should attempt to identify it

CODEC_ID_MPEG2TS = $20000, (**< _FAKE_ codec to indicate a raw MPEG-2 TS
* stream (only used by libavformat) *)
CODEC_ID_MPEG4SYSTEMS = $20001, (**< _FAKE_ codec to indicate a MPEG-4 Systems
* stream (only used by libavformat) *)
CODEC_ID_FFMETADATA = $21000 ///< Dummy codec for streams containing only metadata information.
@ -1,173 +0,0 @@
(*
* copyright (c) 2006-2012 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*)

(*
* This header exists to prevent new pixel formats from being accidentally added
* to the deprecated list.
* Do not include it directly. It will be removed on next major bump
*
* Do not add new items to this list. Use the AVPixelFormat enum instead.
*)
PIX_FMT_NONE = AV_PIX_FMT_NONE,
PIX_FMT_YUV420P, ///< planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
PIX_FMT_YUYV422, ///< packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
PIX_FMT_RGB24, ///< packed RGB 8:8:8, 24bpp, RGBRGB...
PIX_FMT_BGR24, ///< packed RGB 8:8:8, 24bpp, BGRBGR...
PIX_FMT_YUV422P, ///< planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
PIX_FMT_YUV444P, ///< planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
PIX_FMT_YUV410P, ///< planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples)
PIX_FMT_YUV411P, ///< planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
PIX_FMT_GRAY8, ///< Y , 8bpp
PIX_FMT_MONOWHITE, ///< Y , 1bpp, 0 is white, 1 is black, in each byte pixels are ordered from the msb to the lsb
PIX_FMT_MONOBLACK, ///< Y , 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb
PIX_FMT_PAL8, ///< 8 bit with PIX_FMT_RGB32 palette
PIX_FMT_YUVJ420P, ///< planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of PIX_FMT_YUV420P and setting color_range
PIX_FMT_YUVJ422P, ///< planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of PIX_FMT_YUV422P and setting color_range
PIX_FMT_YUVJ444P, ///< planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of PIX_FMT_YUV444P and setting color_range
{$IFDEF FF_API_XVMC}
PIX_FMT_XVMC_MPEG2_MC,///< XVideo Motion Acceleration via common packet passing
PIX_FMT_XVMC_MPEG2_IDCT,
{$endif} //* FF_API_XVMC */
PIX_FMT_UYVY422, ///< packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
PIX_FMT_UYYVYY411, ///< packed YUV 4:1:1, 12bpp, Cb Y0 Y1 Cr Y2 Y3
PIX_FMT_BGR8, ///< packed RGB 3:3:2, 8bpp, (msb)2B 3G 3R(lsb)
PIX_FMT_BGR4, ///< packed RGB 1:2:1 bitstream, 4bpp, (msb)1B 2G 1R(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits
PIX_FMT_BGR4_BYTE, ///< packed RGB 1:2:1, 8bpp, (msb)1B 2G 1R(lsb)
PIX_FMT_RGB8, ///< packed RGB 3:3:2, 8bpp, (msb)2R 3G 3B(lsb)
PIX_FMT_RGB4, ///< packed RGB 1:2:1 bitstream, 4bpp, (msb)1R 2G 1B(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits
PIX_FMT_RGB4_BYTE, ///< packed RGB 1:2:1, 8bpp, (msb)1R 2G 1B(lsb)
PIX_FMT_NV12, ///< planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V)
PIX_FMT_NV21, ///< as above, but U and V bytes are swapped

PIX_FMT_ARGB, ///< packed ARGB 8:8:8:8, 32bpp, ARGBARGB...
PIX_FMT_RGBA, ///< packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
PIX_FMT_ABGR, ///< packed ABGR 8:8:8:8, 32bpp, ABGRABGR...
PIX_FMT_BGRA, ///< packed BGRA 8:8:8:8, 32bpp, BGRABGRA...

PIX_FMT_GRAY16BE, ///< Y , 16bpp, big-endian
PIX_FMT_GRAY16LE, ///< Y , 16bpp, little-endian
PIX_FMT_YUV440P, ///< planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
PIX_FMT_YUVJ440P, ///< planar YUV 4:4:0 full scale (JPEG), deprecated in favor of PIX_FMT_YUV440P and setting color_range
PIX_FMT_YUVA420P, ///< planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples)
{$ifdef FF_API_VDPAU}
PIX_FMT_VDPAU_H264,///< H.264 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
PIX_FMT_VDPAU_MPEG1,///< MPEG-1 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
PIX_FMT_VDPAU_MPEG2,///< MPEG-2 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
PIX_FMT_VDPAU_WMV3,///< WMV3 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
PIX_FMT_VDPAU_VC1, ///< VC-1 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
{$endif}
PIX_FMT_RGB48BE, ///< packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as big-endian
PIX_FMT_RGB48LE, ///< packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as little-endian

PIX_FMT_RGB565BE, ///< packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), big-endian
PIX_FMT_RGB565LE, ///< packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), little-endian
PIX_FMT_RGB555BE, ///< packed RGB 5:5:5, 16bpp, (msb)1A 5R 5G 5B(lsb), big-endian, most significant bit to 0
PIX_FMT_RGB555LE, ///< packed RGB 5:5:5, 16bpp, (msb)1A 5R 5G 5B(lsb), little-endian, most significant bit to 0

PIX_FMT_BGR565BE, ///< packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), big-endian
PIX_FMT_BGR565LE, ///< packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), little-endian
PIX_FMT_BGR555BE, ///< packed BGR 5:5:5, 16bpp, (msb)1A 5B 5G 5R(lsb), big-endian, most significant bit to 1
PIX_FMT_BGR555LE, ///< packed BGR 5:5:5, 16bpp, (msb)1A 5B 5G 5R(lsb), little-endian, most significant bit to 1

PIX_FMT_VAAPI_MOCO, ///< HW acceleration through VA API at motion compensation entry-point, Picture.data[3] contains a vaapi_render_state struct which contains macroblocks as well as various fields extracted from headers
PIX_FMT_VAAPI_IDCT, ///< HW acceleration through VA API at IDCT entry-point, Picture.data[3] contains a vaapi_render_state struct which contains fields extracted from headers
PIX_FMT_VAAPI_VLD, ///< HW decoding through VA API, Picture.data[3] contains a vaapi_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers

PIX_FMT_YUV420P16LE, ///< planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
PIX_FMT_YUV420P16BE, ///< planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
PIX_FMT_YUV422P16LE, ///< planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
PIX_FMT_YUV422P16BE, ///< planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
PIX_FMT_YUV444P16LE, ///< planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
PIX_FMT_YUV444P16BE, ///< planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
{$ifdef FF_API_VDPAU}
PIX_FMT_VDPAU_MPEG4, ///< MPEG4 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
{$endif}
PIX_FMT_DXVA2_VLD, ///< HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer

PIX_FMT_RGB444LE, ///< packed RGB 4:4:4, 16bpp, (msb)4A 4R 4G 4B(lsb), little-endian, most significant bits to 0
PIX_FMT_RGB444BE, ///< packed RGB 4:4:4, 16bpp, (msb)4A 4R 4G 4B(lsb), big-endian, most significant bits to 0
PIX_FMT_BGR444LE, ///< packed BGR 4:4:4, 16bpp, (msb)4A 4B 4G 4R(lsb), little-endian, most significant bits to 1
PIX_FMT_BGR444BE, ///< packed BGR 4:4:4, 16bpp, (msb)4A 4B 4G 4R(lsb), big-endian, most significant bits to 1
PIX_FMT_GRAY8A, ///< 8bit gray, 8bit alpha
PIX_FMT_BGR48BE, ///< packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as big-endian
PIX_FMT_BGR48LE, ///< packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as little-endian

//the following 10 formats have the disadvantage of needing 1 format for each bit depth, thus
//If you want to support multiple bit depths, then using PIX_FMT_YUV420P16* with the bpp stored separately
//is better
PIX_FMT_YUV420P9BE, ///< planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
PIX_FMT_YUV420P9LE, ///< planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
PIX_FMT_YUV420P10BE,///< planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
PIX_FMT_YUV420P10LE,///< planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
PIX_FMT_YUV422P10BE,///< planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
PIX_FMT_YUV422P10LE,///< planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
PIX_FMT_YUV444P9BE, ///< planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
PIX_FMT_YUV444P9LE, ///< planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
PIX_FMT_YUV444P10BE,///< planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
PIX_FMT_YUV444P10LE,///< planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
PIX_FMT_YUV422P9BE, ///< planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
PIX_FMT_YUV422P9LE, ///< planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
PIX_FMT_VDA_VLD, ///< hardware decoding through VDA

{$ifdef AV_PIX_FMT_ABI_GIT_MASTER}
PIX_FMT_RGBA64BE, ///< packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is stored as big-endian
PIX_FMT_RGBA64LE, ///< packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is stored as little-endian
PIX_FMT_BGRA64BE, ///< packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each R/G/B/A component is stored as big-endian
PIX_FMT_BGRA64LE, ///< packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each R/G/B/A component is stored as little-endian
{$endif}
PIX_FMT_GBRP, ///< planar GBR 4:4:4 24bpp
PIX_FMT_GBRP9BE, ///< planar GBR 4:4:4 27bpp, big endian
PIX_FMT_GBRP9LE, ///< planar GBR 4:4:4 27bpp, little endian
PIX_FMT_GBRP10BE, ///< planar GBR 4:4:4 30bpp, big endian
PIX_FMT_GBRP10LE, ///< planar GBR 4:4:4 30bpp, little endian
PIX_FMT_GBRP16BE, ///< planar GBR 4:4:4 48bpp, big endian
PIX_FMT_GBRP16LE, ///< planar GBR 4:4:4 48bpp, little endian

{$ifndef AV_PIX_FMT_ABI_GIT_MASTER}
PIX_FMT_RGBA64BE=$123, ///< packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is stored as big-endian
PIX_FMT_RGBA64LE, ///< packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is stored as little-endian
PIX_FMT_BGRA64BE, ///< packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each R/G/B/A component is stored as big-endian
PIX_FMT_BGRA64LE, ///< packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each R/G/B/A component is stored as little-endian
{$endif}
PIX_FMT_0RGB=$123+4, ///< packed RGB 8:8:8, 32bpp, 0RGB0RGB...
PIX_FMT_RGB0, ///< packed RGB 8:8:8, 32bpp, RGB0RGB0...
PIX_FMT_0BGR, ///< packed BGR 8:8:8, 32bpp, 0BGR0BGR...
PIX_FMT_BGR0, ///< packed BGR 8:8:8, 32bpp, BGR0BGR0...
PIX_FMT_YUVA444P, ///< planar YUV 4:4:4 32bpp, (1 Cr & Cb sample per 1x1 Y & A samples)
PIX_FMT_YUVA422P, ///< planar YUV 4:2:2 24bpp, (1 Cr & Cb sample per 2x1 Y & A samples)

PIX_FMT_YUV420P12BE, ///< planar YUV 4:2:0,18bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
PIX_FMT_YUV420P12LE, ///< planar YUV 4:2:0,18bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
PIX_FMT_YUV420P14BE, ///< planar YUV 4:2:0,21bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
PIX_FMT_YUV420P14LE, ///< planar YUV 4:2:0,21bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
PIX_FMT_YUV422P12BE, ///< planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
PIX_FMT_YUV422P12LE, ///< planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
PIX_FMT_YUV422P14BE, ///< planar YUV 4:2:2,28bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
PIX_FMT_YUV422P14LE, ///< planar YUV 4:2:2,28bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
PIX_FMT_YUV444P12BE, ///< planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
PIX_FMT_YUV444P12LE, ///< planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
PIX_FMT_YUV444P14BE, ///< planar YUV 4:4:4,42bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
PIX_FMT_YUV444P14LE, ///< planar YUV 4:4:4,42bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
PIX_FMT_GBRP12BE, ///< planar GBR 4:4:4 36bpp, big endian
PIX_FMT_GBRP12LE, ///< planar GBR 4:4:4 36bpp, little endian
PIX_FMT_GBRP14BE, ///< planar GBR 4:4:4 42bpp, big endian
PIX_FMT_GBRP14LE, ///< planar GBR 4:4:4 42bpp, little endian

PIX_FMT_NB, ///< number of pixel formats, DO NOT USE THIS if you want to link with shared libav* because the number of formats might differ between versions
@ -1,5 +1,32 @@
(*
****************************************************************
Delphi-OpenCV Demo
Copyright (C) 2013 Project Delphi-OpenCV
****************************************************************
Contributor:
Laentir Valetov
email:laex@bk.ru
****************************************************************
You may retrieve the latest version of this file at the GitHub,
located at git://github.com/Laex/Delphi-OpenCV.git
****************************************************************
The contents of this file are used with permission, subject to
the Mozilla Public License Version 1.1 (the "License"); you may
not use this file except in compliance with the License. You may
obtain a copy of the License at
http://www.mozilla.org/MPL/MPL-1_1Final.html

Software distributed under the License is distributed on an
"AS IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or
implied. See the License for the specific language governing
rights and limitations under the License.
****************************************************************
*)

unit ocv.fmxutils;

{$I OpenCV.inc}

interface

Uses
@ -15,9 +42,10 @@ procedure IPLImageToFMXBitmap(const IpImage: pIplImage; const FMXBitmap: TBitmap

implementation

{$IFDEF DELPHIXE5_UP}

Uses FMX.Types;

{$IFDEF DELPHIXE5_UP}
procedure IPLImageToFMXBitmap(const IpImage: pIplImage; const FMXBitmap: TBitmap); inline;
Var
  BitmapData: TBitmapData;