id
int64 0
755k
| file_name
stringlengths 3
109
| file_path
stringlengths 13
185
| content
stringlengths 31
9.38M
| size
int64 31
9.38M
| language
stringclasses 1
value | extension
stringclasses 11
values | total_lines
int64 1
340k
| avg_line_length
float64 2.18
149k
| max_line_length
int64 7
2.22M
| alphanum_fraction
float64 0
1
| repo_name
stringlengths 6
65
| repo_stars
int64 100
47.3k
| repo_forks
int64 0
12k
| repo_open_issues
int64 0
3.4k
| repo_license
stringclasses 9
values | repo_extraction_date
stringclasses 92
values | exact_duplicates_redpajama
bool 2
classes | near_duplicates_redpajama
bool 2
classes | exact_duplicates_githubcode
bool 2
classes | exact_duplicates_stackv2
bool 1
class | exact_duplicates_stackv1
bool 2
classes | near_duplicates_githubcode
bool 2
classes | near_duplicates_stackv1
bool 2
classes | near_duplicates_stackv2
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
22,022
|
toolwindowmanager.h
|
visualfc_liteide/liteidex/src/liteapp/toolwindowmanager.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: toolwindowmanager.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef TOOLWINDOWMANAGER_H
#define TOOLWINDOWMANAGER_H
#include "liteapi/liteapi.h"
using namespace LiteApi;
class ToolWindowManager : public IToolWindowManager
{
public:
virtual QAction *addToolWindow(Qt::DockWidgetArea area, QWidget *widget, const QString &id, const QString &title, bool split,
QList<QAction*> widgetActions = QList<QAction*>(),
QList<QWidget*> widgetList = QList<QWidget*>());
virtual void moveToolWindow(Qt::DockWidgetArea from, Qt::DockWidgetArea to,QAction *action, bool split);
virtual QAction *findToolWindow(QWidget *widget);
virtual void removeToolWindow(QAction *action);
virtual void removeToolWindow(QWidget *widget);
};
#endif // TOOLWINDOWMANAGER_H
| 1,857
|
C++
|
.h
| 38
| 45.368421
| 129
| 0.685226
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,023
|
goproxy.h
|
visualfc_liteide/liteidex/src/liteapp/goproxy.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: goproxy.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef GOPROXY_H
#define GOPROXY_H
#include "liteapi/liteapi.h"
#include <QMap>
class GoProxy : public LiteApi::IGoProxy
{
Q_OBJECT
public:
explicit GoProxy(QObject *parent = 0);
static bool hasProxy();
virtual bool isValid() const;
virtual bool isRunning() const;
virtual QByteArray commandId() const;
virtual void writeStdin(const QByteArray &data);
public slots:
virtual void call(const QByteArray &id, const QByteArray &args = QByteArray());
public:
void callback(char *id, int id_size, char *reply, int reply_size, int err);
protected:
bool m_isRuning;
QByteArray m_id;
};
#endif // GOPROXY_H
| 1,717
|
C++
|
.h
| 45
| 36.088889
| 83
| 0.685252
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,024
|
pluginsdialog.h
|
visualfc_liteide/liteidex/src/liteapp/pluginsdialog.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: pluginsdialog.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef PLUGINSDIALOG_H
#define PLUGINSDIALOG_H
#include <QDialog>
#include "liteapi/liteapi.h"
namespace Ui {
class PluginsDialog;
}
class QStandardItemModel;
class QStandardItem;
class PluginManager;
class PluginsDialog : public QDialog
{
Q_OBJECT
public:
explicit PluginsDialog(LiteApi::IApplication *app, QWidget *parent = 0);
~PluginsDialog();
void appendInfo(const LiteApi::PluginInfo *info);
public slots:
void itemChanged(QStandardItem*);
private:
LiteApi::IApplication *m_liteApp;
Ui::PluginsDialog *ui;
QStandardItemModel *m_model;
};
#endif // PLUGINSDIALOG_H
| 1,691
|
C++
|
.h
| 47
| 34.06383
| 82
| 0.700672
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,025
|
optionmanager.h
|
visualfc_liteide/liteidex/src/liteapp/optionmanager.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: optionmanager.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef OPTIONMANAGER_H
#define OPTIONMANAGER_H
#include "liteapi/liteapi.h"
using namespace LiteApi;
class OptionsBrowser;
class BrowserEditorImpl;
class OptionManager : public IOptionManager
{
Q_OBJECT
public:
OptionManager();
~OptionManager();
virtual bool initWithApp(IApplication *app);
virtual void addFactory(IOptionFactory *factory);
virtual void removeFactory(IOptionFactory *factory);
virtual QList<IOptionFactory*> factoryList() const;
virtual void emitApplyOption(const QString &mimetype);
public slots:
virtual void exec(const QString &mimeType = QString());
void loadOption(const QString &opt);
protected:
OptionsBrowser *m_browser;
QList<IOptionFactory*> m_factoryList;
};
#endif // OPTIONMANAGER_H
| 1,845
|
C++
|
.h
| 47
| 37.12766
| 82
| 0.709426
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,026
|
sidewindowstyle.h
|
visualfc_liteide/liteidex/src/liteapp/sidewindowstyle.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: sidewindowstyle.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef SIDEWINDOWSTYLE_H
#define SIDEWINDOWSTYLE_H
#include "windowstyle.h"
#include "tooldockwidget.h"
class SideDockWidget;
class OutputDockWidget;
class QToolButton;
struct SideActionState
{
QWidget *toolBtn;
QWidget *widget;
QList<QWidget*> widgetList;
QList<QAction*> widgetActions;
QString id;
QString title;
};
class SideDockWidget : public BaseDockWidget
{
Q_OBJECT
public:
explicit SideDockWidget(QSize iconSize, QWidget *parent = 0);
void createMenu(Qt::DockWidgetArea area);
void setCheckedAction(QAction *action);
void setActions(const QMap<QAction*,SideActionState*> &m);
virtual void setWindowTitle(const QString &text);
signals:
void moveActionTo(Qt::DockWidgetArea,Qt::DockWidgetArea, QAction*);
void currenActionChanged(QAction *org, QAction *act);
protected slots:
void moveAction();
void actionChanged();
virtual void activeComboBoxIndex(int);
virtual void topLevelChanged(bool b);
protected:
QMenu *m_menu;
QMenu *m_moveMenu;
Qt::DockWidgetArea m_area;
QString m_areaInfo;
};
class BaseActionBar : public QObject
{
Q_OBJECT
public:
BaseActionBar(QObject *parent) : QObject(parent)
{}
virtual void addAction(QAction *action, QWidget *widget, const QString &id, const QString &title, QList<QAction*> widgetActions, QList<QWidget*> widgetList) = 0;
virtual void removeAction(QAction *action) = 0;
virtual QAction *findToolAction(QWidget *widget) const = 0;
virtual QToolBar *toolBar() const = 0;
virtual QMap<QAction*,SideActionState*> actionMap() const = 0;
};
class SideActionBar : public BaseActionBar
{
Q_OBJECT
public:
SideActionBar(QSize iconSize, QMainWindow *window, Qt::DockWidgetArea area);
virtual ~SideActionBar();
void addAction(QAction *action, QWidget *widget, const QString &id, const QString &title, QList<QAction*> widgetActions, QList<QWidget*> widgetList);
void removeAction(QAction *action);
QAction *findToolAction(QWidget *widget) const;
virtual QToolBar *toolBar() const { return m_toolBar; }
virtual QMap<QAction*,SideActionState*> actionMap() const { return m_actionStateMap; }
void updateAction(QAction *action);
void setShowToolBar(bool visible);
signals:
void moveActionTo(Qt::DockWidgetArea,Qt::DockWidgetArea,QAction*);
protected slots:
void setHideToolBar(bool b);
void dockVisible(bool);
void toggledAction(bool b);
void currenActionChanged(QAction *org, QAction *act);
protected:
QSize m_iconSize;
QMainWindow *m_window;
Qt::DockWidgetArea m_area;
QToolBar *m_toolBar;
//QAction *spacerAct;
QList<SideDockWidget*> m_dockList;
QMap<QAction*,SideActionState*> m_actionStateMap;
bool m_bHideToolBar;
QString dockWidgetObjName(const QString &id) const;
};
class OutputActionBar : public BaseActionBar
{
Q_OBJECT
public:
OutputActionBar(QSize iconSize, QMainWindow *window, Qt::DockWidgetArea m_area = Qt::BottomDockWidgetArea);
virtual ~OutputActionBar();
OutputDockWidget *dockWidget() const;
void addAction(QAction *action, QWidget *widget, const QString &id, const QString &title, QList<QAction*> widgetActions, QList<QWidget*> widgetList);
void removeAction(QAction *action);
void setHideToolBar(bool b);
virtual QAction *findToolAction(QWidget *widget) const;
virtual QToolBar *toolBar() const { return m_toolBar; }
virtual QMap<QAction*,SideActionState*> actionMap() const { return m_actionStateMap; }
signals:
void moveActionTo(Qt::DockWidgetArea,Qt::DockWidgetArea,QAction*);
protected slots:
void dockVisible(bool);
void toggledAction(bool b);
protected:
Qt::DockWidgetArea m_area;
QToolBar *m_toolBar;
// QAction *spacerAct;
OutputDockWidget *m_dock;
QMap<QAction*,SideActionState*> m_actionStateMap;
bool m_bHideToolBar;
};
class SideWindowStyle : public IWindowStyle
{
Q_OBJECT
public:
SideWindowStyle(LiteApi::IApplication *app, QMainWindow *window, QObject *parent = 0);
~SideWindowStyle();
virtual void createToolWindowMenu();
virtual QAction *addToolWindow(LiteApi::IApplication *app, Qt::DockWidgetArea area, QWidget *widget, const QString &id, const QString &title, bool split = false,
QList<QAction*> widgetActions = QList<QAction*>(),
QList<QWidget*> widgetList = QList<QWidget*>());
virtual void removeToolWindow(QAction *action);
virtual QAction *findToolWindow(QWidget *widget);
virtual void moveToolWindow(Qt::DockWidgetArea from, Qt::DockWidgetArea to, QAction *action, bool split);
virtual void saveToolState() const;
virtual void restoreToolsState();
virtual void updateConer();
void restoreHideToolWindows();
void restoreHideSideToolWindows();
void hideSideToolWindows();
public slots:
void moveActionTo(Qt::DockWidgetArea from, Qt::DockWidgetArea to, QAction*action);
virtual void hideOutputWindow();
virtual void showOrHideToolWindow();
virtual void hideAllToolWindows();
void hideSideBar(bool);
void toggledSideBar(bool);
protected:
LiteApi::IApplication *m_liteApp;
QMainWindow *m_mainWindow;
SideActionBar *m_leftSideBar;
SideActionBar *m_rightSideBar;
OutputActionBar *m_outputBar;
QMap<Qt::DockWidgetArea, BaseActionBar*> m_actionBarMap;
QStatusBar *m_statusBar;
QAction *m_hideSideAct;
QList<QAction*> m_hideActionList;
QList<QAction*> m_hideSideActionList;
QMenu *m_sideMenu;
QMenu *m_outputMenu;
bool m_useShortcuts;
};
#endif // SIDEWINDOWSTYLE_H
| 6,743
|
C++
|
.h
| 172
| 35.273256
| 165
| 0.726524
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,027
|
htmlwidgetmanager.h
|
visualfc_liteide/liteidex/src/liteapp/htmlwidgetmanager.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: htmlwidgetmanager.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef HTMLWIDGETMANAGER_H
#define HTMLWIDGETMANAGER_H
#include "liteapi/litehtml.h"
using namespace LiteApi;
class HtmlWidgetManager : public IHtmlWidgetManager
{
Q_OBJECT
public:
explicit HtmlWidgetManager(QObject *parent = 0);
virtual QStringList classNameList() const;
virtual void addFactory(IHtmlWidgetFactory *factory);
virtual QList<IHtmlWidgetFactory*> factoryList() const;
virtual bool setDefaultClassName(const QString &className);
virtual QString defaultClassName() const;
virtual IHtmlWidget *create(QObject *parent);
virtual IHtmlWidget *createByName(QObject *parent, const QString &className);
virtual IHtmlDocument *createDocument(QObject *parent);
virtual IHtmlDocument *createDocumentByName(QObject *parent, const QString &className);
protected:
QList<IHtmlWidgetFactory*> m_factoryList;
QString m_defaultClassName;
};
#endif // HTMLWIDGETMANAGER_H
| 2,005
|
C++
|
.h
| 45
| 42.288889
| 91
| 0.724808
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,028
|
multifolderwindow.h
|
visualfc_liteide/liteidex/src/liteapp/multifolderwindow.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2018 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: multifolderwindow.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef FOLDERWINDOW_H
#define FOLDERWINDOW_H
#include <QObject>
#include "folderview/multifolderview.h"
#include "filemanager.h"
class MultiFolderWindow : public IFolderWindow
{
Q_OBJECT
public:
explicit MultiFolderWindow(LiteApi::IApplication *app, QObject *parent = 0);
virtual ~MultiFolderWindow();
virtual QString id() const;
virtual QWidget *widget() const;
virtual QStringList folderList() const;
virtual void setFolderList(const QStringList &folders);
virtual void addFolderList(const QString &folder);
virtual void closeAllFolders();
virtual void setShowHideFiles(bool b);
virtual void setShowDetails(bool b);
virtual void setSyncEditor(bool b);
public slots:
void doubleClickedFolderView(const QModelIndex &index);
void enterKeyPressedFolderView(const QModelIndex &index);
void currentEditorChanged(LiteApi::IEditor *editor);
void aboutToShowFolderContextMenu(QMenu *menu, LiteApi::FILESYSTEM_CONTEXT_FLAG flag, const QFileInfo &info);
protected:
LiteApi::IApplication *m_liteApp;
MultiFolderView *m_folderListView;
bool m_bSyncEditor;
};
#endif // FOLDERWINDOW_H
| 2,234
|
C++
|
.h
| 53
| 39.641509
| 113
| 0.722095
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,030
|
cdrv.h
|
visualfc_liteide/liteidex/src/liteapp/cdrv.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: cdrv.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef CDRV_H
#define CDRV_H
#include "liteapp_global.h"
typedef void (*DRV_CALLBACK)(char *id, int id_size, char *reply, int len, int err, void *ctx);
typedef int (*GODRV_CALL)(char* id,int id_size, char* args, int args_size, DRV_CALLBACK cb, void *ctx);
extern "C"
int LITEIDESHARED_EXPORT cdrv_main(int argc, char **argv);
extern "C"
void LITEIDESHARED_EXPORT cdrv_init(void *fn);
extern "C"
void LITEIDESHARED_EXPORT cdrv_cb(DRV_CALLBACK cb, char *id, char *reply, int size, int err, void* ctx);
#endif // CDRV_H
| 1,593
|
C++
|
.h
| 34
| 45.617647
| 104
| 0.682141
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,031
|
editormanager.h
|
visualfc_liteide/liteidex/src/liteapp/editormanager.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: editormanager.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef EDITORMANAGER_H
#define EDITORMANAGER_H
#include "liteapi/liteapi.h"
#include "colorstyle/colorstyle.h"
#include "tabwidget/litetabwidget.h"
#include <QPointer>
using namespace LiteApi;
class LiteTabWidget;
class QStackedWidget;
class QToolButton;
class QLabel;
class QStandardItemModel;
class OpenEditorsWidget;
class QTreeView;
struct EditLocation {
QString filePath;
QByteArray state;
};
class EditorManager : public IEditorManager
{
Q_OBJECT
public:
~EditorManager();
virtual bool initWithApp(IApplication *app);
void createActions();
public:
virtual IEditor *openEditor(const QString &fileName, const QString &mimeType);
virtual IEditor *openEditorByFactory(const QString &fileName, const QString &mimeType, const QString &factoryId);
virtual void addFactory(IEditorFactory *factory);
virtual void removeFactory(IEditorFactory *factory);
virtual QList<IEditorFactory*> factoryList() const;
virtual QStringList mimeTypeList() const;
public:
virtual QWidget *widget();
virtual IEditor *currentEditor() const;
virtual void setCurrentEditor(IEditor *editor, bool ignoreNavigationHistory = false);
virtual IEditor *findEditor(const QString &fileName, bool canonical) const;
virtual QList<IEditor*> editorList() const;
virtual QAction *registerBrowser(IEditor *editor);
virtual void activeBrowser(IEditor *editor);
virtual void addNavigationHistory(IEditor *editor = 0,const QByteArray &saveState = QByteArray());
virtual void cutForwardNavigationHistory();
virtual void loadColorStyleScheme(const QString &file);
virtual const ColorStyleScheme *colorStyleScheme() const;
virtual void addEditContext(IEditContext *context);
virtual void removeEditContext(IEditContext *context);
virtual void updateEditInfo(const QString &info);
protected:
void addEditor(IEditor *editor);
bool eventFilter(QObject *target, QEvent *event);
QString tabContextFilePath() const;
public:
QList<IEditor*> sortedEditorList() const;
public slots:
virtual bool saveEditor(IEditor *editor = 0, bool emitAboutSave = true);
virtual bool saveEditorAs(IEditor *editor = 0);
virtual bool saveAllEditors(bool emitAboutSave = true);
virtual bool closeEditor(IEditor *editor = 0);
virtual bool closeAllEditors();
void tabContextClose();
void tabContextCloseOthers();
void tabContextCloseLefts();
void tabContextCloseRights();
void tabContextCloseAll();
void tabContextCloseOtherFolderFiles();
void tabContextCloseSameFolderFiles();
void tabContextCopyPathToClipboard();
void tabContextShowInExplorer();
void tabContextOpenInShell();
void tabContextOpenInTerminal();
void goBack();
void goForward();
void updateNavigatorActions();
void updateCurrentPositionInNavigationHistory();
void moveToNewWindow();
void focusChanged(QWidget *old,QWidget *now);
void aboutToShowListMenu();
void triggeredListAction(QAction *act);
void applyOption(QString);
void appIdle(int sec);
void closeEditorForTab(int index);
void updateEditorMenu(IEditContext *context);
signals:
void tabAddRequest();
void doubleClickedTab();
protected slots:
void editorTabChanged(int);
void editorTabCloseRequested(int);
void modificationChanged(bool);
void toggleBrowserAction(bool);
void gotoNextTab();
void gotoPrevTab();
protected:
QMenu *m_nullMenu;
QList<EditLocation> m_navigationHistory;
int m_currentNavigationHistoryPosition;
QWidget *m_widget;
LiteTabWidget *m_editorTabWidget;
QMap<QWidget *, IEditor *> m_widgetEditorMap;
QStandardItemModel *m_editorModel;
OpenEditorsWidget *m_openEditorWidget;
QPointer<IEditor> m_currentEditor;
QList<IEditorFactory*> m_factoryList;
QMap<IEditor*,QAction*> m_browserActionMap;
QMap<QWidget*,IEditContext*> m_editContextMap;
QAction *m_goBackAct;
QAction *m_goForwardAct;
QAction *m_gotoNextTab;
QAction *m_gotoPrevTab;
QMenu *m_listMenu;
QActionGroup *m_listGroup;
QMenu *m_editMenu;
QMenu *m_tabContextFileMenu;
QMenu *m_tabContextNofileMenu;
ColorStyleScheme *m_colorStyleScheme;
int m_tabContextIndex;
int m_maxEditorCount;
QLabel *m_lineInfo;
bool m_isAutoIdleSaveDocuments;
bool m_updateMenuInFocus;
bool m_mouseExtNavigate;
int m_autoIdleSaveDocumentsTime;
};
#endif // EDITORMANAGER_H
| 5,664
|
C++
|
.h
| 146
| 35.143836
| 117
| 0.730357
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,032
|
macsupport.h
|
visualfc_liteide/liteidex/src/liteapp/macsupport.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: macsupport.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef MACSUPPORT_H
#define MACSUPPORT_H
#include <QMainWindow>
#include <QSysInfo>
class MacSupport
{
public:
static void setFullScreen(QMainWindow *window);
static bool isLionOrHigh()
{
return QSysInfo::MacintoshVersion > QSysInfo::MV_10_6;
}
};
#endif // MACSUPPORT_H
| 1,373
|
C++
|
.h
| 36
| 36.361111
| 82
| 0.676669
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,033
|
optionsbrowser.h
|
visualfc_liteide/liteidex/src/liteapp/optionsbrowser.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: optionsbrowser.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef OPTIONSBROWSER_H
#define OPTIONSBROWSER_H
#include <QWidget>
#include <QDialog>
#include "liteapi/liteapi.h"
namespace Ui {
class OptionsWidget;
}
class QListWidgetItem;
class QAbstractButton;
class OptionsBrowser : public QDialog//LiteApi::IBrowserEditor
{
Q_OBJECT
public:
explicit OptionsBrowser(LiteApi::IApplication *app, QWidget *parent = 0);
~OptionsBrowser();
virtual QString name() const;
virtual QString mimeType() const;
void addOption(LiteApi::IOption *opt);
int execute(const QString &mimeType);
QString currenMimeType() const;
signals:
void applyOption(QString);
protected slots:
void itemSelectionChanged();
private slots:
void clicked(QAbstractButton*);
void applay();
private:
LiteApi::IApplication *m_liteApp;
Ui::OptionsWidget *ui;
QMap<QListWidgetItem*,LiteApi::IOption*> m_widgetOptionMap;
};
#endif // OPTIONSBROWSER_H
| 2,005
|
C++
|
.h
| 56
| 33.482143
| 82
| 0.703551
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,034
|
textbrowserhtmlwidget.h
|
visualfc_liteide/liteidex/src/liteapp/textbrowserhtmlwidget.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: textbrowserhtmlwidget.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef TEXTBROWSERHTMLWIDGET_H
#define TEXTBROWSERHTMLWIDGET_H
#include "liteapi/litehtml.h"
using namespace LiteApi;
class QTextBrowser;
// IHtmlWidget implementation backed by QTextBrowser — a plain rich-text HTML
// view (no JavaScript), presumably the fallback when no web engine is built in.
class TextBrowserHtmlWidget : public IHtmlWidget
{
Q_OBJECT
public:
TextBrowserHtmlWidget(QObject *parent);
public:
virtual QWidget *widget() const;
virtual QString className() const;
// Search paths used by QTextBrowser to resolve relative resources (images, css).
virtual void setSearchPaths(const QStringList &paths);
virtual void setHtml(const QString &html, const QUrl &url);
virtual QUrl url() const;
virtual void clear();
virtual void scrollToAnchor(const QString &anchor);
// Scrollbar accessors proxy the horizontal/vertical bar chosen by orientation.
virtual void setScrollBarValue(Qt::Orientation orientation, int value) ;
virtual int scrollBarValue(Qt::Orientation orientation) const;
virtual int scrollBarMinimum(Qt::Orientation orientation) const;
virtual int scrollBarMaximum(Qt::Orientation orientation) const;
virtual QString selectedText() const;
virtual bool findText(const QString & exp, QTextDocument::FindFlags options);
public slots:
#ifndef QT_NO_PRINTER
virtual void print(QPrinter *printer);
#endif
protected:
QTextBrowser *m_widget; // underlying browser widget returned by widget()
QUrl m_url;             // presumably the url passed to setHtml() — confirm in .cpp
};
// IHtmlDocument implementation backed by an off-screen QTextBrowser,
// used for HTML -> text/print conversion without showing a widget.
class TextBrowserHtmlDocument : public IHtmlDocument
{
Q_OBJECT
public:
TextBrowserHtmlDocument(QObject *parent);
virtual ~TextBrowserHtmlDocument();
public:
virtual void setHtml(const QString &html, const QUrl &url);
public slots:
#ifndef QT_NO_PRINTER
virtual void print(QPrinter *printer);
#endif
virtual QString toHtml () const;
virtual QString toPlainText () const;
protected:
QTextBrowser *m_doc; // hidden browser holding the document content
};
// Factory registered with the HtmlWidgetManager; creates the QTextBrowser-based
// widget/document implementations declared above.
class TextBrowserHtmlWidgetFactory : public IHtmlWidgetFactory
{
Q_OBJECT
public:
TextBrowserHtmlWidgetFactory(QObject *parent = 0);
virtual QString className() const;
virtual IHtmlWidget *create(QObject *parent);
virtual IHtmlDocument *createDocument(QObject *parent);
};
#endif // TEXTBROWSERHTMLWIDGET_H
| 2,991
|
C++
|
.h
| 81
| 34.296296
| 82
| 0.733977
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,035
|
liteapp.h
|
visualfc_liteide/liteidex/src/liteapp/liteapp.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: liteapp.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef LITEAPP_H
#define LITEAPP_H
#include "liteapi/liteapi.h"
#include "textoutput/textoutput.h"
#include "extension/extension.h"
#include "goproxy.h"
using namespace LiteApi;
class MainWindow;
class PluginManager;
class EditorManager;
class FileManager;
class ProjectManager;
class ActionManager;
class MimeTypeManager;
class OptionManager;
class ToolWindowManager;
class HtmlWidgetManager;
class QuickOpenManager;
class RecentManager;
class QSettings;
class QSplitter;
class QComboBox;
class LiteAppOptionFactory;
class AppIdleTimer;
// Central application object: implements LiteApi::IApplication and owns all
// managers (editors, files, projects, actions, options, tool windows, ...).
// One LiteApp instance exists per top-level window/session; the static
// s_appList tracks all of them.
class LiteApp : public IApplication
{
Q_OBJECT
public:
// Static path helpers resolve installation-relative directories.
static QString getRootPath();
static QString getToolPath();
static QString getPluginPath();
static QString getResoucePath(); // NOTE(review): typo for "Resource"; renaming would break callers.
static QString getStoragePath();
// Creates and loads a new application window for sessionName; base (if any)
// is the instance the new one is spawned from.
static IApplication* NewApplication(const QString &sessionName, IApplication *base = 0);
static PluginManager *pluginManager();
static QList<IApplication*> appList();
public:
LiteApp();
virtual ~LiteApp();
// IApplication interface: accessors for the extension host and sub-managers.
virtual IExtension *extension();
virtual IApplication *newInstance(const QString &sessionName);
virtual QList<IApplication*> instanceList() const;
virtual bool hasGoProxy() const;
virtual IGoProxy *createGoProxy(QObject *parent);
virtual IProjectManager *projectManager();
virtual IEditorManager *editorManager();
virtual IFileManager *fileManager();
virtual IActionManager *actionManager();
virtual IMimeTypeManager *mimeTypeManager();
virtual IOptionManager *optionManager();
virtual IToolWindowManager *toolWindowManager();
virtual IHtmlWidgetManager *htmlWidgetManager();
virtual IRecentManager *recentManager();
virtual QMainWindow *mainWindow() const;
virtual QSettings *settings();
// Mutable map of application-wide values shared between plugins.
virtual QMap<QString,QVariant> &globalCookie();
// Path accessors (instance-level counterparts of the static helpers).
virtual QString rootPath() const;
virtual QString applicationPath() const;
virtual QString toolPath() const;
virtual QString resourcePath() const;
virtual QString pluginPath() const;
virtual QString storagePath() const;
// IDE identity strings.
virtual QString ideVersion() const;
virtual QString ideFullName() const;
virtual QString ideName() const;
virtual QString ideCopyright() const;
virtual QList<IPlugin*> pluginList() const;
// Session handling: load/save named window sessions and global state.
virtual void loadSession(const QString &session);
virtual void saveSession(const QString &session);
virtual QStringList sessionList() const;
virtual QString currentSession() const;
virtual void loadState();
virtual void saveState();
// Appends a line to the log output pane; error=true marks it as an error.
virtual void appendLog(const QString &model, const QString &log = QString(), bool error = false);
// FIX(review): restored "&param" — the extracted source had the mojibake
// "¶m" (an HTML-entity-garbled "&param"), which is not valid C++.
virtual void sendBroadcast(const QString &module, const QString &id, const QVariant &param = QVariant());
public:
// Startup sequence helpers, called during construction/load.
void load(const QString &sessionName, IApplication *baseApp);
void createActions();
void createMenus();
void createToolBars();
void loadPlugins();
void loadMimeType();
void initPlugins();
void setPluginPath(const QString &path);
void setResourcePath(const QString &path);
protected slots:
void goproxyDone(const QByteArray &reply);
void dbclickLogOutput(QTextCursor);
void projectReloaded();
void currentProjectChanged(LiteApi::IProject *project);
void currentEditorChanged(LiteApi::IEditor *editor);
void editorModifyChanged(bool);
void cleanup();
void aboutPlugins();
void escape();
void newWindow();
void closeWindow();
void exit();
void applyOption(QString id);
protected:
// Session/path state.
QString m_currentSession;
QString m_rootPath;
QString m_applicationPath;
QString m_toolPath;
QString m_pluginPath;
QString m_resourcePath;
QString m_storagePath;
QSettings *m_settings;
// Owned managers and UI.
Extension *m_extension;
MainWindow *m_mainwindow;
ToolWindowManager *m_toolWindowManager;
HtmlWidgetManager *m_htmlWidgetManager;
RecentManager *m_recentManager;
ActionManager *m_actionManager;
ProjectManager *m_projectManager;
EditorManager *m_editorManager;
FileManager *m_fileManager;
MimeTypeManager *m_mimeTypeManager;
OptionManager *m_optionManager;
TextOutput *m_logOutput;
QAction *m_logAct;
LiteAppOptionFactory *m_liteAppOptionFactory;
QList<IPlugin*> m_pluginList;
AppIdleTimer *m_idleTimer;
public:
// Process-wide shared state across all LiteApp instances.
static QMap<QString,QVariant> s_cookie;
static QList<IApplication*> s_appList;
protected:
// Standard menu/toolbar actions.
QAction *m_newAct;
QAction *m_openFileAct;
QAction *m_openFolderAct;
QAction *m_openFolderNewWindowAct;
QAction *m_closeAllFolderAct;
QAction *m_newWindow;
QAction *m_closeWindow;
QAction *m_closeAct;
QAction *m_closeAllAct;
QAction *m_openProjectAct;
QAction *m_saveProjectAct;
QAction *m_closeProjectAct;
QAction *m_saveAct;
QAction *m_saveAsAct;
QAction *m_saveAllAct;
QAction *m_exitAct;
QAction *m_optionAct;
QAction *m_aboutAct;
QAction *m_aboutPluginsAct;
QAction *m_fullScreent; // NOTE(review): typo for "m_fullScreenAct"; identifier kept for the .cpp.
QToolBar *m_stdToolBar;
QMenu *m_fileMenu;
QMenu *m_viewMenu;
QMenu *m_helpMenu;
protected:
GoProxy *m_goProxy; // helper process bridge for Go tooling
};
#endif // LITEAPP_H
| 6,416
|
C++
|
.h
| 178
| 32.202247
| 109
| 0.711325
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,036
|
folderprojectfactory.h
|
visualfc_liteide/liteidex/src/liteapp/folderprojectfactory.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: folderprojectfactory.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef FOLDERPROJECTFACTORY_H
#define FOLDERPROJECTFACTORY_H
#include "liteapi/liteapi.h"
// IProjectFactory that opens plain directories as "folder projects"
// (no build file required).
class FolderProjectFactory : public LiteApi::IProjectFactory
{
public:
// NOTE(review): "parnet" is a typo for "parent" — harmless in a declaration.
FolderProjectFactory(LiteApi::IApplication *app, QObject *parnet);
// MIME types this factory can open (stored in m_mimeTypes).
virtual QStringList mimeTypes() const;
virtual LiteApi::IProject *open(const QString &fileName, const QString &mimeType);
// Fills targetInfo with build/run target data; returns whether info was found.
virtual bool findTargetInfo(const QString &fileName, const QString &mimetype, QMap<QString,QString>& targetInfo) const;
public:
LiteApi::IApplication *m_liteApp;
QStringList m_mimeTypes;
};
#endif // FOLDERPROJECTFACTORY_H
| 1,686
|
C++
|
.h
| 37
| 43.810811
| 124
| 0.710638
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,037
|
windowstyle.h
|
visualfc_liteide/liteidex/src/liteapp/windowstyle.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: windowstyle.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef WINDOWSTYLE_H
#define WINDOWSTYLE_H
#include <liteapi/liteapi.h>
// Abstract strategy for the main-window tool-dock layout. Concrete styles
// (e.g. SplitWindowStyle) decide how tool windows are docked, toggled and
// persisted.
class IWindowStyle : public QObject
{
Q_OBJECT
public:
IWindowStyle(QObject *parent) : QObject(parent) {}
virtual void createToolWindowMenu() = 0;
// Dock `widget` in `area` under `id`/`title`; returns the toggle action that
// represents it. `split` and the extra action/widget lists are style-specific.
virtual QAction *addToolWindow(LiteApi::IApplication *app, Qt::DockWidgetArea area, QWidget *widget, const QString &id, const QString &title,
bool split = false,
QList<QAction*> widgetActions = QList<QAction*>(),
QList<QWidget*> widgetList = QList<QWidget*>()) = 0;
virtual void removeToolWindow(QAction *action) = 0;
// NOTE(review): "wiget" is a typo for "widget" — parameter name only.
virtual QAction *findToolWindow(QWidget *wiget) = 0;
virtual void moveToolWindow(Qt::DockWidgetArea from, Qt::DockWidgetArea to,QAction *action,bool split) = 0;
// Persist / restore the dock layout (via QSettings, presumably — see impls).
virtual void saveToolState() const = 0;
virtual void restoreToolsState() = 0;
virtual void updateConer() = 0; // NOTE(review): typo for "updateCorner" — kept for overriders.
public slots:
virtual void hideOutputWindow() = 0;
virtual void showOrHideToolWindow() = 0;
virtual void hideAllToolWindows() = 0;
};
#endif // WINDOWSTYLE_H
| 2,202
|
C++
|
.h
| 47
| 42.425532
| 145
| 0.665737
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,038
|
mainwindow.h
|
visualfc_liteide/liteidex/src/liteapp/mainwindow.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: mainwindow.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef MAINWINDOW_H
#define MAINWINDOW_H
#include "liteapi/liteapi.h"
#include "toolmainwindow.h"
#include "windowstyle.h"
// Minimal abstract facade over the main window: exposes the QMainWindow,
// its tool-window manager, and full-screen / editor-state notifications.
class IMainWindow : public QObject
{
Q_OBJECT
public:
IMainWindow(QObject *parent) : QObject(parent) {}
virtual QMainWindow *mainWindow() = 0;
virtual LiteApi::IToolWindowManager *toolWindowManager() = 0;
signals:
void fullScreenStateChanged(bool b);
public slots:
virtual void setFullScreen(bool b) = 0;
virtual void editorModifyChanged(bool b) = 0;
virtual void currentEditorChanged(LiteApi::IEditor *editor) = 0;
};
using namespace LiteApi;
class QSplitter;
// The LiteIDE main window. Extends ToolMainWindow with full-screen handling,
// drag&drop file opening, title updates, and per-window menu bookkeeping
// (the static maps track actions/menus shared across all open windows).
class MainWindow : public ToolMainWindow
{
Q_OBJECT
public:
MainWindow(IApplication *app, QWidget *parent = 0);
~MainWindow();
public:
QSplitter *splitter();
void setWindowStyle(IWindowStyle *style);
protected:
// Qt event overrides: close confirmation, drag&drop of files, state changes.
virtual void closeEvent(QCloseEvent *event);
virtual void dropEvent(QDropEvent *event);
virtual void dragEnterEvent(QDragEnterEvent *event);
virtual void changeEvent(QEvent *e);
virtual bool event(QEvent *event);
signals:
void fullScreenStateChanged(bool b);
public slots:
void setFullScreen(bool b);
void currentEditorChanged(LiteApi::IEditor *editor);
void editorModifyChanged(LiteApi::IEditor *editor, bool b);
void about();
void triggeredWindowsAct();
protected:
IApplication *m_liteApp;
QAction *m_aboutAct;
QSplitter *m_mainSplitter;
bool m_windowClosedCheck; // guards duplicate close handling — confirm in .cpp
protected:
void setActiveWindowAction(QWidget *window);
// Compose the title from editor name/path and modified state.
void setWindowTitle(const QString &name, const QString &filePath, bool isModify);
// Shared across windows: per-window menu actions and the macOS dock menu.
static QMap<QWidget*, QAction *> s_windowActions;
static QMenu *s_macDocMenu;
};
#endif // MAINWINDOW_H
| 2,843
|
C++
|
.h
| 78
| 33.730769
| 85
| 0.710402
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,039
|
splitfolderwindow.h
|
visualfc_liteide/liteidex/src/liteapp/splitfolderwindow.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2018 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: splitfolderwindow.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef SPLITFOLDERWINDOW_H
#define SPLITFOLDERWINDOW_H
#include "filemanager.h"
#include "folderview/folderview.h"
class QStackedWidget;
class QTreeWidget;
class QSplitter;
class QStandardItemModel;
class SplitFolderView;
// IFolderWindow implementation that shows open folders in a splitter:
// a tree of root folders on one side and a stacked set of folder views
// on the other, kept in sync with the current selection/editor.
class SplitFolderWindow : public IFolderWindow
{
Q_OBJECT
public:
SplitFolderWindow(LiteApi::IApplication *app, QObject *parent = 0);
virtual ~SplitFolderWindow();
virtual QString id() const;
virtual QWidget *widget() const;
// Folder list management (IFolderWindow contract).
virtual QStringList folderList() const;
virtual void setFolderList(const QStringList &folders);
virtual void addFolderList(const QString &folder);
virtual void closeAllFolders();
// View options: hidden files, details columns, sync with active editor.
virtual void setShowHideFiles(bool b);
virtual void setShowDetails(bool b);
virtual void setSyncEditor(bool b);
public slots:
void currentIndexChanged(const QModelIndex &index,const QModelIndex &prev);
void closeFolderIndex(const QModelIndex &index);
void reloadFolderIndex(const QModelIndex &index);
void currentEditorChanged(LiteApi::IEditor *editor);
void doubleClickedFolderView(const QModelIndex &index);
void enterKeyPressedFolderView(const QModelIndex &index);
void aboutToShowFolderContextMenu(QMenu *menu, LiteApi::FILESYSTEM_CONTEXT_FLAG flag, const QFileInfo &info);
protected:
LiteApi::IApplication *m_liteApp;
QSplitter *m_spliter; // NOTE(review): typo for "m_splitter"; kept for the .cpp.
SplitFolderView *m_tree;      // root-folder tree (left side)
QStandardItemModel *m_model;  // model backing m_tree
QStackedWidget *m_stacked;    // one page per open folder (right side)
QStringList m_folderList;
QDir::Filters m_filters;      // current hidden-files filter
bool m_bShowDetails;
bool m_bSyncEditor;
protected:
void addFolderImpl(const QString &folder);
// Find the stacked-widget page index matching a tree index; -1 style sentinel
// not visible here — confirm in .cpp.
int findInStacked(const QModelIndex &index);
};
// Tree view listing the root folders of a SplitFolderWindow, with a
// context menu for open/close/reload of each root.
class SplitFolderView : public BaseFolderView
{
Q_OBJECT
public:
SplitFolderView(LiteApi::IApplication *app, QWidget *parent = 0);
void addRootPath(const QString &folder);
void clear();
signals:
// Requests handled by the owning SplitFolderWindow.
void closeFolderIndex(const QModelIndex &index);
void reloadFolderIndex(const QModelIndex &index);
public slots:
void customContextMenuRequested(const QPoint &pos);
virtual void openFolder();
virtual void closeFolder();
virtual void reloadFolder();
protected:
QStandardItemModel *m_model;
QMenu *m_contextMenu;
};
#endif // SPLITFOLDERWINDOW_H
| 3,339
|
C++
|
.h
| 88
| 34.977273
| 113
| 0.738755
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,040
|
liteappoption.h
|
visualfc_liteide/liteidex/src/liteapp/liteappoption.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: liteappoption.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef LITEAPPOPTION_H
#define LITEAPPOPTION_H
#include "liteapi/liteapi.h"
namespace Ui {
class LiteAppOption;
}
class QStandardItemModel;
class QStandardItem;
// The "LiteApp" page of the options dialog: general IDE settings plus the
// keyboard-shortcut editor (backed by m_keysModel).
class LiteAppOption : public LiteApi::IOption
{
Q_OBJECT
public:
explicit LiteAppOption(LiteApi::IApplication *app, QObject *parent = 0);
~LiteAppOption();
virtual QWidget *widget();
virtual QString name() const;
virtual QString mimeType() const;
// Persist / reload the page's settings (IOption contract).
virtual void save();
virtual void load();
public slots:
// Shortcut-editor handlers.
void reloadShortcuts();
void shortcutsChanaged(QStandardItem*); // NOTE(review): typo for "shortcutsChanged"; slot name bound in .cpp.
void resetAllShortcuts();
void resetShortcuts();
void importShortcuts();
void exportShortcuts();
void autoLoadLastSessionToggled(bool b);
void autoIdleSaveDocumentsToggled(bool);
private:
LiteApi::IApplication *m_liteApp;
QWidget *m_widget;
Ui::LiteAppOption *ui;
QStandardItemModel *m_keysModel; // rows of action-id / shortcut pairs
//GopherLib libgopher;
};
#endif // LITEAPPOPTION_H
| 2,061
|
C++
|
.h
| 58
| 32.896552
| 82
| 0.696894
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,041
|
actionmanager.h
|
visualfc_liteide/liteidex/src/liteapp/actionmanager.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: actionmanager.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef ACTIONMANAGER_H
#define ACTIONMANAGER_H
#include "liteapi/liteapi.h"
using namespace LiteApi;
// Named registry of actions for one plugin/component: maps action ids to
// ActionInfo (default + current shortcuts) so shortcuts can be listed and
// customized per context.
class ActionContext : public IActionContext {
public:
ActionContext(LiteApi::IApplication *app, const QString &name);
virtual ~ActionContext();
virtual QString contextName() const;
// Register `act` under `id` with default key sequence `defks`;
// `standard` marks it as a stock (non-removable) binding — confirm in .cpp.
virtual void regAction(QAction *act, const QString &id, const QString &defks, bool standard = false);
virtual void regAction(QAction *act, const QString &id, const QKeySequence::StandardKey &def);
virtual QStringList actionKeys() const;
virtual ActionInfo *actionInfo(const QString &id) const;
virtual void setActionShortcuts(const QString &id, const QString &shortcuts);
protected:
LiteApi::IApplication *m_liteApp;
QString m_name;
QMap<QString,ActionInfo*> m_actionInfoMap;
};
// Owns the menu bar, toolbars and view-menu structure, and hands out
// per-object ActionContexts for shortcut registration.
class ActionManager : public IActionManager
{
Q_OBJECT
public:
ActionManager(QObject *parent = 0);
virtual ~ActionManager();
virtual bool initWithApp(IApplication *app);
// Menu registry: create/lookup/remove menus by string id.
virtual QMenu *insertMenu(const QString &id, const QString &title, const QString &idBefore = QString());
virtual QMenu *loadMenu(const QString &id);
virtual void removeMenu(QMenu *menu);
virtual QList<QString> menuList() const;
// Toolbar registry, parallel to the menu registry.
virtual QToolBar *insertToolBar(const QString &id, const QString &title, const QString &before = QString());
virtual void insertToolBar(QToolBar *toolBar,const QString &before = QString());
virtual QToolBar *loadToolBar(const QString &id);
virtual void removeToolBar(QToolBar* toolBar);
virtual QList<QString> toolBarList() const;
// View-menu composition helpers (separators define named groups).
virtual void insertViewMenu(VIEWMENU_ACTION_POS pos, QAction *act);
virtual void setViewMenuSeparator(const QString &sepid, bool group);
virtual void insertViewMenuAction(QAction *act, const QString &sepid);
virtual bool insertMenuActions(const QString &idMenu, const QString &idBeforeSep, bool newGroup, QList<QAction*> &actions);
// Returns (creating if needed) the action context for `obj`; contexts are
// removed automatically when obj is destroyed (see removeActionContext).
virtual IActionContext *getActionContext(QObject *obj, const QString &name);
virtual QStringList actionKeys() const;
virtual ActionInfo *actionInfo(const QString &id) const;
virtual void setActionShourtcuts(const QString &id, const QString &shortcuts); // NOTE(review): typo for "setActionShortcuts" — part of the public interface, keep.
virtual QStringList actionContextNameList() const;
virtual IActionContext *actionContextForName(const QString &name);
protected slots:
void removeActionContext(QObject *obj);
public:
// Shortcut-string helpers ("Ctrl+S; Ctrl+Shift+S" style lists — confirm format in .cpp).
static QList<QKeySequence> toShortcuts(const QString &ks);
static QString formatShortcutsString(const QString &ks);
static QString formatShortcutsNativeString(const QString &ks);
protected:
QMap<QString,QMenu*> m_idMenuMap;
QMap<QString,QToolBar*> m_idToolBarMap;
QMap<QString,QAction*> m_idSeperatorMap;
QMap<QString, QMap<QString,QAction*> > m_idMenuSepMap; // per-menu separator lookup
QMenu *m_viewMenu;
QAction *m_baseToolBarAct;
QAction *m_baseBrowserAct;
QMap<QObject*,IActionContext*> m_objContextMap;
};
#endif // ACTIONMANAGER_H
| 4,041
|
C++
|
.h
| 84
| 44.845238
| 128
| 0.737788
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,042
|
liteappoptionfactory.h
|
visualfc_liteide/liteidex/src/liteapp/liteappoptionfactory.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: liteappoptionfactory.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef LITEAPPOPTIONFACTORY_H
#define LITEAPPOPTIONFACTORY_H
#include "liteapi/liteapi.h"
// IOptionFactory that creates the core LiteApp option pages
// (see LiteAppOption) for the options dialog.
class LiteAppOptionFactory : public LiteApi::IOptionFactory
{
public:
LiteAppOptionFactory(LiteApi::IApplication *app, QObject *parent);
virtual QStringList mimeTypes() const;
virtual LiteApi::IOption *create(const QString &mimeType);
protected:
LiteApi::IApplication *m_liteApp;
};
#endif // LITEAPPOPTIONFACTORY_H
| 1,510
|
C++
|
.h
| 35
| 41.571429
| 82
| 0.702243
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,044
|
splitwindowstyle.h
|
visualfc_liteide/liteidex/src/liteapp/splitwindowstyle.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: splitwindowstyle.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef SPLITWINDOWSSTYLE_H
#define SPLITWINDOWSSTYLE_H
#include "windowstyle.h"
#include <QMap>
#include <QPointer>
#include <QAction>
class ActionGroup;
class QSplitter;
class RotationToolButton;
class SplitDockWidget;
// Lightweight alternative to QActionGroup: tracks a set of checkable actions
// and remembers which one is currently checked (QPointer guards deletion).
class ActionGroup : public QObject
{
Q_OBJECT
public:
ActionGroup(QObject *parent);
QList<QAction *> actions() const;
void addAction(QAction *act);
void removeAction(QAction *act);
QAction * checkedAction () const;
protected slots:
// Invoked when a member action's state changes; updates `current`.
void actionChanged();
protected:
QList<QAction *> m_actions;
QPointer<QAction> current;
};
// One edge of the split window style: a toolbar of tool-window buttons for a
// dock area, feeding two SplitDockWidgets (the "split" pair dock1/dock2).
class SplitActionToolBar : public QObject
{
Q_OBJECT
public:
SplitActionToolBar(QSize iconSize, QWidget *parent, Qt::DockWidgetArea area);
// Returns dock2 when split is true, else dock1 — presumably; confirm in .cpp.
SplitDockWidget *dock(bool split) const;
void addAction(QAction *action, const QString &title, bool split);
void removeAction(QAction *action, bool split);
void setHideToolBar(bool b);
signals:
// Asks the window style to relocate an action between dock areas.
void moveActionTo(Qt::DockWidgetArea,Qt::DockWidgetArea,QAction*,bool);
protected slots:
void dock1Visible(bool);
void dock2Visible(bool);
public:
Qt::DockWidgetArea area;
QToolBar *toolBar;
QAction *spacerAct; // separator/stretch between the two dock sections — confirm in .cpp
SplitDockWidget *dock1;
SplitDockWidget *dock2;
QMap<QAction*,QWidget*> m_actionWidgetMap; // action -> its toolbar button widget
bool bHideToolBar;
};
// Bookkeeping record for one tool window registered with SplitWindowStyle:
// everything needed to re-dock or restore it.
struct SplitActionState
{
QWidget *widget;              // the tool window's content widget
QList<QWidget*> widgetList;   // extra widgets passed at registration
QList<QAction*> widgetActions;
Qt::DockWidgetArea area;      // current dock area
bool split;                   // which half of the area's split it lives in
QString id;
QString title;
};
// IWindowStyle implementation using per-edge button toolbars with split dock
// widgets (the classic LiteIDE sidebar layout). Keeps a SplitActionToolBar
// per dock area and a SplitActionState per registered tool window.
class SplitWindowStyle : public IWindowStyle
{
Q_OBJECT
public:
SplitWindowStyle(LiteApi::IApplication *app, QMainWindow *window, QObject *parent = 0);
~SplitWindowStyle();
virtual void createToolWindowMenu();
virtual QAction *addToolWindow(LiteApi::IApplication *app, Qt::DockWidgetArea area, QWidget *widget, const QString &id, const QString &title, bool split = false,
QList<QAction*> widgetActions = QList<QAction*>(),
QList<QWidget*> widgetList = QList<QWidget*>() );
virtual void removeToolWindow(QAction *action);
// NOTE(review): "wiget" typo inherited from IWindowStyle::findToolWindow.
virtual QAction *findToolWindow(QWidget *wiget);
virtual void saveToolState() const;
virtual void restoreToolsState();
virtual void updateConer(); // NOTE(review): "updateCorner" typo inherited from the interface.
public slots:
void hideToolWindow(Qt::DockWidgetArea area = Qt::BottomDockWidgetArea);
void showOrHideToolWindow();
void hideAllToolWindows();
void hideOutputWindow();
// Re-checks the actions stashed in m_hideActionList — confirm in .cpp.
void restoreToolWindows();
void hideSideBar(bool b);
void moveToolWindow(Qt::DockWidgetArea from, Qt::DockWidgetArea to, QAction *action,bool split = false);
protected slots:
void toggledAction(bool);
protected:
LiteApi::IApplication *m_liteApp;
QMainWindow *m_mainWindow;
QMap<Qt::DockWidgetArea,SplitActionToolBar*> m_areaToolBar; // one button bar per edge
QMap<QAction*,SplitActionState*> m_actStateMap;             // registration records
QStatusBar *m_statusBar;
QAction *m_hideSideAct;
QMenu *m_windowMenu;
QList<QAction*> m_hideActionList; // actions unchecked by hideAllToolWindows, for restore
bool m_useShortcuts;
};
#endif // SPLITWINDOWSSTYLE_H
| 4,156
|
C++
|
.h
| 117
| 31.82906
| 165
| 0.713151
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,045
|
idletimer.h
|
visualfc_liteide/liteidex/src/liteapp/idletimer.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2017 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: idletimer.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef IDLETIMER_H
#define IDLETIMER_H
#include "liteapi/liteapi.h"
class AppIdleTimer : public LiteApi::IAppIdleTimer
{
Q_OBJECT
public:
AppIdleTimer();
virtual ~AppIdleTimer();
public:
virtual void resetTimer();
protected slots:
void timeout();
protected:
bool eventFilter(QObject *obj, QEvent *event);
protected:
QTimer *m_timer;
int m_count;
};
#endif // IDLETIMER_H
| 1,530
|
C++
|
.h
| 42
| 33.47619
| 83
| 0.658784
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,046
|
projectmanager.h
|
visualfc_liteide/liteidex/src/liteapp/projectmanager.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: projectmanager.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef PROJECTMANAGER_H
#define PROJECTMANAGER_H
#include "liteapi/liteapi.h"
#include "filesystem/filesystemwidget.h"
#include "folderproject.h"
#include <QPointer>
using namespace LiteApi;
class QComboBox;
class QStackedWidget;
class QStandardItem;
class QStandardItemModel;
class QVBoxLayout;
class QComboBox;
class QStackedLayout;
class QActionGroup;
class QScrollArea;
class ProjectManager : public IProjectManager
{
Q_OBJECT
public:
ProjectManager();
~ProjectManager();
virtual bool initWithApp(IApplication *app);
virtual IFolderProject* openFolder(const QString &folderPath);
virtual IProject *openProject(const QString &fileName, const QString &mimeType);
virtual void addFactory(IProjectFactory *factory);
virtual void removeFactory(IProjectFactory *factory);
virtual QList<IProjectFactory*> factoryList() const;
virtual QStringList mimeTypeList() const;
public:
virtual void setCurrentProject(IProject *project);
virtual IProject *currentProject() const;
virtual QList<IEditor*> editorList(IProject *project) const;
virtual void addImportAction(QAction *act);
virtual QWidget *widget();
public slots:
virtual void saveProject(IProject *project = 0);
virtual void closeProject(IProject *project = 0);
virtual void openSchemeDialog(const QString &scheme);
void currentEditorChanged(LiteApi::IEditor*);
void triggeredProject(QAction* act);
void openSchemeAct();
void appLoaded();
void applyOption(QString);
protected:
virtual void closeProjectHelper(IProject *project);
protected:
QPointer<IProject> m_currentProject;
QList<IProjectFactory*> m_factoryList;
QScrollArea *m_widget;
//QAction *m_toolWindowAct;
bool m_bAutoCloseProjectEditors;
FolderProject *m_folderProject;
};
#endif // PROJECTMANAGER_H
| 2,989
|
C++
|
.h
| 77
| 36.064935
| 84
| 0.720482
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,047
|
pluginmanager.h
|
visualfc_liteide/liteidex/src/liteapp/pluginmanager.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: pluginmanager.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef PLUGINMANAGER_H
#define PLUGINMANAGER_H
#include "liteapi/liteapi.h"
using namespace LiteApi;
class PluginManager : public QObject
{
Q_OBJECT
public:
PluginManager(QObject *parent = 0);
virtual ~PluginManager();
QList<IPluginFactory*> factoryList();
void loadPlugins(const QString &dir);
bool isLoaded() const;
protected:
bool m_bLoaded;
QAction *m_aboutPluginsAct;
QList<IPluginFactory*> m_factoryList;
};
#endif // PLUGINMANAGER_H
| 1,580
|
C++
|
.h
| 41
| 36.463415
| 82
| 0.679948
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,048
|
liteapp_global.h
|
visualfc_liteide/liteidex/src/liteapp/liteapp_global.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: liteapp_global.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef LITEAPP_GLOBAL_H
#define LITEAPP_GLOBAL_H
#include <QtCore/qglobal.h>
#if defined(LITEAPP_LIBRARY)
# define LITEIDESHARED_EXPORT Q_DECL_EXPORT
#else
# define LITEIDESHARED_EXPORT Q_DECL_IMPORT
#endif
#define LITEIDE_STORELOCAL "LiteIDE/StoreLocal"
#define OPTION_LITEAPP "option/liteapp"
#define OPTION_LITEOUTPUT "option/liteoutput"
#define LITEAPP_MAXRECENTFILES "LiteApp/MaxRecentFile"
#define LITEAPP_MAXEDITORCOUNT "LiteApp/MaxEditorCount"
#define LITEAPP_AUTOCLOSEPROEJCTFILES "LiteApp/AutoCloseProjectEditors"
#define LITEAPP_AUTOLOADLASTSESSION "LiteApp/AutoLoadLastSession"
#define LITEAPP_AUTOIDLESAVEDOCUMENTS "LiteApp/AutoIdleSaveDocuments"
#define LITEAPP_AUTOIDLESAVEDOCUMENTS_TIME "LiteApp/AutoIdelSaveDocumentsTime"
#define LITEAPP_AUTOIDLESAVEDOCUMENTS_EMITMESSAGE "LiteApp/AutoIdelSaveDocumentsEmitMessage"
#define LITEAPP_LANGUAGE "General/Language"
#define LITEAPP_STYLE "LiteApp/WindowStyle"
#define LITEAPP_SPLASHVISIBLE "LiteApp/SplashVisible"
#define LITEAPP_WELCOMEPAGEVISIBLE "General/WelcomePageVisible"
#define LITEAPP_TOOLBARICONSIZE "General/ToolBarIconSize"
#define LITEAPP_EDITTABSCLOSABLE "LiteApp/EditTabsClosable"
#define LITEAPP_EDITTABSENABLEWHELL "LiteApp/EditTabEnableWhell"
#define LITEAPP_SHOWEDITTOOLBAR "LiteApp/ShowEditToolbar"
#define LITEAPP_QSS "LiteApp/Qss"
#define LITEAPP_FULLSCREEN "LiteApp/FullScreen"
#define LITEAPP_WINSTATE "LiteApp/WinState"
#define LITEAPP_SHORTCUTS "keybord_shortcuts/"
#define LITEAPP_OPTNFOLDERINNEWWINDOW "LiteApp/OpenFolderInNewWindow"
#define LITEAPP_FOLDERSHOWHIDENFILES "LiteApp/FolderShowHidenFiles"
#define LITEAPP_FOLDERSHOWDETAILS "LiteApp/FolderShowDetails"
#define LITEAPP_FOLDERSSYNCEDITOR "FileManager/synceditor"
#define LITEAPP_FOLDERSPLITMODE "LiteApp/FolderSplitMode"
#define LITEAPP_STARTUPRELOADFILES "LiteApp/StartupReloadFiles"
#define LITEAPP_STARTUPRELOADFOLDERS "LiteApp/StartupReloadFolders"
#define LITEAPP_FILEWATCHERAUTORELOAD "LiteApp/FileWatcherAutoReload"
#define LITEIDE_CUSTOMEICONPATH "LiteApp/CustomeIconPath"
#define LITEIDE_CUSTOMEICON "LiteApp/CustomeIcon"
#define LITEAPP_TOOLWINDOW_SHORTCUTS "LiteApp/ToolWindowShortcuts"
#define LITEAPP_USE_LIBGOPHER "LiteApp/UseLibgopher"
#define LITEAPP_EDITORMOUSEEXTNAVIGATE "LiteApp/EditorMouseExtNavigate"
#define OUTPUT_FAMILY "output/family"
#define OUTPUT_FONTSIZE "output/fontsize"
#define OUTPUT_FONTZOOM "output/fontzoom"
#define OUTPUT_ANTIALIAS "output/antialias"
#define OUTPUT_MAXLINES "output/maxlines"
#define OUTPUT_USECOLORSCHEME "output/colorscheme"
#define LITEAPP_FILESFILTER_MAXCOUNT "LiteApp/FilesFilterMaxCount"
#define LITEAPP_SESSIONLIST "LiteApp/SessionList"
enum TOOLBAR_ICONSIZE {
TOOLBAR_ICONSIZE_16 = 0,
TOOLBAR_ICONSIZE_18,
TOOLBAR_ICONSIZE_20,
TOOLBAR_ICONSIZE_22,
TOOLBAR_ICONSIZE_24
};
#endif // LITEAPP_GLOBAL_H
| 4,027
|
C++
|
.h
| 81
| 47.17284
| 94
| 0.784039
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,049
|
newfiledialog.h
|
visualfc_liteide/liteidex/src/liteapp/newfiledialog.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: newfiledialog.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef NEWFILEDIALOG_H
#define NEWFILEDIALOG_H
#include <QDialog>
#include <QModelIndex>
namespace Ui {
class NewFileDialog;
}
class QStandardItemModel;
class QStringListModel;
class TemplateInfo
{
public:
void clear()
{
name.clear();
author.clear();
type.clear();
info.clear();
files.clear();
open.clear();
scheme.clear();
}
bool isValid()
{
return !name.isEmpty() && !files.isEmpty();
}
QString name;
QString author;
QString type;
QString info;
QStringList files;
QStringList open;
QString scheme;
QString dir;
};
class NewFileDialog : public QDialog
{
Q_OBJECT
public:
explicit NewFileDialog(QWidget *parent = 0);
~NewFileDialog();
void setPathList(const QStringList &pathList);
void setGopath(const QString &path);
void setProjectLocation(const QString &path);
void setFileLocation(const QString &path);
void updateLocation();
virtual void accept();
bool processFile(const QString &infile, const QString &outfile);
QStringList openFiles() const;
QString type() const;
QString scheme() const;
QString openPath() const;
public slots:
void loadTemplate(const QString &root);
void activePath(QModelIndex);
void activeTemplate(QModelIndex);
void nameLineChanged(QString);
void locationLineChanged(QString);
private slots:
void on_locationBrowseButton_clicked();
private:
Ui::NewFileDialog *ui;
QStandardItemModel *m_templateModel;
QStringListModel *m_pathModel;
QString m_projectLocation;
QString m_fileLocation;
QString m_gopath;
QString m_openPath;
QStringList m_openFiles;
QMap<QString,QString> m_stringMap;
TemplateInfo m_cur;
};
#endif // NEWFILEDIALOG_H
| 2,902
|
C++
|
.h
| 95
| 26.821053
| 82
| 0.684643
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,050
|
litedebugapi.h
|
visualfc_liteide/liteidex/src/api/litedebugapi/litedebugapi.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: litedebugapi.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef LITEDEBUGAPI_H
#define LITEDEBUGAPI_H
#include "liteapi/liteapi.h"
#include <QAbstractItemModel>
namespace LiteApi {
enum DEBUG_MODEL_TYPE{
ASYNC_MODEL = 1,
VARS_MODEL,
WATCHES_MODEL,
FRAMES_MODEL,
BREAKPOINTS_MODEL,
THREADS_MODEL,
LIBRARY_MODEL,
GOROUTINES_MODEL,
REGS_MODEL,
ASM_MODEL
};
enum DEBUG_LOG_TYPE {
DebugConsoleLog = 1,
DebugApplationLog,
DebugRuntimeLog,
DebugErrorLog
};
enum DEBUG_EDITOR_MARKTYPE {
BreakPointMarkType = 2000,
CurrentLineMarkType = 3000
};
class IDebugger : public QObject
{
Q_OBJECT
public:
IDebugger(QObject *parent = 0): QObject(parent) {}
virtual ~IDebugger() {}
public:
virtual QString mimeType() const = 0;
virtual QAbstractItemModel *debugModel(DEBUG_MODEL_TYPE type) = 0;
virtual void setWorkingDirectory(const QString &dir) = 0;
virtual void setEnvironment (const QStringList &environment) = 0;
virtual bool start(const QString &cmd, const QString &arguments) = 0;
virtual void stop() = 0;
virtual bool isRunning() = 0;
virtual void stepOver() = 0;
virtual void stepInto() = 0;
virtual void stepOut() = 0;
virtual void continueRun() = 0;
virtual void runToLine(const QString &fileName, int line) = 0;
virtual void command(const QByteArray &cmd) = 0;
virtual void enterAppText(const QString &text) = 0;
virtual void enterDebugText(const QString &text) = 0;
virtual void expandItem(QModelIndex index, DEBUG_MODEL_TYPE type) = 0;
virtual void setInitBreakTable(const QMultiMap<QString,int> &bks) = 0;
virtual void setInitWatchList(const QStringList &names) = 0;
virtual void insertBreakPoint(const QString &fileName, int line) = 0;
virtual void removeBreakPoint(const QString &fileName, int line) = 0;
virtual void createWatch(const QString &var) = 0;
virtual void removeWatch(const QString &var) = 0;
virtual void removeAllWatch() = 0;
virtual void dbclickItem(QModelIndex index, DEBUG_MODEL_TYPE type) = 0;
signals:
void debugStarted();
void debugStoped();
void debugLoaded();
void debugLog(LiteApi::DEBUG_LOG_TYPE type, const QString &log);
void setExpand(LiteApi::DEBUG_MODEL_TYPE type, const QModelIndex &index, bool expanded);
void setCurrentLine(const QString &fileName, int line);
void gotoLine(const QString &fileName, int line);
void watchCreated(const QString &watch,const QString &name);
void watchRemoved(const QString &watch);
void beginUpdateModel(LiteApi::DEBUG_MODEL_TYPE type);
void endUpdateModel(LiteApi::DEBUG_MODEL_TYPE type);
void scrollTo(LiteApi::DEBUG_MODEL_TYPE type, const QModelIndex &index);
};
class IDebuggerManager : public IManager
{
Q_OBJECT
public:
IDebuggerManager(QObject *parent = 0) : IManager(parent) {}
virtual void addDebugger(IDebugger *debug) = 0;
virtual void removeDebugger(IDebugger *debug) = 0;
virtual IDebugger *findDebugger(const QString &mimeType) = 0;
virtual QList<IDebugger*> debuggerList() const = 0;
virtual void setCurrentDebugger(IDebugger *debug) = 0;
virtual IDebugger *currentDebugger() = 0;
signals:
void currentDebuggerChanged(LiteApi::IDebugger*);
};
inline IDebuggerManager *getDebugManager(LiteApi::IApplication *app)
{
return LiteApi::findExtensionObject<IDebuggerManager*>(app,"LiteApi.IDebuggerManager");
}
class ILiteDebug : public IObject
{
Q_OBJECT
public:
ILiteDebug(QObject *parent) : IObject(parent)
{
}
public:
virtual IDebuggerManager *debugManager() const = 0;
virtual void startDebug(const QString &cmd, const QString &args, const QString &work) = 0;
virtual bool isRunning() const = 0;
public slots:
virtual void continueRun() = 0;
virtual void runToLine() = 0;
virtual void stopDebug() = 0;
virtual void stepOver() = 0;
virtual void stepInto() = 0;
virtual void stepOut() = 0;
virtual void showLine() = 0;
virtual void toggleBreakPoint() = 0;
virtual void removeAllBreakPoints() = 0;
signals:
void debugBefore();
void debugEnd();
};
inline ILiteDebug *getLiteDebug(LiteApi::IApplication *app)
{
return LiteApi::findExtensionObject<ILiteDebug*>(app,"LiteApi.ILiteDebug");
}
} //namespace LiteApi
#endif //LITEDEBUGAPI_H
| 5,373
|
C++
|
.h
| 143
| 34.125874
| 94
| 0.716449
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,051
|
litettyapi.h
|
visualfc_liteide/liteidex/src/api/litettyapi/litettyapi.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: litettyapi.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef LITETTYAPI_H
#define LITETTYAPI_H
#include "liteapi/liteapi.h"
#include <QProcessEnvironment>
#include <QDir>
namespace LiteApi {
class ITty : public QObject
{
Q_OBJECT
public:
ITty(QObject *parent = 0): QObject(parent) {}
virtual QString serverName() const = 0;
virtual QString errorString() const = 0;
virtual bool listen() = 0;
virtual void shutdown() = 0;
virtual void write(const QByteArray &data) = 0;
signals:
void byteDelivery(const QByteArray &data);
};
class ILiteTty : public QObject
{
public:
ILiteTty(QObject *parent) : QObject(parent) { }
virtual ITty* createTty(QObject *parent) const = 0;
};
inline ILiteTty *getLiteTty(LiteApi::IApplication* app)
{
return LiteApi::findExtensionObject<ILiteTty*>(app,"LiteApi.ILiteTty");
}
inline ITty *createTty(LiteApi::IApplication *app,QObject *parent)
{
ILiteTty *liteTty = getLiteTty(app);
if (liteTty) {
return liteTty->createTty(parent);
}
return 0;
}
} //namespace LiteApi
#endif //LITETTYAPI_H
| 2,114
|
C++
|
.h
| 61
| 32.360656
| 82
| 0.691479
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,052
|
liteenvapi.h
|
visualfc_liteide/liteidex/src/api/liteenvapi/liteenvapi.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: liteenvapi.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef LITEENVAPI_H
#define LITEENVAPI_H
#include "liteapi/liteapi.h"
#include <QProcessEnvironment>
#include <QDir>
#include <QDebug>
namespace LiteApi {
class IEnv : public QObject
{
Q_OBJECT
public:
IEnv(QObject *parent = 0): QObject(parent) {}
virtual ~IEnv() {}
virtual QString id() const = 0;
virtual QString filePath() const = 0;
virtual QStringList orgEnvLines() const = 0;
virtual QMap<QString,QString> goEnvMap() const = 0;
virtual QProcessEnvironment& environment() = 0;
virtual void reload() = 0;
signals:
void goenvError(QString,QString);
void goenvChanged(QString);
};
class IEnvManager : public IManager
{
Q_OBJECT
public:
IEnvManager(QObject *parent = 0) : IManager(parent) {}
virtual QList<IEnv*> envList() const = 0;
virtual IEnv *findEnv(const QString &id, const QString &backup = "system") const = 0;
virtual void setCurrentEnvId(const QString &id) = 0;
virtual IEnv *currentEnv() const = 0;
virtual QProcessEnvironment currentEnvironment() const = 0;
virtual void reloadCurrentEnv() = 0;
signals:
void currentEnvChanged(LiteApi::IEnv*);
};
class IGoEnvManger: public IManager
{
Q_OBJECT
public:
IGoEnvManger(QObject *parent = 0) : IManager(parent) {}
virtual QString gocmd() const = 0;
virtual QString gotools() const = 0;
virtual QString GOROOT() const = 0;
virtual QStringList GOPATH() const = 0;
virtual QProcessEnvironment environment() const = 0;
virtual QProcessEnvironment customEnvironment(const QString &buildFilePath, QString *pCustomBuildPath = 0) const = 0;
virtual QStringList customGOPATH(const QString &buildPath, QString *pCustomBuildPath = 0) const = 0;
virtual QString findRealCustomBuildPath(const QString &buildPath) const = 0;
virtual bool hasCustomGOPATH(const QString &buildPath) const = 0;
virtual void updateGoEnv() = 0;
virtual void updateCustomGOPATH(const QString &buildPath) = 0;
signals:
void globalGOPATHChanged();
void customGOPATHChanged(const QString &buildPath);
};
inline IEnvManager *getEnvManager(LiteApi::IApplication* app)
{
return LiteApi::findExtensionObject<IEnvManager*>(app,"LiteApi.IEnvManager");
}
inline IGoEnvManger *getGoEnvManager(LiteApi::IApplication *app)
{
return LiteApi::findExtensionObject<IGoEnvManger*>(app,"LiteApi.IGoEnvManger");
}
inline QProcessEnvironment getCurrentEnvironment(LiteApi::IApplication *app)
{
QProcessEnvironment e;
IEnvManager *env = getEnvManager(app);
if (env) {
e = env->currentEnvironment();
} else {
e = QProcessEnvironment::systemEnvironment();
}
#ifdef Q_OS_WIN
QString sep = ";";
#else
QString sep = ":";
#endif
QStringList pathList;
foreach (QString path, e.value("PATH").split(sep,qtSkipEmptyParts)) {
pathList.append(QDir::toNativeSeparators(path));
}
pathList.append(app->applicationPath());
pathList.removeDuplicates();
e.insert("PATH",pathList.join(sep));
return e;
}
inline QString getDefaultGOOS()
{
const char* goos = "";
#ifdef Q_OS_WIN
goos = "windows";
#endif
#ifdef Q_OS_LINUX
goos = "linux";
#endif
#ifdef Q_OS_DARWIN
goos = "darwin";
#endif
#ifdef Q_OS_FREEBSD
goos = "freebsd";
#endif
#ifdef Q_OS_OPENBSD
goos = "openbsd";
#endif
return goos;
}
inline QString getDefaultGOROOT()
{
#ifdef Q_OS_WIN
return "c:\\go";
#else
return "/usr/local/go";
#endif
}
inline bool hasGoEnv(const QProcessEnvironment &env)
{
return env.contains("GOROOT") && env.contains("GOARCH");
}
inline QProcessEnvironment getSysEnvironment(LiteApi::IApplication *app)
{
QProcessEnvironment env = getCurrentEnvironment(app);
#ifdef Q_OS_WIN
QString sep = ";";
#else
QString sep = ":";
#endif
IEnvManager *mgr = LiteApi::getEnvManager(app);
if (mgr) {
LiteApi::IEnv *ce = mgr->currentEnv();
if (ce) {
QMapIterator<QString,QString> i(ce->goEnvMap());
while(i.hasNext()) {
i.next();
env.insert(i.key(),i.value());
}
}
}
QString goos = env.value("GOOS");
if (goos.isEmpty()) {
goos = getDefaultGOOS();
}
QString goroot = env.value("GOROOT");
if (goroot.isEmpty()) {
goroot = getDefaultGOROOT();
}
return env;
}
inline QProcessEnvironment getGoEnvironment(LiteApi::IApplication *app)
{
QProcessEnvironment env = getCurrentEnvironment(app);
#ifdef Q_OS_WIN
QString sep = ";";
#else
QString sep = ":";
#endif
IEnvManager *mgr = LiteApi::getEnvManager(app);
if (mgr) {
LiteApi::IEnv *ce = mgr->currentEnv();
if (ce) {
QMapIterator<QString,QString> i(ce->goEnvMap());
while(i.hasNext()) {
i.next();
env.insert(i.key(),i.value());
}
}
}
QString goos = env.value("GOOS");
if (goos.isEmpty()) {
goos = getDefaultGOOS();
}
if (!env.contains("GOEXE")) {
QString goexe;
if (goos == "windows") {
goexe = ".exe";
}
env.insert("GOEXE",goexe);
}
QString goarch = env.value("GOARCH");
QString goroot = env.value("GOROOT");
if (goroot.isEmpty()) {
goroot = getDefaultGOROOT();
}
if (app->settings()->value("liteide/use111gomodule",false).toBool()) {
env.insert("GO111MODULE",app->settings()->value("liteide/go111module").toString());
}
if (app->settings()->value("liteide/usegoproxy",false).toBool()) {
env.insert("GOPROXY",app->settings()->value("liteide/goproxy").toString());
}
if (app->settings()->value("liteide/usegoprivate",false).toBool()) {
env.insert("GOPRIVATE",app->settings()->value("liteide/goprivate").toString());
}
if (app->settings()->value("liteide/usegonoproxy",false).toBool()) {
env.insert("GONOPROXY",app->settings()->value("liteide/gonoproxy").toString());
}
if (app->settings()->value("liteide/usegonosumdb",false).toBool()) {
env.insert("GONOSUMDB",app->settings()->value("liteide/gonosumdb").toString());
}
QStringList pathList;
if (app->settings()->value("liteide/usesysgopath",true).toBool()) {
foreach (QString path, env.value("GOPATH").split(sep,qtSkipEmptyParts)) {
pathList.append(QDir::toNativeSeparators(path));
}
}
if (app->settings()->value("liteide/uselitegopath",true).toBool()) {
foreach (QString path, app->settings()->value("liteide/gopath").toStringList()) {
pathList.append(QDir::toNativeSeparators(path));
}
}
pathList.removeDuplicates();
env.insert("GOPATH",pathList.join(sep));
if (!goroot.isEmpty()) {
pathList.prepend(goroot);
}
QStringList binList;
QString gobin = env.value("GOBIN");
if (!gobin.isEmpty()) {
binList.append(gobin);
}
foreach (QString path, pathList) {
binList.append(QFileInfo(path,"bin").filePath());
binList.append(QFileInfo(path,"bin/"+goos+"_"+goarch).filePath());
}
env.insert("PATH",env.value("PATH")+sep+binList.join(sep)+sep);
return env;
}
inline QStringList getGOPATH(LiteApi::IApplication *app, bool includeGoroot)
{
QProcessEnvironment env = getGoEnvironment(app);
#ifdef Q_OS_WIN
QString sep = ";";
#else
QString sep = ":";
#endif
QStringList pathList;
QString goroot = QDir::toNativeSeparators(env.value("GOROOT"));
if (includeGoroot) {
pathList.append(goroot);
}
foreach (QString path, env.value("GOPATH").split(sep,qtSkipEmptyParts)) {
pathList.append(QDir::toNativeSeparators(path));
}
if (!includeGoroot) {
pathList.removeAll(goroot);
}
pathList.removeDuplicates();
return pathList;
}
inline QString getGOROOT(LiteApi::IApplication *app)
{
return getGoEnvironment(app).value("GOROOT");
}
inline QString lookupSrcRoot(const QString &buildFilePath)
{
int index = buildFilePath.indexOf("/src/");
if (index < 0) {
return QString();
}
return buildFilePath.left(index+4);
}
inline QString lookupParentHasCustom(LiteApi::IApplication *app, const QString &buildFilePath, const QString &srcRoot, QString *pCustomParent = 0)
{
QFileInfo info(buildFilePath);
QString parent = info.path();
if (parent == srcRoot || info.dir().isRoot()) {
return QString();
}
QString customKey = "litebuild-custom/"+parent;
bool use_custom_gopath = app->settings()->value(customKey+"#use_custom_gopath",false).toBool();
if (use_custom_gopath) {
if (pCustomParent) {
*pCustomParent = parent;
}
return customKey;
}
return lookupParentHasCustom(app,parent,srcRoot);
}
inline QProcessEnvironment getCustomGoEnvironment(LiteApi::IApplication *app, const QString &buildFilePath, QString *pCustomBuildPath = 0)
{
if (buildFilePath.isEmpty()) {
return getGoEnvironment(app);
}
QString customKey = "litebuild-custom/"+buildFilePath;
QString customBuildPath = buildFilePath;
bool use_custom_gopath = app->settings()->value(customKey+"#use_custom_gopath",false).toBool();
if (!use_custom_gopath) {
QString srcRoot = lookupSrcRoot(buildFilePath);
if (!srcRoot.isEmpty()) {
customKey = lookupParentHasCustom(app,buildFilePath,srcRoot, &customBuildPath);
if (!customKey.isEmpty()) {
use_custom_gopath = true;
}
}
}
if (!use_custom_gopath) {
return getGoEnvironment(app);
}
if (pCustomBuildPath) {
*pCustomBuildPath = customBuildPath;
}
QProcessEnvironment env = getCurrentEnvironment(app);
#ifdef Q_OS_WIN
QString sep = ";";
#else
QString sep = ":";
#endif
IEnvManager *mgr = LiteApi::getEnvManager(app);
if (mgr) {
LiteApi::IEnv *ce = mgr->currentEnv();
if (ce) {
QMapIterator<QString,QString> i(ce->goEnvMap());
while(i.hasNext()) {
i.next();
env.insert(i.key(),i.value());
}
}
}
QString goos = env.value("GOOS");
if (goos.isEmpty()) {
goos = getDefaultGOOS();
}
if (!env.contains("GOEXE")) {
QString goexe;
if (goos == "windows") {
goexe = ".exe";
}
env.insert("GOEXE",goexe);
}
QString goarch = env.value("GOARCH");
QString goroot = env.value("GOROOT");
if (goroot.isEmpty()) {
goroot = getDefaultGOROOT();
}
QStringList pathList;
bool inherit_sys_gopath = app->settings()->value(customKey+"#inherit_sys_gopath",true).toBool();
bool inherit_lite_gopath = app->settings()->value(customKey+"#inherit_lite_gopath",true).toBool();
bool custom_gopath = app->settings()->value(customKey+"#custom_gopath",false).toBool();
if (inherit_sys_gopath) {
foreach (QString path, env.value("GOPATH").split(sep,qtSkipEmptyParts)) {
pathList.append(QDir::toNativeSeparators(path));
}
}
if (inherit_lite_gopath) {
foreach (QString path, app->settings()->value("liteide/gopath").toStringList()) {
pathList.append(QDir::toNativeSeparators(path));
}
}
if (custom_gopath) {
foreach (QString path, app->settings()->value(customKey+"#gopath").toStringList()) {
pathList.append(QDir::toNativeSeparators(path));
}
}
pathList.removeDuplicates();
env.insert("GOPATH",pathList.join(sep));
if (!goroot.isEmpty()) {
pathList.prepend(goroot);
}
QStringList binList;
QString gobin = env.value("GOBIN");
if (!gobin.isEmpty()) {
binList.append(gobin);
}
foreach (QString path, pathList) {
binList.append(QFileInfo(path,"bin").filePath());
binList.append(QFileInfo(path,"bin/"+goos+"_"+goarch).filePath());
}
env.insert("PATH",env.value("PATH")+sep+binList.join(sep)+sep);
return env;
}
inline QProcessEnvironment getCustomGoEnvironment(LiteApi::IApplication *app, LiteApi::IEditor *editor)
{
QString buildFilePath;
if (editor) {
QString filePath = editor->filePath();
if (!filePath.isEmpty()) {
buildFilePath = QFileInfo(filePath).path();
}
}
return getCustomGoEnvironment(app,buildFilePath);
}
} //namespace LiteApi
#endif //LITEENVAPI_H
| 13,517
|
C++
|
.h
| 405
| 28.130864
| 146
| 0.654294
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,053
|
litefindapi.h
|
visualfc_liteide/liteidex/src/api/litefindapi/litefindapi.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: litefindapi.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef LITEFINDAPI_H
#define LITEFINDAPI_H
#include "liteapi/liteapi.h"
namespace LiteApi {
// One match produced by a file search: the file it occurred in, the
// text of the matching line, and the match position within that line.
class FileSearchResult
{
public:
    // Empty result: zero position/length, empty strings.
    FileSearchResult()
        : line(0), col(0), len(0) {}
    // path: file containing the match; lineText: full text of the line;
    // line/col: position of the match; len: length of the matched text.
    FileSearchResult(const QString &path, const QString &lineText, int line, int col, int len)
        : path(path), lineText(lineText), line(line), col(col), len(len) {}
public:
    QString path;      // file path of the match
    QString lineText;  // full text of the matching line
    int line;          // line number of the match (base not defined here - see producers)
    int col;           // column of the match within the line
    int len;           // length of the matched text
};
// Abstract interface for one search backend (identified by mime type).
// An implementation supplies a configuration widget and reports matches
// asynchronously through the signals below.
class IFileSearch : public QObject
{
    Q_OBJECT
public:
    IFileSearch(QObject *parent) : QObject(parent) {}
    // Key used to register/look up this search implementation.
    virtual QString mimeType() const = 0;
    // Human-readable name shown in the UI.
    virtual QString displayName() const = 0;
    // Configuration/input widget hosted by the search UI.
    virtual QWidget* widget() const = 0;
    virtual void start() = 0;
    virtual void cancel() = 0;
    virtual void activate() = 0;
    virtual QString searchText() const = 0;
    // True when this search supports replacing matches.
    virtual bool replaceMode() const = 0;
    virtual bool readOnly() const = 0;
    // True when a running search can be cancelled.
    virtual bool canCancel() const = 0;
    // Pre-fills the search text, file filter and root path.
    virtual void setSearchInfo(const QString &text, const QString &filter, const QString &path) = 0;
signals:
    void searchTextChanged(const QString &text);
    void findStarted();
    void findFinished(bool b = true);
    // Emitted once per match found.
    void findResult(const LiteApi::FileSearchResult &result);
    void findError(const QString &error);
};
// Registry of IFileSearch backends plus control of the active search UI.
class IFileSearchManager : public IManager
{
    Q_OBJECT
public:
    IFileSearchManager(QObject *parent = 0) : IManager(parent) {}
    // Registers a backend; lookup is by its mimeType().
    virtual void addFileSearch(IFileSearch* search) = 0;
    virtual IFileSearch *findFileSearch(const QString &mime) = 0;
    virtual QList<IFileSearch*> fileSearchList() const = 0;
    virtual void setCurrentSearch(LiteApi::IFileSearch *search) = 0;
    // Opens the search UI pre-filled with text/filter/path.
    virtual void showFileSearch(const QString &text, const QString &filter, const QString &path) = 0;
public slots:
    virtual void newSearch() = 0;
};
// Looks up the file-search manager published as
// "LiteApi.IFileSearchManager"; returns null when not registered.
inline IFileSearchManager* getFileSearchManager(LiteApi::IApplication *app)
{
    IFileSearchManager *manager =
        LiteApi::findExtensionObject<IFileSearchManager*>(app, "LiteApi.IFileSearchManager");
    return manager;
}
} //namespace LiteApi
#endif //LITEFINDAPI_H
| 3,119
|
C++
|
.h
| 82
| 35.036585
| 101
| 0.701488
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,054
|
docbrowserapi.h
|
visualfc_liteide/liteidex/src/api/docbrowserapi/docbrowserapi.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: docbrowserapi.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef LITEDOCBROWSERAPI_H
#define LITEDOCBROWSERAPI_H
#include "liteapi/liteapi.h"
#include "liteapi/litehtml.h"
#include <QTextBrowser>
#include <QComboBox>
#include <QToolBar>
namespace LiteApi {
// Browser-style editor for documentation pages: navigable HTML content
// with history (back/forward), a URL combo box and a tool bar.
class IDocumentBrowser : public IBrowserEditor
{
    Q_OBJECT
public:
    IDocumentBrowser(QObject *parent) : IBrowserEditor(parent) {}
    // Directories used to resolve relative resources in loaded pages.
    virtual void setSearchPaths(const QStringList &paths) = 0;
    // Displays pre-rendered html under the given url.
    virtual void setUrlHtml(const QUrl &url,const QString &html) = 0;
    virtual void scrollToAnchor(const QString &text) = 0;
    virtual QToolBar *toolBar() = 0;
    virtual QComboBox *urlComboBox() = 0;
    // Underlying HTML rendering widget (QTextBrowser- or WebView-backed).
    virtual IHtmlWidget *htmlWidget() = 0;
signals:
    void linkHovered(const QUrl &url);
    // Emitted when the browser wants the host to load a url.
    void requestUrl(const QUrl &url);
    void forwardAvailable(bool available);
    void backwardAvailable(bool available);
    void documentLoaded();
    void anchorChanged(const QString &anchor);
public slots:
    virtual void backward() = 0;
    virtual void forward() = 0;
};
}
#endif //LITEDOCBROWSERAPI_H
| 2,092
|
C++
|
.h
| 54
| 36.425926
| 82
| 0.704087
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,055
|
golangastapi.h
|
visualfc_liteide/liteidex/src/api/golangastapi/golangastapi.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: golangastapi.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef GOLANGASTAPI_H
#define GOLANGASTAPI_H
#include "liteapi/liteapi.h"
#include <QProcessEnvironment>
namespace LiteApi {
/*
const (
tag_package = "p"
tag_type = "t"
tag_struct = "s"
tag_interface = "i"
tag_value = "v"
tag_const = "c"
tag_func = "f"
tag_value_folder = "+v"
tag_const_folder = "+c"
tag_func_folder = "+f"
tag_type_method = "tm"
tag_type_factor = "tf"
tag_type_value = "tv"
tag_todo = "b"
tag_todo_folder = "+b"
)
*/
// Node kinds for the Go AST outline; mirrors the string tags listed in
// the Go const block above (e.g. "p" package, "s" struct, "+f" func folder).
enum ASTTAG_ENUM {
    TagNone = 0,
    TagPackage,       // "p"
    TagImport,
    TagImportFolder,
    TagType,          // "t"
    TagStruct,        // "s"
    TagInterface,     // "i"
    TagValue,         // "v"
    TagConst,         // "c"
    TagFunc,          // "f"
    TagValueFolder,   // "+v"
    TagConstFolder,   // "+c"
    TagFuncFolder,    // "+f"
    TagTypeMethod,    // "tm"
    TagTypeFactor,    // "tf"
    TagTypeValue,     // "tv"
    TagTodo,          // "b"
    TagTodoFolder     // "+b"
};
// Supplies outline icons for Go AST nodes, selected by tag and by
// whether the symbol is exported (pub).
class IGolangAst : public QObject
{
    Q_OBJECT
public:
    IGolangAst(QObject *parent = 0): QObject(parent) {}
    virtual ~IGolangAst() {}
    // tag is one of the string tags ("p", "s", "+f", ...).
    virtual QIcon iconFromTag(const QString &tag, bool pub) const = 0;
    // Same lookup keyed by the ASTTAG_ENUM value.
    virtual QIcon iconFromTagEnum(LiteApi::ASTTAG_ENUM tag, bool pub) const = 0;
};
} //namespace LiteApi
#endif //GOLANGASTAPI_H
| 2,365
|
C++
|
.h
| 77
| 26.844156
| 82
| 0.625713
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,057
|
golangdocapi.h
|
visualfc_liteide/liteidex/src/api/golangdocapi/golangdocapi.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: golangdocapi.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef GOLANGDOCAPI_H
#define GOLANGDOCAPI_H
#include "liteapi/liteapi.h"
/*
openUrl(const QUrl &url);
url scheme
file : open html or plain file
list : url path only [pkg|cmd]
find : find pkg name
pdoc : show pkg doc
*/
namespace LiteApi {
// Bit flags classifying entries of the Go package API index; combine to
// filter what IGolangApi::all() returns.
enum PkgApiEnum {
    NullApi = 0,
    PkgApi = 0x0001,
    ConstApi = 0x0002,
    VarApi = 0x0004,
    StructApi = 0x0008,
    InterfaceApi = 0x0010,
    TypeApi = 0x0020,
    FuncApi = 0x0040,
    TypeMethodApi = 0x0080,
    TypeVarApi = 0x0100,
    // All type-like kinds.
    AllTypeApi = StructApi | InterfaceApi | TypeApi,
    // Every kind.
    AllGolangApi = PkgApi | ConstApi | VarApi | StructApi | InterfaceApi | TypeApi | FuncApi | TypeMethodApi | TypeVarApi
};
// Query interface over the indexed Go standard/package API.
class IGolangApi : public QObject
{
    Q_OBJECT
public:
    IGolangApi(QObject *parent) : QObject(parent) {}
public:
    // All indexed symbol tags matching the PkgApiEnum flag mask.
    virtual QStringList all(int flag = AllGolangApi) const = 0;
    // Classifies tag and writes its expression form into exp.
    virtual PkgApiEnum findExp(const QString &tag, QString &exp) const = 0;
    virtual QStringList findDocUrl(const QString &tag) const = 0;
    virtual QString findDocInfo(const QString &tag) const = 0;
};
// Go documentation browser service. url schemes handled by openUrl()
// are listed in the file-top comment (file/list/find/pdoc).
class IGolangDoc : public IObject
{
    Q_OBJECT
public:
    IGolangDoc(QObject *parent) : IObject(parent) {}
public slots:
    virtual void openUrl(const QUrl &url, const QVariant &addin = QVariant()) = 0;
    // Raises/focuses the documentation browser window.
    virtual void activeBrowser() = 0;
};
// Looks up the Go documentation service published as
// "LiteApi.IGolangDoc"; returns null when the plugin is absent.
inline IGolangDoc *getGolangDoc(LiteApi::IApplication *app)
{
    IGolangDoc *doc = LiteApi::findExtensionObject<IGolangDoc*>(app, "LiteApi.IGolangDoc");
    return doc;
}
}
#endif //GOLANGDOCAPI_H
| 2,561
|
C++
|
.h
| 74
| 32.216216
| 121
| 0.699919
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,058
|
litebuildapi.h
|
visualfc_liteide/liteidex/src/api/litebuildapi/litebuildapi.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: litebuildapi.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef LITEBUILDAPI_H
#define LITEBUILDAPI_H
#include "liteapi/liteapi.h"
#include <QProcessEnvironment>
namespace LiteApi {
// One build-menu action parsed from a build definition file: the command
// to run, its options (debug/console/output behaviour) and how its
// output is post-processed (codec, error regex, navigation).
// Boolean attributes arrive as raw strings and are converted via
// QVariant's string-to-bool rules.
class BuildAction
{
public:
    BuildAction():
        m_debug(false),
        m_output(false),
        m_readline(false),
        m_separator(false),
        m_killold(false),
        m_navigate(false),
        m_folder(false),
        m_takeall(false)
    {}
    void setId(const QString &id) { m_id = id; }
    void setOs(const QString &os) { m_os = os; }
    void setMenu(const QString &menu) { m_menu = menu; }
    void setKey(const QString &key) { m_key = key; }
    void setFunc(const QString &func) { m_func = func; }
    void setCmd(const QString &cmd) { m_cmd = cmd; }
    void setArgs(const QString &args) { m_args = args; }
    void setSave(const QString &save) { m_save = save; }
    // The following setters parse "true"/"1"/... attribute strings.
    void setDebug(const QString &text) {
        m_debug = QVariant(text).toBool();
    }
    void setOutput(const QString &text) {
        m_output = QVariant(text).toBool();
    }
    void setReadline(const QString &text) {
        m_readline = QVariant(text).toBool();
    }
    void setSeparator(const QString &text) {
        m_separator = QVariant(text).toBool();
    }
    void setKillold(const QString &text) {
        m_killold = QVariant(text).toBool();
    }
    void setNavigate(const QString &text) {
        m_navigate = QVariant(text).toBool();
    }
    void setFolder(const QString &text) {
        m_folder = QVariant(text).toBool();
    }
    void setTakeall(const QString &text) {
        m_takeall = QVariant(text).toBool();
    }
    void setWork(const QString &work) { m_work = work; }
    void setCodec(const QString &codec) { m_codec = codec; }
    void setRegex(const QString &regex) { m_regex = regex; }
    void setImg(const QString &img) {m_img = img; }
    void setTask(const QStringList &task) { m_task = task; }
    QString work() const { return m_work; }
    QString id() const { return m_id; }
    QString os() const { return m_os; }
    QString menu() const { return m_menu; }
    QString key() const { return m_key; }
    QString cmd() const { return m_cmd; }
    QString func() const { return m_func; }
    QString args() const { return m_args; }
    QString save() const { return m_save; }
    bool isDebug() const { return m_debug; }
    bool isOutput() const { return m_output; }
    bool isReadline() const {return m_readline; }
    bool isSeparator() const { return m_separator; }
    bool isFolder() const { return m_folder; }
    bool isKillOld() const { return m_killold; }
    bool isNavigate() const { return m_navigate; }
    bool isTakeall() const { return m_takeall; }
    QString codec() const { return m_codec; }
    QString regex() const { return m_regex; }
    QString img() const { return m_img; }
    QStringList task() const { return m_task; }
    // Resets the action to its default-constructed flag state and clears
    // the per-action strings.
    // NOTE(review): m_os/m_func/m_work/m_menu are intentionally left
    // untouched here in the original code - confirm before changing.
    void clear() {
        m_id.clear();
        m_cmd.clear();
        m_key.clear();
        m_args.clear();
        m_codec.clear();
        m_regex.clear();
        m_img.clear();
        m_save.clear();
        m_task.clear();
        m_debug = false;
        m_output = false;
        m_readline = false;
        m_separator = false;
        m_killold = false;
        m_navigate = false;  // fix: was omitted, leaving a stale navigate flag after clear()
        m_folder = false;
        m_takeall = false;
    }
    bool isEmpty() {
        return m_id.isEmpty();
    }
    // An action whose id starts with a lowercase letter is not shown in menus.
    bool isHidden() {
        return m_id.isEmpty() || m_id[0].isLower();
    }
protected:
    QString m_id;
    QString m_os;
    QString m_key;
    QString m_cmd;
    QString m_func;
    QString m_args;
    QString m_codec;
    QString m_regex;
    QString m_save;
    QString m_img;
    QString m_work;
    QString m_menu;
    QStringList m_task;
    bool m_debug;
    bool m_output;
    bool m_readline;
    bool m_separator;
    bool m_killold;
    bool m_navigate;
    bool m_folder;
    bool m_takeall;
};
// A <lookup> element from a build file: matches projects by mime type
// and marker file, searching up to `top` parent directories.
class BuildLookup
{
public:
    BuildLookup() : m_top(1)
    {
    }
    void setMimeType(const QString &type) {m_type=type;}
    void setFile(const QString &file) {m_file=file;}
    // Parses the attribute string; non-numeric or empty input keeps the
    // current value (default 1).
    void setTop(const QString &top) {
        if (top.isEmpty()) {
            return;
        }
        bool ok = false;
        int value = top.toInt(&ok);
        if (ok) {
            m_top=value;
        }
    }
    QString mimeType() const {return m_type;}
    QString file() const {return m_file;}
    int top() const {return m_top;}
protected:
    QString m_type;
    QString m_file;
    int m_top;   // number of parent levels to search
};
// A <config> element from a build file: a named id/name/value triple.
class BuildConfig
{
public:
    BuildConfig()
    {
    }
    void setId(const QString &id) { m_id = id; }
    void setName(const QString &name) { m_name = name; }
    void setValue(const QString &value) { m_value = value; }
    QString id() const { return m_id; }
    QString name() const { return m_name; }
    QString value() const { return m_value; }
protected:
    QString m_id;
    QString m_name;
    QString m_value;
};
// A user-editable build variable: id/name/value, an optional shared
// (cross-project) value, plus read-only and shell-escaping flags.
class BuildCustom
{
public:
    BuildCustom() : m_hasShared(false), m_isReadOnly(false), m_isEscaped(false)
    {
    }
    void setId(const QString &id) { m_id = id; }
    void setName(const QString &name) { m_name = name; }
    void setValue(const QString &value) { m_value = value; }
    // Setting a shared value also marks hasShared().
    void setSharedValue(const QString &value) {
        m_hasShared = true;
        m_sharedValue = value;
    }
    // Attribute strings are converted via QVariant's string-to-bool rules.
    void setReadOnly(const QString &value)
    {
        m_isReadOnly = QVariant(value).toBool();
    }
    void setEscaped(const QString &value)
    {
        m_isEscaped = QVariant(value).toBool();
    }
    QString id() const { return m_id; }
    QString name() const { return m_name; }
    QString value() const { return m_value; }
    bool hasShared() const { return m_hasShared; }
    QString sharedValue() const { return m_sharedValue; }
    bool isReadOnly() const { return m_isReadOnly; }
    bool isEscaped() const { return m_isEscaped; }
protected:
    QString m_id;
    QString m_name;
    QString m_value;
    QString m_sharedValue;
    bool m_hasShared;
    bool m_isReadOnly;
    bool m_isEscaped;
};
// A <target> element from a build file: run/debug command, arguments
// and working directory for a built artifact.
class BuildTarget
{
public:
    BuildTarget()
    {
    }
    void setId(const QString &id) { m_id = id; }
    void setCmd(const QString &cmd) { m_cmd = cmd; }
    void setDebug(const QString &debug) { m_debug = debug; }
    void setArgs(const QString &args) { m_args = args; }
    void setWork(const QString &work) { m_work = work; }
    void setBuildArgs(const QString &args) { m_buildArgs = args; }
    QString id() const { return m_id; }
    QString cmd() const { return m_cmd; }
    QString debug() const { return m_debug; }
    QString args() const { return m_args; }
    QString work() const { return m_work; }
    QString buildArgs() const { return m_buildArgs; }
    // A target without an id is considered unset.
    bool isEmpty() {
        return m_id.isEmpty();
    }
protected:
    QString m_id;
    QString m_cmd;
    QString m_debug;     // debug target/command (string form, not a flag)
    QString m_buildArgs;
    QString m_args;
    QString m_work;
};
// Read-only view of one parsed build definition (keyed by mime type):
// its actions, lookups, configs, custom variables and targets.
class IBuild : public QObject
{
    Q_OBJECT
public:
    IBuild(QObject *parent = 0): QObject(parent) {}
    virtual ~IBuild() {}
    // Mime type this definition applies to (e.g. "text/x-gosrc").
    virtual QString mimeType() const = 0;
    virtual QString id() const = 0;
    virtual QString work() const = 0;
    virtual QString lock() const = 0;
    // Element lists parsed from the build file.
    virtual QList<BuildAction*> actionList() const = 0;
    virtual QList<BuildLookup*> lookupList() const = 0;
    virtual QList<BuildConfig*> configList() const = 0;
    virtual QList<BuildCustom*> customList() const = 0;
    virtual QList<BuildTarget*> targetList() const = 0;
    virtual BuildAction *findAction(const QString &name) = 0;
    // QAction objects backing the build menu entries.
    virtual QList<QAction*> actions() = 0;
signals:
    // Emitted when one of this build's actions is triggered.
    void buildAction(LiteApi::IBuild *build, LiteApi::BuildAction *act);
};
// Registry of IBuild definitions and tracker of the active one.
class IBuildManager : public IManager
{
    Q_OBJECT
public:
    IBuildManager(QObject *parent = 0) : IManager(parent) {}
    virtual void addBuild(IBuild *build) = 0;
    virtual void removeBuild(IBuild *build) = 0;
    // Lookup by mime type (e.g. "text/x-gosrc").
    virtual IBuild *findBuild(const QString &mimeType) = 0;
    virtual QList<IBuild*> buildList() const = 0;
    virtual void setCurrentBuild(IBuild *build) = 0;
    virtual IBuild *currentBuild() const = 0;
signals:
    void buildChanged(LiteApi::IBuild*);
};
// Resolved run/debug target information for the current build.
struct TargetInfo {
    QString buildRootPath;   // root directory the build runs in
    QString targetName;      // executable name for "run"
    QString debugName;       // executable name for "debug"
    QString buildArgs;       // extra arguments passed to the build
    QString targetArgs;      // arguments passed to the target on launch
    QString targetWorkDir;   // working directory the target is launched in
};
// Main build service: owns the build manager, expands build-file
// environment variables and executes build actions/commands.
class ILiteBuild : public IObject
{
    Q_OBJECT
public:
    ILiteBuild(QObject *parent) : IObject(parent)
    {
    }
public:
    virtual QString buildTag() const = 0;
    // Environment variables contributed by the current build context.
    virtual QMap<QString,QString> buildEnvMap() const = 0;
    virtual TargetInfo getTargetInfo() = 0;
    virtual IBuildManager *buildManager() const = 0;
    // Expands $(VAR)-style references in `value` for the given build.
    virtual QString envValue(LiteApi::IBuild *build, const QString &value) = 0;
    // Same expansion, evaluated relative to buildFilePath.
    virtual QString buildPathEnvValue(LiteApi::IBuild *build, const QString &buildFilePath, const QString &value) = 0;
    virtual void appendOutput(const QString &str, const QBrush &brush, bool active, bool updateExistsTextColor = true) = 0;
    virtual void execCommand(const QString &cmd, const QString &args, const QString &workDir, bool updateExistsTextColor = true, bool activateOutputCheck = true, bool navigate = true, bool command = true) = 0;
    virtual bool execGoCommand(const QStringList &args, const QString &work, bool waitFinish = true) = 0;
public slots:
    virtual void execBuildAction(LiteApi::IBuild*,LiteApi::BuildAction*) = 0;
};
// Maps a source path to its build directory: a directory maps to
// itself, a file maps to its containing directory.
inline QString sourceBuildFilePath(const QString &filePath)
{
    const QFileInfo fileInfo(filePath);
    return fileInfo.isDir() ? fileInfo.filePath() : fileInfo.path();
}
// Returns the directory of the editor's current file, or an empty
// string when there is no editor or no file path.
inline QString editorBuildFilePath(IEditor *editor)
{
    if (!editor) {
        return QString();
    }
    const QString filePath = editor->filePath();
    if (filePath.isEmpty()) {
        return QString();
    }
    return QFileInfo(filePath).path();
}
// Looks up the build service published as "LiteApi.ILiteBuild";
// returns null when the build plugin is not loaded.
inline ILiteBuild *getLiteBuild(LiteApi::IApplication* app)
{
    ILiteBuild *liteBuild = LiteApi::findExtensionObject<ILiteBuild*>(app, "LiteApi.ILiteBuild");
    return liteBuild;
}
// Returns the Go build definition ("text/x-gosrc"), or null when the
// build service is unavailable.
inline IBuild *getGoBuild(LiteApi::IApplication *app)
{
    ILiteBuild *liteBuild = getLiteBuild(app);
    return liteBuild ? liteBuild->buildManager()->findBuild("text/x-gosrc") : 0;
}
// Extracts the value of option `opt` from a command line `text`.
// Handles "opt=value", "opt value", and quoted values.
// NOTE: for quoted values the returned string INCLUDES the surrounding
// quotes; an unterminated quote yields an empty string.
// NOTE(review): indexOf(opt) also matches `opt` as a substring of a
// longer token - callers seem to pass full option names; confirm.
inline QString parserArgumentValue(const QString &opt, const QString &text)
{
    int pos = text.indexOf(opt);
    if (pos == -1) {
        return QString();
    }
    // Everything after the option name.
    QString value = text.mid(pos+opt.length());
    if (value.startsWith('=')) {
        value = value.mid(1);
    } else if (value.startsWith(' ')) {
        value = value.trimmed();
    }
    if (value.isEmpty()) {
        return QString();
    }
    if (value.startsWith('\'')) {
        // Single-quoted: return up to and including the closing quote.
        int pos = value.indexOf('\'',1);   // shadows the outer pos (kept as-is)
        if (pos != -1) {
            return value.left(pos+1);
        }
    } else if (value.startsWith('\"')) {
        // Double-quoted: same, including both quote characters.
        int pos = value.indexOf('\"',1);
        if (pos != -1) {
            return value.left(pos+1);
        }
    } else {
        // Unquoted: value runs to the next space (or end of string).
        int pos = value.indexOf(' ');
        if (pos != -1) {
            return value.left(pos);
        }
        return value;
    }
    // Unterminated quote.
    return QString();
}
// Reads the value of option `opt` from the build's $(BUILDFLAGS),
// falling back to $(BUILDARGS), both expanded relative to buildFilePath.
// Returns an empty string when the build service or Go build is absent.
inline QString getGoBuildFlagsArgument(LiteApi::IApplication *app, const QString &buildFilePath, const QString &opt)
{
    ILiteBuild *liteBuild = getLiteBuild(app);
    LiteApi::IBuild *goBuild = getGoBuild(app);
    if (liteBuild == 0 || goBuild == 0) {
        return QString();
    }
    const QString flags = liteBuild->buildPathEnvValue(goBuild, buildFilePath, "$(BUILDFLAGS)");
    QString result = parserArgumentValue(opt, flags);
    if (result.isEmpty()) {
        const QString args = liteBuild->buildPathEnvValue(goBuild, buildFilePath, "$(BUILDARGS)");
        result = parserArgumentValue(opt, args);
    }
    return result;
}
// Editor-based convenience overload: resolves the editor's build
// directory and delegates to the path-based overload.
inline QString getGoBuildFlagsArgument(LiteApi::IApplication *app, LiteApi::IEditor *editor, const QString &opt)
{
    if (getLiteBuild(app) == 0) {
        return QString();
    }
    return getGoBuildFlagsArgument(app, editorBuildFilePath(editor), opt);
}
} //namespace LiteApi
#endif //LITEBUILDAPI_H
| 13,096
|
C++
|
.h
| 407
| 27.233415
| 209
| 0.642242
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,059
|
terminalapi.h
|
visualfc_liteide/liteidex/src/api/terminalapi/terminalapi.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2020 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: terminalapi.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef TERMINALAPI_H
#define TERMINALAPI_H
#include "liteapi/liteapi.h"
class QTreeView;
class QLineEdit;
namespace LiteApi {
// Service for opening the system/default terminal emulator.
class ITerminal : public QObject
{
    Q_OBJECT
public:
    ITerminal(QObject *parent = 0) : QObject(parent) {}
    // Opens the default terminal with workDir as its working directory.
    virtual void openDefaultTerminal(const QString &workDir) = 0;
};
// Looks up the terminal service published as "LiteApi.ITerminal";
// returns null when not registered.
inline ITerminal* getTerminalManager(LiteApi::IApplication *app)
{
    ITerminal *terminal = LiteApi::findExtensionObject<ITerminal*>(app, "LiteApi.ITerminal");
    return terminal;
}
} //namespace LiteApi
#endif //TERMINALAPI_H
| 1,583
|
C++
|
.h
| 41
| 37
| 82
| 0.697978
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,060
|
liteutil.h
|
visualfc_liteide/liteidex/src/api/liteapi/liteutil.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2017 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: liteutil.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef LITEUTIL_H
#define LITEUTIL_H
#include "liteapi.h"
namespace LiteApi {
// Stores `value` under `key`, keeping the settings file minimal: a
// value equal to its default `def` is removed instead of written.
inline void updateSetting(QSettings *setting, const QString &key, const QVariant &value, const QVariant &def)
{
    if (value != def) {
        setting->setValue(key, value);
    } else {
        setting->remove(key);
    }
}
// Convenience overload of updateSetting() targeting the application's
// global QSettings store.
inline void updateAppSetting(LiteApi::IApplication *app, const QString &key, const QVariant &value, const QVariant &def)
{
    updateSetting(app->settings(),key,value,def);
}
}
#endif // LITEUTIL_H
| 1,580
|
C++
|
.h
| 40
| 37.525
| 120
| 0.677756
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,061
|
litehtml.h
|
visualfc_liteide/liteidex/src/api/liteapi/litehtml.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: litehtml.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef LITEHTML_H
#define LITEHTML_H
#include <QWidget>
#include <QUrl>
#include <QTextDocument>
#ifndef QT_NO_PRINTER
#include <QPrinter>
#endif
namespace LiteApi {
// Abstraction over an HTML rendering widget (implemented by both
// QTextBrowser- and WebView-based backends - see IHtmlWidgetManager).
class IHtmlWidget : public QObject
{
    Q_OBJECT
public:
    IHtmlWidget(QObject *parent) :QObject(parent){}
    virtual ~IHtmlWidget() {}
public:
    // The actual Qt widget hosting the rendered content.
    virtual QWidget *widget() const = 0;
    // Backend class name, used for factory selection.
    virtual QString className() const = 0;
    // Directories used to resolve relative resources.
    virtual void setSearchPaths(const QStringList &paths) = 0;
    virtual void setHtml(const QString &html, const QUrl &url) = 0;
    virtual QUrl url() const = 0;
    virtual void clear() = 0;
    virtual void scrollToAnchor(const QString &anchor) = 0;
    // Scrollbar access abstracted per orientation.
    virtual void setScrollBarValue(Qt::Orientation orientation, int value) = 0;
    virtual int scrollBarValue(Qt::Orientation orientation) const = 0;
    virtual int scrollBarMinimum(Qt::Orientation orientation) const = 0;
    virtual int scrollBarMaximum(Qt::Orientation orientation) const = 0;
    virtual QString selectedText() const = 0;
    virtual bool findText(const QString & exp, QTextDocument::FindFlags options) = 0;
public slots:
#ifndef QT_NO_PRINTER
    virtual void print(QPrinter *printer) = 0;
#endif
signals:
    void contentsSizeChanged();
    void loadFinished(bool);
    void anchorChanged(const QString & anchor);
    void linkClicked(const QUrl & url);
    void linkHovered(const QUrl & url);
};
// Off-screen HTML document: load markup, then print it or convert it
// to html/plain text - no visible widget involved.
class IHtmlDocument : public QObject
{
    Q_OBJECT
public:
    IHtmlDocument(QObject *parent) : QObject(parent){}
    virtual ~IHtmlDocument() {}
public:
    virtual void setHtml(const QString &html, const QUrl &url) = 0;
public slots:
#ifndef QT_NO_PRINTER
    virtual void print(QPrinter *printer) = 0;
#endif
    virtual QString toHtml () const = 0;
    virtual QString toPlainText () const = 0;
signals:
    void loadFinished(bool);
};
// Factory for one HTML backend: creates widgets and off-screen
// documents, identified by className().
class IHtmlWidgetFactory : public QObject
{
    Q_OBJECT
public:
    IHtmlWidgetFactory(QObject *parent = 0) : QObject(parent) {}
    virtual QString className() const = 0;
    virtual IHtmlWidget *create(QObject *parent) = 0;
    virtual IHtmlDocument *createDocument(QObject *parent) = 0;
};
// Registry of HTML backend factories (QTextBrowser- and QWebView-based)
// with a configurable default backend.
class IHtmlWidgetManager : public QObject
{
    Q_OBJECT
public:
    IHtmlWidgetManager(QObject *parent = 0) : QObject(parent) {}
    virtual QStringList classNameList() const = 0;
    virtual void addFactory(IHtmlWidgetFactory *factory) = 0;
    virtual QList<IHtmlWidgetFactory*> factoryList() const = 0;
    // Selects the backend used by create()/createDocument().
    virtual bool setDefaultClassName(const QString &className) = 0;
    virtual QString defaultClassName() const = 0;
    // Create via the default backend...
    virtual IHtmlWidget *create(QObject *parent) = 0;
    // ...or via an explicitly named one.
    virtual IHtmlWidget *createByName(QObject *parent, const QString &className) = 0;
    virtual IHtmlDocument *createDocument(QObject *parent) = 0;
    virtual IHtmlDocument *createDocumentByName(QObject *parent, const QString &className) = 0;
};
} //namespace LiteApi
#endif // LITEHTML_H
| 4,011
|
C++
|
.h
| 107
| 34.682243
| 95
| 0.719127
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,062
|
liteqt.h
|
visualfc_liteide/liteidex/src/api/liteapi/liteqt.h
|
#ifndef LITEQT_H
#define LITEQT_H

#include <Qt>
#include <QString>

// QString::split flag compatibility: Qt >= 5.14 provides the flags in
// the Qt namespace, older versions in QString. These macros give
// callers a single spelling that works on both.
#if QT_VERSION >= QT_VERSION_CHECK(5, 14, 0)
#define qtKeepEmptyParts Qt::KeepEmptyParts
#define qtSkipEmptyParts Qt::SkipEmptyParts
#else
#define qtKeepEmptyParts QString::KeepEmptyParts
#define qtSkipEmptyParts QString::SkipEmptyParts
#endif

#endif // LITEQT_H
| 334
|
C++
|
.h
| 12
| 26.5
| 48
| 0.820755
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,063
|
liteapi.h
|
visualfc_liteide/liteidex/src/api/liteapi/liteapi.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2019 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: liteapi.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef LITEAPI_H
#define LITEAPI_H
#include "liteqt.h"
#include "liteobj.h"
#include "litehtml.h"
#include <QWidget>
#include <QMenu>
#include <QToolBar>
#include <QPlainTextEdit>
#include <QSettings>
#include <QMainWindow>
#include <QDockWidget>
#include <QFlags>
#include <QUrl>
#include <QDir>
#include <QFileInfo>
#include <QDesktopServices>
#include <QTextCursor>
#include <QAbstractItemModel>
class ColorStyle;
class ColorStyleScheme;
namespace LiteApi {
class IApplication;
class IManager;
class IFile;
class IProject;
class IEditor;
/*
valueForKey
EDITORPATH
EDITORNAME
EDITORDIR
PROJECTPATH
PROJECTNAME
PROJECTDIR
WORKDIR
TARGETPATH
TARGETNAME
TARGETDIR
*/
struct TargetInfo1
{
QString workDir;
QString targetPath;
QString targetName;
QString targetDir;
};
struct ProjectInfo
{
QString projectPath;
QString projectName;
QString projectDir;
};
struct EditorInfo
{
QString editorPath;
QString editorName;
QString editorDir;
};
class IManager : public QObject
{
Q_OBJECT
public:
IManager(QObject *parent = 0) : QObject(parent) {}
virtual ~IManager() {}
virtual bool initWithApp(IApplication *app) {
m_liteApp = app;
return true;
}
virtual IApplication* application() {
return m_liteApp;
}
protected:
IApplication *m_liteApp;
};
class IMimeType
{
public:
virtual ~IMimeType() {}
virtual QString package() const = 0;
virtual QString type() const = 0;
virtual QString scheme() const = 0;
virtual QString comment() const = 0;
virtual QString codec() const = 0;
virtual bool tabToSpace() const = 0;
virtual int tabWidth() const = 0;
virtual QStringList globPatterns() const = 0;
virtual QStringList subClassesOf() const = 0;
virtual void merge(const IMimeType *mimeType) = 0;
virtual void setCustomPatterns(const QStringList &custom) = 0;
virtual QStringList customPatterns() const = 0;
virtual QStringList allPatterns() const = 0;
};
class IMimeTypeManager : public IManager
{
Q_OBJECT
public:
IMimeTypeManager(QObject *parent = 0) : IManager(parent) {}
virtual bool addMimeType(IMimeType *mimeType) = 0;
virtual void removeMimeType(IMimeType *mimeType) = 0;
virtual QList<IMimeType*> mimeTypeList() const= 0;
virtual IMimeType *findMimeType(const QString &type) const = 0;
virtual QString findPackageByMimeType(const QString &type) const = 0;
virtual QString findMimeTypeByFile(const QString &fileName) const = 0;
virtual QString findMimeTypeBySuffix(const QString &suffix) const = 0;
virtual QString findMimeTypeByScheme(const QString &scheme) const = 0;
virtual QStringList findAllFilesByMimeType(const QString &dir, const QString &type, int deep = 0) const = 0;
};
inline QString mimeHead(const QString &mimeType)
{
int find = mimeType.indexOf('/');
if (find == -1) {
return mimeType;
}
return mimeType.left(find);
}
inline bool mimeIsText(const QString &mimeType)
{
return mimeHead(mimeType) == "text";
}
inline bool mimeIsFolder(const QString &mimeType)
{
return mimeHead(mimeType) == "folder";
}
class IFile : public QObject
{
Q_OBJECT
public:
IFile(QObject *parent = 0) : QObject(parent) {}
virtual ~IFile() { }
virtual bool loadText(const QString &filePath, const QString &mimeType, QString &outText) = 0;
virtual bool reloadText(QString &outText) = 0;
virtual bool saveText(const QString &filePath, const QString &text) = 0;
virtual bool isReadOnly() const = 0;
virtual bool isBinary() const = 0;
virtual QString filePath() const = 0;
virtual QString mimeType() const = 0;
};
class IEditorFactory : public QObject
{
Q_OBJECT
public:
IEditorFactory(QObject *parent = 0) : QObject(parent) {}
virtual QStringList mimeTypes() const = 0;
virtual IEditor *open(const QString &fileName, const QString &mimeType) = 0;
virtual IEditor *create(const QString &contents, const QString &mimeType) = 0;
virtual QString id() const = 0;
virtual QString displayName() const = 0;
virtual bool testMimeType(const QString &mimeType) = 0;
};
class IProjectFactory : public QObject
{
Q_OBJECT
public:
IProjectFactory(QObject *parent = 0) : QObject(parent) {}
virtual QStringList mimeTypes() const = 0;
virtual IProject *open(const QString &fileName, const QString &mimeType) = 0;
virtual bool findTargetInfo(const QString &fileName, const QString &mimetype, QMap<QString,QString>& targetInfo) const = 0;
};
enum FILESYSTEM_CONTEXT_FLAG {
FILESYSTEM_ROOT = 0,
FILESYSTEM_ROOTFOLDER,
FILESYSTEM_FOLDER,
FILESYSTEM_FILES
};
class IRecent : public QObject
{
Q_OBJECT
public:
IRecent(QObject *parent = 0) : QObject(parent) {}
virtual QString type() const = 0;
virtual QString displyType() const = 0;
virtual void addRecent(const QString &name, int maxRecent) = 0;
virtual void removeRecent(const QString &name) = 0;
virtual QStringList recentNameList() = 0;
virtual void clearRecentNameList() = 0;
virtual void openRecent(const QString &name) = 0;
};
class ISettingRecent : public IRecent
{
Q_OBJECT
public:
ISettingRecent(QSettings *setting, QObject *parent) : IRecent(parent), m_settings(setting)
{
}
virtual void addRecent(const QString &name, int maxRecent)
{
QString key = recentKey();
QStringList files = m_settings->value(key).toStringList();
files.removeAll(name);
files.prepend(name);
while (files.size() > maxRecent) {
files.removeLast();
}
m_settings->setValue(key, files);
}
virtual void removeRecent(const QString &name)
{
QString key = recentKey();
QStringList values = m_settings->value(key).toStringList();
values.removeAll(name);
m_settings->setValue(key, values);
}
virtual QStringList recentNameList()
{
QString key = recentKey();
return m_settings->value(key).toStringList();
}
virtual void clearRecentNameList()
{
QString key = recentKey();
m_settings->remove(key);
}
protected:
virtual QString recentKey() const
{
return QString("Recent1/%1").arg(type());
}
protected:
QSettings *m_settings;
};
class IRecentManager : public IManager
{
Q_OBJECT
public:
IRecentManager(QObject *parent = 0) : IManager(parent) {}
virtual void registerRecent(IRecent *recent) = 0;
virtual QList<IRecent*> recentList() const = 0;
virtual IRecent *findRecent(const QString &type) const = 0;
virtual QStringList recentTypeList() const = 0;
virtual void addRecent(const QString &name, const QString &type) = 0;
virtual void removeRecent(const QString &name, const QString &type) = 0;
virtual QStringList recentNameList(const QString &type) = 0;
virtual void clearRecentNameList(const QString &type) = 0;
virtual void openRecent(const QString &name, const QString &type) = 0;
virtual void updateRecentMenu(const QString &type) = 0;
signals:
void recentNameListChanged(const QString &type);
};
class IFileManager : public IManager
{
Q_OBJECT
public:
IFileManager(QObject *parent = 0) : IManager(parent) {}
virtual void execFileWizard(const QString &projPath, const QString &filePath, const QString &gopath = QString()) = 0;
virtual bool openFile(const QString &fileName) = 0;
virtual IEditor *openEditor(const QString &fileName, bool bActive = true, bool ignoreNavigationHistory = false) = 0;
virtual IEditor *openEditorByFactory(const QString &fileName, const QString &factoryId, bool bActive = true, bool ignoreNavigationHistory = false) = 0;
virtual IEditor *createEditor(const QString &contents, const QString &_mimeType) = 0;
virtual IEditor *createEditor(const QString &fileName) = 0;
virtual IProject *openProject(const QString &fileName) = 0;
virtual IProject *openProjectScheme(const QString &fileName, const QString &scheme) = 0;
virtual bool findProjectTargetInfo(const QString &fileName, QMap<QString,QString>& targetInfo) const = 0;
//virtual IApplication* openFolderEx(const QString &folder) = 0;
virtual QStringList folderList() const = 0;
virtual void setFolderList(const QStringList &folders) = 0;
virtual void addFolderList(const QString &folders) = 0;
virtual IApplication* openFolderInNewWindow(const QString &folder) = 0;
virtual void emitAboutToShowFolderContextMenu(QMenu *menu, LiteApi::FILESYSTEM_CONTEXT_FLAG flag, const QFileInfo &info, const QString &context) = 0;
signals:
void fileListChanged();
void fileWizardFinished(const QString &type, const QString &scheme, const QString &location);
void aboutToShowFolderContextMenu(QMenu *menu, LiteApi::FILESYSTEM_CONTEXT_FLAG flag, const QFileInfo &info,const QString &context);
public slots:
virtual void newFile() = 0;
virtual void openFiles() = 0;
virtual void openFolder() = 0;
virtual void openEditors() = 0;
virtual void openProjects() = 0;
};
class IEditContext : public QObject
{
Q_OBJECT
public:
IEditContext(QObject *parent) : QObject(parent) {}
virtual QWidget *focusWidget() const = 0;
virtual QMenu *focusMenu() const = 0;
virtual QToolBar *focusToolBar() const = 0;
};
class IView : public IObject
{
Q_OBJECT
public:
IView(QObject *parent = 0) : IObject(parent) {}
virtual QWidget *widget() = 0;
virtual QString name() const = 0;
virtual QIcon icon() const { return QIcon(); }
};
class IEditor : public IView
{
Q_OBJECT
public:
IEditor(QObject *parent = 0) : IView(parent) {}
virtual bool open(const QString &filePath,const QString &mimeType) = 0;
virtual bool reload() = 0;
virtual bool save() = 0;
virtual bool saveAs(const QString &filePath) = 0;
virtual void setReadOnly(bool b) = 0;
virtual bool isReadOnly() const = 0;
virtual bool isModified() const = 0;
virtual QString filePath() const = 0;
virtual QString mimeType() const = 0;
virtual QByteArray saveState() const = 0;
virtual bool restoreState(const QByteArray &array) = 0;
virtual void onActive() = 0;
signals:
void modificationChanged(bool);
void contentsChanged();
void reloaded();
};
struct FindOption {
QString findText;
bool useRegexp;
bool matchWord;
bool matchCase;
bool wrapAround;
bool backWard;
};
class ITextEditor : public IEditor
{
Q_OBJECT
public:
enum PositionOperation {
Current = 1,
EndOfLine = 2,
StartOfLine = 3,
Anchor = 4,
EndOfDoc = 5
};
ITextEditor(QObject *parent = 0) : IEditor(parent) {}
virtual int line() const = 0;
virtual int column() const = 0;
virtual int utf8Position(bool realFile = false, int pos = -1) const = 0;
virtual QByteArray utf8Data() const = 0;
virtual void setLineWrap(bool wrap) = 0;
virtual bool isLineWrap() const = 0;
virtual void gotoLine(int blockNumber, int column, bool center = false, int selection = 0) = 0;
virtual void setFindOption(FindOption *opt) = 0;
virtual int position(PositionOperation posOp = Current, int at = -1) const = 0;
virtual QString textAt(int pos, int length) const = 0;
virtual QRect cursorRect(int pos = -1) const = 0;
virtual QTextCursor textCursor() const = 0;
virtual QTextDocument *document() const = 0;
};
inline ITextEditor *getTextEditor(IEditor *editor)
{
if (editor && editor->extension()) {
return findExtensionObject<ITextEditor*>(editor->extension(),"LiteApi.ITextEditor");
}
return 0;
}
inline QMenu *getMenu(IObject *obj, const QString &id)
{
if (obj && obj->extension()) {
return findExtensionObject<QMenu*>(obj->extension(),QString("LiteApi.Menu.%1").arg(id));
}
return 0;
}
inline IEditContext *getEditContext(IObject *obj)
{
if (obj && obj->extension()) {
return findExtensionObject<IEditContext*>(obj->extension(),"LiteApi.IEditContext");
}
return 0;
}
inline QMenu *getEditMenu(IObject *obj)
{
return getMenu(obj,"Edit");
}
inline QMenu *getContextMenu(IObject *obj)
{
if (obj && obj->extension()) {
return findExtensionObject<QMenu*>(obj->extension(),"LiteApi.ContextMenu");
}
return 0;
}
inline QPlainTextEdit *getPlainTextEdit(IEditor *editor) {
if (editor && editor->extension()) {
return findExtensionObject<QPlainTextEdit*>(editor->extension(),"LiteApi.QPlainTextEdit");
}
return 0;
}
inline QToolBar *getEditToolBar(IEditor *editor) {
if (editor && editor->extension()) {
return findExtensionObject<QToolBar*>(editor->extension(),"LiteApi.QToolBar.Edit");
}
return 0;
}
inline QToolBar *getBuildToolBar(IEditor *editor) {
if (editor && editor->extension()) {
return findExtensionObject<QToolBar*>(editor->extension(),"LiteApi.QToolBar.Build");
}
return 0;
}
class IEditorManager : public IManager
{
Q_OBJECT
public:
IEditorManager(QObject *parent = 0) : IManager(parent) {}
virtual IEditor *openEditor(const QString &fileName, const QString &mimeType) = 0;
virtual IEditor *openEditorByFactory(const QString &fileName, const QString &mimeType, const QString &factoryId) = 0;
virtual void addFactory(IEditorFactory *factory) = 0;
virtual void removeFactory(IEditorFactory *factory) = 0;
virtual QList<IEditorFactory*> factoryList() const = 0;
virtual QStringList mimeTypeList() const = 0;
virtual QWidget *widget() = 0;
virtual IEditor *currentEditor() const = 0;
virtual void setCurrentEditor(IEditor *editor, bool ignoreNavigationHistory = false) = 0;
virtual IEditor *findEditor(const QString &fileName, bool canonical) const = 0;
virtual QList<IEditor*> editorList() const = 0;
virtual QAction *registerBrowser(IEditor *editor) = 0;
virtual void activeBrowser(IEditor *editor) = 0;
virtual void addNavigationHistory(IEditor *editor = 0,const QByteArray &saveState = QByteArray()) = 0;
virtual void cutForwardNavigationHistory() = 0;
virtual void loadColorStyleScheme(const QString &fileName) = 0;
virtual const ColorStyleScheme *colorStyleScheme() const = 0;
virtual void addEditContext(IEditContext *context) = 0;
virtual void removeEditContext(IEditContext *context) = 0;
virtual void updateEditInfo(const QString &info) = 0;
public slots:
virtual bool saveEditor(IEditor *editor = 0, bool emitAboutSave = true) = 0;
virtual bool saveEditorAs(IEditor *editor = 0) = 0;
virtual bool saveAllEditors(bool emitAboutSave = true) = 0;
virtual bool closeEditor(IEditor *editor = 0) = 0;
virtual bool closeAllEditors() = 0;
signals:
void currentEditorChanged(LiteApi::IEditor *editor);
void editorCreated(LiteApi::IEditor *editor);
void editorAboutToClose(LiteApi::IEditor *editor);
void editorAboutToSave(LiteApi::IEditor *editor);
void editorSaved(LiteApi::IEditor *editor);
void editorModifyChanged(LiteApi::IEditor *editor, bool b);
void colorStyleSchemeChanged();
};
class IBrowserEditor : public IEditor
{
Q_OBJECT
public:
IBrowserEditor(QObject *parent = 0) : IEditor(parent) {}
virtual bool open(const QString &/*fileName*/,const QString &/*mimeType*/) { return false; }
virtual bool reload() { return false; }
virtual bool save() { return false; }
virtual bool saveAs(const QString &/*fileName*/){ return false; }
virtual void setReadOnly(bool /*b*/) {}
virtual bool isReadOnly() const { return true; }
virtual bool isModified() const { return false; }
virtual QString filePath() const { return QString(); }
virtual QMap<QString,QString> editorInfo() const { return QMap<QString,QString>(); }
virtual QMap<QString,QString> targetInfo() const { return QMap<QString,QString>(); }
virtual QByteArray saveState() const {return QByteArray(); }
virtual bool restoreState(const QByteArray &) { return false; }
virtual void onActive(){}
};
class IWebKitBrowser : public IBrowserEditor
{
Q_OBJECT
public:
IWebKitBrowser(QObject *parent = 0) : IBrowserEditor(parent) {}
virtual void openUrl(const QUrl &url) = 0;
signals:
void loadFinished(bool);
};
class IProject : public IView
{
Q_OBJECT
public:
virtual QString filePath() const = 0;
virtual QString mimeType() const = 0;
virtual QStringList folderList() const = 0;
virtual QStringList fileNameList() const = 0;
virtual QStringList filePathList() const = 0;
virtual QString fileNameToFullPath(const QString &filePath) = 0;
virtual QMap<QString,QString> targetInfo() const = 0;
virtual void load() = 0;
signals:
void reloaded();
};
class IFileProject : public IProject
{
Q_OBJECT
public:
virtual bool isFolder() const { return false; }
};
class IFolderProject : public IProject
{
Q_OBJECT
public:
virtual bool isFolder() const { return true; }
virtual QStringList folderList() const = 0;
};
class IOption : public IView
{
Q_OBJECT
public:
IOption(QObject *parent = 0) : IView(parent) {}
virtual QString mimeType() const = 0;
virtual void save() = 0;
virtual void load() = 0;
};
class IOptionFactory : public QObject
{
Q_OBJECT
public:
IOptionFactory(QObject *parent = 0) : QObject(parent) {}
virtual QStringList mimeTypes() const = 0;
virtual IOption *create(const QString &mimeType) = 0;
};
class IOptionManager : public IManager
{
Q_OBJECT
public:
IOptionManager(QObject *parent = 0) : IManager(parent) {}
virtual void addFactory(IOptionFactory *factory) = 0;
virtual void removeFactory(IOptionFactory *factory) = 0;
virtual QList<IOptionFactory*> factoryList() const = 0;
virtual void emitApplyOption(const QString &mimetype) = 0;
public slots:
virtual void exec(const QString &mimeType) = 0;
signals:
void applyOption(QString);
};
class IProjectManager : public IManager
{
Q_OBJECT
public:
IProjectManager(QObject *parent = 0) : IManager(parent) {}
virtual IFolderProject *openFolder(const QString &folderPath) = 0;
virtual IProject *openProject(const QString &fileName, const QString &mimeType) = 0;
virtual void addFactory(IProjectFactory *factory) = 0;
virtual void removeFactory(IProjectFactory *factory) = 0;
virtual QList<IProjectFactory*> factoryList() const = 0;
virtual QStringList mimeTypeList() const = 0;
virtual void setCurrentProject(IProject *project) = 0;
virtual IProject *currentProject() const = 0;
virtual QList<IEditor*> editorList(IProject *project) const = 0;
virtual void addImportAction(QAction *act) = 0;
virtual QWidget *widget() = 0;
public slots:
virtual void saveProject(IProject *project = 0) = 0;
virtual void closeProject(IProject *project = 0) = 0;
virtual void openSchemeDialog(const QString &scheme) = 0;
signals:
void currentProjectChanged(LiteApi::IProject *project);
void projectAboutToClose(LiteApi::IProject *project);
};
class IToolWindowManager : public IManager
{
Q_OBJECT
public:
IToolWindowManager(QObject *parent = 0) : IManager(parent) {}
virtual QAction *addToolWindow(Qt::DockWidgetArea area, QWidget *widget, const QString &id, const QString &title, bool split,
QList<QAction*> widgetActions = QList<QAction*>(),
QList<QWidget*> widgetList = QList<QWidget*>() ) = 0;
virtual void moveToolWindow(Qt::DockWidgetArea from, Qt::DockWidgetArea to,QAction *action, bool split) = 0;
virtual QAction *findToolWindow(QWidget *widget) = 0;
virtual void removeToolWindow(QAction *action) = 0;
virtual void removeToolWindow(QWidget *widget) = 0;
};
class IDockManager : public IManager
{
Q_OBJECT
public:
IDockManager(QObject *parent = 0) : IManager(parent) {}
virtual QWidget *widget() = 0;
virtual QDockWidget *addDock(QWidget *widget,
const QString &title,
Qt::DockWidgetArea ares = Qt::LeftDockWidgetArea,
Qt::DockWidgetAreas alowedAreas = Qt::LeftDockWidgetArea|Qt::RightDockWidgetArea,
QDockWidget::DockWidgetFeatures features = QDockWidget::AllDockWidgetFeatures) = 0;
virtual void removeDock(QWidget *widget) = 0;
virtual void showDock(QWidget *widget) = 0;
virtual void hideDock(QWidget *widget) = 0;
virtual QDockWidget *dockWidget(QWidget *widget) = 0;
};
enum VIEWMENU_ACTION_POS
{
ViewMenuToolBarPos = 1,
ViewMenuToolWindowPos,
ViewMenuBrowserPos,
ViewMenuLastPos
};
struct ActionInfo {
QString label;
QString defks;
QString ks;
bool standard;
QList<QKeySequence> keys;
QAction *action;
};
class IActionContext {
public:
virtual ~IActionContext() {}
virtual QString contextName() const = 0;
virtual void regAction(QAction *act, const QString &id, const QString &defks, bool standard = false) = 0;
virtual void regAction(QAction *act, const QString &id, const QKeySequence::StandardKey &def) = 0;
virtual QStringList actionKeys() const = 0;
virtual ActionInfo *actionInfo(const QString &id) const = 0;
virtual void setActionShortcuts(const QString &id, const QString &shortcuts) = 0;
};
class IActionManager : public IManager
{
Q_OBJECT
public:
IActionManager(QObject *parent = 0) : IManager(parent) {}
virtual QMenu *insertMenu(const QString &id, const QString &title, const QString &idBefore = QString()) = 0;
virtual QMenu *loadMenu(const QString &id) = 0;
virtual void removeMenu(QMenu *menu) = 0;
virtual QList<QString> menuList() const = 0;
virtual QToolBar *insertToolBar(const QString &id, const QString &title, const QString &before = QString()) = 0;
virtual void insertToolBar(QToolBar *toolBar,const QString &before = QString()) = 0;
virtual QToolBar *loadToolBar(const QString &id) = 0;
virtual void removeToolBar(QToolBar* toolBar) = 0;
virtual QList<QString> toolBarList() const = 0;
virtual void insertViewMenu(VIEWMENU_ACTION_POS pos, QAction *act) = 0;
virtual void setViewMenuSeparator(const QString &sepid, bool group = false) = 0;
virtual void insertViewMenuAction(QAction *act, const QString &sepid) = 0;
virtual bool insertMenuActions(const QString &idMenu, const QString &idBeforeSep, bool newGroup, QList<QAction*> &actions) = 0;
virtual IActionContext *getActionContext(QObject *obj, const QString &name) = 0;
virtual QStringList actionKeys() const = 0;
virtual ActionInfo *actionInfo(const QString &id) const = 0;
virtual void setActionShourtcuts(const QString &id, const QString &shortcuts) = 0;
virtual QStringList actionContextNameList() const = 0;
virtual IActionContext *actionContextForName(const QString &name) = 0;
};
class IGoProxy : public QObject
{
Q_OBJECT
public:
IGoProxy(QObject *parent) : QObject(parent) {}
virtual bool isValid() const = 0;
virtual bool isRunning() const = 0;
virtual QByteArray commandId() const = 0;
virtual void writeStdin(const QByteArray &data) = 0;
signals:
void started();
void stdoutput(const QByteArray &data);
void stderror(const QByteArray &data);
void finished(int code, const QByteArray &msg);
public slots:
virtual void call(const QByteArray &id, const QByteArray &args = QByteArray()) = 0;
};
class IPlugin;
class IApplication : public IObject
{
Q_OBJECT
public:
virtual ~IApplication() {}
virtual IApplication *newInstance(const QString &session) = 0;
virtual QList<IApplication*> instanceList() const = 0;
virtual bool hasGoProxy() const = 0;
virtual IGoProxy *createGoProxy(QObject *parent) = 0;
virtual IProjectManager *projectManager() = 0;
virtual IEditorManager *editorManager() = 0;
virtual IFileManager *fileManager() = 0;
virtual IActionManager *actionManager() = 0;
virtual IMimeTypeManager *mimeTypeManager() = 0;
virtual IOptionManager *optionManager() = 0;
virtual IToolWindowManager *toolWindowManager() = 0;
virtual IHtmlWidgetManager *htmlWidgetManager() = 0;
virtual IRecentManager *recentManager() = 0;
virtual QMainWindow *mainWindow() const = 0;
virtual QSettings *settings() = 0;
virtual QMap<QString,QVariant> &globalCookie() = 0; //global cookie
virtual QString rootPath() const = 0;
virtual QString applicationPath() const = 0;
virtual QString toolPath() const = 0;
virtual QString resourcePath() const = 0;
virtual QString pluginPath() const = 0;
virtual QString storagePath() const = 0;
virtual QString ideVersion() const = 0;
virtual QString ideFullName() const = 0;
virtual QString ideName() const = 0;
virtual QString ideCopyright() const = 0;
virtual QList<IPlugin*> pluginList() const = 0;
virtual void loadSession(const QString &sessioin) = 0;
virtual void saveSession(const QString &sessioin) = 0;
virtual QStringList sessionList() const = 0;
virtual QString currentSession() const = 0;
virtual void loadState() = 0;
virtual void saveState() = 0;
virtual void appendLog(const QString &model, const QString &log, bool error = false) = 0;
virtual void sendBroadcast(const QString &module, const QString &id, const QVariant ¶m = QVariant()) = 0;
signals:
void loaded();
void aboutToQuit();
void key_escape();
void broadcast(QString,QString,QVariant);
void sessionListChanged();
};
class PluginInfo
{
public:
PluginInfo() : m_mustLoad(false)
{}
virtual ~PluginInfo() {}
QString author() const { return m_author; }
QString info() const { return m_info; }
QString id() const { return m_id; }
QString name() const { return m_name; }
QString ver() const { return m_ver; }
QStringList dependList() const { return m_dependList; }
QString filePath() const { return m_filePath; }
bool isMustLoad() const { return m_mustLoad; }
void setAuthor(const QString &author) { m_author = author; }
void setInfo(const QString &info) { m_info = info; }
void setId(const QString &id) { m_id = id.toLower(); }
void setName(const QString &name) { m_name = name; }
void setVer(const QString &ver) { m_ver = ver; }
void setFilePath(const QString &path) { m_filePath = path; }
void setDependList(const QStringList &dependList) { m_dependList = dependList; }
void appendDepend(const QString &depend) { m_dependList.append(depend); }
void setMustLoad(bool b) { m_mustLoad = b; }
protected:
bool m_mustLoad;
QString m_author;
QString m_info;
QString m_id;
QString m_name;
QString m_filePath;
QString m_ver;
QStringList m_dependList;
};
class IPlugin : public IObject
{
Q_OBJECT
public:
virtual bool load(LiteApi::IApplication *app) = 0;
};
class IPluginFactory : public QObject
{
Q_OBJECT
public:
virtual ~IPluginFactory() {}
virtual QString id() const = 0;
virtual PluginInfo *info() const = 0;
virtual QStringList dependPluginList() const = 0;
virtual void setFilePath(const QString &path) = 0;
virtual QString filePath() const = 0;
virtual IPlugin *createPlugin() = 0;
};
class IPluginFactoryImpl : public IPluginFactory
{
Q_OBJECT
public:
IPluginFactoryImpl() : m_info(new PluginInfo)
{
}
virtual ~IPluginFactoryImpl()
{
delete m_info;
}
virtual QString id() const
{
return m_info->id();
}
virtual PluginInfo *info() const
{
return m_info;
}
virtual QStringList dependPluginList() const{
return m_info->dependList();
}
virtual void setFilePath(const QString &path)
{
m_info->setFilePath(path);
}
virtual QString filePath() const
{
return m_info->filePath();
}
protected:
PluginInfo *m_info;
};
template <typename T>
class PluginFactoryT : public IPluginFactoryImpl
{
public:
virtual IPlugin *createPlugin()
{
return new T;
}
};
class IAppIdleTimer : public QObject
{
Q_OBJECT
signals:
void appIdle(int sec);
public:
virtual void resetTimer() = 0;
};
inline IAppIdleTimer *GetAppIdleTimer(LiteApi::IApplication *app)
{
return static_cast<IAppIdleTimer*>(app->extension()->findObject("LiteApi.IAppIdleTimer"));
}
inline bool gotoLine(IApplication *app, const QString &fileName, int line, int col, bool forceCenter, bool saveHistory) {
if (saveHistory) {
app->editorManager()->addNavigationHistory();
}
IEditor *cur = app->editorManager()->currentEditor();
IEditor *edit = app->fileManager()->openEditor(fileName);
ITextEditor *textEdit = getTextEditor(edit);
if (textEdit) {
if (cur == edit) {
textEdit->gotoLine(line,col,forceCenter);
} else {
textEdit->gotoLine(line,col,true);
}
return true;
}
return false;
}
inline QSize getToolBarIconSize(LiteApi::IApplication *app) {
int v = app->settings()->value("General/ToolBarIconSize",0).toInt();
switch (v) {
case 0:
return QSize(16,16);
case 1:
return QSize(18,18);
case 2:
return QSize(20,20);
case 3:
return QSize(22,22);
case 4:
return QSize(24,24);
}
return QSize(16,16);
}
inline IWebKitBrowser *getWebKitBrowser(LiteApi::IApplication *app)
{
return static_cast<IWebKitBrowser*>(app->extension()->findObject("LiteApp.IWebKitBrowser"));
}
inline QString getGotools(LiteApi::IApplication *app)
{
#ifdef Q_OS_WIN
return app->toolPath()+"/gotools.exe";
#else
return app->toolPath()+"/gotools";
#endif
}
inline QString findPackageByMimeType(LiteApi::IApplication *app, const QString mimeType)
{
return app->mimeTypeManager()->findPackageByMimeType(mimeType);
}
} //namespace LiteApi
Q_DECLARE_INTERFACE(LiteApi::IPluginFactory,"LiteApi.IPluginFactory.X37")
#endif //LITEAPI_H
| 31,105
|
C++
|
.h
| 868
| 31.667051
| 155
| 0.708144
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,064
|
liteids.h
|
visualfc_liteide/liteidex/src/api/liteapi/liteids.h
|
/**************************************************************************
** This file is part of LiteIDE
**
** Copyright (c) 2011-2017 LiteIDE. All rights reserved.
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** In addition, as a special exception, that plugins developed for LiteIDE,
** are allowed to remain closed sourced and can be distributed under any license .
** These rights are included in the file LGPL_EXCEPTION.txt in this package.
**
**************************************************************************/
// Module: liteids.h
// Creator: visualfc <visualfc@gmail.com>
#ifndef LITEIDS_H
#define LITEIDS_H
#define ID_MENU_FILE "menu/file"
#define ID_MENU_RECENT "menu/recent"
#define ID_MENU_VIEW "menu/view"
#define ID_MENU_EDIT "menu/edit"
#define ID_MENU_FIND "menu/find"
#define ID_MENU_TOOLS "menu/tools"
#define ID_MENU_BUILD "menu/build"
#define ID_MENU_DEBUG "menu/debug"
#define ID_MENU_HELP "menu/help"
#define ID_TOOLBAR_STD "toolbar/std"
#define ID_TOOLBAR_ENV "toolbar/env"
#define ID_TOOLBAR_BUILD "toolbar/build"
#endif // LITEIDS_H
| 1,593
|
C++
|
.h
| 37
| 40.837838
| 83
| 0.675065
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,067
|
fullwidth.inc
|
visualfc_liteide/liteidex/src/3rdparty/libvterm/src/fullwidth.inc
|
{ 0x1100, 0x115f },
{ 0x231a, 0x231b },
{ 0x2329, 0x232a },
{ 0x23e9, 0x23ec },
{ 0x23f0, 0x23f0 },
{ 0x23f3, 0x23f3 },
{ 0x25fd, 0x25fe },
{ 0x2614, 0x2615 },
{ 0x2648, 0x2653 },
{ 0x267f, 0x267f },
{ 0x2693, 0x2693 },
{ 0x26a1, 0x26a1 },
{ 0x26aa, 0x26ab },
{ 0x26bd, 0x26be },
{ 0x26c4, 0x26c5 },
{ 0x26ce, 0x26ce },
{ 0x26d4, 0x26d4 },
{ 0x26ea, 0x26ea },
{ 0x26f2, 0x26f3 },
{ 0x26f5, 0x26f5 },
{ 0x26fa, 0x26fa },
{ 0x26fd, 0x26fd },
{ 0x2705, 0x2705 },
{ 0x270a, 0x270b },
{ 0x2728, 0x2728 },
{ 0x274c, 0x274c },
{ 0x274e, 0x274e },
{ 0x2753, 0x2755 },
{ 0x2757, 0x2757 },
{ 0x2795, 0x2797 },
{ 0x27b0, 0x27b0 },
{ 0x27bf, 0x27bf },
{ 0x2b1b, 0x2b1c },
{ 0x2b50, 0x2b50 },
{ 0x2b55, 0x2b55 },
{ 0x2e80, 0x2e99 },
{ 0x2e9b, 0x2ef3 },
{ 0x2f00, 0x2fd5 },
{ 0x2ff0, 0x2ffb },
{ 0x3000, 0x303e },
{ 0x3041, 0x3096 },
{ 0x3099, 0x30ff },
{ 0x3105, 0x312d },
{ 0x3131, 0x318e },
{ 0x3190, 0x31ba },
{ 0x31c0, 0x31e3 },
{ 0x31f0, 0x321e },
{ 0x3220, 0x3247 },
{ 0x3250, 0x32fe },
{ 0x3300, 0x4dbf },
{ 0x4e00, 0xa48c },
{ 0xa490, 0xa4c6 },
{ 0xa960, 0xa97c },
{ 0xac00, 0xd7a3 },
{ 0xf900, 0xfaff },
{ 0xfe10, 0xfe19 },
{ 0xfe30, 0xfe52 },
{ 0xfe54, 0xfe66 },
{ 0xfe68, 0xfe6b },
{ 0xff01, 0xff60 },
{ 0xffe0, 0xffe6 },
{ 0x16fe0, 0x16fe0 },
{ 0x17000, 0x187ec },
{ 0x18800, 0x18af2 },
{ 0x1b000, 0x1b001 },
{ 0x1f004, 0x1f004 },
{ 0x1f0cf, 0x1f0cf },
{ 0x1f18e, 0x1f18e },
{ 0x1f191, 0x1f19a },
{ 0x1f200, 0x1f202 },
{ 0x1f210, 0x1f23b },
{ 0x1f240, 0x1f248 },
{ 0x1f250, 0x1f251 },
{ 0x1f300, 0x1f320 },
{ 0x1f32d, 0x1f335 },
{ 0x1f337, 0x1f37c },
{ 0x1f37e, 0x1f393 },
{ 0x1f3a0, 0x1f3ca },
{ 0x1f3cf, 0x1f3d3 },
{ 0x1f3e0, 0x1f3f0 },
{ 0x1f3f4, 0x1f3f4 },
{ 0x1f3f8, 0x1f43e },
{ 0x1f440, 0x1f440 },
{ 0x1f442, 0x1f4fc },
{ 0x1f4ff, 0x1f53d },
{ 0x1f54b, 0x1f54e },
{ 0x1f550, 0x1f567 },
{ 0x1f57a, 0x1f57a },
{ 0x1f595, 0x1f596 },
{ 0x1f5a4, 0x1f5a4 },
{ 0x1f5fb, 0x1f64f },
{ 0x1f680, 0x1f6c5 },
{ 0x1f6cc, 0x1f6cc },
{ 0x1f6d0, 0x1f6d2 },
{ 0x1f6eb, 0x1f6ec },
{ 0x1f6f4, 0x1f6f6 },
{ 0x1f910, 0x1f91e },
{ 0x1f920, 0x1f927 },
{ 0x1f930, 0x1f930 },
{ 0x1f933, 0x1f93e },
{ 0x1f940, 0x1f94b },
{ 0x1f950, 0x1f95e },
{ 0x1f980, 0x1f991 },
{ 0x1f9c0, 0x1f9c0 },
| 2,374
|
C++
|
.inc
| 104
| 19.826923
| 23
| 0.587665
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,068
|
DECdrawing.inc
|
visualfc_liteide/liteidex/src/3rdparty/libvterm/src/encoding/DECdrawing.inc
|
/* DEC Special Graphics ("DECdrawing") character set: maps the byte range
 * 0x60-0x7e onto Unicode box-drawing / symbol codepoints (e.g. 0x71 -> U+2500
 * BOX DRAWINGS LIGHT HORIZONTAL). Bytes without an entry decode via the
 * shared table-lookup decoder referenced below. */
static const struct StaticTableEncoding encoding_DECdrawing = {
  { .decode = &decode_table },
  {
    [0x60] = 0x25C6,
    [0x61] = 0x2592,
    [0x62] = 0x2409,
    [0x63] = 0x240C,
    [0x64] = 0x240D,
    [0x65] = 0x240A,
    [0x66] = 0x00B0,
    [0x67] = 0x00B1,
    [0x68] = 0x2424,
    [0x69] = 0x240B,
    [0x6a] = 0x2518,
    [0x6b] = 0x2510,
    [0x6c] = 0x250C,
    [0x6d] = 0x2514,
    [0x6e] = 0x253C,
    [0x6f] = 0x23BA,
    [0x70] = 0x23BB,
    [0x71] = 0x2500,
    [0x72] = 0x23BC,
    [0x73] = 0x23BD,
    [0x74] = 0x251C,
    [0x75] = 0x2524,
    [0x76] = 0x2534,
    [0x77] = 0x252C,
    [0x78] = 0x2502,
    [0x79] = 0x2A7D,
    [0x7a] = 0x2A7E,
    [0x7b] = 0x03C0,
    [0x7c] = 0x2260,
    [0x7d] = 0x00A3,
    [0x7e] = 0x00B7,
  }
};
| 757
|
C++
|
.inc
| 36
| 16.416667
| 63
| 0.528433
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,069
|
uk.inc
|
visualfc_liteide/liteidex/src/3rdparty/libvterm/src/encoding/uk.inc
|
/* UK national replacement character set: identical to ASCII except that
 * 0x23 ('#') decodes to U+00A3 (POUND SIGN). */
static const struct StaticTableEncoding encoding_uk = {
  { .decode = &decode_table },
  {
    [0x23] = 0x00a3,
  }
};
| 119
|
C++
|
.inc
| 6
| 17.166667
| 55
| 0.646018
|
visualfc/liteide
| 7,563
| 970
| 395
|
LGPL-2.1
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,072
|
LAVFVideoHelper.cpp
|
Nevcairiel_LAVFilters/demuxer/Demuxers/LAVFVideoHelper.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Contributions by Ti-BEN from the XBMC DSPlayer Project, also under GPLv2
*/
#include "stdafx.h"
#include "LAVFUtils.h"
#include "LAVFVideoHelper.h"
#include "moreuuids.h"
#include "BaseDemuxer.h"
#include "ExtradataParser.h"
#include "H264Nalu.h"
// 250fps is the highest we accept as "sane"
#define MIN_TIME_PER_FRAME 40000
// 8fps is the lowest that is "sane" in our definition
#define MAX_TIME_PER_FRAME 1250000
CLAVFVideoHelper g_VideoHelper;
// Map codec ids to media subtypes
// clang-format off
// Static codec-id -> DirectShow mapping consumed by initVideoType():
// optional media subtype, optional replacement FOURCC codec tag (0 keeps
// the demuxer-supplied tag), and optional format-block GUID (nullptr keeps
// the FORMAT_VideoInfo default).
static FormatMapping video_map[] = {
  { AV_CODEC_ID_H263, &MEDIASUBTYPE_H263, 0, nullptr },
  { AV_CODEC_ID_H263I, &MEDIASUBTYPE_I263, 0, nullptr },
  { AV_CODEC_ID_H264, &MEDIASUBTYPE_AVC1, 0, &FORMAT_MPEG2Video },
  { AV_CODEC_ID_HEVC, &MEDIASUBTYPE_HEVC, 0, &FORMAT_MPEG2Video },
  { AV_CODEC_ID_MPEG1VIDEO, &MEDIASUBTYPE_MPEG1Payload, 0, &FORMAT_MPEGVideo },
  { AV_CODEC_ID_MPEG2VIDEO, &MEDIASUBTYPE_MPEG2_VIDEO, 0, &FORMAT_MPEG2Video },
  { AV_CODEC_ID_RV10, &MEDIASUBTYPE_RV10, MKTAG('R','V','1','0'), &FORMAT_VideoInfo2 },
  { AV_CODEC_ID_RV20, &MEDIASUBTYPE_RV20, MKTAG('R','V','2','0'), &FORMAT_VideoInfo2 },
  { AV_CODEC_ID_RV30, &MEDIASUBTYPE_RV30, MKTAG('R','V','3','0'), &FORMAT_VideoInfo2 },
  { AV_CODEC_ID_RV40, &MEDIASUBTYPE_RV40, MKTAG('R','V','4','0'), &FORMAT_VideoInfo2 },
  { AV_CODEC_ID_AMV, &MEDIASUBTYPE_AMVV, MKTAG('A','M','V','V'), nullptr },
  { AV_CODEC_ID_TIFF, &MEDIASUBTYPE_TIFF, MKTAG('T','I','F','F'), nullptr },
  { AV_CODEC_ID_PNG, &MEDIASUBTYPE_PNG, MKTAG('P','N','G',' '), nullptr },
  { AV_CODEC_ID_BMP, &MEDIASUBTYPE_BMP, MKTAG('B','M','P',' '), nullptr },
  { AV_CODEC_ID_GIF, &MEDIASUBTYPE_GIF, MKTAG('G','I','F',' '), nullptr },
  { AV_CODEC_ID_TARGA, &MEDIASUBTYPE_TGA, MKTAG('T','G','A',' '), nullptr },
  { AV_CODEC_ID_VP8, &MEDIASUBTYPE_VP80, MKTAG('V','P','8','0'), &FORMAT_VideoInfo2 },
  { AV_CODEC_ID_VP9, &MEDIASUBTYPE_VP90, MKTAG('V','P','9','0'), &FORMAT_VideoInfo2 },
  { AV_CODEC_ID_AV1, &MEDIASUBTYPE_AV01, MKTAG('A','V','0','1'), &FORMAT_VideoInfo2 },
  { AV_CODEC_ID_CFHD, &MEDIASUBTYPE_CFHD, MKTAG('C','F','H','D'), &FORMAT_VideoInfo2 },
  { AV_CODEC_ID_VVC, &MEDIASUBTYPE_VVC1, MKTAG('V','V','C','1'), &FORMAT_MPEG2Video },
};
// clang-format on
CMediaType CLAVFVideoHelper::initVideoType(AVCodecID codecId, unsigned int &codecTag, std::string container)
{
    // Build the DirectShow media type for a video stream. Defaults to a
    // FOURCC subtype derived from the codec tag and FORMAT_VideoInfo, then
    // applies the static table and codec/container-specific adjustments.
    // `codecTag` is in-out: some codecs replace the demuxer-supplied tag.
    CMediaType mediaType;
    mediaType.InitMediaType();
    mediaType.majortype = MEDIATYPE_Video;
    mediaType.subtype = FOURCCMap(codecTag);
    mediaType.formattype = FORMAT_VideoInfo; // default value

    // Apply the first matching entry from the static mapping table.
    for (const FormatMapping &entry : video_map)
    {
        if (entry.codec != codecId)
            continue;
        if (entry.subtype)
            mediaType.subtype = *entry.subtype;
        if (entry.codecTag)
            codecTag = entry.codecTag;
        if (entry.format)
            mediaType.formattype = *entry.format;
        break;
    }

    // Adjustments that depend on the container or the incoming codec tag.
    switch (codecId)
    {
    // All these codecs should use VideoInfo2
    case AV_CODEC_ID_ASV1:
    case AV_CODEC_ID_ASV2:
    case AV_CODEC_ID_FLV1:
    case AV_CODEC_ID_HUFFYUV:
    case AV_CODEC_ID_WMV3:
        mediaType.formattype = FORMAT_VideoInfo2;
        break;
    case AV_CODEC_ID_MPEG4:
        if (container == "mp4")
        {
            mediaType.formattype = FORMAT_MPEG2Video;
        }
        else if (container == "mpegts")
        {
            mediaType.formattype = FORMAT_VideoInfo2;
            mediaType.subtype = MEDIASUBTYPE_MP4V;
        }
        else
        {
            mediaType.formattype = FORMAT_VideoInfo2;
        }
        break;
    case AV_CODEC_ID_VC1:
        // Anything that is not WMVA advanced-profile gets the WVC1 tag.
        if (codecTag != MKTAG('W', 'M', 'V', 'A'))
            codecTag = MKTAG('W', 'V', 'C', '1');
        mediaType.formattype = FORMAT_VideoInfo2;
        mediaType.subtype = FOURCCMap(codecTag);
        break;
    case AV_CODEC_ID_DVVIDEO:
        if (codecTag == 0)
            mediaType.subtype = MEDIASUBTYPE_DVCP;
        break;
    }
    return mediaType;
}
// Repack the SPS/PPS parameter sets of an avcC extradata record into `dst`
// as a flat sequence of (16-bit big-endian length + payload) entries.
// Returns the number of bytes written.
// NOTE(review): assumes `dst` holds at least `extralen` bytes -- dst_end is
// derived from the same length as src_end; confirm at the call sites.
DWORD avc_quant(BYTE *src, BYTE *dst, int extralen)
{
    DWORD cb = 0;
    BYTE *src_end = (BYTE *)src + extralen;
    BYTE *dst_end = (BYTE *)dst + extralen;
    // Skip the 5-byte avcC header (version, profile, compat, level, nal size)
    src += 5;
    // Two runs, for sps and pps
    for (int i = 0; i < 2; i++)
    {
        // Low 5 bits of the count byte = number of parameter sets in this run
        for (int n = *(src++) & 0x1f; n > 0; n--)
        {
            // Entry size: 16-bit big-endian payload length plus the 2 length bytes
            unsigned len = (((unsigned)src[0] << 8) | src[1]) + 2;
            if (src + len > src_end || dst + len > dst_end)
            {
                // Malformed extradata: entry would overrun a buffer; stop this run
                ASSERT(0);
                break;
            }
            memcpy(dst, src, len);
            src += len;
            dst += len;
            cb += len;
        }
    }
    return cb;
}
// Convert Annex-B formatted H.264 extradata into length-prefixed form:
// every SPS/PPS NALU is written to `dst` as a 16-bit big-endian length
// followed by the NALU payload. Returns the number of bytes written.
size_t avc_parse_annexb(BYTE *extra, int extrasize, BYTE *dst)
{
    size_t written = 0;
    CH264Nalu parser;
    parser.SetBuffer(extra, extrasize, 0);
    while (parser.ReadNext())
    {
        const auto naluType = parser.GetType();
        if (naluType != NALU_TYPE_SPS && naluType != NALU_TYPE_PPS)
            continue; // only parameter sets are carried over
        const size_t naluLen = parser.GetDataLength();
        AV_WB16(dst + written, (uint16_t)naluLen);
        written += 2;
        memcpy(dst + written, parser.GetDataBuffer(), naluLen);
        written += naluLen;
    }
    return written;
}
// Build a VIDEOINFOHEADER for the stream, with the codec extradata appended
// directly after the BITMAPINFOHEADER. *size receives the total allocation
// size. Returns nullptr on allocation failure; caller frees with CoTaskMemFree.
// Fix: removed the unused `desc`/`fields` locals (computed but never read).
VIDEOINFOHEADER *CLAVFVideoHelper::CreateVIH(const AVStream *avstream, ULONG *size, std::string container)
{
    VIDEOINFOHEADER *pvi =
        (VIDEOINFOHEADER *)CoTaskMemAlloc(ULONG(sizeof(VIDEOINFOHEADER) + avstream->codecpar->extradata_size));
    if (!pvi)
        return nullptr;
    memset(pvi, 0, sizeof(VIDEOINFOHEADER));

    // Gather the three candidate frame durations libavformat exposes
    // (real rate, average rate, codec-reported rate), in 100ns units.
    REFERENCE_TIME r_avg = 0, avg_avg = 0, codec_avg = 0;
    if (avstream->r_frame_rate.den > 0 && avstream->r_frame_rate.num > 0)
    {
        r_avg = av_rescale(DSHOW_TIME_BASE, avstream->r_frame_rate.den, avstream->r_frame_rate.num);
    }
    if (avstream->avg_frame_rate.den > 0 && avstream->avg_frame_rate.num > 0)
    {
        avg_avg = av_rescale(DSHOW_TIME_BASE, avstream->avg_frame_rate.den, avstream->avg_frame_rate.num);
    }
    if (avstream->codecpar->framerate.den > 0 && avstream->codecpar->framerate.num > 0)
    {
        codec_avg = av_rescale(DSHOW_TIME_BASE, avstream->codecpar->framerate.den, avstream->codecpar->framerate.num);
    }
    DbgLog((LOG_TRACE, 10, L"CreateVIH: r_avg: %I64d, avg_avg: %I64d, tb_avg: %I64d", r_avg, avg_avg, codec_avg));

    // Pick the first candidate inside the sane range (8..250 fps), preferring
    // the more accurate r_avg when it agrees with the average within 1ms.
    if (avg_avg >= MIN_TIME_PER_FRAME && avg_avg <= MAX_TIME_PER_FRAME)
    {
        // prefer the more accurate r_avg when its close to the average
        if (abs(r_avg - avg_avg) < 10000)
            pvi->AvgTimePerFrame = r_avg;
        else
            pvi->AvgTimePerFrame = avg_avg;
    }
    else if (r_avg >= MIN_TIME_PER_FRAME && r_avg <= MAX_TIME_PER_FRAME)
        pvi->AvgTimePerFrame = r_avg;
    else if (codec_avg >= MIN_TIME_PER_FRAME && codec_avg <= MAX_TIME_PER_FRAME)
        pvi->AvgTimePerFrame = codec_avg;
    else
        pvi->AvgTimePerFrame = r_avg;

    // If the real rate is ~half or ~double the codec rate (field rate vs
    // frame rate confusion in mkv/mp4 H.264/MPEG-2), trust the codec rate.
    if ((container == "matroska" || container == "mp4") && r_avg && codec_avg &&
        (avstream->codecpar->codec_id == AV_CODEC_ID_H264 || avstream->codecpar->codec_id == AV_CODEC_ID_MPEG2VIDEO))
    {
        float factor = (float)r_avg / (float)codec_avg;
        if ((factor > 0.4 && factor < 0.6) || (factor > 1.9 && factor < 2.1))
        {
            pvi->AvgTimePerFrame = codec_avg;
        }
    }

    pvi->dwBitErrorRate = 0;
    pvi->dwBitRate = (DWORD)avstream->codecpar->bit_rate;
    RECT empty_tagrect = {0, 0, 0, 0};
    pvi->rcSource = empty_tagrect; // Some codecs like wmv are setting that value to the video current value
    pvi->rcTarget = empty_tagrect;
    pvi->rcTarget.right = pvi->rcSource.right = avstream->codecpar->width;
    pvi->rcTarget.bottom = pvi->rcSource.bottom = avstream->codecpar->height;

    // Append extradata right behind the BITMAPINFOHEADER and account for it
    // in biSize, as DirectShow convention requires.
    memcpy((BYTE *)&pvi->bmiHeader + sizeof(BITMAPINFOHEADER), avstream->codecpar->extradata,
           avstream->codecpar->extradata_size);
    pvi->bmiHeader.biSize = ULONG(sizeof(BITMAPINFOHEADER) + avstream->codecpar->extradata_size);
    pvi->bmiHeader.biWidth = avstream->codecpar->width;
    pvi->bmiHeader.biHeight = avstream->codecpar->height;
    pvi->bmiHeader.biBitCount = avstream->codecpar->bits_per_coded_sample;
    // Validate biBitCount is set to something useful
    if ((pvi->bmiHeader.biBitCount == 0 || avstream->codecpar->codec_id == AV_CODEC_ID_RAWVIDEO) &&
        avstream->codecpar->format != AV_PIX_FMT_NONE)
    {
        const AVPixFmtDescriptor *pixdecs = av_pix_fmt_desc_get((AVPixelFormat)avstream->codecpar->format);
        if (pixdecs)
            pvi->bmiHeader.biBitCount = av_get_bits_per_pixel(pixdecs);
    }
    pvi->bmiHeader.biSizeImage = DIBSIZE(pvi->bmiHeader); // Calculating this value doesn't really make alot of sense,
                                                          // but apparently some decoders freak out if its 0
    pvi->bmiHeader.biCompression = avstream->codecpar->codec_tag;
    // TOFIX The bitplanes is depending on the subtype
    pvi->bmiHeader.biPlanes = 1;
    pvi->bmiHeader.biClrUsed = 0;
    pvi->bmiHeader.biClrImportant = 0;
    pvi->bmiHeader.biYPelsPerMeter = 0;
    pvi->bmiHeader.biXPelsPerMeter = 0;
    *size = sizeof(VIDEOINFOHEADER) + avstream->codecpar->extradata_size;
    return pvi;
}
#define VC1_CODE_RES0 0x00000100
#define IS_VC1_MARKER(x) (((x) & ~0xFF) == VC1_CODE_RES0)
// Build a VIDEOINFOHEADER2 (aspect ratio aware) for the stream, with the
// extradata appended behind it. For VC-1, a zero byte is inserted between
// header and extradata when the extradata starts directly with a marker.
// Returns nullptr on allocation failure; caller frees with CoTaskMemFree.
// Fix: the intermediate VIDEOINFOHEADER was leaked when the VIH2
// allocation failed -- it is now freed on that path.
VIDEOINFOHEADER2 *CLAVFVideoHelper::CreateVIH2(const AVStream *avstream, ULONG *size, std::string container)
{
    int extra = 0;
    BYTE *extradata = nullptr;
    BOOL bZeroPad = FALSE;

    // VC-1: locate the first start-code marker. If it sits at offset 0, a
    // zero pad byte has to be inserted before the extradata (mpeg-ts rule).
    if (avstream->codecpar->codec_id == AV_CODEC_ID_VC1 && avstream->codecpar->extradata_size)
    {
        int i = 0;
        for (i = 0; i < (avstream->codecpar->extradata_size - 4); i++)
        {
            uint32_t code = AV_RB32(avstream->codecpar->extradata + i);
            if (IS_VC1_MARKER(code))
                break;
        }
        if (i == 0)
        {
            bZeroPad = TRUE;
        }
        else if (i > 1)
        {
            DbgLog((LOG_TRACE, 10,
                    L"CLAVFVideoHelper::CreateVIH2(): VC-1 extradata does not start at position 0/1, but %d", i));
        }
    }

    // Create a VIH that we'll convert
    VIDEOINFOHEADER *vih = CreateVIH(avstream, size, container);
    if (!vih)
        return nullptr;

    if (avstream->codecpar->extradata_size > 0)
    {
        extra = avstream->codecpar->extradata_size;
        // increase extra size by one, because VIH2 requires one 0 byte between header and extra data
        if (bZeroPad)
        {
            extra++;
        }
        extradata = avstream->codecpar->extradata;
    }

    VIDEOINFOHEADER2 *vih2 = (VIDEOINFOHEADER2 *)CoTaskMemAlloc(sizeof(VIDEOINFOHEADER2) + extra);
    if (!vih2)
    {
        CoTaskMemFree(vih); // fix: do not leak the intermediate VIH
        return nullptr;
    }
    memset(vih2, 0, sizeof(VIDEOINFOHEADER2));

    vih2->rcSource = vih->rcSource;
    vih2->rcTarget = vih->rcTarget;
    vih2->dwBitRate = vih->dwBitRate;
    vih2->dwBitErrorRate = vih->dwBitErrorRate;
    vih2->AvgTimePerFrame = vih->AvgTimePerFrame;

    // Calculate the display aspect ratio from the sample aspect ratio
    // (stream-level first, then codec-level, else square pixels).
    AVRational r = avstream->sample_aspect_ratio;
    AVRational rc = avstream->codecpar->sample_aspect_ratio;
    int num = vih->bmiHeader.biWidth, den = vih->bmiHeader.biHeight;
    if (r.den > 0 && r.num > 0)
    {
        av_reduce(&num, &den, (int64_t)r.num * num, (int64_t)r.den * den, UINT16_MAX);
    }
    else if (rc.den > 0 && rc.num > 0)
    {
        av_reduce(&num, &den, (int64_t)rc.num * num, (int64_t)rc.den * den, UINT16_MAX);
    }
    else
    {
        av_reduce(&num, &den, num, den, num);
    }
    vih2->dwPictAspectRatioX = num;
    vih2->dwPictAspectRatioY = den;

    memcpy(&vih2->bmiHeader, &vih->bmiHeader, sizeof(BITMAPINFOHEADER));
    vih2->bmiHeader.biSize = sizeof(BITMAPINFOHEADER) + extra;
    vih2->dwInterlaceFlags = 0;
    vih2->dwCopyProtectFlags = 0;
    vih2->dwControlFlags = 0;
    vih2->dwReserved2 = 0;

    if (extra)
    {
        // The first byte after the infoheader has to be 0 in mpeg-ts
        if (bZeroPad)
        {
            *((BYTE *)vih2 + sizeof(VIDEOINFOHEADER2)) = 0;
            // after that, the extradata .. size reduced by one again
            memcpy((BYTE *)vih2 + sizeof(VIDEOINFOHEADER2) + 1, extradata, extra - 1);
        }
        else
        {
            memcpy((BYTE *)vih2 + sizeof(VIDEOINFOHEADER2), extradata, extra);
        }
    }

    // Free the VIH that we converted
    CoTaskMemFree(vih);
    *size = sizeof(VIDEOINFOHEADER2) + extra;
    return vih2;
}
// Build a MPEG1VIDEOINFO (a VIDEOINFOHEADER plus the parsed MPEG sequence
// header) for the stream. Returns nullptr on allocation failure.
// Fix: the intermediate VIH was leaked when the MPEG1VIDEOINFO allocation
// failed -- it is now freed on that path.
MPEG1VIDEOINFO *CLAVFVideoHelper::CreateMPEG1VI(const AVStream *avstream, ULONG *size, std::string container)
{
    int extra = 0;
    BYTE *extradata = nullptr;

    // Create a VIH that we'll convert
    VIDEOINFOHEADER *vih = CreateVIH(avstream, size, container);
    if (!vih)
        return nullptr;

    if (avstream->codecpar->extradata_size > 0)
    {
        extra = avstream->codecpar->extradata_size;
        extradata = avstream->codecpar->extradata;
    }

    MPEG1VIDEOINFO *mp1vi = (MPEG1VIDEOINFO *)CoTaskMemAlloc(sizeof(MPEG1VIDEOINFO) + extra);
    if (!mp1vi)
    {
        CoTaskMemFree(vih); // fix: do not leak the intermediate VIH
        return nullptr;
    }
    memset(mp1vi, 0, sizeof(MPEG1VIDEOINFO));

    // The MPEG1VI is a thin wrapper around a VIH, so its easy!
    memcpy(&mp1vi->hdr, vih, sizeof(VIDEOINFOHEADER));
    mp1vi->hdr.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
    mp1vi->dwStartTimeCode = 0; // is this not 0 anywhere..?
    mp1vi->hdr.bmiHeader.biPlanes = 1;
    mp1vi->hdr.bmiHeader.biCompression = 0;

    // Extract the sequence header from the extradata, if present
    if (extra)
    {
        CExtradataParser parser = CExtradataParser(extradata, extra);
        mp1vi->cbSequenceHeader = (DWORD)parser.ParseMPEGSequenceHeader(mp1vi->bSequenceHeader);
    }

    // Free the VIH that we converted
    CoTaskMemFree(vih);
    *size = SIZE_MPEG1VIDEOINFO(mp1vi);
    return mp1vi;
}
// Build a MPEG2VIDEOINFO (VIDEOINFOHEADER2 plus codec sequence header) for
// the stream. H.264/HEVC/VVC extradata is converted to the length-prefixed
// layout DirectShow decoders expect; unknown formats are copied untouched.
// Returns nullptr on allocation failure.
// Fix: the intermediate VIH2 was leaked when the MPEG2VIDEOINFO allocation
// failed -- it is now freed on that path.
MPEG2VIDEOINFO *CLAVFVideoHelper::CreateMPEG2VI(const AVStream *avstream, ULONG *size, std::string container,
                                                BOOL bConvertToAVC1)
{
    int extra = 0;
    BYTE *extradata = nullptr;

    // Create a VIH that we'll convert
    VIDEOINFOHEADER2 *vih2 = CreateVIH2(avstream, size, container);
    if (!vih2)
        return nullptr;

    if (avstream->codecpar->extradata_size > 0)
    {
        extra = avstream->codecpar->extradata_size;
        extradata = avstream->codecpar->extradata;
    }

    // MPEG2VIDEOINFO already embeds 4 bytes of dwSequenceHeader storage
    MPEG2VIDEOINFO *mp2vi = (MPEG2VIDEOINFO *)CoTaskMemAlloc(sizeof(MPEG2VIDEOINFO) + max(extra - 4, 0));
    if (!mp2vi)
    {
        CoTaskMemFree(vih2); // fix: do not leak the intermediate VIH2
        return nullptr;
    }
    memset(mp2vi, 0, sizeof(MPEG2VIDEOINFO));
    memcpy(&mp2vi->hdr, vih2, sizeof(VIDEOINFOHEADER2));
    mp2vi->hdr.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);

    // Set profile/level if we know them
    mp2vi->dwProfile = (avstream->codecpar->profile != FF_PROFILE_UNKNOWN) ? avstream->codecpar->profile : 0;
    mp2vi->dwLevel = (avstream->codecpar->level != FF_LEVEL_UNKNOWN) ? avstream->codecpar->level : 0;
    // mp2vi->dwFlags = 4; // where do we get flags otherwise..?

    if (extra > 0)
    {
        BOOL bCopyUntouched = FALSE;
        if (avstream->codecpar->codec_id == AV_CODEC_ID_H264)
        {
            int ret = ProcessH264Extradata(extradata, extra, mp2vi, bConvertToAVC1);
            if (ret < 0)
                bCopyUntouched = TRUE;
        }
        else if (avstream->codecpar->codec_id == AV_CODEC_ID_HEVC)
        {
            int ret = ProcessHEVCExtradata(extradata, extra, mp2vi);
            if (ret < 0)
                bCopyUntouched = TRUE;
        }
        else if (avstream->codecpar->codec_id == AV_CODEC_ID_VVC)
        {
            int ret = ProcessVVCExtradata(extradata, extra, mp2vi);
            if (ret < 0)
                bCopyUntouched = TRUE;
        }
        else if (avstream->codecpar->codec_id == AV_CODEC_ID_MPEG2VIDEO)
        {
            CExtradataParser parser = CExtradataParser(extradata, extra);
            mp2vi->cbSequenceHeader = (DWORD)parser.ParseMPEGSequenceHeader((BYTE *)&mp2vi->dwSequenceHeader[0]);
            mp2vi->hdr.bmiHeader.biPlanes = 1;
            mp2vi->hdr.bmiHeader.biCompression = 0;
        }
        else
        {
            bCopyUntouched = TRUE;
        }
        if (bCopyUntouched)
        {
            mp2vi->cbSequenceHeader = extra;
            memcpy(&mp2vi->dwSequenceHeader[0], extradata, extra);
        }
    }

    // Free the VIH2 that we converted
    CoTaskMemFree(vih2);
    *size = SIZE_MPEG2VIDEOINFO(mp2vi);
    return mp2vi;
}
HRESULT CLAVFVideoHelper::ProcessH264Extradata(BYTE *extradata, int extradata_size, MPEG2VIDEOINFO *mp2vi, BOOL bAnnexB)
{
if (*(char *)extradata == 1)
{
if (extradata[1])
mp2vi->dwProfile = extradata[1];
if (extradata[3])
mp2vi->dwLevel = extradata[3];
mp2vi->dwFlags = (extradata[4] & 3) + 1;
mp2vi->cbSequenceHeader = avc_quant(extradata, (BYTE *)(&mp2vi->dwSequenceHeader[0]), extradata_size);
}
else
{
// MPEG-TS gets converted for improved compat.. for now!
if (bAnnexB)
{
mp2vi->dwFlags = 4;
mp2vi->cbSequenceHeader =
(DWORD)avc_parse_annexb(extradata, extradata_size, (BYTE *)(&mp2vi->dwSequenceHeader[0]));
}
else
{
return -1;
}
}
return 0;
}
// Locate an embedded 'mvcC' atom inside avcC-style extradata and process
// its payload as a regular avcC record (for H.264 MVC / 3D streams).
// Returns S_OK when an mvcC atom was found and processed, E_FAIL otherwise.
HRESULT CLAVFVideoHelper::ProcessH264MVCExtradata(BYTE *extradata, int extradata_size, MPEG2VIDEOINFO *mp2vi)
{
    if (*(char *)extradata == 1)
    {
        // Find "mvcC" atom by sliding a 4-byte big-endian window
        uint32_t state = -1;
        int i = 0;
        for (; i < extradata_size; i++)
        {
            state = (state << 8) | extradata[i];
            if (state == MKBETAG('m', 'v', 'c', 'C'))
                break;
        }
        // Not found, or found too early to be preceded by a 4-byte size field
        if (i == extradata_size || i < 8)
            return E_FAIL;
        // Update pointers to the start of the mvcC atom: `i` indexes the last
        // tag byte, so the atom (size + tag) begins 7 bytes earlier
        extradata = extradata + i - 7;
        extradata_size = extradata_size - i + 7;
        int sizeAtom = AV_RB32(extradata);
        // verify size atom and actual size
        if ((sizeAtom + 4) > extradata_size || extradata_size < 14)
            return E_FAIL;
        // Skip atom headers
        extradata += 8;
        extradata_size -= 8;
        // Process as a normal "avcC" record
        ProcessH264Extradata(extradata, extradata_size, mp2vi, FALSE);
        return S_OK;
    }
    return E_FAIL;
}
// Extract the NALU length-field size from mp4-style hvcC extradata
// (anything whose first bytes cannot be an Annex-B start code prefix).
// Always returns -1 so the caller copies the extradata untouched.
// Fix: added parentheses around the format check -- '&&' binds tighter
// than '||', so previously `extradata_size > 25` only guarded the last
// comparison and extradata[21] could be read out of bounds.
HRESULT CLAVFVideoHelper::ProcessHEVCExtradata(BYTE *extradata, int extradata_size, MPEG2VIDEOINFO *mp2vi)
{
    if ((extradata[0] || extradata[1] || extradata[2] > 1) && extradata_size > 25)
    {
        // hvcC: low 2 bits of byte 21 hold lengthSizeMinusOne
        mp2vi->dwFlags = (extradata[21] & 3) + 1;
    }
    return -1;
}
// Extract the NALU length-field size from mp4-style VVC extradata.
// Mirrors the HEVC path; the byte-21 offset presumably matches the vvcC
// layout -- TODO confirm against ISO/IEC 14496-15.
// Always returns -1 so the caller copies the extradata untouched.
// Fix: added parentheses around the format check -- '&&' binds tighter
// than '||', so previously `extradata_size > 25` only guarded the last
// comparison and extradata[21] could be read out of bounds.
HRESULT CLAVFVideoHelper::ProcessVVCExtradata(BYTE *extradata, int extradata_size, MPEG2VIDEOINFO *mp2vi)
{
    if ((extradata[0] || extradata[1] || extradata[2] > 1) && extradata_size > 25)
    {
        mp2vi->dwFlags = (extradata[21] & 3) + 1;
    }
    return -1;
}
| 20,344
|
C++
|
.cpp
| 505
| 33.308911
| 120
| 0.608271
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| true
| true
| false
|
22,073
|
BaseDemuxer.cpp
|
Nevcairiel_LAVFilters/demuxer/Demuxers/BaseDemuxer.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "BaseDemuxer.h"
#include "moreuuids.h"
// Construct the demuxer base object; `pLock` is the shared filter lock.
// -1 presumably marks "no stream selected" for each stream type -- verify
// against the consumers of m_dActiveStreams.
CBaseDemuxer::CBaseDemuxer(LPCTSTR pName, CCritSec *pLock)
    : CUnknown(pName, nullptr)
    , m_pLock(pLock)
{
    // `unknown` is the number of stream types, so this touches every slot
    for (int i = 0; i < unknown; ++i)
    {
        m_dActiveStreams[i] = -1;
    }
}
void CBaseDemuxer::CreateNoSubtitleStream()
{
stream s;
s.pid = NO_SUBTITLE_PID;
s.streamInfo = new CStreamInfo();
s.language = "und";
// Create the media type
CMediaType mtype;
mtype.majortype = MEDIATYPE_Subtitle;
mtype.subtype = MEDIASUBTYPE_UTF8;
mtype.formattype = FORMAT_SubtitleInfo;
SUBTITLEINFO *subInfo = (SUBTITLEINFO *)mtype.AllocFormatBuffer(sizeof(SUBTITLEINFO));
memset(subInfo, 0, mtype.FormatLength());
wcscpy_s(subInfo->TrackName, NO_SUB_STRING);
strcpy_s(subInfo->IsoLang, "und");
subInfo->dwOffset = sizeof(SUBTITLEINFO);
s.streamInfo->mtypes.push_back(mtype);
// Append it to the list
m_streams[subpic].push_back(s);
}
void CBaseDemuxer::CreatePGSForcedSubtitleStream()
{
stream s;
s.pid = FORCED_SUBTITLE_PID;
s.streamInfo = new CStreamInfo();
s.language = "und";
// Create the media type
CMediaType mtype;
mtype.majortype = MEDIATYPE_Subtitle;
mtype.subtype = MEDIASUBTYPE_HDMVSUB;
mtype.formattype = FORMAT_SubtitleInfo;
SUBTITLEINFO *subInfo = (SUBTITLEINFO *)mtype.AllocFormatBuffer(sizeof(SUBTITLEINFO));
memset(subInfo, 0, mtype.FormatLength());
wcscpy_s(subInfo->TrackName, FORCED_SUB_STRING);
subInfo->dwOffset = sizeof(SUBTITLEINFO);
s.streamInfo->mtypes.push_back(mtype);
// Append it to the list
m_streams[subpic].push_back(s);
}
// CStreamList
// Human-readable (wide) name for a stream type constant.
const WCHAR *CBaseDemuxer::CStreamList::ToStringW(int type)
{
    switch (type)
    {
    case video: return L"Video";
    case audio: return L"Audio";
    case subpic: return L"Subtitle";
    default: return L"Unknown";
    }
}
// Human-readable (narrow) name for a stream type constant.
const CHAR *CBaseDemuxer::CStreamList::ToString(int type)
{
    switch (type)
    {
    case video: return "Video";
    case audio: return "Audio";
    case subpic: return "Subtitle";
    default: return "Unknown";
    }
}
// Linear search of this list for the stream with the given PID;
// returns nullptr when no such stream exists.
CBaseDemuxer::stream *CBaseDemuxer::CStreamList::FindStream(DWORD pid)
{
    for (stream &candidate : *this)
    {
        if (candidate.pid == pid)
            return &candidate;
    }
    return nullptr;
}
void CBaseDemuxer::CStreamList::Clear()
{
std::deque<stream>::iterator it;
for (it = begin(); it != end(); ++it)
{
delete (*it).streamInfo;
}
__super::clear();
}
// Search every stream-type list for the given PID; nullptr if not found.
CBaseDemuxer::stream *CBaseDemuxer::FindStream(DWORD pid)
{
    for (int type = 0; type < StreamType::unknown; type++)
    {
        stream *found = m_streams[type].FindStream(pid);
        if (found)
            return found;
    }
    return nullptr;
}
| 3,540
|
C++
|
.cpp
| 109
| 28.504587
| 107
| 0.679825
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| true
| true
| true
| false
|
22,075
|
Packet.cpp
|
Nevcairiel_LAVFilters/demuxer/Demuxers/Packet.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include <stdafx.h>
#include "Packet.h"
// Default constructor: intentionally empty -- members are presumably
// initialized at their declarations in the header (TODO confirm).
Packet::Packet()
{
}
// Release the attached media type and the FFmpeg packet; both
// DeleteMediaType and av_packet_free tolerate null input.
Packet::~Packet()
{
    DeleteMediaType(pmt);
    av_packet_free(&m_Packet);
}
// Resize the payload to exactly `len` bytes, keeping existing content on
// grow and truncating on shrink. Returns 0 on success, -1 on failure.
// Fix: av_packet_alloc() can return null on OOM -- previously the result
// was passed unchecked to av_new_packet(), which would dereference it.
// NOTE(review): if m_Packet is still null, GetDataSize() presumably returns
// 0, so a call with len == 0 reaches av_shrink_packet(nullptr, 0) -- verify
// this path cannot occur in practice.
int Packet::SetDataSize(int len)
{
    if (len < 0)
        return -1;
    // Shrinking (or keeping the size) never reallocates
    if (len <= GetDataSize())
    {
        av_shrink_packet(m_Packet, len);
        return 0;
    }
    if (!m_Packet)
    {
        // First allocation of the underlying AVPacket
        m_Packet = av_packet_alloc();
        if (!m_Packet || av_new_packet(m_Packet, len) < 0)
            return -1;
    }
    else
    {
        // Grow in place (may reallocate internally)
        if (av_grow_packet(m_Packet, (len - m_Packet->size)) < 0)
            return -1;
    }
    return 0;
}
// Replace the payload with a copy of `len` bytes from `ptr`.
// Returns 0 on success, -1 on invalid arguments or allocation failure.
int Packet::SetData(const void *ptr, int len)
{
    if (ptr == nullptr || len < 0)
        return -1;
    const int sizeResult = SetDataSize(len);
    if (sizeResult < 0)
        return sizeResult;
    memcpy(m_Packet->data, ptr, len);
    return 0;
}
// Adopt the contents of an existing AVPacket by taking a reference to it
// (no payload copy for refcounted packets). Must only be called on an
// empty Packet. Returns 0 on success or a negative error code.
int Packet::SetPacket(AVPacket *pkt)
{
    ASSERT(!m_Packet);
    m_Packet = av_packet_alloc();
    if (!m_Packet)
        return -1;
    return av_packet_ref(m_Packet, pkt);
}
// Append the full payload of another packet to this one.
// Returns 0 on success, negative on failure.
int Packet::Append(Packet *ptr)
{
    return AppendData(ptr->GetData(), ptr->GetDataSize());
}
// Enlarge the payload by `len` bytes and copy the new data to its tail.
// Returns 0 on success, negative on allocation failure.
int Packet::AppendData(const void *ptr, int len)
{
    const int oldSize = GetDataSize();
    const int growResult = SetDataSize(oldSize + len);
    if (growResult < 0)
        return growResult;
    memcpy(m_Packet->data + oldSize, ptr, len);
    return 0;
}
// Drop `count` bytes from the front of the payload by advancing the data
// pointer; the AVPacket's underlying buffer presumably keeps ownership of
// the full allocation. Always returns 0.
// NOTE(review): no bounds checking -- a count larger than the payload would
// corrupt the packet; callers are presumably trusted to pass valid values.
int Packet::RemoveHead(int count)
{
    m_Packet->data += count;
    m_Packet->size -= (int)count;
    return 0;
}
// Copy all metadata (stream id, flags, timestamps, media type) from another
// packet -- the payload itself is NOT copied. Always returns true.
bool Packet::CopyProperties(const Packet *src)
{
    StreamId = src->StreamId;
    dwFlags = src->dwFlags;
    bDiscontinuity = src->bDiscontinuity;
    bSyncPoint = src->bSyncPoint;
    bPosition = src->bPosition;
    rtStart = src->rtStart;
    rtStop = src->rtStop;
    rtPTS = src->rtPTS;
    rtDTS = src->rtDTS;
    // Deep-copy the media type so both packets can be freed independently
    if (src->pmt != nullptr)
        pmt = CreateMediaType(src->pmt);
    return true;
}
| 2,632
|
C++
|
.cpp
| 102
| 21.72549
| 75
| 0.643852
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| true
| true
| true
| false
|
22,076
|
LAVFAudioHelper.cpp
|
Nevcairiel_LAVFilters/demuxer/Demuxers/LAVFAudioHelper.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Contributions by Ti-BEN from the XBMC DSPlayer Project, also under GPLv2
*/
#include "stdafx.h"
#include "moreuuids.h"
#include "BaseDemuxer.h"
#include "LAVFAudioHelper.h"
#include "LAVFUtils.h"
#include "ExtradataParser.h"
#include <vector>
CLAVFAudioHelper g_AudioHelper;
// Map codec ids to media subtypes
// clang-format off
// Static codec-id -> DirectShow mapping consumed by initAudioType():
// optional media subtype, optional WAVE format tag (0 keeps the
// demuxer-supplied tag), and optional format-block GUID (nullptr keeps
// the FORMAT_WaveFormatEx default).
static FormatMapping audio_map[] = {
  { AV_CODEC_ID_AC3, &MEDIASUBTYPE_DOLBY_AC3, WAVE_FORMAT_DOLBY_AC3, nullptr },
  { AV_CODEC_ID_AAC, &MEDIASUBTYPE_AAC, WAVE_FORMAT_AAC, nullptr },
  { AV_CODEC_ID_AAC_LATM, &MEDIASUBTYPE_LATM_AAC, WAVE_FORMAT_LATM_AAC, nullptr },
  { AV_CODEC_ID_DTS, &MEDIASUBTYPE_WAVE_DTS, 0, nullptr },
  { AV_CODEC_ID_EAC3, &MEDIASUBTYPE_DOLBY_DDPLUS, 0, nullptr },
  { AV_CODEC_ID_TRUEHD, &MEDIASUBTYPE_DOLBY_TRUEHD, 0, nullptr },
  { AV_CODEC_ID_MLP, &MEDIASUBTYPE_MLP, WAVE_FORMAT_MLP, nullptr },
  { AV_CODEC_ID_VORBIS, &MEDIASUBTYPE_Vorbis2, 0, &FORMAT_VorbisFormat2 },
  { AV_CODEC_ID_MP1, &MEDIASUBTYPE_MPEG1AudioPayload, WAVE_FORMAT_MPEG, nullptr },
  { AV_CODEC_ID_MP2, &MEDIASUBTYPE_MPEG2_AUDIO, WAVE_FORMAT_MPEG, nullptr },
  { AV_CODEC_ID_MP3, &MEDIASUBTYPE_MP3, WAVE_FORMAT_MPEGLAYER3, nullptr },
  { AV_CODEC_ID_PCM_BLURAY, &MEDIASUBTYPE_BD_LPCM_AUDIO, 0, nullptr },
  { AV_CODEC_ID_PCM_DVD, &MEDIASUBTYPE_DVD_LPCM_AUDIO, 0, nullptr },
  { AV_CODEC_ID_PCM_S16LE, &MEDIASUBTYPE_PCM, WAVE_FORMAT_PCM, nullptr },
  { AV_CODEC_ID_PCM_S24LE, &MEDIASUBTYPE_PCM, WAVE_FORMAT_PCM, nullptr },
  { AV_CODEC_ID_PCM_S32LE, &MEDIASUBTYPE_PCM, WAVE_FORMAT_PCM, nullptr },
  { AV_CODEC_ID_PCM_F32LE, &MEDIASUBTYPE_IEEE_FLOAT, WAVE_FORMAT_IEEE_FLOAT, nullptr },
  { AV_CODEC_ID_PCM_S16LE_PLANAR, &MEDIASUBTYPE_PCM, WAVE_FORMAT_PCM, nullptr },
  { AV_CODEC_ID_PCM_S24LE_PLANAR, &MEDIASUBTYPE_PCM, WAVE_FORMAT_PCM, nullptr },
  { AV_CODEC_ID_PCM_S32LE_PLANAR, &MEDIASUBTYPE_PCM, WAVE_FORMAT_PCM, nullptr },
  { AV_CODEC_ID_WMAV1, &MEDIASUBTYPE_MSAUDIO1, WAVE_FORMAT_MSAUDIO1, nullptr },
  { AV_CODEC_ID_WMAV2, &MEDIASUBTYPE_WMAUDIO2, WAVE_FORMAT_WMAUDIO2, nullptr },
  { AV_CODEC_ID_WMAPRO, &MEDIASUBTYPE_WMAUDIO3, WAVE_FORMAT_WMAUDIO3, nullptr },
  { AV_CODEC_ID_ADPCM_IMA_AMV, &MEDIASUBTYPE_IMA_AMV, 0, nullptr },
  { AV_CODEC_ID_FLAC, &MEDIASUBTYPE_FLAC_FRAMED, WAVE_FORMAT_FLAC, nullptr },
  { AV_CODEC_ID_COOK, &MEDIASUBTYPE_COOK, WAVE_FORMAT_COOK, nullptr },
  { AV_CODEC_ID_SIPR, &MEDIASUBTYPE_SIPR, WAVE_FORMAT_SIPR, nullptr },
  { AV_CODEC_ID_RA_288, &MEDIASUBTYPE_28_8, WAVE_FORMAT_28_8, nullptr },
  { AV_CODEC_ID_RA_144, &MEDIASUBTYPE_14_4, WAVE_FORMAT_14_4, nullptr },
  { AV_CODEC_ID_RALF, &MEDIASUBTYPE_RALF, WAVE_FORMAT_RALF, nullptr },
  { AV_CODEC_ID_ALAC, &MEDIASUBTYPE_ALAC, 0, nullptr },
  { AV_CODEC_ID_MP4ALS, &MEDIASUBTYPE_ALS, 0, nullptr },
  { AV_CODEC_ID_OPUS, &MEDIASUBTYPE_OPUS, WAVE_FORMAT_OPUS, nullptr },
  { AV_CODEC_ID_NELLYMOSER, &MEDIASUBTYPE_NELLYMOSER, 0, nullptr },
  { AV_CODEC_ID_PCM_ALAW, &MEDIASUBTYPE_ALAW, WAVE_FORMAT_ALAW, nullptr },
  { AV_CODEC_ID_PCM_MULAW, &MEDIASUBTYPE_MULAW, WAVE_FORMAT_MULAW, nullptr },
  { AV_CODEC_ID_GSM_MS, &MEDIASUBTYPE_MSGSM610, WAVE_FORMAT_GSM610, nullptr },
  { AV_CODEC_ID_ADPCM_MS, &MEDIASUBTYPE_ADPCM_MS, WAVE_FORMAT_ADPCM, nullptr },
  { AV_CODEC_ID_TRUESPEECH, &MEDIASUBTYPE_TRUESPEECH, WAVE_FORMAT_DSPGROUP_TRUESPEECH, nullptr },
  { AV_CODEC_ID_TAK, &MEDIASUBTYPE_TAK, 0, nullptr },
  { AV_CODEC_ID_S302M, &MEDIASUBTYPE_AES3, 0, nullptr },
  { AV_CODEC_ID_PCM_S16BE, &MEDIASUBTYPE_PCM_TWOS, 0, nullptr },
  { AV_CODEC_ID_PCM_S16BE_PLANAR, &MEDIASUBTYPE_PCM_TWOS, 0, nullptr },
  { AV_CODEC_ID_PCM_S24BE, &MEDIASUBTYPE_PCM_IN24, 0, nullptr },
  { AV_CODEC_ID_PCM_S32BE, &MEDIASUBTYPE_PCM_IN32, 0, nullptr },
  { AV_CODEC_ID_QDM2, &MEDIASUBTYPE_QDM2, WAVE_FORMAT_QDESIGN_MUSIC, nullptr },
  { AV_CODEC_ID_ATRAC1, &MEDIASUBTYPE_ATRC, WAVE_FORMAT_ATRC, nullptr },
  { AV_CODEC_ID_ATRAC3, &MEDIASUBTYPE_ATRC, WAVE_FORMAT_ATRC, nullptr },
  { AV_CODEC_ID_ATRAC3P, &MEDIASUBTYPE_ATRAC3P, WAVE_FORMAT_ATRAC3P, nullptr },
  { AV_CODEC_ID_DSD_LSBF, &MEDIASUBTYPE_DSDL, 0, nullptr },
  { AV_CODEC_ID_DSD_MSBF, &MEDIASUBTYPE_DSDM, 0, nullptr },
  { AV_CODEC_ID_DSD_LSBF_PLANAR, &MEDIASUBTYPE_DSD1, 0, nullptr },
  { AV_CODEC_ID_DSD_MSBF_PLANAR, &MEDIASUBTYPE_DSD8, 0, nullptr },
};
// clang-format on
CMediaType CLAVFAudioHelper::initAudioType(AVCodecParameters *codecpar, unsigned int &codecTag, std::string container)
{
    // Build the DirectShow media type for an audio stream. Defaults to a
    // FOURCC subtype derived from the codec tag and FORMAT_WaveFormatEx,
    // then applies the static table and codec-specific special cases.
    // `codecTag` is in-out: some codecs replace the demuxer-supplied tag.
    CMediaType mediaType;
    mediaType.InitMediaType();
    mediaType.majortype = MEDIATYPE_Audio;
    mediaType.subtype = FOURCCMap(codecTag);
    mediaType.formattype = FORMAT_WaveFormatEx; // default value
    mediaType.SetSampleSize(256000);

    // Apply the first matching entry from the static mapping table.
    for (const FormatMapping &entry : audio_map)
    {
        if (entry.codec != codecpar->codec_id)
            continue;
        if (entry.subtype)
            mediaType.subtype = *entry.subtype;
        if (entry.codecTag)
            codecTag = entry.codecTag;
        if (entry.format)
            mediaType.formattype = *entry.format;
        break;
    }

    // Special cases not expressible in the static table.
    switch (codecpar->codec_id)
    {
    case AV_CODEC_ID_PCM_F64LE:
        // Qt PCM: 64-bit float with the 'fl64' tag
        if (codecTag == MKTAG('f', 'l', '6', '4'))
            mediaType.subtype = MEDIASUBTYPE_PCM_FL64_le;
        break;
    }
    return mediaType;
}
// Build a WAVEFORMATEX for the stream, with the codec extradata appended
// directly after it (cbSize set accordingly). Missing channel count /
// sample rate fall back to 2ch / 48kHz. *size receives the total size.
// Returns nullptr on allocation failure; caller frees with CoTaskMemFree.
WAVEFORMATEX *CLAVFAudioHelper::CreateWVFMTEX(const AVStream *avstream, ULONG *size)
{
    WAVEFORMATEX *wvfmt = (WAVEFORMATEX *)CoTaskMemAlloc(sizeof(WAVEFORMATEX) + avstream->codecpar->extradata_size);
    if (!wvfmt)
        return nullptr;
    memset(wvfmt, 0, sizeof(WAVEFORMATEX));
    wvfmt->wFormatTag = avstream->codecpar->codec_tag;
    wvfmt->nChannels = avstream->codecpar->ch_layout.nb_channels ? avstream->codecpar->ch_layout.nb_channels : 2;
    wvfmt->nSamplesPerSec = avstream->codecpar->sample_rate ? avstream->codecpar->sample_rate : 48000;
    wvfmt->nAvgBytesPerSec = (DWORD)(avstream->codecpar->bit_rate / 8);
    // AAC is special-cased: no fixed sample size, block align of 1
    if (avstream->codecpar->codec_id == AV_CODEC_ID_AAC || avstream->codecpar->codec_id == AV_CODEC_ID_AAC_LATM)
    {
        wvfmt->wBitsPerSample = 0;
        wvfmt->nBlockAlign = 1;
    }
    else
    {
        wvfmt->wBitsPerSample = get_bits_per_sample(avstream->codecpar);
        if (avstream->codecpar->block_align > 0)
        {
            wvfmt->nBlockAlign = avstream->codecpar->block_align;
        }
        else
        {
            if (wvfmt->wBitsPerSample == 0)
            {
                DbgOutString(L"BitsPerSample is 0, no good!");
            }
            // Derive the block align from the raw sample format instead
            wvfmt->nBlockAlign =
                (WORD)(wvfmt->nChannels * av_get_bytes_per_sample((AVSampleFormat)avstream->codecpar->format));
        }
    }
    // No bitrate reported: compute it from rate * channels * sample size
    if (!wvfmt->nAvgBytesPerSec)
        wvfmt->nAvgBytesPerSec = (wvfmt->nSamplesPerSec * wvfmt->nChannels * wvfmt->wBitsPerSample) >> 3;
    if (avstream->codecpar->extradata_size > 0)
    {
        wvfmt->cbSize = avstream->codecpar->extradata_size;
        memcpy((BYTE *)wvfmt + sizeof(WAVEFORMATEX), avstream->codecpar->extradata, avstream->codecpar->extradata_size);
    }
    *size = sizeof(WAVEFORMATEX) + avstream->codecpar->extradata_size;
    return wvfmt;
}
// Wrap the plain WAVEFORMATEX in a WAVEFORMATEXFFMPEG that additionally
// carries the FFmpeg codec id. Returns nullptr on allocation failure.
// Fix: the intermediate WAVEFORMATEX was leaked when the outer allocation
// failed -- it is now freed on that path.
WAVEFORMATEXFFMPEG *CLAVFAudioHelper::CreateWVFMTEX_FF(const AVStream *avstream, ULONG *size)
{
    WAVEFORMATEX *wvfmt = CreateWVFMTEX(avstream, size);
    if (!wvfmt)
        return nullptr;
    const size_t diff_size = sizeof(WAVEFORMATEXFFMPEG) - sizeof(WAVEFORMATEX);
    WAVEFORMATEXFFMPEG *wfex_ff = (WAVEFORMATEXFFMPEG *)CoTaskMemAlloc(diff_size + *size);
    if (!wfex_ff)
    {
        CoTaskMemFree(wvfmt); // fix: do not leak the intermediate format block
        return nullptr;
    }
    memset(wfex_ff, 0, sizeof(WAVEFORMATEXFFMPEG));
    memcpy(&wfex_ff->wfex, wvfmt, *size);
    wfex_ff->nCodecId = avstream->codecpar->codec_id;
    CoTaskMemFree(wvfmt);
    *size = sizeof(WAVEFORMATEXFFMPEG) + wfex_ff->wfex.cbSize;
    return wfex_ff;
}
WAVEFORMATEXFFMPEG *CLAVFAudioHelper::CreateWFMTEX_RAW_PCM_FF(const AVStream *avstream, ULONG *size, const GUID subtype, ULONG *samplesize)
{
    // Raw-PCM variant of CreateWVFMTEX_FF: build the (possibly extensible) PCM
    // header first, then wrap it with the codec id. Returns nullptr on OOM.
    WAVEFORMATEXTENSIBLE *wvfmt = CreateWFMTEX_RAW_PCM(avstream, size, subtype, samplesize);
    if (!wvfmt)
        return nullptr;
    const size_t diff_size = sizeof(WAVEFORMATEXFFMPEG) - sizeof(WAVEFORMATEX);
    WAVEFORMATEXFFMPEG *wfex_ff = (WAVEFORMATEXFFMPEG *)CoTaskMemAlloc(diff_size + *size);
    if (!wfex_ff)
    {
        // fix: don't leak the intermediate format block on allocation failure
        CoTaskMemFree(wvfmt);
        return nullptr;
    }
    memset(wfex_ff, 0, sizeof(WAVEFORMATEXFFMPEG));
    // *size is either sizeof(WAVEFORMATEX) or sizeof(WAVEFORMATEXTENSIBLE).
    memcpy(&wfex_ff->wfex, wvfmt, *size);
    wfex_ff->nCodecId = avstream->codecpar->codec_id;
    CoTaskMemFree(wvfmt);
    *size = sizeof(WAVEFORMATEXFFMPEG) + wfex_ff->wfex.cbSize;
    return wfex_ff;
}
WAVEFORMATEX_HDMV_LPCM *CLAVFAudioHelper::CreateWVFMTEX_LPCM(const AVStream *avstream, ULONG *size)
{
    // Build the BluRay HDMV LPCM wave format: a WAVEFORMATEX extended with the
    // HDMV channel-configuration code. Returns nullptr on OOM.
    WAVEFORMATEX *wvfmt = CreateWVFMTEX(avstream, size);
    if (!wvfmt) // fix: result was previously dereferenced without a check
        return nullptr;
    WAVEFORMATEX_HDMV_LPCM *lpcm = (WAVEFORMATEX_HDMV_LPCM *)CoTaskMemAlloc(sizeof(WAVEFORMATEX_HDMV_LPCM));
    if (!lpcm)
    {
        // fix: free the intermediate block instead of crashing in memset below
        CoTaskMemFree(wvfmt);
        return nullptr;
    }
    memset(lpcm, 0, sizeof(WAVEFORMATEX_HDMV_LPCM));
    memcpy(lpcm, wvfmt, sizeof(WAVEFORMATEX));
    lpcm->cbSize = sizeof(WAVEFORMATEX_HDMV_LPCM) - sizeof(WAVEFORMATEX);
    // Map the native channel layout to the HDMV channel_conf code; 0 = unknown.
    BYTE channel_conf = 0;
    if (avstream->codecpar->ch_layout.order == AV_CHANNEL_ORDER_NATIVE)
    {
        switch (avstream->codecpar->ch_layout.u.mask)
        {
        case AV_CH_LAYOUT_MONO: channel_conf = 1; break;
        case AV_CH_LAYOUT_STEREO: channel_conf = 3; break;
        case AV_CH_LAYOUT_SURROUND: channel_conf = 4; break;
        case AV_CH_LAYOUT_2_1: channel_conf = 5; break;
        case AV_CH_LAYOUT_4POINT0: channel_conf = 6; break;
        case AV_CH_LAYOUT_2_2: channel_conf = 7; break;
        case AV_CH_LAYOUT_5POINT0: channel_conf = 8; break;
        case AV_CH_LAYOUT_5POINT1: channel_conf = 9; break;
        case AV_CH_LAYOUT_7POINT0: channel_conf = 10; break;
        case AV_CH_LAYOUT_7POINT1: channel_conf = 11; break;
        default: channel_conf = 0;
        }
    }
    lpcm->channel_conf = channel_conf;
    CoTaskMemFree(wvfmt);
    *size = sizeof(WAVEFORMATEX_HDMV_LPCM);
    return lpcm;
}
// Build a raw PCM wave format. Uses the plain WAVEFORMATEX header when 16-bit
// mono/stereo at <= 48kHz suffices, otherwise fills the WAVEFORMATEXTENSIBLE
// fields (channel mask, valid bits, subformat GUID).
// *size receives sizeof(WAVEFORMATEX) or sizeof(WAVEFORMATEXTENSIBLE);
// *samplesize receives the bytes per sample frame. Returns nullptr on OOM.
WAVEFORMATEXTENSIBLE *CLAVFAudioHelper::CreateWFMTEX_RAW_PCM(const AVStream *avstream, ULONG *size, const GUID subtype,
                                                             ULONG *samplesize)
{
    WAVEFORMATEXTENSIBLE *wfex = (WAVEFORMATEXTENSIBLE *)CoTaskMemAlloc(sizeof(WAVEFORMATEXTENSIBLE));
    if (!wfex)
        return nullptr;
    memset(wfex, 0, sizeof(*wfex));
    WAVEFORMATEX *wfe = &wfex->Format;
    // For plain (non-extensible) use, the format tag is the low word of the subtype GUID.
    wfe->wFormatTag = (WORD)subtype.Data1;
    wfe->nChannels = avstream->codecpar->ch_layout.nb_channels;
    wfe->nSamplesPerSec = avstream->codecpar->sample_rate;
    if (avstream->codecpar->format == AV_SAMPLE_FMT_S32 && avstream->codecpar->bits_per_raw_sample > 0)
    {
        // S32 container with a known raw depth: round up to the next byte boundary (16/24/32).
        wfe->wBitsPerSample = avstream->codecpar->bits_per_raw_sample > 24
                                  ? 32
                                  : (avstream->codecpar->bits_per_raw_sample > 16 ? 24 : 16);
    }
    else
    {
        wfe->wBitsPerSample = av_get_bytes_per_sample((AVSampleFormat)avstream->codecpar->format) << 3;
    }
    wfe->nBlockAlign = wfe->nChannels * wfe->wBitsPerSample / 8;
    wfe->nAvgBytesPerSec = wfe->nSamplesPerSec * wfe->nBlockAlign;
    // The extensible header is required for >2 channels (needs a channel mask)
    // or for high-res mono/stereo (>16 bit or >48kHz).
    bool bUseExtensible = false;
    DWORD dwChannelMask = 0;
    if (wfe->nChannels > 2)
    {
        bUseExtensible = true;
        if (avstream->codecpar->ch_layout.order == AV_CHANNEL_ORDER_NATIVE)
            dwChannelMask = (DWORD)avstream->codecpar->ch_layout.u.mask;
        if (!dwChannelMask)
        {
            // No layout known: assume FFmpeg's default layout for this channel count.
            AVChannelLayout Layout{};
            av_channel_layout_default(&Layout, wfe->nChannels);
            dwChannelMask = (DWORD)Layout.u.mask;
        }
        // A mask that doesn't match the channel count is useless; drop it.
        if (dwChannelMask && av_popcount(dwChannelMask) != wfe->nChannels)
            dwChannelMask = 0;
    }
    else if ((wfe->wBitsPerSample > 16 || wfe->nSamplesPerSec > 48000) && wfe->nChannels <= 2)
    {
        bUseExtensible = true;
        dwChannelMask = wfe->nChannels == 2 ? (SPEAKER_FRONT_LEFT | SPEAKER_FRONT_RIGHT) : SPEAKER_FRONT_CENTER;
    }
    if (bUseExtensible)
    {
        wfex->Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE;
        wfex->Format.cbSize = sizeof(*wfex) - sizeof(wfex->Format);
        wfex->dwChannelMask = dwChannelMask;
        if (avstream->codecpar->format == AV_SAMPLE_FMT_S32 && avstream->codecpar->bits_per_raw_sample > 0)
        {
            // Container bits may exceed the actually valid bits (e.g. 24-in-32).
            wfex->Samples.wValidBitsPerSample = avstream->codecpar->bits_per_raw_sample;
        }
        else
        {
            wfex->Samples.wValidBitsPerSample = wfex->Format.wBitsPerSample;
        }
        wfex->SubFormat = subtype;
        *size = sizeof(WAVEFORMATEXTENSIBLE);
    }
    else
    {
        // Plain header is a prefix of the allocation; callers only read *size bytes.
        *size = sizeof(WAVEFORMATEX);
    }
    *samplesize = wfe->wBitsPerSample * wfe->nChannels / 8;
    return wfex;
}
MPEG1WAVEFORMAT *CLAVFAudioHelper::CreateMP1WVFMT(const AVStream *avstream, ULONG *size)
{
    // Build an MPEG-1 audio (layer 1/2) wave format header. Returns nullptr on OOM.
    WAVEFORMATEX *wvfmt = CreateWVFMTEX(avstream, size);
    if (!wvfmt)
        return nullptr;
    MPEG1WAVEFORMAT *mpwvfmt = (MPEG1WAVEFORMAT *)CoTaskMemAlloc(sizeof(MPEG1WAVEFORMAT));
    if (!mpwvfmt)
    {
        // fix: don't leak the intermediate WAVEFORMATEX on allocation failure
        CoTaskMemFree(wvfmt);
        return nullptr;
    }
    memset(mpwvfmt, 0, sizeof(MPEG1WAVEFORMAT));
    memcpy(&mpwvfmt->wfx, wvfmt, sizeof(WAVEFORMATEX));
    mpwvfmt->dwHeadBitrate = (DWORD)avstream->codecpar->bit_rate;
    mpwvfmt->fwHeadMode = avstream->codecpar->ch_layout.nb_channels == 1 ? ACM_MPEG_SINGLECHANNEL : ACM_MPEG_DUALCHANNEL;
    mpwvfmt->fwHeadLayer = (avstream->codecpar->codec_id == AV_CODEC_ID_MP1) ? ACM_MPEG_LAYER1 : ACM_MPEG_LAYER2;
    // NOTE(review): writes through the shallow-const AVStream; default matches
    // CreateWVFMTEX's 48kHz fallback so nBlockAlign below never divides by zero.
    if (avstream->codecpar->sample_rate == 0)
    {
        avstream->codecpar->sample_rate = 48000;
    }
    mpwvfmt->wfx.wFormatTag = WAVE_FORMAT_MPEG;
    // Frame size: layer 1 = (12 * bitrate / samplerate) slots of 4 bytes,
    // layer 2 = 144 * bitrate / samplerate bytes.
    mpwvfmt->wfx.nBlockAlign = WORD((avstream->codecpar->codec_id == AV_CODEC_ID_MP1)
                                        ? (12 * avstream->codecpar->bit_rate / avstream->codecpar->sample_rate) * 4
                                        : 144 * avstream->codecpar->bit_rate / avstream->codecpar->sample_rate);
    mpwvfmt->wfx.cbSize = sizeof(MPEG1WAVEFORMAT) - sizeof(WAVEFORMATEX);
    CoTaskMemFree(wvfmt);
    *size = sizeof(MPEG1WAVEFORMAT);
    return mpwvfmt;
}
VORBISFORMAT *CLAVFAudioHelper::CreateVorbis(const AVStream *avstream, ULONG *size)
{
    // Build the legacy VORBISFORMAT (type 1) header from the stream parameters.
    // Returns a CoTaskMemAlloc'd struct (caller frees), or nullptr on OOM.
    VORBISFORMAT *fmt = (VORBISFORMAT *)CoTaskMemAlloc(sizeof(VORBISFORMAT));
    if (!fmt)
        return nullptr;
    memset(fmt, 0, sizeof(*fmt));

    fmt->nChannels = avstream->codecpar->ch_layout.nb_channels;
    fmt->nSamplesPerSec = avstream->codecpar->sample_rate;
    fmt->nAvgBitsPerSec = (DWORD)avstream->codecpar->bit_rate;
    // Min/max bitrates are unknown; flag them with all-ones.
    fmt->nMinBitsPerSec = (DWORD)-1;
    fmt->nMaxBitsPerSec = (DWORD)-1;

    *size = sizeof(VORBISFORMAT);
    return fmt;
}
VORBISFORMAT2 *CLAVFAudioHelper::CreateVorbis2(const AVStream *avstream, ULONG *size)
{
    // Parse the Xiph-laced Vorbis extradata (identification, comment and setup
    // headers) into a VORBISFORMAT2 with the three header blocks appended.
    // Returns nullptr (with *size = 0) on malformed/missing extradata or OOM.
    *size = 0;
    // fix: guard against missing or truncated extradata (was dereferenced unchecked)
    if (!avstream->codecpar->extradata || avstream->codecpar->extradata_size < 3)
        return nullptr;
    BYTE *p = avstream->codecpar->extradata;
    const BYTE *const end = p + avstream->codecpar->extradata_size;
    std::vector<int> sizes;
    // read the number of blocks, and then the sizes of the individual blocks
    for (BYTE n = *p++; n > 0; n--)
    {
        int blocksize = 0; // renamed from "size": no longer shadows the out-parameter
        // Xiph Lacing: 0xFF means "add 255 and continue with the next byte"
        do
        {
            if (p >= end)
                return nullptr; // fix: truncated lacing data would overread
            blocksize += *p;
        } while (*p++ == 0xFF);
        sizes.push_back(blocksize);
    }
    int totalsize = 0;
    for (unsigned int i = 0; i < sizes.size(); i++)
        totalsize += sizes[i];
    // The last block's size is whatever remains of the extradata.
    sizes.push_back(avstream->codecpar->extradata_size - (int)(p - avstream->codecpar->extradata) - totalsize);
    if (sizes.back() < 0)
        return nullptr; // fix: declared sizes exceed the extradata length
    totalsize += sizes[sizes.size() - 1];
    // 3 blocks is the currently valid Vorbis format
    if (sizes.size() == 3)
    {
        VORBISFORMAT2 *pvf2 = (VORBISFORMAT2 *)CoTaskMemAlloc(sizeof(VORBISFORMAT2) + totalsize);
        if (!pvf2)
            return nullptr;
        memset(pvf2, 0, sizeof(VORBISFORMAT2));
        pvf2->Channels = avstream->codecpar->ch_layout.nb_channels;
        pvf2->SamplesPerSec = avstream->codecpar->sample_rate;
        pvf2->BitsPerSample = get_bits_per_sample(avstream->codecpar);
        // Copy the three header blocks back-to-back after the struct, recording their sizes.
        BYTE *p2 = (BYTE *)pvf2 + sizeof(VORBISFORMAT2);
        for (unsigned int i = 0; i < sizes.size(); p += sizes[i], p2 += sizes[i], i++)
        {
            memcpy(p2, p, pvf2->HeaderSize[i] = sizes[i]);
        }
        *size = sizeof(VORBISFORMAT2) + totalsize;
        return pvf2;
    }
    return nullptr;
}
| 18,348
|
C++
|
.cpp
| 371
| 43.188679
| 139
| 0.621729
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| true
| true
| false
|
22,078
|
BDDemuxer.cpp
|
Nevcairiel_LAVFilters/demuxer/Demuxers/BDDemuxer.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "BDDemuxer.h"
#include "libbluray/bdnav/mpls_data.h"
extern "C"
{
#include "libavutil/avstring.h"
};
#define BD_READ_BUFFER_SIZE (6144 * 20)
int CBDDemuxer::BDByteStreamRead(void *opaque, uint8_t *buf, int buf_size)
{
CBDDemuxer *demux = (CBDDemuxer *)opaque;
int ret = bd_read(demux->m_pBD, buf, buf_size);
return (ret != 0) ? ret : AVERROR_EOF;
}
int64_t CBDDemuxer::BDByteStreamSeek(void *opaque, int64_t offset, int whence)
{
    // avio seek callback on top of libbluray. bd_seek() only lands on aligned
    // unit boundaries, so any remaining distance to the requested position is
    // consumed by dummy reads. Returns the achieved byte position, or -1.
    CBDDemuxer *demux = (CBDDemuxer *)opaque;
    BLURAY *bd = demux->m_pBD;
    int64_t pos = 0;
    if (whence == SEEK_SET)
    {
        pos = offset;
    }
    else if (whence == SEEK_CUR)
    {
        if (offset == 0)
            return bd_tell(bd);
        pos = bd_tell(bd) + offset;
    }
    else if (whence == SEEK_END)
    {
        pos = bd_get_title_size(bd) - offset;
    }
    else if (whence == AVSEEK_SIZE)
    {
        // avio protocol extension: report the total size instead of seeking.
        return bd_get_title_size(bd);
    }
    else
        return -1;
    if (pos < 0)
        pos = 0;
    int64_t achieved = bd_seek(bd, pos);
    if (pos > achieved)
    {
        offset = pos - achieved;
        DbgLog((LOG_TRACE, 10, L"BD Seek to %I64d, achieved %I64d, correcting target by %I64d", pos, achieved, offset));
        uint8_t *dump_buffer = (uint8_t *)CoTaskMemAlloc(6144);
        if (dump_buffer) // fix: allocation was previously used without a null check
        {
            while (offset > 0)
            {
                int to_read = (int)min(offset, 6144);
                int did_read = bd_read(bd, dump_buffer, to_read);
                offset -= did_read;
                if (did_read < to_read)
                {
                    DbgLog((LOG_TRACE, 10, L"Reached EOF with %I64d offset remaining", offset));
                    break;
                }
            }
            CoTaskMemFree(dump_buffer);
        }
        achieved = bd_tell(bd);
    }
    // Pick up clip-change / EOS events generated by the seek.
    demux->ProcessBDEvents();
    return achieved;
}
// Convert a 90kHz (MPEG/BluRay clock) timestamp to DirectShow 100ns units:
// x * 10^7 / 90000 == x * 1000 / 9, rescaled without intermediate overflow.
static inline REFERENCE_TIME Convert90KhzToDSTime(int64_t timestamp)
{
    return av_rescale(timestamp, 1000, 9);
}
// Inverse of Convert90KhzToDSTime: DirectShow 100ns units back to 90kHz ticks.
static inline int64_t ConvertDSTimeTo90Khz(REFERENCE_TIME timestamp)
{
    return av_rescale(timestamp, 9, 1000);
}
#ifdef DEBUG
// libbluray log callback: strips the build-path prefix the messages share with
// this file, converts the remainder to wide chars and forwards it to DbgLog.
static void bd_log(const char *log)
{
    const char *path = __FILE__;
    const char *subpath = "libbluray\\src\\";
    // skip the path these two have in common
    while (*log == *path)
    {
        log++;
        path++;
    }
    while (*log == *subpath)
    {
        log++;
        subpath++;
    }
    wchar_t line[4096] = {0};
    size_t len = strlen(log);
    // Drop a trailing newline; DbgLog adds its own line break.
    // NOTE(review): assumes a non-empty message (len >= 1) -- confirm libbluray guarantees this.
    if (log[len - 1] == '\n')
    {
        len--;
    }
    SafeMultiByteToWideChar(CP_UTF8, MB_ERR_INVALID_CHARS, log, (int)len, line, 4096);
    DbgLog((LOG_TRACE, 40, L"[BD] %s", line));
}
#endif
// Construct the BluRay demuxer. In debug builds libbluray's logging is routed
// into our debug output; release builds silence it entirely.
CBDDemuxer::CBDDemuxer(CCritSec *pLock, ILAVFSettingsInternal *pSettings)
    : CBaseDemuxer(L"bluray demuxer", pLock)
    , m_pSettings(pSettings)
{
#ifdef DEBUG
    bd_set_debug_mask(DBG_FILE | DBG_BLURAY | DBG_DIR | DBG_NAV | DBG_CRIT);
    bd_set_debug_handler(&bd_log);
#else
    bd_set_debug_mask(0);
#endif
}
// Tear everything down in dependency order: the MVC extension demuxer first,
// then the title info and disc handle, the avio context (its buffer is not
// owned by the context and must be freed separately), and finally the inner
// demuxer and the per-stream clip/offset bookkeeping arrays.
CBDDemuxer::~CBDDemuxer(void)
{
    CloseMVCExtensionDemuxer();
    if (m_pTitle)
    {
        bd_free_title_info(m_pTitle);
        m_pTitle = nullptr;
    }
    if (m_pBD)
    {
        bd_close(m_pBD);
        m_pBD = nullptr;
    }
    if (m_pb)
    {
        // avio_alloc_context does not take ownership of the buffer
        av_free(m_pb->buffer);
        av_free(m_pb);
    }
    SafeRelease(&m_lavfDemuxer);
    SAFE_CO_FREE(m_StreamClip);
    SAFE_CO_FREE(m_rtOffset);
}
// COM QueryInterface: track/keyframe/property-bag interfaces are served by the
// inner lavf demuxer (when one exists); IAMExtendedSeeking is handled here.
STDMETHODIMP CBDDemuxer::NonDelegatingQueryInterface(REFIID riid, void **ppv)
{
    CheckPointer(ppv, E_POINTER);
    *ppv = nullptr;
    if (m_lavfDemuxer &&
        (riid == __uuidof(IKeyFrameInfo) || riid == __uuidof(ITrackInfo) || riid == __uuidof(IPropertyBag)))
    {
        return m_lavfDemuxer->QueryInterface(riid, ppv);
    }
    return QI2(IAMExtendedSeeking) __super::NonDelegatingQueryInterface(riid, ppv);
}
/////////////////////////////////////////////////////////////////////////////
// Demuxer Functions
// Open a BluRay structure from one of its entry-point files (BDMV\index.bdmv,
// BDMV\MovieObject.bdmv, or a specific playlist under BDMV\PLAYLIST\) and
// select a title: the playlist named in the path if one was given, otherwise
// the longest title on the disc. pszUserAgent/pszReferrer are unused here.
STDMETHODIMP CBDDemuxer::Open(LPCOLESTR pszFileName, LPCOLESTR pszUserAgent, LPCOLESTR pszReferrer)
{
    CAutoLock lock(m_pLock);
    HRESULT hr = S_OK;
    int ret; // return code from C functions
    // Convert the filename from wchar to char for libbluray
    char fileName[4096];
    ret = SafeWideCharToMultiByte(CP_UTF8, 0, pszFileName, -1, fileName, 4096, nullptr, nullptr);
    int iPlaylist = -1;
    DbgLog((LOG_TRACE, 10, L"Initializing BluRay Demuxer; Entry Point: %s", pszFileName));
    // Shortest accepted path is "X\BDMV\index.bdmv" (16 chars of suffix).
    size_t len = strlen(fileName);
    if (len > 16)
    {
        char *bd_path = fileName;
        // Strip the entry-point file so bd_path ends at the disc root; when a
        // playlist was opened directly, also remember its numeric id.
        if (_strcmpi(bd_path + strlen(bd_path) - 16, "\\BDMV\\index.bdmv") == 0)
        {
            bd_path[strlen(bd_path) - 15] = 0;
        }
        else if (len > 22 && _strcmpi(bd_path + strlen(bd_path) - 22, "\\BDMV\\MovieObject.bdmv") == 0)
        {
            bd_path[strlen(bd_path) - 21] = 0;
        }
        else if (len > 25 && _strnicmp(bd_path + strlen(bd_path) - 25, "\\BDMV\\PLAYLIST\\", 15) == 0)
        {
            // "#####.mpls" -> parse the 5-digit playlist number
            char *playlist = &bd_path[strlen(bd_path) - 10];
            bd_path[strlen(bd_path) - 24] = 0;
            playlist[5] = 0;
            iPlaylist = atoi(playlist);
        }
        else
        {
            return E_FAIL;
        }
        // Open BluRay
        BLURAY *bd = bd_open(bd_path, nullptr);
        if (!bd)
        {
            return E_FAIL;
        }
        m_pBD = bd;
        strcpy_s(m_cBDRootPath, bd_path);
        // Direct playlist open: enumerate everything; otherwise only titles
        // longer than 180 seconds, relaxing the filters if nothing is found.
        uint32_t timelimit = (iPlaylist != -1) ? 0 : 180;
        uint8_t flags = (iPlaylist != -1) ? TITLES_ALL : TITLES_RELEVANT;
        // Fetch titles
    fetchtitles:
        m_nTitleCount = bd_get_titles(bd, flags, timelimit);
        if (m_nTitleCount <= 0)
        {
            if (timelimit > 0)
            {
                timelimit = 0;
                goto fetchtitles;
            }
            if (flags != TITLES_ALL)
            {
                flags = TITLES_ALL;
                goto fetchtitles;
            }
            return E_FAIL;
        }
        DbgLog((LOG_TRACE, 20, L"Found %d titles", m_nTitleCount));
        DbgLog((LOG_TRACE, 20, L" ------ Begin Title Listing ------"));
        // Pick the requested playlist's title, or track the longest one seen.
        uint64_t longest_duration = 0;
        uint32_t title_id = 0;
        boolean found = false;
        for (uint32_t i = 0; i < m_nTitleCount; i++)
        {
            BLURAY_TITLE_INFO *info = bd_get_title_info(bd, i, 0);
            if (info)
            {
                DbgLog((LOG_TRACE, 20, L"Title %u, Playlist %u (%u clips, %u chapters), Duration %I64u (%I64u seconds)",
                        i, info->playlist, info->clip_count, info->chapter_count, info->duration,
                        Convert90KhzToDSTime(info->duration) / DSHOW_TIME_BASE));
                if (iPlaylist != -1 && info->playlist == iPlaylist)
                {
                    title_id = i;
                    found = true;
                }
                else if (iPlaylist == -1 && info->duration > longest_duration)
                {
                    title_id = i;
                    longest_duration = info->duration;
                }
                bd_free_title_info(info);
            }
            if (found)
                break;
        }
        DbgLog((LOG_TRACE, 20, L" ------ End Title Listing ------"));
        hr = SetTitle(title_id);
    }
    return hr;
}
STDMETHODIMP CBDDemuxer::Start()
{
    // Start the inner demuxer; MVC (3D) playback is only possible when it
    // activated base+extension view combining, otherwise shut the extension
    // demuxer down and fall back to 2D.
    HRESULT hr = m_lavfDemuxer->Start();
    if (m_MVCPlayback)
    {
        if (!m_lavfDemuxer->m_bH264MVCCombine)
        {
            CloseMVCExtensionDemuxer();
            m_MVCPlayback = FALSE;
        }
    }
    return hr;
}
REFERENCE_TIME CBDDemuxer::GetDuration() const
{
    // Prefer the authoritative title duration from libbluray (90kHz ticks
    // rescaled to 100ns units); fall back to what the inner demuxer parsed.
    if (!m_pTitle)
        return m_lavfDemuxer->GetDuration();
    return av_rescale(m_pTitle->duration, 1000, 9);
}
void CBDDemuxer::ProcessBDEvents()
{
    // Drain the libbluray event queue: track clip transitions (recording the
    // timestamp offset and byte position of the new clip) and maintain the
    // end-of-title packet-flush protection flag.
    BD_EVENT event;
    while (bd_get_event(m_pBD, &event))
    {
        switch (event.event)
        {
        case BD_EVENT_PLAYITEM:
        {
            uint64_t clip_start, clip_in, bytepos;
            int ret = bd_get_clip_infos(m_pBD, event.param, &clip_start, &clip_in, &bytepos, nullptr);
            if (ret && m_lavfDemuxer->GetStartTime() != AV_NOPTS_VALUE)
            {
                // Offset by which packets of the new clip must be shifted.
                m_rtNewOffset = Convert90KhzToDSTime(clip_start - clip_in) + m_lavfDemuxer->GetStartTime();
                m_bNewOffsetPos = bytepos - 4;
                m_NewClip = event.param;
                DbgLog((LOG_TRACE, 10, L"New clip! offset: %I64d bytepos: %I64u", m_rtNewOffset, bytepos));
            }
            m_EndOfStreamPacketFlushProtection = FALSE;
            break;
        }
        case BD_EVENT_END_OF_TITLE:
            m_EndOfStreamPacketFlushProtection = TRUE;
            break;
        case BD_EVENT_SEEK:
            m_EndOfStreamPacketFlushProtection = FALSE;
            break;
        }
    }
}
// Adjust packet timestamps by the active clip offset, detect per-stream clip
// transitions, and at end-of-stream drop packets still belonging to a previous
// segment. Returns S_FALSE to signal "drop this packet".
STDMETHODIMP CBDDemuxer::ProcessPacket(Packet *pPacket)
{
    ProcessBDEvents();
    if (pPacket && pPacket->rtStart != Packet::INVALID_TIME)
    {
        REFERENCE_TIME rtOffset = m_rtOffset[pPacket->StreamId];
        // A stream enters the new clip once one of its packets lies at/past the
        // clip's starting byte position; only then switch its offset.
        if (m_StreamClip[pPacket->StreamId] != m_NewClip && pPacket->bPosition != -1 &&
            pPacket->bPosition >= m_bNewOffsetPos)
        {
            DbgLog((LOG_TRACE, 10,
                    L"Actual clip change detected in stream %d; time: %I64d, old offset: %I64d, new offset: %I64d",
                    pPacket->StreamId, pPacket->rtStart, rtOffset, m_rtNewOffset));
            rtOffset = m_rtOffset[pPacket->StreamId] = m_rtNewOffset;
            m_StreamClip[pPacket->StreamId] = m_NewClip;
            // Flush MVC extensions on stream change, it'll re-fill automatically
            if (m_MVCPlayback && pPacket->StreamId == m_lavfDemuxer->m_nH264MVCBaseStream &&
                m_MVCExtensionClip != m_NewClip)
            {
                m_lavfDemuxer->FlushMVCExtensionQueue();
                CloseMVCExtensionDemuxer();
                OpenMVCExtensionDemuxer(m_NewClip);
            }
        }
        // DbgLog((LOG_TRACE, 10, L"Frame: stream: %d, start: %I64d, corrected: %I64d, bytepos: %I64d",
        // pPacket->StreamId, pPacket->rtStart, pPacket->rtStart + rtOffset, pPacket->bPosition));
        pPacket->rtStart += rtOffset;
        pPacket->rtStop += rtOffset;
    }
    // After an end-of-title event, packets positioned before the final segment
    // are stale demuxer output and must not reach the renderer.
    if (m_EndOfStreamPacketFlushProtection && pPacket && pPacket->bPosition != -1)
    {
        if (pPacket->bPosition < m_bNewOffsetPos)
        {
            DbgLog((
                LOG_TRACE, 10,
                L"Dropping packet from a previous segment (pos %I64d, segment started at %I64d) at EOS, from stream %d",
                pPacket->bPosition, m_bNewOffsetPos, pPacket->StreamId));
            return S_FALSE;
        }
    }
    return S_OK;
}
void CBDDemuxer::CloseMVCExtensionDemuxer()
{
    // Close the secondary demuxer used for the MVC (3D) extension stream, if
    // one is open, and forget which clip it belonged to.
    if (m_MVCFormatContext)
    {
        avformat_close_input(&m_MVCFormatContext);
    }
    m_MVCExtensionClip = -1;
}
STDMETHODIMP CBDDemuxer::OpenMVCExtensionDemuxer(int playItem)
{
    // Open the m2ts file carrying the MVC (3D dependent-view) data for the
    // given play item and select the H.264 MVC stream inside it; all other
    // streams in that file are discarded.
    int ret;
    MPLS_PL *pl = bd_get_title_mpls(m_pBD);
    if (!pl)
        return E_FAIL;
    const char *clip_id = pl->ext_sub_path[m_MVCExtensionSubPathIndex].sub_play_item[playItem].clip->clip_id;
    char *fileName = av_asprintf("%sBDMV\\STREAM\\%s.m2ts", m_cBDRootPath, clip_id);
    if (!fileName) // fix: av_asprintf result was used unchecked
        return E_OUTOFMEMORY;
    DbgLog((LOG_TRACE, 10, "CBDDemuxer::OpenMVCExtensionDemuxer(): Opening MVC extension stream at %s", fileName));
    // Try to open the MVC stream
    const AVInputFormat *format = av_find_input_format("mpegts");
    ret = avformat_open_input(&m_MVCFormatContext, fileName, format, nullptr);
    av_freep(&fileName); // fix: the av_asprintf buffer was never released (leaked per open)
    if (ret < 0)
    {
        DbgLog((LOG_TRACE, 10, "-> Opening MVC demuxing context failed (%d)", ret));
        goto fail;
    }
    // The main demuxer performs its own timestamp-wrap handling.
    av_opt_set_int(m_MVCFormatContext, "correct_ts_overflow", 0, 0);
    // Find the streams
    ret = avformat_find_stream_info(m_MVCFormatContext, nullptr);
    if (ret < 0)
    {
        DbgLog((LOG_TRACE, 10, "-> avformat_find_stream_info failed (%d)", ret));
        goto fail;
    }
    // Find and select our MVC stream
    DbgLog((LOG_TRACE, 10, "-> MVC m2ts has %d streams", m_MVCFormatContext->nb_streams));
    m_MVCStreamIndex = -1; // fix: don't let a stale index from a previous clip pass the check below
    for (unsigned i = 0; i < m_MVCFormatContext->nb_streams; i++)
    {
        if (m_MVCFormatContext->streams[i]->codecpar->codec_id == AV_CODEC_ID_H264_MVC &&
            m_MVCFormatContext->streams[i]->codecpar->extradata_size > 0)
        {
            m_MVCStreamIndex = i;
            break;
        }
        else
        {
            m_MVCFormatContext->streams[i]->discard = AVDISCARD_ALL;
        }
    }
    if (m_MVCStreamIndex < 0)
    {
        DbgLog((LOG_TRACE, 10, "-> MVC Stream not found"));
        goto fail;
    }
    m_MVCExtensionClip = playItem;
    return S_OK;
fail:
    CloseMVCExtensionDemuxer();
    return E_FAIL;
}
#define MVC_DEMUX_COUNT 100
// Demux up to MVC_DEMUX_COUNT extension packets and queue them on the inner
// demuxer, dropping any with a DTS before rtBase so base and extension views
// stay aligned. Returns S_OK once a packet matching rtBase was queued, S_FALSE
// if packets were queued but none matched, E_FAIL if nothing could be read.
STDMETHODIMP CBDDemuxer::FillMVCExtensionQueue(REFERENCE_TIME rtBase)
{
    if (!m_MVCFormatContext)
        return E_FAIL;
    int ret, count = 0;
    // With no reference time there is nothing to match; any queued data counts.
    bool found = (rtBase == Packet::INVALID_TIME);
    AVPacket *pMVCPacket = av_packet_alloc();
    while (count < MVC_DEMUX_COUNT)
    {
        av_packet_unref(pMVCPacket);
        ret = av_read_frame(m_MVCFormatContext, pMVCPacket);
        if (ret == AVERROR(EINTR) || ret == AVERROR(EAGAIN))
        {
            // transient condition, just retry
            continue;
        }
        else if (ret == AVERROR_EOF)
        {
            DbgLog((LOG_TRACE, 10, L"EOF reading MVC extension data"));
            break;
        }
        else if (pMVCPacket->size <= 0 || pMVCPacket->stream_index != m_MVCStreamIndex)
        {
            // not our stream (everything else is set to AVDISCARD_ALL, but be safe)
            continue;
        }
        else
        {
            AVStream *stream = m_MVCFormatContext->streams[pMVCPacket->stream_index];
            REFERENCE_TIME rtDTS =
                m_lavfDemuxer->ConvertTimestampToRT(pMVCPacket->dts, stream->time_base.num, stream->time_base.den);
            REFERENCE_TIME rtPTS =
                m_lavfDemuxer->ConvertTimestampToRT(pMVCPacket->pts, stream->time_base.num, stream->time_base.den);
            if (rtBase == Packet::INVALID_TIME || rtDTS == Packet::INVALID_TIME)
            {
                // do nothing, can't compare timestamps when they are not set
            }
            else if (rtDTS < rtBase)
            {
                // Extension packet predates the base view position; discard it.
                DbgLog((LOG_TRACE, 10,
                        L"CBDDemuxer::FillMVCExtensionQueue(): Dropping MVC extension at %I64d, base is %I64d", rtDTS,
                        rtBase));
                continue;
            }
            else if (rtDTS == rtBase)
            {
                found = true;
            }
            // NOTE(review): plain operator new throws on failure by default, so
            // this null check looks dead unless the project overrides new.
            Packet *pPacket = new Packet();
            if (!pPacket)
            {
                av_packet_free(&pMVCPacket);
                return E_OUTOFMEMORY;
            }
            pPacket->SetPacket(pMVCPacket);
            pPacket->rtDTS = rtDTS;
            pPacket->rtPTS = rtPTS;
            m_lavfDemuxer->QueueMVCExtension(pPacket);
            count++;
        }
    };
    av_packet_free(&pMVCPacket);
    if (found)
        return S_OK;
    else if (count > 0)
        return S_FALSE;
    else
        return E_FAIL;
}
// Select a title on the disc: fetch its info, detect BD3D MVC sub-paths,
// (re)create the avio context over libbluray and a fresh inner lavf demuxer on
// top of it, and allocate the per-stream clip/offset tracking arrays.
STDMETHODIMP CBDDemuxer::SetTitle(int idx)
{
    HRESULT hr = S_OK;
    int ret; // return values
    if (m_pTitle)
    {
        bd_free_title_info(m_pTitle);
    }
    // Init Event Queue
    bd_get_event(m_pBD, nullptr);
    // Select title
    m_pTitle = bd_get_title_info(m_pBD, idx, 0);
    ret = bd_select_title(m_pBD, idx);
    if (ret == 0)
    {
        return E_FAIL;
    }
    // A type-8 extension sub-path with one sub-playitem per playitem marks the
    // BD3D MVC dependent-view stream.
    MPLS_PL *mpls = bd_get_title_mpls(m_pBD);
    if (mpls)
    {
        for (int i = 0; i < mpls->ext_sub_count; i++)
        {
            if (mpls->ext_sub_path[i].type == 8 && mpls->ext_sub_path[i].sub_playitem_count == mpls->list_count)
            {
                DbgLog((LOG_TRACE, 20, L"CBDDemuxer::SetTitle(): Enabling BD3D MVC demuxing"));
                DbgLog((LOG_TRACE, 20, L" -> MVC_Base_view_R_flag: %d", m_pTitle->mvc_base_view_r_flag));
                m_MVCPlayback = TRUE;
                m_MVCExtensionSubPathIndex = i;
                break;
            }
        }
    }
    CloseMVCExtensionDemuxer();
    // Replace any previous avio context (buffer is not owned by the context).
    if (m_pb)
    {
        av_free(m_pb->buffer);
        av_free(m_pb);
    }
    // NOTE(review): av_mallocz/avio_alloc_context results are not checked; an
    // allocation failure here would surface as a crash in OpenInputStream.
    uint8_t *buffer = (uint8_t *)av_mallocz(BD_READ_BUFFER_SIZE + AV_INPUT_BUFFER_PADDING_SIZE);
    m_pb = avio_alloc_context(buffer, BD_READ_BUFFER_SIZE, 0, this, BDByteStreamRead, nullptr, BDByteStreamSeek);
    SafeRelease(&m_lavfDemuxer);
    SAFE_CO_FREE(m_StreamClip);
    SAFE_CO_FREE(m_rtOffset);
    m_lavfDemuxer = new CLAVFDemuxer(m_pLock, m_pSettings);
    m_lavfDemuxer->AddRef();
    m_lavfDemuxer->SetBluRay(this);
    if (FAILED(hr = m_lavfDemuxer->OpenInputStream(m_pb, nullptr, "mpegts", TRUE)))
    {
        SafeRelease(&m_lavfDemuxer);
        return hr;
    }
    // MVC requires the inner demuxer to combine base+extension views.
    if (m_MVCPlayback && !m_lavfDemuxer->m_bH264MVCCombine)
    {
        DbgLog((LOG_TRACE, 10,
                L"CBDDemuxer::SetTitle(): MVC demuxing was requested, but main demuxer did not activate MVC mode, "
                L"disabling."));
        CloseMVCExtensionDemuxer();
        m_MVCPlayback = FALSE;
    }
    m_lavfDemuxer->SeekByte(0, 0);
    // Process any events that occurred during opening
    ProcessBDEvents();
    // Reset EOS protection
    m_EndOfStreamPacketFlushProtection = FALSE;
    // space for storing stream offsets
    m_StreamClip = (uint16_t *)CoTaskMemAlloc(sizeof(*m_StreamClip) * m_lavfDemuxer->GetNumStreams());
    if (!m_StreamClip)
        return E_OUTOFMEMORY;
    // all-ones marks "no clip assigned yet" for every stream
    memset(m_StreamClip, -1, sizeof(*m_StreamClip) * m_lavfDemuxer->GetNumStreams());
    m_rtOffset = (REFERENCE_TIME *)CoTaskMemAlloc(sizeof(*m_rtOffset) * m_lavfDemuxer->GetNumStreams());
    if (!m_rtOffset)
        return E_OUTOFMEMORY;
    memset(m_rtOffset, 0, sizeof(*m_rtOffset) * m_lavfDemuxer->GetNumStreams());
    DbgLog((LOG_TRACE, 20, L"Opened BD title with %d clips and %d chapters", m_pTitle->clip_count,
            m_pTitle->chapter_count));
    return S_OK;
}
void CBDDemuxer::ProcessBluRayMetadata()
{
    // Post-open metadata processing: set up the MVC (3D) extension stream,
    // merge per-clip stream info (languages, formats) into the AVStreams, and
    // export the PG/IG 3D offset-sequence lists as container metadata.
    if (m_MVCPlayback)
    {
        HRESULT hr = OpenMVCExtensionDemuxer(m_NewClip);
        if (SUCCEEDED(hr))
        {
            AVStream *mvcStream = m_MVCFormatContext->streams[m_MVCStreamIndex];
            // Create a fake stream and set the appropriate properties
            m_lavfDemuxer->AddMPEGTSStream(mvcStream->id, 0x20);
            AVStream *avstream = m_lavfDemuxer->GetAVStreamByPID(mvcStream->id);
            if (avstream)
            {
                avstream->codecpar->codec_id = AV_CODEC_ID_H264_MVC;
                avstream->codecpar->extradata =
                    (BYTE *)av_mallocz(mvcStream->codecpar->extradata_size + AV_INPUT_BUFFER_PADDING_SIZE);
                // fix: only copy/advertise extradata if the allocation succeeded
                if (avstream->codecpar->extradata)
                {
                    avstream->codecpar->extradata_size = mvcStream->codecpar->extradata_size;
                    memcpy(avstream->codecpar->extradata, mvcStream->codecpar->extradata,
                           mvcStream->codecpar->extradata_size);
                }
            }
        }
        else
        {
            m_MVCPlayback = FALSE;
        }
    }
    ASSERT(m_pTitle->clip_count >= 1 && m_pTitle->clips);
    // The longest clip gets to overwrite stream info collected so far.
    int64_t max_clip_duration = 0;
    for (uint32_t i = 0; i < m_pTitle->clip_count; ++i)
    {
        int64_t clip_duration = (m_pTitle->clips[i].out_time - m_pTitle->clips[i].in_time);
        bool overwrite_info = false;
        if (clip_duration > max_clip_duration)
        {
            overwrite_info = true;
            max_clip_duration = clip_duration;
        }
        CLPI_CL *clpi = bd_get_clpi(m_pBD, i);
        ProcessClipInfo(clpi, overwrite_info);
        bd_free_clpi(clpi);
    }
    MPLS_PL *mpls = bd_get_title_mpls(m_pBD);
    if (mpls)
    {
        // Read the PG offsets and store them as metadata
        std::list<uint8_t> pg_sequences;
        for (int i = 0; i < mpls->play_item[0].stn.num_pg; i++)
        {
            AVStream *avstream = m_lavfDemuxer->GetAVStreamByPID(mpls->play_item[0].stn.pg[i].pid);
            if (mpls->play_item[0].stn.pg[i].ss_offset_sequence_id != 0xFF)
            {
                pg_sequences.push_back(mpls->play_item[0].stn.pg[i].ss_offset_sequence_id);
                if (avstream)
                {
                    // 3 digits + NUL is enough for a uint8_t sequence id
                    char offset[4];
                    _itoa_s(mpls->play_item[0].stn.pg[i].ss_offset_sequence_id, offset, 10);
                    av_dict_set(&avstream->metadata, "3d-plane", offset, 0);
                }
            }
        }
        // export the list of pg sequences
        if (pg_sequences.size() > 0)
        {
            // strip duplicate entries; list::unique only removes adjacent ones, so sort first
            pg_sequences.sort();
            pg_sequences.unique();
            size_t size = pg_sequences.size() * 4;
            char *offsets = new char[size];
            offsets[0] = 0;
            // Append all offsets to the string
            for (auto it = pg_sequences.begin(); it != pg_sequences.end(); it++)
            {
                size_t len = strlen(offsets);
                if (len > 0)
                {
                    offsets[len] = ',';
                    len++;
                }
                _itoa_s(*it, offsets + len, size - len, 10);
            }
            av_dict_set(&m_lavfDemuxer->m_avFormat->metadata, "pg_offset_sequences", offsets, 0);
            delete[] offsets;
        }
        // Export a list of all IG offsets
        if (mpls->play_item[0].stn.num_ig > 0)
        {
            std::list<uint8_t> ig_sequences;
            for (int i = 0; i < mpls->play_item[0].stn.num_ig; i++)
            {
                if (mpls->play_item[0].stn.ig[i].ss_offset_sequence_id != 0xFF)
                {
                    ig_sequences.push_back(mpls->play_item[0].stn.ig[i].ss_offset_sequence_id);
                }
            }
            if (ig_sequences.size() > 0)
            {
                // strip duplicate entries
                // fix: sort before unique -- unique() only removes *adjacent*
                // duplicates, matching the PG handling above
                ig_sequences.sort();
                ig_sequences.unique();
                size_t size = ig_sequences.size() * 4;
                char *offsets = new char[size];
                offsets[0] = 0;
                // Append all offsets to the string
                for (auto it = ig_sequences.begin(); it != ig_sequences.end(); it++)
                {
                    size_t len = strlen(offsets);
                    if (len > 0)
                    {
                        offsets[len] = ',';
                        len++;
                    }
                    _itoa_s(*it, offsets + len, size - len, 10);
                }
                av_dict_set(&m_lavfDemuxer->m_avFormat->metadata, "ig_offset_sequences", offsets, 0);
                delete[] offsets;
            }
        }
    }
}
/*STDMETHODIMP_(int) CBDDemuxer::GetNumTitles()
{
return m_nTitleCount;
}
STDMETHODIMP CBDDemuxer::GetTitleInfo(int idx, REFERENCE_TIME *rtDuration, WCHAR **ppszName)
{
if (idx >= m_nTitleCount) { return E_FAIL; }
BLURAY_TITLE_INFO *info = bd_get_title_info(m_pBD, idx, 0);
if (info) {
if (rtDuration) {
*rtDuration = Convert90KhzToDSTime(info->duration);
}
if (ppszName) {
WCHAR buffer[80];
swprintf_s(buffer, L"Title %d", idx + 1);
size_t size = (wcslen(buffer) + 1) * sizeof(WCHAR);
*ppszName = (WCHAR *)CoTaskMemAlloc(size);
if (*ppszName)
memcpy(*ppszName, buffer, size);
}
return S_OK;
}
return E_FAIL;
}*/
// Merge one clip's CLPI stream attributes into the demuxed AVStreams: register
// streams the TS parser missed, set language tags, and fill in video
// dimensions / audio parameters the codec parser could not determine yet.
// When `overwrite` is set (longest clip), existing language tags are replaced.
void CBDDemuxer::ProcessClipInfo(CLPI_CL *clpi, bool overwrite)
{
    if (!clpi)
    {
        return;
    }
    for (int k = 0; k < clpi->program.num_prog; k++)
    {
        for (int i = 0; i < clpi->program.progs[k].num_streams; i++)
        {
            CLPI_PROG_STREAM *stream = &clpi->program.progs[k].streams[i];
            AVStream *avstream = m_lavfDemuxer->GetAVStreamByPID(stream->pid);
            if (!avstream)
            {
                DbgLog((LOG_TRACE, 10,
                        "CBDDemuxer::ProcessStreams(): Stream with PID 0x%04x not found, trying to add it..",
                        stream->pid));
                m_lavfDemuxer->AddMPEGTSStream(stream->pid, stream->coding_type);
                avstream = m_lavfDemuxer->GetAVStreamByPID(stream->pid);
            }
            if (avstream)
            {
                if (stream->lang[0] != 0)
                    av_dict_set(&avstream->metadata, "language", (const char *)stream->lang,
                                overwrite ? 0 : AV_DICT_DONT_OVERWRITE);
                if (avstream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
                {
                    // Dimensions unknown to the parser: derive them from the
                    // BluRay video format code.
                    if (avstream->codecpar->width == 0 || avstream->codecpar->height == 0)
                    {
                        switch (stream->format)
                        {
                        case BLURAY_VIDEO_FORMAT_480I:
                        case BLURAY_VIDEO_FORMAT_480P:
                            avstream->codecpar->width = 720;
                            avstream->codecpar->height = 480;
                            break;
                        case BLURAY_VIDEO_FORMAT_576I:
                        case BLURAY_VIDEO_FORMAT_576P:
                            avstream->codecpar->width = 720;
                            avstream->codecpar->height = 576;
                            break;
                        case BLURAY_VIDEO_FORMAT_720P:
                            avstream->codecpar->width = 1280;
                            avstream->codecpar->height = 720;
                            break;
                        // unknown formats fall back to full HD
                        case BLURAY_VIDEO_FORMAT_1080I:
                        case BLURAY_VIDEO_FORMAT_1080P:
                        default:
                            avstream->codecpar->width = 1920;
                            avstream->codecpar->height = 1080;
                            break;
                        case BLURAY_VIDEO_FORMAT_2160P:
                            avstream->codecpar->width = 3840;
                            avstream->codecpar->height = 2160;
                            break;
                        }
                    }
                    // Tag the base view with the MVC eye order for the renderer.
                    if (m_MVCPlayback && stream->coding_type == BLURAY_STREAM_TYPE_VIDEO_H264)
                    {
                        av_dict_set(&avstream->metadata, "stereo_mode",
                                    m_pTitle->mvc_base_view_r_flag ? "mvc_rl" : "mvc_lr", 0);
                    }
                }
                else if (avstream->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
                {
                    // Channel info missing: derive layout, sample rate and DTS
                    // profile from the CLPI attributes.
                    if (avstream->codecpar->ch_layout.nb_channels == 0)
                    {
                        av_channel_layout_default(&avstream->codecpar->ch_layout,
                                                  (stream->format == BLURAY_AUDIO_FORMAT_MONO) ? 1
                                                  : (stream->format == BLURAY_AUDIO_FORMAT_STEREO) ? 2
                                                  : 6);
                        avstream->codecpar->sample_rate =
                            (stream->rate == BLURAY_AUDIO_RATE_96 || stream->rate == BLURAY_AUDIO_RATE_96_COMBO)
                                ? 96000
                            : (stream->rate == BLURAY_AUDIO_RATE_192 || stream->rate == BLURAY_AUDIO_RATE_192_COMBO)
                                ? 192000
                                : 48000;
                        if (avstream->codecpar->codec_id == AV_CODEC_ID_DTS)
                        {
                            if (stream->coding_type == BLURAY_STREAM_TYPE_AUDIO_DTSHD)
                                avstream->codecpar->profile = FF_PROFILE_DTS_HD_HRA;
                            else if (stream->coding_type == BLURAY_STREAM_TYPE_AUDIO_DTSHD_MASTER)
                                avstream->codecpar->profile = FF_PROFILE_DTS_HD_MA;
                        }
                    }
                }
            }
        }
    }
}
// Seek to a target time: map it to the nearest BluRay seek point, byte-seek
// the inner demuxer there, and re-open/re-position the MVC extension demuxer
// so the dependent view stays in sync with the base view.
STDMETHODIMP CBDDemuxer::Seek(REFERENCE_TIME rTime)
{
    int64_t prev = bd_tell(m_pBD);
    int64_t target = bd_find_seek_point(m_pBD, ConvertDSTimeTo90Khz(rTime));
    m_EndOfStreamPacketFlushProtection = FALSE;
    DbgLog((LOG_TRACE, 1, "Seek Request: %I64u (time); %I64u (byte), %I64u (prev byte)", rTime, target, prev));
    // +4 skips the sync byte offset used by the clip-position bookkeeping.
    HRESULT hr = m_lavfDemuxer->SeekByte(target + 4, AVSEEK_FLAG_BACKWARD);
    if (m_MVCPlayback && m_MVCFormatContext)
    {
        // Re-open to switch clip if needed
        CloseMVCExtensionDemuxer();
        if (FAILED(OpenMVCExtensionDemuxer(m_NewClip)))
            return E_FAIL;
        // Translate the target into the extension stream's time base,
        // compensating for the clip offset.
        int64_t seek_pts = 0;
        if (rTime > 0)
        {
            AVStream *stream = m_MVCFormatContext->streams[m_MVCStreamIndex];
            rTime -= m_rtNewOffset;
            rTime -=
                10000000; // seek one second before the target to ensure the MVC queue isn't out of sync for too long
            seek_pts = m_lavfDemuxer->ConvertRTToTimestamp(rTime, stream->time_base.num, stream->time_base.den);
        }
        if (seek_pts < 0)
            seek_pts = 0;
        av_seek_frame(m_MVCFormatContext, m_MVCStreamIndex, seek_pts, AVSEEK_FLAG_BACKWARD);
    }
    return hr;
}
// BluRay payloads are always MPEG-TS, regardless of the entry-point file type.
const char *CBDDemuxer::GetContainerFormat() const
{
    return "mpegts";
}
/////////////////////////////////////////////////////////////////////////////
// IAMExtendedSeeking
STDMETHODIMP CBDDemuxer::get_ExSeekCapabilities(long *pExCapabilities)
{
    // Seeking is always supported; marker (chapter) seeking only when the
    // selected title has more than one chapter.
    CheckPointer(pExCapabilities, E_POINTER);
    long caps = AM_EXSEEK_CANSEEK;
    if (m_pTitle->chapter_count > 1)
        caps |= AM_EXSEEK_MARKERSEEK;
    *pExCapabilities = caps;
    return S_OK;
}
STDMETHODIMP CBDDemuxer::get_MarkerCount(long *pMarkerCount)
{
    // One seek marker per BluRay chapter in the selected title.
    CheckPointer(pMarkerCount, E_POINTER);
    *pMarkerCount = static_cast<long>(m_pTitle->chapter_count);
    return S_OK;
}
// Report the current chapter as a 1-based marker index.
STDMETHODIMP CBDDemuxer::get_CurrentMarker(long *pCurrentMarker)
{
    CheckPointer(pCurrentMarker, E_POINTER);
    // libbluray chapters are 0-based; markers are 1-based.
    *pCurrentMarker = bd_get_current_chapter(m_pBD) + 1;
    // NOTE(review): E_FAIL is returned even though the value was filled in --
    // possibly intentional (callers may ignore the marker); confirm before changing.
    return E_FAIL;
}
STDMETHODIMP CBDDemuxer::GetMarkerTime(long MarkerNum, double *pMarkerTime)
{
    // Return the start time of marker (chapter) MarkerNum in seconds.
    CheckPointer(pMarkerTime, E_POINTER);
    // Markers are 1-based; map to the 0-based chapter index. An out-of-range
    // input (including 0, which wraps) fails the unsigned bounds check below.
    const unsigned int chapterIdx = MarkerNum - 1;
    if (chapterIdx >= m_pTitle->chapter_count)
        return E_FAIL;
    // Chapter starts are 90kHz ticks; convert to 100ns units, then seconds.
    const REFERENCE_TIME rtStart = Convert90KhzToDSTime(m_pTitle->chapters[chapterIdx].start);
    *pMarkerTime = (double)rtStart / DSHOW_TIME_BASE;
    return S_OK;
}
STDMETHODIMP CBDDemuxer::GetMarkerName(long MarkerNum, BSTR *pbstrMarkerName)
{
    // Return a display name for marker (chapter) MarkerNum as a BSTR.
    CheckPointer(pbstrMarkerName, E_POINTER);
    // Markers are 1-based; validate against the 0-based chapter count (an
    // input of 0 wraps to a huge unsigned value and is rejected here).
    const unsigned int chapterIdx = MarkerNum - 1;
    if (chapterIdx >= m_pTitle->chapter_count)
        return E_FAIL;
    // BluRay chapters carry no names; synthesize "Chapter N".
    OLECHAR szName[128];
    swprintf_s(szName, L"Chapter %d", MarkerNum);
    *pbstrMarkerName = SysAllocString(szName);
    return S_OK;
}
| 31,505
|
C++
|
.cpp
| 860
| 26.788372
| 120
| 0.55122
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| true
| true
| false
|
22,079
|
LAVFDemuxer.cpp
|
Nevcairiel_LAVFilters/demuxer/Demuxers/LAVFDemuxer.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "LAVFDemuxer.h"
#include "LAVFUtils.h"
#include "LAVFStreamInfo.h"
#include "ILAVPinInfo.h"
#include "LAVFVideoHelper.h"
#include "ExtradataParser.h"
#include "IMediaSideDataFFmpeg.h"
#include "LAVSplitterSettingsInternal.h"
#include "moreuuids.h"
extern "C"
{
typedef struct CodecMime
{
char str[32];
enum AVCodecID id;
} CodecMime;
#include "libavformat/mpegts.h"
#include "libavformat/matroska.h"
#include "libavutil/avstring.h"
enum AVCodecID ff_get_pcm_codec_id(int bps, int flt, int be, int sflags);
#include "libavformat/isom.h"
#include "libavformat/demux.h"
}
#ifdef DEBUG
#include "lavf_log.h"
#endif
#include "BDDemuxer.h"
#include "CueSheet.h"
#define AVFORMAT_OPEN_TIMEOUT 20
extern void lavf_get_iformat_infos(const AVInputFormat *pFormat, const char **pszName, const char **pszDescription);
static const AVRational AV_RATIONAL_TIMEBASE = {1, AV_TIME_BASE};
// Enumerate all avformat demuxers and return those that have a LAV name/description.
std::set<FormatInfo> CLAVFDemuxer::GetFormatList()
{
    std::set<FormatInfo> formats;
    const AVInputFormat *f = nullptr;
    void *state = nullptr; // fix: use nullptr, consistent with the rest of the file
    // fix: parenthesize the assignment-in-condition to silence -Wparentheses / C4706
    while ((f = av_demuxer_iterate(&state)) != nullptr)
    {
        FormatInfo format;
        lavf_get_iformat_infos(f, &format.strName, &format.strDescription);
        // only formats with a LAV name are exposed in the settings UI
        if (format.strName)
            formats.insert(format);
    }
    return formats;
}
// Construct the demuxer: hook up FFmpeg logging, cache settings, and apply the
// ZoomPlayer ITrackInfo compatibility workaround.
CLAVFDemuxer::CLAVFDemuxer(CCritSec *pLock, ILAVFSettingsInternal *settings)
    : CBaseDemuxer(L"lavf demuxer", pLock)
{
#ifdef DEBUG
    DbgSetModuleLevel(LOG_CUSTOM1, DWORD_MAX); // FFMPEG messages use custom1
    av_log_set_callback(lavf_log_callback);
#else
    av_log_set_callback(nullptr);
#endif

    m_bSubStreams = settings->GetSubstreamsEnabled();
    m_pSettings = settings;

    // TrackInfo is only properly handled in ZoomPlayer 8.0.0.74 and above,
    // so probe the host executable's version and disable it for older builds.
    WCHAR fileName[1024];
    GetModuleFileName(nullptr, fileName, 1024);
    const WCHAR *file = PathFindFileName(fileName);
    if (_wcsicmp(file, L"zplayer.exe") == 0)
    {
        m_bEnableTrackInfo = FALSE;
        DWORD dwVersionSize = GetFileVersionInfoSize(fileName, nullptr);
        if (dwVersionSize > 0)
        {
            void *versionInfo = CoTaskMemAlloc(dwVersionSize);
            if (!versionInfo)
                return;
            GetFileVersionInfo(fileName, 0, dwVersionSize, versionInfo);
            VS_FIXEDFILEINFO *info;
            unsigned cbInfo;
            BOOL bInfoPresent = VerQueryValue(versionInfo, TEXT("\\"), (LPVOID *)&info, &cbInfo);
            if (bInfoPresent)
            {
                // fix: removed dead self-assignment "bInfoPresent = bInfoPresent;"
                // Combine the two version DWORDs into one 64-bit value: major.minor.build.revision
                uint64_t version = info->dwFileVersionMS;
                version <<= 32;
                version += info->dwFileVersionLS;
                // 0x000800000000004A == version 8.0.0.74
                if (version >= 0x000800000000004A)
                    m_bEnableTrackInfo = TRUE;
            }
            CoTaskMemFree(versionInfo);
        }
    }
}
// Destructor: close the avformat context and release the font installer (if any fonts were installed).
CLAVFDemuxer::~CLAVFDemuxer()
{
    CleanupAVFormat();
    SAFE_DELETE(m_pFontInstaller);
}
// COM QueryInterface. The QI/QI2 macros each return on a riid match;
// ITrackInfo is only exposed when m_bEnableTrackInfo is set (ZoomPlayer workaround in ctor).
STDMETHODIMP CLAVFDemuxer::NonDelegatingQueryInterface(REFIID riid, void **ppv)
{
    CheckPointer(ppv, E_POINTER);
    *ppv = nullptr;
    return QI(IKeyFrameInfo) m_bEnableTrackInfo &&
           QI(ITrackInfo) QI2(IAMExtendedSeeking) QI2(IAMMediaContent) QI(IPropertyBag)
               QI(IDSMResourceBag) __super::NonDelegatingQueryInterface(riid, ppv);
}
/////////////////////////////////////////////////////////////////////////////
// Demuxer Functions
// Convenience overload: open by filename and let avformat create the IO context itself.
STDMETHODIMP CLAVFDemuxer::Open(LPCOLESTR pszFileName, LPCOLESTR pszUserAgent, LPCOLESTR pszReferrer)
{
    return OpenInputStream(nullptr, pszFileName, nullptr, TRUE, false, pszUserAgent, pszReferrer);
}
// Begin demuxing: verify MVC combination is still viable and un-pause the avformat context.
STDMETHODIMP CLAVFDemuxer::Start()
{
    // MVC combining only stays active if the downstream decoder accepted one of
    // the MVC media types on the base stream; otherwise fall back to plain demuxing.
    if (m_bH264MVCCombine)
    {
        CMediaType *pmt = m_pSettings->GetOutputMediatype(m_nH264MVCBaseStream);
        const bool bMVCType = pmt && (pmt->subtype == MEDIASUBTYPE_AMVC || pmt->subtype == MEDIASUBTYPE_MVC1);
        if (pmt && !bMVCType)
        {
            DbgLog(
                (LOG_TRACE, 10,
                 L"CLAVFDemuxer::Start(): Disabling MVC demuxing, downstream did not select an appropriate type"));
            m_bH264MVCCombine = FALSE;
            m_nH264MVCBaseStream = -1;
            m_nH264MVCExtensionStream = -1;
        }
    }

    if (m_avFormat)
        av_read_play(m_avFormat);

    return S_OK;
}
// Request that any in-progress open/read is aborted, optionally after a timeout (seconds).
STDMETHODIMP CLAVFDemuxer::AbortOpening(int mode, int timeout)
{
    m_Abort = mode;
    // timeout == 0 means "abort immediately" (deadline of 0 is always in the past)
    m_timeAbort = (timeout != 0) ? (time(nullptr) + timeout) : 0;
    return S_OK;
}
int CLAVFDemuxer::avio_interrupt_cb(void *opaque)
{
CLAVFDemuxer *demux = (CLAVFDemuxer *)opaque;
// Check for file opening timeout
time_t now = time(nullptr);
if (demux->m_timeOpening && now > (demux->m_timeOpening + AVFORMAT_OPEN_TIMEOUT))
return 1;
if (demux->m_Abort && now > demux->m_timeAbort)
return 1;
return 0;
}
// File extensions treated as still images; such files are forced through the
// image2/image2pipe demuxers in OpenInputStream().
static LPCWSTR wszImageExtensions[] = {
    L".png", L".mng", L".pns", // PNG
    L".tif", L".tiff", // TIFF
    L".jpeg", L".jpg", L".jps", // JPEG
    L".tga", // TGA
    L".bmp", // BMP
    L".j2c", // JPEG2000
};
// DVD structure files that the demuxer refuses to open directly.
static LPCWSTR wszBlockedExtensions[] = {L".ifo", L".bup"};
// Translation table from librtmp-style URL parameter names (appended after a space
// in the URL) to the avformat rtmp protocol option names.
static std::pair<const char *, const char *> rtmpParametersTranslate[] = {
    std::make_pair("app", "rtmp_app"),
    std::make_pair("buffer", "rtmp_buffer"),
    std::make_pair("conn", "rtmp_conn"),
    std::make_pair("flashVer", "rtmp_flashver"),
    std::make_pair("rtmp_flush_interval", "rtmp_flush_interval"),
    std::make_pair("live", "rtmp_live"),
    std::make_pair("pageUrl", "rtmp_pageurl"),
    std::make_pair("playpath", "rtmp_playpath"),
    std::make_pair("subscribe", "rtmp_subscribe"),
    std::make_pair("swfHash", "rtmp_swfhash"),
    std::make_pair("swfSize", "rtmp_swfsize"),
    std::make_pair("swfUrl", "rtmp_swfurl"),
    std::make_pair("swfVfy", "rtmp_swfverify"),
    std::make_pair("tcUrl", "rtmp_tcurl")};
// Open an input for demuxing, either from a custom IO context (byteContext) or a
// file/URL (pszFileName). Performs URL scheme rewriting (pipe/mms/icyx/rtsp/rtmp),
// sets protocol and demuxer options, then opens the avformat context. On probe
// failure with an explicit format, retries once with format auto-detection.
STDMETHODIMP CLAVFDemuxer::OpenInputStream(AVIOContext *byteContext, LPCOLESTR pszFileName, const char *format,
                                           BOOL bForce, BOOL bFileSource, LPCOLESTR pszUserAgent,
                                           LPCOLESTR pszReferrer)
{
    CAutoLock lock(m_pLock);
    HRESULT hr = S_OK;
    int ret; // return code from avformat functions

    // Convert the filename from wchar to char for avformat
    char *fileName = NULL;
    if (pszFileName)
        fileName = CoTaskGetMultiByteFromWideChar(CP_UTF8, 0, pszFileName, -1);
    if (fileName == NULL)
    {
        // NOTE(review): CoTaskMemAlloc can return nullptr; the write below assumes success — confirm
        fileName = (char *)CoTaskMemAlloc(1);
        *fileName = 0;
    }
    // handle pipe, we only support stdin pipes
    if (_strnicmp("pipe://", fileName, 7) == 0)
    {
        // convert pipe://stdin to pipe:0
        fileName[5] = '0';
        fileName[6] = 0;
    }
    // rewrite "mms:" to "mmsh:" by shifting the string right one char
    // NOTE(review): assumes the buffer from CoTaskGetMultiByteFromWideChar has room
    // for one extra character — verify against its implementation
    if (_strnicmp("mms:", fileName, 4) == 0)
    {
        memmove(fileName + 1, fileName, strlen(fileName));
        memcpy(fileName, "mmsh", 4);
    }
    // replace "icyx" protocol by http
    if (_strnicmp("icyx:", fileName, 5) == 0)
    {
        memcpy(fileName, "http", 4);
    }
    char *rtmp_prameters = nullptr;
    const char *rtsp_transport = nullptr;
    // check for rtsp transport protocol options
    if (_strnicmp("rtsp", fileName, 4) == 0)
    {
        if (_strnicmp("rtspu:", fileName, 6) == 0)
        {
            rtsp_transport = "udp";
        }
        else if (_strnicmp("rtspm:", fileName, 6) == 0)
        {
            rtsp_transport = "udp_multicast";
        }
        else if (_strnicmp("rtspt:", fileName, 6) == 0)
        {
            rtsp_transport = "tcp";
        }
        else if (_strnicmp("rtsph:", fileName, 6) == 0)
        {
            rtsp_transport = "http";
        }
        // replace "rtsp[u|m|t|h]" protocol by rtsp
        if (rtsp_transport != nullptr)
        {
            memmove(fileName + 4, fileName + 5, strlen(fileName) - 4);
        }
    }
    else if (_strnicmp("rtmp", fileName, 4) == 0)
    {
        // librtmp-style options may be appended after a space; split them off the URL
        rtmp_prameters = strchr(fileName, ' ');
        if (rtmp_prameters)
        {
            *rtmp_prameters = '\0'; // Trim not supported part form fileName
        }
    }
    AVIOInterruptCB cb = {avio_interrupt_cb, this};
trynoformat:
    // Create the avformat_context
    m_avFormat = avformat_alloc_context();
    m_avFormat->pb = byteContext;
    m_avFormat->interrupt_callback = cb;
    if (m_avFormat->pb)
        m_avFormat->flags |= AVFMT_FLAG_CUSTOM_IO;
    // NOTE(review): this 'extension' is shadowed by the declaration below and otherwise unused
    LPWSTR extension = pszFileName ? PathFindExtensionW(pszFileName) : nullptr;
    const AVInputFormat *inputFormat = nullptr;
    if (format)
    {
        inputFormat = av_find_input_format(format);
    }
    else if (pszFileName)
    {
        LPWSTR extension = PathFindExtensionW(pszFileName);
        // Force image files through the image2/image2pipe demuxers
        for (int i = 0; i < countof(wszImageExtensions); i++)
        {
            if (_wcsicmp(extension, wszImageExtensions[i]) == 0)
            {
                if (byteContext)
                {
                    inputFormat = av_find_input_format("image2pipe");
                }
                else
                {
                    inputFormat = av_find_input_format("image2");
                }
                break;
            }
        }
        // Refuse blocked (DVD structure) files when reading from a real file
        if (byteContext == nullptr || bFileSource)
        {
            for (int i = 0; i < countof(wszBlockedExtensions); i++)
            {
                if (_wcsicmp(extension, wszBlockedExtensions[i]) == 0)
                {
                    goto done;
                }
            }
        }
    }
    // Disable loading of external mkv segments, if required
    if (!m_pSettings->GetLoadMatroskaExternalSegments())
        m_avFormat->flags |= AVFMT_FLAG_NOEXTERNAL;
    // demuxer/protocol options
    AVDictionary *options = nullptr;
    av_dict_set(&options, "icy", "1", 0); // request ICY metadata
    av_dict_set(&options, "advanced_editlist", "0", 0); // disable broken mov editlist handling
    av_dict_set(&options, "reconnect", "1", 0); // for http, reconnect if we get disconnected
    av_dict_set(&options, "skip_clear", "1", 0); // mpegts program handling
    av_dict_set(&options, "max_reload", "7", 0); // playlist reloading for HLS
    if (pszUserAgent)
    {
        char *strUserAgent = CoTaskGetMultiByteFromWideChar(CP_UTF8, 0, pszUserAgent, -1);
        if (strUserAgent && *strUserAgent) // if valid, and non-empty
            av_dict_set(&options, "user_agent", strUserAgent, 0);
        SAFE_CO_FREE(strUserAgent);
    }
    if (pszReferrer != NULL)
    {
        char *strReferrer = CoTaskGetMultiByteFromWideChar(CP_UTF8, 0, pszReferrer, -1);
        if (strReferrer && *strReferrer) // if valid, and non-empty
            av_dict_set(&options, "referer", strReferrer, 0);
        SAFE_CO_FREE(strReferrer);
    }
    else
    {
        av_dict_set(&options, "referer", fileName, 0); // for http, send self as referer if none was specified explicitly
    }
    // send global side data to the decoder
    av_format_inject_global_side_data(m_avFormat);
    if (rtsp_transport != nullptr)
    {
        av_dict_set(&options, "rtsp_transport", rtsp_transport, 0);
    }
    // Translate librtmp-style "key=value" tokens into avformat rtmp_* options
    if (rtmp_prameters != nullptr)
    {
        char buff[4100];
        char *next_token = nullptr;
        bool bSwfVerify = false;
        strcpy_s(buff, rtmp_prameters + 1);
        const char *token = strtok_s(buff, " ", &next_token);
        while (token)
        {
            for (size_t i = 0; i < _countof(rtmpParametersTranslate); i++)
            {
                const size_t len = strlen(rtmpParametersTranslate[i].first);
                if (_strnicmp(token, rtmpParametersTranslate[i].first, len) == 0)
                {
                    if (strlen(token) > len + 1 && token[len] == '=')
                    {
                        // swfVfy and live need special value mapping, all others pass through
                        if (_strnicmp("swfVfy", rtmpParametersTranslate[i].first, len) == 0)
                        {
                            bSwfVerify = token[len + 1] == '1';
                            continue;
                        }
                        else if (_strnicmp("live", rtmpParametersTranslate[i].first, len) == 0)
                        {
                            if (token[len + 1] == '1')
                            {
                                av_dict_set(&options, rtmpParametersTranslate[i].second, "live", 0);
                            }
                            else if (token[len + 1] == '0')
                            {
                                av_dict_set(&options, rtmpParametersTranslate[i].second, "recorded", 0);
                            }
                            continue;
                        }
                        av_dict_set(&options, rtmpParametersTranslate[i].second, token + len + 1, 0);
                    }
                }
            }
            token = strtok_s(nullptr, " ", &next_token);
        }
        // swfVfy=1 means "verify against the swfUrl", so mirror that option
        if (bSwfVerify)
        {
            const AVDictionaryEntry *swfUrlEntry = av_dict_get(options, "rtmp_swfurl", nullptr, 0);
            if (swfUrlEntry)
            {
                av_dict_set(&options, "rtmp_swfverify", swfUrlEntry->value, 0);
            }
        }
    }
    m_timeOpening = time(nullptr);
    ret = avformat_open_input(&m_avFormat, fileName, inputFormat, &options);
    av_dict_free(&options);
    if (ret < 0)
    {
        DbgLog((LOG_ERROR, 0, TEXT("::OpenInputStream(): avformat_open_input failed (%d)"), ret));
        // An explicitly requested format may have been wrong; retry with auto-detection
        if (format)
        {
            DbgLog((LOG_ERROR, 0, TEXT(" -> trying again without specific format")));
            format = nullptr;
            avformat_close_input(&m_avFormat);
            goto trynoformat;
        }
        goto done;
    }
    DbgLog((LOG_TRACE, 10,
            TEXT("::OpenInputStream(): avformat_open_input opened file of type '%S' (took %I64d seconds)"),
            m_avFormat->iformat->name, time(nullptr) - m_timeOpening));
    m_timeOpening = 0;
    CHECK_HR(hr = InitAVFormat(pszFileName, bForce));
    SAFE_CO_FREE(fileName);
    return S_OK;
done:
    CleanupAVFormat();
    SAFE_CO_FREE(fileName);
    return E_FAIL;
}
// Register a new elementary stream (by PID and MPEG-TS stream_type) with the
// mpegts demuxer, attaching it to the program that has the most streams.
void CLAVFDemuxer::AddMPEGTSStream(int pid, uint32_t stream_type)
{
    if (m_avFormat)
    {
        // Find the "main" program, i.e. the one with the largest number of streams
        int program = -1;
        if (m_avFormat->nb_programs > 0)
        {
            unsigned nb_streams = 0;
            for (unsigned i = 0; i < m_avFormat->nb_programs; i++)
            {
                if (m_avFormat->programs[i]->nb_stream_indexes > nb_streams)
                {
                    // fix: track the current maximum; previously nb_streams was never
                    // updated, so the last non-empty program was picked instead of the largest
                    nb_streams = m_avFormat->programs[i]->nb_stream_indexes;
                    program = i;
                }
            }
        }
        avpriv_mpegts_add_stream(m_avFormat, pid, stream_type, program >= 0 ? m_avFormat->programs[program]->id : -1);
    }
}
// For a m2ts file inside a Blu-ray BDMV\STREAM (or SSIF) folder, read the matching
// CLIPINF .clpi file and import stream languages; for SSIF also detect the MVC
// stereo layout from the disc's title info.
// Returns E_FAIL if the file is not part of a BD structure or the clip info is missing.
HRESULT CLAVFDemuxer::CheckBDM2TSCPLI(LPCOLESTR pszFileName)
{
    size_t len = wcslen(pszFileName);
    // Only paths ending in \BDMV\STREAM\xxxxx.m2ts or \BDMV\STREAM\SSIF\xxxxx.ssif qualify
    if (len <= 23 || (_wcsnicmp(pszFileName + len - 23, L"\\BDMV\\STREAM\\", 13) != 0 &&
                      (len <= 28 || _wcsnicmp(pszFileName + len - 28, L"\\BDMV\\STREAM\\SSIF\\", 18) != 0)))
        return E_FAIL;
    // Get the base file name (should be a number, like 00000)
    const WCHAR *file = pszFileName + (len - 10);
    WCHAR basename[6];
    wcsncpy_s(basename, file, 5);
    basename[5] = 0;
    // Convert to UTF-8 path
    size_t a_len = WideCharToMultiByte(CP_UTF8, 0, pszFileName, -1, nullptr, 0, nullptr, nullptr);
    a_len += 2; // one extra char because CLIPINF is 7 chars and STREAM is 6, and one for the terminating-zero
    char *path = (char *)CoTaskMemAlloc(a_len * sizeof(char));
    if (!path)
        return E_OUTOFMEMORY;
    WideCharToMultiByte(CP_UTF8, 0, pszFileName, -1, path, (int)a_len, nullptr, nullptr);
    // Remove file name itself
    PathRemoveFileSpecA(path);
    // Remove SSIF if appropriate
    BOOL bSSIF = FALSE;
    if (_strnicmp(path + strlen(path) - 5, "\\SSIF", 5) == 0)
    {
        bSSIF = TRUE;
        PathRemoveFileSpecA(path);
    }
    // Remove STREAM folder
    PathRemoveFileSpecA(path);
    // Write new path
    sprintf_s(path + strlen(path), a_len - strlen(path), "\\CLIPINF\\%S.clpi", basename);
    CLPI_CL *cl = bd_read_clpi(path);
    if (!cl)
    {
        // fix: free the path buffer on this error path (was leaked previously)
        SAFE_CO_FREE(path);
        return E_FAIL;
    }
    // Clip Info was found, add the language metadata to the AVStreams
    for (unsigned i = 0; i < cl->program.num_prog; ++i)
    {
        CLPI_PROG *p = &cl->program.progs[i];
        for (unsigned k = 0; k < p->num_streams; ++k)
        {
            CLPI_PROG_STREAM *s = &p->streams[k];
            AVStream *avstream = GetAVStreamByPID(s->pid);
            if (avstream)
            {
                if (s->lang[0] != 0)
                    av_dict_set(&avstream->metadata, "language", (const char *)s->lang, 0);
            }
        }
    }
    // Free the clip
    bd_free_clpi(cl);
    cl = nullptr;
    if (bSSIF)
    {
        uint32_t clip_id = _wtoi(basename);
        // Remove filename
        PathRemoveFileSpecA(path);
        // Remove CLIPINF
        PathRemoveFileSpecA(path);
        // Remove BDMV
        PathRemoveFileSpecA(path);
        BLURAY *bd = bd_open(path, nullptr);
        // fix: path is no longer needed once the disc is opened (was leaked previously)
        SAFE_CO_FREE(path);
        if (!bd)
            return S_FALSE;
        uint32_t nTitles = bd_get_titles(bd, TITLES_RELEVANT, 0);
        BOOL found = FALSE;
        // Search all titles for the clip to determine the MVC base view layout
        for (uint32_t n = 0; n < nTitles && !found; n++)
        {
            BLURAY_TITLE_INFO *TitleInfo = bd_get_title_info(bd, n, 0);
            if (TitleInfo)
            {
                for (uint32_t i = 0; i < TitleInfo->clip_count; i++)
                {
                    BLURAY_CLIP_INFO *Clip = &TitleInfo->clips[i];
                    if (Clip->idx == clip_id)
                    {
                        AVStream *avstream = nullptr;
                        for (uint8_t c = 0; c < Clip->video_stream_count && !avstream; c++)
                        {
                            if (Clip->video_streams[c].coding_type == BLURAY_STREAM_TYPE_VIDEO_H264)
                                avstream = GetAVStreamByPID(Clip->video_streams[c].pid);
                        }
                        if (avstream)
                            av_dict_set(&avstream->metadata, "stereo_mode",
                                        TitleInfo->mvc_base_view_r_flag ? "mvc_rl" : "mvc_lr", 0);
                        found = TRUE;
                        break;
                    }
                }
                bd_free_title_info(TitleInfo);
            }
        }
        bd_close(bd);
    }
    // fix: release the path buffer; SAFE_CO_FREE nulls the pointer, so this is a
    // safe no-op when the SSIF branch above already freed it
    SAFE_CO_FREE(path);
    return S_OK;
}
// Create a packet parser for the stream if one is required and parsing
// has not been globally disabled on the format context.
inline static int init_parser(AVFormatContext *s, AVStream *st)
{
    const bool bParsingDisabled = (s->flags & AVFMT_FLAG_NOPARSE) != 0;
    if (!bParsingDisabled && av_lav_stream_parser_get_needed(st))
        av_lav_stream_parser_init(st);
    return 0;
}
// Configure per-codec parser flags, mainly whether the parser is allowed
// to rewrite ("mangle") timestamps.
void CLAVFDemuxer::UpdateParserFlags(AVStream *st)
{
    int flags = av_lav_stream_parser_get_flags(st);
    switch (st->codecpar->codec_id)
    {
    case AV_CODEC_ID_MPEG1VIDEO:
    case AV_CODEC_ID_MPEG2VIDEO:
        // Raw mpegvideo streams rely on parser timestamps; containers do not
        if (_stricmp(m_pszInputFormat, "mpegvideo") != 0)
            flags |= PARSER_FLAG_NO_TIMESTAMP_MANGLING;
        break;
    case AV_CODEC_ID_H264:
        flags |= PARSER_FLAG_NO_TIMESTAMP_MANGLING;
        break;
    case AV_CODEC_ID_VC1:
        // VC-1 mangling depends on the user/auto correction mode (see SettingsChanged)
        if (m_bVC1Correction)
            flags &= ~PARSER_FLAG_NO_TIMESTAMP_MANGLING;
        else
            flags |= PARSER_FLAG_NO_TIMESTAMP_MANGLING;
        break;
    default:
        break;
    }
    av_lav_stream_parser_update_flags(st, flags);
}
// Maps attached-picture codec ids to a mime type and a file extension, used to
// synthesize a filename/mimetype for embedded cover art exported via IDSMResourceBag.
static struct sCoverMimeTypes
{
    AVCodecID codec;
    LPCWSTR mime;
    LPCWSTR ext;
} CoverMimeTypes[] = {
    {AV_CODEC_ID_MJPEG, L"image/jpeg", L".jpg"}, {AV_CODEC_ID_PNG, L"image/png", L".png"},
    {AV_CODEC_ID_GIF, L"image/gif", L".gif"}, {AV_CODEC_ID_BMP, L"image/bmp", L".bmp"},
    {AV_CODEC_ID_TIFF, L"image/tiff", L".tiff"},
};
// Initialize the opened avformat context: validate the format against user settings,
// probe streams, import BD/ICY/CUE metadata, set up per-stream parsers, export
// attachments (fonts, cover art), and finally create the output streams.
STDMETHODIMP CLAVFDemuxer::InitAVFormat(LPCOLESTR pszFileName, BOOL bForce)
{
    HRESULT hr = S_OK;
    // Resolve the canonical LAV name of the container and check it is enabled (unless forced)
    const char *format = nullptr;
    lavf_get_iformat_infos(m_avFormat->iformat, &format, nullptr);
    if (!bForce && (!format || !m_pSettings->IsFormatEnabled(format)))
    {
        DbgLog((LOG_TRACE, 20, L"::InitAVFormat() - format of type '%S' disabled, failing",
                format ? format : m_avFormat->iformat->name));
        return E_FAIL;
    }
    m_pszInputFormat = format ? format : m_avFormat->iformat->name;
    m_bVC1SeenTimestamp = FALSE;
    // NOTE(review): 'extension' is never used in this function — candidate for removal
    LPWSTR extension = pszFileName ? PathFindExtensionW(pszFileName) : nullptr;
    // Cache container-type flags used throughout the demuxer
    m_bMatroska = (_strnicmp(m_pszInputFormat, "matroska", 8) == 0);
    m_bOgg = (_strnicmp(m_pszInputFormat, "ogg", 3) == 0);
    m_bAVI = (_strnicmp(m_pszInputFormat, "avi", 3) == 0);
    m_bMPEGTS = (_strnicmp(m_pszInputFormat, "mpegts", 6) == 0);
    m_bMPEGPS = (_stricmp(m_pszInputFormat, "mpeg") == 0);
    m_bRM = (_stricmp(m_pszInputFormat, "rm") == 0);
    m_bPMP = (_stricmp(m_pszInputFormat, "pmp") == 0);
    m_bMP4 = (_stricmp(m_pszInputFormat, "mp4") == 0);
    m_bTSDiscont = (m_avFormat->iformat->flags & AVFMT_TS_DISCONT) || m_bRM || (_stricmp(m_pszInputFormat, "dash") == 0);
    // Detect network playback from the URL scheme (anything other than "file")
    WCHAR szProt[24] = L"file";
    if (pszFileName)
    {
        DWORD dwNumChars = 24;
        hr = UrlGetPart(pszFileName, szProt, &dwNumChars, URL_PART_SCHEME, 0);
        if (SUCCEEDED(hr) && dwNumChars && (_wcsicmp(szProt, L"file") != 0))
        {
            m_avFormat->flags |= AVFMT_FLAG_NETWORK;
            DbgLog((LOG_TRACE, 10, TEXT("::InitAVFormat(): detected network protocol: %s"), szProt));
        }
    }
    // TODO: make both durations below configurable
    // decrease analyze duration for network streams
    if (m_avFormat->flags & AVFMT_FLAG_NETWORK ||
        (m_avFormat->flags & AVFMT_FLAG_CUSTOM_IO && !m_avFormat->pb->seekable))
    {
        // require at least 0.2 seconds
        av_opt_set_int(m_avFormat, "analyzeduration",
                       max(m_pSettings->GetNetworkStreamAnalysisDuration() * 1000, 200000), 0);
    }
    else
    {
        av_opt_set_int(m_avFormat, "analyzeduration", 7500000, 0);
        // And increase it for mpeg-ts/ps files
        if (m_bMPEGTS || m_bMPEGPS)
        {
            av_opt_set_int(m_avFormat, "analyzeduration", 30000000, 0);
            av_opt_set_int(m_avFormat, "probesize", 75000000, 0);
        }
    }
    // BluRay input manages its own timestamps; disable avformat overflow correction there
    av_opt_set_int(m_avFormat, "correct_ts_overflow", !m_pBluRay, 0);
    m_timeOpening = time(nullptr);
    int ret = avformat_find_stream_info(m_avFormat, nullptr);
    if (ret < 0)
    {
        DbgLog((LOG_ERROR, 0, TEXT("::InitAVFormat(): av_find_stream_info failed (%d)"), ret));
        goto done;
    }
    DbgLog((LOG_TRACE, 10, TEXT("::InitAVFormat(): avformat_find_stream_info finished, took %I64d seconds"),
            time(nullptr) - m_timeOpening));
    m_timeOpening = 0;
    // Check if this is a m2ts in a BD structure, and if it is, read some extra stream properties out of the CLPI files
    if (m_pBluRay)
    {
        m_pBluRay->ProcessBluRayMetadata();
    }
    else if (pszFileName && m_bMPEGTS)
    {
        CheckBDM2TSCPLI(pszFileName);
    }
    // Import ICY (shoutcast) headers into the global metadata dictionary
    char *icy_headers = nullptr;
    if (av_opt_get(m_avFormat, "icy_metadata_headers", AV_OPT_SEARCH_CHILDREN, (uint8_t **)&icy_headers) >= 0 &&
        icy_headers && strlen(icy_headers) > 0)
    {
        std::string icyHeaders(icy_headers);
        std::stringstream icyHeaderStream(icyHeaders);
        std::string line;
        // Headers come as "key:value" lines
        while (std::getline(icyHeaderStream, line))
        {
            size_t seperatorIdx = line.find_first_of(":");
            std::string token = line.substr(0, seperatorIdx);
            std::string value = line.substr(seperatorIdx + 1);
            if (_stricmp(token.c_str(), "icy-name") == 0)
            {
                // not entirely correct, but this way it gets exported through IAMMediaContent
                av_dict_set(&m_avFormat->metadata, "artist", value.c_str(), 0);
            }
            else if (_stricmp(token.c_str(), "icy-description") == 0)
            {
                av_dict_set(&m_avFormat->metadata, "comment", value.c_str(), 0);
            }
            else if (_stricmp(token.c_str(), "icy-genre") == 0)
            {
                av_dict_set(&m_avFormat->metadata, "genre", value.c_str(), 0);
            }
        }
        ParseICYMetadataPacket();
    }
    av_freep(&icy_headers);
    // Remember the original parser mode of every stream so SettingsChanged() can restore it
    SAFE_CO_FREE(m_stOrigParser);
    m_stOrigParser = (enum AVStreamParseType *)CoTaskMemAlloc(m_avFormat->nb_streams * sizeof(enum AVStreamParseType));
    if (!m_stOrigParser)
        return E_OUTOFMEMORY;
    for (unsigned int idx = 0; idx < m_avFormat->nb_streams; ++idx)
    {
        AVStream *st = m_avFormat->streams[idx];
        // Disable full stream parsing for these formats
        if (av_lav_stream_parser_get_needed(st) == AVSTREAM_PARSE_FULL)
        {
            if (st->codecpar->codec_id == AV_CODEC_ID_DVB_SUBTITLE)
            {
                av_lav_stream_parser_set_needed(st, AVSTREAM_PARSE_NONE);
            }
        }
        // H264 in Ogg requires full parsing
        if (m_bOgg && st->codecpar->codec_id == AV_CODEC_ID_H264)
        {
            av_lav_stream_parser_set_needed(st, AVSTREAM_PARSE_FULL);
        }
        // Create the parsers with the appropriate flags
        init_parser(m_avFormat, st);
        UpdateParserFlags(st);
#ifdef DEBUG
        AVProgram *streamProg = av_find_program_from_stream(m_avFormat, nullptr, idx);
        DbgLog((LOG_TRACE, 30, L"Stream %d (pid %d) - program: %d, codec: %S; parsing: %S;", idx, st->id,
                streamProg ? streamProg->pmt_pid : -1, avcodec_get_name(st->codecpar->codec_id),
                lavf_get_parsing_string(av_lav_stream_parser_get_needed(st))));
#endif
        m_stOrigParser[idx] = av_lav_stream_parser_get_needed(st);
        // Blu-ray style secondary audio: DTS Express (tag 0xA2) / EAC3 (tag 0xA1)
        if ((st->codecpar->codec_id == AV_CODEC_ID_DTS && st->codecpar->codec_tag == 0xA2) ||
            (st->codecpar->codec_id == AV_CODEC_ID_EAC3 && st->codecpar->codec_tag == 0xA1))
            st->disposition |= LAVF_DISPOSITION_SECONDARY_AUDIO;
        UpdateSubStreams();
        if (st->codecpar->codec_type == AVMEDIA_TYPE_ATTACHMENT)
        {
            // Export attachments (fonts etc.) through IDSMResourceBag
            const AVDictionaryEntry *attachFilename = av_dict_get(st->metadata, "filename", nullptr, 0);
            const AVDictionaryEntry *attachMimeType = av_dict_get(st->metadata, "mimetype", nullptr, 0);
            const AVDictionaryEntry *attachDescription = av_dict_get(st->metadata, "comment", nullptr, 0);
            if (attachFilename && attachMimeType)
            {
                LPWSTR chFilename =
                    CoTaskGetWideCharFromMultiByte(CP_UTF8, MB_ERR_INVALID_CHARS, attachFilename->value, -1);
                LPWSTR chMimetype =
                    CoTaskGetWideCharFromMultiByte(CP_UTF8, MB_ERR_INVALID_CHARS, attachMimeType->value, -1);
                LPWSTR chDescription = nullptr;
                if (attachDescription)
                    chDescription =
                        CoTaskGetWideCharFromMultiByte(CP_UTF8, MB_ERR_INVALID_CHARS, attachDescription->value, -1);
                if (chFilename && chMimetype)
                    ResAppend(chFilename, chDescription, chMimetype, st->codecpar->extradata,
                              (DWORD)st->codecpar->extradata_size);
                SAFE_CO_FREE(chFilename);
                SAFE_CO_FREE(chMimetype);
                SAFE_CO_FREE(chDescription);
            }
            else
            {
                DbgLog((LOG_TRACE, 10, L" -> Unknown attachment, missing filename or mimetype"));
            }
            // Try to guess the codec id for fonts only listed by name
            if (st->codecpar->codec_id == AV_CODEC_ID_NONE && attachFilename)
            {
                char *dot = strrchr(attachFilename->value, '.');
                if (dot && !_stricmp(dot, ".ttf"))
                    st->codecpar->codec_id = AV_CODEC_ID_TTF;
                else if (dot && !_stricmp(dot, ".otf"))
                    st->codecpar->codec_id = AV_CODEC_ID_OTF;
            }
            // Install embedded fonts so subtitle renderers can use them
            if (st->codecpar->codec_id == AV_CODEC_ID_TTF || st->codecpar->codec_id == AV_CODEC_ID_OTF)
            {
                if (!m_pFontInstaller)
                {
                    m_pFontInstaller = new CFontInstaller();
                }
                m_pFontInstaller->InstallFont(st->codecpar->extradata, st->codecpar->extradata_size);
            }
        }
        else if (st->disposition & AV_DISPOSITION_ATTACHED_PIC && st->attached_pic.data && st->attached_pic.size > 0)
        {
            LPWSTR chFilename = nullptr;
            LPWSTR chMimeType = nullptr;
            LPWSTR chDescription = nullptr;
            // gather a filename
            const AVDictionaryEntry *attachFilename = av_dict_get(st->metadata, "filename", nullptr, 0);
            if (attachFilename)
                chFilename = CoTaskGetWideCharFromMultiByte(CP_UTF8, MB_ERR_INVALID_CHARS, attachFilename->value, -1);
            // gather a mimetype
            const AVDictionaryEntry *attachMimeType = av_dict_get(st->metadata, "mimetype", nullptr, 0);
            if (attachMimeType)
                chMimeType = CoTaskGetWideCharFromMultiByte(CP_UTF8, MB_ERR_INVALID_CHARS, attachMimeType->value, -1);
            // gather description
            const AVDictionaryEntry *attachDescription = av_dict_get(st->metadata, "comment", nullptr, 0);
            if (attachDescription)
                chDescription =
                    CoTaskGetWideCharFromMultiByte(CP_UTF8, MB_ERR_INVALID_CHARS, attachDescription->value, -1);
            // Fill in filename/mimetype from the codec id when the container provided none
            for (int c = 0; c < countof(CoverMimeTypes); c++)
            {
                if (CoverMimeTypes[c].codec == st->codecpar->codec_id)
                {
                    if (chFilename == nullptr)
                    {
                        size_t size = wcslen(CoverMimeTypes[c].ext) + 15;
                        chFilename = (LPWSTR)CoTaskMemAlloc(size * sizeof(wchar_t));
                        wcscpy_s(chFilename, size, L"EmbeddedCover");
                        wcscat_s(chFilename, size, CoverMimeTypes[c].ext);
                    }
                    if (chMimeType == nullptr)
                    {
                        size_t size = wcslen(CoverMimeTypes[c].mime) + 1;
                        chMimeType = (LPWSTR)CoTaskMemAlloc(size * sizeof(wchar_t));
                        wcscpy_s(chMimeType, size, CoverMimeTypes[c].mime);
                    }
                    break;
                }
            }
            // Export embedded cover-art through IDSMResourceBag interface
            if (chFilename && chMimeType)
            {
                ResAppend(chFilename, chDescription, chMimeType, st->attached_pic.data, (DWORD)st->attached_pic.size);
            }
            else
            {
                DbgLog((LOG_TRACE, 10, L" -> Unknown attachment, missing filename or mimetype"));
            }
            SAFE_CO_FREE(chFilename);
            SAFE_CO_FREE(chMimeType);
            SAFE_CO_FREE(chDescription);
        }
    }
    // A CUESHEET tag replaces the container chapters with cue-sheet tracks
    if (AVDictionaryEntry *cue = av_dict_get(m_avFormat->metadata, "CUESHEET", nullptr, 0))
    {
        CCueSheet cueSheet;
        if (SUCCEEDED(cueSheet.Parse(cue->value)))
        {
            // Metadata
            if (!cueSheet.m_Title.empty() && !av_dict_get(m_avFormat->metadata, "title", nullptr, 0))
                av_dict_set(&m_avFormat->metadata, "title", cueSheet.m_Title.c_str(), 0);
            if (!cueSheet.m_Performer.empty() && !av_dict_get(m_avFormat->metadata, "artist", nullptr, 0))
                av_dict_set(&m_avFormat->metadata, "artist", cueSheet.m_Performer.c_str(), 0);
            // Free old chapters
            while (m_avFormat->nb_chapters--)
            {
                av_dict_free(&m_avFormat->chapters[m_avFormat->nb_chapters]->metadata);
                av_freep(&m_avFormat->chapters[m_avFormat->nb_chapters]);
            }
            av_freep(&m_avFormat->chapters);
            m_avFormat->nb_chapters = 0;
            // Create one chapter per cue-sheet track
            for (CCueSheet::Track track : cueSheet.m_Tracks)
            {
                avpriv_new_chapter(m_avFormat, track.index, AVRational{1, DSHOW_TIME_BASE}, track.Time, track.Time,
                                   cueSheet.FormatTrack(track).c_str());
            }
        }
    }
    CHECK_HR(hr = CreateStreams());
    return S_OK;
done:
    CleanupAVFormat();
    return E_FAIL;
}
// Tear down the avformat context and associated per-stream state.
void CLAVFDemuxer::CleanupAVFormat()
{
    // Drop any queued MVC extension packets before the context goes away
    FlushMVCExtensionQueue();
    if (m_avFormat)
    {
        // Override abort timer to ensure the close function in network protocols can actually close the stream
        AbortOpening(1, 5);
        avformat_close_input(&m_avFormat);
    }
    SAFE_CO_FREE(m_stOrigParser);
}
// Find the (main) AVStream carrying the given transport-stream PID,
// skipping streams flagged as sub streams. Returns nullptr if not found.
AVStream *CLAVFDemuxer::GetAVStreamByPID(int pid)
{
    if (!m_avFormat)
        return nullptr;

    for (unsigned int i = 0; i < m_avFormat->nb_streams; ++i)
    {
        AVStream *stream = m_avFormat->streams[i];
        if (stream->id != pid)
            continue;
        if (stream->disposition & LAVF_DISPOSITION_SUB_STREAM)
            continue;
        return stream;
    }

    return nullptr;
}
// Activate the given stream of the given type, then update the avformat discard
// flags so only active streams are demuxed.
HRESULT CLAVFDemuxer::SetActiveStream(StreamType type, int pid)
{
    HRESULT hr = S_OK;

    // Audio selection drives the forced-subtitle selection (matched by language)
    if (type == audio)
        UpdateForcedSubtitleStream(pid);

    hr = __super::SetActiveStream(type, pid);
    // Usually selecting an audio stream would set the forced substream (since it uses the audio stream language)
    // but in case there is no audio stream, do a fallback selection of any PGS stream here.
    if (type == subpic && pid == FORCED_SUBTITLE_PID && m_ForcedSubStream == -1)
    {
        std::list<CSubtitleSelector> selectors;
        CSubtitleSelector selector;
        selector.audioLanguage = "*";
        selector.subtitleLanguage = "*";
        selector.dwFlagsSet = SUBTITLE_FLAG_PGS;
        selector.dwFlagsNot = 0;
        selectors.push_back(selector);
        const stream *subst = SelectSubtitleStream(selectors, "");
        if (subst)
            m_ForcedSubStream = subst->pid;
    }
    // Update discard flags: active streams demux normally, everything else is dropped
    for (unsigned int idx = 0; idx < m_avFormat->nb_streams; ++idx)
    {
        AVStream *st = m_avFormat->streams[idx];
        if (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            st->discard = (m_dActiveStreams[video] == idx) ? AVDISCARD_DEFAULT : AVDISCARD_ALL;
            // don't discard h264 mvc streams
            if (m_bH264MVCCombine && st->codecpar->codec_id == AV_CODEC_ID_H264_MVC)
                st->discard = AVDISCARD_DEFAULT;
        }
        else if (st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
        {
            st->discard = (m_dActiveStreams[audio] == idx) ? AVDISCARD_DEFAULT : AVDISCARD_ALL;
            // If the stream is a sub stream, make sure to activate the main stream as well
            if (m_bMPEGTS && (st->disposition & LAVF_DISPOSITION_SUB_STREAM) && st->discard == AVDISCARD_DEFAULT)
            {
                for (unsigned int idx2 = 0; idx2 < m_avFormat->nb_streams; ++idx2)
                {
                    AVStream *mst = m_avFormat->streams[idx2];
                    if (mst->id == st->id)
                    {
                        mst->discard = AVDISCARD_DEFAULT;
                        break;
                    }
                }
            }
        }
        else if (st->codecpar->codec_type == AVMEDIA_TYPE_SUBTITLE)
        {
            // Active when directly selected, or when it is the forced-sub fallback
            // while the virtual forced-subtitle PID is the active subtitle
            st->discard = (m_dActiveStreams[subpic] == idx ||
                           (m_dActiveStreams[subpic] == FORCED_SUBTITLE_PID && m_ForcedSubStream == idx))
                              ? AVDISCARD_DEFAULT
                              : AVDISCARD_ALL;
        }
        else
        {
            st->discard = AVDISCARD_ALL;
        }
    }

    return hr;
}
void CLAVFDemuxer::UpdateSubStreams()
{
for (unsigned int idx = 0; idx < m_avFormat->nb_streams; ++idx)
{
AVStream *st = m_avFormat->streams[idx];
// Find and flag the AC-3 substream
if (m_bMPEGTS && st->codecpar->codec_id == AV_CODEC_ID_TRUEHD)
{
int id = st->id;
AVStream *sub_st = nullptr;
for (unsigned int i = 0; i < m_avFormat->nb_streams; ++i)
{
AVStream *sst = m_avFormat->streams[i];
if (idx != i && sst->id == id)
{
sub_st = sst;
break;
}
}
if (sub_st)
{
sub_st->disposition = st->disposition | LAVF_DISPOSITION_SUB_STREAM;
av_dict_copy(&sub_st->metadata, st->metadata, 0);
}
}
}
}
// Select a matroska edition ("title") by index and update the reported duration.
STDMETHODIMP CLAVFDemuxer::SetTitle(int idx)
{
    // robustness: also guard against a missing context, consistent with GetNumTitles()
    if (!m_bMatroska || !m_avFormat)
        return E_NOTIMPL;
    av_mkv_set_next_edition(m_avFormat, idx);

    // Update duration to the selected edition's duration
    AVEdition *editions = nullptr;
    av_mkv_get_editions(m_avFormat, &editions);
    if (!editions)
        return E_FAIL;
    m_avFormat->duration = editions[idx].duration;

    return S_OK;
}
// Return the currently active matroska edition index (0 for non-matroska input).
STDMETHODIMP_(int) CLAVFDemuxer::GetTitle()
{
    // robustness: also guard against a missing context, consistent with GetNumTitles()
    if (!m_bMatroska || !m_avFormat)
        return 0;
    return av_mkv_get_edition(m_avFormat);
}
// Return duration and a display name for the matroska edition at the given index.
// Either out parameter may be nullptr if the caller is not interested in it.
STDMETHODIMP CLAVFDemuxer::GetTitleInfo(int idx, REFERENCE_TIME *rtDuration, WCHAR **ppszName)
{
    // robustness: also guard against a missing context
    if (!m_bMatroska || !m_avFormat)
        return E_NOTIMPL;

    AVEdition *editions = nullptr;
    av_mkv_get_editions(m_avFormat, &editions);
    // robustness: guard against the demuxer providing no edition list (was dereferenced unchecked)
    if (!editions)
        return E_FAIL;
    AVEdition *current_edition = &editions[idx];

    if (rtDuration)
        *rtDuration = av_rescale(current_edition->duration, DSHOW_TIME_BASE, AV_TIME_BASE);

    if (ppszName)
    {
        char *title = nullptr;
        // Append the edition duration as [hh:mm:ss] to the (possibly generic) name
        int total_seconds = (int)(current_edition->duration / AV_TIME_BASE);
        int seconds = total_seconds % 60;
        int minutes = total_seconds / 60 % 60;
        int hours = total_seconds / 3600;
        if (current_edition->title)
        {
            title = av_asprintf("E: %s [%02d:%02d:%02d]", current_edition->title, hours, minutes, seconds);
        }
        else
        {
            title = av_asprintf("E: Edition %d [%02d:%02d:%02d]", idx + 1, hours, minutes, seconds);
        }
        *ppszName = CoTaskGetWideCharFromMultiByte(CP_UTF8, MB_ERR_INVALID_CHARS, title, -1);
        av_freep(&title);
    }
    return S_OK;
}
// Return the number of matroska editions ("titles"); 0 for any other input.
STDMETHODIMP_(int) CLAVFDemuxer::GetNumTitles()
{
    if (!m_bMatroska || !m_avFormat)
        return 0;
    if (!m_avFormat->priv_data || !m_avFormat->iformat)
        return 0;
    // Editions are only provided by the native matroska demuxer
    if (strcmp(m_avFormat->iformat->name, "matroska") != 0)
        return 0;
    return av_mkv_get_num_editions(m_avFormat);
}
// Re-read the settings that affect demuxing: the VC-1 timestamp correction mode
// and the PGS subtitle parsing mode, then re-apply parser configuration.
void CLAVFDemuxer::SettingsChanged(ILAVFSettingsInternal *pSettings)
{
    // Mode 1 forces correction on (rawvideo always needs it); mode 2 is "auto",
    // which depends on the downstream decoder and is inverted for matroska; any
    // other mode disables correction.
    const int vc1Mode = pSettings->GetVC1TimestampMode();
    if (vc1Mode == 1 || strcmp(m_pszInputFormat, "rawvideo") == 0)
    {
        m_bVC1Correction = true;
    }
    else if (vc1Mode == 2)
    {
        const BOOL bReq = pSettings->IsVC1CorrectionRequired();
        m_bVC1Correction = m_bMatroska ? !bReq : bReq;
    }
    else
    {
        m_bVC1Correction = false;
    }

    // VC-1 streams get their parser flags refreshed; other video streams are
    // restored to their original parser mode.
    for (unsigned int i = 0; i < m_avFormat->nb_streams; ++i)
    {
        AVStream *stream = m_avFormat->streams[i];
        if (stream->codecpar->codec_id == AV_CODEC_ID_VC1)
        {
            UpdateParserFlags(stream);
        }
        else if (stream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            av_lav_stream_parser_set_needed(stream, m_stOrigParser[i]);
        }
    }

    m_bPGSNoParsing = !pSettings->GetPGSOnlyForced();
}
// Return the total duration in DirectShow reference time, or -1 when unknown.
REFERENCE_TIME CLAVFDemuxer::GetDuration() const
{
    const int64_t duration = m_avFormat->duration;
    if (duration == (int64_t)AV_NOPTS_VALUE || duration < 0LL)
    {
        // no duration is available for us
        // try to calculate it
        // TODO
        /*if (m_rtCurrent != Packet::INVALID_TIME && m_avFormat->file_size > 0 && m_avFormat->pb && m_avFormat->pb->pos
        > 0) { iLength = (((m_rtCurrent * m_avFormat->file_size) / m_avFormat->pb->pos) / 1000) & 0xFFFFFFFF;
        }*/
        // DbgLog((LOG_ERROR, 1, TEXT("duration is not available")));
        return -1;
    }
    // avformat reports duration in AV_TIME_BASE units; convert to 100ns units
    return ConvertTimestampToRT(duration, 1, AV_TIME_BASE, 0);
}
#define VC1_CODE_RES0 0x00000100
#define IS_VC1_MARKER(x) (((x) & ~0xFF) == VC1_CODE_RES0)
// Builds and attaches an updated CMediaType to a packet when the stream carried
// AV_PKT_DATA_NEW_EXTRADATA and/or AV_PKT_DATA_PARAM_CHANGE side data.
// The pin's current media type is fetched, its format block is rewritten with
// the new extradata / changed parameters, and a duplicate is stored in
// pPacket->pmt so downstream filters can perform a dynamic format change.
// extradata / paramchange may be nullptr when the respective side data is absent.
STDMETHODIMP CLAVFDemuxer::CreatePacketMediaType(Packet *pPacket, enum AVCodecID codec_id, BYTE *extradata,
                                                 int extradata_size, BYTE *paramchange, int paramchange_size)
{
    // Start from the media type currently configured for this stream's output pin.
    CMediaType *pmt = m_pSettings->GetOutputMediatype(pPacket->StreamId);
    if (pmt)
    {
        // --- Step 1: merge the new extradata into the format block (codec-specific layout) ---
        if (extradata && extradata_size)
        {
            if (codec_id == AV_CODEC_ID_H264)
            {
                MPEG2VIDEOINFO *mp2vi =
                    (MPEG2VIDEOINFO *)pmt->ReallocFormatBuffer(sizeof(MPEG2VIDEOINFO) + extradata_size);
                int ret = g_VideoHelper.ProcessH264Extradata(extradata, extradata_size, mp2vi, FALSE);
                if (ret < 0)
                {
                    // Helper could not parse the extradata: store it raw instead.
                    mp2vi->cbSequenceHeader = extradata_size;
                    memcpy(&mp2vi->dwSequenceHeader[0], extradata, extradata_size);
                }
                else
                {
                    // Zero the unused tail of the (over-allocated) format buffer.
                    int mp2visize = SIZE_MPEG2VIDEOINFO(mp2vi);
                    memset((BYTE *)mp2vi + mp2visize, 0, pmt->cbFormat - mp2visize);
                }
            }
            else if (codec_id == AV_CODEC_ID_MPEG2VIDEO)
            {
                // Extract just the sequence header from the extradata.
                MPEG2VIDEOINFO *mp2vi =
                    (MPEG2VIDEOINFO *)pmt->ReallocFormatBuffer(sizeof(MPEG2VIDEOINFO) + extradata_size);
                CExtradataParser parser = CExtradataParser(extradata, extradata_size);
                mp2vi->cbSequenceHeader = (DWORD)parser.ParseMPEGSequenceHeader((BYTE *)&mp2vi->dwSequenceHeader[0]);
            }
            else if (codec_id == AV_CODEC_ID_VC1)
            {
                // VC-1 extradata is appended after VIDEOINFOHEADER2, optionally
                // preceded by a single padding byte when it already starts with
                // a start-code marker (see IS_VC1_MARKER above).
                VIDEOINFOHEADER2 *vih2 =
                    (VIDEOINFOHEADER2 *)pmt->ReallocFormatBuffer(sizeof(VIDEOINFOHEADER2) + extradata_size + 1);
                int i = 0;
                for (i = 0; i < (extradata_size - 4); i++)
                {
                    uint32_t code = AV_RB32(extradata + i);
                    if (IS_VC1_MARKER(code))
                        break;
                }
                if (i == 0)
                {
                    // Marker at offset 0: insert one zero pad byte before the extradata.
                    *((BYTE *)vih2 + sizeof(VIDEOINFOHEADER2)) = 0;
                    memcpy((BYTE *)vih2 + sizeof(VIDEOINFOHEADER2) + 1, extradata, extradata_size);
                }
                else
                {
                    memcpy((BYTE *)vih2 + sizeof(VIDEOINFOHEADER2), extradata, extradata_size);
                }
            }
            else if (codec_id == AV_CODEC_ID_ASS)
            {
                // ASS subtitle header follows the SUBTITLEINFO struct.
                SUBTITLEINFO *sif = (SUBTITLEINFO *)pmt->ReallocFormatBuffer(sizeof(SUBTITLEINFO) + extradata_size);
                memcpy((BYTE *)sif + sizeof(SUBTITLEINFO), extradata, extradata_size);
            }
            else
            {
                // Generic handling: append extradata after the respective format
                // header, selected by the media type's format GUID.
                if (pmt->formattype == FORMAT_VideoInfo)
                {
                    VIDEOINFOHEADER *vih =
                        (VIDEOINFOHEADER *)pmt->ReallocFormatBuffer(sizeof(VIDEOINFOHEADER) + extradata_size);
                    vih->bmiHeader.biSize = sizeof(BITMAPINFOHEADER) + extradata_size;
                    memcpy((BYTE *)vih + sizeof(VIDEOINFOHEADER), extradata, extradata_size);
                }
                else if (pmt->formattype == FORMAT_VideoInfo2)
                {
                    VIDEOINFOHEADER2 *vih2 =
                        (VIDEOINFOHEADER2 *)pmt->ReallocFormatBuffer(sizeof(VIDEOINFOHEADER2) + extradata_size);
                    vih2->bmiHeader.biSize = sizeof(BITMAPINFOHEADER) + extradata_size;
                    memcpy((BYTE *)vih2 + sizeof(VIDEOINFOHEADER2), extradata, extradata_size);
                }
                else if (pmt->formattype == FORMAT_WaveFormatEx)
                {
                    WAVEFORMATEX *wfex =
                        (WAVEFORMATEX *)pmt->ReallocFormatBuffer(sizeof(WAVEFORMATEX) + extradata_size);
                    wfex->cbSize = extradata_size;
                    memcpy((BYTE *)wfex + sizeof(WAVEFORMATEX), extradata, extradata_size);
                }
                else if (pmt->formattype == FORMAT_WaveFormatExFFMPEG)
                {
                    WAVEFORMATEXFFMPEG *wfex =
                        (WAVEFORMATEXFFMPEG *)pmt->ReallocFormatBuffer(sizeof(WAVEFORMATEXFFMPEG) + extradata_size);
                    wfex->wfex.cbSize = extradata_size;
                    memcpy((BYTE *)wfex + sizeof(WAVEFORMATEXFFMPEG), extradata, extradata_size);
                }
                else if (pmt->formattype == FORMAT_VorbisFormat2)
                {
                    // Vorbis extradata is Xiph-laced: first byte is the number of
                    // laced blocks, then each block size is encoded as a run of
                    // 0xFF bytes plus a terminator byte; the last block's size is
                    // implicit (remaining bytes).
                    BYTE *p = extradata;
                    std::vector<int> sizes;
                    for (BYTE n = *p++; n > 0; n--)
                    {
                        int size = 0;
                        // Xiph Lacing
                        do
                        {
                            size += *p;
                        } while (*p++ == 0xFF);
                        sizes.push_back(size);
                    }
                    int totalsize = 0;
                    for (size_t i = 0; i < sizes.size(); i++)
                        totalsize += sizes[i];
                    // The final block consumes whatever is left after the lacing header.
                    sizes.push_back(extradata_size - (int)(p - extradata) - totalsize);
                    totalsize += sizes[sizes.size() - 1];
                    // 3 blocks is the currently valid Vorbis format
                    if (sizes.size() == 3)
                    {
                        VORBISFORMAT2 *pvf2 =
                            (VORBISFORMAT2 *)pmt->ReallocFormatBuffer(sizeof(VORBISFORMAT2) + totalsize);
                        BYTE *p2 = (BYTE *)pvf2 + sizeof(VORBISFORMAT2);
                        // Copy the three header blocks back-to-back and record their sizes.
                        for (unsigned int i = 0; i < sizes.size(); p += sizes[i], p2 += sizes[i], i++)
                        {
                            memcpy(p2, p, pvf2->HeaderSize[i] = sizes[i]);
                        }
                    }
                }
                else
                {
                    DbgLog((LOG_TRACE, 10, L"::GetNextPacket() - Unsupported PMT change on codec %S",
                            avcodec_get_name(codec_id)));
                }
            }
        }
        // --- Step 2: apply parameter changes (channel count, sample rate, dimensions, aspect) ---
        // Layout of the side data: a little-endian flags dword followed by the
        // values for each flagged field, in the fixed order below.
        if (paramchange)
        {
            uint32_t flags = AV_RL32(paramchange);
            int channels = 0, sample_rate = 0, width = 0, height = 0, aspect_num = 0, aspect_den = 0;
            paramchange += 4;
            if (flags & AV_SIDE_DATA_PARAM_CHANGE_CHANNEL_COUNT)
            {
                channels = AV_RL32(paramchange);
                paramchange += 4;
            }
            if (flags & AV_SIDE_DATA_PARAM_CHANGE_CHANNEL_LAYOUT)
            {
                // Channel layout (64-bit) is skipped; only the count is used here.
                paramchange += 8;
            }
            if (flags & AV_SIDE_DATA_PARAM_CHANGE_SAMPLE_RATE)
            {
                sample_rate = AV_RL32(paramchange);
                paramchange += 4;
            }
            if (flags & AV_SIDE_DATA_PARAM_CHANGE_DIMENSIONS)
            {
                width = AV_RL32(paramchange);
                height = AV_RL32(paramchange + 4);
                paramchange += 8;
            }
            if (flags & AV_SIDE_DATA_PARAM_CHANGE_ASPECTRATIO)
            {
                aspect_num = AV_RL32(paramchange);
                aspect_den = AV_RL32(paramchange + 4);
                paramchange += 8;
            }
            if (pmt->majortype == MEDIATYPE_Video)
            {
                if ((pmt->formattype == FORMAT_VideoInfo || pmt->formattype == FORMAT_MPEGVideo) && width && height)
                {
                    VIDEOINFOHEADER *vih = (VIDEOINFOHEADER *)pmt->pbFormat;
                    vih->bmiHeader.biWidth = width;
                    vih->bmiHeader.biHeight = height;
                    vih->rcTarget.right = vih->rcSource.right = width;
                    vih->rcTarget.bottom = vih->rcSource.bottom = height;
                }
                else if ((pmt->formattype == FORMAT_VideoInfo2 || pmt->formattype == FORMAT_MPEG2Video) &&
                         ((width && height) || (aspect_num && aspect_den)))
                {
                    VIDEOINFOHEADER2 *vih2 = (VIDEOINFOHEADER2 *)pmt->pbFormat;
                    if (width && height)
                    {
                        vih2->bmiHeader.biWidth = width;
                        vih2->bmiHeader.biHeight = height;
                        vih2->rcTarget.right = vih2->rcSource.right = width;
                        vih2->rcTarget.bottom = vih2->rcSource.bottom = height;
                    }
                    if (aspect_num && aspect_den)
                    {
                        // Combine the sample aspect ratio with the frame size to
                        // get the display aspect ratio, reduced to lowest terms.
                        int num = vih2->bmiHeader.biWidth, den = vih2->bmiHeader.biHeight;
                        av_reduce(&num, &den, (int64_t)aspect_num * num, (int64_t)aspect_den * den, INT_MAX);
                        vih2->dwPictAspectRatioX = num;
                        vih2->dwPictAspectRatioY = den;
                    }
                }
            }
            else if (pmt->majortype == MEDIATYPE_Audio)
            {
                if ((pmt->formattype == FORMAT_WaveFormatEx || pmt->formattype == FORMAT_WaveFormatExFFMPEG) &&
                    (channels || sample_rate))
                {
                    WAVEFORMATEX *wfex = nullptr;
                    if (pmt->formattype == FORMAT_WaveFormatExFFMPEG)
                    {
                        WAVEFORMATEXFFMPEG *wfexff = (WAVEFORMATEXFFMPEG *)pmt->pbFormat;
                        wfex = &wfexff->wfex;
                    }
                    else
                    {
                        wfex = (WAVEFORMATEX *)pmt->pbFormat;
                    }
                    if (channels)
                        wfex->nChannels = channels;
                    if (sample_rate)
                        wfex->nSamplesPerSec = sample_rate;
                }
            }
        }
        // --- Step 3: attach a copy to the packet and free the working media type ---
        if (pmt)
        {
            pPacket->pmt = CreateMediaType(pmt);
            SAFE_DELETE(pmt);
        }
    }
    return S_OK;
}
// Reads the most recent ICY (SHOUTcast) in-stream metadata packet from the
// avformat context and, if it contains a "StreamTitle" entry, stores the title
// (stripped of quotes) as the container-level "title" metadata tag.
// Always returns S_OK; this is strictly best-effort.
STDMETHODIMP CLAVFDemuxer::ParseICYMetadataPacket()
{
    char *icy_data = nullptr;
    if (av_opt_get(m_avFormat, "icy_metadata_packet", AV_OPT_SEARCH_CHILDREN, (uint8_t **)&icy_data) >= 0 && icy_data &&
        strlen(icy_data) > 0)
    {
        std::string icyData(icy_data);
        size_t idx = icyData.find("StreamTitle");
        // Only proceed when bytes actually follow "StreamTitle=" (12 chars incl.
        // the '='); the previous unguarded substr could throw std::out_of_range
        // on a truncated packet such as a bare "StreamTitle".
        if (idx != std::string::npos && idx + 12 <= icyData.length())
        {
            // strip StreamTitle token and =
            std::string value = icyData.substr(idx + 12);
            idx = value.find_first_of(";");
            if (idx != std::string::npos)
                value = value.substr(0, idx);
            // Strip a single surrounding quote pair; guard against an empty
            // value (the old code indexed value[value.length()-1] on an empty
            // string, which is out-of-bounds).
            if (!value.empty() && (value[0] == '\'' || value[0] == '"'))
                value = value.substr(1);
            if (!value.empty() && (value[value.length() - 1] == '\'' || value[value.length() - 1] == '"'))
                value = value.substr(0, value.length() - 1);
            if (value.length() > 0)
            {
                av_dict_set(&m_avFormat->metadata, "title", value.c_str(), 0);
            }
        }
        // clear value, and only read again when its send again
        av_opt_set(m_avFormat, "icy_metadata_packet", "", AV_OPT_SEARCH_CHILDREN);
    }
    av_freep(&icy_data);
    return S_OK;
}
// Reads the next packet from avformat, drops packets of inactive streams,
// converts timestamps to REFERENCE_TIME, applies codec-specific timestamp
// fixups (H.264, VC-1, RealVideo, MPEG-1/2), reassembles WebVTT cue side data,
// propagates media-type changes, and feeds BluRay / H.264-MVC post-processing.
// Returns S_OK with a packet in *ppPacket, S_FALSE for a soft failure or a
// dropped/queued packet (caller should retry), or a failure HRESULT.
// NOTE: fixes a corrupted token in the previous revision -- "¶mchange_size"
// (HTML-entity mangling of "&paramchange_size") did not compile.
STDMETHODIMP CLAVFDemuxer::GetNextPacket(Packet **ppPacket)
{
    CheckPointer(ppPacket, E_POINTER);
    // If true, S_FALSE is returned, indicating a soft-failure
    bool bReturnEmpty = false;
    // Read packet
    AVPacket pkt;
    Packet *pPacket = nullptr;
    // assume we are not eof
    if (m_avFormat->pb)
    {
        m_avFormat->pb->eof_reached = 0;
    }
    int result = 0;
    try
    {
        DBG_TIMING("av_read_frame", 30, result = av_read_frame(m_avFormat, &pkt))
    }
    catch (...)
    {
        // ignore..
    }
    if (result == AVERROR(EINTR) || result == AVERROR(EAGAIN))
    {
        // timeout, probably no real error, return empty packet
        bReturnEmpty = true;
    }
    else if (result == AVERROR_EOF)
    {
        DbgLog((LOG_TRACE, 10, L"::GetNextPacket(): End of File reached"));
    }
    else if (result < 0)
    {
        // meh, fail
    }
    else if (pkt.size <= 0 || pkt.stream_index < 0 || (unsigned)pkt.stream_index >= m_avFormat->nb_streams)
    {
        // XXX, in some cases ffmpeg returns a zero or negative packet size
        if (m_avFormat->pb && !m_avFormat->pb->eof_reached)
        {
            bReturnEmpty = true;
        }
        av_packet_unref(&pkt);
    }
    else
    {
        // Check right here if the stream is active, we can drop the package otherwise.
        AVStream *stream = m_avFormat->streams[pkt.stream_index];
        BOOL streamActive = FALSE;
        BOOL forcedSubStream = FALSE;
        // Scan the active-stream table (one slot per stream class up to "unknown").
        for (int i = 0; i < unknown; ++i)
        {
            if (m_dActiveStreams[i] == pkt.stream_index)
            {
                streamActive = TRUE;
                break;
            }
        }
        // Accept it if its the forced subpic stream
        if (m_dActiveStreams[subpic] == FORCED_SUBTITLE_PID && pkt.stream_index == m_ForcedSubStream)
        {
            forcedSubStream = streamActive = TRUE;
        }
        // Accept H264 MVC streams, as they get combined with the base stream later
        if (m_bH264MVCCombine && stream->codecpar->codec_id == AV_CODEC_ID_H264_MVC)
            streamActive = TRUE;
        if (!streamActive)
        {
            av_packet_unref(&pkt);
            return S_FALSE;
        }
        pPacket = new Packet();
        if (!pPacket)
            return E_OUTOFMEMORY;
        // Convert timestamps to reference time and set them on the packet
        REFERENCE_TIME pts = ConvertTimestampToRT(pkt.pts, stream->time_base.num, stream->time_base.den);
        REFERENCE_TIME dts = ConvertTimestampToRT(pkt.dts, stream->time_base.num, stream->time_base.den);
        REFERENCE_TIME duration = ConvertTimestampToRT(pkt.duration, stream->time_base.num, stream->time_base.den, 0);
        pPacket->rtPTS = pts;
        pPacket->rtDTS = dts;
        pPacket->StreamId = (DWORD)pkt.stream_index;
        pPacket->bPosition = pkt.pos;
        // --- Codec/container-specific timestamp corrections ---
        if (stream->codecpar->codec_id == AV_CODEC_ID_H264)
        {
            if (m_bMatroska || m_bOgg)
            {
                if (!stream->codecpar->extradata_size || stream->codecpar->extradata[0] != 1 ||
                    AV_RB32(pkt.data) == 0x00000001)
                {
                    pPacket->dwFlags |= LAV_PACKET_H264_ANNEXB;
                }
                else
                { // No DTS for H264 in native format
                    dts = Packet::INVALID_TIME;
                }
            }
            else if (!m_bPMP && !m_bAVI)
            { // For most formats, DTS timestamps for h.264 are no fun
                dts = Packet::INVALID_TIME;
            }
        }
        if (m_bAVI && stream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            // AVI's always have borked pts, specially if m_pFormatContext->flags includes
            // AVFMT_FLAG_GENPTS so always use dts
            pts = Packet::INVALID_TIME;
        }
        if (stream->codecpar->codec_id == AV_CODEC_ID_RV10 || stream->codecpar->codec_id == AV_CODEC_ID_RV20 ||
            stream->codecpar->codec_id == AV_CODEC_ID_RV30 || stream->codecpar->codec_id == AV_CODEC_ID_RV40)
        {
            pts = Packet::INVALID_TIME;
        }
        // Never use DTS for these formats
        if (!m_bAVI && (stream->codecpar->codec_id == AV_CODEC_ID_MPEG2VIDEO ||
                        stream->codecpar->codec_id == AV_CODEC_ID_MPEG1VIDEO))
            dts = Packet::INVALID_TIME;
        if (pkt.data)
        {
            result = pPacket->SetPacket(&pkt);
            if (result < 0)
            {
                SAFE_DELETE(pPacket);
                return E_OUTOFMEMORY;
            }
        }
        // Select the appropriate timestamps
        REFERENCE_TIME rt = Packet::INVALID_TIME;
        // Try the different times set, pts first, dts when pts is not valid
        if (pts != Packet::INVALID_TIME)
        {
            rt = pts;
        }
        else if (dts != Packet::INVALID_TIME)
        {
            rt = dts;
        }
        if (stream->codecpar->codec_id == AV_CODEC_ID_VC1)
        {
            if (m_bMatroska && m_bVC1Correction)
            {
                rt = pts;
                if (!m_bVC1SeenTimestamp)
                {
                    if (rt == Packet::INVALID_TIME && dts != Packet::INVALID_TIME)
                        rt = dts;
                    m_bVC1SeenTimestamp = (pts != Packet::INVALID_TIME);
                }
            }
            else if (m_bVC1Correction)
            {
                rt = dts;
                pPacket->dwFlags |= LAV_PACKET_PARSED;
            }
        }
        else if (stream->codecpar->codec_id == AV_CODEC_ID_MOV_TEXT)
        {
            pPacket->dwFlags |= LAV_PACKET_MOV_TEXT;
        }
        // Mark the packet as parsed, so the forced subtitle parser doesn't hit it
        if (stream->codecpar->codec_id == AV_CODEC_ID_HDMV_PGS_SUBTITLE && m_bPGSNoParsing)
        {
            pPacket->dwFlags |= LAV_PACKET_PARSED;
        }
        pPacket->rtStart = pPacket->rtStop = rt;
        if (rt != Packet::INVALID_TIME)
        {
            pPacket->rtStop += (duration > 0 || stream->codecpar->codec_id == AV_CODEC_ID_TRUEHD) ? duration : 1;
        }
        if (stream->codecpar->codec_type == AVMEDIA_TYPE_SUBTITLE)
        {
            pPacket->bDiscontinuity = TRUE;
            if (forcedSubStream)
            {
                pPacket->dwFlags |= LAV_PACKET_FORCED_SUBTITLE;
                pPacket->dwFlags &= ~LAV_PACKET_PARSED;
            }
            if (stream->codecpar->codec_id == AV_CODEC_ID_SRT)
            {
                pPacket->dwFlags |= LAV_PACKET_SRT;
            }
        }
        if (stream->codecpar->codec_id == AV_CODEC_ID_PCM_S16BE_PLANAR ||
            stream->codecpar->codec_id == AV_CODEC_ID_PCM_S16LE_PLANAR ||
            stream->codecpar->codec_id == AV_CODEC_ID_PCM_S24LE_PLANAR ||
            stream->codecpar->codec_id == AV_CODEC_ID_PCM_S32LE_PLANAR)
            pPacket->dwFlags |= LAV_PACKET_PLANAR_PCM;
        // WebVTT cues: prepend identifier and settings lines (each terminated by
        // CRLF) in front of the cue text, as side data carries them separately.
        if (stream->codecpar->codec_id == AV_CODEC_ID_WEBVTT)
        {
            size_t id_size = 0, settings_size = 0;
            uint8_t *id = NULL, *settings = NULL;
            id = av_packet_get_side_data(&pkt, AV_PKT_DATA_WEBVTT_IDENTIFIER, &id_size);
            settings = av_packet_get_side_data(&pkt, AV_PKT_DATA_WEBVTT_SETTINGS, &settings_size);
            int text_size = pPacket->GetDataSize();
            // allocate size for id/settings
            int pkt_size = text_size + (int)id_size + 2 + (int)settings_size + 2;
            pPacket->SetDataSize(pkt_size);
            uint8_t *data = pPacket->GetData();
            // offset data
            memmove(data + id_size + 2 + settings_size + 2, data, text_size);
            // write id
            if (id && id_size > 0)
                memcpy(data, id, id_size);
            data[id_size + 0] = '\r';
            data[id_size + 1] = '\n';
            // write settings
            if (settings && settings_size > 0)
                memcpy(data + id_size + 2, settings, settings_size);
            data[id_size + 2 + settings_size + 0] = '\r';
            data[id_size + 2 + settings_size + 1] = '\n';
        }
        // Update extradata and send new mediatype, when required
        size_t sidedata_size = 0;
        uint8_t *sidedata = av_packet_get_side_data(&pkt, AV_PKT_DATA_NEW_EXTRADATA, &sidedata_size);
        size_t paramchange_size = 0;
        uint8_t *paramchange = av_packet_get_side_data(&pkt, AV_PKT_DATA_PARAM_CHANGE, &paramchange_size);
        if ((sidedata && sidedata_size) || (paramchange && paramchange_size))
        {
            CreatePacketMediaType(pPacket, stream->codecpar->codec_id, sidedata, (int)sidedata_size, paramchange,
                                  (int)paramchange_size);
        }
        pPacket->bSyncPoint = pkt.flags & AV_PKT_FLAG_KEY;
        pPacket->bDiscontinuity = !m_pBluRay && (pkt.flags & AV_PKT_FLAG_CORRUPT);
#ifdef DEBUG
        if (pkt.flags & AV_PKT_FLAG_CORRUPT)
            DbgLog((LOG_TRACE, 10, L"::GetNextPacket() - Signaling Discontinuinty because of corrupt package"));
#endif
        if (pPacket->rtStart != AV_NOPTS_VALUE)
            m_rtCurrent = pPacket->rtStart;
        av_packet_unref(&pkt);
    }
    // BluRay post-processing may consume or reject the packet.
    if (m_pBluRay && pPacket)
    {
        HRESULT hr = m_pBluRay->ProcessPacket(pPacket);
        if (hr != S_OK)
        {
            SAFE_DELETE(pPacket);
            bReturnEmpty = bReturnEmpty || hr == S_FALSE;
        }
    }
    // MVC extension packets are queued until their base-view packet arrives.
    if (m_bH264MVCCombine && pPacket && pPacket->StreamId == m_nH264MVCExtensionStream)
    {
        if (FAILED(QueueMVCExtension(pPacket)))
        {
            SAFE_DELETE(pPacket);
            return E_FAIL;
        }
        return S_FALSE;
    }
    if (m_bH264MVCCombine && pPacket && pPacket->StreamId == m_nH264MVCBaseStream)
    {
        HRESULT hr = CombineMVCBaseExtension(pPacket);
        if (hr != S_OK)
        {
            SAFE_DELETE(pPacket);
            // S_FALSE indicates a skipped packet, not a hard failure
            if (hr == S_FALSE)
                bReturnEmpty = true;
        }
    }
    if (bReturnEmpty && !pPacket)
    {
        return S_FALSE;
    }
    if (!pPacket)
    {
        return E_FAIL;
    }
    ParseICYMetadataPacket();
    *ppPacket = pPacket;
    return S_OK;
}
// Store an H.264 MVC extension packet until its matching base-view packet
// arrives; ownership of pPacket transfers to the queue.
STDMETHODIMP CLAVFDemuxer::QueueMVCExtension(Packet *pPacket)
{
    m_MVCExtensionQueue.emplace_back(pPacket);
    return S_OK;
}
// Free all queued MVC extension packets and empty the queue (used on seek).
STDMETHODIMP CLAVFDemuxer::FlushMVCExtensionQueue()
{
    for (Packet *pQueued : m_MVCExtensionQueue)
    {
        delete pQueued;
    }
    m_MVCExtensionQueue.clear();
    return S_OK;
}
// Appends the matching MVC extension packet (from m_MVCExtensionQueue) onto the
// base-view packet. Matching is done by DTS: equal (or either invalid) DTS is
// a match; older extensions are dropped; a newer extension means the base
// packet has no pair and is skipped (S_FALSE). When the queue runs dry and a
// BluRay source is present, it is asked to refill the queue and we recurse.
// Returns S_OK when the extension was appended, S_FALSE when the base packet
// should be dropped, or a failure HRESULT.
STDMETHODIMP CLAVFDemuxer::CombineMVCBaseExtension(Packet *pBasePacket)
{
    while (!m_MVCExtensionQueue.empty())
    {
        Packet *pExtensionPacket = m_MVCExtensionQueue.front();
        // Match: identical DTS, or either side has no DTS at all.
        if (pExtensionPacket->rtDTS == pBasePacket->rtDTS || pBasePacket->rtDTS == Packet::INVALID_TIME ||
            pExtensionPacket->rtDTS == Packet::INVALID_TIME)
        {
            if (pBasePacket->Append(pExtensionPacket) < 0)
                return E_FAIL;
            m_MVCExtensionQueue.pop_front();
            delete pExtensionPacket;
            return S_OK;
        }
        else if (pExtensionPacket->rtDTS < pBasePacket->rtDTS)
        {
            // Stale extension from before the current base packet: discard and retry.
            DbgLog((LOG_TRACE, 10, L"CLAVFDemuxer::CombineMVCBaseExtension(): Dropping extension %I64d, base is %I64d",
                    pExtensionPacket->rtDTS, pBasePacket->rtDTS));
            m_MVCExtensionQueue.pop_front();
            delete pExtensionPacket;
        }
        else if (pExtensionPacket->rtDTS > pBasePacket->rtDTS)
        {
            // Extension stream is already ahead: this base packet has no pair.
            DbgLog((LOG_TRACE, 10,
                    L"CLAVFDemuxer::CombineMVCBaseExtension(): Dropping base %I64d, next extension is %I64d",
                    pBasePacket->rtDTS, pExtensionPacket->rtDTS));
            return S_FALSE;
        }
    }
    // Queue exhausted: ask the BluRay layer for more extension packets, then retry.
    if (m_pBluRay && m_MVCExtensionQueue.empty())
    {
        HRESULT hr = m_pBluRay->FillMVCExtensionQueue(pBasePacket->rtDTS);
        if (hr == S_OK)
            return CombineMVCBaseExtension(pBasePacket);
        else if (FAILED(hr))
        {
            DbgLog((LOG_TRACE, 10, L"CLAVFDemuxer::CombineMVCBaseExtension(): Filling MVC extension queue failed"));
            return hr;
        }
    }
    DbgLog((LOG_TRACE, 10, L"CLAVFDemuxer::CombineMVCBaseExtension(): Ran out of extension packets for base %I64d",
            pBasePacket->rtDTS));
    return S_FALSE;
}
// Seeks the demuxer to the given reference time. The seek is performed on the
// active video stream when present (falling back to AV_TIME_BASE units
// otherwise), first as a keyframe seek, then as an "any frame" seek, then
// retried on the audio stream, and finally as a byte seek to 0 when all else
// fails for position 0. After a successful seek all stream parsers are reset.
STDMETHODIMP CLAVFDemuxer::Seek(REFERENCE_TIME rTime)
{
    int seekStreamId = m_dActiveStreams[video];
    int64_t seek_pts = 0;
// Re-entry point for retrying the seek on a different stream (see below).
retry:
    // If we have a video stream, seek on that one. If we don't, well, then don't!
    if (rTime > 0)
    {
        if (seekStreamId != -1)
        {
            // Convert to the chosen stream's time base.
            AVStream *stream = m_avFormat->streams[seekStreamId];
            seek_pts = ConvertRTToTimestamp(rTime, stream->time_base.num, stream->time_base.den);
        }
        else
        {
            seek_pts = ConvertRTToTimestamp(rTime, 1, AV_TIME_BASE);
        }
    }
    if (seek_pts < 0)
        seek_pts = 0;
    // Raw video has no timestamps to seek on; a byte seek to 0 is the only option.
    if (strcmp(m_pszInputFormat, "rawvideo") == 0 && seek_pts == 0)
        return SeekByte(0, AVSEEK_FLAG_BACKWARD);
    int flags = AVSEEK_FLAG_BACKWARD;
    int ret = av_seek_frame(m_avFormat, seekStreamId, seek_pts, flags);
    if (ret < 0)
    {
        DbgLog((LOG_CUSTOM1, 1, L"::Seek() -- Key-Frame Seek failed"));
        // Fallback 1: allow seeking to non-keyframes.
        ret = av_seek_frame(m_avFormat, seekStreamId, seek_pts, flags | AVSEEK_FLAG_ANY);
        if (ret < 0)
        {
            DbgLog((LOG_ERROR, 1, L"::Seek() -- Inaccurate Seek failed as well"));
            // Fallback 2: retry the whole procedure on the audio stream.
            if (seekStreamId == m_dActiveStreams[video] && seekStreamId != -1 && m_dActiveStreams[audio] != -1)
            {
                DbgLog((LOG_ERROR, 1, L"::Seek() -- retrying seek on audio stream"));
                seekStreamId = m_dActiveStreams[audio];
                goto retry;
            }
            // Fallback 3: for a seek to the start, try a plain byte seek.
            if (seek_pts == 0)
            {
                DbgLog((LOG_ERROR, 1, L" -> attempting byte seek to position 0"));
                return SeekByte(0, AVSEEK_FLAG_BACKWARD);
            }
        }
    }
    // Parser state is invalid after a seek; re-initialize all stream parsers.
    for (unsigned i = 0; i < m_avFormat->nb_streams; i++)
    {
        init_parser(m_avFormat, m_avFormat->streams[i]);
        UpdateParserFlags(m_avFormat->streams[i]);
    }
    m_bVC1SeenTimestamp = FALSE;
    // Flush MVC extensions on seek (no-op if empty)
    FlushMVCExtensionQueue();
    return S_OK;
}
// Performs a byte-based seek on the whole file (stream index -1), then resets
// all stream parsers whose state is invalid after any seek.
STDMETHODIMP CLAVFDemuxer::SeekByte(int64_t pos, int flags)
{
    if (av_seek_frame(m_avFormat, -1, pos, flags | AVSEEK_FLAG_BYTE) < 0)
    {
        DbgLog((LOG_ERROR, 1, L"::SeekByte() -- Seek failed"));
    }
    // Re-initialize every stream parser after the seek.
    for (unsigned streamIdx = 0; streamIdx < m_avFormat->nb_streams; streamIdx++)
    {
        init_parser(m_avFormat, m_avFormat->streams[streamIdx]);
        UpdateParserFlags(m_avFormat->streams[streamIdx]);
    }
    m_bVC1SeenTimestamp = FALSE;
    // Flush MVC extensions on seek (no-op if empty)
    FlushMVCExtensionQueue();
    return S_OK;
}
// Rewinds the demuxer to the start of the file via an unconditional byte seek.
STDMETHODIMP CLAVFDemuxer::Reset()
{
    return SeekByte(0, AVSEEK_FLAG_ANY);
}
// Returns the short name of the detected container (the ffmpeg input format name).
const char *CLAVFDemuxer::GetContainerFormat() const
{
    return m_pszInputFormat;
}
/////////////////////////////////////////////////////////////////////////////
// IAMExtendedSeeking
// IAMExtendedSeeking: seeking is always supported, marker seeking only when
// the container carries chapters.
STDMETHODIMP CLAVFDemuxer::get_ExSeekCapabilities(long *pExCapabilities)
{
    CheckPointer(pExCapabilities, E_POINTER);
    long caps = AM_EXSEEK_CANSEEK;
    if (m_avFormat->nb_chapters > 0)
        caps |= AM_EXSEEK_MARKERSEEK;
    *pExCapabilities = caps;
    return S_OK;
}
// IAMExtendedSeeking: markers map 1:1 onto the container's chapters.
STDMETHODIMP CLAVFDemuxer::get_MarkerCount(long *pMarkerCount)
{
    CheckPointer(pMarkerCount, E_POINTER);
    *pMarkerCount = static_cast<long>(m_avFormat->nb_chapters);
    return S_OK;
}
// Returns the 1-based chapter marker containing the current playback position.
// The position is preferably queried from the filter graph's IMediaSeeking;
// on failure it falls back to the demuxer's own m_rtCurrent. Returns E_FAIL
// when the position lies before the first chapter.
STDMETHODIMP CLAVFDemuxer::get_CurrentMarker(long *pCurrentMarker)
{
    CheckPointer(pCurrentMarker, E_POINTER);
    *pCurrentMarker = 0;
    REFERENCE_TIME rtCurrent = m_rtCurrent;
    IFilterGraph *pGraph = m_pSettings->GetFilterGraph();
    if (pGraph)
    {
        IMediaSeeking *pSeeking = nullptr;
        if (SUCCEEDED(pGraph->QueryInterface(&pSeeking)))
        {
            if (FAILED(pSeeking->GetCurrentPosition(&rtCurrent)))
            {
                DbgLog((LOG_TRACE, 10, L"get_CurrentMarker: Obtaining current playback position failed"));
                rtCurrent = m_rtCurrent;
            }
            SafeRelease(&pSeeking);
        }
        SafeRelease(&pGraph);
    }
    // Can the time_base change in between chapters?
    // Anyhow, we do the calculation in the loop, just to be safe
    for (unsigned int i = 0; i < m_avFormat->nb_chapters; ++i)
    {
        // Convert the playback position into this chapter's time base.
        int64_t pts = ConvertRTToTimestamp(rtCurrent, m_avFormat->chapters[i]->time_base.num,
                                           m_avFormat->chapters[i]->time_base.den);
        // Check if the pts is in between the bounds of the chapter
        if (pts >= m_avFormat->chapters[i]->start)
        {
            *pCurrentMarker = (i + 1);
            // Many files only have chapter start points and no end times
            if (pts <= m_avFormat->chapters[i]->end)
                return S_OK;
        }
    }
    // If the loop fell through, *pCurrentMarker holds the last chapter whose
    // start we passed (its end time was missing/exceeded), or 0 for "none".
    return *pCurrentMarker > 0 ? S_OK : E_FAIL;
}
// Returns the start time of the given chapter marker, in seconds.
STDMETHODIMP CLAVFDemuxer::GetMarkerTime(long MarkerNum, double *pMarkerTime)
{
    CheckPointer(pMarkerTime, E_POINTER);
    // Markers are 1-based; translate to the 0-based chapter array index.
    // (MarkerNum == 0 wraps around to UINT_MAX and fails the bounds check.)
    const unsigned int index = MarkerNum - 1;
    if (index >= m_avFormat->nb_chapters)
        return E_FAIL;
    const auto *chapter = m_avFormat->chapters[index];
    const REFERENCE_TIME rt = ConvertTimestampToRT(chapter->start, chapter->time_base.num, chapter->time_base.den);
    *pMarkerTime = (double)rt / DSHOW_TIME_BASE;
    return S_OK;
}
// Returns the chapter title for the given marker as a BSTR, synthesizing a
// "Chapter N" name when the container has no title metadata.
STDMETHODIMP CLAVFDemuxer::GetMarkerName(long MarkerNum, BSTR *pbstrMarkerName)
{
    CheckPointer(pbstrMarkerName, E_POINTER);
    // Markers are 1-based; translate to the 0-based chapter array index.
    const unsigned int index = MarkerNum - 1;
    if (index >= m_avFormat->nb_chapters)
        return E_FAIL;
    AVDictionaryEntry *dictEntry = av_dict_get(m_avFormat->chapters[index]->metadata, "title", nullptr, 0);
    if (dictEntry)
    {
        *pbstrMarkerName = ConvertCharToBSTR(dictEntry->value);
    }
    else
    {
        // No title in the metadata; generate one from the marker number.
        OLECHAR wTitle[128];
        swprintf_s(wTitle, L"Chapter %d", MarkerNum);
        *pbstrMarkerName = SysAllocString(wTitle);
    }
    return S_OK;
}
/////////////////////////////////////////////////////////////////////////////
// IKeyFrameInfo
// IKeyFrameInfo: counts keyframes in the active video stream's index.
// Only implemented for Matroska, AVI and MP4 containers, and declined
// (S_FALSE) for fragmented MP4 where the index is unreliable.
// Returns S_FALSE also when every frame is a keyframe (index not useful).
STDMETHODIMP CLAVFDemuxer::GetKeyFrameCount(UINT &nKFs)
{
    if (m_dActiveStreams[video] < 0)
    {
        return E_NOTIMPL;
    }
    if (!m_bMatroska && !m_bAVI && !m_bMP4)
    {
        return E_FAIL;
    }
    // No reliable info for fragmented mp4 files
    // NOTE(review): peeks into ffmpeg's private MOVContext; layout must match
    // the ffmpeg build this is linked against.
    if (m_bMP4)
    {
        MOVContext *mov = (MOVContext *)m_avFormat->priv_data;
        if (mov->frag_index.nb_items)
            return S_FALSE;
    }
    nKFs = 0;
    AVStream *stream = m_avFormat->streams[m_dActiveStreams[video]];
    int nb_indexes = avformat_index_get_entries_count(stream);
    // Count index entries flagged as keyframes.
    for (int i = 0; i < nb_indexes; i++)
    {
        const AVIndexEntry *entry = avformat_index_get_entry(stream, i);
        if (entry && (entry->flags & AVINDEX_KEYFRAME))
            nKFs++;
    }
    return (nKFs == stream->nb_frames) ? S_FALSE : S_OK;
}
// IKeyFrameInfo: fills pKFs (capacity nKFs on input, count on output) with the
// keyframe positions of the active video stream as REFERENCE_TIME values.
// Only TIME_FORMAT_MEDIA_TIME is supported. For MP4, index timestamps are DTS
// and are shifted to PTS via the per-sample CTTS offsets so seeking matches.
STDMETHODIMP CLAVFDemuxer::GetKeyFrames(const GUID *pFormat, REFERENCE_TIME *pKFs, UINT &nKFs)
{
    CheckPointer(pFormat, E_POINTER);
    CheckPointer(pKFs, E_POINTER);
    if (m_dActiveStreams[video] < 0)
    {
        return E_NOTIMPL;
    }
    if (!m_bMatroska && !m_bAVI && !m_bMP4)
    {
        return E_FAIL;
    }
    // No reliable info for fragmented mp4 files
    // NOTE(review): peeks into ffmpeg's private MOVContext/MOVStreamContext;
    // layout must match the ffmpeg build this is linked against.
    if (m_bMP4)
    {
        MOVContext *mov = (MOVContext *)m_avFormat->priv_data;
        if (mov->frag_index.nb_items)
            return S_FALSE;
    }
    if (*pFormat != TIME_FORMAT_MEDIA_TIME)
        return E_INVALIDARG;
    UINT nKFsMax = nKFs;
    nKFs = 0;
    // CTTS counter for MP4
    int ctts_sample_counter = 0;
    uint32_t ctts_index = 0;
    AVStream *stream = m_avFormat->streams[m_dActiveStreams[video]];
    int nb_indexes = avformat_index_get_entries_count(stream);
    for (int i = 0; i < nb_indexes && nKFs < nKFsMax; i++)
    {
        const AVIndexEntry *entry = avformat_index_get_entry(stream, i);
        if (entry && (entry->flags & AVINDEX_KEYFRAME))
        {
            int64_t timestamp = entry->timestamp;
            // MP4 index timestamps are DTS, seeking expects PTS however, so offset them accordingly to ensure seeking
            // works as expected
            if (m_bMP4)
            {
                MOVStreamContext *sc = (MOVStreamContext *)stream->priv_data;
                // Prefer explicit per-sample offsets when available.
                if (i < sc->sample_offsets_count)
                    timestamp += (sc->sample_offsets[i] + sc->dts_shift);
                else if (sc->ctts_count)
                {
                    // find the next CTTS entry, if needed
                    // (CTTS entries are run-length encoded: each covers "count" samples.)
                    while (ctts_sample_counter <= i && ctts_index < sc->ctts_count)
                    {
                        ctts_sample_counter += sc->ctts_data[ctts_index++].count;
                    }
                    // apply the CTTS offset to the timestamp
                    if (ctts_sample_counter > i)
                        timestamp += (sc->ctts_data[ctts_index - 1].duration + sc->dts_shift);
                    else
                        timestamp += (sc->min_corrected_pts + sc->dts_shift);
                }
                else
                    timestamp += (sc->min_corrected_pts + sc->dts_shift);
            }
            pKFs[nKFs] = ConvertTimestampToRT(timestamp, stream->time_base.num, stream->time_base.den);
            nKFs++;
        }
    }
    return S_OK;
}
int CLAVFDemuxer::GetStreamIdxFromTotalIdx(size_t index) const
{
const stream *st = GetStreamFromTotalIdx(index);
if (st)
return st->pid;
return -1;
}
// Resolves a flat track index into the per-type stream lists.
// The flat index enumerates video streams first, then audio, then subtitles;
// returns nullptr when the index is past the end of all three lists.
const CBaseDemuxer::stream *CLAVFDemuxer::GetStreamFromTotalIdx(size_t index) const
{
    const int order[3] = {video, audio, subpic};
    for (int type : order)
    {
        const size_t count = m_streams[type].size();
        if (index < count)
            return &m_streams[type][index];
        index -= count;
    }
    return nullptr;
}
/////////////////////////////////////////////////////////////////////////////
// ITrackInfo
// ITrackInfo: total number of exposed tracks (video + audio + subtitle streams).
STDMETHODIMP_(UINT) CLAVFDemuxer::GetTrackCount()
{
    if (!m_avFormat)
        return 0;
    return (UINT)(m_streams[video].size() + m_streams[audio].size() + m_streams[subpic].size());
}
// \param aTrackIdx the track index (from 0 to GetTrackCount()-1)
// ITrackInfo: fills a TrackElement for the given flat track index.
// Returns FALSE on invalid arguments, an unknown track, or the "no subtitles"
// placeholder track. The synthetic forced-subtitle track gets a fixed entry.
STDMETHODIMP_(BOOL) CLAVFDemuxer::GetTrackInfo(UINT aTrackIdx, struct TrackElement *pStructureToFill)
{
    DbgLog((LOG_TRACE, 20, L"ITrackInfo::GetTrackInfo(): index %d, struct: %p", aTrackIdx, pStructureToFill));
    if (!m_avFormat || !pStructureToFill)
        return FALSE;
    ZeroMemory(pStructureToFill, sizeof(*pStructureToFill));
    pStructureToFill->Size = sizeof(*pStructureToFill);
    const stream *st = GetStreamFromTotalIdx(aTrackIdx);
    if (!st || st->pid < 0 || st->pid == NO_SUBTITLE_PID)
        return FALSE;
    if (st->pid == FORCED_SUBTITLE_PID)
    {
        // Synthetic forced-subtitle track: always a forced subtitle, no language.
        pStructureToFill->FlagDefault = 0;
        pStructureToFill->FlagForced = 1;
        pStructureToFill->Type = TypeSubtitle;
        strcpy_s(pStructureToFill->Language, "und");
    }
    else
    {
        const AVStream *avst = m_avFormat->streams[st->pid];
        // Fill structure
        pStructureToFill->FlagDefault = (avst->disposition & AV_DISPOSITION_DEFAULT);
        pStructureToFill->FlagForced = (avst->disposition & AV_DISPOSITION_FORCED);
        strncpy_s(pStructureToFill->Language, st->language.c_str(), _TRUNCATE);
        // Truncate to a 3-letter (ISO 639-2 style) code.
        pStructureToFill->Language[3] = '\0';
        pStructureToFill->Type = (avst->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
                                     ? TypeVideo
                                     : (avst->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
                                           ? TypeAudio
                                           : (avst->codecpar->codec_type == AVMEDIA_TYPE_SUBTITLE) ? TypeSubtitle : 0;
    }
    // The following flags are not exported via avformat
    pStructureToFill->FlagLacing = 0;
    pStructureToFill->MaxCache = 0;
    pStructureToFill->MinCache = 0;
    return TRUE;
}
// Get an extended information struct relative to the track type
// ITrackInfo: fills a TrackExtendedInfoVideo or TrackExtendedInfoAudio struct,
// depending on the track's media type. pStructureToFill must point to a buffer
// large enough for the respective struct. Returns FALSE on invalid arguments.
STDMETHODIMP_(BOOL) CLAVFDemuxer::GetTrackExtendedInfo(UINT aTrackIdx, void *pStructureToFill)
{
    if (!m_avFormat || !pStructureToFill)
        return FALSE;
    const int streamId = GetStreamIdxFromTotalIdx(aTrackIdx);
    if (streamId < 0 || (unsigned)streamId >= m_avFormat->nb_streams)
        return FALSE;
    const AVStream *avst = m_avFormat->streams[streamId];
    if (avst->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
    {
        TrackExtendedInfoVideo *pVideoInfo = (TrackExtendedInfoVideo *)pStructureToFill;
        ZeroMemory(pVideoInfo, sizeof(*pVideoInfo));
        pVideoInfo->Size = sizeof(*pVideoInfo);
        pVideoInfo->DisplayUnit = 0; // always pixels
        // Display size equals the coded size; no aspect-ratio scaling applied.
        pVideoInfo->DisplayWidth = avst->codecpar->width;
        pVideoInfo->DisplayHeight = avst->codecpar->height;
        pVideoInfo->PixelWidth = avst->codecpar->width;
        pVideoInfo->PixelHeight = avst->codecpar->height;
        pVideoInfo->AspectRatioType = 0;
        pVideoInfo->Interlaced = 0;
    }
    else if (avst->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
    {
        TrackExtendedInfoAudio *pAudioInfo = (TrackExtendedInfoAudio *)pStructureToFill;
        ZeroMemory(pAudioInfo, sizeof(*pAudioInfo));
        pAudioInfo->Size = sizeof(*pAudioInfo);
        pAudioInfo->BitDepth = avst->codecpar->bits_per_coded_sample;
        pAudioInfo->Channels = avst->codecpar->ch_layout.nb_channels;
        pAudioInfo->OutputSamplingFrequency = (FLOAT)avst->codecpar->sample_rate;
        pAudioInfo->SamplingFreq = (FLOAT)avst->codecpar->sample_rate;
    }
    return TRUE;
}
// ITrackInfo: returns the user-visible track name as a newly allocated BSTR,
// or nullptr when the track is unknown or has no name.
STDMETHODIMP_(BSTR) CLAVFDemuxer::GetTrackName(UINT aTrackIdx)
{
    const stream *st = GetStreamFromTotalIdx(aTrackIdx);
    if (!st || st->trackName.empty())
        return nullptr;
    return ConvertCharToBSTR(st->trackName.c_str());
}
// ITrackInfo: returns a human-readable codec name for the track as a newly
// allocated BSTR, or nullptr when the track index is invalid or no codec name
// is available.
STDMETHODIMP_(BSTR) CLAVFDemuxer::GetTrackCodecName(UINT aTrackIdx)
{
    if (!m_avFormat)
        return nullptr;
    int id = GetStreamIdxFromTotalIdx(aTrackIdx);
    // Fix: previously "return FALSE" -- an integer in a BSTR-returning function;
    // return nullptr explicitly for an invalid stream id (matches GetTrackName).
    if (id < 0 || (unsigned)id >= m_avFormat->nb_streams)
        return nullptr;
    const AVStream *st = m_avFormat->streams[id];
    BSTR codecName = nullptr;
    std::string codec = get_codec_name(st->codecpar);
    if (!codec.empty())
    {
        codecName = ConvertCharToBSTR(codec.c_str());
    }
    return codecName;
}
/////////////////////////////////////////////////////////////////////////////
// IPropertyBag
// Property-name mapping table for IPropertyBag::Read: translates externally
// requested property names into the metadata keys used internally, and selects
// which active stream's metadata (if any) the lookup should target.
static struct
{
    const char *original; // property name as requested by the caller
    const char *map;      // internal metadata key, or nullptr to use "original" unchanged
    int stream; // 0 = none, 1 = video, 2 = audio, 3 = sub
} mappedPropertys[] = {
    {"rotation", "rotate", 1},
    {"rotate", nullptr, 1},
    {"stereoscopic3dmode", "stereo_mode", 1},
    {"stereo_mode", nullptr, 1},
    {"stereo_subtitle_offset_id", "3d-plane", 3},
    {"stereo_subtitle_offset_ids", "pg_offset_sequences", 0},
    {"stereo_interactive_offset_ids", "ig_offset_sequences", 0},
};
// IPropertyBag::Read - resolves a named property to a BSTR value.
// The requested name is first translated via the mappedPropertys table (which
// may also restrict the lookup to the relevant active stream's metadata), then
// looked up with GetBSTRMetadata. Only VT_EMPTY/VT_BSTR variants are accepted.
STDMETHODIMP CLAVFDemuxer::Read(LPCOLESTR pszPropName, VARIANT *pVar, IErrorLog *pErrorLog)
{
    CheckPointer(pszPropName, E_INVALIDARG);
    CheckPointer(pVar, E_INVALIDARG);
    // -1 means "container-level metadata, no specific stream".
    int stream = -1;
    // Verify type
    if (pVar->vt != VT_EMPTY && pVar->vt != VT_BSTR)
        return E_FAIL;
    ATL::CW2A propNameConv(pszPropName);
    const char *propName = propNameConv;
    // Map property names
    for (int i = 0; i < countof(mappedPropertys); i++)
    {
        if (_stricmp(propName, mappedPropertys[i].original) == 0)
        {
            if (mappedPropertys[i].map)
                propName = mappedPropertys[i].map;
            if (mappedPropertys[i].stream)
            {
                // The table stores stream type + 1 (0 meaning "none").
                int nStreamType = mappedPropertys[i].stream - 1;
                stream = m_dActiveStreams[nStreamType];
                // The synthetic forced-subtitle track maps to its real source stream.
                if (nStreamType == subpic && stream == FORCED_SUBTITLE_PID)
                    stream = m_ForcedSubStream;
            }
            break;
        }
    }
    BSTR bstrValue = nullptr;
    HRESULT hr = GetBSTRMetadata(propName, &bstrValue, stream);
    if (SUCCEEDED(hr))
    {
        // Ownership of bstrValue transfers to the caller via the variant.
        VariantClear(pVar);
        pVar->vt = VT_BSTR;
        pVar->bstrVal = bstrValue;
    }
    return hr;
}
// IPropertyBag::Write - demuxer properties are read-only; writing is not supported.
STDMETHODIMP CLAVFDemuxer::Write(LPCOLESTR pszPropName, VARIANT *pVar)
{
    return E_NOTIMPL;
}
/////////////////////////////////////////////////////////////////////////////
// Internal Functions
// Registers one avformat stream in the demuxer's per-type stream lists
// (m_streams[video/audio/subpic]), extracting language, LCID and track title.
// Unusable streams (unknown type, discarded, attached pictures, sub-streams
// when disabled, or no codec id/tag) are marked AVDISCARD_ALL and skipped with
// S_FALSE. Returns E_FAIL for media types that have no list to go into.
STDMETHODIMP CLAVFDemuxer::AddStream(int streamId)
{
    HRESULT hr = S_OK;
    AVStream *pStream = m_avFormat->streams[streamId];
    // Reject streams we cannot or should not expose.
    if (pStream->codecpar->codec_type == AVMEDIA_TYPE_UNKNOWN || pStream->discard == AVDISCARD_ALL ||
        (pStream->codecpar->codec_id == AV_CODEC_ID_NONE && pStream->codecpar->codec_tag == 0) ||
        (!m_bSubStreams && (pStream->disposition & LAVF_DISPOSITION_SUB_STREAM)) ||
        (pStream->disposition & AV_DISPOSITION_ATTACHED_PIC))
    {
        // Tell avformat to stop demuxing this stream entirely.
        pStream->discard = AVDISCARD_ALL;
        return S_FALSE;
    }
    stream s;
    s.pid = streamId;
    // Extract language
    const char *lang = nullptr;
    if (AVDictionaryEntry *dictEntry = av_dict_get(pStream->metadata, "language", nullptr, 0))
    {
        lang = dictEntry->value;
    }
    if (lang)
    {
        // Normalize to an ISO 639-2 code and derive a Windows LCID from it.
        s.language = ProbeForISO6392(lang);
        s.lcid = ProbeLangForLCID(s.language.c_str());
    }
    else
    {
        s.language = "und";
        s.lcid = 0;
    }
    const char *title = lavf_get_stream_title(pStream);
    if (title)
        s.trackName = title;
    // Build the DirectShow media type(s) for this stream; hr reports failures.
    s.streamInfo = new CLAVFStreamInfo(m_avFormat, pStream, m_pszInputFormat, hr);
    if (hr != S_OK)
    {
        delete s.streamInfo;
        pStream->discard = AVDISCARD_ALL;
        return hr;
    }
    switch (pStream->codecpar->codec_type)
    {
    case AVMEDIA_TYPE_VIDEO: m_streams[video].push_back(s); break;
    case AVMEDIA_TYPE_AUDIO: m_streams[audio].push_back(s); break;
    case AVMEDIA_TYPE_SUBTITLE: m_streams[subpic].push_back(s); break;
    default:
        // unsupported stream
        // Normally this should be caught while creating the stream info already.
        delete s.streamInfo;
        return E_FAIL;
    }
    return S_OK;
}
// Pin creation
// Enumerates all streams of the selected program (or of the whole file when no
// program applies), registers them via AddStream, repairs missing/broken
// container duration and start_time, and creates the synthetic "forced PGS"
// and "no subtitles" streams where appropriate.
STDMETHODIMP CLAVFDemuxer::CreateStreams()
{
    DbgLog((LOG_TRACE, 10, L"CLAVFDemuxer::CreateStreams()"));
    CAutoLock lock(m_pLock);

    // Reset any previously collected streams
    for (int i = 0; i < countof(m_streams); ++i)
    {
        m_streams[i].Clear();
    }

    m_program = UINT_MAX;

    if (m_avFormat->nb_programs && !m_pBluRay)
    {
        DbgLog(
            (LOG_TRACE, 10, L" -> File has %d programs, trying to detect the correct one..", m_avFormat->nb_programs));

        // Use a scoring system to select the best available program
        // A "good" program at least has a valid video and audio stream
        // We'll try here to detect these streams and decide on the best program
        // Every present stream gets one point, if it appears to be valid, it gets 4
        // Every present video stream has also video resolution score: width x height.
        // Valid video streams have a width and height, valid audio streams have a channel count.
        // Total program bitrate is used as a tiebreaker.
        // We search for "good" program with highest score.
        DWORD dwScore = 0;                       // Stream found: 1, stream valid: 4
        DWORD dwVideoResolutionProgramScore = 0; // Score = width x height
        DWORD dwProgramBitrate = 0;              // Total bitrate of the program
        for (unsigned int i = 0; i < m_avFormat->nb_programs; ++i)
        {
            AVProgram *program = m_avFormat->programs[i];
            if (program->nb_stream_indexes > 0)
            {
                DWORD dwVideoScore = 0;
                DWORD dwVideoResolutionScore = 0;
                DWORD dwAudioScore = 0;
                DWORD dwBitrate = 0;
                for (unsigned k = 0; k < program->nb_stream_indexes; ++k)
                {
                    unsigned streamIdx = program->stream_index[k];
                    AVStream *st = m_avFormat->streams[streamIdx];
                    if (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
                    {
                        // A video stream with known dimensions counts as valid
                        if (st->codecpar->width != 0 && st->codecpar->height != 0)
                        {
                            dwVideoScore = 4;
                            DWORD dwResolutionScore = st->codecpar->width * st->codecpar->height;
                            if (dwResolutionScore > dwVideoResolutionScore)
                                dwVideoResolutionScore = dwResolutionScore;
                        }
                        else if (dwVideoScore == 0)
                            dwVideoScore = 1;
                    }
                    else if (st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO && dwAudioScore < 4)
                    {
                        // An audio stream with a known channel count counts as valid
                        if (st->codecpar->ch_layout.nb_channels != 0)
                            dwAudioScore = 4;
                        else
                            dwAudioScore = 1;
                    }
                }
                AVDictionaryEntry *dict = av_dict_get(program->metadata, "variant_bitrate", nullptr, 0);
                if (dict && dict->value)
                    dwBitrate = atol(dict->value);

                // Check the score of the previously found stream
                // In addition, we always require a valid video stream (or none), a invalid one is not allowed.
                DbgLog((LOG_TRACE, 10,
                        L" -> Program %d with score: %ld (video), %ld (video resolution), %ld (audio), %ld (bitrate)",
                        i, dwVideoScore, dwVideoResolutionScore, dwAudioScore, dwBitrate));
                DWORD dwVideoAndAudioScore = dwVideoScore + dwAudioScore;
                if (dwVideoScore != 1 &&
                    (dwVideoAndAudioScore > dwScore ||
                     (dwVideoAndAudioScore == dwScore && dwVideoResolutionScore > dwVideoResolutionProgramScore) ||
                     (dwVideoAndAudioScore == dwScore && dwVideoResolutionScore == dwVideoResolutionProgramScore &&
                      dwBitrate > dwProgramBitrate)))
                {
                    dwScore = dwVideoAndAudioScore;
                    dwVideoResolutionProgramScore = dwVideoResolutionScore;
                    dwProgramBitrate = dwBitrate;
                    m_program = i;
                }
            }
        }
        DbgLog((LOG_TRACE, 10, L" -> Using Program %d", m_program));
    }

    // File has programs
    bool bProgram = (m_program < m_avFormat->nb_programs);

    // Discard unwanted programs
    if (bProgram)
    {
        for (unsigned int i = 0; i < m_avFormat->nb_programs; ++i)
        {
            if (i != m_program)
                m_avFormat->programs[i]->discard = AVDISCARD_ALL;
        }
    }

    // Re-compute the overall file duration based on video and audio durations
    int64_t duration = INT64_MIN;
    int64_t st_duration = 0;
    int64_t start_time = INT64_MAX;
    int64_t st_start_time = 0;

    // Number of streams (either in file or in program)
    unsigned int nbIndex = bProgram ? m_avFormat->programs[m_program]->nb_stream_indexes : m_avFormat->nb_streams;

    // File has PGS streams
    bool bHasPGS = false;

    // add streams from selected program, or all streams if no program was selected
    for (unsigned int i = 0; i < nbIndex; ++i)
    {
        int streamIdx = bProgram ? m_avFormat->programs[m_program]->stream_index[i] : i;
        if (S_OK != AddStream(streamIdx))
            continue;

        AVStream *st = m_avFormat->streams[streamIdx];
        if (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO || st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
        {
            // Track the longest stream duration and earliest start time
            if (st->duration != AV_NOPTS_VALUE)
            {
                st_duration = av_rescale_q(st->duration, st->time_base, AV_RATIONAL_TIMEBASE);
                if (st_duration > duration)
                    duration = st_duration;
            }

            if (st->start_time != AV_NOPTS_VALUE)
            {
                st_start_time = av_rescale_q(st->start_time, st->time_base, AV_RATIONAL_TIMEBASE);
                if (st_start_time < start_time)
                    start_time = st_start_time;
            }
        }
        if (st->codecpar->codec_id == AV_CODEC_ID_HDMV_PGS_SUBTITLE)
            bHasPGS = true;
    }

    // Override the container values when they are missing, or when the file has
    // timestamp discontinuities
    if ((m_bTSDiscont || m_avFormat->duration == AV_NOPTS_VALUE) && duration != INT64_MIN)
    {
        DbgLog(
            (LOG_TRACE, 10, L" -> Changing duration to %I64d (from %I64d, diff %.3fs)", duration, m_avFormat->duration,
             m_avFormat->duration == AV_NOPTS_VALUE ? 0.0f
                                                    : (float)(duration - m_avFormat->duration) / (float)AV_TIME_BASE));
        m_avFormat->duration = duration;
    }
    if ((m_bTSDiscont || m_avFormat->start_time == AV_NOPTS_VALUE) && start_time != INT64_MAX)
    {
        DbgLog((LOG_TRACE, 10, L" -> Changing start_time to %I64d (from %I64d, diff %.3fs)", start_time,
                m_avFormat->start_time,
                m_avFormat->start_time == AV_NOPTS_VALUE
                    ? 0.0f
                    : (float)(start_time - m_avFormat->start_time) / (float)AV_TIME_BASE));
        m_avFormat->start_time = start_time;
    }

    if (bHasPGS && m_pSettings->GetPGSForcedStream())
    {
        CreatePGSForcedSubtitleStream();
    }

    // Create fake subtitle pin
    if (!m_streams[subpic].empty())
    {
        CreateNoSubtitleStream();
    }

    if (m_bMPEGTS)
    {
        m_bH264MVCCombine = GetH264MVCStreamIndices(m_avFormat, &m_nH264MVCBaseStream, &m_nH264MVCExtensionStream);
    }

    if (m_bMatroska)
    {
        // Collect all "3d-plane" metadata values from the subtitle tracks
        std::list<std::string> pg_sequences;
        for (unsigned i = 0; i < m_avFormat->nb_streams; i++)
        {
            if (m_avFormat->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_SUBTITLE)
            {
                AVDictionaryEntry *e =
                    av_dict_get(m_avFormat->streams[i]->metadata, "3d-plane", nullptr, AV_DICT_IGNORE_SUFFIX);
                if (e && e->value)
                {
                    pg_sequences.push_back(std::string(e->value));
                }
            }
        }
        // export the list of pg sequences
        if (pg_sequences.size() > 0)
        {
            // strip duplicate entries
            pg_sequences.sort();
            pg_sequences.unique();

            // Join all values into one comma-separated string.
            // NOTE: the previous implementation used a raw char buffer sized at
            // four bytes per entry, which would overflow the strcpy_s bounds if
            // a "3d-plane" value was ever longer than three characters, and
            // re-scanned the buffer with strlen on every iteration. std::string
            // sidesteps the sizing problem entirely.
            std::string offsets;
            for (const std::string &seq : pg_sequences)
            {
                if (!offsets.empty())
                    offsets += ',';
                offsets += seq;
            }
            av_dict_set(&m_avFormat->metadata, "pg_offset_sequences", offsets.c_str(), 0);
        }
    }

    return S_OK;
}
// Return the file's start time, converted from AV_TIME_BASE units into
// DirectShow 100ns REFERENCE_TIME units.
REFERENCE_TIME CLAVFDemuxer::GetStartTime() const
{
    const int64_t rtStart = av_rescale(m_avFormat->start_time, DSHOW_TIME_BASE, AV_TIME_BASE);
    return rtStart;
}
// Converts the lavf pts timestamp to a DShow REFERENCE_TIME
// Based on DVDDemuxFFMPEG
REFERENCE_TIME CLAVFDemuxer::ConvertTimestampToRT(int64_t pts, int num, int den, int64_t starttime) const
{
    if (pts == (int64_t)AV_NOPTS_VALUE)
        return Packet::INVALID_TIME;

    // When the caller did not supply a start time, derive it from the
    // container, converted into the stream's num/den time base.
    if (starttime == AV_NOPTS_VALUE)
    {
        starttime = (m_avFormat->start_time != AV_NOPTS_VALUE)
                        ? av_rescale(m_avFormat->start_time, den, (int64_t)AV_TIME_BASE * num)
                        : 0;
    }

    // Shift the timestamp so playback starts at zero
    if (starttime != 0)
        pts -= starttime;

    // Let av_rescale do the work, its smart enough to not overflow
    return av_rescale(pts, (int64_t)num * DSHOW_TIME_BASE, den);
}
// Converts a DShow REFERENCE_TIME back into a lavf pts timestamp
// Based on DVDDemuxFFMPEG
int64_t CLAVFDemuxer::ConvertRTToTimestamp(REFERENCE_TIME timestamp, int num, int den, int64_t starttime) const
{
    if (timestamp == Packet::INVALID_TIME)
        return (int64_t)AV_NOPTS_VALUE;

    // When the caller did not supply a start time, derive it from the
    // container, converted into the stream's num/den time base.
    if (starttime == AV_NOPTS_VALUE)
    {
        starttime = (m_avFormat->start_time != AV_NOPTS_VALUE)
                        ? av_rescale(m_avFormat->start_time, den, (int64_t)AV_TIME_BASE * num)
                        : 0;
    }

    int64_t pts = av_rescale(timestamp, den, (int64_t)num * DSHOW_TIME_BASE);
    if (starttime != 0)
        pts += starttime;

    return pts;
}
// Re-target the synthetic "forced subtitles" stream to the language of the
// given audio stream: pick a matching PGS subtitle stream (preferring the
// audio language, falling back to any language) and patch the placeholder
// stream's language metadata accordingly.
// Returns S_OK when a matching stream was found, S_FALSE when none was,
// E_UNEXPECTED / E_FAIL on invalid input.
HRESULT CLAVFDemuxer::UpdateForcedSubtitleStream(unsigned audio_pid)
{
    if (!m_avFormat || audio_pid >= m_avFormat->nb_streams)
        return E_UNEXPECTED;

    stream *audiost = GetStreams(audio)->FindStream(audio_pid);
    if (!audiost)
        return E_FAIL;

    // Build CSubtitleSelector for this special case
    // First selector: PGS stream in the audio language; second: PGS in any language
    std::list<CSubtitleSelector> selectors;
    CSubtitleSelector selector;
    selector.audioLanguage = "*";
    selector.subtitleLanguage = audiost->language;
    selector.dwFlagsSet = SUBTITLE_FLAG_PGS;
    selector.dwFlagsNot = 0;
    selectors.push_back(selector);
    selector.subtitleLanguage = "*";
    selectors.push_back(selector);

    const stream *subst = SelectSubtitleStream(selectors, audiost->language);
    if (subst)
    {
        m_ForcedSubStream = subst->pid;

        // Update the placeholder forced-subtitle stream so it reports the
        // audio stream's language
        CStreamList *streams = GetStreams(subpic);
        stream *forced = streams->FindStream(FORCED_SUBTITLE_PID);
        if (forced)
        {
            // Patch the media type in place: take it off the list, rewrite the
            // ISO language field, then put it back
            CMediaType mtype = forced->streamInfo->mtypes.back();
            forced->streamInfo->mtypes.pop_back();
            forced->language = audiost->language;
            forced->lcid = audiost->lcid;
            SUBTITLEINFO *subInfo = (SUBTITLEINFO *)mtype.Format();
            strncpy_s(subInfo->IsoLang, audiost->language.c_str(), 3);
            subInfo->IsoLang[3] = 0;
            forced->streamInfo->mtypes.push_back(mtype);
        }
    }

    return subst ? S_OK : S_FALSE;
}
// Select the best video stream
// Preference order: any known codec over AV_CODEC_ID_NONE, then streams with
// the default disposition, then (with container-specific tweaks) the highest
// resolution, with bitrate as the final tiebreaker.
const CBaseDemuxer::stream *CLAVFDemuxer::SelectVideoStream()
{
    const stream *best = nullptr;
    CStreamList *streams = GetStreams(video);

    std::deque<stream>::iterator it;
    for (it = streams->begin(); it != streams->end(); ++it)
    {
        stream *check = &*it;
        if (!best)
        {
            best = check;
            continue;
        }

        // if the best stream is an unknown codec, prefer any other
        if (m_avFormat->streams[best->pid]->codecpar->codec_id == AV_CODEC_ID_NONE &&
            m_avFormat->streams[check->pid]->codecpar->codec_id != AV_CODEC_ID_NONE)
        {
            best = check;
            continue;
        }

        // prefer default streams
        bool checkDefault = m_avFormat->streams[check->pid]->disposition & AV_DISPOSITION_DEFAULT;
        bool bestDefault = m_avFormat->streams[best->pid]->disposition & AV_DISPOSITION_DEFAULT;
        if (checkDefault != bestDefault)
        {
            if (checkDefault)
                best = check;
            continue;
        }

        uint64_t bestPixels = (uint64_t)m_avFormat->streams[best->pid]->codecpar->width *
                              m_avFormat->streams[best->pid]->codecpar->height;
        uint64_t checkPixels = (uint64_t)m_avFormat->streams[check->pid]->codecpar->width *
                               m_avFormat->streams[check->pid]->codecpar->height;

        int check_nb_f = av_lav_stream_codec_info_nb_frames(m_avFormat->streams[check->pid]);
        int best_nb_f = av_lav_stream_codec_info_nb_frames(m_avFormat->streams[best->pid]);
        // RealMedia: prefer a stream that produced frames during probing over one that did not
        if (m_bRM && (check_nb_f > 0 && best_nb_f <= 0))
        {
            best = check;
        }
        else if (m_bMP4 && m_avFormat->streams[check->pid]->nb_frames == 1 && m_avFormat->streams[best->pid]->nb_frames > 1)
        {
            // avoid selecting a video stream with only one frame
        }
        else if (m_bMP4 && m_avFormat->streams[best->pid]->nb_frames == 1 && m_avFormat->streams[check->pid]->nb_frames > 1)
        {
            // prefer a stream with more then one frame, if available
            best = check;
        }
        else if (!m_bRM || check_nb_f > 0)
        {
            if (checkPixels > bestPixels)
            {
                best = check;
            }
            else if (checkPixels == bestPixels)
            {
                // Same resolution: prefer the higher bitrate (only when both are known)
                int64_t best_rate = m_avFormat->streams[best->pid]->codecpar->bit_rate;
                int64_t check_rate = m_avFormat->streams[check->pid]->codecpar->bit_rate;
                if (best_rate && check_rate && check_rate > best_rate)
                    best = check;
            }
        }
    }
    return best;
}
// Rank an audio codec for automatic stream selection; higher values win.
// Purely lossless codecs score highest, followed by hybrid lossy/lossless
// (DTS-HD variants), then the common lossy codecs.
static int audio_codec_priority(const AVCodecParameters *par)
{
    const AVCodecDescriptor *desc = avcodec_descriptor_get(par->codec_id);
    int score = 0;

    if (desc && ((desc->props & (AV_CODEC_PROP_LOSSLESS | AV_CODEC_PROP_LOSSY)) == AV_CODEC_PROP_LOSSLESS))
    {
        // lossless codecs have highest priority
        score = 10;
    }
    else if (desc && (desc->props & AV_CODEC_PROP_LOSSLESS))
    {
        // codecs flagged both lossy and lossless (DTS family)
        score = 8;
        if (par->codec_id == AV_CODEC_ID_DTS)
        {
            score = 7;
            switch (par->profile)
            {
            case FF_PROFILE_DTS_EXPRESS: score -= 1; break;
            case FF_PROFILE_DTS_HD_MA: score += 3; break;
            case FF_PROFILE_DTS_HD_HRA: score += 2; break;
            default:
                if (par->profile >= FF_PROFILE_DTS_ES)
                    score += 1;
                break;
            }
        }
    }
    else
    {
        switch (par->codec_id)
        {
        case AV_CODEC_ID_EAC3: score = 7; break;
        case AV_CODEC_ID_AC3:
        case AV_CODEC_ID_AAC:
        case AV_CODEC_ID_AAC_LATM: score = 5; break;
        case AV_CODEC_ID_MP3: score = 3; break;
        }

        // WAVE_FORMAT_EXTENSIBLE is multi-channel PCM, which doesn't have a proper tag otherwise
        if (par->codec_tag == WAVE_FORMAT_EXTENSIBLE)
            score = 10;
    }

    // low priority for S302M with non-pcm content
    if (par->codec_id == AV_CODEC_ID_S302M && par->codec_tag != -1)
        score = -1;

    return score;
}
// Select the best audio stream
// Preference order: first preferred language that matches any stream, then the
// default disposition, then (among the remaining candidates) the impaired
// preference, channel count, and codec quality (audio_codec_priority).
const CBaseDemuxer::stream *CLAVFDemuxer::SelectAudioStream(std::list<std::string> prefLanguages)
{
    const stream *best = nullptr;
    CStreamList *streams = GetStreams(audio);

    std::deque<stream *> checkedStreams;

    // Filter for language
    if (!prefLanguages.empty())
    {
        std::list<std::string>::iterator it;
        for (it = prefLanguages.begin(); it != prefLanguages.end(); ++it)
        {
            std::string checkLanguage = ProbeForISO6392(it->c_str());
            std::deque<stream>::iterator sit;
            for (sit = streams->begin(); sit != streams->end(); ++sit)
            {
                std::string language = sit->language;
                // check if the language matches
                if (language == checkLanguage)
                {
                    checkedStreams.push_back(&*sit);
                }
            }
            // First language that has any streams is a match
            if (!checkedStreams.empty())
            {
                break;
            }
        }
    }

    // If no language was set, or no matching streams were found
    // Put all streams in there
    if (checkedStreams.empty())
    {
        std::deque<stream>::iterator sit;
        for (sit = streams->begin(); sit != streams->end(); ++sit)
        {
            checkedStreams.push_back(&*sit);
        }
    }

    // Check for a stream with a default flag
    // If in our current set is one, that one prevails
    std::deque<stream *>::iterator sit;
    for (sit = checkedStreams.begin(); sit != checkedStreams.end(); ++sit)
    {
        if (m_avFormat->streams[(*sit)->pid]->disposition & AV_DISPOSITION_DEFAULT)
        {
            best = *sit;
            break;
        }
    }

    BOOL bCheckQuality = m_pSettings->GetPreferHighQualityAudioStreams();
    BOOL bImpaired = m_pSettings->GetUseAudioForHearingVisuallyImpaired();
#define DISPO_IMPAIRED (AV_DISPOSITION_HEARING_IMPAIRED | AV_DISPOSITION_VISUAL_IMPAIRED)

    if (!best && !checkedStreams.empty())
    {
        // If only one stream is left, just use that one
        if (checkedStreams.size() == 1)
        {
            best = checkedStreams.at(0);
        }
        else
        {
            // Check for quality
            std::deque<stream *>::iterator sit;
            for (sit = checkedStreams.begin(); sit != checkedStreams.end(); ++sit)
            {
                if (!best)
                {
                    best = *sit;
                    continue;
                }
                AVStream *old_stream = m_avFormat->streams[best->pid];
                AVStream *new_stream = m_avFormat->streams[(*sit)->pid];
                int check_nb_f = av_lav_stream_codec_info_nb_frames(new_stream);
                int best_nb_f = av_lav_stream_codec_info_nb_frames(old_stream);
                // RealMedia: prefer a stream that produced frames during probing
                if (m_bRM && (check_nb_f > 0 && best_nb_f <= 0))
                {
                    best = *sit;
                }
                else if (!m_bRM || check_nb_f > 0)
                {
                    // When exactly one of the two streams is an impaired track,
                    // honor the user's impaired-audio preference
                    if (!(old_stream->disposition & DISPO_IMPAIRED) != !(new_stream->disposition & DISPO_IMPAIRED))
                    {
                        if ((bImpaired && !(old_stream->disposition & DISPO_IMPAIRED)) ||
                            (!bImpaired && !(new_stream->disposition & DISPO_IMPAIRED)))
                        {
                            best = *sit;
                        }
                        continue;
                    }

                    if (!bCheckQuality)
                        continue;

                    // First, check number of channels
                    int old_num_chans = old_stream->codecpar->ch_layout.nb_channels;
                    int new_num_chans = new_stream->codecpar->ch_layout.nb_channels;
                    if (new_num_chans > old_num_chans)
                    {
                        best = *sit;
                    }
                    else if (new_num_chans == old_num_chans)
                    {
                        // Same number of channels, check codec
                        int old_priority = audio_codec_priority(old_stream->codecpar);
                        int new_priority = audio_codec_priority(new_stream->codecpar);
                        if (new_priority > old_priority)
                        {
                            best = *sit;
                        }
                    }
                }
            }
        }
    }

    return best;
}
// Check whether a selector language matches a stream language; the wildcard
// "*" matches anything.
// Takes the arguments by const reference instead of by value, avoiding two
// std::string copies on every call inside the stream-selection loops.
static inline bool does_language_match(const std::string &selector, const std::string &selectee)
{
    return (selector == "*" || selector == selectee);
}
// ugly hack to only convert ascii to lower case, as there is no proper unicode function for utf-8 in std::string
static inline char asciitolower(char in)
{
    // 'Z' - 'z' is -32, so subtracting it maps 'A'..'Z' onto 'a'..'z'
    return (in >= 'A' && in <= 'Z') ? (char)(in - ('Z' - 'z')) : in;
}
// Select the best subtitle stream
// Walks the selector list in order; the first selector that applies to the
// current audio language and collects at least one stream wins, and the first
// stream it collected is returned. Falls back to the synthetic "no subtitles"
// stream when nothing matches or the selector says "off".
const CBaseDemuxer::stream *CLAVFDemuxer::SelectSubtitleStream(std::list<CSubtitleSelector> subtitleSelectors,
                                                               std::string audioLanguage)
{
    const stream *best = nullptr;
    CStreamList *streams = GetStreams(subpic);

    std::deque<stream *> checkedStreams;

    std::list<CSubtitleSelector>::iterator it = subtitleSelectors.begin();
    for (it = subtitleSelectors.begin(); it != subtitleSelectors.end() && checkedStreams.empty(); it++)
    {
        // A selector only applies when its audio-language constraint matches
        if (!does_language_match(it->audioLanguage, audioLanguage))
            continue;

        if (it->subtitleLanguage == "off")
            break;

        // lower-case version of the trackname query
        std::string subtitleTrackNameQueryLower = it->subtitleTrackName;
        if (subtitleTrackNameQueryLower.empty() == false)
            std::transform(subtitleTrackNameQueryLower.begin(), subtitleTrackNameQueryLower.end(),
                           subtitleTrackNameQueryLower.begin(), asciitolower);

        std::deque<stream>::iterator sit;
        for (sit = streams->begin(); sit != streams->end(); sit++)
        {
            if (sit->pid == NO_SUBTITLE_PID)
                continue;

            // Optional case-insensitive (ASCII) substring filter on the track name
            if (!subtitleTrackNameQueryLower.empty())
            {
                // create lowercase version of the track name
                std::string trackNameLower = sit->trackName;
                std::transform(trackNameLower.begin(), trackNameLower.end(), trackNameLower.begin(), asciitolower);
                if (trackNameLower.find(subtitleTrackNameQueryLower) == std::string::npos)
                    continue;
            }

            // The synthetic forced-subtitle stream only matches selectors that
            // accept virtual streams (or set no flags at all)
            if (sit->pid == FORCED_SUBTITLE_PID)
            {
                if ((it->dwFlagsSet == 0 || it->dwFlagsSet & SUBTITLE_FLAG_VIRTUAL) &&
                    does_language_match(it->subtitleLanguage, audioLanguage))
                    checkedStreams.push_back(&*sit);
                continue;
            }

            bool streamIsDefault = m_avFormat->streams[sit->pid]->disposition & AV_DISPOSITION_DEFAULT;
            bool streamIsForced = m_avFormat->streams[sit->pid]->disposition & AV_DISPOSITION_FORCED;
            bool streamIsImpaired = m_avFormat->streams[sit->pid]->disposition &
                                    (AV_DISPOSITION_HEARING_IMPAIRED | AV_DISPOSITION_VISUAL_IMPAIRED);
            bool streamIsNormal = !streamIsDefault && !streamIsForced && !streamIsImpaired;
            bool streamIsPgsFormat =
                (m_avFormat->streams[sit->pid]->codecpar->codec_id == AV_CODEC_ID_HDMV_PGS_SUBTITLE);

            // The stream must carry at least one of the requested flags (or
            // the selector requests none), and none of the excluded flags
            bool flagsSetMatch =
                ((it->dwFlagsSet == 0) ||
                 ((it->dwFlagsSet & SUBTITLE_FLAG_DEFAULT) && streamIsDefault) ||
                 ((it->dwFlagsSet & SUBTITLE_FLAG_FORCED) && streamIsForced) ||
                 ((it->dwFlagsSet & SUBTITLE_FLAG_IMPAIRED) && streamIsImpaired) ||
                 ((it->dwFlagsSet & SUBTITLE_FLAG_PGS) && streamIsPgsFormat) ||
                 ((it->dwFlagsSet & SUBTITLE_FLAG_NORMAL) && streamIsNormal)
                );
            bool flagsNotMatch =
                (!((it->dwFlagsNot & SUBTITLE_FLAG_DEFAULT) && streamIsDefault) &&
                 !((it->dwFlagsNot & SUBTITLE_FLAG_FORCED) && streamIsForced) &&
                 !((it->dwFlagsNot & SUBTITLE_FLAG_IMPAIRED) && streamIsImpaired) &&
                 !((it->dwFlagsNot & SUBTITLE_FLAG_NORMAL) && streamIsNormal)
                );

            if (flagsSetMatch && flagsNotMatch)
            {
                std::string streamLanguage = sit->language;
                if (does_language_match(it->subtitleLanguage, streamLanguage))
                    checkedStreams.push_back(&*sit);
            }
        }
    }

    if (!checkedStreams.empty())
        best = streams->FindStream(checkedStreams.front()->pid);
    else
        best = streams->FindStream(NO_SUBTITLE_PID);

    return best;
}
#include "libavformat/isom.h"
// Report per-stream delivery flags (DTS-only timestamps, MKV RealVideo quirk,
// live/network source) for the given stream index. Returns 0 on invalid input.
STDMETHODIMP_(DWORD) CLAVFDemuxer::GetStreamFlags(DWORD dwStream)
{
    if (!m_avFormat || dwStream >= m_avFormat->nb_streams)
        return 0;

    DWORD dwFlags = 0;
    AVStream *st = m_avFormat->streams[dwStream];

    if (strcmp(m_pszInputFormat, "rawvideo") == 0)
        dwFlags |= LAV_STREAM_FLAG_ONLY_DTS;

    // H.264: DTS-only in AVI/PMP, in Matroska when the extradata is not
    // AVCC-style (first byte != 1), and in MP4 without a ctts (composition time) table
    if (st->codecpar->codec_id == AV_CODEC_ID_H264 &&
        (m_bAVI || m_bPMP || (m_bMatroska && (!st->codecpar->extradata_size || st->codecpar->extradata[0] != 1)) ||
         (m_bMP4 && st->priv_data && ((MOVStreamContext *)st->priv_data)->ctts_count == 0)))
        dwFlags |= LAV_STREAM_FLAG_ONLY_DTS;

    // HEVC: DTS-only in AVI, and in MP4 without a ctts table
    if (st->codecpar->codec_id == AV_CODEC_ID_HEVC &&
        (m_bAVI || (m_bMP4 && st->priv_data && ((MOVStreamContext *)st->priv_data)->ctts_count == 0)))
        dwFlags |= LAV_STREAM_FLAG_ONLY_DTS;

    // RealVideo 3/4 in Matroska needs special handling downstream
    if (m_bMatroska && (st->codecpar->codec_id == AV_CODEC_ID_RV30 || st->codecpar->codec_id == AV_CODEC_ID_RV40))
        dwFlags |= LAV_STREAM_FLAG_RV34_MKV;

    if (m_avFormat->flags & AVFMT_FLAG_NETWORK)
        dwFlags |= LAV_STREAM_FLAG_LIVE;

    return dwFlags;
}
// Return the AVPixelFormat of the given stream, or AV_PIX_FMT_NONE when the
// demuxer is not open or the index is out of range.
STDMETHODIMP_(int) CLAVFDemuxer::GetPixelFormat(DWORD dwStream)
{
    if (m_avFormat && dwStream < m_avFormat->nb_streams)
        return m_avFormat->streams[dwStream]->codecpar->format;
    return AV_PIX_FMT_NONE;
}
// Return the stream's video_delay (number of frames of reordering delay),
// or -1 when the demuxer is not open or the index is out of range.
STDMETHODIMP_(int) CLAVFDemuxer::GetHasBFrames(DWORD dwStream)
{
    if (m_avFormat && dwStream < m_avFormat->nb_streams)
        return m_avFormat->streams[dwStream]->codecpar->video_delay;
    return -1;
}
// Expose the FFmpeg coded side data of a stream through the
// IID_MediaSideDataFFMpeg GUID; other GUIDs are rejected.
STDMETHODIMP CLAVFDemuxer::GetSideData(DWORD dwStream, GUID guidType, const BYTE **pData, size_t *pSize)
{
    if (!m_avFormat || dwStream >= m_avFormat->nb_streams)
        return E_INVALIDARG;

    if (guidType != IID_MediaSideDataFFMpeg)
        return E_INVALIDARG;

    CBaseDemuxer::stream *pStream = FindStream(dwStream);
    if (!pStream)
        return E_FAIL;

    // Point the stream's side-data descriptor at the codec parameters and
    // hand out a pointer to it
    AVCodecParameters *par = m_avFormat->streams[dwStream]->codecpar;
    pStream->SideData.side_data = par->coded_side_data;
    pStream->SideData.side_data_elems = par->nb_coded_side_data;

    *pData = (BYTE *)&pStream->SideData;
    *pSize = sizeof(pStream->SideData);
    return S_OK;
}
// Retrieve a metadata value as a newly allocated BSTR.
// key: the metadata key to look up; stream: stream index, or negative for
// file-level metadata. The "rotate" key is synthesized from the display-matrix
// side data when present. Returns VFW_E_NOT_FOUND when the key is absent.
STDMETHODIMP CLAVFDemuxer::GetBSTRMetadata(const char *key, BSTR *pbstrValue, int stream)
{
    if (!m_avFormat)
        return VFW_E_NOT_FOUND;

    if (stream >= (int)m_avFormat->nb_streams)
        return E_INVALIDARG;

    // Special case: derive "rotate" from AV_PKT_DATA_DISPLAYMATRIX side data
    if (_stricmp(key, "rotate") == 0 && stream >= 0)
    {
        const AVPacketSideData *sd = av_packet_side_data_get(
            m_avFormat->streams[stream]->codecpar->coded_side_data,
            m_avFormat->streams[stream]->codecpar->nb_coded_side_data, AV_PKT_DATA_DISPLAYMATRIX);
        // a display matrix is 9 32-bit values
        if (sd && sd->size == (9 * sizeof(int32_t)))
        {
            double dRotation = av_display_rotation_get((const int32_t *)sd->data);
            // av_display_rotation_get reports counter-clockwise; negate for clockwise
            int nRotation = -lrint(dRotation);

            // normalize rotation to 0-360
            while (nRotation < 0)
                nRotation += 360;
            while (nRotation >= 360)
                nRotation -= 360;

            char buf[34] = {0};
            sprintf_s(buf, "%d", nRotation);

            *pbstrValue = ConvertCharToBSTR(buf);
            if (*pbstrValue == nullptr)
                return E_OUTOFMEMORY;

            return S_OK;
        }
    }

    // Generic lookup in stream metadata (or file metadata for stream < 0)
    AVDictionaryEntry *entry = av_dict_get(stream >= 0 ? m_avFormat->streams[stream]->metadata : m_avFormat->metadata,
                                           key, nullptr, AV_DICT_IGNORE_SUFFIX);

    if (!entry || !entry->value || entry->value[0] == '\0')
        return VFW_E_NOT_FOUND;

    *pbstrValue = ConvertCharToBSTR(entry->value);
    if (*pbstrValue == nullptr)
        return E_OUTOFMEMORY;

    return S_OK;
}
| 111,853
|
C++
|
.cpp
| 2,827
| 29.388044
| 140
| 0.559858
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| true
| true
| false
|
22,080
|
ExtradataParser.cpp
|
Nevcairiel_LAVFilters/demuxer/Demuxers/ExtradataParser.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "ExtradataParser.h"
#define MARKER \
if (BitRead(1) != 1) \
{ \
ASSERT(0); \
return 0; \
}
// Construct a bit parser over the raw extradata buffer; the buffer remains
// owned by the caller and must outlive this parser.
CExtradataParser::CExtradataParser(BYTE *pExtradata, size_t extra_len)
    : CByteParser(pExtradata, extra_len)
{
}
// No resources to release; the extradata buffer is owned by the caller.
CExtradataParser::~CExtradataParser()
{
}
// Advance (byte-aligned) to the next MPEG start code (00 00 01 xx) and return
// its code byte in 'code'. Returns false when the data runs out first.
bool CExtradataParser::NextMPEGStartCode(BYTE &code)
{
    BitByteAlign();
    DWORD dw = (DWORD)-1;
    // Slide a 4-byte window until it matches the 00 00 01 xx pattern
    while ((dw & 0xffffff00) != 0x00000100)
    {
        if (!Remaining())
            return false;
        dw = (dw << 8) | (BYTE)BitRead(8);
    }
    code = (BYTE)(dw & 0xff);
    return true;
}
// Locate the MPEG sequence header (start code 0xB3) in the extradata, and copy
// it — plus a directly following sequence extension (0xB5), if present —
// back-to-back into pTarget. Returns the number of bytes copied, or 0 when no
// sequence header was found or a marker-bit check failed.
// NOTE: pTarget must be large enough to hold both sections; no bounds check here.
size_t CExtradataParser::ParseMPEGSequenceHeader(BYTE *pTarget)
{
    BYTE id = 0;
    // Skip forward to the sequence header start code
    while (Remaining() && id != 0xb3)
    {
        if (!NextMPEGStartCode(id))
        {
            return 0;
        }
    }
    if (id != 0xb3)
    {
        return 0;
    }

    size_t shpos = Pos() - 4; // back up to include the 4-byte start code
    BitRead(12); // Width
    BitRead(12); // Height
    BitRead(4);  // AR
    BitRead(4);  // FPS
    BitRead(18); // Bitrate
    MARKER;
    BitRead(10); // VBV
    BitRead(1);  // Constrained Flag

    // intra quantisizer matrix
    if (BitRead(1))
    {
        for (uint8_t i = 0; i < 64; i++)
        {
            BitRead(8);
        }
    }

    // non-intra quantisizer matrix
    if (BitRead(1))
    {
        for (uint8_t i = 0; i < 64; i++)
        {
            BitRead(8);
        }
    }

    size_t shlen = Pos() - shpos;
    size_t shextpos = 0;
    size_t shextlen = 0;

    if (NextMPEGStartCode(id) && id == 0xb5)
    { // sequence header ext
        shextpos = Pos() - 4;
        int startcode = BitRead(4); // Start Code Id; TODO: Different start code ids mean different length of data
        ASSERT(startcode == 1);
        BitRead(1);  // Profile Level Escape
        BitRead(3);  // Profile
        BitRead(4);  // Level
        BitRead(1);  // Progressive
        BitRead(2);  // Chroma
        BitRead(2);  // Width Extension
        BitRead(2);  // Height Extension
        BitRead(12); // Bitrate Extension
        MARKER;
        BitRead(8); // VBV Buffer Size Extension
        BitRead(1); // Low Delay
        BitRead(2); // FPS Extension n
        BitRead(5); // FPS Extension d
        shextlen = Pos() - shextpos;
    }

    // Copy the header (and extension, if found) contiguously into pTarget
    memcpy(pTarget, Start() + shpos, shlen);
    if (shextpos)
    {
        memcpy(pTarget + shlen, Start() + shextpos, shextlen);
    }
    return shlen + shextlen;
}
| 3,294
|
C++
|
.cpp
| 115
| 23.121739
| 114
| 0.590076
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| false
| true
| true
| false
|
22,081
|
LAVFInputFormats.cpp
|
Nevcairiel_LAVFilters/demuxer/Demuxers/LAVFInputFormats.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
static const char *RAW_VIDEO = "rawvideo";
static const char *RAW_VIDEO_DESC = "raw video files";
static const char *RAW_AUDIO = "rawaudio";
static const char *RAW_AUDIO_DESC = "raw audio files";
// Mapping table to rename, regroup, or disable FFmpeg input formats as
// exposed by LAV Splitter (consumed by lavf_get_iformat_infos below).
struct lavf_iformat_map
{
    const char *orig_format;     // FFmpeg demuxer name to match
    const char *new_format;      // exposed name (nullptr = format disabled)
    const char *new_description; // exposed description (nullptr = keep FFmpeg's long_name)
} lavf_input_formats[] = {
    // Shorten these formats
    {"matroska,webm", "matroska", nullptr},
    {"mov,mp4,m4a,3gp,3g2,mj2", "mp4", "MPEG-4/QuickTime format"},
    {"yuv4mpegpipe", "y4m", "YUV4MPEG"},
    {"asf_o", "asf", nullptr},

    // Raw Video formats (grouped into "rawvideo")
    {"dirac", RAW_VIDEO, RAW_VIDEO_DESC},
    {"dnxhd", RAW_VIDEO, RAW_VIDEO_DESC},
    {"h261", RAW_VIDEO, RAW_VIDEO_DESC},
    {"h263", RAW_VIDEO, RAW_VIDEO_DESC},
    {"h264", RAW_VIDEO, RAW_VIDEO_DESC},
    {"hevc", RAW_VIDEO, RAW_VIDEO_DESC},
    {"ingenient", RAW_VIDEO, RAW_VIDEO_DESC},
    {"mjpeg", RAW_VIDEO, RAW_VIDEO_DESC},
    {"vc1", RAW_VIDEO, RAW_VIDEO_DESC},

    // Raw Audio Formats (grouped into "rawaudio")
    {"f32be", RAW_AUDIO, RAW_AUDIO_DESC},
    {"f32le", RAW_AUDIO, RAW_AUDIO_DESC},
    {"f64be", RAW_AUDIO, RAW_AUDIO_DESC},
    {"f64le", RAW_AUDIO, RAW_AUDIO_DESC},
    {"g722", RAW_AUDIO, RAW_AUDIO_DESC},
    {"gsm", RAW_AUDIO, RAW_AUDIO_DESC},
    {"s16be", RAW_AUDIO, RAW_AUDIO_DESC},
    {"s16le", RAW_AUDIO, RAW_AUDIO_DESC},
    {"s24be", RAW_AUDIO, RAW_AUDIO_DESC},
    {"s24le", RAW_AUDIO, RAW_AUDIO_DESC},
    {"s32be", RAW_AUDIO, RAW_AUDIO_DESC},
    {"s32le", RAW_AUDIO, RAW_AUDIO_DESC},
    {"s8", RAW_AUDIO, RAW_AUDIO_DESC},
    {"u16be", RAW_AUDIO, RAW_AUDIO_DESC},
    {"u16le", RAW_AUDIO, RAW_AUDIO_DESC},
    {"u24be", RAW_AUDIO, RAW_AUDIO_DESC},
    {"u24le", RAW_AUDIO, RAW_AUDIO_DESC},
    {"u32be", RAW_AUDIO, RAW_AUDIO_DESC},
    {"u32le", RAW_AUDIO, RAW_AUDIO_DESC},
    {"u8", RAW_AUDIO, RAW_AUDIO_DESC},

    {"image2", "image2", "Image Files"},
    {"image2pipe", "image2", "Image Files"},

    // Disabled Formats
    {"ffm", nullptr, nullptr},
    {"ffmetadata", nullptr, nullptr},
    {"mpegtsraw", nullptr, nullptr},
    {"spdif", nullptr, nullptr},
    {"tty", nullptr, nullptr},
    {"vc1test", nullptr, nullptr},
};
void lavf_get_iformat_infos(const AVInputFormat *pFormat, const char **pszName, const char **pszDescription)
{
const char *name = pFormat->name;
const char *desc = pFormat->long_name;
for (int i = 0; i < countof(lavf_input_formats); ++i)
{
if (strcmp(lavf_input_formats[i].orig_format, name) == 0)
{
name = lavf_input_formats[i].new_format;
if (lavf_input_formats[i].new_description)
desc = lavf_input_formats[i].new_description;
break;
}
}
if (pszName)
*pszName = name;
if (pszDescription)
*pszDescription = desc;
}
| 3,656
|
C++
|
.cpp
| 94
| 34.297872
| 108
| 0.646777
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| true
| true
| true
| false
|
22,082
|
StreamInfo.cpp
|
Nevcairiel_LAVFilters/demuxer/Demuxers/StreamInfo.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "StreamInfo.h"
// Default constructor; members are initialized by their declarations.
CStreamInfo::CStreamInfo()
{
}
// Nothing to release here.
CStreamInfo::~CStreamInfo()
{
}
| 921
|
C++
|
.cpp
| 26
| 33.692308
| 75
| 0.742441
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| true
| true
| true
| false
|
22,086
|
LAVSplitterTrayIcon.cpp
|
Nevcairiel_LAVFilters/demuxer/LAVSplitter/LAVSplitterTrayIcon.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "LAVSplitterTrayIcon.h"
#include "PopupMenu.h"
#include <Qnetwork.h>
#define STREAM_CMD_OFFSET 100
#define CHAPTER_CMD_OFFSET 500
// Construct the splitter's tray icon; all work is delegated to the base class.
CLAVSplitterTrayIcon::CLAVSplitterTrayIcon(IBaseFilter *pFilter, const WCHAR *wszName, int resIcon)
    : CBaseTrayIcon(pFilter, wszName, resIcon)
{
}
// Cleanup is handled by the base class destructor.
CLAVSplitterTrayIcon::~CLAVSplitterTrayIcon(void)
{
}
// Build the tray icon popup menu: one entry per selectable stream (with group
// headers), a chapter submenu when the file has chapters, and a final
// "Properties" item. The caller takes ownership of the returned HMENU.
// Fix: the call to get_CurrentMarker contained a mis-encoded character
// sequence where "&current" belonged; the address-of expression is restored.
HMENU CLAVSplitterTrayIcon::GetPopupMenu()
{
    CheckPointer(m_pFilter, nullptr);
    CPopupMenu menu;

    // Stream entries, via IAMStreamSelect
    IAMStreamSelect *pStreamSelect = nullptr;
    if (SUCCEEDED(m_pFilter->QueryInterface(&pStreamSelect)))
    {
        DWORD dwStreams = 0;
        if (FAILED(pStreamSelect->Count(&dwStreams)))
            dwStreams = 0;

        DWORD dwLastGroup = DWORD_MAX;
        for (DWORD i = 0; i < dwStreams; i++)
        {
            DWORD dwFlags = 0, dwGroup = 0;
            LPWSTR pszName = nullptr;
            if (FAILED(pStreamSelect->Info(i, nullptr, &dwFlags, nullptr, &dwGroup, &pszName, nullptr, nullptr)))
                continue;

            // Insert a header item whenever the stream group changes
            if (dwGroup != dwLastGroup)
            {
                switch (dwGroup)
                {
                case 0: menu.AddItem(dwGroup, L"Video", FALSE, FALSE); break;
                case 1: menu.AddItem(dwGroup, L"Audio", FALSE, FALSE); break;
                case 2: menu.AddItem(dwGroup, L"Subtitles", FALSE, FALSE); break;
                case 18: menu.AddItem(dwGroup, L"Editions", FALSE, FALSE); break;
                default: menu.AddSeparator(); break;
                }
                dwLastGroup = dwGroup;
            }
            menu.AddItem(STREAM_CMD_OFFSET + i, pszName, dwFlags & AMSTREAMSELECTINFO_ENABLED);
            CoTaskMemFree(pszName);
        }
        if (dwStreams)
            menu.AddSeparator();
        m_NumStreams = dwStreams;
        SafeRelease(&pStreamSelect);
    }

    // Chapters
    IAMExtendedSeeking *pExSeeking = nullptr;
    if (SUCCEEDED(m_pFilter->QueryInterface(IID_IAMExtendedSeeking, (void **)&pExSeeking)))
    {
        long count = 0, current = 0;
        if (FAILED(pExSeeking->get_MarkerCount(&count)))
            count = 0;
        if (FAILED(pExSeeking->get_CurrentMarker(&current)))
            current = 0;

        CPopupMenu chapters;
        for (long i = 1; i <= count; i++)
        {
            double markerTime;
            if (FAILED(pExSeeking->GetMarkerTime(i, &markerTime)))
                continue;
            BSTR bstrName = nullptr;
            if (FAILED(pExSeeking->GetMarkerName(i, &bstrName)))
                continue;

            // Create compound chapter name: "<name>\t[hh:mm:ss]"
            int total_seconds = (int)(markerTime + 0.5);
            int seconds = total_seconds % 60;
            int minutes = total_seconds / 60 % 60;
            int hours = total_seconds / 3600;

            WCHAR chapterName[512];
            _snwprintf_s(chapterName, _TRUNCATE, L"%s\t[%02d:%02d:%02d]", bstrName, hours, minutes, seconds);

            // Sanitize any tab chars in the chapter name (replace by space)
            // More then one tab in the string messes with the popup menu rendering
            WCHAR *nextMatch, *tabMatch = wcsstr(chapterName, L"\t");
            while (nextMatch = wcsstr(tabMatch + 1, L"\t"))
            {
                *tabMatch = L' ';
                tabMatch = nextMatch;
            }

            chapters.AddItem(CHAPTER_CMD_OFFSET + i, chapterName, i == current);
            SysFreeString(bstrName);
        }
        if (count)
        {
            menu.AddSubmenu(chapters.Finish(), L"Chapters");
            menu.AddSeparator();
        }
        m_NumChapters = count;
        SafeRelease(&pExSeeking);
    }

    menu.AddItem(STREAM_CMD_OFFSET - 1, L"Properties");

    HMENU hMenu = menu.Finish();
    return hMenu;
}
// Dispatch a tray-menu command id produced by GetPopupMenu: stream selection
// (STREAM_CMD_OFFSET range), chapter seek (CHAPTER_CMD_OFFSET range), or the
// properties dialog. Returns E_UNEXPECTED for unknown ids, E_FAIL if no filter.
HRESULT CLAVSplitterTrayIcon::ProcessMenuCommand(HMENU hMenu, int cmd)
{
    CheckPointer(m_pFilter, E_FAIL);
    DbgLog((LOG_TRACE, 10, L"Menu Command %d", cmd));
    if (cmd >= STREAM_CMD_OFFSET && cmd < m_NumStreams + STREAM_CMD_OFFSET)
    {
        // Stream selection: enable the chosen stream via IAMStreamSelect
        IAMStreamSelect *pStreamSelect = nullptr;
        if (SUCCEEDED(m_pFilter->QueryInterface(&pStreamSelect)))
        {
            pStreamSelect->Enable(cmd - STREAM_CMD_OFFSET, AMSTREAMSELECTENABLE_ENABLE);
            SafeRelease(&pStreamSelect);
        }
    }
    else if (cmd > CHAPTER_CMD_OFFSET && cmd <= m_NumChapters + CHAPTER_CMD_OFFSET)
    {
        // Chapter selection: resolve the marker time, then seek the whole graph
        IAMExtendedSeeking *pExSeeking = nullptr;
        if (SUCCEEDED(m_pFilter->QueryInterface(IID_IAMExtendedSeeking, (void **)&pExSeeking)))
        {
            double markerTime;
            if (FAILED(pExSeeking->GetMarkerTime(cmd - CHAPTER_CMD_OFFSET, &markerTime)))
                goto failchapterseek;
            // Marker time is in seconds; REFERENCE_TIME is in 100ns units
            REFERENCE_TIME rtMarkerTime = (REFERENCE_TIME)(markerTime * 10000000.0);
            // Try to get the graph to seek on, its much safer than directly trying to seek on LAV
            FILTER_INFO info;
            if (FAILED(m_pFilter->QueryFilterInfo(&info)) || !info.pGraph)
                goto failchapterseek;
            IMediaSeeking *pSeeking = nullptr;
            if (SUCCEEDED(info.pGraph->QueryInterface(&pSeeking)))
            {
                pSeeking->SetPositions(&rtMarkerTime, AM_SEEKING_AbsolutePositioning, nullptr,
                                       AM_SEEKING_NoPositioning);
                SafeRelease(&pSeeking);
            }
            SafeRelease(&info.pGraph);
        // Shared cleanup target for all early-failure paths above
        failchapterseek:
            SafeRelease(&pExSeeking);
        }
    }
    else if (cmd == STREAM_CMD_OFFSET - 1)
    {
        OpenPropPage();
    }
    else
    {
        return E_UNEXPECTED;
    }
    return S_OK;
}
| 6,444
|
C++
|
.cpp
| 164
| 30.329268
| 113
| 0.610259
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| true
| true
| true
| false
|
22,087
|
SettingsProp.cpp
|
Nevcairiel_LAVFilters/demuxer/LAVSplitter/SettingsProp.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include <assert.h>
#include <Commctrl.h>
#include "SettingsProp.h"
#include "DShowUtil.h"
#include "version.h"
#define LANG_BUFFER_SIZE 256
// Settings property page; the base class wires up the dialog resource and title.
CLAVSplitterSettingsProp::CLAVSplitterSettingsProp(LPUNKNOWN pUnk, HRESULT *phr)
    : CBaseDSPropPage(NAME("LAVF Settings"), pUnk, IDD_PROPPAGE_LAVFSETTINGS, IDS_PAGE_TITLE)
{
}
// Free the CoTaskMem-allocated language/config strings and drop the filter ref.
CLAVSplitterSettingsProp::~CLAVSplitterSettingsProp(void)
{
    SAFE_CO_FREE(m_pszPrefLang);
    SAFE_CO_FREE(m_pszPrefSubLang);
    SAFE_CO_FREE(m_pszAdvSubConfig);
    SafeRelease(&m_pLAVF);
}
// Attach the page to a filter instance: acquire its settings interface.
HRESULT CLAVSplitterSettingsProp::OnConnect(IUnknown *pUnk)
{
    if (!pUnk)
        return E_POINTER;

    ASSERT(m_pLAVF == nullptr);
    return pUnk->QueryInterface(&m_pLAVF);
}
// Detach from the filter: release the settings interface acquired in OnConnect.
HRESULT CLAVSplitterSettingsProp::OnDisconnect()
{
    SafeRelease(&m_pLAVF);
    return S_OK;
}
// Push every dialog control's current value into the filter via ILAVFSettings.
// CHECK_HR jumps to "done" on the first failure, so later settings are skipped;
// on success LoadData() re-reads the committed state so dirty-tracking resets.
HRESULT CLAVSplitterSettingsProp::OnApplyChanges()
{
    ASSERT(m_pLAVF != nullptr);
    HRESULT hr = S_OK;
    DWORD dwVal;
    BOOL bFlag;
    WCHAR buffer[LANG_BUFFER_SIZE];
    // Save audio language
    SendDlgItemMessage(m_Dlg, IDC_PREF_LANG, WM_GETTEXT, LANG_BUFFER_SIZE, (LPARAM)&buffer);
    CHECK_HR(hr = m_pLAVF->SetPreferredLanguages(buffer));
    // Save subtitle language. In advanced mode the edit box holds the advanced
    // config string, and the plain language list lives in m_subLangBuffer
    // (and vice versa) — see UpdateSubtitleMode's buffer swapping.
    SendDlgItemMessage(m_Dlg, IDC_PREF_LANG_SUBS, WM_GETTEXT, LANG_BUFFER_SIZE, (LPARAM)&buffer);
    if (m_selectedSubMode == LAVSubtitleMode_Advanced)
    {
        CHECK_HR(hr = m_pLAVF->SetPreferredSubtitleLanguages(m_subLangBuffer));
        CHECK_HR(hr = m_pLAVF->SetAdvancedSubtitleConfig(buffer));
    }
    else
    {
        CHECK_HR(hr = m_pLAVF->SetPreferredSubtitleLanguages(buffer));
        CHECK_HR(hr = m_pLAVF->SetAdvancedSubtitleConfig(m_advSubBuffer));
    }
    // Save subtitle mode
    dwVal = (DWORD)SendDlgItemMessage(m_Dlg, IDC_SUBTITLE_MODE, CB_GETCURSEL, 0, 0);
    CHECK_HR(hr = m_pLAVF->SetSubtitleMode((LAVSubtitleMode)dwVal));
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_BD_SEPARATE_FORCED_SUBS, BM_GETCHECK, 0, 0);
    CHECK_HR(hr = m_pLAVF->SetPGSForcedStream(bFlag));
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_BD_ONLY_FORCED_SUBS, BM_GETCHECK, 0, 0);
    CHECK_HR(hr = m_pLAVF->SetPGSOnlyForced(bFlag));
    // VC-1 mode is tri-state (checked/unchecked/indeterminate), hence int not BOOL
    int vc1flag = (int)SendDlgItemMessage(m_Dlg, IDC_VC1TIMESTAMP, BM_GETCHECK, 0, 0);
    CHECK_HR(hr = m_pLAVF->SetVC1TimestampMode(vc1flag));
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_MKV_EXTERNAL, BM_GETCHECK, 0, 0);
    CHECK_HR(hr = m_pLAVF->SetLoadMatroskaExternalSegments(bFlag));
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_SUBSTREAMS, BM_GETCHECK, 0, 0);
    CHECK_HR(hr = m_pLAVF->SetSubstreamsEnabled(bFlag));
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_STREAM_SWITCH_REMOVE_AUDIO, BM_GETCHECK, 0, 0);
    CHECK_HR(hr = m_pLAVF->SetStreamSwitchRemoveAudio(bFlag));
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_STREAM_SWITCH_RESELECT_SUBS, BM_GETCHECK, 0, 0);
    CHECK_HR(hr = m_pLAVF->SetStreamSwitchReselectSubtitles(bFlag));
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_SELECT_AUDIO_QUALITY, BM_GETCHECK, 0, 0);
    CHECK_HR(hr = m_pLAVF->SetPreferHighQualityAudioStreams(bFlag));
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_IMPAIRED_AUDIO, BM_GETCHECK, 0, 0);
    CHECK_HR(hr = m_pLAVF->SetUseAudioForHearingVisuallyImpaired(bFlag));
    // Numeric edit controls: queue limits and network analysis duration
    SendDlgItemMessage(m_Dlg, IDC_QUEUE_MEM, WM_GETTEXT, LANG_BUFFER_SIZE, (LPARAM)&buffer);
    int maxMem = _wtoi(buffer);
    CHECK_HR(hr = m_pLAVF->SetMaxQueueMemSize(maxMem));
    SendDlgItemMessage(m_Dlg, IDC_QUEUE_PACKETS, WM_GETTEXT, LANG_BUFFER_SIZE, (LPARAM)&buffer);
    int maxPackets = _wtoi(buffer);
    CHECK_HR(hr = m_pLAVF->SetMaxQueueSize(maxPackets));
    SendDlgItemMessage(m_Dlg, IDC_STREAM_ANADUR, WM_GETTEXT, LANG_BUFFER_SIZE, (LPARAM)&buffer);
    int duration = _wtoi(buffer);
    CHECK_HR(hr = m_pLAVF->SetNetworkStreamAnalysisDuration(duration));
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_TRAYICON, BM_GETCHECK, 0, 0);
    CHECK_HR(hr = m_pLAVF->SetTrayIcon(bFlag));
    // Re-read the filter state so the page reflects what was actually applied
    LoadData();
done:
    return hr;
}
// Show a short explanatory note for the selected subtitle mode and, when
// switching into or out of Advanced mode, swap the subtitle edit box between
// the plain language list (m_subLangBuffer) and the advanced configuration
// string (m_advSubBuffer) so each mode keeps its own text.
void CLAVSplitterSettingsProp::UpdateSubtitleMode(LAVSubtitleMode mode)
{
    WCHAR *note;
    switch (mode)
    {
    case LAVSubtitleMode_NoSubs:
        note = L"No subtitles: Subtitles are disabled and will not be loaded by default.";
        break;
    case LAVSubtitleMode_ForcedOnly:
        note = L"Only Forced Subtitles: Only subtitles marked as \"forced\" will be loaded.";
        break;
    case LAVSubtitleMode_Default:
        note = L"Default Mode: Subtitles matching the preferred languages, as well as \"default\" and "
               L"\"forced\" subtitles will be loaded.";
        break;
    case LAVSubtitleMode_Advanced:
        note = L"Advanced Mode: Refer to the README or the documentation on http://1f0.de for details.";
        break;
    default:
        note = L"";
        break;
    }
    SendDlgItemMessage(m_Dlg, IDC_SUBTITLE_NOTE, WM_SETTEXT, 0, (LPARAM)note);

    const LAVSubtitleMode previousMode = m_selectedSubMode;
    m_selectedSubMode = mode;

    if (previousMode != mode)
    {
        if (previousMode == LAVSubtitleMode_Advanced)
        {
            // Leaving advanced mode: stash the advanced config, restore languages
            SendDlgItemMessage(m_Dlg, IDC_PREF_LANG_SUBS, WM_GETTEXT, LANG_BUFFER_SIZE, (LPARAM)&m_advSubBuffer);
            SendDlgItemMessage(m_Dlg, IDC_PREF_LANG_SUBS, WM_SETTEXT, 0, (LPARAM)&m_subLangBuffer);
        }
        else if (mode == LAVSubtitleMode_Advanced)
        {
            // Entering advanced mode: stash the language list, restore the config
            SendDlgItemMessage(m_Dlg, IDC_PREF_LANG_SUBS, WM_GETTEXT, LANG_BUFFER_SIZE, (LPARAM)&m_subLangBuffer);
            SendDlgItemMessage(m_Dlg, IDC_PREF_LANG_SUBS, WM_SETTEXT, 0, (LPARAM)&m_advSubBuffer);
        }
    }
}
// Initialize the dialog when the page becomes visible: load the current
// filter settings, populate every control, install tooltip hints, and set
// up the subtitle-mode dependent buffers.
HRESULT CLAVSplitterSettingsProp::OnActivate()
{
    HRESULT hr = S_OK;
    INITCOMMONCONTROLSEX icc;
    icc.dwSize = sizeof(INITCOMMONCONTROLSEX);
    icc.dwICC = ICC_BAR_CLASSES | ICC_STANDARD_CLASSES;
    if (InitCommonControlsEx(&icc) == FALSE)
    {
        return E_FAIL;
    }
    ASSERT(m_pLAVF != nullptr);
    const WCHAR *version = TEXT(LAV_SPLITTER) L" " TEXT(LAV_VERSION_STR);
    SendDlgItemMessage(m_Dlg, IDC_SPLITTER_FOOTER, WM_SETTEXT, 0, (LPARAM)version);
    hr = LoadData();
    // BUGFIX: was sizeof(m_advSubBuffer) — copy-paste error; clear each buffer
    // with its own size so m_subLangBuffer is fully zeroed regardless of size.
    memset(m_subLangBuffer, 0, sizeof(m_subLangBuffer));
    memset(m_advSubBuffer, 0, sizeof(m_advSubBuffer));
    m_selectedSubMode = LAVSubtitleMode_Default;
    if (m_pszAdvSubConfig)
        wcsncpy_s(m_advSubBuffer, m_pszAdvSubConfig, _TRUNCATE);
    // Notify the UI about those settings
    SendDlgItemMessage(m_Dlg, IDC_PREF_LANG, WM_SETTEXT, 0, (LPARAM)m_pszPrefLang);
    SendDlgItemMessage(m_Dlg, IDC_PREF_LANG_SUBS, WM_SETTEXT, 0, (LPARAM)m_pszPrefSubLang);
    // Init the Combo Box (order must match the LAVSubtitleMode enum values)
    SendDlgItemMessage(m_Dlg, IDC_SUBTITLE_MODE, CB_RESETCONTENT, 0, 0);
    WideStringFromResource(stringBuffer, IDS_SUBMODE_NO_SUBS);
    SendDlgItemMessage(m_Dlg, IDC_SUBTITLE_MODE, CB_ADDSTRING, 0, (LPARAM)stringBuffer);
    WideStringFromResource(stringBuffer, IDS_SUBMODE_FORCED_SUBS);
    SendDlgItemMessage(m_Dlg, IDC_SUBTITLE_MODE, CB_ADDSTRING, 0, (LPARAM)stringBuffer);
    WideStringFromResource(stringBuffer, IDS_SUBMODE_DEFAULT);
    SendDlgItemMessage(m_Dlg, IDC_SUBTITLE_MODE, CB_ADDSTRING, 0, (LPARAM)stringBuffer);
    WideStringFromResource(stringBuffer, IDS_SUBMODE_ADVANCED);
    SendDlgItemMessage(m_Dlg, IDC_SUBTITLE_MODE, CB_ADDSTRING, 0, (LPARAM)stringBuffer);
    SendDlgItemMessage(m_Dlg, IDC_SUBTITLE_MODE, CB_SETCURSEL, m_subtitleMode, 0);
    addHint(IDC_SUBTITLE_MODE, L"Configure how subtitles are selected.");
    SendDlgItemMessage(m_Dlg, IDC_BD_SEPARATE_FORCED_SUBS, BM_SETCHECK, m_PGSForcedStream, 0);
    addHint(IDC_BD_SEPARATE_FORCED_SUBS,
            L"Enabling this causes the creation of a new \"Forced Subtitles\" stream, which will try to always display "
            L"forced subtitles matching your selected audio language.\n\nNOTE: This option may not work on all Blu-ray "
            L"discs.\nRequires restart to take effect.");
    SendDlgItemMessage(m_Dlg, IDC_BD_ONLY_FORCED_SUBS, BM_SETCHECK, m_PGSOnlyForced, 0);
    addHint(IDC_BD_ONLY_FORCED_SUBS,
            L"When enabled, all Blu-ray (PGS) subtitles will be filtered, and only forced subtitles will be sent to "
            L"the renderer.\n\nNOTE: When this option is active, you will not be able to get the \"full\" subtitles.");
    SendDlgItemMessage(m_Dlg, IDC_VC1TIMESTAMP, BM_SETCHECK, m_VC1Mode, 0);
    addHint(IDC_VC1TIMESTAMP, L"Checked - Frame timings will be corrected.\nUnchecked - Frame timings will be sent "
                              L"untouched.\nIndeterminate (Auto) - Only enabled for decoders that rely on the splitter "
                              L"doing the corrections.\n\nNOTE: Only for debugging, if unsure, set to \"Auto\".");
    SendDlgItemMessage(m_Dlg, IDC_MKV_EXTERNAL, BM_SETCHECK, m_MKVExternal, 0);
    SendDlgItemMessage(m_Dlg, IDC_SUBSTREAMS, BM_SETCHECK, m_substreams, 0);
    addHint(IDC_SUBSTREAMS,
            L"Controls if sub-streams should be exposed as a separate stream.\nSub-streams are typically streams for "
            L"backwards compatibility, for example the AC3 part of TrueHD streams on Blu-rays.");
    SendDlgItemMessage(m_Dlg, IDC_STREAM_SWITCH_REMOVE_AUDIO, BM_SETCHECK, m_StreamSwitchRemoveAudio, 0);
    addHint(IDC_STREAM_SWITCH_REMOVE_AUDIO,
            L"Remove the old Audio Decoder from the Playback Chain before switching the audio stream, forcing "
            L"DirectShow to select a new one.\n\nThis option ensures that the preferred decoder is always used, "
            L"however it does not work properly with all players.");
    SendDlgItemMessage(m_Dlg, IDC_STREAM_SWITCH_RESELECT_SUBS, BM_SETCHECK, m_StreamSwitchReselectSubs, 0);
    addHint(IDC_STREAM_SWITCH_RESELECT_SUBS,
            L"Reapply the subtitle-selection rules when the audio-stream has changed. This automatically adjusts the "
            L"selected subtitle to the selected audio track.\nThe subtitle track can still be changed manually, "
            L"however it will be overridden again on the next audio track switch.");
    addHint(IDC_SELECT_AUDIO_QUALITY,
            L"Controls if the stream with the highest quality (matching your language preferences) should always be "
            L"used.\nIf disabled, the first stream is always used.");
    SendDlgItemMessage(m_Dlg, IDC_SELECT_AUDIO_QUALITY, BM_SETCHECK, m_PreferHighQualityAudio, 0);
    SendDlgItemMessage(m_Dlg, IDC_IMPAIRED_AUDIO, BM_SETCHECK, m_ImpairedAudio, 0);
    SendDlgItemMessage(m_Dlg, IDC_QUEUE_MEM_SPIN, UDM_SETRANGE32, 0, 2048);
    addHint(IDC_QUEUE_MEM,
            L"Set the maximum memory a frame queue can use for buffering (in megabytes).\nNote that this is the "
            L"maximum value, only very high bitrate files will usually even reach the default maximum value.");
    addHint(IDC_QUEUE_MEM_SPIN,
            L"Set the maximum memory a frame queue can use for buffering (in megabytes).\nNote that this is the "
            L"maximum value, only very high bitrate files will usually even reach the default maximum value.");
    swprintf_s(stringBuffer, L"%d", m_QueueMaxMem);
    SendDlgItemMessage(m_Dlg, IDC_QUEUE_MEM, WM_SETTEXT, 0, (LPARAM)stringBuffer);
    SendDlgItemMessage(m_Dlg, IDC_QUEUE_PACKETS_SPIN, UDM_SETRANGE32, 100, 100000);
    addHint(IDC_QUEUE_PACKETS, L"Set the maximum numbers of packets to buffer in the frame queue.\nNote that the frame "
                               L"queue will never exceed the memory limited set above.");
    addHint(IDC_QUEUE_PACKETS_SPIN, L"Set the maximum numbers of packets to buffer in the frame queue.\nNote that the "
                                    L"frame queue will never exceed the memory limited set above.");
    swprintf_s(stringBuffer, L"%d", m_QueueMaxPackets);
    SendDlgItemMessage(m_Dlg, IDC_QUEUE_PACKETS, WM_SETTEXT, 0, (LPARAM)stringBuffer);
    SendDlgItemMessage(m_Dlg, IDC_STREAM_ANADUR_SPIN, UDM_SETRANGE32, 200, 10000);
    addHint(IDC_STREAM_ANADUR, L"Set the duration (in milliseconds) a network stream is analyzed for before playback "
                               L"starts.\nA longer duration ensures the stream parameters are properly detected, "
                               L"however it will delay playback start.\n\nDefault: 1000 (1 second)");
    addHint(IDC_STREAM_ANADUR_SPIN, L"Set the duration (in milliseconds) a network stream is analyzed for before "
                                    L"playback starts.\nA longer duration ensures the stream parameters are properly "
                                    L"detected, however it will delay playback start.\n\nDefault: 1000 (1 second)");
    swprintf_s(stringBuffer, L"%d", m_NetworkAnalysisDuration);
    SendDlgItemMessage(m_Dlg, IDC_STREAM_ANADUR, WM_SETTEXT, 0, (LPARAM)stringBuffer);
    UpdateSubtitleMode(m_subtitleMode);
    SendDlgItemMessage(m_Dlg, IDC_TRAYICON, BM_SETCHECK, m_TrayIcon, 0);
    return hr;
}
// Snapshot the filter's current settings into the page's member cache.
// The cached values serve as the baseline for dirty-tracking in
// OnReceiveMessage. CHECK_HR jumps to "done" on the first failure.
HRESULT CLAVSplitterSettingsProp::LoadData()
{
    HRESULT hr = S_OK;
    // Free old strings
    SAFE_CO_FREE(m_pszPrefLang);
    SAFE_CO_FREE(m_pszPrefSubLang);
    SAFE_CO_FREE(m_pszAdvSubConfig);
    // Query current settings (strings are CoTaskMem-allocated by the filter)
    CHECK_HR(hr = m_pLAVF->GetPreferredLanguages(&m_pszPrefLang));
    CHECK_HR(hr = m_pLAVF->GetPreferredSubtitleLanguages(&m_pszPrefSubLang));
    CHECK_HR(hr = m_pLAVF->GetAdvancedSubtitleConfig(&m_pszAdvSubConfig));
    m_subtitleMode = m_pLAVF->GetSubtitleMode();
    m_PGSForcedStream = m_pLAVF->GetPGSForcedStream();
    m_PGSOnlyForced = m_pLAVF->GetPGSOnlyForced();
    m_VC1Mode = m_pLAVF->GetVC1TimestampMode();
    m_substreams = m_pLAVF->GetSubstreamsEnabled();
    m_MKVExternal = m_pLAVF->GetLoadMatroskaExternalSegments();
    m_StreamSwitchReselectSubs = m_pLAVF->GetStreamSwitchReselectSubtitles();
    m_StreamSwitchRemoveAudio = m_pLAVF->GetStreamSwitchRemoveAudio();
    m_PreferHighQualityAudio = m_pLAVF->GetPreferHighQualityAudioStreams();
    m_ImpairedAudio = m_pLAVF->GetUseAudioForHearingVisuallyImpaired();
    m_QueueMaxMem = m_pLAVF->GetMaxQueueMemSize();
    m_QueueMaxPackets = m_pLAVF->GetMaxQueueSize();
    m_NetworkAnalysisDuration = m_pLAVF->GetNetworkStreamAnalysisDuration();
    m_TrayIcon = m_pLAVF->GetTrayIcon();
done:
    return hr;
}
// Dialog message hook. Its sole job is dirty-tracking: compare each control's
// new state against the cached baseline from LoadData() and call SetDirty()
// when they differ (which enables the property sheet's Apply button). The
// numeric edit controls additionally reject non-numeric input via EM_UNDO.
INT_PTR CLAVSplitterSettingsProp::OnReceiveMessage(HWND hwnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
    switch (uMsg)
    {
    case WM_COMMAND:
        // Mark the page dirty if the text changed
        if (HIWORD(wParam) == EN_CHANGE && (LOWORD(wParam) == IDC_PREF_LANG || LOWORD(wParam) == IDC_PREF_LANG_SUBS))
        {
            WCHAR buffer[LANG_BUFFER_SIZE];
            SendDlgItemMessage(m_Dlg, LOWORD(wParam), WM_GETTEXT, LANG_BUFFER_SIZE, (LPARAM)&buffer);
            int dirty = 0;
            WCHAR *source = nullptr;
            // Pick the baseline string: the subtitle box compares against the
            // advanced config when advanced mode is active (see UpdateSubtitleMode)
            if (LOWORD(wParam) == IDC_PREF_LANG)
            {
                source = m_pszPrefLang;
            }
            else
            {
                source = (m_selectedSubMode == LAVSubtitleMode_Advanced) ? m_pszAdvSubConfig : m_pszPrefSubLang;
            }
            if (source)
            {
                dirty = _wcsicmp(buffer, source);
            }
            else
            {
                // No baseline: any non-empty text counts as a change
                dirty = (int)wcslen(buffer);
            }
            if (dirty != 0)
            {
                SetDirty();
            }
        }
        else if (HIWORD(wParam) == CBN_SELCHANGE && LOWORD(wParam) == IDC_SUBTITLE_MODE)
        {
            // Subtitle mode combo: update the note text / buffer swap, then dirty-check
            DWORD dwVal = (DWORD)SendDlgItemMessage(m_Dlg, IDC_SUBTITLE_MODE, CB_GETCURSEL, 0, 0);
            UpdateSubtitleMode((LAVSubtitleMode)dwVal);
            if (dwVal != m_subtitleMode)
            {
                SetDirty();
            }
        }
        // The following branches are all identical in shape: a checkbox click
        // dirties the page iff its state differs from the cached value.
        else if (HIWORD(wParam) == BN_CLICKED && LOWORD(wParam) == IDC_BD_SEPARATE_FORCED_SUBS)
        {
            BOOL bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_BD_SEPARATE_FORCED_SUBS, BM_GETCHECK, 0, 0);
            if (bFlag != m_PGSForcedStream)
            {
                SetDirty();
            }
        }
        else if (HIWORD(wParam) == BN_CLICKED && LOWORD(wParam) == IDC_BD_ONLY_FORCED_SUBS)
        {
            BOOL bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_BD_ONLY_FORCED_SUBS, BM_GETCHECK, 0, 0);
            if (bFlag != m_PGSOnlyForced)
            {
                SetDirty();
            }
        }
        else if (HIWORD(wParam) == BN_CLICKED && LOWORD(wParam) == IDC_VC1TIMESTAMP)
        {
            // Tri-state checkbox, compared as int
            int iFlag = (int)SendDlgItemMessage(m_Dlg, IDC_VC1TIMESTAMP, BM_GETCHECK, 0, 0);
            if (iFlag != m_VC1Mode)
            {
                SetDirty();
            }
        }
        else if (HIWORD(wParam) == BN_CLICKED && LOWORD(wParam) == IDC_MKV_EXTERNAL)
        {
            BOOL bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_MKV_EXTERNAL, BM_GETCHECK, 0, 0);
            if (bFlag != m_MKVExternal)
            {
                SetDirty();
            }
        }
        else if (HIWORD(wParam) == BN_CLICKED && LOWORD(wParam) == IDC_SUBSTREAMS)
        {
            BOOL bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_SUBSTREAMS, BM_GETCHECK, 0, 0);
            if (bFlag != m_substreams)
            {
                SetDirty();
            }
        }
        else if (HIWORD(wParam) == BN_CLICKED && LOWORD(wParam) == IDC_STREAM_SWITCH_REMOVE_AUDIO)
        {
            BOOL bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_STREAM_SWITCH_REMOVE_AUDIO, BM_GETCHECK, 0, 0);
            if (bFlag != m_StreamSwitchRemoveAudio)
            {
                SetDirty();
            }
        }
        else if (HIWORD(wParam) == BN_CLICKED && LOWORD(wParam) == IDC_STREAM_SWITCH_RESELECT_SUBS)
        {
            BOOL bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_STREAM_SWITCH_RESELECT_SUBS, BM_GETCHECK, 0, 0);
            if (bFlag != m_StreamSwitchReselectSubs)
            {
                SetDirty();
            }
        }
        else if (HIWORD(wParam) == BN_CLICKED && LOWORD(wParam) == IDC_SELECT_AUDIO_QUALITY)
        {
            BOOL bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_SELECT_AUDIO_QUALITY, BM_GETCHECK, 0, 0);
            if (bFlag != m_PreferHighQualityAudio)
            {
                SetDirty();
            }
        }
        else if (HIWORD(wParam) == BN_CLICKED && LOWORD(wParam) == IDC_IMPAIRED_AUDIO)
        {
            BOOL bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_IMPAIRED_AUDIO, BM_GETCHECK, 0, 0);
            if (bFlag != m_ImpairedAudio)
            {
                SetDirty();
            }
        }
        else if (HIWORD(wParam) == BN_CLICKED && LOWORD(wParam) == IDC_TRAYICON)
        {
            BOOL bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_TRAYICON, BM_GETCHECK, 0, 0);
            if (bFlag != m_TrayIcon)
            {
                SetDirty();
            }
        }
        // Numeric edit controls: undo any edit that doesn't parse as a number,
        // re-normalize the display text, and dirty-check against the cache.
        else if (LOWORD(wParam) == IDC_QUEUE_MEM && HIWORD(wParam) == EN_CHANGE)
        {
            WCHAR buffer[100];
            SendDlgItemMessage(m_Dlg, LOWORD(wParam), WM_GETTEXT, 100, (LPARAM)&buffer);
            int maxMem = _wtoi(buffer);
            size_t len = wcslen(buffer);
            if (maxMem == 0 && (buffer[0] != L'0' || len > 1))
            {
                SendDlgItemMessage(m_Dlg, LOWORD(wParam), EM_UNDO, 0, 0);
            }
            else
            {
                swprintf_s(buffer, L"%d", maxMem);
                if (wcslen(buffer) != len)
                    SendDlgItemMessage(m_Dlg, IDC_QUEUE_MEM, WM_SETTEXT, 0, (LPARAM)buffer);
                if (maxMem != m_QueueMaxMem)
                    SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_QUEUE_PACKETS && HIWORD(wParam) == EN_CHANGE)
        {
            WCHAR buffer[100];
            SendDlgItemMessage(m_Dlg, LOWORD(wParam), WM_GETTEXT, 100, (LPARAM)&buffer);
            int maxMem = _wtoi(buffer);
            size_t len = wcslen(buffer);
            if (maxMem == 0 && (buffer[0] != L'0' || len > 1))
            {
                SendDlgItemMessage(m_Dlg, LOWORD(wParam), EM_UNDO, 0, 0);
            }
            else
            {
                swprintf_s(buffer, L"%d", maxMem);
                if (wcslen(buffer) != len)
                    SendDlgItemMessage(m_Dlg, IDC_QUEUE_PACKETS, WM_SETTEXT, 0, (LPARAM)buffer);
                if (maxMem != m_QueueMaxPackets)
                    SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_STREAM_ANADUR && HIWORD(wParam) == EN_CHANGE)
        {
            WCHAR buffer[100];
            SendDlgItemMessage(m_Dlg, LOWORD(wParam), WM_GETTEXT, 100, (LPARAM)&buffer);
            int duration = _wtoi(buffer);
            size_t len = wcslen(buffer);
            if (duration == 0 && (buffer[0] != L'0' || len > 1))
            {
                SendDlgItemMessage(m_Dlg, LOWORD(wParam), EM_UNDO, 0, 0);
            }
            else
            {
                swprintf_s(buffer, L"%d", duration);
                if (wcslen(buffer) != len)
                    SendDlgItemMessage(m_Dlg, IDC_STREAM_ANADUR, WM_SETTEXT, 0, (LPARAM)buffer);
                if (duration != m_NetworkAnalysisDuration)
                    SetDirty();
            }
        }
        break;
    }
    // Let the parent class handle the message.
    return __super::OnReceiveMessage(hwnd, uMsg, wParam, lParam);
}
// Input-formats property page; the base class wires up the dialog resource and title.
CLAVSplitterFormatsProp::CLAVSplitterFormatsProp(LPUNKNOWN pUnk, HRESULT *phr)
    : CBaseDSPropPage(NAME("LAVF Settings"), pUnk, IDD_PROPPAGE_FORMATS, IDS_INPUT_FORMATS)
{
}
// Free the per-format enable-state array and drop the filter reference.
CLAVSplitterFormatsProp::~CLAVSplitterFormatsProp(void)
{
    SAFE_CO_FREE(m_bFormats);
    SafeRelease(&m_pLAVF);
}
// Attach the page to a filter instance: acquire its settings interface.
HRESULT CLAVSplitterFormatsProp::OnConnect(IUnknown *pUnk)
{
    if (!pUnk)
        return E_POINTER;

    ASSERT(m_pLAVF == nullptr);
    return pUnk->QueryInterface(&m_pLAVF);
}
// Detach from the filter: release the settings interface acquired in OnConnect.
HRESULT CLAVSplitterFormatsProp::OnDisconnect()
{
    SafeRelease(&m_pLAVF);
    return S_OK;
}
// Commit the check state of every format in the list view to the filter,
// refreshing the cached m_bFormats baseline along the way. List-view rows
// were created in the iteration order of m_Formats, so the same order is
// used to map row index -> format.
HRESULT CLAVSplitterFormatsProp::OnApplyChanges()
{
    ASSERT(m_pLAVF != nullptr);
    HWND hlv = GetDlgItem(m_Dlg, IDC_FORMATS);

    int row = 0;
    for (const FormatInfo &fmt : m_Formats)
    {
        m_bFormats[row] = ListView_GetCheckState(hlv, row);
        m_pLAVF->SetFormatEnabled(fmt.strName, m_bFormats[row]);
        ++row;
    }
    return S_OK;
}
// Initialize the formats page: show the currently active input format and
// populate the checkbox list view with all demuxer input formats and their
// enabled state. Allocates m_bFormats (one BOOL per format) as the
// dirty-tracking baseline used by OnReceiveMessage/OnApplyChanges.
HRESULT CLAVSplitterFormatsProp::OnActivate()
{
    HRESULT hr = S_OK;
    INITCOMMONCONTROLSEX icc;
    icc.dwSize = sizeof(INITCOMMONCONTROLSEX);
    icc.dwICC = ICC_BAR_CLASSES | ICC_STANDARD_CLASSES;
    if (InitCommonControlsEx(&icc) == FALSE)
    {
        return E_FAIL;
    }
    ASSERT(m_pLAVF != nullptr);
    memset(stringBuffer, 0, sizeof(stringBuffer));
    // Currently active input format (narrow string; %S widens it)
    const char *pszInput = m_pLAVF->GetInputFormat();
    if (pszInput)
    {
        _snwprintf_s(stringBuffer, _TRUNCATE, L"%S", pszInput);
    }
    SendDlgItemMessage(m_Dlg, IDC_CUR_INPUT, WM_SETTEXT, 0, (LPARAM)stringBuffer);
    m_Formats = m_pLAVF->GetInputFormats();
    // Setup ListView control for format configuration
    SendDlgItemMessage(m_Dlg, IDC_FORMATS, CCM_DPISCALE, TRUE, 0);
    HWND hlv = GetDlgItem(m_Dlg, IDC_FORMATS);
    ListView_SetExtendedListViewStyle(hlv, LVS_EX_CHECKBOXES | LVS_EX_FULLROWSELECT | LVS_EX_GRIDLINES);
    // Column 0 is a narrow stub that hosts the checkbox
    int nCol = 1;
    LVCOLUMN lvc = {LVCF_WIDTH, 0, 20, 0};
    ListView_InsertColumn(hlv, 0, &lvc);
    ListView_AddCol(hlv, nCol, 75, L"Format", false);
    ListView_AddCol(hlv, nCol, 210, L"Description", false);
    ListView_DeleteAllItems(hlv);
    ListView_SetItemCount(hlv, m_Formats.size());
    // (Re-)allocate the enabled-state baseline, one BOOL per format
    SAFE_CO_FREE(m_bFormats);
    m_bFormats = (BOOL *)CoTaskMemAlloc(sizeof(BOOL) * m_Formats.size());
    if (!m_bFormats)
        return E_OUTOFMEMORY;
    memset(m_bFormats, 0, sizeof(BOOL) * m_Formats.size());
    // Create entries for the formats
    LVITEM lvi;
    memset(&lvi, 0, sizeof(lvi));
    lvi.mask = LVIF_TEXT | LVIF_PARAM;
    int nItem = 0;
    std::set<FormatInfo>::const_iterator it;
    for (it = m_Formats.begin(); it != m_Formats.end(); ++it)
    {
        // Create main entry
        lvi.iItem = nItem + 1;
        ListView_InsertItem(hlv, &lvi);
        // Set sub item texts
        _snwprintf_s(stringBuffer, _TRUNCATE, L"%S", it->strName);
        ListView_SetItemText(hlv, nItem, 1, (LPWSTR)stringBuffer);
        _snwprintf_s(stringBuffer, _TRUNCATE, L"%S", it->strDescription);
        ListView_SetItemText(hlv, nItem, 2, (LPWSTR)stringBuffer);
        m_bFormats[nItem] = m_pLAVF->IsFormatEnabled(it->strName);
        ListView_SetCheckState(hlv, nItem, m_bFormats[nItem]);
        nItem++;
    }
    return hr;
}
// Dialog message hook: marks the page dirty when a format's check state in
// the list view differs from the cached baseline in m_bFormats.
INT_PTR CLAVSplitterFormatsProp::OnReceiveMessage(HWND hwnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
    switch (uMsg)
    {
    case WM_NOTIFY:
        NMHDR *hdr = (LPNMHDR)lParam;
        if (hdr->idFrom == IDC_FORMATS)
        {
            switch (hdr->code)
            {
            case LVN_ITEMCHANGED:
                LPNMLISTVIEW nmlv = (LPNMLISTVIEW)lParam;
                BOOL check = ListView_GetCheckState(hdr->hwndFrom, nmlv->iItem);
                if (check != m_bFormats[nmlv->iItem])
                {
                    SetDirty();
                }
                return TRUE;
            }
        }
        break;
    }
    // Let the parent class handle the message.
    return __super::OnReceiveMessage(hwnd, uMsg, wParam, lParam);
}
| 26,526
|
C++
|
.cpp
| 579
| 37.559585
| 120
| 0.644888
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| false
| true
| true
| false
|
22,088
|
PacketQueue.cpp
|
Nevcairiel_LAVFilters/demuxer/LAVSplitter/PacketQueue.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "PacketQueue.h"
#include "BaseDemuxer.h"
// Queue a new packet at the end of the list
// Append a packet to the tail of the queue (nullptr entries are permitted
// as markers and contribute nothing to the tracked data size).
void CPacketQueue::Queue(Packet *pPacket)
{
    CAutoLock lock(this);

    if (pPacket)
        m_dataSize += (size_t)pPacket->GetDataSize();

    m_queue.push_back(pPacket);
}
// Get a packet from the beginning of the list
Packet *CPacketQueue::Get()
{
CAutoLock cAutoLock(this);
if (m_queue.size() == 0)
{
return nullptr;
}
Packet *pPacket = m_queue.front();
m_queue.pop_front();
if (pPacket)
m_dataSize -= (size_t)pPacket->GetDataSize();
return pPacket;
}
// Get the size of the queue
size_t CPacketQueue::Size()
{
CAutoLock cAutoLock(this);
return m_queue.size();
}
// Get the size of the queue
size_t CPacketQueue::DataSize()
{
CAutoLock cAutoLock(this);
return m_dataSize;
}
// Clear the List (all elements are free'ed)
void CPacketQueue::Clear()
{
CAutoLock cAutoLock(this);
DbgLog((LOG_TRACE, 10, L"CPacketQueue::Clear() - clearing queue with %d entries", m_queue.size()));
std::deque<Packet *>::iterator it;
for (it = m_queue.begin(); it != m_queue.end(); ++it)
{
delete *it;
}
m_queue.clear();
m_dataSize = 0;
}
| 2,071
|
C++
|
.cpp
| 68
| 27.161765
| 103
| 0.690141
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| true
| true
| true
| false
|
22,090
|
PacketAllocator.cpp
|
Nevcairiel_LAVFilters/demuxer/LAVSplitter/PacketAllocator.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Initial design and concept by Gabest and the MPC-HC Team, copyright under GPLv2
* Contributions by Ti-BEN from the XBMC DSPlayer Project, also under GPLv2
*/
#include "stdafx.h"
#include "PacketAllocator.h"
// Media sample that wraps a demuxer Packet; construction is delegated to
// the DirectShow base-class CMediaSample.
CMediaPacketSample::CMediaPacketSample(LPCTSTR pName, CBaseAllocator *pAllocator, HRESULT *phr)
    : CMediaSample(pName, pAllocator, phr)
{
}
// Expose ILAVMediaSample and IMediaSideData in addition to the base
// CMediaSample interfaces.
STDMETHODIMP CMediaPacketSample::QueryInterface(REFIID riid, void **ppv)
{
    if (riid == __uuidof(ILAVMediaSample))
        return GetInterface((ILAVMediaSample *)this, ppv);

    if (riid == __uuidof(IMediaSideData))
        return GetInterface((IMediaSideData *)this, ppv);

    return CMediaSample::QueryInterface(riid, ppv);
}
// Standard COM AddRef, delegated to the base sample.
STDMETHODIMP_(ULONG) CMediaPacketSample::AddRef()
{
    return CMediaSample::AddRef();
}
// Custom Release: when the last reference is dropped the sample is not
// destroyed but reset and handed back to its allocator's free list for reuse
// (standard DirectShow sample pooling). The final decrement avoids the
// interlocked op — at refcount 1 no other thread can hold a reference.
STDMETHODIMP_(ULONG) CMediaPacketSample::Release()
{
    /* Decrement our own private reference count */
    LONG lRef;
    if (m_cRef == 1)
    {
        lRef = 0;
        m_cRef = 0;
    }
    else
    {
        lRef = InterlockedDecrement(&m_cRef);
    }
    ASSERT(lRef >= 0);
    DbgLog((LOG_MEMORY, 3, TEXT("    Unknown %X ref-- = %d"), this, m_cRef));
    /* Did we release our final reference count */
    if (lRef == 0)
    {
        /* Free all resources */
        if (m_dwFlags & Sample_TypeChanged)
        {
            SetMediaType(nullptr);
        }
        ASSERT(m_pMediaType == nullptr);
        m_dwFlags = 0;
        m_dwTypeSpecificFlags = 0;
        m_dwStreamId = AM_STREAM_MEDIA;
        // Drop the wrapped packet and detach the data pointer before pooling
        SAFE_DELETE(m_pPacket);
        SetPointer(nullptr, 0);
        SAFE_DELETE(m_pSideData);
        /* This may cause us to be deleted */
        // Our refcount is reliably 0 thus no-one will mess with us
        m_pAllocator->ReleaseBuffer(this);
    }
    return (ULONG)lRef;
}
// Take ownership of a demuxer packet: the sample's data pointer is aimed at
// the packet's payload, and any FFmpeg side data is exposed via a
// MediaSideDataFFMpeg descriptor (non-owning view into the packet).
// Returns E_POINTER for a null packet, S_OK otherwise.
STDMETHODIMP CMediaPacketSample::SetPacket(Packet *pPacket)
{
    // BUGFIX: guard against a null packet; the original dereferenced it
    // unconditionally right after deleting the previous one.
    CheckPointer(pPacket, E_POINTER);
    SAFE_DELETE(m_pPacket);
    m_pPacket = pPacket;
    SetPointer(pPacket->GetData(), (LONG)pPacket->GetDataSize());
    SAFE_DELETE(m_pSideData);
    if (pPacket->GetNumSideData() > 0)
    {
        m_pSideData = new MediaSideDataFFMpeg();
        m_pSideData->side_data = pPacket->GetSideData();
        m_pSideData->side_data_elems = pPacket->GetNumSideData();
    }
    return S_OK;
}
// Side data on this sample type comes from the wrapped packet only;
// setting it externally is intentionally unsupported.
STDMETHODIMP CMediaPacketSample::SetSideData(GUID guidType, const BYTE *pData, size_t size)
{
    return E_NOTIMPL;
}
// Hand out the FFmpeg side-data descriptor when requested (and present);
// any other GUID, or a sample without side data, yields E_INVALIDARG.
STDMETHODIMP CMediaPacketSample::GetSideData(GUID guidType, const BYTE **pData, size_t *pSize)
{
    const bool haveFFmpegData = (guidType == IID_MediaSideDataFFMpeg) && (m_pSideData != nullptr);
    if (!haveFFmpegData)
        return E_INVALIDARG;

    *pData = (const BYTE *)m_pSideData;
    *pSize = sizeof(MediaSideDataFFMpeg);
    return S_OK;
}
// Allocator producing CMediaPacketSample objects; base class handles the
// free-list bookkeeping (last two args: bEvent, bQuery).
CPacketAllocator::CPacketAllocator(LPCTSTR pName, LPUNKNOWN pUnk, HRESULT *phr)
    : CBaseAllocator(pName, pUnk, phr, TRUE, TRUE)
{
}
// Decommit and free all pooled samples on destruction.
CPacketAllocator::~CPacketAllocator(void)
{
    Decommit();
    ReallyFree();
}
// Additionally expose ILAVDynamicAllocator; everything else goes to the base.
STDMETHODIMP CPacketAllocator::NonDelegatingQueryInterface(REFIID riid, __deref_out void **ppv)
{
    if (riid == IID_ILAVDynamicAllocator)
        return GetInterface((ILAVDynamicAllocator *)this, ppv);

    return __super::NonDelegatingQueryInterface(riid, ppv);
}
// Negotiate buffer count/size/alignment/prefix with the requesting filter
// (standard IMemAllocator::SetProperties contract). Rejects changes while
// committed or while buffers are outstanding; stores the (alignment-rounded)
// values and reports what will actually be allocated via pActual.
STDMETHODIMP CPacketAllocator::SetProperties(ALLOCATOR_PROPERTIES *pRequest, ALLOCATOR_PROPERTIES *pActual)
{
    CheckPointer(pActual, E_POINTER);
    ValidateReadWritePtr(pActual, sizeof(ALLOCATOR_PROPERTIES));
    CAutoLock cObjectLock(this);
    ZeroMemory(pActual, sizeof(ALLOCATOR_PROPERTIES));
    ASSERT(pRequest->cbBuffer > 0);
    SYSTEM_INFO SysInfo;
    GetSystemInfo(&SysInfo);
    /* Check the alignment request is a power of 2 */
    // (log-only: a non-power-of-2 alignment is reported but not rejected here)
    if ((-pRequest->cbAlign & pRequest->cbAlign) != pRequest->cbAlign)
    {
        DbgLog((LOG_ERROR, 1, TEXT("Alignment requested 0x%x not a power of 2!"), pRequest->cbAlign));
    }
    /* Check the alignment requested */
    if (pRequest->cbAlign == 0 || (SysInfo.dwAllocationGranularity & (pRequest->cbAlign - 1)) != 0)
    {
        DbgLog((LOG_ERROR, 1, TEXT("Invalid alignment 0x%x requested - granularity = 0x%x"), pRequest->cbAlign,
                SysInfo.dwAllocationGranularity));
        return VFW_E_BADALIGN;
    }
    /* Can't do this if already committed, there is an argument that says we
       should not reject the SetProperties call if there are buffers still
       active. However this is called by the source filter, which is the same
       person who is holding the samples. Therefore it is not unreasonable
       for them to free all their samples before changing the requirements */
    if (m_bCommitted == TRUE)
    {
        return VFW_E_ALREADY_COMMITTED;
    }
    /* Must be no outstanding buffers */
    if (m_lFree.GetCount() < m_lAllocated)
    {
        return VFW_E_BUFFERS_OUTSTANDING;
    }
    /* There isn't any real need to check the parameters as they
       will just be rejected when the user finally calls Commit */
    // round length up to alignment - remember that prefix is included in
    // the alignment
    LONG lSize = pRequest->cbBuffer + pRequest->cbPrefix;
    LONG lRemainder = lSize % pRequest->cbAlign;
    if (lRemainder != 0)
    {
        lSize = lSize - lRemainder + pRequest->cbAlign;
    }
    pActual->cbBuffer = m_lSize = (lSize - pRequest->cbPrefix);
    pActual->cBuffers = m_lCount = pRequest->cBuffers;
    pActual->cbAlign = m_lAlignment = pRequest->cbAlign;
    pActual->cbPrefix = m_lPrefix = pRequest->cbPrefix;
    // Signal Alloc() that the configuration changed and needs re-allocation
    m_bChanged = TRUE;
    return NOERROR;
}
// (Re)creates the pool of CMediaPacketSample objects according to the values
// negotiated in SetProperties. Called by the base class during Commit.
// Returns NOERROR when nothing changed (hr == S_FALSE from the base class).
HRESULT CPacketAllocator::Alloc(void)
{
    CAutoLock lck(this);
    /* Check he has called SetProperties */
    HRESULT hr = CBaseAllocator::Alloc();
    if (FAILED(hr))
    {
        return hr;
    }
    /* If the requirements haven't changed then don't reallocate */
    if (hr == S_FALSE)
    {
        return NOERROR;
    }
    ASSERT(hr == S_OK); // we use this fact in the loop below
    /* Free the old resources */
    if (m_bAllocated)
    {
        ReallyFree();
    }
    /* Make sure we've got reasonable values */
    if (m_lSize < 0 || m_lPrefix < 0 || m_lCount < 0)
    {
        return E_OUTOFMEMORY;
    }
    m_bAllocated = TRUE;
    CMediaPacketSample *pSample = nullptr;
    ASSERT(m_lAllocated == 0);
    // Create the initial set of samples; m_lAllocated tracks how many exist.
    for (; m_lAllocated < m_lCount; m_lAllocated++)
    {
        pSample = new CMediaPacketSample(NAME("LAV Package media sample"), this, &hr);
        ASSERT(SUCCEEDED(hr));
        if (pSample == nullptr)
        {
            return E_OUTOFMEMORY;
        }
        // This CANNOT fail
        m_lFree.Add(pSample);
    }
    // Configuration is now in effect.
    m_bChanged = FALSE;
    return NOERROR;
}
// get container for a sample. Blocking, synchronous call to get the
// next free buffer (as represented by an IMediaSample interface).
// on return, the time etc properties will be invalid, but the buffer
// pointer and size will be correct.
// Hands out a free sample. Unlike the stock CBaseAllocator, this allocator
// grows on demand: if the free list is empty it news up an additional
// CMediaPacketSample under the lock instead of immediately blocking.
// Only if that also yields no sample does it wait on the semaphore
// (or fail fast with VFW_E_TIMEOUT when AM_GBF_NOWAIT is set).
HRESULT CPacketAllocator::GetBuffer(__deref_out IMediaSample **ppBuffer, __in_opt REFERENCE_TIME *pStartTime,
                                    __in_opt REFERENCE_TIME *pEndTime, DWORD dwFlags)
{
    UNREFERENCED_PARAMETER(pStartTime);
    UNREFERENCED_PARAMETER(pEndTime);
    // NOTE(review): dwFlags is marked unreferenced here but is read below
    // (AM_GBF_NOWAIT) — the macro is harmless but stale.
    UNREFERENCED_PARAMETER(dwFlags);
    CMediaSample *pSample;
    *ppBuffer = NULL;
    for (;;)
    {
        { // scope for lock
            CAutoLock cObjectLock(this);
            /* Check we are committed */
            if (!m_bCommitted)
            {
                return VFW_E_NOT_COMMITTED;
            }
            pSample = (CMediaSample *)m_lFree.RemoveHead();
            /* if no sample was available, allocate a new one */
            if (pSample == NULL)
            {
                HRESULT hr = S_OK;
                pSample = new CMediaPacketSample(NAME("LAV Package media sample"), this, &hr);
                ASSERT(SUCCEEDED(hr));
                if (pSample)
                {
                    // Grow the pool; the sample is handed out directly,
                    // not added to the free list.
                    m_lAllocated++;
                    DbgLog((LOG_TRACE, 10, "Allocated new sample, %d total", m_lAllocated));
                }
            }
        }
        /* If we didn't get a sample then wait for the list to signal */
        if (pSample)
        {
            break;
        }
        if (dwFlags & AM_GBF_NOWAIT)
        {
            return VFW_E_TIMEOUT;
        }
        ASSERT(m_hSem != NULL);
        // Wait outside the lock so releases can signal the semaphore.
        WaitForSingleObject(m_hSem, INFINITE);
    }
    /* Addref the buffer up to one. On release
       back to zero instead of being deleted, it will requeue itself by
       calling the ReleaseBuffer member function. NOTE the owner of a
       media sample must always be derived from CBaseAllocator */
    ASSERT(pSample->m_cRef == 0);
    pSample->m_cRef = 1;
    *ppBuffer = pSample;
#ifdef DXMPERF
    PERFLOG_GETBUFFER((IMemAllocator *)this, pSample);
#endif // DXMPERF
    return NOERROR;
}
// override this to free up any resources we have allocated.
// called from the base class on Decommit when all buffers have been
// returned to the free list.
//
// caller has already locked the object.
// in our case, we keep the memory until we are deleted, so
// we do nothing here. The memory is deleted in the destructor by
// calling ReallyFree()
// Intentionally a no-op: samples are kept alive for reuse until the allocator
// itself is destroyed, at which point ReallyFree() performs the real cleanup.
void CPacketAllocator::Free(void)
{
}
// called from the destructor (and from Alloc if changing size/count) to
// actually free up the memory
void CPacketAllocator::ReallyFree(void)
{
/* Should never be deleting this unless all buffers are freed */
ASSERT(m_lAllocated == m_lFree.GetCount());
/* Free up all the CMediaSamples */
CMediaSample *pSample;
for (;;)
{
pSample = m_lFree.RemoveHead();
if (pSample != nullptr)
{
delete pSample;
}
else
{
break;
}
}
m_lAllocated = 0;
m_bAllocated = FALSE;
}
| 10,602
|
C++
|
.cpp
| 316
| 27.655063
| 111
| 0.651863
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| true
| true
| true
| false
|
22,094
|
MediaSampleSideData.cpp
|
Nevcairiel_LAVFilters/common/DSUtilLite/MediaSampleSideData.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "MediaSampleSideData.h"
// Construct the sample; all buffer handling lives in CMediaSample,
// the side-data map starts out empty.
CMediaSampleSideData::CMediaSampleSideData(LPCTSTR pName, CBaseAllocator *pAllocator, HRESULT *phr, LPBYTE pBuffer,
                                           LONG length)
    : CMediaSample(pName, pAllocator, phr, pBuffer, length)
{
}
// Free any side-data blobs still attached to this sample.
CMediaSampleSideData::~CMediaSampleSideData()
{
    ReleaseSideData();
}
// Answer IMediaSideData locally; every other interface is delegated to
// the CMediaSample base implementation.
STDMETHODIMP CMediaSampleSideData::QueryInterface(REFIID riid, __deref_out void **ppv)
{
    CheckPointer(ppv, E_POINTER);
    ValidateReadWritePtr(ppv, sizeof(PVOID));

    if (riid != __uuidof(IMediaSideData))
        return __super::QueryInterface(riid, ppv);

    return GetInterface(static_cast<IMediaSideData *>(this), ppv);
}
// Overrides the sample's Release to reset per-sample state — including the
// attached side data — before the sample is recycled via its allocator,
// mirroring CMediaSample's release protocol.
STDMETHODIMP_(ULONG) CMediaSampleSideData::Release()
{
    /* Decrement our own private reference count */
    LONG lRef;
    if (m_cRef == 1)
    {
        // Final reference: skip the interlocked op and pin the count at zero.
        lRef = 0;
        m_cRef = 0;
    }
    else
    {
        lRef = InterlockedDecrement(&m_cRef);
    }
    ASSERT(lRef >= 0);
    /* Did we release our final reference count */
    if (lRef == 0)
    {
        /* Free all resources */
        if (m_dwFlags & Sample_TypeChanged)
        {
            SetMediaType(NULL);
        }
        ASSERT(m_pMediaType == NULL);
        m_dwFlags = 0;
        m_dwTypeSpecificFlags = 0;
        m_dwStreamId = AM_STREAM_MEDIA;
        // Drop side data so a recycled sample starts clean.
        ReleaseSideData();
        /* This may cause us to be deleted */
        // Our refcount is reliably 0 thus no-one will mess with us
        m_pAllocator->ReleaseBuffer(this);
    }
    return (ULONG)lRef;
}
// Free every stored side-data blob under the lock; the map entries
// themselves disappear with clear().
void CMediaSampleSideData::ReleaseSideData()
{
    CAutoLock Lock(&m_csSideData);
    for (auto &entry : m_SideData)
    {
        _aligned_free(entry.second.pData);
    }
    m_SideData.clear();
}
// IMediaSideData
// Stores a copy of pData under guidType. An existing entry is resized in
// place with _aligned_realloc; a new entry gets a fresh 16-byte-aligned
// allocation. Returns E_POINTER for null/empty input, E_OUTOFMEMORY on
// allocation failure (leaving any existing entry untouched).
STDMETHODIMP CMediaSampleSideData::SetSideData(GUID guidType, const BYTE *pData, size_t size)
{
    if (!pData || !size)
        return E_POINTER;
    CAutoLock Lock(&m_csSideData);
    auto it = m_SideData.find(guidType);
    if (it != m_SideData.end())
    {
        SideDataEntry *sd = &(it->second);
        // On realloc failure the old blob stays valid, so the entry is
        // only updated after a successful reallocation.
        BYTE *newData = (BYTE *)_aligned_realloc(sd->pData, size, 16);
        if (newData)
        {
            sd->size = size;
            sd->pData = newData;
            memcpy(newData, pData, size);
        }
        else
        {
            return E_OUTOFMEMORY;
        }
    }
    else
    {
        SideDataEntry sd;
        sd.pData = (BYTE *)_aligned_malloc(size, 16);
        if (sd.pData)
        {
            sd.size = size;
            memcpy(sd.pData, pData, size);
        }
        else
        {
            return E_OUTOFMEMORY;
        }
        m_SideData[guidType] = sd;
    }
    return S_OK;
}
// Look up side data by GUID. The returned pointer stays owned by this
// sample and is valid until the data is replaced or the sample recycled.
STDMETHODIMP CMediaSampleSideData::GetSideData(GUID guidType, const BYTE **pData, size_t *pSize)
{
    if (!pData || !pSize)
        return E_POINTER;

    CAutoLock Lock(&m_csSideData);
    auto entry = m_SideData.find(guidType);
    if (entry == m_SideData.end())
        return E_FAIL;

    *pData = entry->second.pData;
    *pSize = entry->second.size;
    return S_OK;
}
| 3,985
|
C++
|
.cpp
| 138
| 22.876812
| 115
| 0.619085
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| true
| true
| true
| false
|
22,095
|
FontInstaller.cpp
|
Nevcairiel_LAVFilters/common/DSUtilLite/FontInstaller.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Initial design and concept by Gabest and the MPC-HC Team, copyright under GPLv2
*/
#include "stdafx.h"
#include "FontInstaller.h"
// Resolve the memory-font APIs dynamically from gdi32; if either lookup
// fails the corresponding pointer stays null and the feature is disabled.
CFontInstaller::CFontInstaller()
    : pAddFontMemResourceEx(nullptr)
    , pRemoveFontMemResourceEx(nullptr)
{
    HMODULE hGdi = GetModuleHandle(_T("gdi32.dll"));
    if (hGdi != nullptr)
    {
        pAddFontMemResourceEx =
            (HANDLE(WINAPI *)(PVOID, DWORD, PVOID, DWORD *))GetProcAddress(hGdi, "AddFontMemResourceEx");
        pRemoveFontMemResourceEx = (BOOL(WINAPI *)(HANDLE))GetProcAddress(hGdi, "RemoveFontMemResourceEx");
    }
}
// Remove every font this instance registered.
CFontInstaller::~CFontInstaller()
{
    UninstallFonts();
}
// Install a font from an in-memory blob; currently the only supported path.
bool CFontInstaller::InstallFont(const void *pData, UINT len)
{
    return InstallFontMemory(pData, len);
}
void CFontInstaller::UninstallFonts()
{
if (pRemoveFontMemResourceEx)
{
std::vector<HANDLE>::iterator it;
for (it = m_fonts.begin(); it != m_fonts.end(); ++it)
{
pRemoveFontMemResourceEx(*it);
}
m_fonts.clear();
}
}
// Register an in-memory font blob via AddFontMemResourceEx and remember the
// handle for later removal. Returns false when the API is unavailable or
// no fonts were added.
bool CFontInstaller::InstallFontMemory(const void *pData, UINT len)
{
    if (!pAddFontMemResourceEx)
    {
        return false;
    }

    DWORD nFonts = 0;
    HANDLE hFont = pAddFontMemResourceEx((PVOID)pData, len, nullptr, &nFonts);
    const bool bInstalled = hFont && nFonts > 0;
    if (bInstalled)
    {
        m_fonts.push_back(hFont);
    }
    return bInstalled;
}
| 2,176
|
C++
|
.cpp
| 67
| 28.522388
| 107
| 0.697431
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| true
| true
| true
| false
|
22,096
|
DSMResourceBag.cpp
|
Nevcairiel_LAVFilters/common/DSUtilLite/DSMResourceBag.cpp
|
#include "stdafx.h"
#include "DSMResourceBag.h"
#include <algorithm>
// Default-construct a resource with a generic MIME type.
// NOTE(review): only 'mime' is initialized here; 'tag' and the other members
// rely on their declarations — confirm in-class initializers exist in the header.
CDSMResource::CDSMResource()
    : mime(L"application/octet-stream")
{
}
// Construct a resource from its descriptive fields and a private copy of
// the payload bytes.
CDSMResource::CDSMResource(LPCWSTR name, LPCWSTR desc, LPCWSTR mime, BYTE *pData, int len, DWORD_PTR tag)
{
    this->name = name;
    this->desc = desc;
    this->mime = mime;
    this->tag = tag;
    // Take our own copy of the payload.
    data.assign(pData, pData + len);
}
// Member-wise copy assignment with a self-assignment guard.
CDSMResource &CDSMResource::operator=(const CDSMResource &r)
{
    if (this == &r)
        return *this;

    name = r.name;
    desc = r.desc;
    mime = r.mime;
    data = r.data;
    tag = r.tag;
    return *this;
}
// Nothing to do; the resource list starts out empty.
CDSMResourceBag::CDSMResourceBag()
{
}
// Explicitly drop all resources (the vector destructor would do this anyway).
CDSMResourceBag::~CDSMResourceBag()
{
    m_resources.clear();
}
// Number of resources currently stored, taken under the lock.
STDMETHODIMP_(DWORD) CDSMResourceBag::ResGetCount()
{
    CAutoLock lock(&m_csResources);
    return static_cast<DWORD>(m_resources.size());
}
// Fetches the resource at iIndex. Each output parameter is optional; the
// payload (ppData) requires pDataLen. Strings are returned as caller-owned
// BSTRs and the payload as a caller-owned CoTaskMemAlloc buffer.
// Returns E_INVALIDARG for bad index/arg combinations, E_OUTOFMEMORY when
// any allocation fails.
STDMETHODIMP CDSMResourceBag::ResGet(DWORD iIndex, BSTR *ppName, BSTR *ppDesc, BSTR *ppMime, BYTE **ppData,
                                     DWORD *pDataLen, DWORD_PTR *pTag)
{
    CAutoLock lock(&m_csResources);
    if (ppData && !pDataLen)
        return E_INVALIDARG;
    if (iIndex >= m_resources.size())
        return E_INVALIDARG;
    CDSMResource &r = m_resources[iIndex];
    if (ppName)
    {
        *ppName = SysAllocString(r.name.data());
        if (*ppName == NULL)
            return E_OUTOFMEMORY;
    }
    if (ppDesc)
    {
        *ppDesc = SysAllocString(r.desc.data());
        if (*ppDesc == NULL)
            return E_OUTOFMEMORY;
    }
    if (ppMime)
    {
        *ppMime = SysAllocString(r.mime.data());
        if (*ppMime == NULL)
            return E_OUTOFMEMORY;
    }
    if (ppData)
    {
        *pDataLen = (DWORD)r.data.size();
        // Fix: CoTaskMemAlloc can fail; previously the result was passed to
        // memcpy unchecked, which is undefined behavior on a NULL buffer.
        *ppData = (BYTE *)CoTaskMemAlloc(*pDataLen);
        if (*ppData == NULL)
            return E_OUTOFMEMORY;
        memcpy(*ppData, r.data.data(), *pDataLen);
    }
    if (pTag)
    {
        *pTag = r.tag;
    }
    return S_OK;
}
// Update the resource at iIndex. NULL descriptive fields mean "leave
// unchanged"; the payload is replaced when pData is given (or cleared when
// len == 0), and the tag is always updated.
STDMETHODIMP CDSMResourceBag::ResSet(DWORD iIndex, LPCWSTR pName, LPCWSTR pDesc, LPCWSTR pMime, const BYTE *pData,
                                     DWORD len, DWORD_PTR tag)
{
    CAutoLock lock(&m_csResources);
    if (iIndex >= m_resources.size())
        return E_INVALIDARG;

    CDSMResource &res = m_resources[iIndex];
    if (pName != nullptr)
        res.name = pName;
    if (pDesc != nullptr)
        res.desc = pDesc;
    if (pMime != nullptr)
        res.mime = pMime;
    if (pData != nullptr || len == 0)
    {
        res.data.resize(len);
        if (pData != nullptr)
        {
            memcpy(res.data.data(), pData, res.data.size());
        }
    }
    res.tag = tag;
    return S_OK;
}
// Append a blank resource, then populate it via ResSet.
STDMETHODIMP CDSMResourceBag::ResAppend(LPCWSTR pName, LPCWSTR pDesc, LPCWSTR pMime, BYTE *pData, DWORD len,
                                        DWORD_PTR tag)
{
    CAutoLock lock(&m_csResources);
    m_resources.emplace_back();
    return ResSet((DWORD)m_resources.size() - 1, pName, pDesc, pMime, pData, len, tag);
}
// Remove the resource at iIndex, validating the index first.
STDMETHODIMP CDSMResourceBag::ResRemoveAt(DWORD iIndex)
{
    CAutoLock lock(&m_csResources);
    if (iIndex >= m_resources.size())
        return E_INVALIDARG;

    m_resources.erase(m_resources.begin() + iIndex);
    return S_OK;
}
// Remove resources in bulk: tag == 0 wipes everything, otherwise only
// resources carrying the matching tag are erased.
STDMETHODIMP CDSMResourceBag::ResRemoveAll(DWORD_PTR tag)
{
    CAutoLock lock(&m_csResources);
    if (!tag)
    {
        m_resources.clear();
        return S_OK;
    }

    auto hasTag = [tag](const CDSMResource &res) { return res.tag == tag; };
    m_resources.erase(std::remove_if(m_resources.begin(), m_resources.end(), hasTag), m_resources.end());
    return S_OK;
}
| 3,512
|
C++
|
.cpp
| 132
| 20.515152
| 120
| 0.598806
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| true
| true
| true
| false
|
22,097
|
DShowUtil.cpp
|
Nevcairiel_LAVFilters/common/DSUtilLite/DShowUtil.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "DShowUtil.h"
#include <Shlwapi.h>
#include <dvdmedia.h>
#include "moreuuids.h"
#include "registry.h"
#include "IMediaSideDataFFmpeg.h"
//
// Usage: SetThreadName (-1, "MainThread");
//
// Payload for the MSVC "thread naming" debugger convention: raising exception
// MS_VC_EXCEPTION (0x406D1388) with this structure lets an attached debugger
// label the thread.
typedef struct tagTHREADNAME_INFO
{
    DWORD dwType; // must be 0x1000
    LPCSTR szName; // pointer to name (in user addr space)
    DWORD dwThreadID; // thread ID (-1=caller thread)
    DWORD dwFlags; // reserved for future use, must be zero
} THREADNAME_INFO;
const DWORD MS_VC_EXCEPTION = 0x406D1388;
// Names a thread for debuggers (dwThreadID == -1 names the caller) by raising
// the well-known MS_VC_EXCEPTION with a THREADNAME_INFO payload. Harmless when
// no debugger is attached: the SEH handler simply swallows the exception.
void SetThreadName(DWORD dwThreadID, LPCSTR szThreadName)
{
    THREADNAME_INFO info;
    info.dwType = 0x1000;
    info.szName = szThreadName;
    info.dwThreadID = dwThreadID;
    info.dwFlags = 0;
    __try
    {
        RaiseException(MS_VC_EXCEPTION, 0, sizeof(info) / sizeof(ULONG_PTR), (ULONG_PTR *)&info);
    }
    __except (EXCEPTION_EXECUTE_HANDLER)
    {
        // Expected path when no debugger consumed the exception.
    }
}
#ifdef DEBUG
#include <Shlobj.h>
#include <Shlwapi.h>
extern HANDLE m_hOutput;
volatile LONG hOutputCounter = 0;
extern HRESULT DbgUniqueProcessName(LPCTSTR inName, LPTSTR outName);
// Opens szFile as the debug log target. On a sharing violation (file already
// held by another process) it retries with a process-unique name from
// DbgUniqueProcessName. The handle is published race-free via an interlocked
// compare-exchange; hOutputCounter tracks balanced set/close calls.
void DbgSetLogFile(LPCTSTR szFile)
{
    HANDLE hOutput =
        CreateFile(szFile, GENERIC_WRITE, FILE_SHARE_READ, nullptr, CREATE_ALWAYS, FILE_ATTRIBUTE_NORMAL, nullptr);
    if (INVALID_HANDLE_VALUE == hOutput && GetLastError() == ERROR_SHARING_VIOLATION)
    {
        TCHAR uniqueName[MAX_PATH] = {0};
        if (SUCCEEDED(DbgUniqueProcessName(szFile, uniqueName)))
        {
            hOutput = CreateFile(uniqueName, GENERIC_WRITE, FILE_SHARE_READ, nullptr, CREATE_ALWAYS,
                                 FILE_ATTRIBUTE_NORMAL, nullptr);
        }
    }
    if (hOutput != INVALID_HANDLE_VALUE)
    {
        // Only the first caller installs the handle; losers close their duplicate.
        if (InterlockedCompareExchangePointer(&m_hOutput, hOutput, INVALID_HANDLE_VALUE) != INVALID_HANDLE_VALUE)
            CloseHandle(hOutput);
    }
    InterlockedIncrement(&hOutputCounter);
}
// Convenience wrapper: builds "<desktop>\<szFile>" and routes logging there.
void DbgSetLogFileDesktop(LPCTSTR szFile)
{
    TCHAR szLogPath[512];
    SHGetFolderPath(nullptr, CSIDL_DESKTOPDIRECTORY, nullptr, 0, szLogPath);
    PathAppend(szLogPath, szFile);
    DbgSetLogFile(szLogPath);
}
// Balances a prior DbgSetLogFile call; the log handle is flushed and closed
// only when the last outstanding reference is released.
void DbgCloseLogFile()
{
    LONG count = InterlockedDecrement(&hOutputCounter);
    if (count == 0 && m_hOutput != INVALID_HANDLE_VALUE)
    {
        FlushFileBuffers(m_hOutput);
        CloseHandle(m_hOutput);
        m_hOutput = INVALID_HANDLE_VALUE;
    }
}
#endif
// Splits `text` on any character in `separators`, appending the non-empty
// tokens to `words` (consecutive separators produce no empty tokens).
// Fix: the original compared unsigned size_t values with `start >= 0` and
// `stop < 0`, which are tautologically true/false and only worked because
// std::string::npos happens to exceed n; npos is now tested explicitly.
void split(const std::string &text, const std::string &separators, std::list<std::string> &words)
{
    const size_t n = text.length();
    // Position of the first non-separator character (start of next token).
    size_t start = text.find_first_not_of(separators);
    while (start != std::string::npos && start < n)
    {
        // Token runs until the next separator, or the end of the string.
        size_t stop = text.find_first_of(separators, start);
        if (stop == std::string::npos || stop > n)
            stop = n;
        words.push_back(text.substr(start, stop - start));
        start = text.find_first_not_of(separators, stop + 1);
    }
}
// Searches the filter graph for a filter with the given CLSID.
// Returns the matching filter still holding the enumerator's reference
// (i.e. AddRef'd — the caller must release it), or nullptr if not found.
IBaseFilter *FindFilter(const GUID &clsid, IFilterGraph *pFG)
{
    IBaseFilter *pFilter = nullptr;
    IEnumFilters *pEnumFilters = nullptr;
    if (pFG && SUCCEEDED(pFG->EnumFilters(&pEnumFilters)))
    {
        for (IBaseFilter *pBF = nullptr; S_OK == pEnumFilters->Next(1, &pBF, 0);)
        {
            GUID clsid2;
            if (SUCCEEDED(pBF->GetClassID(&clsid2)) && clsid == clsid2)
            {
                // Keep the reference on the match; ownership passes to the caller.
                pFilter = pBF;
                break;
            }
            SafeRelease(&pBF);
        }
        SafeRelease(&pEnumFilters);
    }
    return pFilter;
}
// TRUE if a filter with the given CLSID is present in the graph.
BOOL FilterInGraph(const GUID &clsid, IFilterGraph *pFG)
{
    IBaseFilter *pFilter = FindFilter(clsid, pFG);
    const BOOL bFound = (pFilter != nullptr);
    SafeRelease(&pFilter);
    return bFound;
}
// TRUE if a filter with the given CLSID is in the graph AND one of its
// connected input pins carries a media type with the given subtype.
BOOL FilterInGraphWithInputSubtype(const GUID &clsid, IFilterGraph *pFG, const GUID &clsidSubtype)
{
    BOOL bFound = FALSE;
    IBaseFilter *pFilter = nullptr;
    pFilter = FindFilter(clsid, pFG);
    if (pFilter)
    {
        IEnumPins *pPinEnum = nullptr;
        pFilter->EnumPins(&pPinEnum);
        IPin *pPin = nullptr;
        while ((S_OK == pPinEnum->Next(1, &pPin, nullptr)) && pPin)
        {
            PIN_DIRECTION dir;
            pPin->QueryDirection(&dir);
            if (dir == PINDIR_INPUT)
            {
                // Check the subtype of the pin's current connection.
                AM_MEDIA_TYPE mt;
                pPin->ConnectionMediaType(&mt);
                if (mt.subtype == clsidSubtype)
                {
                    bFound = TRUE;
                }
                FreeMediaType(mt);
            }
            SafeRelease(&pPin);
            if (bFound)
                break;
        }
        SafeRelease(&pPinEnum);
        SafeRelease(&pFilter);
    }
    return bFound;
}
// Converts a GUID to its registry-style wide string form ("{...}").
// Falls back to the string form of GUID_NULL if conversion fails.
std::wstring WStringFromGUID(const GUID &guid)
{
    WCHAR null[128] = {0}, buff[128];
    StringFromGUID2(GUID_NULL, null, 127);
    return std::wstring(StringFromGUID2(guid, buff, 127) > 0 ? buff : null);
}
// MultiByteToWideChar wrapper that always leaves lpWideCharStr terminated
// (truncating on insufficient buffer) and, on invalid-character failures,
// retries: UTF-8 input falls back to the ANSI code page, and strict
// conversions retry without MB_ERR_INVALID_CHARS via UTF-8.
int SafeMultiByteToWideChar(UINT CodePage, DWORD dwFlags, LPCSTR lpMultiByteStr, int cbMultiByte, LPWSTR lpWideCharStr,
                            int cchWideChar)
{
    int len = MultiByteToWideChar(CodePage, dwFlags, lpMultiByteStr, cbMultiByte, lpWideCharStr, cchWideChar);
    if (cchWideChar)
    {
        if (len == cchWideChar || (len == 0 && GetLastError() == ERROR_INSUFFICIENT_BUFFER))
        {
            // Buffer full: force zero-termination on the last slot.
            lpWideCharStr[cchWideChar - 1] = 0;
        }
        else if (len == 0)
        {
            DWORD dwErr = GetLastError();
            if (dwErr == ERROR_NO_UNICODE_TRANSLATION && CodePage == CP_UTF8)
            {
                return SafeMultiByteToWideChar(CP_ACP, dwFlags, lpMultiByteStr, cbMultiByte, lpWideCharStr,
                                               cchWideChar);
            }
            else if (dwErr == ERROR_NO_UNICODE_TRANSLATION && (dwFlags & MB_ERR_INVALID_CHARS))
            {
                return SafeMultiByteToWideChar(CP_UTF8, (dwFlags & ~MB_ERR_INVALID_CHARS), lpMultiByteStr, cbMultiByte,
                                               lpWideCharStr, cchWideChar);
            }
            // Unrecoverable failure: return an empty string.
            lpWideCharStr[0] = 0;
        }
    }
    return len;
}
// WideCharToMultiByte wrapper that always leaves lpMultiByteStr terminated:
// truncates with a trailing NUL on insufficient buffer, returns an empty
// string on any other failure.
int SafeWideCharToMultiByte(UINT CodePage, DWORD dwFlags, LPCWSTR lpWideCharStr, int cchWideChar, LPSTR lpMultiByteStr,
                            int cbMultiByte, LPCSTR lpDefaultChar, LPBOOL lpUsedDefaultChar)
{
    int len = WideCharToMultiByte(CodePage, dwFlags, lpWideCharStr, cchWideChar, lpMultiByteStr, cbMultiByte,
                                  lpDefaultChar, lpUsedDefaultChar);
    if (cbMultiByte)
    {
        if (len == cbMultiByte || (len == 0 && GetLastError() == ERROR_INSUFFICIENT_BUFFER))
        {
            lpMultiByteStr[cbMultiByte - 1] = 0;
        }
        else if (len == 0)
        {
            lpMultiByteStr[0] = 0;
        }
    }
    return len;
}
// Converts a multibyte string to a freshly CoTaskMemAlloc'd wide string
// (caller frees with CoTaskMemFree). On invalid-character failures retries:
// UTF-8 falls back to ANSI, strict conversions retry via UTF-8 without
// MB_ERR_INVALID_CHARS. Returns NULL on unrecoverable failure.
LPWSTR CoTaskGetWideCharFromMultiByte(UINT CodePage, DWORD dwFlags, LPCSTR lpMultiByteStr, int cbMultiByte)
{
    // First call computes the required length, second performs the conversion.
    int len = MultiByteToWideChar(CodePage, dwFlags, lpMultiByteStr, cbMultiByte, nullptr, 0);
    if (len)
    {
        LPWSTR pszWideString = (LPWSTR)CoTaskMemAlloc(len * sizeof(WCHAR));
        MultiByteToWideChar(CodePage, dwFlags, lpMultiByteStr, cbMultiByte, pszWideString, len);
        return pszWideString;
    }
    else
    {
        DWORD dwErr = GetLastError();
        if (dwErr == ERROR_NO_UNICODE_TRANSLATION && CodePage == CP_UTF8)
        {
            return CoTaskGetWideCharFromMultiByte(CP_ACP, dwFlags, lpMultiByteStr, cbMultiByte);
        }
        else if (dwErr == ERROR_NO_UNICODE_TRANSLATION && (dwFlags & MB_ERR_INVALID_CHARS))
        {
            return CoTaskGetWideCharFromMultiByte(CP_UTF8, (dwFlags & ~MB_ERR_INVALID_CHARS), lpMultiByteStr,
                                                  cbMultiByte);
        }
    }
    return NULL;
}
// Converts a wide string to a freshly CoTaskMemAlloc'd multibyte string
// (caller frees with CoTaskMemFree); NULL on conversion failure.
LPSTR CoTaskGetMultiByteFromWideChar(UINT CodePage, DWORD dwFlags, LPCWSTR lpMultiByteStr, int cbMultiByte)
{
    // First pass computes the required size, second performs the conversion.
    const int len = WideCharToMultiByte(CodePage, dwFlags, lpMultiByteStr, cbMultiByte, nullptr, 0, nullptr, nullptr);
    if (!len)
        return NULL;

    LPSTR pszMBString = (LPSTR)CoTaskMemAlloc(len * sizeof(char));
    WideCharToMultiByte(CodePage, dwFlags, lpMultiByteStr, cbMultiByte, pszMBString, len, nullptr, nullptr);
    return pszMBString;
}
// Converts a narrow C string (treated as UTF-8, with the helper's internal
// fallbacks) to a caller-owned BSTR. Returns nullptr for null/empty input
// or on conversion failure.
// Fix: removed the unused local `bool acp = false;`.
BSTR ConvertCharToBSTR(const char *sz)
{
    if (!sz || strlen(sz) == 0)
        return nullptr;
    WCHAR *wide = CoTaskGetWideCharFromMultiByte(CP_UTF8, MB_ERR_INVALID_CHARS, sz, -1);
    if (!wide)
        return nullptr;
    BSTR bstr = SysAllocString(wide);
    CoTaskMemFree(wide);
    return bstr;
}
// Returns the owning filter of a pin (reference from QueryPinInfo is passed
// to the caller), or nullptr on failure.
IBaseFilter *GetFilterFromPin(IPin *pPin)
{
    CheckPointer(pPin, nullptr);

    PIN_INFO pi;
    if (FAILED(pPin->QueryPinInfo(&pi)))
        return nullptr;

    return pi.pFilter;
}
// Recursively removes everything connected downstream of an output pin:
// disconnects both ends of the connection and removes the peer filter from
// the graph after nuking its own downstream chain (via the filter overload).
HRESULT NukeDownstream(IFilterGraph *pGraph, IPin *pPin)
{
    PIN_DIRECTION dir;
    if (pPin)
    {
        IPin *pPinTo = nullptr;
        if (FAILED(pPin->QueryDirection(&dir)))
            return E_FAIL;
        if (dir == PINDIR_OUTPUT)
        {
            if (SUCCEEDED(pPin->ConnectedTo(&pPinTo)) && pPinTo)
            {
                if (IBaseFilter *pFilter = GetFilterFromPin(pPinTo))
                {
                    // Depth-first: clear the peer's downstream chain before
                    // removing the peer itself.
                    NukeDownstream(pGraph, pFilter);
                    pGraph->Disconnect(pPinTo);
                    pGraph->Disconnect(pPin);
                    pGraph->RemoveFilter(pFilter);
                    SafeRelease(&pFilter);
                }
                SafeRelease(&pPinTo);
            }
        }
    }
    return S_OK;
}
// Walk all pins of the filter and nuke whatever is connected downstream
// of each (only output pins have any effect in the pin overload).
HRESULT NukeDownstream(IFilterGraph *pGraph, IBaseFilter *pFilter)
{
    IEnumPins *pEnumPins = nullptr;
    if (pFilter && SUCCEEDED(pFilter->EnumPins(&pEnumPins)))
    {
        IPin *pPin = nullptr;
        while (S_OK == pEnumPins->Next(1, &pPin, 0))
        {
            NukeDownstream(pGraph, pPin);
            SafeRelease(&pPin);
            pPin = nullptr;
        }
        SafeRelease(&pEnumPins);
    }
    return S_OK;
}
// pPin - pin of our filter to start searching
// refiid - guid of the interface to find
// pUnknown - variable that'll receive the interface
// Walks the graph starting at pPin's connection, querying each filter for
// refiid; recurses through same-direction pins until the interface is found.
// On success *pUnknown holds the (AddRef'd) interface; E_NOINTERFACE otherwise.
// NOTE(review): "Inteface" typo is part of the public name — cannot be renamed here.
HRESULT FindIntefaceInGraph(IPin *pPin, REFIID refiid, void **pUnknown)
{
    PIN_DIRECTION dir;
    pPin->QueryDirection(&dir);
    IPin *pOtherPin = nullptr;
    if (SUCCEEDED(pPin->ConnectedTo(&pOtherPin)) && pOtherPin)
    {
        IBaseFilter *pFilter = GetFilterFromPin(pOtherPin);
        SafeRelease(&pOtherPin);
        // Ask the connected filter directly first.
        HRESULT hrFilter = pFilter->QueryInterface(refiid, pUnknown);
        if (FAILED(hrFilter))
        {
            // Otherwise continue through its pins in the same direction.
            IEnumPins *pPinEnum = nullptr;
            pFilter->EnumPins(&pPinEnum);
            HRESULT hrPin = E_FAIL;
            for (IPin *pOtherPin2 = nullptr; pPinEnum->Next(1, &pOtherPin2, 0) == S_OK; pOtherPin2 = nullptr)
            {
                PIN_DIRECTION pinDir;
                pOtherPin2->QueryDirection(&pinDir);
                if (dir == pinDir)
                {
                    hrPin = FindIntefaceInGraph(pOtherPin2, refiid, pUnknown);
                }
                SafeRelease(&pOtherPin2);
                if (SUCCEEDED(hrPin))
                    break;
            }
            hrFilter = hrPin;
            SafeRelease(&pPinEnum);
        }
        SafeRelease(&pFilter);
        if (SUCCEEDED(hrFilter))
        {
            return S_OK;
        }
    }
    return E_NOINTERFACE;
}
// pPin - pin of our filter to start searching
// refiid - guid of the interface to find
// pUnknown - variable that'll receive the interface
// Like FindIntefaceInGraph, but queries the connected *pin* (not the filter)
// for refiid, recursing through same-direction pins of the peer filter.
// On success *pUnknown holds the (AddRef'd) interface; E_NOINTERFACE otherwise.
HRESULT FindPinIntefaceInGraph(IPin *pPin, REFIID refiid, void **pUnknown)
{
    PIN_DIRECTION dir;
    pPin->QueryDirection(&dir);
    IPin *pOtherPin = nullptr;
    if (SUCCEEDED(pPin->ConnectedTo(&pOtherPin)) && pOtherPin)
    {
        IBaseFilter *pFilter = nullptr;
        // Ask the connected pin directly first.
        HRESULT hrFilter = pOtherPin->QueryInterface(refiid, pUnknown);
        if (FAILED(hrFilter))
        {
            // Otherwise recurse through the peer filter's same-direction pins.
            pFilter = GetFilterFromPin(pOtherPin);
            IEnumPins *pPinEnum = nullptr;
            pFilter->EnumPins(&pPinEnum);
            HRESULT hrPin = E_FAIL;
            for (IPin *pOtherPin2 = nullptr; pPinEnum->Next(1, &pOtherPin2, 0) == S_OK; pOtherPin2 = nullptr)
            {
                PIN_DIRECTION pinDir;
                pOtherPin2->QueryDirection(&pinDir);
                if (dir == pinDir)
                {
                    hrPin = FindPinIntefaceInGraph(pOtherPin2, refiid, pUnknown);
                }
                SafeRelease(&pOtherPin2);
                if (SUCCEEDED(hrPin))
                    break;
            }
            hrFilter = hrPin;
            SafeRelease(&pPinEnum);
        }
        SafeRelease(&pFilter);
        SafeRelease(&pOtherPin);
        if (SUCCEEDED(hrFilter))
        {
            return S_OK;
        }
    }
    return E_NOINTERFACE;
}
// pPin - pin of our filter to start searching
// guid - guid of the filter to find
// ppFilter - variable that'll receive a AddRef'd reference to the filter
// Searches the connected graph (starting at pPin, or at pPin's own filter in
// the opposite direction when bReverse is set) for a filter with the given
// CLSID. On success *ppFilter receives the filter with the reference obtained
// from GetFilterFromPin transferred to the caller; E_NOINTERFACE otherwise.
HRESULT FindFilterSafe(IPin *pPin, const GUID &guid, IBaseFilter **ppFilter, BOOL bReverse)
{
    CheckPointer(ppFilter, E_POINTER);
    CheckPointer(pPin, E_POINTER);
    HRESULT hr = S_OK;
    PIN_DIRECTION dir;
    pPin->QueryDirection(&dir);
    IPin *pOtherPin = nullptr;
    if (bReverse)
    {
        // Reverse mode: search starting from this pin's own filter, walking
        // in the opposite direction; take an extra ref to balance the release
        // performed below.
        dir = (dir == PINDIR_INPUT) ? PINDIR_OUTPUT : PINDIR_INPUT;
        pOtherPin = pPin;
        pPin->AddRef();
        hr = S_OK;
    }
    else
    {
        hr = pPin->ConnectedTo(&pOtherPin);
    }
    if (SUCCEEDED(hr) && pOtherPin)
    {
        IBaseFilter *pFilter = GetFilterFromPin(pOtherPin);
        SafeRelease(&pOtherPin);
        HRESULT hrFilter = E_NOINTERFACE;
        CLSID filterGUID;
        if (SUCCEEDED(pFilter->GetClassID(&filterGUID)))
        {
            if (filterGUID == guid)
            {
                // Match: hand the filter reference to the caller (no release).
                *ppFilter = pFilter;
                hrFilter = S_OK;
            }
            else
            {
                // No match: recurse through same-direction pins, then drop
                // our reference on this filter.
                IEnumPins *pPinEnum = nullptr;
                pFilter->EnumPins(&pPinEnum);
                HRESULT hrPin = E_FAIL;
                for (IPin *pOtherPin2 = nullptr; pPinEnum->Next(1, &pOtherPin2, 0) == S_OK; pOtherPin2 = nullptr)
                {
                    PIN_DIRECTION pinDir;
                    pOtherPin2->QueryDirection(&pinDir);
                    if (dir == pinDir)
                    {
                        hrPin = FindFilterSafe(pOtherPin2, guid, ppFilter);
                    }
                    SafeRelease(&pOtherPin2);
                    if (SUCCEEDED(hrPin))
                        break;
                }
                hrFilter = hrPin;
                SafeRelease(&pPinEnum);
                SafeRelease(&pFilter);
            }
        }
        if (SUCCEEDED(hrFilter))
        {
            return S_OK;
        }
    }
    return E_NOINTERFACE;
}
// pPin - pin of our filter to start searching
// guid - guid of the filter to find
// ppFilter - variable that'll receive a AddRef'd reference to the filter
// TRUE if, walking backwards from pPin's connection, any opposite-direction
// pin of an upstream filter offers a media type with the given majortype;
// same-direction pins are followed recursively.
// NOTE(review): hrFilter and hrPin are assigned but never read — dead locals.
BOOL HasSourceWithType(IPin *pPin, const GUID &mediaType)
{
    CheckPointer(pPin, false);
    BOOL bFound = FALSE;
    PIN_DIRECTION dir;
    pPin->QueryDirection(&dir);
    IPin *pOtherPin = nullptr;
    if (SUCCEEDED(pPin->ConnectedTo(&pOtherPin)) && pOtherPin)
    {
        IBaseFilter *pFilter = GetFilterFromPin(pOtherPin);
        HRESULT hrFilter = E_NOINTERFACE;
        IEnumPins *pPinEnum = nullptr;
        pFilter->EnumPins(&pPinEnum);
        HRESULT hrPin = E_FAIL;
        for (IPin *pOtherPin2 = nullptr; !bFound && pPinEnum->Next(1, &pOtherPin2, 0) == S_OK; pOtherPin2 = nullptr)
        {
            // Skip the pin we arrived through.
            if (pOtherPin2 != pOtherPin)
            {
                PIN_DIRECTION pinDir;
                pOtherPin2->QueryDirection(&pinDir);
                if (dir != pinDir)
                {
                    // Opposite direction: inspect the offered media types.
                    IEnumMediaTypes *pMediaTypeEnum = nullptr;
                    if (SUCCEEDED(pOtherPin2->EnumMediaTypes(&pMediaTypeEnum)))
                    {
                        for (AM_MEDIA_TYPE *mt = nullptr; pMediaTypeEnum->Next(1, &mt, 0) == S_OK; mt = nullptr)
                        {
                            if (mt->majortype == mediaType)
                            {
                                bFound = TRUE;
                            }
                            DeleteMediaType(mt);
                        }
                        SafeRelease(&pMediaTypeEnum);
                    }
                }
                else
                {
                    // Same direction: continue walking upstream.
                    bFound = HasSourceWithType(pOtherPin2, mediaType);
                }
            }
            SafeRelease(&pOtherPin2);
        }
        SafeRelease(&pPinEnum);
        SafeRelease(&pFilter);
        SafeRelease(&pOtherPin);
    }
    return bFound;
}
// Similar to HasSourceWithType but also checks forward pins for future backwards joins
// Similar to HasSourceWithType but also checks forward pins for future backwards joins:
// first walks backwards from pPinInput, then follows pPinOutput's connection
// forward, going backwards from other input pins and forward from other
// output pins of each downstream filter.
// NOTE(review): hrFilter and hrPin are assigned but never read — dead locals.
BOOL HasSourceWithTypeAdvanced(IPin *pPinInput, IPin *pPinOutput, const GUID &mediaType)
{
    // check the input pin backwards first
    if (pPinInput && HasSourceWithType(pPinInput, mediaType))
        return true;
    if (pPinOutput == NULL)
        return false;
    // and check the tree forwards
    BOOL bFound = FALSE;
    IPin *pOtherPin = nullptr;
    if (SUCCEEDED(pPinOutput->ConnectedTo(&pOtherPin)) && pOtherPin)
    {
        IBaseFilter *pFilter = GetFilterFromPin(pOtherPin);
        HRESULT hrFilter = E_NOINTERFACE;
        IEnumPins *pPinEnum = nullptr;
        pFilter->EnumPins(&pPinEnum);
        // Iterate over pins of the filter..
        HRESULT hrPin = E_FAIL;
        for (IPin *pOtherPin2 = nullptr; !bFound && pPinEnum->Next(1, &pOtherPin2, 0) == S_OK; pOtherPin2 = nullptr)
        {
            // ignore the pin we're connected to
            if (pOtherPin2 != pOtherPin)
            {
                PIN_DIRECTION pinDir;
                pOtherPin2->QueryDirection(&pinDir);
                // if its another input, go backwards there
                if (pinDir == PINDIR_INPUT)
                {
                    bFound = HasSourceWithType(pOtherPin2, mediaType);
                }
                // if its an output, go forwards
                else if (pinDir == PINDIR_OUTPUT)
                {
                    bFound = HasSourceWithTypeAdvanced(NULL, pOtherPin2, mediaType);
                }
            }
            SafeRelease(&pOtherPin2);
        }
        SafeRelease(&pPinEnum);
        SafeRelease(&pFilter);
        SafeRelease(&pOtherPin);
    }
    return bFound;
}
// TRUE if FindFilterSafe can locate a filter with the given CLSID starting
// from pPin; the found filter's reference is released immediately.
BOOL FilterInGraphSafe(IPin *pPin, const GUID &guid, BOOL bReverse)
{
    IBaseFilter *pFilter = nullptr;
    if (SUCCEEDED(FindFilterSafe(pPin, guid, &pFilter, bReverse)) && pFilter)
    {
        SafeRelease(&pFilter);
        return TRUE;
    }
    return FALSE;
}
// Writes the Xiph "lacing" encoding of v into s: a run of 0xff bytes followed
// by the remainder byte. Returns the number of bytes written.
unsigned int lav_xiphlacing(unsigned char *s, unsigned int v)
{
    unsigned int written = 0;
    for (; v >= 0xff; v -= 0xff)
    {
        *s++ = 0xff;
        written++;
    }
    // Final byte carries the remaining value (may be zero).
    *s = (unsigned char)v;
    return written + 1;
}
// Convenience overload: unpacks the media type and defers to the raw-format version.
void videoFormatTypeHandler(const AM_MEDIA_TYPE &mt, BITMAPINFOHEADER **pBMI, REFERENCE_TIME *prtAvgTime,
                            DWORD *pDwAspectX, DWORD *pDwAspectY)
{
    videoFormatTypeHandler(mt.pbFormat, &mt.formattype, pBMI, prtAvgTime, pDwAspectX, pDwAspectY);
}
// Extracts common video properties (BITMAPINFOHEADER pointer, average frame
// duration, picture aspect ratio) from any of the four supported DirectShow
// video format blocks. All output parameters are optional; unknown format
// types assert and report zero/null values. Note *pBMI points into the
// caller's format block — no copy is made.
void videoFormatTypeHandler(const BYTE *format, const GUID *formattype, BITMAPINFOHEADER **pBMI,
                            REFERENCE_TIME *prtAvgTime, DWORD *pDwAspectX, DWORD *pDwAspectY)
{
    REFERENCE_TIME rtAvg = 0;
    BITMAPINFOHEADER *bmi = nullptr;
    DWORD dwAspectX = 0, dwAspectY = 0;
    if (!format)
        goto done;
    if (*formattype == FORMAT_VideoInfo)
    {
        VIDEOINFOHEADER *vih = (VIDEOINFOHEADER *)format;
        rtAvg = vih->AvgTimePerFrame;
        bmi = &vih->bmiHeader;
    }
    else if (*formattype == FORMAT_VideoInfo2)
    {
        VIDEOINFOHEADER2 *vih2 = (VIDEOINFOHEADER2 *)format;
        rtAvg = vih2->AvgTimePerFrame;
        bmi = &vih2->bmiHeader;
        dwAspectX = vih2->dwPictAspectRatioX;
        dwAspectY = vih2->dwPictAspectRatioY;
    }
    else if (*formattype == FORMAT_MPEGVideo)
    {
        MPEG1VIDEOINFO *mp1vi = (MPEG1VIDEOINFO *)format;
        rtAvg = mp1vi->hdr.AvgTimePerFrame;
        bmi = &mp1vi->hdr.bmiHeader;
    }
    else if (*formattype == FORMAT_MPEG2Video)
    {
        MPEG2VIDEOINFO *mp2vi = (MPEG2VIDEOINFO *)format;
        rtAvg = mp2vi->hdr.AvgTimePerFrame;
        bmi = &mp2vi->hdr.bmiHeader;
        dwAspectX = mp2vi->hdr.dwPictAspectRatioX;
        dwAspectY = mp2vi->hdr.dwPictAspectRatioY;
    }
    else
    {
        // Unsupported format type; outputs stay at their zero defaults.
        ASSERT(FALSE);
    }
done:
    if (pBMI)
    {
        *pBMI = bmi;
    }
    if (prtAvgTime)
    {
        *prtAvgTime = rtAvg;
    }
    // Aspect ratio is only reported when both outputs are requested.
    if (pDwAspectX && pDwAspectY)
    {
        *pDwAspectX = dwAspectX;
        *pDwAspectY = dwAspectY;
    }
}
// Extracts common audio properties from a WAVEFORMATEX or VORBISFORMAT2
// format block. All output parameters are optional; unknown format types
// (or a null format pointer) yield zeros. The channel mask is only available
// from WAVE_FORMAT_EXTENSIBLE structures.
void audioFormatTypeHandler(const BYTE *format, const GUID *formattype, DWORD *pnSamples, WORD *pnChannels,
                            WORD *pnBitsPerSample, WORD *pnBlockAlign, DWORD *pnBytesPerSec, DWORD *pnChannelMask)
{
    DWORD nSamples = 0;
    WORD nChannels = 0;
    WORD nBitsPerSample = 0;
    WORD nBlockAlign = 0;
    DWORD nBytesPerSec = 0;
    DWORD nChannelMask = 0;
    if (!format)
        goto done;
    if (*formattype == FORMAT_WaveFormatEx)
    {
        WAVEFORMATEX *wfex = (WAVEFORMATEX *)format;
        nSamples = wfex->nSamplesPerSec;
        nChannels = wfex->nChannels;
        nBitsPerSample = wfex->wBitsPerSample;
        nBlockAlign = wfex->nBlockAlign;
        nBytesPerSec = wfex->nAvgBytesPerSec;
        // The channel mask lives in the extensible variant only; cbSize >= 22
        // guarantees the extra fields are present.
        if (wfex->wFormatTag == WAVE_FORMAT_EXTENSIBLE && wfex->cbSize >= 22)
        {
            WAVEFORMATEXTENSIBLE *wfexs = (WAVEFORMATEXTENSIBLE *)wfex;
            nChannelMask = wfexs->dwChannelMask;
        }
    }
    else if (*formattype == FORMAT_VorbisFormat2)
    {
        VORBISFORMAT2 *vf2 = (VORBISFORMAT2 *)format;
        nSamples = vf2->SamplesPerSec;
        nChannels = (WORD)vf2->Channels;
        nBitsPerSample = (WORD)vf2->BitsPerSample;
    }
done:
    if (pnSamples)
        *pnSamples = nSamples;
    if (pnChannels)
        *pnChannels = nChannels;
    if (pnBitsPerSample)
        *pnBitsPerSample = nBitsPerSample;
    if (pnBlockAlign)
        *pnBlockAlign = nBlockAlign;
    if (pnBytesPerSec)
        *pnBytesPerSec = nBytesPerSec;
    if (pnChannelMask)
        *pnChannelMask = nChannelMask;
}
// Convenience overload: unpacks the media type and defers to the raw-format version.
void getExtraData(const AM_MEDIA_TYPE &mt, BYTE *extra, size_t *extralen)
{
    return getExtraData(mt.pbFormat, &mt.formattype, mt.cbFormat, extra, extralen);
}
// Extracts codec extradata from a DirectShow format block. When `extra` is
// null, only the required size is reported through `extralen` (size-query
// mode); otherwise the extradata bytes are copied into `extra`. The Vorbis
// branch additionally synthesizes an Xiph-laced three-header layout.
// NOTE(review): `wfex`, `start` and `end` below are unused locals.
void getExtraData(const BYTE *format, const GUID *formattype, const size_t formatlen, BYTE *extra, size_t *extralen)
{
    const BYTE *extraposition = nullptr;
    size_t extralength = 0;
    if (*formattype == FORMAT_WaveFormatEx)
    {
        WAVEFORMATEX *wfex = (WAVEFORMATEX *)format;
        extraposition = format + sizeof(WAVEFORMATEX);
        // Protected against over-reads
        extralength = formatlen - sizeof(WAVEFORMATEX);
    }
    else if (*formattype == FORMAT_VorbisFormat2)
    {
        VORBISFORMAT2 *vf2 = (VORBISFORMAT2 *)format;
        BYTE *start = nullptr, *end = nullptr;
        unsigned offset = 1;
        if (extra)
        {
            // Build the Xiph lacing header: packet count (2) followed by the
            // laced sizes of the first two headers; the third is implicit.
            *extra = 2;
            offset += lav_xiphlacing(extra + offset, vf2->HeaderSize[0]);
            offset += lav_xiphlacing(extra + offset, vf2->HeaderSize[1]);
            extra += offset;
        }
        else
        {
            // Size-query mode: compute the lacing overhead into a scratch buffer.
            BYTE dummy[100];
            offset += lav_xiphlacing(dummy, vf2->HeaderSize[0]);
            offset += lav_xiphlacing(dummy, vf2->HeaderSize[1]);
        }
        extralength = vf2->HeaderSize[0] + vf2->HeaderSize[1] + vf2->HeaderSize[2];
        extralength = min(extralength, formatlen - sizeof(VORBISFORMAT2));
        if (extra && extralength)
            memcpy(extra, format + sizeof(VORBISFORMAT2), extralength);
        if (extralen)
            *extralen = extralength + offset;
        return;
    }
    else if (*formattype == FORMAT_VideoInfo)
    {
        extraposition = format + sizeof(VIDEOINFOHEADER);
        extralength = formatlen - sizeof(VIDEOINFOHEADER);
    }
    else if (*formattype == FORMAT_VideoInfo2)
    {
        extraposition = format + sizeof(VIDEOINFOHEADER2);
        extralength = formatlen - sizeof(VIDEOINFOHEADER2);
    }
    else if (*formattype == FORMAT_MPEGVideo)
    {
        MPEG1VIDEOINFO *mp1vi = (MPEG1VIDEOINFO *)format;
        extraposition = (BYTE *)mp1vi->bSequenceHeader;
        extralength = min(mp1vi->cbSequenceHeader, formatlen - FIELD_OFFSET(MPEG1VIDEOINFO, bSequenceHeader[0]));
    }
    else if (*formattype == FORMAT_MPEG2Video)
    {
        MPEG2VIDEOINFO *mp2vi = (MPEG2VIDEOINFO *)format;
        extraposition = (BYTE *)mp2vi->dwSequenceHeader;
        extralength = min(mp2vi->cbSequenceHeader, formatlen - FIELD_OFFSET(MPEG2VIDEOINFO, dwSequenceHeader[0]));
    }
    else if (*formattype == FORMAT_SubtitleInfo)
    {
        SUBTITLEINFO *sub = (SUBTITLEINFO *)format;
        extraposition = format + sub->dwOffset;
        extralength = formatlen - sub->dwOffset;
    }
    if (extra && extralength)
        memcpy(extra, extraposition, extralength);
    if (extralen)
        *extralen = extralength;
}
// Copy all FFmpeg-style side data entries from *sd into the packet dst,
// then clear the caller's side data pointer (ownership semantics: the data
// is duplicated into the packet, the source reference is dropped).
// If sd is empty, the packet's side data fields are reset instead.
void CopyMediaSideDataFF(AVPacket *dst, const MediaSideDataFFMpeg **sd)
{
    if (!dst)
        return;
    if (!sd || !*sd)
    {
        dst->side_data = nullptr;
        dst->side_data_elems = 0;
        return;
    }
    // add sidedata to the packet
    for (int i = 0; i < (*sd)->side_data_elems; i++)
    {
        uint8_t *ptr = av_packet_new_side_data(dst, (*sd)->side_data[i].type, (*sd)->side_data[i].size);
        // av_packet_new_side_data returns nullptr on allocation failure;
        // previously this was passed straight to memcpy (null deref)
        if (ptr)
            memcpy(ptr, (*sd)->side_data[i].data, (*sd)->side_data[i].size);
    }
    *sd = nullptr;
}
// Windows 7 reports version 6.1; anything with a higher major version
// (8.x maps to 6.2/6.3, 10 to 10.0) also qualifies.
BOOL IsWindows7OrNewer()
{
    if (g_osInfo.dwMajorVersion > 6)
        return TRUE;
    return g_osInfo.dwMajorVersion == 6 && g_osInfo.dwMinorVersion >= 1;
}
// Windows 8 reports version 6.2; newer major versions also qualify.
BOOL IsWindows8OrNewer()
{
    if (g_osInfo.dwMajorVersion > 6)
        return TRUE;
    return g_osInfo.dwMajorVersion == 6 && g_osInfo.dwMinorVersion >= 2;
}
// True on Windows 10 (major version 10) and anything newer.
BOOL IsWindows10OrNewer()
{
    return (g_osInfo.dwMajorVersion >= 10);
}
// True when running Windows 10 at build >= dwBuild, or any later major version.
BOOL IsWindows10BuildOrNewer(DWORD dwBuild)
{
    if (g_osInfo.dwMajorVersion > 10)
        return TRUE;
    return g_osInfo.dwMajorVersion == 10 && g_osInfo.dwBuildNumber >= dwBuild;
}
// printf-style logging to the Windows debugger output (OutputDebugString).
// The formatted message is right-trimmed of whitespace and terminated with
// CR/LF/NUL; output longer than the buffer is truncated.
void __cdecl debugprintf(LPCWSTR format, ...)
{
    WCHAR buf[4096], *p = buf;
    va_list args;
    int n;

    va_start(args, format);
    n = _vsnwprintf_s(p, 4096, 4096 - 3, format, args); // buf-3 is room for CR/LF/NUL
    va_end(args);

    p += (n < 0) ? (4096 - 3) : n; // n < 0 signals truncation: buffer is full
    // Strip trailing whitespace. iswspace is the correct classifier for WCHAR;
    // the previous isspace() call is undefined behavior for wide characters
    // outside the unsigned char range.
    while (p > buf && iswspace(p[-1]))
        *--p = L'\0';
    *p++ = L'\r';
    *p++ = L'\n';
    *p = L'\0';

    OutputDebugString(buf);
}
// Check whether the current process executable is blacklisted: look up the
// process file name as a DWORD value under |subkey|, first in
// HKEY_LOCAL_MACHINE, then in HKEY_CURRENT_USER. The first hive that opens
// successfully decides the result (non-zero value => blacklisted).
BOOL CheckApplicationBlackList(LPCTSTR subkey)
{
    HRESULT hr;
    WCHAR modulePath[1024];
    GetModuleFileName(NULL, modulePath, 1024);
    WCHAR *processName = PathFindFileName(modulePath);

    const HKEY roots[] = {HKEY_LOCAL_MACHINE, HKEY_CURRENT_USER};
    for (HKEY root : roots)
    {
        CRegistry reg = CRegistry(root, subkey, hr, TRUE);
        if (SUCCEEDED(hr))
        {
            DWORD dwVal = reg.ReadDWORD(processName, hr);
            return SUCCEEDED(hr) && dwVal;
        }
    }
    return FALSE;
}
| 28,250
|
C++
|
.cpp
| 841
| 25.482759
| 119
| 0.601759
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| true
| true
| false
|
22,098
|
BaseDSPropPage.cpp
|
Nevcairiel_LAVFilters/common/DSUtilLite/BaseDSPropPage.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "BaseDSPropPage.h"
// Forward construction to the DirectShow base-class property page; no
// additional state is initialized here.
CBaseDSPropPage::CBaseDSPropPage(LPCTSTR pName, __inout_opt LPUNKNOWN pUnk, int DialogId, int TitleId)
    : CBasePropertyPage(pName, pUnk, DialogId, TitleId)
{
}
// Create a topmost tooltip window attached to |parent| and configure its
// delays (milliseconds): timePop = how long a tip stays visible, timeInit =
// delay before it first appears, timeReshow = delay when moving between tools.
// The maximum tip width is fixed at 470 pixels.
HWND CBaseDSPropPage::createHintWindow(HWND parent, int timePop, int timeInit, int timeReshow)
{
    const DWORD tipStyle = WS_POPUP | TTS_NOPREFIX | TTS_ALWAYSTIP;
    HWND hTip = CreateWindowEx(WS_EX_TOPMOST, TOOLTIPS_CLASS, nullptr, tipStyle, CW_USEDEFAULT, CW_USEDEFAULT,
                               CW_USEDEFAULT, CW_USEDEFAULT, parent, nullptr, nullptr, nullptr);

    SetWindowPos(hTip, HWND_TOPMOST, 0, 0, 0, 0, SWP_NOMOVE | SWP_NOSIZE | SWP_NOACTIVATE);

    SendMessage(hTip, TTM_SETDELAYTIME, TTDT_AUTOPOP, MAKELONG(timePop, 0));
    SendMessage(hTip, TTM_SETDELAYTIME, TTDT_INITIAL, MAKELONG(timeInit, 0));
    SendMessage(hTip, TTM_SETDELAYTIME, TTDT_RESHOW, MAKELONG(timeReshow, 0));
    SendMessage(hTip, TTM_SETMAXTIPWIDTH, 0, 470);

    return hTip;
}
// Register tooltip |text| for the dialog control with resource id |id|,
// lazily creating the shared tooltip window on first use.
// Returns the TOOLINFO that was registered (e.g. for later TTM_DELTOOL).
TOOLINFO CBaseDSPropPage::addHint(int id, const LPWSTR text)
{
    if (!m_hHint)
        m_hHint = createHintWindow(m_Dlg, 15000);

    // Zero-initialize: previously members not explicitly set below (hinst,
    // lParam, rect, ...) were passed to the tooltip control uninitialized.
    TOOLINFO ti = {0};
    ti.cbSize = sizeof(TOOLINFO);
    ti.uFlags = TTF_SUBCLASS | TTF_IDISHWND; // uId holds a window handle
    ti.hwnd = m_Dlg;
    ti.uId = (LPARAM)GetDlgItem(m_Dlg, id);
    ti.lpszText = text;
    SendMessage(m_hHint, TTM_ADDTOOL, 0, (LPARAM)&ti);
    return ti;
}
// Append a column to a list view at index |ncol| with width |w|, caption
// |txt|, and right- or left-aligned text; |ncol| is advanced to the next
// free column index.
void CBaseDSPropPage::ListView_AddCol(HWND hlv, int &ncol, int w, const wchar_t *txt, bool right)
{
    LVCOLUMN col;
    col.mask = LVCF_FMT | LVCF_WIDTH | LVCF_TEXT | LVCF_SUBITEM;
    col.fmt = right ? LVCFMT_RIGHT : LVCFMT_LEFT;
    col.cx = w;
    col.pszText = (LPWSTR)txt;
    col.iSubItem = ncol;

    ListView_InsertColumn(hlv, ncol, &col);
    ncol++;
}
// Display the property page dialog of |pFilter| (if it implements
// ISpecifyPropertyPages) as a modal frame owned by |hwndOwner|.
// Returns the failing HRESULT if any step fails.
//
// Fixes over the previous version: QueryFilterInfo's result is checked before
// FilterInfo is used (it was read uninitialized on failure), the IUnknown
// QueryInterface result is checked before Release (null deref), and GetPages'
// HRESULT is checked before caGUID is consumed.
HRESULT CBaseDSPropPage::ShowPropPageDialog(IBaseFilter *pFilter, HWND hwndOwner)
{
    CheckPointer(pFilter, E_INVALIDARG);
    CoInitialize(nullptr);

    // Get PropertyPages interface
    ISpecifyPropertyPages *pProp = nullptr;
    HRESULT hr = pFilter->QueryInterface<ISpecifyPropertyPages>(&pProp);
    if (SUCCEEDED(hr) && pProp)
    {
        // Get the filter's name and IUnknown pointer.
        FILTER_INFO FilterInfo = {};
        hr = pFilter->QueryFilterInfo(&FilterInfo);
        // We don't need the graph, so don't sit on a ref to it
        if (SUCCEEDED(hr) && FilterInfo.pGraph)
            FilterInfo.pGraph->Release();

        IUnknown *pFilterUnk = nullptr;
        hr = pFilter->QueryInterface<IUnknown>(&pFilterUnk);
        if (SUCCEEDED(hr) && pFilterUnk)
        {
            // Show the page.
            CAUUID caGUID = {0, nullptr};
            hr = pProp->GetPages(&caGUID);
            if (SUCCEEDED(hr))
            {
                hr = OleCreatePropertyFrame(hwndOwner,           // Parent window
                                            0, 0,                // Reserved
                                            FilterInfo.achName,  // Caption for the dialog box
                                            1,                   // Number of objects (just the filter)
                                            &pFilterUnk,         // Array of object pointers.
                                            caGUID.cElems,       // Number of property pages
                                            caGUID.pElems,       // Array of property page CLSIDs
                                            0,                   // Locale identifier
                                            0, nullptr           // Reserved
                );
                CoTaskMemFree(caGUID.pElems);
            }
            pFilterUnk->Release();
        }
        pProp->Release();
    }
    CoUninitialize();
    return hr;
}
| 4,237
|
C++
|
.cpp
| 99
| 35.30303
| 118
| 0.635571
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| true
| true
| true
| false
|
22,099
|
filterreg.cpp
|
Nevcairiel_LAVFilters/common/DSUtilLite/filterreg.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Initial design and concept by Gabest and the MPC-HC Team, copyright under GPLv2
*/
#include "stdafx.h"
#include "DShowUtil.h"
// Convert a GUID to its registry string form ("{XXXXXXXX-...}").
// Falls back to the string form of GUID_NULL if conversion fails.
std::wstring CStringFromGUID(const GUID &guid)
{
    WCHAR buffer[128];
    if (StringFromGUID2(guid, buffer, 127) > 0)
        return std::wstring(buffer);

    WCHAR fallback[128] = {0};
    StringFromGUID2(GUID_NULL, fallback, 127);
    return std::wstring(fallback);
}
// filter registration helpers
// Delete |szSubkey| under HKEY_CLASSES_ROOT\|szKey|. With an empty subkey the
// call only verifies that the key opens. Returns true on success.
bool DeleteRegKey(std::wstring szKey, std::wstring szSubkey)
{
    HKEY hKey = nullptr;
    LONG status = ::RegOpenKeyEx(HKEY_CLASSES_ROOT, szKey.c_str(), 0, KEY_ALL_ACCESS, &hKey);
    if (status != ERROR_SUCCESS)
        return false;

    if (!szSubkey.empty())
        status = ::RegDeleteKey(hKey, szSubkey.c_str());
    ::RegCloseKey(hKey);
    return status == ERROR_SUCCESS;
}
// Create (or open) HKEY_CLASSES_ROOT\|szKey|[\|szSubkey|] and write the
// REG_SZ value |szValueName| = |szValue|. An empty |szValue| only creates
// the key. Returns true on success.
bool SetRegKeyValue(std::wstring szKey, std::wstring szSubkey, std::wstring szValueName, std::wstring szValue)
{
    if (!szSubkey.empty())
        szKey += _T("\\") + szSubkey;

    HKEY hKey = nullptr;
    LONG status =
        ::RegCreateKeyEx(HKEY_CLASSES_ROOT, szKey.c_str(), 0, 0, REG_OPTION_NON_VOLATILE, KEY_ALL_ACCESS, 0, &hKey, 0);
    if (status != ERROR_SUCCESS)
        return false;

    if (!szValue.empty())
    {
        // size includes the terminating NUL, in bytes
        const DWORD cbData = (DWORD)(_tcslen(szValue.c_str()) + 1) * sizeof(TCHAR);
        status = ::RegSetValueEx(hKey, szValueName.c_str(), 0, REG_SZ,
                                 reinterpret_cast<BYTE *>(const_cast<LPTSTR>(szValue.c_str())), cbData);
    }
    ::RegCloseKey(hKey);
    return status == ERROR_SUCCESS;
}
// Convenience overload: write |szValue| as the default (unnamed) value of
// HKEY_CLASSES_ROOT\|szKey|[\|szSubkey|].
bool SetRegKeyValue(std::wstring szKey, std::wstring szSubkey, std::wstring szValue)
{
    return SetRegKeyValue(szKey, szSubkey, _T(""), szValue);
}
void RegisterSourceFilter(const CLSID &clsid, const GUID &subtype2, LPCWSTR chkbytes, ...)
{
std::wstring null = CStringFromGUID(GUID_NULL);
std::wstring majortype = CStringFromGUID(MEDIATYPE_Stream);
std::wstring subtype = CStringFromGUID(subtype2);
SetRegKeyValue(_T("Media Type\\") + majortype, subtype, _T("0"), chkbytes);
SetRegKeyValue(_T("Media Type\\") + majortype, subtype, _T("Source Filter"), CStringFromGUID(clsid));
DeleteRegKey(_T("Media Type\\") + null, subtype);
va_list extensions;
va_start(extensions, chkbytes);
LPCWSTR ext = nullptr;
while (ext = va_arg(extensions, LPCWSTR))
{
DeleteRegKey(_T("Media Type\\Extensions"), ext);
}
va_end(extensions);
}
// Register |clsid| as the source filter for a URL protocol (e.g. "rtsp").
void RegisterProtocolSourceFilter(const CLSID &clsid, LPCWSTR protocol)
{
    SetRegKeyValue(protocol, _T(""), _T("Source Filter"), CStringFromGUID(clsid));
}
// Remove the source-filter registration for a URL protocol.
void UnRegisterProtocolSourceFilter(LPCWSTR protocol)
{
    DeleteRegKey(protocol, _T(""));
}
void RegisterSourceFilter(const CLSID &clsid, const GUID &subtype2, std::list<LPCWSTR> chkbytes, ...)
{
std::wstring null = CStringFromGUID(GUID_NULL);
std::wstring majortype = CStringFromGUID(MEDIATYPE_Stream);
std::wstring subtype = CStringFromGUID(subtype2);
int i = 0;
std::list<LPCWSTR>::iterator it;
for (it = chkbytes.begin(); it != chkbytes.end(); ++it)
{
WCHAR idx[10] = {0};
swprintf_s(idx, _T("%d"), i);
SetRegKeyValue(_T("Media Type\\") + majortype, subtype, idx, *it);
i++;
}
SetRegKeyValue(_T("Media Type\\") + majortype, subtype, _T("Source Filter"), CStringFromGUID(clsid));
DeleteRegKey(_T("Media Type\\") + null, subtype);
va_list extensions;
va_start(extensions, chkbytes);
LPCWSTR ext = nullptr;
while (ext = va_arg(extensions, LPCWSTR))
{
DeleteRegKey(_T("Media Type\\Extensions"), ext);
}
va_end(extensions);
}
// Remove the Media Type registration of |subtype| under MEDIATYPE_Stream.
void UnRegisterSourceFilter(const GUID &subtype)
{
    DeleteRegKey(_T("Media Type\\") + CStringFromGUID(MEDIATYPE_Stream), CStringFromGUID(subtype));
}
| 4,632
|
C++
|
.cpp
| 122
| 33.098361
| 119
| 0.664957
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| true
| true
| true
| false
|
22,103
|
PopupMenu.cpp
|
Nevcairiel_LAVFilters/common/DSUtilLite/PopupMenu.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "PopupMenu.h"
// Create the owned popup menu handle; released in the destructor unless
// detached via Finish().
CPopupMenu::CPopupMenu(void)
{
    m_hMenu = CreatePopupMenu();
}
// Destroy the menu handle if it was not handed off to the caller by Finish().
CPopupMenu::~CPopupMenu(void)
{
    if (m_hMenu)
        DestroyMenu(m_hMenu);
}
// Append a radio-check style string item with command id |id| and text
// |caption|; |checked| draws the radio mark, |enabled|==FALSE greys it out.
// Returns E_UNEXPECTED if the menu handle was already detached.
HRESULT CPopupMenu::AddItem(UINT id, LPWSTR caption, BOOL checked, BOOL enabled)
{
    if (!m_hMenu)
        return E_UNEXPECTED;

    // Zero-initialize: members outside fMask were previously left
    // uninitialized before being handed to InsertMenuItem.
    MENUITEMINFO mii = {0};
    mii.cbSize = sizeof(mii);
    mii.fMask = MIIM_ID | MIIM_STATE | MIIM_FTYPE | MIIM_STRING;
    mii.fType = MFT_STRING | MFT_RADIOCHECK;
    mii.wID = id;
    mii.fState = (checked ? MFS_CHECKED : 0) | (!enabled ? MFS_DISABLED : 0);
    mii.dwTypeData = caption;
    mii.cch = (UINT)wcslen(mii.dwTypeData);

    InsertMenuItem(m_hMenu, order++, TRUE, &mii);
    return S_OK;
}
// Append a separator line to the menu.
// Returns E_UNEXPECTED if the menu handle was already detached.
HRESULT CPopupMenu::AddSeparator()
{
    if (!m_hMenu)
        return E_UNEXPECTED;

    // Zero-initialize: members outside fMask were previously left
    // uninitialized before being handed to InsertMenuItem.
    MENUITEMINFO mii = {0};
    mii.cbSize = sizeof(mii);
    mii.fMask = MIIM_TYPE;
    mii.fType = MFT_SEPARATOR;

    InsertMenuItem(m_hMenu, order++, TRUE, &mii);
    return S_OK;
}
// Append a string item |caption| that opens the submenu |hSubMenu|.
// Ownership of |hSubMenu| transfers to the menu system.
// Returns E_UNEXPECTED if the menu handle was already detached.
HRESULT CPopupMenu::AddSubmenu(HMENU hSubMenu, LPWSTR caption)
{
    if (!m_hMenu)
        return E_UNEXPECTED;

    // Zero-initialize: members outside fMask were previously left
    // uninitialized before being handed to InsertMenuItem.
    MENUITEMINFO mii = {0};
    mii.cbSize = sizeof(mii);
    mii.fMask = MIIM_FTYPE | MIIM_STRING | MIIM_SUBMENU;
    mii.fType = MFT_STRING;
    mii.hSubMenu = hSubMenu;
    mii.dwTypeData = caption;
    mii.cch = (UINT)wcslen(mii.dwTypeData);

    InsertMenuItem(m_hMenu, order++, TRUE, &mii);
    return S_OK;
}
// Detach and return the menu handle. The caller takes ownership; the
// destructor will no longer destroy the menu.
HMENU CPopupMenu::Finish()
{
    HMENU detached = m_hMenu;
    m_hMenu = nullptr;
    return detached;
}
| 2,323
|
C++
|
.cpp
| 75
| 27.466667
| 80
| 0.692102
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| true
| true
| true
| false
|
22,104
|
locale.cpp
|
Nevcairiel_LAVFilters/common/DSUtilLite/locale.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Initial design and concept by Gabest and the MPC-HC Team, copyright under GPLv2
*/
#include "stdafx.h"
#include "DShowUtil.h"
#include <string>
#include <regex>
#include <algorithm>
static struct
{
LPCSTR name, iso6392, iso6391, iso6392_2;
LCID lcid;
} s_isolangs[] = // TODO : fill LCID !!!
{
// Based on ISO-639-2, sorted by primary language code. Some manual additions of deprecated tags.
{"Afar", "aar", "aa"},
{"Abkhazian", "abk", "ab"},
{"Achinese", "ace", nullptr},
{"Acoli", "ach", nullptr},
{"Adangme", "ada", nullptr},
{"Adyghe", "ady", nullptr},
{"Afro-Asiatic (Other)", "afa", nullptr},
{"Afrihili", "afh", nullptr},
{"Afrikaans", "afr", "af", nullptr, MAKELCID(MAKELANGID(LANG_AFRIKAANS, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Ainu", "ain", nullptr},
{"Akan", "aka", "ak"},
{"Akkadian", "akk", nullptr},
{"Albanian", "sqi", "sq", "alb", MAKELCID(MAKELANGID(LANG_ALBANIAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Aleut", "ale", nullptr},
{"Algonquian languages", "alg", nullptr},
{"Southern Altai", "alt", nullptr},
{"Amharic", "amh", "am"},
{"English, Old (ca.450-1100)", "ang", nullptr},
{"Angika", "anp", nullptr},
{"Apache languages", "apa", nullptr},
{"Arabic", "ara", "ar", nullptr, MAKELCID(MAKELANGID(LANG_ARABIC, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Aramaic", "arc", nullptr},
{"Aragonese", "arg", "an"},
{"Armenian", "arm", "hy", "hye", MAKELCID(MAKELANGID(LANG_ARMENIAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Mapudungun", "arn", nullptr},
{"Arapaho", "arp", nullptr},
{"Artificial (Other)", "art", nullptr},
{"Arawak", "arw", nullptr},
{"Assamese", "asm", "as", nullptr, MAKELCID(MAKELANGID(LANG_ASSAMESE, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Asturian; Bable", "ast", nullptr},
{"Athapascan languages", "ath", nullptr},
{"Australian languages", "aus", nullptr},
{"Avaric", "ava", "av"},
{"Avestan", "ave", "ae"},
{"Awadhi", "awa", nullptr},
{"Aymara", "aym", "ay"},
{"Azerbaijani", "aze", "az", nullptr, MAKELCID(MAKELANGID(LANG_AZERI, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Banda", "bad", nullptr},
{"Bamileke languages", "bai", nullptr},
{"Bashkir", "bak", "ba", nullptr, MAKELCID(MAKELANGID(LANG_BASHKIR, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Baluchi", "bal", nullptr},
{"Bambara", "bam", "bm"},
{"Balinese", "ban", nullptr},
{"Basque", "baq", "eu", "eus", MAKELCID(MAKELANGID(LANG_BASQUE, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Basa", "bas", nullptr},
{"Baltic (Other)", "bat", nullptr},
{"Beja", "bej", nullptr},
{"Belarusian", "bel", "be", nullptr, MAKELCID(MAKELANGID(LANG_BELARUSIAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Bemba", "bem", nullptr},
{"Bengali", "ben", "bn", nullptr, MAKELCID(MAKELANGID(LANG_BENGALI, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Berber (Other)", "ber", nullptr},
{"Bhojpuri", "bho", nullptr},
{"Bihari", "bih", "bh"},
{"Bikol", "bik", nullptr},
{"Bini", "bin", nullptr},
{"Bislama", "bis", "bi"},
{"Siksika", "bla", nullptr},
{"Bantu (Other)", "bnt", nullptr},
{"Bosnian", "bos", "bs"},
{"Braj", "bra", nullptr},
{"Breton", "bre", "br", nullptr, MAKELCID(MAKELANGID(LANG_BRETON, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Batak (Indonesia)", "btk", nullptr},
{"Buriat", "bua", nullptr},
{"Buginese", "bug", nullptr},
{"Bulgarian", "bul", "bg", nullptr, MAKELCID(MAKELANGID(LANG_BULGARIAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Burmese", "bur", "my", "mya"},
{"Blin", "byn", nullptr},
{"Caddo", "cad", nullptr},
{"Central American Indian (Other)", "cai", nullptr},
{"Carib", "car", nullptr},
{"Catalan", "cat", "ca", nullptr, MAKELCID(MAKELANGID(LANG_CATALAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Caucasian (Other)", "cau", nullptr},
{"Cebuano", "ceb", nullptr},
{"Celtic (Other)", "cel", nullptr},
{"Chamorro", "cha", "ch"},
{"Chibcha", "chb", nullptr},
{"Chechen", "che", "ce"},
{"Chagatai", "chg", nullptr},
{"Chinese", "chi", "zh", "zho", MAKELCID(MAKELANGID(LANG_CHINESE, SUBLANG_NEUTRAL), SORT_DEFAULT)},
{"Chuukese", "chk", nullptr},
{"Mari", "chm", nullptr},
{"Chinook jargon", "chn", nullptr},
{"Choctaw", "cho", nullptr},
{"Chipewyan", "chp", nullptr},
{"Cherokee", "chr", nullptr},
{"Church Slavic", "chu", "cu"},
{"Chuvash", "chv", "cv"},
{"Cheyenne", "chy", nullptr},
{"Chamic languages", "cmc", nullptr},
{"Montenegrin", "cnr", nullptr},
{"Coptic", "cop", nullptr},
{"Cornish", "cor", "kw"},
{"Corsican", "cos", "co", nullptr, MAKELCID(MAKELANGID(LANG_CORSICAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Creoles and pidgins, English-based", "cpe", nullptr},
{"Creoles and pidgins, French-based", "cpf", nullptr},
{"Creoles and pidgins, Portuguese-based", "cpp", nullptr},
{"Cree", "cre", "cr"},
{"Crimean Turkish", "crh", nullptr},
{"Creoles and pidgins (Other)", "crp", nullptr},
{"Kashubian", "csb", nullptr},
{"Cushitic (Other)", "cus", nullptr},
{"Czech", "cze", "cs", "ces", MAKELCID(MAKELANGID(LANG_CZECH, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Dakota", "dak", nullptr},
{"Danish", "dan", "da", nullptr, MAKELCID(MAKELANGID(LANG_DANISH, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Dargwa", "dar", nullptr},
{"Dayak", "day", nullptr},
{"Delaware", "del", nullptr},
{"Slave (Athapascan)", "den", nullptr},
{"Dogrib", "dgr", nullptr},
{"Dinka", "din", nullptr},
{"Divehi", "div", "dv", nullptr, MAKELCID(MAKELANGID(LANG_DIVEHI, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Dogri", "doi", nullptr},
{"Dravidian (Other)", "dra", nullptr},
{"Lower Sorbian", "dsb", nullptr},
{"Duala", "dua", nullptr},
{"Dutch, Middle (ca. 1050-1350)", "dum", nullptr},
{"Dutch", "dut", "nl", "nld", MAKELCID(MAKELANGID(LANG_DUTCH, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Dyula", "dyu", nullptr},
{"Dzongkha", "dzo", "dz"},
{"Efik", "efi", nullptr},
{"Egyptian (Ancient)", "egy", nullptr},
{"Ekajuk", "eka", nullptr},
{"Elamite", "elx", nullptr},
{"English", "eng", "en", nullptr, MAKELCID(MAKELANGID(LANG_ENGLISH, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"English, Middle (1100-1500)", "enm", nullptr},
{"Esperanto", "epo", "eo"},
{"Estonian", "est", "et", nullptr, MAKELCID(MAKELANGID(LANG_ESTONIAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Ewe", "ewe", "ee"},
{"Ewondo", "ewo", nullptr},
{"Fang", "fan", nullptr},
{"Faroese", "fao", "fo", nullptr, MAKELCID(MAKELANGID(LANG_FAEROESE, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Fanti", "fat", nullptr},
{"Fijian", "fij", "fj"},
{"Filipino", "fil", nullptr},
{"Finnish", "fin", "fi", nullptr, MAKELCID(MAKELANGID(LANG_FINNISH, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Finno-Ugrian (Other)", "fiu", nullptr},
{"Fon", "fon", nullptr},
{"French", "fre", "fr", "fra", MAKELCID(MAKELANGID(LANG_FRENCH, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"French, Middle (ca.1400-1600)", "frm", nullptr},
{"French, Old (842-ca.1400)", "fro", nullptr},
{"Northern Frisian", "frr", nullptr},
{"Eastern Frisian", "frs", nullptr},
{"Frisian", "fry", "fy", nullptr, MAKELCID(MAKELANGID(LANG_FRISIAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Fulah", "ful", "ff"},
{"Friulian", "fur", nullptr},
{"Ga", "gaa", nullptr},
{"Gayo", "gay", nullptr},
{"Gbaya", "gba", nullptr},
{"Germanic (Other)", "gem", nullptr},
{"Georgian", "geo", "ka", "kat", MAKELCID(MAKELANGID(LANG_GEORGIAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"German", "ger", "de", "deu", MAKELCID(MAKELANGID(LANG_GERMAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Geez", "gez", nullptr},
{"Gilbertese", "gil", nullptr},
{"Gaelic; Scottish Gaelic", "gla", "gd"},
{"Irish", "gle", "ga", nullptr, MAKELCID(MAKELANGID(LANG_IRISH, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Galician", "glg", "gl", nullptr, MAKELCID(MAKELANGID(LANG_GALICIAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Manx", "glv", "gv"},
{"German, Middle High (ca.1050-1500)", "gmh", nullptr},
{"German, Old High (ca.750-1050)", "goh", nullptr},
{"Gondi", "gon", nullptr},
{"Gorontalo", "gor", nullptr},
{"Gothic", "got", nullptr},
{"Grebo", "grb", nullptr},
{"Ancient Greek", "grc", nullptr},
{"Greek", "gre", "el", "ell", MAKELCID(MAKELANGID(LANG_GREEK, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Guarani", "grn", "gn"},
{"Swiss German", "gsw", nullptr},
{"Gujarati", "guj", "gu", nullptr, MAKELCID(MAKELANGID(LANG_GUJARATI, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Gwich´in", "gwi", nullptr},
{"Haida", "hai", nullptr},
{"Haitian", "hat", "ht"},
{"Hausa", "hau", "ha", nullptr, MAKELCID(MAKELANGID(LANG_HAUSA, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Hawaiian", "haw", nullptr},
{"Hebrew", "heb", "he", nullptr, MAKELCID(MAKELANGID(LANG_HEBREW, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Herero", "her", "hz"},
{"Hiligaynon", "hil", nullptr},
{"Himachali", "him", nullptr},
{"Hindi", "hin", "hi", nullptr, MAKELCID(MAKELANGID(LANG_HINDI, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Hittite", "hit", nullptr},
{"Hmong", "hmn", nullptr},
{"Hiri Motu", "hmo", "ho"},
{"Croatian", "hrv", "hr", "scr", MAKELCID(MAKELANGID(LANG_CROATIAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Upper Sorbian", "hsb", nullptr},
{"Hungarian", "hun", "hu", nullptr, MAKELCID(MAKELANGID(LANG_HUNGARIAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Hupa", "hup", nullptr},
{"Iban", "iba", nullptr},
{"Igbo", "ibo", "ig", nullptr, MAKELCID(MAKELANGID(LANG_IGBO, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Icelandic", "ice", "is", "isl", MAKELCID(MAKELANGID(LANG_ICELANDIC, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Ido", "ido", "io"},
{"Sichuan Yi", "iii", "ii"},
{"Ijo", "ijo", nullptr},
{"Inuktitut", "iku", "iu", nullptr, MAKELCID(MAKELANGID(LANG_INUKTITUT, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Interlingue", "ile", "ie"},
{"Iloko", "ilo", nullptr},
{"Interlingua", "ina", "ia"},
{"Indic (Other)", "inc", nullptr},
{"Indonesian", "ind", "id", nullptr, MAKELCID(MAKELANGID(LANG_INDONESIAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Indo-European (Other)", "ine", nullptr},
{"Ingush", "inh", nullptr},
{"Inupiaq", "ipk", "ik"},
{"Iranian (Other)", "ira", nullptr},
{"Iroquoian languages", "iro", nullptr},
{"Italian", "ita", "it", nullptr, MAKELCID(MAKELANGID(LANG_ITALIAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Javanese", "jav", "jv"},
{"Lojban", "jbo", nullptr},
{"Japanese", "jpn", "ja", nullptr, MAKELCID(MAKELANGID(LANG_JAPANESE, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Judeo-Persian", "jpr", nullptr},
{"Judeo-Arabic", "jrb", nullptr},
{"Kara-Kalpak", "kaa", nullptr},
{"Kabyle", "kab", nullptr},
{"Kachin", "kac", nullptr},
{"Greenlandic; Kalaallisut", "kal", "kl", nullptr, MAKELCID(MAKELANGID(LANG_GREENLANDIC, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Kamba", "kam", nullptr},
{"Kannada", "kan", "kn", nullptr, MAKELCID(MAKELANGID(LANG_KANNADA, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Karen", "kar", nullptr},
{"Kashmiri", "kas", "ks", nullptr, MAKELCID(MAKELANGID(LANG_KASHMIRI, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Kanuri", "kau", "kr"},
{"Kawi", "kaw", nullptr},
{"Kazakh", "kaz", "kk", nullptr, MAKELCID(MAKELANGID(LANG_KAZAK, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Kabardian", "kbd", nullptr},
{"Khasi", "kha", nullptr},
{"Khoisan (Other)", "khi", nullptr},
{"Khmer", "khm", "km", nullptr, MAKELCID(MAKELANGID(LANG_KHMER, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Khotanese", "kho", nullptr},
{"Kikuyu; Gikuyu", "kik", "ki"},
{"Kinyarwanda", "kin", "rw", nullptr, MAKELCID(MAKELANGID(LANG_KINYARWANDA, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Kirghiz", "kir", "ky"},
{"Kimbundu", "kmb", nullptr},
{"Konkani", "kok", nullptr, nullptr, MAKELCID(MAKELANGID(LANG_KONKANI, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Komi", "kom", "kv"},
{"Kongo", "kon", "kg"},
{"Korean", "kor", "ko", nullptr, MAKELCID(MAKELANGID(LANG_KOREAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Kosraean", "kos", nullptr},
{"Kpelle", "kpe", nullptr},
{"Karachay-Balkar", "krc", nullptr},
{"Karelian", "krl", nullptr},
{"Kru", "kro", nullptr},
{"Kurukh", "kru", nullptr},
{"Kwanyama, Kuanyama", "kua", "kj"},
{"Kumyk", "kum", nullptr},
{"Kurdish", "kur", "ku"},
{"Kutenai", "kut", nullptr},
{"Ladino", "lad", nullptr},
{"Lahnda", "lah", nullptr},
{"Lamba", "lam", nullptr},
{"Lao", "lao", "lo", nullptr, MAKELCID(MAKELANGID(LANG_LAO, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Latin", "lat", "la"},
{"Latvian", "lav", "lv", nullptr, MAKELCID(MAKELANGID(LANG_LATVIAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Lezghian", "lez", nullptr},
{"Limburgan; Limburger; Limburgish", "lim", "li"},
{"Lingala", "lin", "ln"},
{"Lithuanian", "lit", "lt", nullptr, MAKELCID(MAKELANGID(LANG_LITHUANIAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Mongo", "lol", nullptr},
{"Lozi", "loz", nullptr},
{"Luxembourgish; Letzeburgesch", "ltz", "lb", nullptr, MAKELCID(MAKELANGID(LANG_LUXEMBOURGISH, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Luba-Lulua", "lua", nullptr},
{"Luba-Katanga", "lub", "lu"},
{"Ganda", "lug", "lg"},
{"Luiseno", "lui", nullptr},
{"Lunda", "lun", nullptr},
{"Luo (Kenya and Tanzania)", "luo", nullptr},
{"Lushai", "lus", nullptr},
{"Macedonian", "mac", "mk", "mkd", MAKELCID(MAKELANGID(LANG_MACEDONIAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Madurese", "mad", nullptr},
{"Magahi", "mag", nullptr},
{"Marshallese", "mah", "mh"},
{"Maithili", "mai", nullptr},
{"Makasar", "mak", nullptr},
{"Malayalam", "mal", "ml", nullptr, MAKELCID(MAKELANGID(LANG_MALAYALAM, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Mandingo", "man", nullptr},
{"Maori", "mao", "mi", "mri", MAKELCID(MAKELANGID(LANG_MAORI, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Austronesian (Other)", "map", nullptr},
{"Marathi", "mar", "mr", nullptr, MAKELCID(MAKELANGID(LANG_MARATHI, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Masai", "mas", nullptr},
{"Malay", "may", "ms", "msa", MAKELCID(MAKELANGID(LANG_MALAY, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Moksha", "mdf", nullptr},
{"Mandar", "mdr", nullptr},
{"Mende", "men", nullptr},
{"Irish, Middle (900-1200)", "mga", nullptr},
{"Micmac", "mic", nullptr},
{"Minangkabau", "min", nullptr},
{"Miscellaneous languages", "mis", nullptr},
{"Mon-Khmer (Other)", "mkh", nullptr},
{"Malagasy", "mlg", "mg"},
{"Maltese", "mlt", "mt", nullptr, MAKELCID(MAKELANGID(LANG_MALTESE, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Manchu", "mnc", nullptr},
{"Manipuri", "mni", nullptr, nullptr, MAKELCID(MAKELANGID(LANG_MANIPURI, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Manobo languages", "mno", nullptr},
{"Mohawk", "moh", nullptr, nullptr, MAKELCID(MAKELANGID(LANG_MOHAWK, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Moldavian", "mol", "mo"}, // deprecated
{"Mongolian", "mon", "mn", nullptr, MAKELCID(MAKELANGID(LANG_MONGOLIAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Mossi", "mos", nullptr},
{"Multiple languages", "mul", nullptr},
{"Munda languages", "mun", nullptr},
{"Creek", "mus", nullptr},
{"Mirandese", "mwl", nullptr},
{"Marwari", "mwr", nullptr},
{"Mayan languages", "myn", nullptr},
{"Erzya", "myv", nullptr},
{"Nahuatl", "nah", nullptr},
{"North American Indian (Other)", "nai", nullptr},
{"Neapolitan", "nap", nullptr},
{"Nauru", "nau", "na"},
{"Navaho, Navajo", "nav", "nv"},
{"Ndebele, South", "nbl", "nr"},
{"Ndebele, North", "nde", "nd"},
{"Ndonga", "ndo", "ng"},
{"Low German; Low Saxon", "nds", nullptr},
{"Nepali", "nep", "ne", nullptr, MAKELCID(MAKELANGID(LANG_NEPALI, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Newari", "new", nullptr},
{"Nias", "nia", nullptr},
{"Niger-Kordofanian (Other)", "nic", nullptr},
{"Niuean", "niu", nullptr},
{"Norwegian Nynorsk", "nno", "nn"},
{"Norwegian Bokmål", "nob", "nb"},
{"Nogai", "nog", nullptr},
{"Norse, Old", "non", nullptr},
{"Norwegian", "nor", "no", nullptr, MAKELCID(MAKELANGID(LANG_NORWEGIAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"N'Ko", "nqo", nullptr},
{"Pedi; Sepedi; Northern Sotho", "nso", nullptr, nullptr, MAKELCID(MAKELANGID(LANG_SOTHO, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Nubian languages", "nub", nullptr},
{"Classical Newari", "nwc", nullptr},
{"Nyanja; Chichewa; Chewa", "nya", "ny"},
{"Nyamwezi", "nym", nullptr},
{"Nyankole", "nyn", nullptr},
{"Nyoro", "nyo", nullptr},
{"Nzima", "nzi", nullptr},
{"Occitan (post 1500}", "oci", "oc", nullptr, MAKELCID(MAKELANGID(LANG_OCCITAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Ojibwa", "oji", "oj"},
{"Oriya", "ori", "or"},
{"Oromo", "orm", "om"},
{"Osage", "osa", nullptr},
{"Ossetian; Ossetic", "oss", "os"},
{"Turkish, Ottoman (1500-1928)", "ota", nullptr},
{"Otomian languages", "oto", nullptr},
{"Papuan (Other)", "paa", nullptr},
{"Pangasinan", "pag", nullptr},
{"Pahlavi", "pal", nullptr},
{"Pampanga", "pam", nullptr},
{"Panjabi", "pan", "pa"},
{"Papiamento", "pap", nullptr},
{"Palauan", "pau", nullptr},
{"Persian, Old (ca.600-400 B.C.)", "peo", nullptr},
{"Persian", "per", "fa", "fas", MAKELCID(MAKELANGID(LANG_PERSIAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Philippine (Other)", "phi", nullptr},
{"Phoenician", "phn", nullptr},
{"Pali", "pli", "pi"},
{"Portuguese (Brazil)", "pob", "pb"}, // deprecated/unofficial
{"Polish", "pol", "pl", nullptr, MAKELCID(MAKELANGID(LANG_POLISH, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Pohnpeian", "pon", nullptr},
{"Portuguese", "por", "pt", nullptr, MAKELCID(MAKELANGID(LANG_PORTUGUESE, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Prakrit languages", "pra", nullptr},
{"Provençal, Old (to 1500)", "pro", nullptr},
{"Pushto", "pus", "ps"},
{"Quechua", "que", "qu", nullptr, MAKELCID(MAKELANGID(LANG_QUECHUA, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Rajasthani", "raj", nullptr},
{"Rapanui", "rap", nullptr},
{"Rarotongan", "rar", nullptr},
{"Romance (Other)", "roa", nullptr},
{"Romansh", "roh", "rm"},
{"Romany", "rom", nullptr},
{"Romanian", "rum", "ro", "ron", MAKELCID(MAKELANGID(LANG_ROMANIAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Rundi", "run", "rn"},
{"Aromanian", "rup", nullptr},
{"Russian", "rus", "ru", nullptr, MAKELCID(MAKELANGID(LANG_RUSSIAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Sandawe", "sad", nullptr},
{"Sango", "sag", "sg"},
{"Yakut", "sah", nullptr, nullptr, MAKELCID(MAKELANGID(LANG_YAKUT, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"South American Indian (Other)", "sai", nullptr},
{"Salishan languages", "sal", nullptr},
{"Samaritan Aramaic", "sam", nullptr},
{"Sanskrit", "san", "sa", nullptr, MAKELCID(MAKELANGID(LANG_SANSKRIT, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Sasak", "sas", nullptr},
{"Santali", "sat", nullptr},
{"Sicilian", "scn", nullptr},
{"Scots", "sco", nullptr},
{"Selkup", "sel", nullptr},
{"Semitic (Other)", "sem", nullptr},
{"Irish, Old (to 900)", "sga", nullptr},
{"Sign languages", "sgn", nullptr},
{"Shan", "shn", nullptr},
{"Sidamo", "sid", nullptr},
{"Sinhalese", "sin", "si", nullptr, MAKELCID(MAKELANGID(LANG_SINHALESE, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Siouan languages", "sio", nullptr},
{"Sino-Tibetan (Other)", "sit", nullptr},
{"Slavic (Other)", "sla", nullptr},
{"Slovak", "slo", "sk", "slk", MAKELCID(MAKELANGID(LANG_SLOVAK, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Slovenian", "slv", "sl", nullptr, MAKELCID(MAKELANGID(LANG_SLOVENIAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Southern Sami", "sma", nullptr},
{"Northern Sami", "sme", "se"},
{"Sami languages (Other)", "smi", nullptr},
{"Lule Sami", "smj", nullptr},
{"Inari Sami", "smn", nullptr},
{"Samoan", "smo", "sm"},
{"Skolt Sami", "sms", nullptr},
{"Shona", "sna", "sn"},
{"Sindhi", "snd", "sd", nullptr, MAKELCID(MAKELANGID(LANG_SINDHI, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Soninke", "snk", nullptr},
{"Sogdian", "sog", nullptr},
{"Somali", "som", "so"},
{"Songhai", "son", nullptr},
{"Sotho, Southern", "sot", "st", nullptr, MAKELCID(MAKELANGID(LANG_SOTHO, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Spanish", "spa", "es", "esp", MAKELCID(MAKELANGID(LANG_SPANISH, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Sardinian", "srd", "sc"},
{"Sranan Tongo", "srn", nullptr},
{"Serbian", "srp", "sr", "scc", MAKELCID(MAKELANGID(LANG_SERBIAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Serer", "srr", nullptr},
{"Nilo-Saharan (Other)", "ssa", nullptr},
{"Swati", "ssw", "ss"},
{"Sukuma", "suk", nullptr},
{"Sundanese", "sun", "su"},
{"Susu", "sus", nullptr},
{"Sumerian", "sux", nullptr},
{"Swahili", "swa", "sw", nullptr, MAKELCID(MAKELANGID(LANG_SWAHILI, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Swedish", "swe", "sv", nullptr, MAKELCID(MAKELANGID(LANG_SWEDISH, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Classical Syriac", "syc", nullptr},
{"Syriac", "syr", nullptr, nullptr, MAKELCID(MAKELANGID(LANG_SYRIAC, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Tahitian", "tah", "ty"},
{"Tai (Other)", "tai", nullptr},
{"Tamil", "tam", "ta", nullptr, MAKELCID(MAKELANGID(LANG_TAMIL, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Tatar", "tat", "tt", nullptr, MAKELCID(MAKELANGID(LANG_TATAR, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Telugu", "tel", "te", nullptr, MAKELCID(MAKELANGID(LANG_TELUGU, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Timne", "tem", nullptr},
{"Tereno", "ter", nullptr},
{"Tetum", "tet", nullptr},
{"Tajik", "tgk", "tg", nullptr, MAKELCID(MAKELANGID(LANG_TAJIK, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Tagalog", "tgl", "tl"},
{"Thai", "tha", "th", nullptr, MAKELCID(MAKELANGID(LANG_THAI, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Tibetan", "tib", "bo", "bod", MAKELCID(MAKELANGID(LANG_TIBETAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Tigre", "tig", nullptr},
{"Tigrinya", "tir", "ti", nullptr, MAKELCID(MAKELANGID(LANG_TIGRIGNA, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Tiv", "tiv", nullptr},
{"Tokelau", "tkl", nullptr},
{"Klingon", "tlh", nullptr},
{"Tlingit", "tli", nullptr},
{"Tamashek", "tmh", nullptr},
{"Tonga (Nyasa)", "tog", nullptr},
{"Tonga (Tonga Islands)", "ton", "to"},
{"Tok Pisin", "tpi", nullptr},
{"Tsimshian", "tsi", nullptr},
{"Tswana", "tsn", "tn", nullptr, MAKELCID(MAKELANGID(LANG_TSWANA, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Tsonga", "tso", "ts"},
{"Turkmen", "tuk", "tk", nullptr, MAKELCID(MAKELANGID(LANG_TURKMEN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Tumbuka", "tum", nullptr},
{"Tupi languages", "tup", nullptr},
{"Turkish", "tur", "tr", nullptr, MAKELCID(MAKELANGID(LANG_TURKISH, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Altaic (Other)", "tut", nullptr},
{"Tuvalu", "tvl", nullptr},
{"Twi", "twi", "tw"},
{"Tuvinian", "tyv", nullptr},
{"Udmurt", "udm", nullptr},
{"Ugaritic", "uga", nullptr},
{"Uighur", "uig", "ug", nullptr, MAKELCID(MAKELANGID(LANG_UIGHUR, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Ukrainian", "ukr", "uk", nullptr, MAKELCID(MAKELANGID(LANG_UKRAINIAN, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Umbundu", "umb", nullptr},
{"Undetermined", "und", nullptr},
{"Urdu", "urd", "ur", nullptr, MAKELCID(MAKELANGID(LANG_URDU, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Uzbek", "uzb", "uz", nullptr, MAKELCID(MAKELANGID(LANG_UZBEK, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Vai", "vai", nullptr},
{"Venda", "ven", "ve"},
{"Vietnamese", "vie", "vi", nullptr, MAKELCID(MAKELANGID(LANG_VIETNAMESE, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Volapük", "vol", "vo"},
{"Votic", "vot", nullptr},
{"Wakashan languages", "wak", nullptr},
{"Walamo", "wal", nullptr},
{"Waray", "war", nullptr},
{"Washo", "was", nullptr},
{"Welsh", "wel", "cy", "cym", MAKELCID(MAKELANGID(LANG_WELSH, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Sorbian languages", "wen", nullptr},
{"Walloon", "wln", "wa"},
{"Wolof", "wol", "wo", nullptr, MAKELCID(MAKELANGID(LANG_WOLOF, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Kalmyk", "xal", nullptr},
{"Xhosa", "xho", "xh", nullptr, MAKELCID(MAKELANGID(LANG_XHOSA, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Yao", "yao", nullptr},
{"Yapese", "yap", nullptr},
{"Yiddish", "yid", "yi"},
{"Yoruba", "yor", "yo", nullptr, MAKELCID(MAKELANGID(LANG_YORUBA, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Yupik languages", "ypk", nullptr},
{"Zapotec", "zap", nullptr},
{"Blissymbols", "zbl", nullptr},
{"Zenaga", "zen", nullptr},
{"Standard Moroccan Tamazight", "zgh", nullptr},
{"Zhuang; Chuang", "zha", "za"},
{"Zande", "znd", nullptr},
{"Zulu", "zul", "zu", nullptr, MAKELCID(MAKELANGID(LANG_ZULU, SUBLANG_DEFAULT), SORT_DEFAULT)},
{"Zuni", "zun", nullptr},
{"Zaza", "zza", nullptr},
{nullptr, nullptr, nullptr},
{"No subtitles", "---", nullptr, nullptr, (LCID)LCID_NOSUBTITLES},
};
std::string ISO6391ToLanguage(LPCSTR code)
{
    // Look up the English language name for a two-letter ISO 639-1 code.
    // Returns an empty string if the code is unknown.
    CHAR tmp[2 + 1];
    strncpy_s(tmp, code, 2);
    tmp[2] = 0;
    _strlwr_s(tmp);
    for (size_t i = 0, j = countof(s_isolangs); i < j; i++)
    {
        if (s_isolangs[i].iso6391 && !strcmp(s_isolangs[i].iso6391, tmp))
        {
            std::string ret = std::string(s_isolangs[i].name);
            // Trim alternate names, e.g. "Nyanja; Chichewa; Chewa" -> "Nyanja".
            // (Renamed from `i`, which shadowed the loop counter above.)
            size_t sep = ret.find(';');
            if (sep != std::string::npos)
            {
                ret = ret.substr(0, sep);
            }
            return ret;
        }
    }
    return std::string();
}
std::string ISO6392ToLanguage(LPCSTR code)
{
    // Look up the English language name for a three-letter ISO 639-2 code,
    // matching either the primary or the alternate 639-2 code.
    // Returns an empty string if the code is unknown.
    CHAR tmp[3 + 1];
    strncpy_s(tmp, code, 3);
    tmp[3] = 0;
    _strlwr_s(tmp);
    for (size_t i = 0, j = countof(s_isolangs); i < j; i++)
    {
        if ((s_isolangs[i].iso6392 && !strcmp(s_isolangs[i].iso6392, tmp)) ||
            (s_isolangs[i].iso6392_2 && !strcmp(s_isolangs[i].iso6392_2, tmp)))
        {
            std::string ret = std::string(s_isolangs[i].name);
            // Trim alternate names, e.g. "Ossetian; Ossetic" -> "Ossetian".
            // (Renamed from `i`, which shadowed the loop counter above.)
            size_t sep = ret.find(';');
            if (sep != std::string::npos)
            {
                ret = ret.substr(0, sep);
            }
            return ret;
        }
    }
    return std::string();
}
std::string ProbeLangForLanguage(LPCSTR code)
{
    // Dispatch to the ISO 639-2 (3 letters) or ISO 639-1 (2+ letters)
    // name lookup based on the length of the code.
    const size_t len = strlen(code);
    if (len == 3)
        return ISO6392ToLanguage(code);
    if (len >= 2)
        return ISO6391ToLanguage(code);
    return std::string();
}
static std::string ISO6392Check(LPCSTR lang)
{
    // Lowercase a 3-letter code and canonicalize it: if it matches a known
    // entry (primary or alternate 639-2 code), return the primary 639-2
    // code; otherwise return the lowercased input unchanged.
    CHAR tmp[3 + 1];
    strncpy_s(tmp, lang, 3);
    tmp[3] = 0;
    _strlwr_s(tmp);
    for (size_t idx = 0; idx < countof(s_isolangs); ++idx)
    {
        const bool primary = s_isolangs[idx].iso6392 && strcmp(s_isolangs[idx].iso6392, tmp) == 0;
        const bool alternate = s_isolangs[idx].iso6392_2 && strcmp(s_isolangs[idx].iso6392_2, tmp) == 0;
        if (primary || alternate)
            return std::string(s_isolangs[idx].iso6392);
    }
    return std::string(tmp);
}
static std::string LanguageToISO6392(LPCSTR code)
{
    // Reverse lookup: full language name (case-insensitive) -> primary
    // ISO 639-2 code. Returns an empty string when the name is unknown.
    for (size_t idx = 0; idx < countof(s_isolangs); ++idx)
    {
        if (s_isolangs[idx].name && _stricmp(s_isolangs[idx].name, code) == 0)
            return std::string(s_isolangs[idx].iso6392);
    }
    return std::string();
}
std::string ProbeForISO6392(LPCSTR lang)
{
    // Best-effort conversion of an arbitrary language tag to an ISO 639-2
    // code: 2 letters -> 639-1 mapping, 3 letters -> canonicalization,
    // longer strings -> full-name lookup with an OGM-style "[xxx]" suffix
    // as a fallback. Unrecognized input is passed through unchanged.
    const size_t len = strlen(lang);
    std::string isoLang;
    if (len == 2)
    {
        isoLang = ISO6391To6392(lang);
    }
    else if (len == 3)
    {
        isoLang = ISO6392Check(lang);
    }
    else if (len > 3)
    {
        isoLang = LanguageToISO6392(lang);
        if (isoLang.empty())
        {
            // Track titles like "English [eng]" carry the code in brackets
            std::regex ogmRegex("\\[([[:alpha:]]{3})\\]");
            std::cmatch res;
            const bool found = std::regex_search(lang, res, ogmRegex);
            if (found && !res[1].str().empty())
                isoLang = ISO6392Check(res[1].str().c_str());
        }
    }
    if (isoLang.empty())
        isoLang = std::string(lang);
    return isoLang;
}
LCID ISO6391ToLcid(LPCSTR code)
{
    // Map a two-letter ISO 639-1 code to a Windows LCID (0 if unknown).
    CHAR tmp[2 + 1];
    strncpy_s(tmp, code, 2);
    tmp[2] = 0;
    _strlwr_s(tmp);
    for (size_t idx = 0; idx < countof(s_isolangs); ++idx)
    {
        if (s_isolangs[idx].iso6391 && strcmp(s_isolangs[idx].iso6391, tmp) == 0)
            return s_isolangs[idx].lcid;
    }
    return 0;
}
LCID ISO6392ToLcid(LPCSTR code)
{
    // Map a three-letter ISO 639-2 code (primary or alternate) to a
    // Windows LCID (0 if unknown).
    CHAR tmp[3 + 1];
    strncpy_s(tmp, code, 3);
    tmp[3] = 0;
    _strlwr_s(tmp);
    for (size_t idx = 0; idx < countof(s_isolangs); ++idx)
    {
        const bool primary = s_isolangs[idx].iso6392 && strcmp(s_isolangs[idx].iso6392, tmp) == 0;
        const bool alternate = s_isolangs[idx].iso6392_2 && strcmp(s_isolangs[idx].iso6392_2, tmp) == 0;
        if (primary || alternate)
            return s_isolangs[idx].lcid;
    }
    return 0;
}
std::string ISO6391To6392(LPCSTR code)
{
    // Convert a two-letter ISO 639-1 code to its three-letter ISO 639-2
    // equivalent; unknown codes are passed through unchanged.
    CHAR tmp[2 + 1];
    strncpy_s(tmp, code, 2);
    tmp[2] = 0;
    _strlwr_s(tmp);
    for (size_t idx = 0; idx < countof(s_isolangs); ++idx)
    {
        if (s_isolangs[idx].iso6391 && strcmp(s_isolangs[idx].iso6391, tmp) == 0)
            return s_isolangs[idx].iso6392;
    }
    return std::string(code);
}
std::string ISO6392To6391(LPCSTR code)
{
    // Convert a three-letter ISO 639-2 code (primary or alternate) to the
    // two-letter ISO 639-1 code. Returns an empty string when the code is
    // unknown or the entry has no 639-1 equivalent.
    CHAR tmp[3 + 1];
    strncpy_s(tmp, code, 3);
    tmp[3] = 0;
    _strlwr_s(tmp);
    for (size_t i = 0, j = countof(s_isolangs); i < j; i++)
    {
        if ((s_isolangs[i].iso6392 && !strcmp(s_isolangs[i].iso6392, tmp)) ||
            (s_isolangs[i].iso6392_2 && !strcmp(s_isolangs[i].iso6392_2, tmp)))
        {
            // Many table entries (e.g. "nic", "niu") have a null iso6391;
            // the previous code constructed std::string from nullptr here,
            // which is undefined behavior.
            if (s_isolangs[i].iso6391)
                return std::string(s_isolangs[i].iso6391);
            return std::string();
        }
    }
    return std::string();
}
LCID ProbeLangForLCID(LPCSTR code)
{
    // Dispatch to the ISO 639-2 (3 letters) or ISO 639-1 (2+ letters)
    // LCID lookup based on the length of the code.
    const size_t len = strlen(code);
    if (len == 3)
        return ISO6392ToLcid(code);
    if (len >= 2)
        return ISO6391ToLcid(code);
    return 0;
}
| 32,760
|
C++
|
.cpp
| 707
| 38.302687
| 136
| 0.55832
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| true
| false
| false
|
22,105
|
BaseTrayIcon.cpp
|
Nevcairiel_LAVFilters/common/DSUtilLite/BaseTrayIcon.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "DShowUtil.h"
#include "BaseTrayIcon.h"
#include "BaseDSPropPage.h"
#include <time.h>
#include <process.h>
#include <Shlwapi.h>
#define TRAYICON 0x1f0
#define MSG_TRAYICON WM_USER + 1
#define MSG_QUIT WM_USER + 2
// The assumed size of the propery page
#define PROP_WIDTH_OFFSET 400
#define PROP_HEIGHT_OFFSET 250
static const WCHAR *noTrayProcesses[] = {L"dllhost.exe", L"explorer.exe", L"ReClockHelper.dll"};
BOOL CBaseTrayIcon::ProcessBlackList()
{
    // Returns TRUE when the current host process is one that should never
    // show a tray icon (system hosts and the ReClock helper).
    WCHAR szModulePath[1024];
    GetModuleFileName(nullptr, szModulePath, 1024);
    const WCHAR *szProcess = PathFindFileName(szModulePath);
    for (int idx = 0; idx < countof(noTrayProcesses); idx++)
    {
        if (_wcsicmp(szProcess, noTrayProcesses[idx]) == 0)
            return TRUE;
    }
    return FALSE;
}
// Creates the tray icon handler for the given filter and spawns the worker
// thread that owns the icon; blocks until the thread finished its setup.
CBaseTrayIcon::CBaseTrayIcon(IBaseFilter *pFilter, const WCHAR *wszName, int resIcon)
    : m_pFilter(pFilter)
    , m_wszName(wszName)
    , m_resIcon(resIcon)
{
    memset(&m_NotifyIconData, 0, sizeof(m_NotifyIconData));
    m_evSetupFinished.Reset();
    // StartMessageThread() waits on m_evSetupFinished, so the icon setup is
    // complete (or has failed) by the time the constructor returns.
    StartMessageThread();
}
CBaseTrayIcon::~CBaseTrayIcon(void)
{
    m_pFilter = nullptr;
    if (m_hWnd)
    {
        // Ask the message thread to destroy its window and exit, then wait
        // for it so the notify icon is removed before we return.
        SendMessage(m_hWnd, MSG_QUIT, 0, 0);
        WaitForSingleObject(m_hThread, INFINITE);
    }
    // NOTE(review): m_hThread may be null if thread creation failed;
    // CloseHandle(nullptr) is harmless but returns an error.
    CloseHandle(m_hThread);
}
HRESULT CBaseTrayIcon::StartMessageThread()
{
    // Spawn the dedicated tray-icon thread and block until it has finished
    // its setup (window class, message window and notify icon).
    m_hThread = (HANDLE)_beginthreadex(nullptr, /* Security */
                                       0, /* Stack Size */
                                       InitialThreadProc, /* Thread process */
                                       (LPVOID)this, /* Arguments */
                                       0, /* 0 = Start Immediately */
                                       nullptr /* Thread Address */
    );
    // _beginthreadex returns 0 on failure; waiting on the setup event would
    // deadlock forever, since only the new thread signals it.
    if (m_hThread == 0)
        return E_FAIL;
    m_evSetupFinished.Wait();
    return S_OK;
}
unsigned int WINAPI CBaseTrayIcon::InitialThreadProc(LPVOID pv)
{
    // Static thread entry point: initialize COM for this thread, run the
    // instance's message loop, then tear COM down again.
    const HRESULT hrCo = CoInitialize(nullptr);
    unsigned int ret = static_cast<CBaseTrayIcon *>(pv)->TrayMessageThread();
    if (SUCCEEDED(hrCo))
        CoUninitialize();
    return ret;
}
// Thread body: registers the window class, creates the hidden message
// window and the shell notify icon, then pumps messages until MSG_QUIT
// destroys the window. Cleans up the icon and class before returning.
DWORD CBaseTrayIcon::TrayMessageThread()
{
    HRESULT hr;
    MSG msg;
    // Create the Window Class if it doesn't exist yet
    hr = RegisterWindowClass();
    if (FAILED(hr))
    {
        DbgLog((LOG_TRACE, 10, L"CBaseTrayIcon::ThreadProc(): Failed to register window class"));
    }
    // And the Window we use for messages
    hr = CreateMessageWindow();
    if (FAILED(hr))
    {
        DbgLog((LOG_TRACE, 10, L"CBaseTrayIcon::ThreadProc(): Failed to create message window"));
        // Unblock the constructor even on failure, then bail out
        m_evSetupFinished.Set();
        UnregisterClass(m_wszClassName, g_hInst);
        return 1;
    }
    ASSERT(m_hWnd);
    CreateTrayIconData();
    Shell_NotifyIcon(NIM_ADD, &m_NotifyIconData);
    Shell_NotifyIcon(NIM_SETVERSION, &m_NotifyIconData);
    // Icon is up; let the constructor return
    m_evSetupFinished.Set();
    // Message loop; GetMessage returns 0 on WM_QUIT, -1 on error
    while (GetMessage(&msg, nullptr, 0, 0) > 0)
    {
        TranslateMessage(&msg);
        DispatchMessage(&msg);
    }
    Shell_NotifyIcon(NIM_DELETE, &m_NotifyIconData);
    // Free icon resources
    if (m_NotifyIconData.hIcon)
    {
        DestroyIcon(m_NotifyIconData.hIcon);
        m_NotifyIconData.hIcon = nullptr;
    }
    // Unregister the window class we used
    UnregisterClass(m_wszClassName, g_hInst);
    return 0;
}
HRESULT CBaseTrayIcon::RegisterWindowClass()
{
    // Register a per-thread window class for the hidden message window.
    // Embedding the thread id in the name avoids collisions between
    // multiple filter instances.
    swprintf_s(m_wszClassName, countof(m_wszClassName), L"LAVTrayIconClass%d", GetCurrentThreadId());
    WNDCLASSEX wndClass = {};
    wndClass.cbSize = sizeof(WNDCLASSEX);
    wndClass.lpfnWndProc = WindowProc;
    wndClass.hInstance = g_hInst;
    wndClass.lpszClassName = m_wszClassName;
    return RegisterClassEx(&wndClass) ? S_OK : E_FAIL;
}
HRESULT CBaseTrayIcon::CreateMessageWindow()
{
    // Create the hidden message-only window that receives the tray icon
    // callbacks, and attach `this` so the static WindowProc can find us.
    m_hWnd = CreateWindowEx(0, m_wszClassName, L"LAV Tray Message Window", 0, 0, 0, 0, 0, HWND_MESSAGE, nullptr,
                            nullptr, nullptr);
    // Previously SetWindowLongPtr was called even when CreateWindowEx
    // failed and m_hWnd was null; bail out first.
    if (m_hWnd == nullptr)
        return E_FAIL;
    SetWindowLongPtr(m_hWnd, GWLP_USERDATA, LONG_PTR(this));
    return S_OK;
}
HRESULT CBaseTrayIcon::CreateTrayIconData()
{
    // Fill the NOTIFYICONDATA describing our icon: target window, callback
    // message, tooltip text and the 16x16 icon resource.
    memset(&m_NotifyIconData, 0, sizeof(m_NotifyIconData));
    m_NotifyIconData.cbSize = sizeof(m_NotifyIconData);
    m_NotifyIconData.uVersion = NOTIFYICON_VERSION;
    m_NotifyIconData.hWnd = m_hWnd;
    m_NotifyIconData.uID = TRAYICON;
    m_NotifyIconData.uCallbackMessage = MSG_TRAYICON;
    m_NotifyIconData.uFlags = NIF_ICON | NIF_TIP | NIF_MESSAGE;
    m_NotifyIconData.hIcon = (HICON)LoadImage(g_hInst, MAKEINTRESOURCE(m_resIcon), IMAGE_ICON, 16, 16, LR_DEFAULTCOLOR);
    wcscpy_s(m_NotifyIconData.szTip, m_wszName);
    return S_OK;
}
HRESULT CBaseTrayIcon::OpenPropPage()
{
    CheckPointer(m_pFilter, E_UNEXPECTED);
    // Flag checked by WindowProc so a second click focuses the existing
    // page instead of opening another one.
    m_bPropPageOpen = TRUE;
    // Center our (hidden) message window on the desktop; the property page
    // dialog is positioned relative to its parent window.
    RECT desktopRect;
    GetWindowRect(GetDesktopWindow(), &desktopRect);
    SetWindowPos(m_hWnd, 0, (desktopRect.right / 2) - PROP_WIDTH_OFFSET, (desktopRect.bottom / 2) - PROP_HEIGHT_OFFSET,
                 0, 0, SWP_NOZORDER | SWP_NOSIZE);
    // Blocks until the dialog is closed
    CBaseDSPropPage::ShowPropPageDialog(m_pFilter, m_hWnd);
    m_bPropPageOpen = FALSE;
    return S_OK;
}
static BOOL CALLBACK enumWindowCallback(HWND hwnd, LPARAM lparam)
{
    // EnumThreadWindows callback: bring the window owned by our message
    // window (the open property page) to the foreground.
    const HWND hOwner = (HWND)lparam;
    if (GetWindow(hwnd, GW_OWNER) != hOwner)
        return TRUE; // keep enumerating
    SetForegroundWindow(hwnd);
    return FALSE; // found it, stop enumeration
}
LRESULT CALLBACK CBaseTrayIcon::WindowProc(HWND hwnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
    // Retrieve the instance attached in CreateMessageWindow(). May be null
    // for messages delivered before GWLP_USERDATA is set.
    CBaseTrayIcon *icon = (CBaseTrayIcon *)GetWindowLongPtr(hwnd, GWLP_USERDATA);
    switch (uMsg)
    {
    case WM_DESTROY:
        // Guard against a null instance; previously `icon` was dereferenced
        // unconditionally here, while MSG_TRAYICON already checked it.
        if (icon)
            Shell_NotifyIcon(NIM_DELETE, &icon->m_NotifyIconData);
        PostQuitMessage(0);
        break;
    case MSG_QUIT: DestroyWindow(hwnd); break;
    case MSG_TRAYICON: {
        UINT trayMsg = LOWORD(lParam);
        if (icon)
        {
            switch (trayMsg)
            {
            case WM_LBUTTONUP:
                // Left click: open the property page, or bring the already
                // open one to the foreground.
                if (!icon->m_bPropPageOpen)
                {
                    icon->OpenPropPage();
                }
                else
                {
                    EnumThreadWindows(GetCurrentThreadId(), enumWindowCallback, (LPARAM)icon->m_hWnd);
                }
                break;
            case WM_RBUTTONUP:
            case WM_CONTEXTMENU:
                // Right click: show the context menu, unless the property
                // page is currently open.
                if (icon->m_bPropPageOpen)
                {
                    break;
                }
                HMENU hMenu = icon->GetPopupMenu();
                if (hMenu)
                {
                    POINT p;
                    GetCursorPos(&p);
                    SetForegroundWindow(hwnd);
                    int cmd = TrackPopupMenu(hMenu, TPM_RETURNCMD, p.x, p.y, 0, hwnd, nullptr);
                    // Required after TrackPopupMenu so the menu dismisses
                    PostMessage(hwnd, WM_NULL, 0, 0);
                    icon->ProcessMenuCommand(hMenu, cmd);
                    DestroyMenu(hMenu);
                }
                break;
            }
        }
    }
    break;
    }
    return DefWindowProc(hwnd, uMsg, wParam, lParam);
}
| 7,827
|
C++
|
.cpp
| 229
| 27.113537
| 120
| 0.629527
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| true
| true
| true
| false
|
22,107
|
CueSheet.cpp
|
Nevcairiel_LAVFilters/common/DSUtilLite/CueSheet.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "CueSheet.h"
#include <algorithm>
#include <sstream>
#include <regex>
using namespace std;
enum class ParserState
{
GLOBAL,
FILE,
TRACK
};
typedef string::value_type char_t;
static std::string::value_type up_char(std::string::value_type ch)
{
    // Uppercase one character via the ctype facet of the global locale.
    return std::use_facet<std::ctype<std::string::value_type>>(std::locale()).toupper(ch);
}
static string toupper(const string &src)
{
    // Return an uppercased copy of the string, one character at a time
    // through the locale-aware up_char helper.
    string result;
    result.reserve(src.size());
    for (char_t ch : src)
        result.push_back(up_char(ch));
    return result;
}
static void str_replace(std::string &s, const std::string &search, const std::string &replace)
{
    // In-place replacement of every occurrence of `search` with `replace`.
    // The scan resumes after the inserted text, so this terminates even
    // when `replace` contains `search`.
    std::string::size_type pos = s.find(search);
    while (pos != std::string::npos)
    {
        s.replace(pos, search.length(), replace);
        pos = s.find(search, pos + replace.length());
    }
}
static string GetCueParam(string line, bool firstWord = false)
{
    // Extract the argument of a cue sheet command line: skip the command
    // word, strip surrounding whitespace/quotes, and un-escape \" quotes.
    // With firstWord set, only the first token of the argument is returned.
    const string delims(" \t\n\r\"'");
    // Start of the command word
    string::size_type idx = line.find_first_not_of(delims);
    // End of the command word
    idx = line.find_first_of(delims, idx);
    // Start of the parameter
    idx = line.find_first_not_of(delims, idx);
    if (idx == string::npos)
        return string();
    string param = line.substr(idx);
    // Trim trailing whitespace/quotes
    param.erase(param.find_last_not_of(delims) + 1);
    // Un-escape quotes
    str_replace(param, "\\\"", "\"");
    if (firstWord)
    {
        const string::size_type sep = param.find_first_of(delims);
        if (sep != string::npos)
            param.erase(sep);
    }
    return param;
}
static REFERENCE_TIME ParseCueIndex(string line)
{
    // Parse an "INDEX nn mm:ss:ff" line (75 frames per second) into a
    // 100ns REFERENCE_TIME offset; returns 0 on malformed input.
    int index = 0, m = 0, s = 0, f = 0;
    const int fields = sscanf_s(line.c_str(), " INDEX %d %d:%d:%d", &index, &m, &s, &f);
    if (fields != 4)
        return 0;
    // Minutes and seconds in 100ns units, plus the frame fraction
    return (m * 60i64 + s) * 10000000i64 + (f * 10000000i64 / 75);
}
// Trivial constructor; members are default-initialized.
CCueSheet::CCueSheet()
{
}
// Trivial destructor; no owned resources beyond standard containers.
CCueSheet::~CCueSheet()
{
}
// Parse a complete cue sheet into the disc-level Performer/Title fields
// and the m_Tracks list. Only single-FILE sheets are supported; returns
// E_FAIL when a second FILE segment is encountered.
HRESULT CCueSheet::Parse(string cueSheet)
{
    DbgLog((LOG_TRACE, 10, L"CCueSheet::Parse(): Parsing Cue Sheet"));
    int trackCount = 0;
    // Line-based state machine: GLOBAL (header) -> FILE -> TRACK
    ParserState state(ParserState::GLOBAL);
    stringstream cueSheetStream(cueSheet);
    string line;
    while (getline(cueSheetStream, line))
    {
        // First word of the line is the command, uppercased for matching
        string word;
        (stringstream(line)) >> word;
        word = toupper(word);
        switch (state)
        {
        case ParserState::GLOBAL:
            // Header section: disc-level performer/title until FILE starts
            if (word == "PERFORMER")
            {
                m_Performer = GetCueParam(line);
            }
            else if (word == "TITLE")
            {
                m_Title = GetCueParam(line);
            }
            else if (word == "FILE")
            {
                state = ParserState::FILE;
            }
            break;
        case ParserState::FILE:
        case ParserState::TRACK:
            if (word == "FILE")
            {
                DbgLog((LOG_TRACE, 10, L"CCueSheet::Parse(): Multiple FILE segments not supported."));
                return E_FAIL;
            }
            if (word == "TRACK")
            {
                state = ParserState::TRACK;
                trackCount++;
                string id = GetCueParam(line, true);
                // Default title "Title <id>" until a TITLE line overrides it
                Track track{trackCount - 1, id, "Title " + id, 0, ""};
                m_Tracks.push_back(track);
            }
            else if (state == ParserState::TRACK)
            {
                // Per-track attributes apply to the most recent TRACK entry
                if (word == "TITLE")
                {
                    m_Tracks.back().Title = GetCueParam(line);
                }
                else if (word == "INDEX")
                {
                    m_Tracks.back().Time = ParseCueIndex(line);
                }
                else if (word == "PERFORMER")
                {
                    m_Tracks.back().Performer = GetCueParam(line);
                }
            }
            break;
        }
    }
    return S_OK;
}
std::string CCueSheet::FormatTrack(Track &track)
{
    // Render a track as "<id>. [<performer> - ]<title>".
    string formatted = track.Id + ". ";
    if (!track.Performer.empty())
    {
        formatted += track.Performer;
        formatted += " - ";
    }
    formatted += track.Title;
    return formatted;
}
| 4,888
|
C++
|
.cpp
| 163
| 22.883436
| 102
| 0.574039
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| true
| true
| true
| false
|
22,142
|
PostProcessor.cpp
|
Nevcairiel_LAVFilters/decoder/LAVAudio/PostProcessor.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "PostProcessor.h"
#include "LAVAudio.h"
#include "Media.h"
extern "C"
{
#include "libavutil/intreadwrite.h"
};
// PCM Volume Adjustment Factors, both for integer and float math
// entries start at 2 channel mixing, half volume
// Integer factors are 8.8 fixed-point multipliers (value/256); index 0
// corresponds to |iFactor| == 2, i.e. sqrt(2) gain.
static int pcm_volume_adjust_integer[7] = {362, 443, 512, 572, 627, 677, 724};
static float pcm_volume_adjust_float[7] = {1.41421356f, 1.73205081f, 2.00000000f, 2.23606798f,
                                           2.44948974f, 2.64575131f, 2.82842712f};
// SCALE_CA helper macro for SampleCopyAdjust
// Positive iFactor amplifies (sample * factor / 256), negative attenuates
// (sample * 256 / factor); `factor` is an 8.8 fixed-point multiplier.
#define SCALE_CA(sample, iFactor, factor) \
    { \
        if (iFactor > 0) \
        { \
            sample *= factor; \
            sample >>= 8; \
        } \
        else \
        { \
            sample <<= 8; \
            sample /= factor; \
        } \
    }
//
// Helper Function that reads one sample from pIn, applys the scale specified by iFactor, and writes it to pOut
//
static inline void SampleCopyAdjust(BYTE *pOut, const BYTE *pIn, int iFactor, LAVAudioSampleFormat sfSampleFormat)
{
    // |iFactor| in [2..8] selects the multiplier; the sign selects amplify
    // (positive) vs. attenuate (negative). See SCALE_CA above.
    ASSERT(abs(iFactor) > 1 && abs(iFactor) <= 8);
    // Factor tables start at |iFactor| == 2
    const int factorIndex = abs(iFactor) - 2;
    switch (sfSampleFormat)
    {
    case SampleFormat_U8: {
        uint8_t *pOutSample = pOut;
        // Convert unsigned [0..255] to signed before scaling, and back after
        int32_t sample = *pIn + INT8_MIN;
        SCALE_CA(sample, iFactor, pcm_volume_adjust_integer[factorIndex]);
        *pOutSample = av_clip_uint8(sample - INT8_MIN);
    }
    break;
    case SampleFormat_16: {
        int16_t *pOutSample = (int16_t *)pOut;
        int32_t sample = *((int16_t *)pIn);
        SCALE_CA(sample, iFactor, pcm_volume_adjust_integer[factorIndex]);
        *pOutSample = av_clip_int16(sample);
    }
    break;
    case SampleFormat_24: {
        // Assemble the 3 little-endian bytes into the top of an int32 and
        // shift down so the value is sign-extended
        int32_t sample = (pIn[0] << 8) + (pIn[1] << 16) + (pIn[2] << 24);
        sample >>= 8;
        SCALE_CA(sample, iFactor, pcm_volume_adjust_integer[factorIndex]);
        sample = av_clip(sample, INT24_MIN, INT24_MAX);
        // Write back as 3 little-endian bytes
        pOut[0] = sample & 0xff;
        pOut[1] = (sample >> 8) & 0xff;
        pOut[2] = (sample >> 16) & 0xff;
    }
    break;
    case SampleFormat_32: {
        int32_t *pOutSample = (int32_t *)pOut;
        // 64-bit intermediate avoids overflow during the scale
        int64_t sample = *((int32_t *)pIn);
        SCALE_CA(sample, iFactor, pcm_volume_adjust_integer[factorIndex]);
        *pOutSample = av_clipl_int32(sample);
    }
    break;
    case SampleFormat_FP32: {
        float *pOutSample = (float *)pOut;
        float sample = *((float *)pIn);
        if (iFactor > 0)
        {
            sample *= pcm_volume_adjust_float[factorIndex];
        }
        else
        {
            sample /= pcm_volume_adjust_float[factorIndex];
        }
        *pOutSample = av_clipf(sample, -1.0f, 1.0f);
    }
    break;
    default: ASSERT(0); break;
    }
}
//
// Writes one sample of digital silence into the buffer. For unsigned 8-bit
// audio the midpoint (128) is silence; all other formats use zero.
//
static inline void Silence(BYTE *pBuffer, LAVAudioSampleFormat sfSampleFormat)
{
    switch (sfSampleFormat)
    {
    case SampleFormat_U8:
        *pBuffer = 128U;
        break;
    case SampleFormat_16:
    case SampleFormat_24:
    case SampleFormat_32:
    case SampleFormat_FP32:
        memset(pBuffer, 0, get_byte_per_sample(sfSampleFormat));
        break;
    default:
        ASSERT(0);
    }
}
//
// Extended Channel Remapping Processor
//
// This function can process a PCM buffer of any sample format, and remap the channels
// into any arbitrary layout and channel count.
//
// The samples are copied byte-by-byte, without any conversion or loss.
//
// The ExtendedChannelMap is assumed to always have at least uOutChannels valid entries.
// Its layout is in output format:
// Map[0] is the first output channel, and should contain the index in the source stream (or -1 for silence)
// Map[1] is the second output channel
//
// Source channels can be applied multiple times to the Destination, but multiple Source channels cannot be merged into one channel.
// Note that when copying one source channel into multiple destinations, you always want to reduce its volume.
// You can either do this in a second step, or use the factor documented below
//
// Examples:
// 5.1 Input Buffer, following map will extract the Center channel, and return it as Mono:
// uOutChannels == 1; map = {2}
//
// Mono Input Buffer, Convert to Stereo
// uOutChannels == 2; map = {0, 0}
//
// Additionally, a factor can be applied to all PCM samples
//
// For optimization, the factor cannot be freely specified.
// Factors -1, 0, 1 are ignored.
// A Factor of 2 doubles the volume, 3 trippled, etc.
// A Factor of -2 will produce half volume, -3 one third, etc.
// The limit is a factor of 8/-8
//
// Otherwise, see ChannelMapping
HRESULT ExtendedChannelMapping(BufferDetails *pcm, const unsigned uOutChannels, const ExtendedChannelMap extMap)
{
#ifdef DEBUG
    ASSERT(pcm && pcm->bBuffer);
    ASSERT(uOutChannels > 0 && uOutChannels <= 8);
    for (unsigned idx = 0; idx < uOutChannels; ++idx)
    {
        ASSERT(extMap[idx].idx >= -1 && extMap[idx].idx < pcm->layout.nb_channels);
    }
#endif
    // Sample Size
    const unsigned uSampleSize = get_byte_per_sample(pcm->sfFormat);
    // New Output Buffer (ownership transferred to pcm below)
    GrowableArray<BYTE> *out = new GrowableArray<BYTE>();
    out->SetSize(uOutChannels * pcm->nSamples * uSampleSize);
    const BYTE *pIn = pcm->bBuffer->Ptr();
    BYTE *pOut = out->Ptr();
    // Interleaved copy: for every frame, emit each output channel either
    // from its mapped source channel (optionally scaled) or as silence.
    for (unsigned i = 0; i < pcm->nSamples; ++i)
    {
        for (unsigned ch = 0; ch < uOutChannels; ++ch)
        {
            if (extMap[ch].idx >= 0)
            {
                // Factor 0/±1 means plain copy; otherwise scale the sample
                if (!extMap[ch].factor || abs(extMap[ch].factor) == 1)
                    memcpy(pOut, pIn + (extMap[ch].idx * uSampleSize), uSampleSize);
                else
                    SampleCopyAdjust(pOut, pIn + (extMap[ch].idx * uSampleSize), extMap[ch].factor, pcm->sfFormat);
            }
            else
                Silence(pOut, pcm->sfFormat);
            pOut += uSampleSize;
        }
        pIn += uSampleSize * pcm->layout.nb_channels;
    }
    // Apply changes to buffer
    delete pcm->bBuffer;
    pcm->bBuffer = out;
    // New layout has a known channel count but unspecified positions
    av_channel_layout_uninit(&pcm->layout);
    pcm->layout.order = AV_CHANNEL_ORDER_UNSPEC;
    pcm->layout.nb_channels = uOutChannels;
    return S_OK;
}
#define CHL_CONTAINS_ALL(l, m) (((l) & (m)) == (m))
#define CHL_ALL_OR_NONE(l, m) (((l) & (m)) == (m) || ((l) & (m)) == 0)
HRESULT CLAVAudio::CheckChannelLayoutConformity(AVChannelLayout *layout)
{
    // Decide whether the incoming native channel layout should be expanded
    // to a standard 5.1/6.1/7.1 layout, and set up the channel map if so.
    // Returns S_FALSE (no processing) for unsupported or already-default
    // layouts.
    int channels = layout->nb_channels;
    // Declared and initialized before the first goto: the previous code
    // initialized dwMask below the gotos, so the jumps skipped over an
    // initialized variable, which is ill-formed C++.
    DWORD dwMask = 0;
    if (layout->order != AV_CHANNEL_ORDER_NATIVE)
    {
        DbgLog((LOG_ERROR, 10,
                L"::CheckChannelLayoutConformity(): Only native channel orders are supported"));
        goto noprocessing;
    }
    if (layout->u.mask > INT32_MAX)
    {
        DbgLog((LOG_ERROR, 10,
                L"::CheckChannelLayoutConformity(): Layout channels above 32-bit are not handled (mask: 0x%llx)",
                layout->u.mask));
        goto noprocessing;
    }
    dwMask = (DWORD)layout->u.mask;
    // We require multi-channel and at least containing stereo
    if (!CHL_CONTAINS_ALL(dwMask, AV_CH_LAYOUT_STEREO) || channels == 2)
        goto noprocessing;
    // We do not know what to do with "top" channels
    if (dwMask & (AV_CH_TOP_CENTER | AV_CH_TOP_FRONT_LEFT | AV_CH_TOP_FRONT_CENTER | AV_CH_TOP_FRONT_RIGHT |
                  AV_CH_TOP_BACK_LEFT | AV_CH_TOP_BACK_CENTER | AV_CH_TOP_BACK_RIGHT))
    {
        DbgLog((LOG_ERROR, 10,
                L"::CheckChannelLayoutConformity(): Layout with top channels is not supported (mask: 0x%x)", dwMask));
        goto noprocessing;
    }
    // We need either both surround channels, or none. One of a type is not supported
    if (!CHL_ALL_OR_NONE(dwMask, AV_CH_BACK_LEFT | AV_CH_BACK_RIGHT) ||
        !CHL_ALL_OR_NONE(dwMask, AV_CH_SIDE_LEFT | AV_CH_SIDE_RIGHT) ||
        !CHL_ALL_OR_NONE(dwMask, AV_CH_FRONT_LEFT_OF_CENTER | AV_CH_FRONT_RIGHT_OF_CENTER))
    {
        DbgLog(
            (LOG_ERROR, 10,
             L"::CheckChannelLayoutConformity(): Layout with only one surround channel is not supported (mask: 0x%x)",
             dwMask));
        goto noprocessing;
    }
    // No need to process full 5.1/6.1 layouts, or any 8 channel layouts
    if (dwMask == AV_CH_LAYOUT_5POINT1 || dwMask == AV_CH_LAYOUT_5POINT1_BACK || dwMask == AV_CH_LAYOUT_6POINT1_BACK ||
        channels == 8)
    {
        DbgLog((LOG_TRACE, 10, L"::CheckChannelLayoutConformity(): Layout is already a default layout (mask: 0x%x)",
                dwMask));
        goto noprocessing;
    }
    // Check 5.1 channels
    if (CHL_CONTAINS_ALL(AV_CH_LAYOUT_5POINT1, dwMask) /* 5.1 with side channels */
        || CHL_CONTAINS_ALL(AV_CH_LAYOUT_5POINT1_BACK, dwMask) /* 5.1 with back channels */
        || CHL_CONTAINS_ALL(LAV_CH_LAYOUT_5POINT1_WIDE, dwMask) /* 5.1 with side-front channels */
        || CHL_CONTAINS_ALL(LAV_CH_LAYOUT_5POINT1_BC, dwMask)) /* 3/1/x layouts, front channels with a back center */
        return Create51Conformity(dwMask);
    // Check 6.1 channels (5.1 layouts + Back Center)
    if (CHL_CONTAINS_ALL(AV_CH_LAYOUT_6POINT1, dwMask) /* 6.1 with side channels */
        || CHL_CONTAINS_ALL(AV_CH_LAYOUT_6POINT1_BACK, dwMask) /* 6.1 with back channels */
        || CHL_CONTAINS_ALL(LAV_CH_LAYOUT_5POINT1_WIDE | AV_CH_BACK_CENTER, dwMask)) /* 6.1 with side-front channels */
        return Create61Conformity(dwMask);
    // Check 7.1 channels
    if (CHL_CONTAINS_ALL(AV_CH_LAYOUT_7POINT1, dwMask) /* 7.1 with side+back channels */
        || CHL_CONTAINS_ALL(AV_CH_LAYOUT_7POINT1_WIDE, dwMask) /* 7.1 with front-side+back channels */
        || CHL_CONTAINS_ALL(LAV_CH_LAYOUT_7POINT1_EXTRAWIDE, dwMask)) /* 7.1 with front-side+side channels */
        return Create71Conformity(dwMask);
noprocessing:
    m_bChannelMappingRequired = FALSE;
    return S_FALSE;
}
// Build a channel map that expands the given partial layout into a standard
// 5.1 layout (L R C LFE BL BR), inserting silence for absent channels.
HRESULT CLAVAudio::Create51Conformity(DWORD dwLayout)
{
    DbgLog((LOG_TRACE, 10, L"::Create51Conformity(): Creating 5.1 default layout (mask: 0x%x)", dwLayout));
    int ch = 0; // running index into the source channel order
    ExtChMapClear(&m_ChannelMap);
    // All layouts we support have to contain L/R
    ExtChMapSet(&m_ChannelMap, 0, ch++, 0);
    ExtChMapSet(&m_ChannelMap, 1, ch++, 0);
    // Center channel
    if (dwLayout & AV_CH_FRONT_CENTER)
        ExtChMapSet(&m_ChannelMap, 2, ch++, 0);
    // LFE
    if (dwLayout & AV_CH_LOW_FREQUENCY)
        ExtChMapSet(&m_ChannelMap, 3, ch++, 0);
    // Back/Side
    if (dwLayout & (AV_CH_SIDE_LEFT | AV_CH_BACK_LEFT | AV_CH_FRONT_LEFT_OF_CENTER))
    {
        ExtChMapSet(&m_ChannelMap, 4, ch++, 0);
        ExtChMapSet(&m_ChannelMap, 5, ch++, 0);
        // Back Center
    }
    else if (dwLayout & AV_CH_BACK_CENTER)
    {
        // Mirror the single back-center into both surrounds at reduced
        // volume (factor -2, see ExtendedChannelMapping)
        ExtChMapSet(&m_ChannelMap, 4, ch, -2);
        ExtChMapSet(&m_ChannelMap, 5, ch++, -2);
    }
    m_bChannelMappingRequired = TRUE;
    av_channel_layout_uninit(&m_ChannelMapOutputLayout);
    av_channel_layout_from_mask(&m_ChannelMapOutputLayout, AV_CH_LAYOUT_5POINT1);
    return S_OK;
}
// Build a channel map that expands the given partial layout into a standard
// 6.1 (back) layout, or delegates to 7.1 when the Expand61 setting is on.
HRESULT CLAVAudio::Create61Conformity(DWORD dwLayout)
{
    if (m_settings.Expand61)
    {
        DbgLog((LOG_TRACE, 10, L"::Create61Conformity(): Expanding to 7.1"));
        return Create71Conformity(dwLayout);
    }
    DbgLog((LOG_TRACE, 10, L"::Create61Conformity(): Creating 6.1 default layout (mask: 0x%x)", dwLayout));
    int ch = 0; // running index into the source channel order
    ExtChMapClear(&m_ChannelMap);
    // All layouts we support have to contain L/R
    ExtChMapSet(&m_ChannelMap, 0, ch++, 0);
    ExtChMapSet(&m_ChannelMap, 1, ch++, 0);
    // Center channel
    if (dwLayout & AV_CH_FRONT_CENTER)
        ExtChMapSet(&m_ChannelMap, 2, ch++, 0);
    // LFE
    if (dwLayout & AV_CH_LOW_FREQUENCY)
        ExtChMapSet(&m_ChannelMap, 3, ch++, 0);
    // Back channels, if before BC
    if (dwLayout & (AV_CH_BACK_LEFT | AV_CH_FRONT_LEFT_OF_CENTER))
    {
        DbgLog((LOG_TRACE, 10, L"::Create61Conformity(): Using surround channels *before* BC"));
        ExtChMapSet(&m_ChannelMap, 4, ch++, 0);
        ExtChMapSet(&m_ChannelMap, 5, ch++, 0);
    }
    // Back Center
    if (dwLayout & AV_CH_BACK_CENTER)
        ExtChMapSet(&m_ChannelMap, 6, ch++, 0);
    // Back channels, if after BC
    if (dwLayout & AV_CH_SIDE_LEFT)
    {
        DbgLog((LOG_TRACE, 10, L"::Create61Conformity(): Using surround channels *after* BC"));
        ExtChMapSet(&m_ChannelMap, 4, ch++, 0);
        ExtChMapSet(&m_ChannelMap, 5, ch++, 0);
    }
    m_bChannelMappingRequired = TRUE;
    av_channel_layout_uninit(&m_ChannelMapOutputLayout);
    av_channel_layout_from_mask(&m_ChannelMapOutputLayout, AV_CH_LAYOUT_6POINT1_BACK);
    return S_OK;
}
// Build a channel map that arranges the input layout described by dwLayout
// into a standard 7.1 configuration. A single back center is duplicated into
// a speaker pair (flag -2, the same duplication flag used by the other
// conformity mappings); its placement depends on whether the source orders
// the BC before or after the side surrounds.
HRESULT CLAVAudio::Create71Conformity(DWORD dwLayout)
{
    DbgLog((LOG_TRACE, 10, L"::Create71Conformity(): Creating 7.1 default layout (mask: 0x%x)", dwLayout));
    int ch = 0;
    ExtChMapClear(&m_ChannelMap);
    // All layouts we support have to contain L/R
    ExtChMapSet(&m_ChannelMap, 0, ch++, 0);
    ExtChMapSet(&m_ChannelMap, 1, ch++, 0);
    // Center channel
    if (dwLayout & AV_CH_FRONT_CENTER)
        ExtChMapSet(&m_ChannelMap, 2, ch++, 0);
    // LFE
    if (dwLayout & AV_CH_LOW_FREQUENCY)
        ExtChMapSet(&m_ChannelMap, 3, ch++, 0);
    // Back channels
    if (dwLayout & AV_CH_BACK_CENTER)
    {
        // typo fixed: "Usign" -> "Using"
        DbgLog((LOG_TRACE, 10, L"::Create71Conformity(): Using BC to fill back channels"));
        if (dwLayout & AV_CH_SIDE_LEFT)
        {
            DbgLog((LOG_TRACE, 10, L"::Create71Conformity(): Using BC before side-surround channels"));
            // BC precedes the side pair in the source: duplicate it into outputs 4/5
            ExtChMapSet(&m_ChannelMap, 4, ch, -2);
            ExtChMapSet(&m_ChannelMap, 5, ch++, -2);
            ExtChMapSet(&m_ChannelMap, 6, ch++, 0);
            ExtChMapSet(&m_ChannelMap, 7, ch++, 0);
        }
        else
        {
            DbgLog((LOG_TRACE, 10, L"::Create71Conformity(): Using BC after side-surround channels"));
            // the surround pair precedes BC in the source: pair to outputs 6/7, BC into 4/5
            ExtChMapSet(&m_ChannelMap, 6, ch++, 0);
            ExtChMapSet(&m_ChannelMap, 7, ch++, 0);
            ExtChMapSet(&m_ChannelMap, 4, ch, -2);
            ExtChMapSet(&m_ChannelMap, 5, ch++, -2);
        }
    }
    else
    {
        DbgLog((LOG_TRACE, 10, L"::Create71Conformity(): Using original 4 surround channels"));
        ExtChMapSet(&m_ChannelMap, 4, ch++, 0);
        ExtChMapSet(&m_ChannelMap, 5, ch++, 0);
        ExtChMapSet(&m_ChannelMap, 6, ch++, 0);
        ExtChMapSet(&m_ChannelMap, 7, ch++, 0);
    }
    m_bChannelMappingRequired = TRUE;
    av_channel_layout_uninit(&m_ChannelMapOutputLayout);
    av_channel_layout_from_mask(&m_ChannelMapOutputLayout, AV_CH_LAYOUT_7POINT1);
    return S_OK;
}
// Expand packed 24-bit samples into 24-in-32 samples, with the payload in the
// upper 24 bits of each little-endian 32-bit value. wBitsPerSample stays at 24
// so downstream code still knows how many bits are significant.
HRESULT CLAVAudio::PadTo32(BufferDetails *buffer)
{
    ASSERT(buffer->sfFormat == SampleFormat_24);
    const DWORD dwValues = buffer->nSamples * buffer->layout.nb_channels;
    GrowableArray<BYTE> *pExpanded = new GrowableArray<BYTE>();
    pExpanded->SetSize(dwValues * 4);
    const BYTE *pSrc = buffer->bBuffer->Ptr();
    BYTE *pDst = pExpanded->Ptr();
    // one flat pass over all (sample, channel) values
    for (DWORD n = 0; n < dwValues; n++)
    {
        AV_WL32(pDst, AV_RL24(pSrc) << 8);
        pDst += 4;
        pSrc += 3;
    }
    delete buffer->bBuffer;
    buffer->bBuffer = pExpanded;
    buffer->sfFormat = SampleFormat_32;
    buffer->wBitsPerSample = 24;
    return S_OK;
}
// Pack 24-in-32 (or 16-in-32) samples into a tight 24-bit (or 16-bit) buffer
// by dropping the low-order padding byte(s) of every 32-bit value.
// Returns S_FALSE, leaving the buffer untouched, when the target format is
// not available on the output.
HRESULT CLAVAudio::Truncate32Buffer(BufferDetails *buffer)
{
    ASSERT(buffer->sfFormat == SampleFormat_32 && buffer->wBitsPerSample <= 24);
    // Keep 2 bytes when 16-bit output is possible and sufficient, otherwise keep 3 (24-bit)
    const int nKeep = (buffer->wBitsPerSample <= 16 && GetSampleFormat(SampleFormat_16)) ? 2 : 3;
    if (nKeep == 3 && !GetSampleFormat(SampleFormat_24))
        return S_FALSE;
    const DWORD dwValues = buffer->nSamples * buffer->layout.nb_channels;
    GrowableArray<BYTE> *pPacked = new GrowableArray<BYTE>();
    pPacked->SetSize(dwValues * nKeep);
    // skip the least-significant padding byte(s) of the first value, then copy
    // the significant bytes out of every 4-byte sample
    const BYTE *pSrc = buffer->bBuffer->Ptr() + (4 - nKeep);
    BYTE *pDst = pPacked->Ptr();
    for (DWORD n = 0; n < dwValues; n++)
    {
        memcpy(pDst, pSrc, nKeep);
        pDst += nKeep;
        pSrc += 4;
    }
    delete buffer->bBuffer;
    buffer->bBuffer = pPacked;
    buffer->sfFormat = (nKeep == 3) ? SampleFormat_24 : SampleFormat_16;
    return S_OK;
}
// Convert the buffer to the target channel layout and sample format using
// libswresample, (re-)creating the swr context whenever the input format,
// target layout or mixing settings change.
// Fixes vs. previous revision:
//  - the early-return short-circuit paths no longer leak chMixingLayout
//  - swr_alloc_set_opts2 failure is detected before av_opt_set_int is called
//    on a null context
//  - the "goto setuperr" (which jumped over initialized locals) is replaced
//    with inline cleanup
// Returns S_OK on success, S_FALSE if conversion failed non-fatally, E_FAIL on
// setup failure (m_bAVResampleFailed then suppresses retries until the
// configuration changes).
HRESULT CLAVAudio::PerformAVRProcessing(BufferDetails *buffer)
{
    int ret = 0;

    // Select the mixing target: a forced override wins, then the configured
    // mixing layout, otherwise stay at the input layout.
    AVChannelLayout chMixingLayout{};
    if (av_channel_layout_check(&m_chOverrideMixer) != 0)
    {
        av_channel_layout_copy(&chMixingLayout, &m_chOverrideMixer);
    }
    else if (m_settings.MixingEnabled)
    {
        av_channel_layout_from_mask(&chMixingLayout, m_settings.MixingLayout);
    }
    else
    {
        av_channel_layout_copy(&chMixingLayout, &buffer->layout);
    }

    // No mixing stereo, if the user doesn't want it
    if (buffer->layout.nb_channels <= 2 && (m_settings.MixingFlags & LAV_MIXING_FLAG_UNTOUCHED_STEREO))
    {
        av_channel_layout_uninit(&chMixingLayout);
        av_channel_layout_copy(&chMixingLayout, &buffer->layout);
    }

    // Mixing is performed in float, a pure format conversion uses the closest available format
    LAVAudioSampleFormat outputFormat = av_channel_layout_compare(&buffer->layout, &chMixingLayout) != 0
                                            ? GetBestAvailableSampleFormat(SampleFormat_FP32)
                                            : GetBestAvailableSampleFormat(buffer->sfFormat);

    // Short Circuit some processing: pure 24 <-> 32 conversions don't need swresample
    if (av_channel_layout_compare(&buffer->layout, &chMixingLayout) == 0 && !buffer->bPlanar)
    {
        if (buffer->sfFormat == SampleFormat_24 && outputFormat == SampleFormat_32)
        {
            PadTo32(buffer);
            av_channel_layout_uninit(&chMixingLayout); // release the layout copy before returning
            return S_OK;
        }
        else if (buffer->sfFormat == SampleFormat_32 && outputFormat == SampleFormat_24)
        {
            buffer->wBitsPerSample = 24;
            Truncate32Buffer(buffer);
            av_channel_layout_uninit(&chMixingLayout); // release the layout copy before returning
            return S_OK;
        }
    }

    // Sadly, we need to convert this, swresample has no 24-bit mode
    if (buffer->sfFormat == SampleFormat_24)
    {
        PadTo32(buffer);
    }

    // (Re-)create the swresample context when anything in the conversion changed
    if (av_channel_layout_compare(&buffer->layout, &m_MixingInputLayout) != 0 || (!m_swrContext && !m_bAVResampleFailed) || m_bMixingSettingsChanged ||
        av_channel_layout_compare(&m_chRemixLayout, &chMixingLayout) != 0 || outputFormat != m_sfRemixFormat ||
        buffer->sfFormat != m_MixingInputFormat)
    {
        m_bAVResampleFailed = FALSE;
        m_bMixingSettingsChanged = FALSE;
        if (m_swrContext)
        {
            swr_free(&m_swrContext);
        }
        av_channel_layout_copy(&m_MixingInputLayout, &buffer->layout);
        av_channel_layout_copy(&m_chRemixLayout, &chMixingLayout);
        m_MixingInputFormat = buffer->sfFormat;
        m_sfRemixFormat = outputFormat;
        ret = swr_alloc_set_opts2(&m_swrContext, &chMixingLayout, get_ff_sample_fmt(m_sfRemixFormat), buffer->dwSamplesPerSec,
                                  &buffer->layout, get_ff_sample_fmt(buffer->sfFormat), buffer->dwSamplesPerSec, 0, NULL);
        if (ret < 0 || !m_swrContext)
        {
            DbgLog((LOG_ERROR, 10, L"swr_alloc_set_opts2 failed"));
            swr_free(&m_swrContext);
            m_bAVResampleFailed = TRUE;
            av_channel_layout_uninit(&chMixingLayout);
            return E_FAIL;
        }
        av_opt_set_int(m_swrContext, "dither_method",
                       m_settings.SampleConvertDither ? SWR_DITHER_TRIANGULAR_HIGHPASS : SWR_DITHER_NONE, 0);
        // Setup mixing properties, if needed
        if (av_channel_layout_compare(&buffer->layout, &chMixingLayout) != 0)
        {
            ASSERT(chMixingLayout.order == AV_CHANNEL_ORDER_NATIVE);
            BOOL bNormalize = !!(m_settings.MixingFlags & LAV_MIXING_FLAG_NORMALIZE_MATRIX);
            BOOL bClipProtection = !!(m_settings.MixingFlags & LAV_MIXING_FLAG_CLIP_PROTECTION);
            av_opt_set_int(m_swrContext, "clip_protection", !bNormalize && bClipProtection, 0);
            av_opt_set_int(m_swrContext, "internal_sample_fmt", AV_SAMPLE_FMT_FLTP, 0);
            // setup matrix parameters (settings are stored as fixed-point * 10000)
            const double center_mix_level = (double)m_settings.MixingCenterLevel / 10000.0;
            const double surround_mix_level = (double)m_settings.MixingSurroundLevel / 10000.0;
            const double lfe_mix_level =
                (double)m_settings.MixingLFELevel / 10000.0 / (chMixingLayout.u.mask == AV_CH_LAYOUT_MONO ? 1.0 : M_SQRT1_2);
            av_opt_set_double(m_swrContext, "center_mix_level", center_mix_level, 0);
            av_opt_set_double(m_swrContext, "surround_mix_level", surround_mix_level, 0);
            av_opt_set_double(m_swrContext, "lfe_mix_level", lfe_mix_level, 0);
            av_opt_set_double(m_swrContext, "rematrix_maxval", bNormalize ? 1.0 : 0.0, 0);
            av_opt_set_int(m_swrContext, "matrix_encoding", (AVMatrixEncoding)m_settings.MixingMode, 0);
        }
        // Open Resample Context
        ret = swr_init(m_swrContext);
        if (ret < 0)
        {
            DbgLog((LOG_ERROR, 10, L"swr_init failed"));
            swr_free(&m_swrContext);
            m_bAVResampleFailed = TRUE;
            av_channel_layout_uninit(&chMixingLayout);
            return E_FAIL;
        }
    }
    if (!m_swrContext)
    {
        // a previous setup attempt failed and nothing changed since
        DbgLog((LOG_ERROR, 10, L"swresample context missing?"));
        m_bAVResampleFailed = TRUE;
        av_channel_layout_uninit(&chMixingLayout);
        return E_FAIL;
    }
    av_channel_layout_uninit(&chMixingLayout);

    LAVAudioSampleFormat bufferFormat =
        (m_sfRemixFormat == SampleFormat_24) ? SampleFormat_32 : m_sfRemixFormat; // swresample always outputs 32-bit
    GrowableArray<BYTE> *pcmOut = new GrowableArray<BYTE>();
    pcmOut->Allocate(FFALIGN(buffer->nSamples, 32) * m_chRemixLayout.nb_channels *
                     get_byte_per_sample(bufferFormat));
    BYTE *pOut = pcmOut->Ptr();
    BYTE *pIn = buffer->bBuffer->Ptr();
    ret = swr_convert(m_swrContext, &pOut, pcmOut->GetAllocated(), (const uint8_t **)&pIn, buffer->nSamples);
    if (ret < 0)
    {
        DbgLog((LOG_ERROR, 10, L"swr_convert failed"));
        delete pcmOut;
        return S_FALSE;
    }
    // swap the converted buffer into the BufferDetails and update its description
    delete buffer->bBuffer;
    buffer->bBuffer = pcmOut;
    av_channel_layout_copy(&buffer->layout, &m_chRemixLayout);
    buffer->sfFormat = bufferFormat;
    buffer->wBitsPerSample = get_byte_per_sample(m_sfRemixFormat) << 3;
    buffer->bBuffer->SetSize(buffer->layout.nb_channels * buffer->nSamples * get_byte_per_sample(buffer->sfFormat));
    return S_OK;
}
// Post-decode processing pipeline: sanitize the channel layout, optionally
// suppress channel-format changes, run mixing / sample-format conversion
// (PerformAVRProcessing), remap to standard speaker layouts, apply the
// configured mono/6.1 expansions and finally pack 24-in-32 samples.
HRESULT CLAVAudio::PostProcess(BufferDetails *buffer)
{
    // Validate channel mask
    if (buffer->layout.order == AV_CHANNEL_ORDER_UNSPEC || (buffer->layout.order == AV_CHANNEL_ORDER_NATIVE && buffer->layout.u.mask == 0) || (buffer->layout.order != AV_CHANNEL_ORDER_UNSPEC && buffer->layout.order != AV_CHANNEL_ORDER_NATIVE))
    {
        // no usable native mask - synthesize a default mask from the channel count
        int layout_channels = buffer->layout.nb_channels;
        av_channel_layout_uninit(&buffer->layout);
        DWORD dwMask = get_channel_mask(layout_channels);
        if (dwMask)
        {
            av_channel_layout_from_mask(&buffer->layout, dwMask);
        }
        else
        {
            // no default mask known for this channel count, keep it unspecified
            buffer->layout.order = AV_CHANNEL_ORDER_UNSPEC;
            buffer->layout.nb_channels = layout_channels;
        }
    }
    // When suppression is enabled, the first layout seen is latched into
    // m_SuppressLayout; later layouts with fewer/equal channels are forced back
    // to it via the m_chOverrideMixer (consumed by PerformAVRProcessing).
    if (m_settings.SuppressFormatChanges)
    {
        if (av_channel_layout_check(&m_SuppressLayout) == 0)
        {
            // first buffer - remember the initial layout
            av_channel_layout_copy(&m_SuppressLayout, &buffer->layout);
        }
        else
        {
            if (av_channel_layout_compare(&buffer->layout, &m_SuppressLayout) != 0 && buffer->layout.nb_channels <= m_SuppressLayout.nb_channels)
            {
                // only warn once
                if (av_channel_layout_compare(&m_chOverrideMixer, &m_SuppressLayout) != 0)
                {
                    DbgLog((LOG_TRACE, 10, L"Channel Format change suppressed"));
                    av_channel_layout_copy(&m_chOverrideMixer, &m_SuppressLayout);
                }
            }
            else if (buffer->layout.nb_channels > m_SuppressLayout.nb_channels)
            {
                // a channel-count increase is allowed through and becomes the new baseline
                DbgLog((LOG_TRACE, 10, L"Channel count increased, allowing change"));
                av_channel_layout_uninit(&m_chOverrideMixer);
                av_channel_layout_copy(&m_SuppressLayout, &buffer->layout);
            }
        }
    }
    ASSERT(buffer->layout.order == AV_CHANNEL_ORDER_NATIVE || buffer->layout.order == AV_CHANNEL_ORDER_UNSPEC);
    BOOL bOverrideMixing = av_channel_layout_check(&m_chOverrideMixer) != 0;
    // determine the layout we're mixing to
    AVChannelLayout chMixingLayout{};
    if (bOverrideMixing)
        av_channel_layout_copy(&chMixingLayout, &m_chOverrideMixer);
    else if (m_settings.MixingEnabled)
        av_channel_layout_from_mask(&chMixingLayout, m_settings.MixingLayout);
    BOOL bMixing = (m_settings.MixingEnabled || bOverrideMixing) &&
                   av_channel_layout_compare(&buffer->layout, &chMixingLayout) != 0;
    LAVAudioSampleFormat outputFormat = GetBestAvailableSampleFormat(buffer->sfFormat);
    // Perform conversion to layout and sample format, if required
    if (bMixing || outputFormat != buffer->sfFormat)
    {
        PerformAVRProcessing(buffer);
    }
    // Remap to standard configurations, if requested (not in combination with mixing)
    if (!bMixing && m_settings.OutputStandardLayout)
    {
        // recompute the channel map only when the decoded layout changed
        if (av_channel_layout_compare(&buffer->layout, &m_DecodeLayoutSanified) != 0)
        {
            av_channel_layout_copy(&m_DecodeLayoutSanified, &buffer->layout);
            CheckChannelLayoutConformity(&buffer->layout);
        }
        if (m_bChannelMappingRequired)
        {
            ExtendedChannelMapping(buffer, m_ChannelMapOutputLayout.nb_channels, m_ChannelMap);
            av_channel_layout_copy(&buffer->layout, &m_ChannelMapOutputLayout);
        }
    }
    // Map to the requested 5.1 layout
    if (m_settings.Output51Legacy && buffer->layout.u.mask == AV_CH_LAYOUT_5POINT1)
        buffer->layout.u.mask = AV_CH_LAYOUT_5POINT1_BACK;
    else if (!m_settings.Output51Legacy && buffer->layout.u.mask == AV_CH_LAYOUT_5POINT1_BACK)
        buffer->layout.u.mask = AV_CH_LAYOUT_5POINT1;
    // Check if current output uses back layout, and keep it active in that case
    if (buffer->layout.u.mask == AV_CH_LAYOUT_5POINT1)
    {
        WAVEFORMATEX *wfe = (WAVEFORMATEX *)m_pOutput->CurrentMediaType().Format();
        if (wfe->wFormatTag == WAVE_FORMAT_EXTENSIBLE)
        {
            WAVEFORMATEXTENSIBLE *wfex = (WAVEFORMATEXTENSIBLE *)wfe;
            if (wfex->dwChannelMask == AV_CH_LAYOUT_5POINT1_BACK)
                buffer->layout.u.mask = AV_CH_LAYOUT_5POINT1_BACK;
        }
    }
    // Mono -> Stereo expansion
    if (buffer->layout.nb_channels == 1 && m_settings.ExpandMono)
    {
        // duplicate source channel 0 into both outputs (flag -2)
        ExtendedChannelMap map = {{0, -2}, {0, -2}};
        ExtendedChannelMapping(buffer, 2, map);
        av_channel_layout_uninit(&buffer->layout);
        av_channel_layout_default(&buffer->layout, 2);
    }
    // 6.1 -> 7.1 expansion
    if (m_settings.Expand61)
    {
        if (buffer->layout.u.mask == AV_CH_LAYOUT_6POINT1_BACK)
        {
            // split the back center (source ch 6) into two channels (flag -2)
            ExtendedChannelMap map = {{0, 0}, {1, 0}, {2, 0}, {3, 0}, {6, -2}, {6, -2}, {4, 0}, {5, 0}};
            ExtendedChannelMapping(buffer, 8, map);
            buffer->layout.u.mask = AV_CH_LAYOUT_7POINT1;
        }
        else if (buffer->layout.u.mask == AV_CH_LAYOUT_6POINT1)
        {
            // split the back center (source ch 4) into two channels (flag -2)
            ExtendedChannelMap map = {{0, 0}, {1, 0}, {2, 0}, {3, 0}, {4, -2}, {4, -2}, {5, 0}, {6, 0}};
            ExtendedChannelMapping(buffer, 8, map);
            buffer->layout.u.mask = AV_CH_LAYOUT_7POINT1;
        }
    }
    if (m_bVolumeStats)
    {
        UpdateVolumeStats(*buffer);
    }
    // Truncate 24-in-32 to real 24
    if (buffer->sfFormat == SampleFormat_32 && buffer->wBitsPerSample && buffer->wBitsPerSample <= 24)
    {
        Truncate32Buffer(buffer);
    }
    av_channel_layout_uninit(&chMixingLayout);
    return S_OK;
}
| 28,978
|
C++
|
.cpp
| 680
| 35.451471
| 243
| 0.625412
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| true
| false
| false
|
22,144
|
BitstreamMAT.cpp
|
Nevcairiel_LAVFilters/decoder/LAVAudio/BitstreamMAT.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
* This code was inspired by the ffdshow-tryouts TaudioCodecBitstream module, licensed under GPL 2.0
*/
#include "stdafx.h"
#include "LAVAudio.h"
#pragma warning(push)
#pragma warning(disable : 4101)
#pragma warning(disable : 5033)
extern "C"
{
#include "libavformat/spdif.h"
#define AVCODEC_X86_MATHOPS_H
#include "libavcodec/get_bits.h"
}
#pragma warning(pop)
// A complete TrueHD MAT output frame is always 61440 bytes (including the
// 8-byte IEC 61937 burst preamble written by MATWriteHeader)
#define MAT_BUFFER_SIZE (61440)
// writable payload limit - the last bytes are reserved for the MAT end code
#define MAT_BUFFER_LIMIT (MAT_BUFFER_SIZE - 24 /* MAT end code size */)
// fixed byte position at which the MAT middle marker must be inserted
#define MAT_POS_MIDDLE (30708 /* middle point*/ + 8 /* IEC header in front */)
// magic byte sequences marking the start / middle / end of a MAT frame
static const BYTE mat_start_code[20] = {0x07, 0x9E, 0x00, 0x03, 0x84, 0x01, 0x01, 0x01, 0x80, 0x00,
                                        0x56, 0xA5, 0x3B, 0xF4, 0x81, 0x83, 0x49, 0x80, 0x77, 0xE0};
static const BYTE mat_middle_code[12] = {0xC3, 0xC1, 0x42, 0x49, 0x3B, 0xFA, 0x82, 0x83, 0x49, 0x80, 0x77, 0xE0};
static const BYTE mat_end_code[24] = {0xC3, 0xC2, 0xC0, 0xC4, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                                      0x00, 0x00, 0x97, 0x11, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00};
// Start a fresh MAT output buffer: write the IEC 61937 burst preamble and the
// MAT start code, then account for those bytes against either the pending
// padding of the previous TrueHD frame or the size of the current MAT frame.
void CLAVAudio::MATWriteHeader()
{
    ASSERT(m_bsOutput.GetCount() == 0);
    DWORD dwSize = BURST_HEADER_SIZE + sizeof(mat_start_code);
    // reserve size for the IEC header and the MAT start code
    m_bsOutput.SetSize(dwSize);
    BYTE *p = m_bsOutput.Ptr();
    // IEC burst header
    AV_WB16(p + 0, SYNCWORD1);
    AV_WB16(p + 2, SYNCWORD2);
    AV_WB16(p + 4, IEC61937_TRUEHD);
    AV_WB16(p + 6, 61424);
    // MAT start code
    memcpy(p + BURST_HEADER_SIZE, mat_start_code, sizeof(mat_start_code));
    // unless the start code falls into the padding, it's considered part of the current MAT frame
    // Note that audio frames are not always aligned with MAT frames, so we might already have a partial frame at this
    // point
    m_TrueHDMATState.mat_framesize += dwSize;
    // The MAT metadata counts as padding, if we're scheduled to write any, which means the start bytes should reduce
    // any further padding.
    if (m_TrueHDMATState.padding > 0)
    {
        // if the header fits into the padding of the last frame, just reduce the amount of needed padding
        if (m_TrueHDMATState.padding > dwSize)
        {
            m_TrueHDMATState.padding -= dwSize;
            m_TrueHDMATState.mat_framesize = 0;
        }
        else // otherwise, consume all padding and set the size of the next MAT frame to the remaining data
        {
            m_TrueHDMATState.mat_framesize = (dwSize - m_TrueHDMATState.padding);
            m_TrueHDMATState.padding = 0;
        }
    }
}
// Write the pending inter-frame padding (zero bytes) into the MAT buffer and
// update the padding / frame-size accounting depending on how much was
// actually consumed (MAT markers written inside the padding count against it).
void CLAVAudio::MATWritePadding()
{
    if (m_TrueHDMATState.padding > 0)
    {
        // allocate padding (on the stack if possible)
        // NOTE(review): _malloca can return NULL on failure, which memset would fault on - consider a check
        BYTE *padding = (BYTE *)_malloca(m_TrueHDMATState.padding);
        memset(padding, 0, m_TrueHDMATState.padding);
        int remaining = MATFillDataBuffer(padding, m_TrueHDMATState.padding, true);
        // free the padding block
        _freea(padding);
        // not all padding could be written to the buffer, write it later
        if (remaining >= 0)
        {
            m_TrueHDMATState.padding = remaining;
            m_TrueHDMATState.mat_framesize = 0;
        }
        else // more padding than requested was written, eg. there was a MAT middle/end marker that needed to be written
        {
            m_TrueHDMATState.padding = 0;
            m_TrueHDMATState.mat_framesize = -remaining;
        }
    }
}
// Append raw bytes to the MAT output buffer while keeping track of the size of
// the MAT frame currently being assembled.
void CLAVAudio::MATAppendData(const BYTE *p, int size)
{
    m_TrueHDMATState.mat_framesize += size;
    m_bsOutput.Append(p, size);
}
// Write 'size' bytes from 'p' into the MAT output buffer, inserting the MAT
// middle marker at its fixed position and the MAT end code when the buffer
// limit is reached. When 'padding' is set, marker bytes count against the
// requested amount of padding.
// Returns the number of bytes that did not fit into the current buffer; a
// negative value means more bytes than requested were consumed (marker bytes
// written while padding), which the caller accounts to the next frame.
int CLAVAudio::MATFillDataBuffer(const BYTE *p, int size, bool padding)
{
    // buffer already full - nothing was written
    if (m_bsOutput.GetCount() >= MAT_BUFFER_LIMIT)
        return size;
    int remaining = size;
    // Write MAT middle marker, if needed
    // The MAT middle marker always needs to be in the exact same spot, any audio data will be split.
    // If we're currently writing padding, then the marker will be considered as padding data and reduce the amount of
    // padding still required.
    if (m_bsOutput.GetCount() <= MAT_POS_MIDDLE && m_bsOutput.GetCount() + size > MAT_POS_MIDDLE)
    {
        // write as much data before the middle code as we can
        int nBytesBefore = MAT_POS_MIDDLE - m_bsOutput.GetCount();
        MATAppendData(p, nBytesBefore);
        remaining -= nBytesBefore;
        // write the MAT middle code
        MATAppendData(mat_middle_code, sizeof(mat_middle_code));
        // if we're writing padding, deduct the size of the code from it
        if (padding)
            remaining -= sizeof(mat_middle_code);
        // write remaining data after the MAT marker (recurses, may hit the end-code case below)
        if (remaining > 0)
        {
            remaining = MATFillDataBuffer(p + nBytesBefore, remaining, padding);
        }
        return remaining;
    }
    // not enough room in the buffer to write all the data, write as much as we can and add the MAT footer
    if (m_bsOutput.GetCount() + size >= MAT_BUFFER_LIMIT)
    {
        // write as much data before the end code as we can
        int nBytesBefore = MAT_BUFFER_LIMIT - m_bsOutput.GetCount();
        MATAppendData(p, nBytesBefore);
        remaining -= nBytesBefore;
        // write the MAT end code
        MATAppendData(mat_end_code, sizeof(mat_end_code));
        ASSERT(m_bsOutput.GetCount() == MAT_BUFFER_SIZE);
        // MAT markers don't displace padding, so reduce the amount of padding
        if (padding)
            remaining -= sizeof(mat_end_code);
        // any remaining data will be written in future calls
        return remaining;
    }
    // everything fits
    MATAppendData(p, size);
    return 0;
}
// Deliver a completed MAT frame to the audio renderer and reset the output
// buffer. Tracks the difference between the actual number of audio samples in
// the frame and the nominal 24 TrueHD frames per MAT frame, so the timing of
// subsequent deliveries can be adjusted via nSamplesOffset.
void CLAVAudio::MATFlushPacket(HRESULT *hrDeliver)
{
    if (m_bsOutput.GetCount() > 0)
    {
        // a MAT frame is always delivered as one full fixed-size buffer
        // (use the named constant instead of the magic 61440 literal)
        ASSERT(m_bsOutput.GetCount() == MAT_BUFFER_SIZE);
        // normal number of samples per frame
        uint16_t frame_samples = 40 << (m_TrueHDMATState.ratebits & 7);
        int nMATSamples = (frame_samples * 24);
        // Deliver MAT packet to the audio renderer
        *hrDeliver = DeliverBitstream(m_nCodecId, m_bsOutput.Ptr(), m_bsOutput.GetCount(), m_rtStartInputCache,
                                      m_rtStopInputCache, true, m_TrueHDMATState.nSamplesOffset);
        // we expect 24 frames per MAT frame, so calculate an offset from that
        // this is done after delivery, because it modifies the duration of the frame, eg. the start of the next frame
        if (nMATSamples != m_TrueHDMATState.nSamples)
            m_TrueHDMATState.nSamplesOffset += m_TrueHDMATState.nSamples - nMATSamples;
        m_bsOutput.SetSize(0);
        m_TrueHDMATState.nSamples = 0;
    }
}
// Parse a TrueHD major sync frame: extract the ratebits (sample-rate code from
// format_info) and, when the first substream segment carries a restart header,
// the 16-bit output_timing counter used for discontinuity detection.
// The caller guarantees the major sync signature at p+4 (asserted below).
// Returns false if the buffer is too small to contain the headers.
// (The previously computed-but-unused access-unit 'length' local was removed.)
static bool ParseTrueHDMajorSyncHeaders(const BYTE *p, int buffsize, int &ratebits, uint16_t &output_timing,
                                        bool &output_timing_present)
{
    ASSERT(AV_RB32(p + 4) == 0xf8726fba);
    if (buffsize < 32)
        return false;
    // parse major sync and look for a restart header
    int major_sync_size = 28;
    if (p[29] & 1)
    {
        // a major sync extension block is present
        int extension_size = p[30] >> 4;
        major_sync_size += 2 + extension_size * 2;
    }
    GetBitContext gb;
    init_get_bits8(&gb, p + 4, buffsize - 4);
    skip_bits_long(&gb, 32); // format_sync
    // v(32) format_info
    ratebits = get_bits(&gb, 4); // ratebits
    skip_bits1(&gb);             // 6ch_multichannel_type
    skip_bits1(&gb);             // 8ch_multichannel_type
    skip_bits(&gb, 2);           // reserved
    skip_bits(&gb, 2);           // 2ch_presentation_channel_modifier
    skip_bits(&gb, 2);           // 6ch_presentation_channel_modifier
    skip_bits(&gb, 5);           // 6ch_presentation_channel_assignment
    skip_bits(&gb, 2);           // 8ch_presentation_channel_modifier
    skip_bits(&gb, 13);          // 8ch_presentation_channel_assignment
    skip_bits(&gb, 16);          // signature
    skip_bits(&gb, 16);          // flags
    skip_bits(&gb, 16);          // reserved
    skip_bits1(&gb);             // variable_rate
    skip_bits(&gb, 15);          // peak_data_rate
    int num_substreams = get_bits(&gb, 4);
    // skip the remainder of the major sync block
    skip_bits_long(&gb, 4 + (major_sync_size - 17) * 8);
    // substream directory
    for (int i = 0; i < num_substreams; i++)
    {
        int extra_substream_word = get_bits1(&gb);
        skip_bits1(&gb);    // restart_nonexistent
        skip_bits1(&gb);    // crc_present
        skip_bits1(&gb);    // reserved
        skip_bits(&gb, 12); // substream_end_ptr
        if (extra_substream_word)
            skip_bits(&gb, 16); // drc_gain_update, drc_time_update, reserved
    }
    // substream segments - only the first segment is inspected for a restart header
    for (int i = 0; i < num_substreams; i++)
    {
        if (get_bits1(&gb))
        { // block_header_exists
            if (get_bits1(&gb))
            { // restart_header_exists
                skip_bits(&gb, 14); // restart_sync_word
                output_timing = get_bits(&gb, 16);
                output_timing_present = true;
                // XXX: restart header
            }
            // XXX: Block header
        }
        // XXX: All blocks, all substreams?
        break;
    }
    return true;
}
// Pack a single TrueHD access unit into the MAT frame currently being built,
// computing and writing the inter-frame padding derived from the frame time
// codes, and delivering completed MAT frames through MATFlushPacket.
HRESULT CLAVAudio::BitstreamTrueHD(const BYTE *p, int buffsize, HRESULT *hrDeliver)
{
    // On a high level, a MAT frame consists of a sequence of padded TrueHD frames
    // The size of the padded frame can be determined from the frame time/sequence code in the frame header,
    // since it varies to accommodate spikes in bitrate.
    // In average all frames are always padded to 2560 bytes, so that 24 frames fit in one MAT frame, however
    // due to bitrate spikes single sync frames have been observed to use up to twice that size, in which
    // case they'll be preceded by smaller frames to keep the average bitrate constant.
    // A constant padding to 2560 bytes can work (this is how the ffmpeg spdifenc module works), however
    // high-bitrate streams can overshoot this size and therefor require proper handling of dynamic padding.
    uint16_t output_timing = 0;
    bool bOutputTimingPresent = false;
    // get the ratebits and output timing from the sync frame
    if (AV_RB32(p + 4) == 0xf8726fba)
    {
        if (ParseTrueHDMajorSyncHeaders(p, buffsize, m_TrueHDMATState.ratebits, output_timing, bOutputTimingPresent) ==
            false)
            return E_FAIL;
    }
    else if (m_TrueHDMATState.prev_frametime_valid == false)
    {
        // only start streaming on a major sync frame
        m_rtBitstreamCache = AV_NOPTS_VALUE;
        m_TrueHDMATState.nSamplesOffset = 0;
        return S_FALSE;
    }
    // frame time code of this access unit, and nominal samples per frame
    uint16_t frame_time = AV_RB16(p + 2);
    uint32_t space_size = 0;
    uint16_t frame_samples = 40 << (m_TrueHDMATState.ratebits & 7);
    m_TrueHDMATState.output_timing += frame_samples;
    if (bOutputTimingPresent)
    {
        // a mismatch between our predicted and the signaled output timing indicates a discontinuity
        if (m_TrueHDMATState.output_timing_valid && (output_timing != m_TrueHDMATState.output_timing))
        {
            DbgLog((LOG_TRACE, 10, _T("BitstreamTrueHD(): Detected a stream discontinuity, reseting framesize cache")));
            m_TrueHDMATState.prev_frametime_valid = false;
            m_TrueHDMATState.nSamplesOffset = 0;
            space_size = 40 * (64 >> (m_TrueHDMATState.ratebits & 7));
        }
        m_TrueHDMATState.output_timing = output_timing;
        m_TrueHDMATState.output_timing_valid = true;
    }
    // compute final padded size for the previous frame, if any
    if (m_TrueHDMATState.prev_frametime_valid)
        space_size = uint16_t(frame_time - m_TrueHDMATState.prev_frametime) * (64 >> (m_TrueHDMATState.ratebits & 7));
    // compute padding (ie. difference to the size of the previous frame)
    ASSERT(!m_TrueHDMATState.prev_frametime_valid || space_size >= m_TrueHDMATState.prev_mat_framesize);
    // if for some reason the space_size fails, align the actual frame size
    if (space_size < m_TrueHDMATState.prev_mat_framesize)
        space_size = FFALIGN(m_TrueHDMATState.prev_mat_framesize, (64 >> (m_TrueHDMATState.ratebits & 7)));
    m_TrueHDMATState.padding += (space_size - m_TrueHDMATState.prev_mat_framesize);
    // store frame time of the previous frame
    m_TrueHDMATState.prev_frametime = frame_time;
    m_TrueHDMATState.prev_frametime_valid = true;
    // Write the MAT header into the fresh buffer
    if (m_bsOutput.GetCount() == 0)
    {
        MATWriteHeader();
        // initial header, don't count it for the frame size
        if (m_TrueHDMATState.init == false)
        {
            m_TrueHDMATState.init = true;
            m_TrueHDMATState.mat_framesize = 0;
        }
    }
    // write padding of the previous frame (if any)
    while (m_TrueHDMATState.padding > 0)
    {
        MATWritePadding();
        ASSERT(m_TrueHDMATState.padding == 0 || m_bsOutput.GetCount() == MAT_BUFFER_SIZE);
        // Buffer is full, submit it
        if (m_bsOutput.GetCount() == MAT_BUFFER_SIZE)
        {
            MATFlushPacket(hrDeliver);
            // and setup a new buffer
            MATWriteHeader();
        }
    }
    // count the number of samples in this frame
    m_TrueHDMATState.nSamples += frame_samples;
    // write actual audio data to the buffer
    int remaining = MATFillDataBuffer(p, buffsize);
    // not all data could be written, or the buffer is full
    if (remaining || m_bsOutput.GetCount() == MAT_BUFFER_SIZE)
    {
        // flush out old data
        MATFlushPacket(hrDeliver);
        if (remaining)
        {
            // .. setup a new buffer
            MATWriteHeader();
            // and write the remaining data
            remaining = MATFillDataBuffer(p + (buffsize - remaining), remaining);
            ASSERT(remaining == 0);
        }
    }
    // store the size of the current MAT frame, so we can add padding later
    m_TrueHDMATState.prev_mat_framesize = m_TrueHDMATState.mat_framesize;
    m_TrueHDMATState.mat_framesize = 0;
    return S_OK;
}
| 14,709
|
C++
|
.cpp
| 333
| 37.072072
| 120
| 0.646882
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| true
| true
| false
|
22,146
|
Bitstream.cpp
|
Nevcairiel_LAVFilters/decoder/LAVAudio/Bitstream.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "LAVAudio.h"
#include <MMReg.h>
#include "moreuuids.h"
// size of the AVIO buffer used by the spdif muxer
#define LAV_BITSTREAM_BUFFER_SIZE 4096
// dtshd_rate values passed to the spdif muxer for DTS-HD HR/MA bitstreaming
#define LAV_BITSTREAM_DTS_HD_HR_RATE 192000
#define LAV_BITSTREAM_DTS_HD_MA_RATE 768000
// maps a decoder codec id to the corresponding bitstreaming settings slot
static struct
{
    AVCodecID codec;
    LAVBitstreamCodec config;
} lavf_bitstream_config[] = {
    {AV_CODEC_ID_AC3, Bitstream_AC3},
    {AV_CODEC_ID_EAC3, Bitstream_EAC3},
    {AV_CODEC_ID_TRUEHD, Bitstream_TRUEHD},
    {AV_CODEC_ID_DTS, Bitstream_DTS} // DTS-HD is still DTS, and handled special below
};
// Check whether a codec is bitstreaming eligible and enabled.
// A per-codec runtime override (m_bBitstreamOverride) disables bitstreaming
// regardless of the user setting.
BOOL CLAVAudio::IsBitstreaming(AVCodecID codec)
{
    for (const auto &entry : lavf_bitstream_config)
    {
        if (entry.codec != codec)
            continue;
        if (m_bBitstreamOverride[entry.config])
            return FALSE;
        return m_settings.bBitstream[entry.config];
    }
    // codec is not bitstreaming-capable at all
    return FALSE;
}
// Allocate the AVIO context (and its backing buffer) that the spdif muxer uses
// to hand muxed bitstream data back to the filter through BSWriteBuffer.
HRESULT CLAVAudio::InitBitstreaming()
{
    // Alloc buffer for the AVIO context
    BYTE *buffer = (BYTE *)CoTaskMemAlloc(LAV_BITSTREAM_BUFFER_SIZE + AV_INPUT_BUFFER_PADDING_SIZE);
    if (!buffer)
        return E_OUTOFMEMORY;
    // Create AVIO context (write-only: only the write callback is supplied, 'this' as opaque)
    m_avioBitstream = avio_alloc_context(buffer, LAV_BITSTREAM_BUFFER_SIZE, 1, this, nullptr, BSWriteBuffer, nullptr);
    if (!m_avioBitstream)
    {
        // context creation failed - the buffer has no owner yet, free it here
        SAFE_CO_FREE(buffer);
        return E_FAIL;
    }
    return S_OK;
}
// Release the bitstreaming AVIO context and its buffer (counterpart to InitBitstreaming)
HRESULT CLAVAudio::ShutdownBitstreaming()
{
    if (m_avioBitstream)
    {
        // buffer was allocated with CoTaskMemAlloc in InitBitstreaming
        SAFE_CO_FREE(m_avioBitstream->buffer);
        av_freep(&m_avioBitstream);
    }
    return S_OK;
}
// Static function for the AVIO context that writes the buffer into our own output buffer
int CLAVAudio::BSWriteBuffer(void *opaque, const uint8_t *buf, int buf_size)
{
CLAVAudio *filter = (CLAVAudio *)opaque;
filter->m_bsOutput.Append(buf, buf_size);
return buf_size;
}
// Create and configure the avformat "spdif" muxer used for bitstreaming the
// given codec, wiring it to the filter's AVIO context and configuring the
// DTS-HD rate options. Any previous context is torn down first.
// Returns S_OK on success, E_FAIL otherwise (all partial state is freed).
// Changes: fixed the "Bistreaming" log typo; 'st' is declared up-front so the
// 'goto fail' paths no longer jump over an initialized local.
HRESULT CLAVAudio::CreateBitstreamContext(AVCodecID codec, WAVEFORMATEX *wfe)
{
    int ret = 0;
    AVStream *st = nullptr;
    if (m_avBSContext)
        FreeBitstreamContext();
    m_bsParser.Reset();
    DbgLog((LOG_TRACE, 20, "Creating Bitstreaming Context..."));
    ret = avformat_alloc_output_context2(&m_avBSContext, nullptr, "spdif", nullptr);
    if (ret < 0 || !m_avBSContext)
    {
        DbgLog((LOG_ERROR, 10, L"::CreateBitstreamContext() -- alloc of avformat spdif muxer failed (ret: %d)", ret));
        goto fail;
    }
    m_avBSContext->pb = m_avioBitstream;
    // flush IO after every packet, so we can send it to the audio renderer immediately
    m_avBSContext->flags |= AVFMT_FLAG_FLUSH_PACKETS;
    // DTS-HD is by default off, unless explicitly asked for
    if (m_settings.DTSHDFraming && m_settings.bBitstream[Bitstream_DTSHD] && !m_bForceDTSCore)
    {
        m_DTSBitstreamMode = DTS_HDMA;
        av_opt_set_int(m_avBSContext->priv_data, "dtshd_rate", LAV_BITSTREAM_DTS_HD_MA_RATE, 0);
    }
    else
    {
        m_DTSBitstreamMode = DTS_Core;
        av_opt_set_int(m_avBSContext->priv_data, "dtshd_rate", 0, 0);
    }
    av_opt_set_int(m_avBSContext->priv_data, "dtshd_fallback_time", -1, 0);
    // single audio stream describing the input to the muxer
    st = avformat_new_stream(m_avBSContext, 0);
    if (!st)
    {
        DbgLog((LOG_ERROR, 10, L"::CreateBitstreamContext() -- alloc of output stream failed"));
        goto fail;
    }
    st->codecpar->codec_id = codec;
    st->codecpar->codec_type = AVMEDIA_TYPE_AUDIO;
    st->codecpar->ch_layout.order = AV_CHANNEL_ORDER_UNSPEC;
    st->codecpar->ch_layout.nb_channels = wfe->nChannels;
    st->codecpar->sample_rate = wfe->nSamplesPerSec;
    ret = avformat_write_header(m_avBSContext, nullptr);
    if (ret < 0)
    {
        DbgLog((LOG_ERROR, 10, L"::CreateBitstreamContext() -- av_write_header returned an error code (%d)", -ret));
        goto fail;
    }
    return S_OK;
fail:
    FreeBitstreamContext();
    return E_FAIL;
}
// Re-evaluate the bitstreaming configuration after a settings change: if the
// bitstream eligibility of the current codec changed, re-initialize the
// decoder (which creates or drops the bitstream context); on an existing
// context only the DTS-HD rate option is updated.
HRESULT CLAVAudio::UpdateBitstreamContext()
{
    if (!m_pInput || !m_pInput->IsConnected())
        return E_UNEXPECTED;
    BOOL bBitstream = IsBitstreaming(m_nCodecId);
    // re-init only when the desired state (bitstream vs. decode) differs from the current one
    if ((bBitstream && !m_avBSContext) || (!bBitstream && m_avBSContext))
    {
        CMediaType mt = m_pInput->CurrentMediaType();
        const void *format = mt.Format();
        GUID format_type = mt.formattype;
        DWORD formatlen = mt.cbFormat;
        // Override the format type
        if (mt.subtype == MEDIASUBTYPE_FFMPEG_AUDIO && format_type == FORMAT_WaveFormatExFFMPEG)
        {
            // unwrap the plain WAVEFORMATEX embedded in the FFMPEG-specific wrapper
            WAVEFORMATEXFFMPEG *wfexff = (WAVEFORMATEXFFMPEG *)mt.Format();
            format = &wfexff->wfex;
            format_type = FORMAT_WaveFormatEx;
            formatlen -= sizeof(WAVEFORMATEXFFMPEG) - sizeof(WAVEFORMATEX);
        }
        ffmpeg_init(m_nCodecId, format, format_type, formatlen);
        m_bQueueResync = TRUE;
    }
    // Configure DTS-HD setting
    if (m_avBSContext)
    {
        if (m_settings.bBitstream[Bitstream_DTSHD] && m_settings.DTSHDFraming && !m_bForceDTSCore)
        {
            m_DTSBitstreamMode = DTS_HDMA;
            av_opt_set_int(m_avBSContext->priv_data, "dtshd_rate", LAV_BITSTREAM_DTS_HD_MA_RATE, 0);
        }
        else
        {
            m_DTSBitstreamMode = DTS_Core; // Force auto-detection
            av_opt_set_int(m_avBSContext->priv_data, "dtshd_rate", 0, 0);
        }
    }
    return S_OK;
}
// Tear down the SPDIF muxing context and reset all bitstreaming state.
HRESULT CLAVAudio::FreeBitstreamContext()
{
    if (m_avBSContext)
    {
        // Writing the trailer makes the SPDIF muxer release its internal buffers
        av_write_trailer(m_avBSContext);
        avformat_free_context(m_avBSContext);
        m_avBSContext = nullptr;
    }

    av_packet_free(&m_pBitstreamPacket);

    // Drop any muxed data that has not been delivered yet
    m_bsOutput.SetSize(0);

    // Clear the TrueHD MAT framing state machine
    memset(&m_TrueHDMATState, 0, sizeof(m_TrueHDMATState));
    return S_OK;
}
// Build the PCM-disguised media type used to pass compressed audio through to the
// renderer (IEC 61937 framing).
//  codec           - the codec being bitstreamed
//  dwSampleRate    - sample rate reported by the bitstream parser
//  bDTSHDOverride  - force the DTS core layout even if DTS-HD is active
CMediaType CLAVAudio::CreateBitstreamMediaType(AVCodecID codec, DWORD dwSampleRate, BOOL bDTSHDOverride)
{
    CMediaType mt;
    mt.majortype = MEDIATYPE_Audio;
    mt.subtype = MEDIASUBTYPE_PCM;
    mt.formattype = FORMAT_WaveFormatEx;

    WAVEFORMATEXTENSIBLE wfx = {};
    WAVEFORMATEX *pwf = &wfx.Format;

    // Stereo / 16-bit is the baseline for SPDIF framing
    pwf->nChannels = 2;
    pwf->wBitsPerSample = 16;

    GUID formatSubtype = GUID_NULL;

    if (codec == AV_CODEC_ID_AC3)
    {
        pwf->wFormatTag = WAVE_FORMAT_DOLBY_AC3_SPDIF;
        pwf->nSamplesPerSec = min(dwSampleRate, 48000);
    }
    else if (codec == AV_CODEC_ID_EAC3)
    {
        pwf->nSamplesPerSec = 192000;
        pwf->nChannels = 2;
        formatSubtype = KSDATAFORMAT_SUBTYPE_IEC61937_DOLBY_DIGITAL_PLUS;
    }
    else if (codec == AV_CODEC_ID_TRUEHD)
    {
        pwf->nSamplesPerSec = 192000;
        pwf->nChannels = 8;
        formatSubtype = KSDATAFORMAT_SUBTYPE_IEC61937_DOLBY_MLP;
    }
    else if (codec == AV_CODEC_ID_DTS)
    {
        if (m_settings.bBitstream[Bitstream_DTSHD] && !bDTSHDOverride && m_DTSBitstreamMode != DTS_Core)
        {
            // DTS-HD: HR uses a 2-channel layout, MA the full 8-channel layout
            pwf->nSamplesPerSec = 192000;
            pwf->nChannels = (m_DTSBitstreamMode == DTS_HDHR) ? 2 : 8;
            formatSubtype = KSDATAFORMAT_SUBTYPE_IEC61937_DTS_HD;
        }
        else
        {
            // DTS core re-uses the AC3 SPDIF format tag -- odd, but what renderers expect
            pwf->wFormatTag = WAVE_FORMAT_DOLBY_AC3_SPDIF;
            pwf->nSamplesPerSec = min(dwSampleRate, 48000);
        }
    }
    else
    {
        ASSERT(0);
    }

    pwf->nBlockAlign = pwf->nChannels * pwf->wBitsPerSample / 8;
    pwf->nAvgBytesPerSec = pwf->nSamplesPerSec * pwf->nBlockAlign;

    // High-bandwidth formats need the WAVEFORMATEXTENSIBLE wrapper with the IEC subtype
    if (formatSubtype != GUID_NULL)
    {
        wfx.Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE;
        wfx.Format.cbSize = sizeof(wfx) - sizeof(wfx.Format);
        wfx.dwChannelMask = get_channel_mask(pwf->nChannels);
        wfx.Samples.wValidBitsPerSample = wfx.Format.wBitsPerSample;
        wfx.SubFormat = formatSubtype;
    }

    mt.SetSampleSize(1);
    mt.SetFormat((BYTE *)&wfx, sizeof(wfx.Format) + wfx.Format.cbSize);
    return mt;
}
// Decide which DTS variant should currently be bitstreamed, based on user settings,
// forced-core fallback state, and what the stream actually contains.
CLAVAudio::DTSBitstreamMode CLAVAudio::GetDTSHDBitstreamMode()
{
    // HD disabled by the user, or a previous downstream rejection forced core
    if (m_bForceDTSCore || !m_settings.bBitstream[Bitstream_DTSHD])
        return DTS_Core;

    // Explicit HD framing always muxes as DTS-HD MA
    if (m_settings.DTSHDFraming)
        return DTS_HDMA;

    // No HD extension data seen in the stream -> stay on the core
    if (!m_bsParser.m_bDTSHD)
        return DTS_Core;

    // High-Resolution profile gets its own (lower-rate) HD mode, everything else is MA
    return (m_pAVCtx->profile == FF_PROFILE_DTS_HD_HRA) ? DTS_HDHR : DTS_HDMA;
}
void CLAVAudio::ActivateDTSHDMuxing()
{
DbgLog((LOG_TRACE, 20, L"::ActivateDTSHDMuxing(): Found DTS-HD marker - switching to DTS-HD muxing mode"));
m_DTSBitstreamMode = GetDTSHDBitstreamMode();
// Check if downstream actually accepts it..
const CMediaType &mt = CreateBitstreamMediaType(m_nCodecId, m_bsParser.m_dwSampleRate);
HRESULT hr = m_pOutput->GetConnected()->QueryAccept(&mt);
if (hr != S_OK)
{
DbgLog((LOG_TRACE, 20, L"-> But downstream doesn't want DTS-HD, sticking to DTS core"));
m_DTSBitstreamMode = DTS_Core;
m_bForceDTSCore = TRUE;
}
else
{
av_opt_set_int(m_avBSContext->priv_data, "dtshd_rate",
(m_DTSBitstreamMode == DTS_HDHR) ? LAV_BITSTREAM_DTS_HD_HR_RATE : LAV_BITSTREAM_DTS_HD_MA_RATE,
0);
}
}
// Bitstreaming path: feed raw compressed data through the codec parser, mux complete
// frames into IEC 61937 (S/PDIF) format, and deliver the result downstream.
//  pDataBuffer - input data (nullptr to flush the parser)
//  buffsize    - number of input bytes available
//  consumed    - [out] number of input bytes actually consumed
//  hrDeliver   - [out] result of the downstream delivery
// Returns S_FALSE if the bitstream context was torn down (fallback to PCM decoding).
HRESULT CLAVAudio::Bitstream(const BYTE *pDataBuffer, int buffsize, int &consumed, HRESULT *hrDeliver)
{
    HRESULT hr = S_OK;
    int ret = 0;
    BOOL bFlush = (pDataBuffer == nullptr);
    if (m_pBitstreamPacket == nullptr)
        m_pBitstreamPacket = av_packet_alloc();
    m_pBitstreamPacket->duration = 1;
    consumed = 0;
    while (buffsize > 0)
    {
        if (bFlush)
            buffsize = 0;
        BYTE *pOut = nullptr;
        int pOut_size = 0;
        // Let the parser slice the input stream into complete codec frames
        int used_bytes = av_parser_parse2(m_pParser, m_pAVCtx, &pOut, &pOut_size, pDataBuffer, buffsize, AV_NOPTS_VALUE,
                                          AV_NOPTS_VALUE, 0);
        if (used_bytes < 0)
        {
            DbgLog((LOG_TRACE, 50, L"::Bitstream() - audio parsing failed (ret: %d)", -used_bytes));
            return E_FAIL;
        }
        else if (used_bytes == 0 && pOut_size == 0)
        {
            DbgLog((LOG_TRACE, 50, L"::Bitstream() - could not process buffer, starving?"));
            break;
        }
        // Timestamp cache to compensate for the one-frame delay the parser might introduce,
        // in case the frames were already perfectly sliced apart. If we used more (or equal)
        // bytes than were output again, we encountered a new frame -> update timestamps.
        if (used_bytes >= pOut_size && m_bUpdateTimeCache)
        {
            m_rtStartInputCache = m_rtStartInput;
            m_rtStopInputCache = m_rtStopInput;
            m_bUpdateTimeCache = FALSE;
        }
        // Advance the input position (not during flush, where there is no real input)
        if (!bFlush && used_bytes > 0)
        {
            buffsize -= used_bytes;
            pDataBuffer += used_bytes;
            consumed += used_bytes;
        }
        if (pOut_size > 0)
        {
            // Extract stream properties (sample rate, HD flags, ...) from the parsed frame
            hr = m_bsParser.Parse(m_nCodecId, pOut, pOut_size);
            if (FAILED(hr))
            {
                continue;
            }
            if (m_nCodecId == AV_CODEC_ID_TRUEHD)
            {
                m_bUpdateTimeCache = TRUE;
                // Set long-time cache to the first timestamp encountered, used by TrueHD and E-AC3
                // because the S/PDIF muxer caches data internally. If the current timestamp is not
                // valid, use the last delivery timestamp in m_rtStart.
                if (m_rtBitstreamCache == AV_NOPTS_VALUE)
                    m_rtBitstreamCache = m_rtStartInputCache != AV_NOPTS_VALUE ? m_rtStartInputCache : m_rtStart;
                // TrueHD requires its own MAT framing, handled by a dedicated path
                BitstreamTrueHD(pOut, pOut_size, hrDeliver);
            }
            else
            {
                if (m_nCodecId == AV_CODEC_ID_DTS)
                {
                    // Switch to DTS-HD muxing on the fly once HD extension frames appear
                    DTSBitstreamMode mode = GetDTSHDBitstreamMode();
                    if (mode != DTS_Core && mode != m_DTSBitstreamMode)
                        ActivateDTSHDMuxing();
                }
                m_pBitstreamPacket->data = pOut;
                m_pBitstreamPacket->size = pOut_size;
                // Write SPDIF muxed frame
                ret = av_write_frame(m_avBSContext, m_pBitstreamPacket);
                if (ret < 0)
                {
                    DbgLog((LOG_ERROR, 20, "::Bitstream(): av_write_frame returned error code (%d)", -ret));
                    m_bsOutput.SetSize(0);
                    continue;
                }
                m_bUpdateTimeCache = TRUE;
                // Set long-time cache to the first timestamp encountered (see TrueHD branch above)
                if (m_rtBitstreamCache == AV_NOPTS_VALUE)
                    m_rtBitstreamCache = m_rtStartInputCache != AV_NOPTS_VALUE ? m_rtStartInputCache : m_rtStart;
                // Deliver frame
                if (m_bsOutput.GetCount() > 0)
                {
                    *hrDeliver = DeliverBitstream(m_nCodecId, m_bsOutput.Ptr(), m_bsOutput.GetCount(),
                                                  m_rtStartInputCache, m_rtStopInputCache);
                    m_bsOutput.SetSize(0);
                }
            }
            /* if the bitstreaming context is lost at this point, then the deliver function caused a fallback to PCM */
            if (m_avBSContext == nullptr)
                return S_FALSE;
        }
    }
    return S_OK;
}
// Deliver one muxed S/PDIF buffer downstream, handling media-type (re)negotiation,
// timestamp generation, automatic A/V sync correction, and the DTS-HD -> core and
// bitstream -> PCM fallbacks on downstream rejection.
//  codec           - codec of the payload
//  buffer / dwSize - muxed IEC 61937 data
//  rtStartInput / rtStopInput - input timestamps of the corresponding frames
//  bSwap           - byte-swap the 16-bit payload into the output buffer
//  nSamplesOffset  - sample offset into the current frame (used by TrueHD MAT framing)
HRESULT CLAVAudio::DeliverBitstream(AVCodecID codec, const BYTE *buffer, DWORD dwSize, REFERENCE_TIME rtStartInput,
                                    REFERENCE_TIME rtStopInput, BOOL bSwap, int nSamplesOffset)
{
    HRESULT hr = S_OK;
    if (m_bFlushing)
        return S_FALSE;
    CMediaType mt = CreateBitstreamMediaType(codec, m_bsParser.m_dwSampleRate);
    if (FAILED(hr = ReconnectOutput(dwSize, mt)))
    {
        return hr;
    }
    IMediaSample *pOut;
    BYTE *pDataOut = nullptr;
    if (FAILED(GetDeliveryBuffer(&pOut, &pDataOut)))
    {
        return E_FAIL;
    }
    // After a seek/new segment, re-base our computed timeline on the input timestamps
    if (m_bResyncTimestamp && (rtStartInput != AV_NOPTS_VALUE || m_rtBitstreamCache != AV_NOPTS_VALUE))
    {
        if (m_rtBitstreamCache != AV_NOPTS_VALUE)
            m_rtStart = m_rtBitstreamCache;
        else
            m_rtStart = rtStartInput;
        m_bResyncTimestamp = FALSE;
    }
    REFERENCE_TIME rtStart = m_rtStart, rtStop = AV_NOPTS_VALUE;
    REFERENCE_TIME rtOffset = (REFERENCE_TIME)round(DBL_SECOND_MULT * (double)nSamplesOffset / m_bsParser.m_dwSampleRate / m_dRate);
    double dDuration = DBL_SECOND_MULT * (double)m_bsParser.m_dwSamples / m_bsParser.m_dwSampleRate / m_dRate;
    // Track the fractional part of the duration so sub-tick rounding errors don't accumulate
    m_dStartOffset += fmod(dDuration, 1.0);
    // Add rounded duration to rtStop
    rtStop = rtStart + (REFERENCE_TIME)(dDuration + 0.5);
    // and unrounded to m_rtStart..
    m_rtStart += (REFERENCE_TIME)dDuration;
    // and accumulate error..
    if (m_dStartOffset > 0.5)
    {
        m_rtStart++;
        m_dStartOffset -= 1.0;
    }
    // Jitter = drift between our computed timeline and the input timestamps
    REFERENCE_TIME rtJitter = 0;
    if (m_rtBitstreamCache != AV_NOPTS_VALUE)
        rtJitter = rtStart - m_rtBitstreamCache + rtOffset;
    m_faJitter.Sample(rtJitter);
    REFERENCE_TIME rtJitterMin = m_faJitter.AbsMinimum();
    if (m_settings.AutoAVSync && abs(rtJitterMin) > m_JitterLimit)
    {
        // Persistent drift beyond the limit: snap the timeline back onto the input
        DbgLog((LOG_TRACE, 10, L"::Deliver(): corrected A/V sync by %I64d", rtJitterMin));
        m_rtStart -= rtJitterMin;
        rtStart -= rtJitterMin;
        m_faJitter.OffsetValues(-rtJitterMin);
        m_bDiscontinuity = TRUE;
    }
#ifdef DEBUG
    DbgLog((LOG_CUSTOM5, 20,
            L"Bitstream Delivery, rtStart(calc): %I64d, rtStart(input): %I64d, duration: %I64d, offset: %I64d, diff: %I64d", rtStart,
            m_rtBitstreamCache, rtStop - rtStart, rtOffset, rtJitter));
    if (m_faJitter.CurrentSample() == 0)
    {
        DbgLog((LOG_CUSTOM2, 20, L"Jitter Stats: min: %I64d - max: %I64d - avg: %I64d", rtJitterMin,
                m_faJitter.AbsMaximum(), m_faJitter.Average()));
    }
#endif
    m_rtBitstreamCache = AV_NOPTS_VALUE;
    // Apply the user-configured audio delay (scaled by the playback rate)
    if (m_settings.AudioDelayEnabled)
    {
        REFERENCE_TIME rtDelay = (REFERENCE_TIME)((m_settings.AudioDelay * 10000i64) / m_dRate);
        rtStart += rtDelay;
        rtStop += rtDelay;
    }
    pOut->SetTime(&rtStart, &rtStop);
    pOut->SetMediaTime(nullptr, nullptr);
    pOut->SetPreroll(FALSE);
    pOut->SetDiscontinuity(m_bDiscontinuity);
    m_bDiscontinuity = FALSE;
    pOut->SetSyncPoint(TRUE);
    pOut->SetActualDataLength(dwSize);
    // byte-swap if needed
    if (bSwap)
    {
        lav_spdif_bswap_buf16((uint16_t *)pDataOut, (uint16_t *)buffer, dwSize >> 1);
    }
    else
    {
        memcpy(pDataOut, buffer, dwSize);
    }
    // hr == S_OK at this point means ReconnectOutput changed the media type: re-negotiate
    if (hr == S_OK)
    {
        hr = m_pOutput->GetConnected()->QueryAccept(&mt);
        if (hr == S_FALSE && m_nCodecId == AV_CODEC_ID_DTS && m_DTSBitstreamMode != DTS_Core)
        {
            // Downstream rejected DTS-HD: fall back to the DTS core layout and retry later
            DbgLog((LOG_TRACE, 1, L"DTS-HD Media Type failed with %0#.8x, trying fallback to DTS core", hr));
            m_bForceDTSCore = TRUE;
            UpdateBitstreamContext();
            goto done;
        }
        else if (hr == S_FALSE && m_settings.bBitstreamingFallback)
        {
            // Downstream rejected bitstreaming entirely: fall back to PCM decoding
            BitstreamFallbackToPCM();
            goto done;
        }
        DbgLog((LOG_TRACE, 1, L"Sending new Media Type (QueryAccept: %0#.8x)", hr));
        m_pOutput->SetMediaType(&mt);
        pOut->SetMediaType(&mt);
    }
    hr = m_pOutput->Deliver(pOut);
    if (FAILED(hr))
    {
        DbgLog((LOG_ERROR, 10, L"::DeliverBitstream failed with code: %0#.8x", hr));
    }
done:
    SafeRelease(&pOut);
    return hr;
}
// Abandon bitstreaming for the active codec and switch to PCM decoding.
HRESULT CLAVAudio::BitstreamFallbackToPCM()
{
    FreeBitstreamContext();

    // Remember the override so this codec is not bitstreamed again in this session
    for (int idx = 0; idx < countof(lavf_bitstream_config); ++idx)
    {
        if (lavf_bitstream_config[idx].codec != m_nCodecId)
            continue;
        m_bBitstreamOverride[lavf_bitstream_config[idx].config] = TRUE;
    }

    m_bQueueResync = TRUE;
    return S_OK;
}
| 18,840
|
C++
|
.cpp
| 497
| 30.104628
| 133
| 0.621226
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| true
| true
| false
|
22,147
|
dllmain.cpp
|
Nevcairiel_LAVFilters/decoder/LAVAudio/dllmain.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
// Based on the SampleParser Template by GDCL
// --------------------------------------------------------------------------------
// Copyright (c) GDCL 2004. All Rights Reserved.
// You are free to re-use this as the basis for your own filter development,
// provided you retain this copyright notice in the source.
// http://www.gdcl.co.uk
// --------------------------------------------------------------------------------
#include "stdafx.h"
// Initialize the GUIDs
#include <InitGuid.h>
#include <qnetwork.h>
#include "LAVAudio.h"
#include "AudioSettingsProp.h"
#include "moreuuids.h"
#include "IMediaSideDataFFmpeg.h"
#include "registry.h"
// --- COM factory table and registration code --------------
// Pin descriptors used for filter self-registration: one input pin, one rendered output pin
const AMOVIESETUP_PIN sudpPinsAudioDec[] = {{L"Input", FALSE, FALSE, FALSE, FALSE, &CLSID_NULL, nullptr,
                                             CLAVAudio::sudPinTypesInCount, CLAVAudio::sudPinTypesIn},
                                            {L"Output", FALSE, TRUE, FALSE, FALSE, &CLSID_NULL, nullptr,
                                             CLAVAudio::sudPinTypesOutCount, CLAVAudio::sudPinTypesOut}};
// Filter registration record (CLSID, display name, merit, pins, category)
const AMOVIESETUP_FILTER sudFilterReg = {&__uuidof(CLAVAudio), // filter clsid
                                         L"LAV Audio Decoder", // filter name
                                         MERIT_PREFERRED + 3,  // merit
                                         countof(sudpPinsAudioDec), sudpPinsAudioDec, CLSID_LegacyAmFilterCategory};
// --- COM factory table and registration code --------------
// DirectShow base class COM factory requires this table,
// declaring all the COM objects in this DLL
CFactoryTemplate g_Templates[] = {
    // one entry for each CoCreate-able object
    {sudFilterReg.strName, sudFilterReg.clsID, CreateInstance<CLAVAudio>, nullptr, &sudFilterReg},
    // This entry is for the property page.
    {L"LAV Audio Properties", &CLSID_LAVAudioSettingsProp, CreateInstance<CLAVAudioSettingsProp>, nullptr, nullptr},
    {L"LAV Audio Mixer", &CLSID_LAVAudioMixingProp, CreateInstance<CLAVAudioMixingProp>, nullptr, nullptr},
    {L"LAV Audio Format Settings", &CLSID_LAVAudioFormatsProp, CreateInstance<CLAVAudioFormatsProp>, nullptr, nullptr},
    {L"LAV Audio Status", &CLSID_LAVAudioStatusProp, CreateInstance<CLAVAudioStatusProp>, nullptr, nullptr}};
// Number of entries in the factory table above (required by the base classes)
int g_cTemplates = sizeof(g_Templates) / sizeof(g_Templates[0]);
// Self-registration entrypoint (invoked by regsvr32 / installer)
STDAPI DllRegisterServer()
{
    // base classes will handle registration using the factory template table
    return AMovieDllRegisterServer2(true);
}
// Self-unregistration entrypoint (invoked by regsvr32 /u)
STDAPI DllUnregisterServer()
{
    // base classes will handle de-registration using the factory template table
    return AMovieDllRegisterServer2(false);
}
// if we declare the correct C runtime entrypoint and then forward it to the DShow base
// classes we will be sure that both the C/C++ runtimes and the base classes are initialized
// correctly
extern "C" BOOL WINAPI DllEntryPoint(HINSTANCE, ULONG, LPVOID);

// DLL entrypoint: forwards to the DirectShow base-class entrypoint (see note above)
BOOL WINAPI DllMain(HANDLE hDllHandle, DWORD dwReason, LPVOID lpReserved)
{
    return DllEntryPoint(reinterpret_cast<HINSTANCE>(hDllHandle), dwReason, lpReserved);
}
// rundll32-style entrypoint: creates a standalone decoder instance and shows its
// property page dialog, so the filter can be configured outside of a graph.
void CALLBACK OpenConfiguration(HWND hwnd, HINSTANCE hinst, LPSTR lpszCmdLine, int nCmdShow)
{
    HRESULT hr = S_OK;
    CUnknown *pInstance = CreateInstance<CLAVAudio>(nullptr, &hr);
    IBaseFilter *pFilter = nullptr;
    // NOTE(review): QueryInterface already AddRef's pFilter, and the extra AddRef below is
    // never balanced by a Release; the object is then destroyed via `delete pInstance`
    // instead of COM reference counting. Presumably acceptable because the process exits
    // right after the dialog closes -- verify against upstream intent before changing.
    pInstance->NonDelegatingQueryInterface(IID_IBaseFilter, (void **)&pFilter);
    if (pFilter)
    {
        pFilter->AddRef();
        CBaseDSPropPage::ShowPropPageDialog(pFilter);
    }
    delete pInstance;
}
| 4,410
|
C++
|
.cpp
| 87
| 45.275862
| 119
| 0.685542
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| true
| true
| true
| false
|
22,148
|
BitstreamParser.cpp
|
Nevcairiel_LAVFilters/decoder/LAVAudio/BitstreamParser.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "BitstreamParser.h"
#pragma warning(push)
#pragma warning(disable : 4101)
#pragma warning(disable : 5033)
extern "C"
{
#define AVCODEC_X86_MATHOPS_H
#include "libavcodec/ac3_parser_internal.h"
}
#pragma warning(pop)
#include "parser/dts.h"
// Create the DTS parser context and initialize all stream state to a clean slate.
CBitstreamParser::CBitstreamParser()
{
    init_dts_parser((DTSParserContext **)&m_pParserContext);
    Reset();
}
// Release the DTS parser context allocated in the constructor.
CBitstreamParser::~CBitstreamParser()
{
    close_dts_parser((DTSParserContext **)&m_pParserContext);
}
// Return the parser to its initial state, forgetting any per-stream history
// (including the sticky DTS-HD detection flag).
void CBitstreamParser::Reset()
{
    m_dwBlocks = 0;
    m_dwSamples = 0;
    m_dwFrameSize = 0;
    m_dwSampleRate = 0;
    m_bDTSHD = FALSE;
    memset(&m_DTSHeader, 0, sizeof(m_DTSHeader));
}
// Dispatch a frame buffer to the codec-specific header parser.
HRESULT CBitstreamParser::Parse(AVCodecID codec, BYTE *pBuffer, DWORD dwSize)
{
    if (codec == AV_CODEC_ID_DTS)
        return ParseDTS(pBuffer, dwSize);
    if (codec == AV_CODEC_ID_AC3 || codec == AV_CODEC_ID_EAC3)
        return ParseAC3(pBuffer, dwSize);
    if (codec == AV_CODEC_ID_TRUEHD)
        return ParseTrueHD(pBuffer, dwSize);
    // Codecs without a dedicated parser are simply ignored
    return S_OK;
}
// Parse one DTS frame header and record its stream properties.
HRESULT CBitstreamParser::ParseDTS(BYTE *pBuffer, DWORD dwSize)
{
    if (parse_dts_header((DTSParserContext *)m_pParserContext, &m_DTSHeader, pBuffer, (unsigned)dwSize) < 0)
        return E_FAIL;

    // Latch the HD flag: once an HD extension was seen, it sticks for the stream
    m_bDTSHD = m_bDTSHD || m_DTSHeader.IsHD;

    m_dwBlocks = m_DTSHeader.Blocks;
    m_dwFrameSize = m_DTSHeader.FrameSize;
    m_dwSampleRate = m_DTSHeader.SampleRate;
    m_dwSamples = m_DTSHeader.SamplesPerBlock * m_dwBlocks;
    return S_OK;
}
// Parse an (E-)AC3 sync frame header to track sample rate and samples per frame.
HRESULT CBitstreamParser::ParseAC3(BYTE *pBuffer, DWORD dwSize)
{
    AC3HeaderInfo hdr{};
    AC3HeaderInfo *phdr = &hdr;

    if (avpriv_ac3_parse_header(&phdr, pBuffer, dwSize) < 0)
    {
        // Invalid header: only fatal if no valid frame was ever seen in this stream
        return (m_dwSampleRate == 0) ? E_FAIL : S_OK;
    }

    m_dwSampleRate = hdr.sample_rate;
    // E-AC3 (bitstream id > 10) always combines 6 blocks of 256 samples;
    // plain AC3 reports its actual block count
    m_dwSamples = (hdr.bitstream_id > 10) ? (6 * 256) : (hdr.num_blocks * 256);
    return S_OK;
}
// Scan a TrueHD frame for a major sync header and, when found, derive the sample
// rate and the number of samples per frame from the rate bits.
// Fix: the original read pBuffer[4..8] without any length check, causing an
// out-of-bounds read on buffers shorter than 9 bytes.
HRESULT CBitstreamParser::ParseTrueHD(BYTE *pBuffer, DWORD dwSize)
{
    // Need the 32-bit sync word at offset 4 and the rate byte at offset 8
    if (dwSize < 9)
        return S_OK;
    if (AV_RB32(pBuffer + 4) == 0xf8726fba)
    {
        int nRatebits = pBuffer[8] >> 4;
        // Bit 3 selects the 44.1k family, bits 0-2 the multiplier exponent
        m_dwSampleRate = (nRatebits & 8 ? 44100 : 48000) << (nRatebits & 7);
        // A TrueHD access unit always covers 1/1200s (44.1k family: 1/1102.5s) worth of samples
        m_dwSamples = 24 * (40 << (nRatebits & 7));
    }
    return S_OK;
}
| 3,168
|
C++
|
.cpp
| 98
| 28.653061
| 110
| 0.693416
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| false
| false
| false
|
22,149
|
AudioSettingsProp.cpp
|
Nevcairiel_LAVFilters/decoder/LAVAudio/AudioSettingsProp.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "AudioSettingsProp.h"
#include "Media.h"
#include <Commctrl.h>
#include "resource.h"
#include "version.h"
// Construct the settings property page, binding it to its dialog template and title resource.
CLAVAudioSettingsProp::CLAVAudioSettingsProp(LPUNKNOWN pUnk, HRESULT *phr)
    : CBaseDSPropPage(NAME("LAVCAudioProp"), pUnk, IDD_PROPPAGE_AUDIO_SETTINGS, IDS_SETTINGS)
{
}
// Nothing to clean up; the settings interface is released in OnDisconnect().
CLAVAudioSettingsProp::~CLAVAudioSettingsProp()
{
}
// Cache the settings interface of the filter this property page is attached to.
HRESULT CLAVAudioSettingsProp::OnConnect(IUnknown *pUnk)
{
    if (!pUnk)
        return E_POINTER;

    // OnDisconnect must have released any previously held interface
    ASSERT(m_pAudioSettings == nullptr);
    return pUnk->QueryInterface(&m_pAudioSettings);
}
// Release the cached settings interface when the page is detached from the filter.
HRESULT CLAVAudioSettingsProp::OnDisconnect()
{
    SafeRelease(&m_pAudioSettings);
    return S_OK;
}
// Push the current dialog control state into the filter's settings interface.
// Invoked by the property-page framework when the user hits Apply/OK.
HRESULT CLAVAudioSettingsProp::OnApplyChanges()
{
    ASSERT(m_pAudioSettings != nullptr);
    HRESULT hr = S_OK;
    BOOL bFlag;
    // DRC
    int iDRCLevel = (int)SendDlgItemMessage(m_Dlg, IDC_DRC_LEVEL, TBM_GETPOS, 0, 0);
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_DRC, BM_GETCHECK, 0, 0);
    hr = m_pAudioSettings->SetDRC(bFlag, iDRCLevel);
    // Bitstreaming codec options
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_BS_AC3, BM_GETCHECK, 0, 0);
    m_pAudioSettings->SetBitstreamConfig(Bitstream_AC3, bFlag);
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_BS_EAC3, BM_GETCHECK, 0, 0);
    m_pAudioSettings->SetBitstreamConfig(Bitstream_EAC3, bFlag);
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_BS_TRUEHD, BM_GETCHECK, 0, 0);
    m_pAudioSettings->SetBitstreamConfig(Bitstream_TRUEHD, bFlag);
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_BS_DTS, BM_GETCHECK, 0, 0);
    m_pAudioSettings->SetBitstreamConfig(Bitstream_DTS, bFlag);
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_BS_DTSHD, BM_GETCHECK, 0, 0);
    m_pAudioSettings->SetBitstreamConfig(Bitstream_DTSHD, bFlag);
    // DTS-HD framing
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_BS_DTSHD_FRAMING, BM_GETCHECK, 0, 0);
    m_pAudioSettings->SetDTSHDFraming(bFlag);
    // Fallback to audio decoding
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_BS_FALLBACK, BM_GETCHECK, 0, 0);
    m_pAudioSettings->SetBitstreamingFallback(bFlag);
    // The other playback options
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_AUTO_AVSYNC, BM_GETCHECK, 0, 0);
    m_pAudioSettings->SetAutoAVSync(bFlag);
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_STANDARD_CH_LAYOUT, BM_GETCHECK, 0, 0);
    m_pAudioSettings->SetOutputStandardLayout(bFlag);
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_OUTPUT51_LEGACY, BM_GETCHECK, 0, 0);
    m_pAudioSettings->SetOutput51LegacyLayout(bFlag);
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_EXPAND_MONO, BM_GETCHECK, 0, 0);
    m_pAudioSettings->SetExpandMono(bFlag);
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_EXPAND61, BM_GETCHECK, 0, 0);
    m_pAudioSettings->SetExpand61(bFlag);
    // Sample Formats
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_OUT_S16, BM_GETCHECK, 0, 0);
    m_pAudioSettings->SetSampleFormat(SampleFormat_16, bFlag);
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_OUT_S24, BM_GETCHECK, 0, 0);
    m_pAudioSettings->SetSampleFormat(SampleFormat_24, bFlag);
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_OUT_S32, BM_GETCHECK, 0, 0);
    m_pAudioSettings->SetSampleFormat(SampleFormat_32, bFlag);
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_OUT_FP32, BM_GETCHECK, 0, 0);
    m_pAudioSettings->SetSampleFormat(SampleFormat_FP32, bFlag);
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_OUT_U8, BM_GETCHECK, 0, 0);
    m_pAudioSettings->SetSampleFormat(SampleFormat_U8, bFlag);
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_OUT_S16_DITHER, BM_GETCHECK, 0, 0);
    m_pAudioSettings->SetSampleConvertDithering(bFlag);
    // Audio delay: enabled flag plus the millisecond value from the edit box
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_DELAY_ENABLED, BM_GETCHECK, 0, 0);
    WCHAR buffer[100];
    SendDlgItemMessage(m_Dlg, IDC_DELAY, WM_GETTEXT, 100, (LPARAM)&buffer);
    int delay = _wtoi(buffer);
    m_pAudioSettings->SetAudioDelay(bFlag, delay);
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_TRAYICON, BM_GETCHECK, 0, 0);
    m_pAudioSettings->SetTrayIcon(bFlag);
    // Re-read everything so the cached mirror matches what the filter accepted
    LoadData();
    return hr;
}
// Initialize the dialog controls from the current filter settings when the page is shown.
HRESULT CLAVAudioSettingsProp::OnActivate()
{
    HRESULT hr = S_OK;
    INITCOMMONCONTROLSEX icc;
    icc.dwSize = sizeof(INITCOMMONCONTROLSEX);
    icc.dwICC = ICC_BAR_CLASSES | ICC_STANDARD_CLASSES;
    if (InitCommonControlsEx(&icc) == FALSE)
    {
        return E_FAIL;
    }
    ASSERT(m_pAudioSettings != nullptr);
    // Footer shows the component name and version
    const WCHAR *version = TEXT(LAV_AUDIO) L" " TEXT(LAV_VERSION_STR);
    SendDlgItemMessage(m_Dlg, IDC_LAVAUDIO_FOOTER, WM_SETTEXT, 0, (LPARAM)version);
    hr = LoadData();
    if (SUCCEEDED(hr))
    {
        // DRC checkbox + level slider (0..100%)
        SendDlgItemMessage(m_Dlg, IDC_DRC, BM_SETCHECK, m_bDRCEnabled, 0);
        EnableWindow(GetDlgItem(m_Dlg, IDC_DRC_LEVEL), m_bDRCEnabled);
        SendDlgItemMessage(m_Dlg, IDC_DRC_LEVEL, TBM_SETRANGE, 0, MAKELONG(0, 100));
        SendDlgItemMessage(m_Dlg, IDC_DRC_LEVEL, TBM_SETTICFREQ, 10, 0);
        SendDlgItemMessage(m_Dlg, IDC_DRC_LEVEL, TBM_SETPOS, 1, m_iDRCLevel);
        WCHAR buffer[10];
        _snwprintf_s(buffer, _TRUNCATE, L"%d%%", m_iDRCLevel);
        SendDlgItemMessage(m_Dlg, IDC_DRC_LEVEL_TEXT, WM_SETTEXT, 0, (LPARAM)buffer);
        // Bitstreaming checkboxes; DTS-HD options depend on their parent checkbox
        SendDlgItemMessage(m_Dlg, IDC_BS_AC3, BM_SETCHECK, m_bBitstreaming[Bitstream_AC3], 0);
        SendDlgItemMessage(m_Dlg, IDC_BS_EAC3, BM_SETCHECK, m_bBitstreaming[Bitstream_EAC3], 0);
        SendDlgItemMessage(m_Dlg, IDC_BS_TRUEHD, BM_SETCHECK, m_bBitstreaming[Bitstream_TRUEHD], 0);
        SendDlgItemMessage(m_Dlg, IDC_BS_DTS, BM_SETCHECK, m_bBitstreaming[Bitstream_DTS], 0);
        SendDlgItemMessage(m_Dlg, IDC_BS_DTSHD, BM_SETCHECK, m_bBitstreaming[Bitstream_DTSHD], 0);
        EnableWindow(GetDlgItem(m_Dlg, IDC_BS_DTSHD), m_bBitstreaming[Bitstream_DTS]);
        SendDlgItemMessage(m_Dlg, IDC_BS_DTSHD_FRAMING, BM_SETCHECK, m_bDTSHDFraming, 0);
        EnableWindow(GetDlgItem(m_Dlg, IDC_BS_DTSHD_FRAMING), m_bBitstreaming[Bitstream_DTSHD]);
        addHint(IDC_BS_DTSHD_FRAMING,
                L"With some Receivers, this setting might be needed to achieve the full features of DTS. However, on "
                L"other Receivers, this option will cause DTS to not work at all.\n\nIf you do not experience any "
                L"problems, its recommended to leave this setting untouched.");
        SendDlgItemMessage(m_Dlg, IDC_BS_FALLBACK, BM_SETCHECK, m_bBitstreamingFallback, 0);
        addHint(IDC_BS_FALLBACK,
                L"Fallback to audio decoding if bitstreaming is not supported by the audio renderer/hardware.");
        // General playback options
        SendDlgItemMessage(m_Dlg, IDC_AUTO_AVSYNC, BM_SETCHECK, m_bAutoAVSync, 0);
        addHint(IDC_AUTO_AVSYNC, L"Enables automatic tracking and correction of A/V sync.\n\nIf you encounter any sync "
                                 L"issues, disabling this option can help in debugging the source of the problem.");
        SendDlgItemMessage(m_Dlg, IDC_STANDARD_CH_LAYOUT, BM_SETCHECK, m_bOutputStdLayout, 0);
        addHint(IDC_STANDARD_CH_LAYOUT,
                L"Converts all channel layouts to one of the \"standard\" layouts (5.1/6.1/7.1) by adding silent "
                L"channels. This is required for sending the PCM over HDMI if not using another downstream mixer, for "
                L"example when using WASAPI.");
        SendDlgItemMessage(m_Dlg, IDC_OUTPUT51_LEGACY, BM_SETCHECK, m_bOutput51Legacy, 0);
        addHint(IDC_OUTPUT51_LEGACY, L"Use the legacy 5.1 channel layout which uses back channels instead of side "
                                     L"channels, required for some audio devices and/or software.");
        SendDlgItemMessage(m_Dlg, IDC_EXPAND_MONO, BM_SETCHECK, m_bExpandMono, 0);
        addHint(IDC_EXPAND_MONO, L"Plays Mono Audio in both Left/Right Front channels, instead of the center.");
        SendDlgItemMessage(m_Dlg, IDC_EXPAND61, BM_SETCHECK, m_bExpand61, 0);
        addHint(IDC_EXPAND61,
                L"Converts 6.1 Audio to 7.1 by copying the Back Center into both Back Left and Right channels.");
        // Allowed output sample formats
        SendDlgItemMessage(m_Dlg, IDC_OUT_S16, BM_SETCHECK, m_bSampleFormats[SampleFormat_16], 0);
        SendDlgItemMessage(m_Dlg, IDC_OUT_S24, BM_SETCHECK, m_bSampleFormats[SampleFormat_24], 0);
        SendDlgItemMessage(m_Dlg, IDC_OUT_S32, BM_SETCHECK, m_bSampleFormats[SampleFormat_32], 0);
        SendDlgItemMessage(m_Dlg, IDC_OUT_FP32, BM_SETCHECK, m_bSampleFormats[SampleFormat_FP32], 0);
        SendDlgItemMessage(m_Dlg, IDC_OUT_U8, BM_SETCHECK, m_bSampleFormats[SampleFormat_U8], 0);
        SendDlgItemMessage(m_Dlg, IDC_OUT_S16_DITHER, BM_SETCHECK, m_bDither, 0);
        // Audio delay: checkbox + spin/edit pair (milliseconds, clamped to +/- 24h)
        SendDlgItemMessage(m_Dlg, IDC_DELAY_ENABLED, BM_SETCHECK, m_bAudioDelay, 0);
        EnableWindow(GetDlgItem(m_Dlg, IDC_DELAYSPIN), m_bAudioDelay);
        EnableWindow(GetDlgItem(m_Dlg, IDC_DELAY), m_bAudioDelay);
        SendDlgItemMessage(m_Dlg, IDC_DELAYSPIN, UDM_SETRANGE32, -1000 * 60 * 60 * 24, 1000 * 60 * 60 * 24);
        WCHAR stringBuffer[100];
        swprintf_s(stringBuffer, L"%d", m_iAudioDelay);
        SendDlgItemMessage(m_Dlg, IDC_DELAY, WM_SETTEXT, 0, (LPARAM)stringBuffer);
        SendDlgItemMessage(m_Dlg, IDC_TRAYICON, BM_SETCHECK, m_TrayIcon, 0);
    }
    return hr;
}
// Snapshot all settings from the filter into the page's cached member mirror,
// which OnReceiveMessage later compares against to detect dirty controls.
HRESULT CLAVAudioSettingsProp::LoadData()
{
    HRESULT hr = m_pAudioSettings->GetDRC(&m_bDRCEnabled, &m_iDRCLevel);

    // Bitstreaming configuration
    for (unsigned codec = 0; codec < Bitstream_NB; ++codec)
        m_bBitstreaming[codec] = m_pAudioSettings->GetBitstreamConfig((LAVBitstreamCodec)codec) != 0;
    m_bDTSHDFraming = m_pAudioSettings->GetDTSHDFraming();
    m_bBitstreamingFallback = m_pAudioSettings->GetBitstreamingFallback();

    // Playback options
    m_bAutoAVSync = m_pAudioSettings->GetAutoAVSync();
    m_bOutputStdLayout = m_pAudioSettings->GetOutputStandardLayout();
    m_bOutput51Legacy = m_pAudioSettings->GetOutput51LegacyLayout();
    m_bExpandMono = m_pAudioSettings->GetExpandMono();
    m_bExpand61 = m_pAudioSettings->GetExpand61();

    // Output sample formats
    for (unsigned fmt = 0; fmt < SampleFormat_NB; ++fmt)
        m_bSampleFormats[fmt] = m_pAudioSettings->GetSampleFormat((LAVAudioSampleFormat)fmt) != 0;
    m_bDither = m_pAudioSettings->GetSampleConvertDithering();

    m_pAudioSettings->GetAudioDelay(&m_bAudioDelay, &m_iAudioDelay);
    m_TrayIcon = m_pAudioSettings->GetTrayIcon();
    return hr;
}
// Dialog procedure for the settings page. For every control, compares the new state
// against the cached value from LoadData() and marks the page dirty on change;
// also keeps dependent controls (DTS-HD, delay edit) enabled/disabled.
INT_PTR CLAVAudioSettingsProp::OnReceiveMessage(HWND hwnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
    LRESULT lValue;
    switch (uMsg)
    {
    case WM_COMMAND:
        // Checkbox clicks: set the dirty flag only when the state actually differs
        if (LOWORD(wParam) == IDC_DRC && HIWORD(wParam) == BN_CLICKED)
        {
            lValue = SendDlgItemMessage(m_Dlg, IDC_DRC, BM_GETCHECK, 0, 0);
            if (lValue != m_bDRCEnabled)
            {
                SetDirty();
            }
            // DRC level slider is only meaningful while DRC is enabled
            EnableWindow(GetDlgItem(m_Dlg, IDC_DRC_LEVEL), (BOOL)lValue);
        }
        else if (LOWORD(wParam) == IDC_BS_AC3 && HIWORD(wParam) == BN_CLICKED)
        {
            bool bFlag = SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0) != 0;
            if (bFlag != m_bBitstreaming[Bitstream_AC3])
                SetDirty();
        }
        else if (LOWORD(wParam) == IDC_BS_EAC3 && HIWORD(wParam) == BN_CLICKED)
        {
            bool bFlag = SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0) != 0;
            if (bFlag != m_bBitstreaming[Bitstream_EAC3])
                SetDirty();
        }
        else if (LOWORD(wParam) == IDC_BS_TRUEHD && HIWORD(wParam) == BN_CLICKED)
        {
            bool bFlag = SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0) != 0;
            if (bFlag != m_bBitstreaming[Bitstream_TRUEHD])
                SetDirty();
        }
        else if (LOWORD(wParam) == IDC_BS_DTS && HIWORD(wParam) == BN_CLICKED)
        {
            bool bFlag = SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0) != 0;
            if (bFlag != m_bBitstreaming[Bitstream_DTS])
                SetDirty();
            // DTS-HD option depends on DTS bitstreaming being enabled
            EnableWindow(GetDlgItem(m_Dlg, IDC_BS_DTSHD), bFlag);
        }
        else if (LOWORD(wParam) == IDC_BS_DTSHD && HIWORD(wParam) == BN_CLICKED)
        {
            bool bFlag = SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0) != 0;
            if (bFlag != m_bBitstreaming[Bitstream_DTSHD])
                SetDirty();
            // HD framing option depends on DTS-HD being enabled
            EnableWindow(GetDlgItem(m_Dlg, IDC_BS_DTSHD_FRAMING), bFlag);
        }
        else if (LOWORD(wParam) == IDC_BS_DTSHD_FRAMING && HIWORD(wParam) == BN_CLICKED)
        {
            BOOL bFlag = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bFlag != m_bDTSHDFraming)
                SetDirty();
        }
        else if (LOWORD(wParam) == IDC_BS_FALLBACK && HIWORD(wParam) == BN_CLICKED)
        {
            BOOL bFlag = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bFlag != m_bBitstreamingFallback)
                SetDirty();
        }
        else if (LOWORD(wParam) == IDC_AUTO_AVSYNC && HIWORD(wParam) == BN_CLICKED)
        {
            BOOL bFlag = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bFlag != m_bAutoAVSync)
                SetDirty();
        }
        else if (LOWORD(wParam) == IDC_STANDARD_CH_LAYOUT && HIWORD(wParam) == BN_CLICKED)
        {
            BOOL bFlag = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bFlag != m_bOutputStdLayout)
                SetDirty();
        }
        else if (LOWORD(wParam) == IDC_OUTPUT51_LEGACY && HIWORD(wParam) == BN_CLICKED)
        {
            BOOL bFlag = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bFlag != m_bOutput51Legacy)
                SetDirty();
        }
        else if (LOWORD(wParam) == IDC_EXPAND_MONO && HIWORD(wParam) == BN_CLICKED)
        {
            BOOL bFlag = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bFlag != m_bExpandMono)
                SetDirty();
        }
        else if (LOWORD(wParam) == IDC_EXPAND61 && HIWORD(wParam) == BN_CLICKED)
        {
            BOOL bFlag = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bFlag != m_bExpand61)
                SetDirty();
        }
        else if (LOWORD(wParam) == IDC_OUT_S16 && HIWORD(wParam) == BN_CLICKED)
        {
            bool bFlag = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0) != 0;
            if (bFlag != m_bSampleFormats[SampleFormat_16])
                SetDirty();
        }
        else if (LOWORD(wParam) == IDC_OUT_S24 && HIWORD(wParam) == BN_CLICKED)
        {
            bool bFlag = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0) != 0;
            if (bFlag != m_bSampleFormats[SampleFormat_24])
                SetDirty();
        }
        else if (LOWORD(wParam) == IDC_OUT_S32 && HIWORD(wParam) == BN_CLICKED)
        {
            bool bFlag = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0) != 0;
            if (bFlag != m_bSampleFormats[SampleFormat_32])
                SetDirty();
        }
        else if (LOWORD(wParam) == IDC_OUT_FP32 && HIWORD(wParam) == BN_CLICKED)
        {
            bool bFlag = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0) != 0;
            if (bFlag != m_bSampleFormats[SampleFormat_FP32])
                SetDirty();
        }
        else if (LOWORD(wParam) == IDC_OUT_U8 && HIWORD(wParam) == BN_CLICKED)
        {
            bool bFlag = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0) != 0;
            if (bFlag != m_bSampleFormats[SampleFormat_U8])
                SetDirty();
        }
        else if (LOWORD(wParam) == IDC_OUT_S16_DITHER && HIWORD(wParam) == BN_CLICKED)
        {
            BOOL bFlag = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bFlag != m_bDither)
                SetDirty();
        }
        else if (LOWORD(wParam) == IDC_DELAY_ENABLED && HIWORD(wParam) == BN_CLICKED)
        {
            BOOL bFlag = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bFlag != m_bAudioDelay)
                SetDirty();
            // Delay edit/spin pair only active while the delay is enabled
            EnableWindow(GetDlgItem(m_Dlg, IDC_DELAYSPIN), bFlag);
            EnableWindow(GetDlgItem(m_Dlg, IDC_DELAY), bFlag);
        }
        else if (LOWORD(wParam) == IDC_DELAY && HIWORD(wParam) == EN_CHANGE)
        {
            // Validate the delay edit box: reject non-numeric input (undo the edit),
            // normalize the text to the parsed number, and flag changes
            WCHAR buffer[100];
            SendDlgItemMessage(m_Dlg, LOWORD(wParam), WM_GETTEXT, 100, (LPARAM)&buffer);
            int delay = _wtoi(buffer);
            size_t len = wcslen(buffer);
            if (delay == 0 && (buffer[0] != L'0' || len > 1))
            {
                SendDlgItemMessage(m_Dlg, LOWORD(wParam), EM_UNDO, 0, 0);
            }
            else
            {
                swprintf_s(buffer, L"%d", delay);
                if (wcslen(buffer) != len)
                    SendDlgItemMessage(m_Dlg, IDC_DELAY, WM_SETTEXT, 0, (LPARAM)buffer);
                if (delay != m_iAudioDelay)
                    SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_TRAYICON && HIWORD(wParam) == BN_CLICKED)
        {
            BOOL bFlag = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0) != 0;
            if (bFlag != m_TrayIcon)
                SetDirty();
        }
        break;
    case WM_HSCROLL:
        // DRC level slider moved: update the percentage label and dirty-flag on change
        lValue = SendDlgItemMessage(m_Dlg, IDC_DRC_LEVEL, TBM_GETPOS, 0, 0);
        if (lValue != m_iDRCLevel)
        {
            SetDirty();
        }
        WCHAR buffer[10];
        _snwprintf_s(buffer, _TRUNCATE, L"%d%%", (int)lValue);
        SendDlgItemMessage(m_Dlg, IDC_DRC_LEVEL_TEXT, WM_SETTEXT, 0, (LPARAM)buffer);
        break;
    }
    // Let the parent class handle the message.
    return __super::OnReceiveMessage(hwnd, uMsg, wParam, lParam);
}
/////////////////////////////////////////////////////////////////////////////////////////////////////////
// Mixer Configurations
// Property page for the audio mixer configuration; the dialog template and
// title string are supplied to the CBaseDSPropPage base class.
CLAVAudioMixingProp::CLAVAudioMixingProp(LPUNKNOWN pUnk, HRESULT *phr)
    : CBaseDSPropPage(NAME("LAVCAudioMixing"), pUnk, IDD_PROPPAGE_AUDIO_MIXING, IDS_MIXER)
{
}
// Nothing to release here; interface cleanup happens in OnDisconnect.
CLAVAudioMixingProp::~CLAVAudioMixingProp()
{
}
// Attach to the filter: acquire its settings interface, which is held until
// OnDisconnect releases it.
HRESULT CLAVAudioMixingProp::OnConnect(IUnknown *pUnk)
{
    if (!pUnk)
        return E_POINTER;
    // A previous connection must have been torn down before reconnecting.
    ASSERT(m_pAudioSettings == nullptr);
    return pUnk->QueryInterface(&m_pAudioSettings);
}
// Detach from the filter and drop the settings interface reference.
HRESULT CLAVAudioMixingProp::OnDisconnect()
{
    SafeRelease(&m_pAudioSettings);
    return S_OK;
}
// Speaker layouts selectable in the output combo box. The index order must
// stay in sync with the strings added to IDC_OUTPUT_SPEAKERS in OnActivate.
static DWORD dwSpkLayouts[] = {
    AV_CH_LAYOUT_MONO, AV_CH_LAYOUT_STEREO, AV_CH_LAYOUT_2_2,
    AV_CH_LAYOUT_5POINT1, AV_CH_LAYOUT_6POINT1, AV_CH_LAYOUT_7POINT1,
};
static DWORD get_speaker_index(DWORD dwLayout)
{
int i = 0;
for (i = 0; i < countof(dwSpkLayouts); i++)
{
if (dwSpkLayouts[i] == dwLayout)
return i;
}
return (DWORD)-1;
}
/* Push the current state of the dialog controls into the filter's mixing
 * settings, then reload so the dirty-tracking baseline matches what was
 * applied. */
HRESULT CLAVAudioMixingProp::OnApplyChanges()
{
    ASSERT(m_pAudioSettings != nullptr);
    HRESULT hr = S_OK;
    DWORD dwVal = 0;
    BOOL bVal = FALSE;
    // Speaker layout: the combo index maps 1:1 onto the dwSpkLayouts table.
    dwVal = (DWORD)SendDlgItemMessage(m_Dlg, IDC_OUTPUT_SPEAKERS, CB_GETCURSEL, 0, 0);
    m_pAudioSettings->SetMixingLayout(dwSpkLayouts[dwVal]);
    bVal = (BOOL)SendDlgItemMessage(m_Dlg, IDC_MIXING, BM_GETCHECK, 0, 0);
    m_pAudioSettings->SetMixingEnabled(bVal);
    // Fold the three option checkboxes into a single flags word.
    DWORD dwMixingFlags = 0;
    bVal = (BOOL)SendDlgItemMessage(m_Dlg, IDC_UNTOUCHED_STEREO, BM_GETCHECK, 0, 0);
    if (bVal)
        dwMixingFlags |= LAV_MIXING_FLAG_UNTOUCHED_STEREO;
    bVal = (BOOL)SendDlgItemMessage(m_Dlg, IDC_NORMALIZE_MATRIX, BM_GETCHECK, 0, 0);
    if (bVal)
        dwMixingFlags |= LAV_MIXING_FLAG_NORMALIZE_MATRIX;
    bVal = (BOOL)SendDlgItemMessage(m_Dlg, IDC_CLIP_PROTECTION, BM_GETCHECK, 0, 0);
    if (bVal)
        dwMixingFlags |= LAV_MIXING_FLAG_CLIP_PROTECTION;
    m_pAudioSettings->SetMixingFlags(dwMixingFlags);
    // Matrix-encoding radio group: Dolby takes precedence over DPL2; if
    // neither is checked the mode is None (bNormal is read but implied).
    BOOL bNormal = (BOOL)SendDlgItemMessage(m_Dlg, IDC_MIXMODE_NORMAL, BM_GETCHECK, 0, 0);
    BOOL bDolby = (BOOL)SendDlgItemMessage(m_Dlg, IDC_MIXMODE_DOLBY, BM_GETCHECK, 0, 0);
    BOOL bDPL2 = (BOOL)SendDlgItemMessage(m_Dlg, IDC_MIXMODE_DPL2, BM_GETCHECK, 0, 0);
    m_pAudioSettings->SetMixingMode(bDolby ? MatrixEncoding_Dolby
                                           : (bDPL2 ? MatrixEncoding_DPLII : MatrixEncoding_None));
    // Mixing levels come straight from the trackbar positions.
    DWORD dwMixCenter = (DWORD)SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_CENTER, TBM_GETPOS, 0, 0);
    DWORD dwMixSurround = (DWORD)SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_SURROUND, TBM_GETPOS, 0, 0);
    DWORD dwMixLFE = (DWORD)SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_LFE, TBM_GETPOS, 0, 0);
    m_pAudioSettings->SetMixingLevels(dwMixCenter, dwMixSurround, dwMixLFE);
    // Re-read the applied values so subsequent change detection is accurate.
    LoadData();
    return hr;
}
/* Initialize the dialog controls (combo box entries, trackbar ranges, help
 * hints) and populate them with the filter's current mixing settings. */
HRESULT CLAVAudioMixingProp::OnActivate()
{
    HRESULT hr = S_OK;
    INITCOMMONCONTROLSEX icc;
    icc.dwSize = sizeof(INITCOMMONCONTROLSEX);
    icc.dwICC = ICC_BAR_CLASSES | ICC_STANDARD_CLASSES;
    if (InitCommonControlsEx(&icc) == FALSE)
    {
        return E_FAIL;
    }
    ASSERT(m_pAudioSettings != nullptr);
    // Fill the output layout combo; entry order must match dwSpkLayouts.
    WCHAR spkMono[] = L"Mono";
    WCHAR spkStereo[] = L"Stereo";
    WCHAR spkQuadro[] = L"4.0";
    WCHAR spk51Surround[] = L"5.1";
    WCHAR spk61Surround[] = L"6.1";
    WCHAR spk71Surround[] = L"7.1";
    SendDlgItemMessage(m_Dlg, IDC_OUTPUT_SPEAKERS, CB_RESETCONTENT, 0, 0);
    SendDlgItemMessage(m_Dlg, IDC_OUTPUT_SPEAKERS, CB_ADDSTRING, 0, (LPARAM)spkMono);
    SendDlgItemMessage(m_Dlg, IDC_OUTPUT_SPEAKERS, CB_ADDSTRING, 0, (LPARAM)spkStereo);
    SendDlgItemMessage(m_Dlg, IDC_OUTPUT_SPEAKERS, CB_ADDSTRING, 0, (LPARAM)spkQuadro);
    SendDlgItemMessage(m_Dlg, IDC_OUTPUT_SPEAKERS, CB_ADDSTRING, 0, (LPARAM)spk51Surround);
    SendDlgItemMessage(m_Dlg, IDC_OUTPUT_SPEAKERS, CB_ADDSTRING, 0, (LPARAM)spk61Surround);
    SendDlgItemMessage(m_Dlg, IDC_OUTPUT_SPEAKERS, CB_ADDSTRING, 0, (LPARAM)spk71Surround);
    // Center/surround trackbars: 0..10000 maps to a 0.0..1.0 mix level.
    SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_CENTER, TBM_SETRANGE, 0, MAKELONG(0, 10000));
    SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_CENTER, TBM_SETTICFREQ, 100, 0);
    SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_CENTER, TBM_SETLINESIZE, 0, 100);
    SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_CENTER, TBM_SETPAGESIZE, 0, 100);
    SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_SURROUND, TBM_SETRANGE, 0, MAKELONG(0, 10000));
    SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_SURROUND, TBM_SETTICFREQ, 100, 0);
    SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_SURROUND, TBM_SETLINESIZE, 0, 100);
    SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_SURROUND, TBM_SETPAGESIZE, 0, 100);
    // LFE trackbar allows boosting up to 3.0 (0..30000).
    SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_LFE, TBM_SETRANGE, 0, MAKELONG(0, 30000));
    SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_LFE, TBM_SETTICFREQ, 100, 0);
    SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_LFE, TBM_SETLINESIZE, 0, 100);
    SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_LFE, TBM_SETPAGESIZE, 0, 100);
    // Tooltip-style help texts for the option checkboxes.
    addHint(IDC_UNTOUCHED_STEREO, L"With this option on, stereo sources will not be mixed. This is useful when you "
                                  L"want to mix all surround sources to e.g. 5.1, but leave stereo untouched.");
    addHint(IDC_NORMALIZE_MATRIX,
            L"Normalizing the matrix will apply a global attenuation to the audio, in effect making it quieter to "
            L"ensure that there is a consistent volume throughout the file, and no clipping occurs.\n\n"
            L"This mode will produce inconsistent volumes between different source formats (stereo will be louder than "
            L"5.1), but the volume during playback of one file will not change.");
    addHint(IDC_CLIP_PROTECTION,
            L"Clipping protection analyzes the audio, and reduces the volume if clipping is detected.\n\n"
            L"This mode tries to preserve the original volume of the audio, and is generally more consistent between "
            L"different source formats. It may however cause a sudden volume change during playback. "
            L"In addition, this mode has a higher volume than a normalized matrix and is preferred on weak speakers or "
            L"headphones.");
    // Load the filter's current settings and reflect them in the controls.
    hr = LoadData();
    if (SUCCEEDED(hr))
    {
        SendDlgItemMessage(m_Dlg, IDC_MIXING, BM_SETCHECK, m_bMixing, 0);
        SendDlgItemMessage(m_Dlg, IDC_OUTPUT_SPEAKERS, CB_SETCURSEL, get_speaker_index(m_dwSpeakerLayout), 0);
        SendDlgItemMessage(m_Dlg, IDC_UNTOUCHED_STEREO, BM_SETCHECK, !!(m_dwFlags & LAV_MIXING_FLAG_UNTOUCHED_STEREO),
                           0);
        SendDlgItemMessage(m_Dlg, IDC_NORMALIZE_MATRIX, BM_SETCHECK, !!(m_dwFlags & LAV_MIXING_FLAG_NORMALIZE_MATRIX),
                           0);
        SendDlgItemMessage(m_Dlg, IDC_CLIP_PROTECTION, BM_SETCHECK, !!(m_dwFlags & LAV_MIXING_FLAG_CLIP_PROTECTION), 0);
        SendDlgItemMessage(m_Dlg, IDC_MIXMODE_NORMAL, BM_SETCHECK, (m_dwMixingMode == MatrixEncoding_None), 0);
        SendDlgItemMessage(m_Dlg, IDC_MIXMODE_DOLBY, BM_SETCHECK, (m_dwMixingMode == MatrixEncoding_Dolby), 0);
        SendDlgItemMessage(m_Dlg, IDC_MIXMODE_DPL2, BM_SETCHECK, (m_dwMixingMode == MatrixEncoding_DPLII), 0);
        SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_CENTER, TBM_SETPOS, 1, m_dwMixCenter);
        SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_SURROUND, TBM_SETPOS, 1, m_dwMixSurround);
        SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_LFE, TBM_SETPOS, 1, m_dwMixLFE);
        // Display the levels as fractions (position / 10000).
        WCHAR buffer[10];
        _snwprintf_s(buffer, _TRUNCATE, L"%.2f", (double)m_dwMixCenter / 10000.0);
        SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_CENTER_TEXT, WM_SETTEXT, 0, (LPARAM)buffer);
        _snwprintf_s(buffer, _TRUNCATE, L"%.2f", (double)m_dwMixSurround / 10000.0);
        SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_SURROUND_TEXT, WM_SETTEXT, 0, (LPARAM)buffer);
        _snwprintf_s(buffer, _TRUNCATE, L"%.2f", (double)m_dwMixLFE / 10000.0);
        SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_LFE_TEXT, WM_SETTEXT, 0, (LPARAM)buffer);
    }
    return hr;
}
/* Cache the filter's current mixing settings in member variables; these act
 * as the baseline for the dirty-state checks in OnReceiveMessage. */
HRESULT CLAVAudioMixingProp::LoadData()
{
    HRESULT hr = S_OK;
    m_dwSpeakerLayout = m_pAudioSettings->GetMixingLayout();
    m_bMixing = m_pAudioSettings->GetMixingEnabled();
    m_dwFlags = m_pAudioSettings->GetMixingFlags();
    m_dwMixingMode = m_pAudioSettings->GetMixingMode();
    m_pAudioSettings->GetMixingLevels(&m_dwMixCenter, &m_dwMixSurround, &m_dwMixLFE);
    return hr;
}
/* Dialog message handler: compares each changed control against the cached
 * settings from LoadData() and marks the page dirty (enables Apply) when the
 * user actually changed something. Also updates the live level text labels. */
INT_PTR CLAVAudioMixingProp::OnReceiveMessage(HWND hwnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
    LRESULT lValue;
    switch (uMsg)
    {
    case WM_COMMAND:
        if (HIWORD(wParam) == CBN_SELCHANGE && LOWORD(wParam) == IDC_OUTPUT_SPEAKERS)
        {
            lValue = SendDlgItemMessage(m_Dlg, LOWORD(wParam), CB_GETCURSEL, 0, 0);
            if (dwSpkLayouts[lValue] != m_dwSpeakerLayout)
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_MIXING && HIWORD(wParam) == BN_CLICKED)
        {
            lValue = SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (lValue != m_bMixing)
            {
                SetDirty();
            }
        }
        // For the flag checkboxes: the new check state (0/1) equals the
        // negation of the stored flag bit exactly when the state changed.
        else if (LOWORD(wParam) == IDC_UNTOUCHED_STEREO && HIWORD(wParam) == BN_CLICKED)
        {
            lValue = SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (lValue == !(m_dwFlags & LAV_MIXING_FLAG_UNTOUCHED_STEREO))
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_NORMALIZE_MATRIX && HIWORD(wParam) == BN_CLICKED)
        {
            lValue = SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (lValue == !(m_dwFlags & LAV_MIXING_FLAG_NORMALIZE_MATRIX))
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_CLIP_PROTECTION && HIWORD(wParam) == BN_CLICKED)
        {
            lValue = SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (lValue == !(m_dwFlags & LAV_MIXING_FLAG_CLIP_PROTECTION))
            {
                SetDirty();
            }
        }
        // Mix-mode radio buttons: dirty if the clicked button's new state
        // disagrees with whether it corresponds to the stored mode.
        else if (LOWORD(wParam) == IDC_MIXMODE_NORMAL && HIWORD(wParam) == BN_CLICKED)
        {
            lValue = SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (lValue != (m_dwMixingMode == MatrixEncoding_None))
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_MIXMODE_DOLBY && HIWORD(wParam) == BN_CLICKED)
        {
            lValue = SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (lValue != (m_dwMixingMode == MatrixEncoding_Dolby))
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_MIXMODE_DPL2 && HIWORD(wParam) == BN_CLICKED)
        {
            lValue = SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (lValue != (m_dwMixingMode == MatrixEncoding_DPLII))
            {
                SetDirty();
            }
        }
        break;
    case WM_HSCROLL:
        // Trackbar moved: check for changes and refresh the level label
        // (position / 10000 displayed as a fraction).
        if ((HWND)lParam == GetDlgItem(m_Dlg, IDC_MIX_LEVEL_CENTER))
        {
            lValue = SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_CENTER, TBM_GETPOS, 0, 0);
            if (lValue != m_dwMixCenter)
            {
                SetDirty();
            }
            WCHAR buffer[10];
            _snwprintf_s(buffer, _TRUNCATE, L"%.2f", (double)lValue / 10000.0);
            SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_CENTER_TEXT, WM_SETTEXT, 0, (LPARAM)buffer);
        }
        else if ((HWND)lParam == GetDlgItem(m_Dlg, IDC_MIX_LEVEL_SURROUND))
        {
            lValue = SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_SURROUND, TBM_GETPOS, 0, 0);
            if (lValue != m_dwMixSurround)
            {
                SetDirty();
            }
            WCHAR buffer[10];
            _snwprintf_s(buffer, _TRUNCATE, L"%.2f", (double)lValue / 10000.0);
            SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_SURROUND_TEXT, WM_SETTEXT, 0, (LPARAM)buffer);
        }
        else if ((HWND)lParam == GetDlgItem(m_Dlg, IDC_MIX_LEVEL_LFE))
        {
            lValue = SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_LFE, TBM_GETPOS, 0, 0);
            if (lValue != m_dwMixLFE)
            {
                SetDirty();
            }
            WCHAR buffer[10];
            _snwprintf_s(buffer, _TRUNCATE, L"%.2f", (double)lValue / 10000.0);
            SendDlgItemMessage(m_Dlg, IDC_MIX_LEVEL_LFE_TEXT, WM_SETTEXT, 0, (LPARAM)buffer);
        }
        break;
    }
    // Let the parent class handle the message.
    return __super::OnReceiveMessage(hwnd, uMsg, wParam, lParam);
}
/////////////////////////////////////////////////////////////////////////////////////////////////////////
// Format Configurations
// Property page listing the decodable audio formats with per-codec
// enable/disable checkboxes.
CLAVAudioFormatsProp::CLAVAudioFormatsProp(LPUNKNOWN pUnk, HRESULT *phr)
    : CBaseDSPropPage(NAME("LAVCAudioFormats"), pUnk, IDD_PROPPAGE_FORMATS, IDS_FORMATS)
{
}
// Nothing to release here; interface cleanup happens in OnDisconnect.
CLAVAudioFormatsProp::~CLAVAudioFormatsProp()
{
}
// Attach to the filter: acquire its settings interface, which is held until
// OnDisconnect releases it.
HRESULT CLAVAudioFormatsProp::OnConnect(IUnknown *pUnk)
{
    if (!pUnk)
        return E_POINTER;
    // A previous connection must have been torn down before reconnecting.
    ASSERT(m_pAudioSettings == nullptr);
    return pUnk->QueryInterface(&m_pAudioSettings);
}
// Detach from the filter and drop the settings interface reference.
HRESULT CLAVAudioFormatsProp::OnDisconnect()
{
    SafeRelease(&m_pAudioSettings);
    return S_OK;
}
/* Apply the codec list: every row's check state is written into the filter,
 * with the row index doubling as the LAVAudioCodec enum value. Afterwards the
 * cached state is refreshed so dirty tracking starts from the applied data. */
HRESULT CLAVAudioFormatsProp::OnApplyChanges()
{
    ASSERT(m_pAudioSettings != nullptr);
    HWND hList = GetDlgItem(m_Dlg, IDC_CODECS);
    const int nCount = ListView_GetItemCount(hList);
    for (int nCodec = 0; nCodec < nCount; ++nCodec)
    {
        BOOL bEnabled = ListView_GetCheckState(hList, nCodec);
        m_pAudioSettings->SetFormatConfiguration((LAVAudioCodec)nCodec, bEnabled);
    }
    LoadData();
    return S_OK;
}
/* Build the codec ListView (checkbox column + codec name + description) and
 * initialize the check states from the filter's format configuration. */
HRESULT CLAVAudioFormatsProp::OnActivate()
{
    HRESULT hr = S_OK;
    INITCOMMONCONTROLSEX icc;
    icc.dwSize = sizeof(INITCOMMONCONTROLSEX);
    icc.dwICC = ICC_BAR_CLASSES | ICC_STANDARD_CLASSES | ICC_LISTVIEW_CLASSES;
    if (InitCommonControlsEx(&icc) == FALSE)
    {
        return E_FAIL;
    }
    ASSERT(m_pAudioSettings != nullptr);
    // Setup ListView control for format configuration
    SendDlgItemMessage(m_Dlg, IDC_CODECS, CCM_DPISCALE, TRUE, 0);
    HWND hlv = GetDlgItem(m_Dlg, IDC_CODECS);
    ListView_SetExtendedListViewStyle(hlv, LVS_EX_CHECKBOXES | LVS_EX_FULLROWSELECT | LVS_EX_GRIDLINES);
    // Column 0 is a narrow checkbox-only column; nCol is advanced by
    // ListView_AddCol for the name and description columns.
    int nCol = 1;
    LVCOLUMN lvc = {LVCF_WIDTH, 0, 20, 0};
    ListView_InsertColumn(hlv, 0, &lvc);
    ListView_AddCol(hlv, nCol, 75, L"Codec", false);
    ListView_AddCol(hlv, nCol, 350, L"Description", false);
    ListView_DeleteAllItems(hlv);
    ListView_SetItemCount(hlv, Codec_AudioNB);
    // Create entries for the formats
    LVITEM lvi;
    memset(&lvi, 0, sizeof(lvi));
    lvi.mask = LVIF_TEXT | LVIF_PARAM;
    int nItem = 0;
    for (nItem = 0; nItem < Codec_AudioNB; ++nItem)
    {
        const codec_config_t *config = get_codec_config((LAVAudioCodec)nItem);
        // Create main entry (iItem past the end appends the row)
        lvi.iItem = nItem + 1;
        ListView_InsertItem(hlv, &lvi);
        // Set sub item texts (converted from the UTF-8/ANSI codec tables)
        ATL::CA2W name(config->name);
        ListView_SetItemText(hlv, nItem, 1, (LPWSTR)name);
        ATL::CA2W desc(config->description);
        ListView_SetItemText(hlv, nItem, 2, (LPWSTR)desc);
    }
    hr = LoadData();
    if (SUCCEEDED(hr))
    {
        // Set checked state
        for (nItem = 0; nItem < ListView_GetItemCount(hlv); nItem++)
        {
            ListView_SetCheckState(hlv, nItem, m_bFormats[nItem]);
        }
    }
    return hr;
}
/* Snapshot the filter's per-codec enable flags into m_bFormats; this is the
 * baseline used by OnReceiveMessage to detect user changes. */
HRESULT CLAVAudioFormatsProp::LoadData()
{
    for (unsigned nCodec = 0; nCodec < Codec_AudioNB; ++nCodec)
    {
        m_bFormats[nCodec] = (m_pAudioSettings->GetFormatConfiguration((LAVAudioCodec)nCodec) != 0);
    }
    return S_OK;
}
/* Watch the codec ListView for check-state changes and mark the page dirty
 * when a row's checkbox no longer matches the cached m_bFormats state. */
INT_PTR CLAVAudioFormatsProp::OnReceiveMessage(HWND hwnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
    switch (uMsg)
    {
    case WM_NOTIFY:
        NMHDR *hdr = (LPNMHDR)lParam;
        // Only notifications from the codec list are of interest.
        if (hdr->idFrom == IDC_CODECS)
        {
            switch (hdr->code)
            {
            case LVN_ITEMCHANGED:
                LPNMLISTVIEW nmlv = (LPNMLISTVIEW)lParam;
                bool check = ListView_GetCheckState(hdr->hwndFrom, nmlv->iItem) ? true : false;
                if (check != m_bFormats[nmlv->iItem])
                {
                    SetDirty();
                }
                return TRUE;
            }
        }
        break;
    }
    // Let the parent class handle the message.
    return __super::OnReceiveMessage(hwnd, uMsg, wParam, lParam);
}
/////////////////////////////////////////////////////////////////////////////////////////////////////////
// Audio Status Panel
// Maximum number of channel meters shown on the status page.
#define MAX_CHANNELS 8
// Dialog control IDs for the per-channel volume progress bars...
static int iddVolumeControls[MAX_CHANNELS] = {IDC_VOLUME1, IDC_VOLUME2, IDC_VOLUME3, IDC_VOLUME4,
                                              IDC_VOLUME5, IDC_VOLUME6, IDC_VOLUME7, IDC_VOLUME8};
// ...and their matching channel-name labels (same index order).
static int iddVolumeDescs[MAX_CHANNELS] = {IDC_VOLUME1_DESC, IDC_VOLUME2_DESC, IDC_VOLUME3_DESC, IDC_VOLUME4_DESC,
                                           IDC_VOLUME5_DESC, IDC_VOLUME6_DESC, IDC_VOLUME7_DESC, IDC_VOLUME8_DESC};
// Read-only status page showing decode/output details and live channel
// volume meters.
CLAVAudioStatusProp::CLAVAudioStatusProp(LPUNKNOWN pUnk, HRESULT *phr)
    : CBaseDSPropPage(NAME("LAVCAudioStatusProp"), pUnk, IDD_PROPPAGE_STATUS, IDS_STATUS)
{
}
// Nothing to release here; interface cleanup happens in OnDisconnect.
CLAVAudioStatusProp::~CLAVAudioStatusProp()
{
}
// Attach to the filter: acquire its status interface, which is held until
// OnDisconnect releases it.
HRESULT CLAVAudioStatusProp::OnConnect(IUnknown *pUnk)
{
    if (!pUnk)
        return E_POINTER;
    // A previous connection must have been torn down before reconnecting.
    ASSERT(m_pAudioStatus == nullptr);
    return pUnk->QueryInterface(&m_pAudioStatus);
}
// Detach from the filter and drop the status interface reference.
HRESULT CLAVAudioStatusProp::OnDisconnect()
{
    SafeRelease(&m_pAudioStatus);
    return S_OK;
}
/* Fill the status page with input/output stream details, start the periodic
 * timer that refreshes the volume meters, and label each meter with its
 * channel name. */
HRESULT CLAVAudioStatusProp::OnActivate()
{
    HRESULT hr = S_OK;
    INITCOMMONCONTROLSEX icc;
    icc.dwSize = sizeof(INITCOMMONCONTROLSEX);
    icc.dwICC = ICC_STANDARD_CLASSES | ICC_BAR_CLASSES;
    if (InitCommonControlsEx(&icc) == FALSE)
    {
        return E_FAIL;
    }
    ASSERT(m_pAudioStatus != nullptr);
    m_nChannels = 0;
    // Input (decode) side details.
    const char *codec = nullptr;
    const char *decodeFormat = nullptr;
    int nDecodeChannels = 0;
    int nDecodeSampleRate = 0;
    DWORD dwDecodeChannelMask;
    hr = m_pAudioStatus->GetDecodeDetails(&codec, &decodeFormat, &nDecodeChannels, &nDecodeSampleRate,
                                          &dwDecodeChannelMask);
    if (SUCCEEDED(hr))
    {
        WCHAR buffer[100];
        _snwprintf_s(buffer, _TRUNCATE, L"%d / 0x%x", nDecodeChannels, dwDecodeChannelMask);
        SendDlgItemMessage(m_Dlg, IDC_INPUT_CHANNEL, WM_SETTEXT, 0, (LPARAM)buffer);
        _snwprintf_s(buffer, _TRUNCATE, L"%d", nDecodeSampleRate);
        SendDlgItemMessage(m_Dlg, IDC_INPUT_SAMPLERATE, WM_SETTEXT, 0, (LPARAM)buffer);
        // %S converts the narrow strings from the status interface to wide.
        _snwprintf_s(buffer, _TRUNCATE, L"%S", codec);
        SendDlgItemMessage(m_Dlg, IDC_INPUT_CODEC, WM_SETTEXT, 0, (LPARAM)buffer);
        _snwprintf_s(buffer, _TRUNCATE, L"%S", decodeFormat);
        SendDlgItemMessage(m_Dlg, IDC_INPUT_FORMAT, WM_SETTEXT, 0, (LPARAM)buffer);
    }
    // Output side details; S_OK means PCM output, any other success value is
    // treated as bitstreaming (no PCM details available).
    const char *outputFormat = nullptr;
    int nOutputChannels = 0;
    int nOutputSampleRate = 0;
    DWORD dwOutputChannelMask = 0;
    hr = m_pAudioStatus->GetOutputDetails(&outputFormat, &nOutputChannels, &nOutputSampleRate, &dwOutputChannelMask);
    if (SUCCEEDED(hr))
    {
        WCHAR buffer[100];
        if (hr == S_OK)
        {
            _snwprintf_s(buffer, _TRUNCATE, L"%d / 0x%x", nOutputChannels, dwOutputChannelMask);
            SendDlgItemMessage(m_Dlg, IDC_OUTPUT_CHANNEL, WM_SETTEXT, 0, (LPARAM)buffer);
            _snwprintf_s(buffer, _TRUNCATE, L"%d", nOutputSampleRate);
            SendDlgItemMessage(m_Dlg, IDC_OUTPUT_SAMPLERATE, WM_SETTEXT, 0, (LPARAM)buffer);
            _snwprintf_s(buffer, _TRUNCATE, L"PCM");
            SendDlgItemMessage(m_Dlg, IDC_OUTPUT_CODEC, WM_SETTEXT, 0, (LPARAM)buffer);
            _snwprintf_s(buffer, _TRUNCATE, L"%S", outputFormat);
            SendDlgItemMessage(m_Dlg, IDC_OUTPUT_FORMAT, WM_SETTEXT, 0, (LPARAM)buffer);
            m_nChannels = nOutputChannels;
        }
        else
        {
            _snwprintf_s(buffer, _TRUNCATE, L"Bitstreaming");
            SendDlgItemMessage(m_Dlg, IDC_OUTPUT_CODEC, WM_SETTEXT, 0, (LPARAM)buffer);
        }
    }
    // Refresh the volume meters every 250ms (see OnReceiveMessage/WM_TIMER).
    SetTimer(m_Dlg, 1, 250, nullptr);
    m_pAudioStatus->EnableVolumeStats();
    WCHAR chBuffer[5];
    if (dwOutputChannelMask == 0 && nOutputChannels != 0)
    {
        // 0x4 is only front center, 0x3 is front left+right
        dwOutputChannelMask = nOutputChannels == 1 ? 0x4 : 0x3;
    }
    for (int i = 0; i < MAX_CHANNELS; ++i)
    {
        SendDlgItemMessage(m_Dlg, iddVolumeControls[i], PBM_SETRANGE, 0, MAKELPARAM(0, 50));
        _snwprintf_s(chBuffer, _TRUNCATE, L"%S", get_channel_desc(get_flag_from_channel(dwOutputChannelMask, i)));
        SendDlgItemMessage(m_Dlg, iddVolumeDescs[i], WM_SETTEXT, 0, (LPARAM)chBuffer);
    }
    return hr;
}
// Stop the meter refresh timer and turn off volume statistics collection in
// the filter when the page is hidden.
HRESULT CLAVAudioStatusProp::OnDeactivate()
{
    KillTimer(m_Dlg, 1);
    m_pAudioStatus->DisableVolumeStats();
    return S_OK;
}
/* Refresh the per-channel volume meters. The average level in dB is shifted
 * by +50 so the progress bar's 0..50 range covers -50dB..0dB; channels whose
 * level cannot be read are shown empty. */
void CLAVAudioStatusProp::UpdateVolumeDisplay()
{
    for (int ch = 0; ch < m_nChannels; ++ch)
    {
        float fDB = 0.0f;
        int nPos = 0;
        if (SUCCEEDED(m_pAudioStatus->GetChannelVolumeAverage(ch, &fDB)))
            nPos = (int)fDB + 50;
        SendDlgItemMessage(m_Dlg, iddVolumeControls[ch], PBM_SETPOS, nPos, 0);
    }
}
// Message handler: the periodic timer started in OnActivate drives the
// volume meter refresh; everything else goes to the base class.
INT_PTR CLAVAudioStatusProp::OnReceiveMessage(HWND hwnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
    if (uMsg == WM_TIMER)
        UpdateVolumeDisplay();
    // Let the parent class handle the message.
    return __super::OnReceiveMessage(hwnd, uMsg, wParam, lParam);
}
| 40,487
|
C++
|
.cpp
| 887
| 37.67531
| 120
| 0.637661
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| false
| true
| true
| false
|
22,150
|
dts.cpp
|
Nevcairiel_LAVFilters/decoder/LAVAudio/parser/dts.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#define __STDC_CONSTANT_MACROS
// Exclude inline asm from being included
#define AVCODEC_X86_MATHOPS_H
extern "C"
{
#pragma warning(push)
#pragma warning(disable : 4101)
#pragma warning(disable : 4244)
#pragma warning(disable : 4305)
#pragma warning(disable : 5033)
#include "libavcodec/avcodec.h"
#include "libavcodec/get_bits.h"
#include "libavcodec/dca_sample_rate_tab.h"
#pragma warning(pop)
extern int avpriv_dca_convert_bitstream(const uint8_t *src, int src_size, uint8_t *dst, int max_size);
};
#include "dts.h"
#include "parser.h"
#include <vector>
// State for the stand-alone DTS header parser: a single lavc bit reader that
// is reused across all parse_* helpers (allocated in init_dts_parser,
// freed in close_dts_parser).
struct DTSParserContext
{
    GetBitContext *gb;
};
/* Allocate and zero-initialize a DTS parser context.
 * Returns 0 on success, -1 on invalid argument or allocation failure.
 * calloc replaces the original malloc+memset pair (same effect, one call). */
int init_dts_parser(DTSParserContext **pContext)
{
    if (!pContext)
        return -1;
    *pContext = (DTSParserContext *)calloc(1, sizeof(DTSParserContext));
    if (*pContext == nullptr)
        return -1;
    (*pContext)->gb = new GetBitContext();
    return 0;
}
/* Free a context created by init_dts_parser and null the caller's pointer.
 * Returns 0 on success, -1 on invalid argument.
 * Also guards against *pContext being nullptr (e.g. init failed or close
 * called twice) — the original dereferenced it unconditionally. */
int close_dts_parser(DTSParserContext **pContext)
{
    if (!pContext || !*pContext)
        return -1;
    delete (*pContext)->gb;
    free(*pContext);
    *pContext = nullptr;
    return 0;
}
static int parse_dts_xch_header(DTSParserContext *pContext, DTSHeader *pHeader, const uint8_t *pBuffer, unsigned uSize)
{
GetBitContext *gb = pContext->gb;
init_get_bits(gb, pBuffer, uSize << 3);
get_bits(gb, 32); /* DCA_XCH_MARKER */
get_bits(gb, 10); /* XCh Frame Size */
pHeader->XChChannelLayout = get_bits(gb, 4); /* Extension Channel Arrangement */
return 0;
}
/* XCh found inside a DTS-HD substream: no bits are actually parsed here, the
 * layout is simply forced to 1 (presumably meaning "one added channel" --
 * not derived from the stream; the bit reader is initialized but unused). */
static int parse_dts_xch_hd_header(DTSParserContext *pContext, DTSHeader *pHeader, const uint8_t *pBuffer,
                                   unsigned uSize)
{
    GetBitContext *gb = pContext->gb;
    init_get_bits(gb, pBuffer, uSize << 3);
    /* Huh? */
    pHeader->XChChannelLayout = 1;
    return 0;
}
/* Parse an XXCh extension header; the field meanings marked '?' below are
 * not fully reverse-engineered -- only the 2-bit added-channel count is used. */
static int parse_dts_xxch_header(DTSParserContext *pContext, DTSHeader *pHeader, const uint8_t *pBuffer, unsigned uSize)
{
    GetBitContext *gb = pContext->gb;
    init_get_bits(gb, pBuffer, uSize << 3);
    get_bits(gb, 32);                            /* DCA_XXCH_MARKER */
    get_bits(gb, 8);                             /* ? */
    pHeader->XChChannelLayout = get_bits(gb, 2); /* Channels Added? */
    get_bits(gb, 6);                             /* ? */
    return 0;
}
/* XXCh inside a DTS-HD substream. NOTE(review): currently byte-identical in
 * behavior to parse_dts_xxch_header; kept separate presumably in case the
 * HD variant diverges -- confirm before merging the two. */
static int parse_dts_xxch_hd_header(DTSParserContext *pContext, DTSHeader *pHeader, const uint8_t *pBuffer,
                                    unsigned uSize)
{
    GetBitContext *gb = pContext->gb;
    init_get_bits(gb, pBuffer, uSize << 3);
    get_bits(gb, 32);                            /* DCA_XXCH_MARKER */
    get_bits(gb, 8);                             /* ? */
    pHeader->XChChannelLayout = get_bits(gb, 2); /* Channels Added? */
    get_bits(gb, 6);                             /* ? */
    return 0;
}
static int parse_dts_hd_header(DTSParserContext *pContext, DTSHeader *pHeader, const uint8_t *pBuffer, unsigned uSize)
{
GetBitContext *gb = pContext->gb;
init_get_bits(gb, pBuffer, uSize << 3);
unsigned NumAudio = 1;
unsigned NumAssets = 1;
get_bits(gb, 32); /* DCA_HD_MARKER */
get_bits(gb, 8); /* Unknown */
unsigned SubIdx = get_bits(gb, 2); /* Substream Index */
unsigned blownUp = get_bits(gb, 1); /* Blown Up Header */
if (blownUp)
{
get_bits(gb, 12); /* Header Size */
get_bits(gb, 20); /* HD Size */
}
else
{
get_bits(gb, 8); /* Header Size */
get_bits(gb, 16); /* HD Size */
}
unsigned staticFields = get_bits(gb, 1); /* Static fields present */
if (staticFields)
{
std::vector<uint32_t> ActiveExSSMasks;
get_bits(gb, 2); /* Reference clock code */
get_bits(gb, 3); /* ExSS frame duration code */
if (get_bits(gb, 1)) /* Timestamp flag */
get_bits(gb, 36); /* Timestamp */
NumAudio = get_bits(gb, 3) + 1; /* Num audio present */
NumAssets = get_bits(gb, 3) + 1; /* Num assets */
for (uint8_t Pos = 0; Pos < NumAudio; Pos++)
{
uint32_t mask = get_bits(gb, SubIdx + 1); /* Active ExSS masks */
ActiveExSSMasks.push_back(mask);
}
for (uint8_t Pos = 0; Pos < NumAudio; Pos++)
for (uint8_t Pos2 = 0; Pos2 < SubIdx + 1; Pos2 += 2)
if (ActiveExSSMasks[Pos] % 2)
get_bits(gb, 8); /* Active ExSS masks 2 */
if (get_bits(gb, 1))
{ /* Mix Metadata Flag */
get_bits(gb, 2); /* Mix Metadata Adjustment Level */
unsigned bits = get_bits(gb, 2); /* Bits4Mix Mask */
bits = 4 + bits * 4;
unsigned num = get_bits(gb, 2) + 1; /* Num Configs */
for (uint8_t Pos = 0; Pos < num; Pos++)
get_bits(gb, bits); /* Mix Out Ch Mask */
}
} /* END: Static Fields */
// Sizes
for (uint8_t Pos = 0; Pos < NumAssets; Pos++)
{
if (blownUp)
{
get_bits(gb, 20); /* Size */
}
else
{
get_bits(gb, 16); /* Size */
}
}
for (uint8_t Pos = 0; Pos < NumAssets; Pos++)
{
get_bits(gb, 9); /* Asset Size */
get_bits(gb, 3); /* Asset Descriptor Data */
if (staticFields)
{
if (get_bits(gb, 1)) /* Asset type descriptor present */
get_bits(gb, 4); /* Asset type descriptor */
if (get_bits(gb, 1)) /* Language descriptor present */
get_bits(gb, 24); /* Language descriptor */
if (get_bits(gb, 1))
{ /* Info text present */
unsigned bytes = get_bits(gb, 10) + 1; /* Info text size */
for (unsigned Pos = 0; Pos < bytes; Pos++)
get_bits(gb, 8); /* Info Text */
}
get_bits(gb, 5); /* Bit Resolution */
get_bits(gb, 4); /* Maximum Sample Rate */
pHeader->HDTotalChannels = get_bits(gb, 8) + 1; /* Total Number of Channels */
if (get_bits(gb, 1))
{ /* 1 to 1 speaker map present */
unsigned SpeakerActivityMaskBits = 0, SpeakerRemapSetsCount;
if (pHeader->HDTotalChannels > 2)
get_bits(gb, 1); /* Embedded stereo flag */
if (pHeader->HDTotalChannels > 6)
get_bits(gb, 1); /* Embedded 6 channels flag */
if (get_bits(gb, 1))
{ /* Speaker Mask Flag */
SpeakerActivityMaskBits = get_bits(gb, 2); /* Speaker mask bits */
SpeakerActivityMaskBits = 4 + SpeakerActivityMaskBits * 4;
pHeader->HDSpeakerMask = get_bits(gb, SpeakerActivityMaskBits); /* Speaker activity Mask */
}
SpeakerRemapSetsCount = get_bits(gb, 3); /* Speaker remap sets count */
for (uint8_t Pos = 0; Pos < SpeakerRemapSetsCount; Pos++)
get_bits(gb, SpeakerActivityMaskBits); /* Standard Speaker Layout mask */
/* unfinished
for (uint8_t Pos = 0; Pos < SpeakerRemapSetsCount; Pos++)
get_bits(gb, 5); /* NumDecCh4Remap
*/
}
}
}
return 0;
}
/* Parse a complete DTS frame header into pHeader.
 * Stages: (1) convert/parse the core header (if any), (2) locate and parse a
 * DTS-HD extension substream, (3) parse XCh/XXCh extensions inside the core.
 * Returns 0 on success, -1 on invalid arguments or a frame size larger than
 * the supplied buffer. pHeader fields for absent parts are left untouched. */
int parse_dts_header(DTSParserContext *pContext, DTSHeader *pHeader, uint8_t *pBuffer, unsigned uSize)
{
    if (!pContext)
        return -1;
    if (!pHeader)
        return -1;
    unsigned ExtDescriptor = 0, ExtCoding = 0;
    // Normalize the (possibly 14-bit packed / little-endian) core header into
    // 16-bit big-endian; a negative return means no valid core was found.
    uint8_t dts_buffer[32 + AV_INPUT_BUFFER_PADDING_SIZE] = {0};
    int ret = avpriv_dca_convert_bitstream(pBuffer, uSize, dts_buffer, 32);
    bool is16be = (AV_RB32(pBuffer) == DCA_MARKER_RAW_BE);
    /* Parse Core Header */
    if (ret >= 0)
    {
        pHeader->HasCore = 1;
        GetBitContext *gb = pContext->gb;
        init_get_bits(gb, dts_buffer, 32 << 3);
        skip_bits_long(gb, 32);                         /* Sync code */
        skip_bits1(gb);                                 /* Frame type */
        pHeader->SamplesPerBlock = get_bits(gb, 5) + 1; /* Samples deficit */
        pHeader->CRCPresent = get_bits1(gb);            /* CRC present */
        pHeader->Blocks = get_bits(gb, 7) + 1;          /* Number of Blocks */
        pHeader->FrameSize = get_bits(gb, 14) + 1;      /* Primary (core) Frame Size */
        pHeader->ChannelLayout = get_bits(gb, 6);       /* Channel configuration */
        unsigned sample_index = get_bits(gb, 4);        /* Sample frequency index */
        pHeader->SampleRate = ff_dca_sample_rates[sample_index];
        skip_bits(gb, 5);                 /* Bitrate index */
        skip_bits1(gb);                   /* Down mix */
        skip_bits1(gb);                   /* Dynamic range */
        skip_bits1(gb);                   /* Time stamp */
        skip_bits1(gb);                   /* Auxiliary data */
        skip_bits1(gb);                   /* HDCD */
        ExtDescriptor = get_bits(gb, 3);  /* External descriptor */
        ExtCoding = get_bits1(gb);        /* Extended coding */
        skip_bits1(gb);                   /* ASPF */
        pHeader->LFE = get_bits(gb, 2);   /* LFE */
        skip_bits1(gb);                   /* Predictor History */
        if (pHeader->CRCPresent)
            skip_bits(gb, 16); /* CRC */
        skip_bits1(gb);               /* Multirate Interpolator */
        skip_bits(gb, 4);             /* Encoder Software Revision */
        skip_bits(gb, 2);             /* Copy history */
        pHeader->ES = get_bits1(gb);  /* ES */
        skip_bits(gb, 2);             /* PCMR (source PCM resolution) */
        skip_bits1(gb);               /* SUMF (Front Sum/Difference Flag) */
        skip_bits1(gb);               /* SUMS (Surround Sum/Difference Flag) */
        skip_bits(gb, 4);             /* Dialog Normalization Parameter or Unspecified (dependent on encoder version) */
        // Check some basic validity
        if (uSize < pHeader->FrameSize)
            return -1;
    }
    else
    {
        pHeader->HasCore = 0;
    }
    // Extensions only occur in plain 16-bit big-endian streams; for any other
    // packing a valid core is all that is parsed.
    if (pHeader->HasCore && !is16be)
        return 0;
    // DTS-HD parsing
    const uint8_t *pHD = nullptr;
    if (pHeader->HasCore)
    { // If we have a core, only search after the normal buffer
        if (uSize > (pHeader->FrameSize + 4))
        { // at least 4 bytes extra, could probably insert a minimal size of a HD header, but so what
            pHD = find_marker32_position(pBuffer + pHeader->FrameSize, uSize - pHeader->FrameSize, DCA_HD_MARKER);
        }
    }
    else
    {
        pHD = find_marker32_position(pBuffer, uSize, DCA_HD_MARKER);
    }
    if (pHD)
    {
        pHeader->IsHD = 1;
        size_t remaining = uSize - (pHD - pBuffer);
        parse_dts_hd_header(pContext, pHeader, pHD, (unsigned)remaining);
        // XCh/XXCh markers inside the HD substream (shadow the outer
        // 'remaining' intentionally in each scope).
        const uint8_t *pXChHD = find_marker32_position(pHD, remaining, DCA_XCH_MARKER);
        if (pXChHD)
        {
            size_t remaining = uSize - (pXChHD - pBuffer);
            parse_dts_xch_hd_header(pContext, pHeader, pXChHD, (unsigned)remaining);
        }
        const uint8_t *pXXChHD = find_marker32_position(pHD, remaining, DCA_XXCH_MARKER);
        if (pXXChHD)
        {
            size_t remaining = uSize - (pXXChHD - pBuffer);
            parse_dts_xxch_hd_header(pContext, pHeader, pXXChHD, (unsigned)remaining);
        }
    }
    // Handle DTS extensions inside the core frame (before any HD substream).
    if (ExtCoding)
    {
        size_t coreSize = pHD ? (pHD - pBuffer) : uSize;
        // Descriptor 0/3 indicates XCh, 6 indicates XXCh.
        if (ExtDescriptor == 0 || ExtDescriptor == 3)
        {
            const uint8_t *pXCh = find_marker32_position(pBuffer, coreSize, DCA_XCH_MARKER);
            if (pXCh)
            {
                size_t remaining = coreSize - (pXCh - pBuffer);
                parse_dts_xch_header(pContext, pHeader, pXCh, (unsigned)remaining);
            }
        }
        if (ExtDescriptor == 6)
        {
            const uint8_t *pXXCh = find_marker32_position(pBuffer, coreSize, DCA_XXCH_MARKER);
            if (pXXCh)
            {
                size_t remaining = coreSize - (pXXCh - pBuffer);
                parse_dts_xxch_header(pContext, pHeader, pXXCh, (unsigned)remaining);
            }
        }
    }
    return 0;
}
| 13,523
|
C++
|
.cpp
| 317
| 34.195584
| 120
| 0.535258
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| true
| true
| false
|
22,151
|
VideoSettingsProp.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/VideoSettingsProp.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "VideoSettingsProp.h"
#include "Media.h"
#include <Commctrl.h>
#include "resource.h"
#include "version.h"
// Construct the main settings property page; the dialog template
// (IDD_PROPPAGE_VIDEO_SETTINGS) and tab title (IDS_SETTINGS) are handed
// to the shared CBaseDSPropPage base class.
CLAVVideoSettingsProp::CLAVVideoSettingsProp(LPUNKNOWN pUnk, HRESULT *phr)
    : CBaseDSPropPage(NAME("LAVVideoProp"), pUnk, IDD_PROPPAGE_VIDEO_SETTINGS, IDS_SETTINGS)
{
}
// Nothing to clean up here; the filter interfaces are released in OnDisconnect().
CLAVVideoSettingsProp::~CLAVVideoSettingsProp()
{
}
// Called when the property page is attached to the filter: query the
// settings and status interfaces we operate on. Returns E_POINTER for a
// null filter pointer, or the failing QueryInterface HRESULT.
HRESULT CLAVVideoSettingsProp::OnConnect(IUnknown *pUnk)
{
    if (!pUnk)
        return E_POINTER;

    ASSERT(m_pVideoSettings == nullptr);

    HRESULT hr = pUnk->QueryInterface(&m_pVideoSettings);
    if (SUCCEEDED(hr))
        hr = pUnk->QueryInterface(&m_pVideoStatus);

    return FAILED(hr) ? hr : S_OK;
}
// Release the interfaces acquired in OnConnect(); SafeRelease also nulls
// the member pointers so a later OnConnect() can assert on a clean state.
HRESULT CLAVVideoSettingsProp::OnDisconnect()
{
    SafeRelease(&m_pVideoSettings);
    SafeRelease(&m_pVideoStatus);
    return S_OK;
}
// "Apply" handler: read every control on the dialog and push its state into
// the filter through ILAVVideoSettings. Finishes with LoadData() so the
// cached members (used for dirty tracking in OnReceiveMessage) match the
// freshly applied state.
HRESULT CLAVVideoSettingsProp::OnApplyChanges()
{
    ASSERT(m_pVideoSettings != nullptr);
    ASSERT(m_pVideoStatus != nullptr);
    HRESULT hr = S_OK;
    BOOL bFlag;
    DWORD dwVal;

    // Stream aspect ratio: tri-state checkbox, passed through as-is.
    dwVal = (DWORD)SendDlgItemMessage(m_Dlg, IDC_STREAMAR, BM_GETCHECK, 0, 0);
    m_pVideoSettings->SetStreamAR(dwVal);

    // Thread count: combo index 0 is "Auto", 1..32 map directly to thread counts.
    dwVal = (DWORD)SendDlgItemMessage(m_Dlg, IDC_THREADS, CB_GETCURSEL, 0, 0);
    m_pVideoSettings->SetNumThreads(dwVal);

    // Deinterlacing field order / mode: combo index maps directly onto the enums.
    dwVal = (DWORD)SendDlgItemMessage(m_Dlg, IDC_DEINT_FIELDORDER, CB_GETCURSEL, 0, 0);
    m_pVideoSettings->SetDeintFieldOrder((LAVDeintFieldOrder)dwVal);
    dwVal = (DWORD)SendDlgItemMessage(m_Dlg, IDC_DEINT_MODE, CB_GETCURSEL, 0, 0);
    m_pVideoSettings->SetDeinterlacingMode((LAVDeintMode)dwVal);

    // Output pixel format checkboxes, one per LAVOutPixFmt entry.
    m_bPixFmts[LAVOutPixFmt_YV12] = (BOOL)SendDlgItemMessage(m_Dlg, IDC_OUT_YV12, BM_GETCHECK, 0, 0);
    m_bPixFmts[LAVOutPixFmt_NV12] = (BOOL)SendDlgItemMessage(m_Dlg, IDC_OUT_NV12, BM_GETCHECK, 0, 0);
    m_bPixFmts[LAVOutPixFmt_P010] = (BOOL)SendDlgItemMessage(m_Dlg, IDC_OUT_P010, BM_GETCHECK, 0, 0);
    m_bPixFmts[LAVOutPixFmt_P016] = (BOOL)SendDlgItemMessage(m_Dlg, IDC_OUT_P016, BM_GETCHECK, 0, 0);
    m_bPixFmts[LAVOutPixFmt_YUY2] = (BOOL)SendDlgItemMessage(m_Dlg, IDC_OUT_YUY2, BM_GETCHECK, 0, 0);
    m_bPixFmts[LAVOutPixFmt_UYVY] = (BOOL)SendDlgItemMessage(m_Dlg, IDC_OUT_UYVY, BM_GETCHECK, 0, 0);
    m_bPixFmts[LAVOutPixFmt_P210] = (BOOL)SendDlgItemMessage(m_Dlg, IDC_OUT_P210, BM_GETCHECK, 0, 0);
    m_bPixFmts[LAVOutPixFmt_v210] = (BOOL)SendDlgItemMessage(m_Dlg, IDC_OUT_V210, BM_GETCHECK, 0, 0);
    m_bPixFmts[LAVOutPixFmt_P216] = (BOOL)SendDlgItemMessage(m_Dlg, IDC_OUT_P216, BM_GETCHECK, 0, 0);
    m_bPixFmts[LAVOutPixFmt_YV24] = (BOOL)SendDlgItemMessage(m_Dlg, IDC_OUT_YV24, BM_GETCHECK, 0, 0);
    m_bPixFmts[LAVOutPixFmt_AYUV] = (BOOL)SendDlgItemMessage(m_Dlg, IDC_OUT_AYUV, BM_GETCHECK, 0, 0);
    m_bPixFmts[LAVOutPixFmt_Y410] = (BOOL)SendDlgItemMessage(m_Dlg, IDC_OUT_Y410, BM_GETCHECK, 0, 0);
    m_bPixFmts[LAVOutPixFmt_v410] = (BOOL)SendDlgItemMessage(m_Dlg, IDC_OUT_V410, BM_GETCHECK, 0, 0);
    m_bPixFmts[LAVOutPixFmt_Y416] = (BOOL)SendDlgItemMessage(m_Dlg, IDC_OUT_Y416, BM_GETCHECK, 0, 0);
    m_bPixFmts[LAVOutPixFmt_RGB32] = (BOOL)SendDlgItemMessage(m_Dlg, IDC_OUT_RGB32, BM_GETCHECK, 0, 0);
    m_bPixFmts[LAVOutPixFmt_RGB24] = (BOOL)SendDlgItemMessage(m_Dlg, IDC_OUT_RGB24, BM_GETCHECK, 0, 0);
    m_bPixFmts[LAVOutPixFmt_RGB48] = (BOOL)SendDlgItemMessage(m_Dlg, IDC_OUT_RGB48, BM_GETCHECK, 0, 0);
    for (int i = 0; i < LAVOutPixFmt_NB; ++i)
    {
        m_pVideoSettings->SetPixelFormat((LAVOutPixFmts)i, m_bPixFmts[i]);
    }

    // RGB output range radio group: 0 = Auto, 1 = TV (16-235), 2 = PC (0-255).
    BOOL bRGBAuto = (BOOL)SendDlgItemMessage(m_Dlg, IDC_RGBOUT_AUTO, BM_GETCHECK, 0, 0);
    BOOL bRGBTV = (BOOL)SendDlgItemMessage(m_Dlg, IDC_RGBOUT_TV, BM_GETCHECK, 0, 0);
    BOOL bRGBPC = (BOOL)SendDlgItemMessage(m_Dlg, IDC_RGBOUT_PC, BM_GETCHECK, 0, 0);
    m_pVideoSettings->SetRGBOutputRange(bRGBAuto ? 0 : bRGBTV ? 1 : 2);

    // HW acceleration backend: the enum value is stored as per-item data,
    // so translate the selection index through CB_GETITEMDATA.
    LRESULT retVal = SendDlgItemMessage(m_Dlg, IDC_HWACCEL, CB_GETCURSEL, 0, 0);
    dwVal = (DWORD)SendDlgItemMessage(m_Dlg, IDC_HWACCEL, CB_GETITEMDATA, retVal, 0);
    m_pVideoSettings->SetHWAccel((LAVHWAccel)dwVal);

    // Per-codec HW acceleration toggles.
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_HWACCEL_H264, BM_GETCHECK, 0, 0);
    m_pVideoSettings->SetHWAccelCodec(HWCodec_H264, bFlag);
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_HWACCEL_VC1, BM_GETCHECK, 0, 0);
    m_pVideoSettings->SetHWAccelCodec(HWCodec_VC1, bFlag);
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_HWACCEL_MPEG2, BM_GETCHECK, 0, 0);
    m_pVideoSettings->SetHWAccelCodec(HWCodec_MPEG2, bFlag);
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_HWACCEL_MPEG4, BM_GETCHECK, 0, 0);
    m_pVideoSettings->SetHWAccelCodec(HWCodec_MPEG4, bFlag);
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_HWACCEL_MPEG2_DVD, BM_GETCHECK, 0, 0);
    m_pVideoSettings->SetHWAccelCodec(HWCodec_MPEG2DVD, bFlag);
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_HWACCEL_HEVC, BM_GETCHECK, 0, 0);
    m_pVideoSettings->SetHWAccelCodec(HWCodec_HEVC, bFlag);
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_HWACCEL_VP9, BM_GETCHECK, 0, 0);
    m_pVideoSettings->SetHWAccelCodec(HWCodec_VP9, bFlag);
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_HWACCEL_AV1, BM_GETCHECK, 0, 0);
    m_pVideoSettings->SetHWAccelCodec(HWCodec_AV1, bFlag);
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_HWACCEL_H264MVC, BM_GETCHECK, 0, 0);
    m_pVideoSettings->SetHWAccelCodec(HWCodec_H264MVC, bFlag);

    // Resolution classes for which HW decoding is allowed, combined into a flag mask.
    DWORD dwHWResFlags = 0;
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_HWRES_SD, BM_GETCHECK, 0, 0);
    if (bFlag)
        dwHWResFlags |= LAVHWResFlag_SD;
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_HWRES_HD, BM_GETCHECK, 0, 0);
    if (bFlag)
        dwHWResFlags |= LAVHWResFlag_HD;
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_HWRES_UHD, BM_GETCHECK, 0, 0);
    if (bFlag)
        dwHWResFlags |= LAVHWResFlag_UHD;
    m_pVideoSettings->SetHWAccelResolutionFlags(dwHWResFlags);

    // Device selection: combo item 0 is "Automatic" and maps to the sentinel
    // LAVHWACCEL_DEVICE_DEFAULT; real devices are shifted down by one.
    dwVal = (DWORD)SendDlgItemMessage(m_Dlg, IDC_HWACCEL_DEVICE_SELECT, CB_GETCURSEL, 0, 0);
    if (dwVal == 0)
        dwVal = LAVHWACCEL_DEVICE_DEFAULT;
    else
        dwVal--;
    m_pVideoSettings->SetHWAccelDeviceIndex(m_pVideoSettings->GetHWAccel(), dwVal, 0);

    BOOL bHWAccelCUVIDDXVA = (BOOL)SendDlgItemMessage(m_Dlg, IDC_HWACCEL_CUVID_DXVA, BM_GETCHECK, 0, 0);
    m_pVideoSettings->SetHWAccelDeintHQ(bHWAccelCUVIDDXVA);

    // Hardware deinterlacing: checkbox toggles between HW deint and plain weave.
    BOOL bHWDeint = (BOOL)SendDlgItemMessage(m_Dlg, IDC_HWDEINT_ENABLE, BM_GETCHECK, 0, 0);
    m_pVideoSettings->SetHWAccelDeintMode(bHWDeint ? HWDeintMode_Hardware : HWDeintMode_Weave);

    // HW deint output cadence: "Film" = one frame per field pair (25/30fps),
    // otherwise "Video" = one frame per field (50/60fps).
    BOOL bFilm = (BOOL)SendDlgItemMessage(m_Dlg, IDC_HWDEINT_OUT_FILM, BM_GETCHECK, 0, 0);
    // BOOL bVideo = (BOOL)SendDlgItemMessage(m_Dlg, IDC_HWDEINT_OUT_VIDEO, BM_GETCHECK, 0, 0);
    m_pVideoSettings->SetHWAccelDeintOutput(bFilm ? DeintOutput_FramePer2Field : DeintOutput_FramePerField);

    // Software deinterlacing mode and output cadence (same Film/Video semantics).
    dwVal = (DWORD)SendDlgItemMessage(m_Dlg, IDC_SWDEINT_MODE, CB_GETCURSEL, 0, 0);
    m_pVideoSettings->SetSWDeintMode((LAVSWDeintModes)dwVal);
    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_SWDEINT_OUT_FILM, BM_GETCHECK, 0, 0);
    // BOOL bVideo = (BOOL)SendDlgItemMessage(m_Dlg, IDC_SWDEINT_OUT_VIDEO, BM_GETCHECK, 0, 0);
    m_pVideoSettings->SetSWDeintOutput(bFlag ? DeintOutput_FramePer2Field : DeintOutput_FramePerField);

    // Dithering radio group.
    BOOL bOrdered = (BOOL)SendDlgItemMessage(m_Dlg, IDC_DITHER_ORDERED, BM_GETCHECK, 0, 0);
    BOOL bRandom = (BOOL)SendDlgItemMessage(m_Dlg, IDC_DITHER_RANDOM, BM_GETCHECK, 0, 0);
    m_pVideoSettings->SetDitherMode(bOrdered ? LAVDither_Ordered : LAVDither_Random);

    bFlag = (BOOL)SendDlgItemMessage(m_Dlg, IDC_TRAYICON, BM_GETCHECK, 0, 0);
    m_pVideoSettings->SetTrayIcon(bFlag);

    // Refresh the cached baseline used by the dirty-tracking logic.
    LoadData();
    return hr;
}
// Append a string to a dialog combo box and attach an integer payload to
// the newly created item. Silently does nothing if CB_ADDSTRING fails.
static void AddComboBoxEntryWithData(HWND hwnd, int nIDDlgItem, WCHAR *pszText, UINT Data)
{
    const LRESULT idx = SendDlgItemMessage(hwnd, nIDDlgItem, CB_ADDSTRING, 0, (LPARAM)pszText);
    if (idx >= 0)
        SendDlgItemMessage(hwnd, nIDDlgItem, CB_SETITEMDATA, idx, Data);
}
// Select the first combo box item whose item data equals the given value.
// Leaves the selection untouched when no item matches.
static void SelectComboBoxItemByValue(HWND hwnd, int nIDDlgItem, UINT Data)
{
    const LRESULT nItems = SendDlgItemMessage(hwnd, nIDDlgItem, CB_GETCOUNT, 0, 0);
    for (LRESULT idx = 0; idx < nItems; idx++)
    {
        if (SendDlgItemMessage(hwnd, nIDDlgItem, CB_GETITEMDATA, idx, 0) == Data)
        {
            SendDlgItemMessage(hwnd, nIDDlgItem, CB_SETCURSEL, idx, 0);
            return;
        }
    }
}
// Dialog initialization: populate every combo box, install the tooltip
// hints, load the current settings via LoadData() and mirror them into the
// controls, then show live decoder/device status from ILAVVideoStatus.
HRESULT CLAVVideoSettingsProp::OnActivate()
{
    HRESULT hr = S_OK;
    INITCOMMONCONTROLSEX icc;
    icc.dwSize = sizeof(INITCOMMONCONTROLSEX);
    icc.dwICC = ICC_BAR_CLASSES | ICC_STANDARD_CLASSES;
    if (InitCommonControlsEx(&icc) == FALSE)
    {
        return E_FAIL;
    }
    ASSERT(m_pVideoSettings != nullptr);

    // Footer shows name + version of the build.
    const WCHAR *version = TEXT(LAV_VIDEO) L" " TEXT(LAV_VERSION_STR);
    SendDlgItemMessage(m_Dlg, IDC_LAVVIDEO_FOOTER, WM_SETTEXT, 0, (LPARAM)version);

    WCHAR stringBuffer[512] = L"Auto";

    // Thread-count combo: index 0 = "Auto", then 1..32 literal thread counts
    // (index therefore equals the thread count, matching OnApplyChanges).
    SendDlgItemMessage(m_Dlg, IDC_THREADS, CB_RESETCONTENT, 0, 0);
    SendDlgItemMessage(m_Dlg, IDC_THREADS, CB_ADDSTRING, 0, (LPARAM)stringBuffer);
    for (unsigned i = 1; i <= 32; ++i)
    {
        swprintf_s(stringBuffer, L"%d", i);
        SendDlgItemMessage(m_Dlg, IDC_THREADS, CB_ADDSTRING, 0, (LPARAM)stringBuffer);
    }
    addHint(IDC_THREADS, L"Enable Multi-Threading for codecs that support it.\nAuto will automatically use the maximum "
                         L"number of threads suitable for your CPU. Using 1 thread disables multi-threading.\n\nMT "
                         L"decoding is supported for H264, MPEG2, MPEG4, VP8, VP3/Theora, DV and HuffYUV");
    addHint(IDC_STREAMAR, L"Checked - Stream AR will be used.\nUnchecked - Frame AR will not be used.\nIndeterminate "
                          L"(Auto) - Stream AR will not be used on files with a container AR (recommended).");

    // HW acceleration combo: display strings with the LAVHWAccel enum value
    // stored as item data, so display order is independent of enum order.
    WCHAR hwAccelNone[] = L"None";
    WCHAR hwAccelCUDA[] = L"NVIDIA CUVID (old)";
    WCHAR hwAccelQuickSync[] = L"Intel\xae QuickSync (old)";
    WCHAR hwAccelDXVA2CB[] = L"DXVA2 (copy-back)";
    WCHAR hwAccelDXVA2N[] = L"DXVA2 (native)";
    WCHAR hwAccelD3D11[] = L"D3D11";
    AddComboBoxEntryWithData(m_Dlg, IDC_HWACCEL, hwAccelNone, HWAccel_None);
    AddComboBoxEntryWithData(m_Dlg, IDC_HWACCEL, hwAccelDXVA2CB, HWAccel_DXVA2CopyBack);
    AddComboBoxEntryWithData(m_Dlg, IDC_HWACCEL, hwAccelDXVA2N, HWAccel_DXVA2Native);
    AddComboBoxEntryWithData(m_Dlg, IDC_HWACCEL, hwAccelD3D11, HWAccel_D3D11);
    AddComboBoxEntryWithData(m_Dlg, IDC_HWACCEL, hwAccelCUDA, HWAccel_CUDA);
    AddComboBoxEntryWithData(m_Dlg, IDC_HWACCEL, hwAccelQuickSync, HWAccel_QuickSync);

    // Field-order combo (index maps onto LAVDeintFieldOrder).
    SendDlgItemMessage(m_Dlg, IDC_DEINT_FIELDORDER, CB_RESETCONTENT, 0, 0);
    WideStringFromResource(stringBuffer, IDS_FIELDORDER_AUTO);
    SendDlgItemMessage(m_Dlg, IDC_DEINT_FIELDORDER, CB_ADDSTRING, 0, (LPARAM)stringBuffer);
    WideStringFromResource(stringBuffer, IDS_FIELDORDER_TOP);
    SendDlgItemMessage(m_Dlg, IDC_DEINT_FIELDORDER, CB_ADDSTRING, 0, (LPARAM)stringBuffer);
    WideStringFromResource(stringBuffer, IDS_FIELDORDER_BOTTOM);
    SendDlgItemMessage(m_Dlg, IDC_DEINT_FIELDORDER, CB_ADDSTRING, 0, (LPARAM)stringBuffer);

    // Deinterlacing mode combo (index maps onto LAVDeintMode).
    SendDlgItemMessage(m_Dlg, IDC_DEINT_MODE, CB_RESETCONTENT, 0, 0);
    WideStringFromResource(stringBuffer, IDS_DEINTMODE_AUTO);
    SendDlgItemMessage(m_Dlg, IDC_DEINT_MODE, CB_ADDSTRING, 0, (LPARAM)stringBuffer);
    WideStringFromResource(stringBuffer, IDS_DEINTMODE_AGGRESSIVE);
    SendDlgItemMessage(m_Dlg, IDC_DEINT_MODE, CB_ADDSTRING, 0, (LPARAM)stringBuffer);
    WideStringFromResource(stringBuffer, IDS_DEINTMODE_FORCE);
    SendDlgItemMessage(m_Dlg, IDC_DEINT_MODE, CB_ADDSTRING, 0, (LPARAM)stringBuffer);
    WideStringFromResource(stringBuffer, IDS_DEINTMODE_DISABLE);
    SendDlgItemMessage(m_Dlg, IDC_DEINT_MODE, CB_ADDSTRING, 0, (LPARAM)stringBuffer);

    // Software deinterlacer combo (index maps onto LAVSWDeintModes).
    WCHAR swdeintNone[] = L"No Software Deinterlacing";
    WCHAR swdeintYADIF[] = L"YADIF";
    WCHAR swdeintW3FDIFS[] = L"Weston Three Field (Simple)";
    WCHAR swdeintW3FDIFC[] = L"Weston Three Field (Complex)";
    WCHAR swdeintBWDIF[] = L"BobWeaver (bwdif)";
    SendDlgItemMessage(m_Dlg, IDC_SWDEINT_MODE, CB_ADDSTRING, 0, (LPARAM)swdeintNone);
    SendDlgItemMessage(m_Dlg, IDC_SWDEINT_MODE, CB_ADDSTRING, 0, (LPARAM)swdeintYADIF);
    SendDlgItemMessage(m_Dlg, IDC_SWDEINT_MODE, CB_ADDSTRING, 0, (LPARAM)swdeintW3FDIFS);
    SendDlgItemMessage(m_Dlg, IDC_SWDEINT_MODE, CB_ADDSTRING, 0, (LPARAM)swdeintW3FDIFC);
    SendDlgItemMessage(m_Dlg, IDC_SWDEINT_MODE, CB_ADDSTRING, 0, (LPARAM)swdeintBWDIF);

    // Tooltip hints for the more obscure options.
    addHint(IDC_HWACCEL_MPEG4, L"EXPERIMENTAL! The MPEG4-ASP decoder is known to be unstable! Use at your own peril!");
    addHint(IDC_HWACCEL_H264MVC, L"Intel GPU only.\nMVC acceleration is not supported on other graphics cards.");
    addHint(IDC_HWACCEL_CUVID_DXVA, L"Enable DXVA video processing for CUVID decoding, enables hybrid decoding and can "
                                    L"affect deinterlacing quality.\n\nNote: Using DXVA2-CopyBack is recommended for "
                                    L"hybrid decoding instead of using CUVID in DXVA mode.");
    addHint(IDC_HWRES_SD, L"Use Hardware Decoding for Standard-definition content (DVD, SDTV)\n\nThis affects all "
                          L"videos with a resolution less than 1024x576 (DVD resolution)");
    addHint(IDC_HWRES_HD, L"Use Hardware Decoding for High-definition content (Blu-ray, HDTV)\n\nAffects all videos "
                          L"above SD resolution, up to Full-HD, 1920x1200");
    addHint(IDC_HWRES_UHD,
            L"Use Hardware Decoding for Ultra-high-definition content (4K, UHDTV)\n\nAffects all videos above HD "
            L"resolution. Note that not all hardware supports decoding 4K/UHD content. On AMD GPUs, 4K support is very "
            L"fragile, and may even cause crashes or BSODs, use at your own risk.");
    addHint(IDC_DEINT_MODE,
            L"Controls how interlaced material is handled.\n\nAuto: Frame flags are used to determine content "
            L"type.\nAggressive: All frames in an interlaced streams are handled interlaced.\nForce: All frames are "
            L"handles as interlaced.\nDisabled: All frames are handled as progressive.");
    addHint(IDC_HWDEINT_OUT_FILM, L"Deinterlace in \"Film\" Mode.\nFor every pair of interlaced fields, one frame will "
                                  L"be created, resulting in 25/30 fps.");
    addHint(IDC_HWDEINT_OUT_VIDEO, L"Deinterlace in \"Video\" Mode. (Recommended)\nFor every interlaced field, one "
                                   L"frame will be created, resulting in 50/60 fps.");
    addHint(IDC_DITHER_ORDERED, L"Ordered Dithering uses a static pattern, resulting in very smooth and regular "
                                L"pattern. However, in some cases the regular pattern can be visible and distracting.");
    addHint(IDC_DITHER_RANDOM,
            L"Random Dithering uses random noise to dither the video frames. This has the advantage of not creating "
            L"any visible pattern, at the downside of increasing the noise floor slightly.");

    // Pull the current settings into the cached members, then mirror them
    // into the controls. The cached values also serve as the dirty baseline.
    hr = LoadData();
    if (SUCCEEDED(hr))
    {
        SendDlgItemMessage(m_Dlg, IDC_THREADS, CB_SETCURSEL, m_dwNumThreads, 0);
        SendDlgItemMessage(m_Dlg, IDC_STREAMAR, BM_SETCHECK, m_StreamAR, 0);
        SendDlgItemMessage(m_Dlg, IDC_DEINT_FIELDORDER, CB_SETCURSEL, m_DeintFieldOrder, 0);
        SendDlgItemMessage(m_Dlg, IDC_DEINT_MODE, CB_SETCURSEL, m_DeintMode, 0);
        SendDlgItemMessage(m_Dlg, IDC_OUT_YV12, BM_SETCHECK, m_bPixFmts[LAVOutPixFmt_YV12], 0);
        SendDlgItemMessage(m_Dlg, IDC_OUT_NV12, BM_SETCHECK, m_bPixFmts[LAVOutPixFmt_NV12], 0);
        SendDlgItemMessage(m_Dlg, IDC_OUT_P010, BM_SETCHECK, m_bPixFmts[LAVOutPixFmt_P010], 0);
        SendDlgItemMessage(m_Dlg, IDC_OUT_P016, BM_SETCHECK, m_bPixFmts[LAVOutPixFmt_P016], 0);
        SendDlgItemMessage(m_Dlg, IDC_OUT_YUY2, BM_SETCHECK, m_bPixFmts[LAVOutPixFmt_YUY2], 0);
        SendDlgItemMessage(m_Dlg, IDC_OUT_UYVY, BM_SETCHECK, m_bPixFmts[LAVOutPixFmt_UYVY], 0);
        SendDlgItemMessage(m_Dlg, IDC_OUT_P210, BM_SETCHECK, m_bPixFmts[LAVOutPixFmt_P210], 0);
        SendDlgItemMessage(m_Dlg, IDC_OUT_V210, BM_SETCHECK, m_bPixFmts[LAVOutPixFmt_v210], 0);
        SendDlgItemMessage(m_Dlg, IDC_OUT_P216, BM_SETCHECK, m_bPixFmts[LAVOutPixFmt_P216], 0);
        SendDlgItemMessage(m_Dlg, IDC_OUT_YV24, BM_SETCHECK, m_bPixFmts[LAVOutPixFmt_YV24], 0);
        SendDlgItemMessage(m_Dlg, IDC_OUT_AYUV, BM_SETCHECK, m_bPixFmts[LAVOutPixFmt_AYUV], 0);
        SendDlgItemMessage(m_Dlg, IDC_OUT_Y410, BM_SETCHECK, m_bPixFmts[LAVOutPixFmt_Y410], 0);
        SendDlgItemMessage(m_Dlg, IDC_OUT_V410, BM_SETCHECK, m_bPixFmts[LAVOutPixFmt_v410], 0);
        SendDlgItemMessage(m_Dlg, IDC_OUT_Y416, BM_SETCHECK, m_bPixFmts[LAVOutPixFmt_Y416], 0);
        SendDlgItemMessage(m_Dlg, IDC_OUT_RGB32, BM_SETCHECK, m_bPixFmts[LAVOutPixFmt_RGB32], 0);
        SendDlgItemMessage(m_Dlg, IDC_OUT_RGB24, BM_SETCHECK, m_bPixFmts[LAVOutPixFmt_RGB24], 0);
        SendDlgItemMessage(m_Dlg, IDC_OUT_RGB48, BM_SETCHECK, m_bPixFmts[LAVOutPixFmt_RGB48], 0);
        SendDlgItemMessage(m_Dlg, IDC_RGBOUT_AUTO, BM_SETCHECK, (m_dwRGBOutput == 0), 0);
        SendDlgItemMessage(m_Dlg, IDC_RGBOUT_TV, BM_SETCHECK, (m_dwRGBOutput == 1), 0);
        SendDlgItemMessage(m_Dlg, IDC_RGBOUT_PC, BM_SETCHECK, (m_dwRGBOutput == 2), 0);
        SelectComboBoxItemByValue(m_Dlg, IDC_HWACCEL, m_HWAccel);
        SendDlgItemMessage(m_Dlg, IDC_HWACCEL_H264, BM_SETCHECK, m_HWAccelCodecs[HWCodec_H264], 0);
        SendDlgItemMessage(m_Dlg, IDC_HWACCEL_VC1, BM_SETCHECK, m_HWAccelCodecs[HWCodec_VC1], 0);
        SendDlgItemMessage(m_Dlg, IDC_HWACCEL_MPEG2, BM_SETCHECK, m_HWAccelCodecs[HWCodec_MPEG2], 0);
        SendDlgItemMessage(m_Dlg, IDC_HWACCEL_MPEG4, BM_SETCHECK, m_HWAccelCodecs[HWCodec_MPEG4], 0);
        SendDlgItemMessage(m_Dlg, IDC_HWACCEL_MPEG2_DVD, BM_SETCHECK, m_HWAccelCodecs[HWCodec_MPEG2DVD], 0);
        SendDlgItemMessage(m_Dlg, IDC_HWACCEL_HEVC, BM_SETCHECK, m_HWAccelCodecs[HWCodec_HEVC], 0);
        SendDlgItemMessage(m_Dlg, IDC_HWACCEL_VP9, BM_SETCHECK, m_HWAccelCodecs[HWCodec_VP9], 0);
        SendDlgItemMessage(m_Dlg, IDC_HWACCEL_AV1, BM_SETCHECK, m_HWAccelCodecs[HWCodec_AV1], 0);
        SendDlgItemMessage(m_Dlg, IDC_HWACCEL_H264MVC, BM_SETCHECK, m_HWAccelCodecs[HWCodec_H264MVC], 0);
        SendDlgItemMessage(m_Dlg, IDC_HWRES_SD, BM_SETCHECK, !!(m_HWRes & LAVHWResFlag_SD), 0);
        SendDlgItemMessage(m_Dlg, IDC_HWRES_HD, BM_SETCHECK, !!(m_HWRes & LAVHWResFlag_HD), 0);
        SendDlgItemMessage(m_Dlg, IDC_HWRES_UHD, BM_SETCHECK, !!(m_HWRes & LAVHWResFlag_UHD), 0);
        SendDlgItemMessage(m_Dlg, IDC_HWACCEL_CUVID_DXVA, BM_SETCHECK, m_HWAccelCUVIDDXVA, 0);
        SendDlgItemMessage(m_Dlg, IDC_HWDEINT_ENABLE, BM_SETCHECK, (m_HWDeintAlgo == HWDeintMode_Hardware), 0);
        SendDlgItemMessage(m_Dlg, IDC_HWDEINT_OUT_FILM, BM_SETCHECK, (m_HWDeintOutMode == DeintOutput_FramePer2Field),
                           0);
        SendDlgItemMessage(m_Dlg, IDC_HWDEINT_OUT_VIDEO, BM_SETCHECK, (m_HWDeintOutMode == DeintOutput_FramePerField),
                           0);
        SendDlgItemMessage(m_Dlg, IDC_SWDEINT_MODE, CB_SETCURSEL, m_SWDeint, 0);
        SendDlgItemMessage(m_Dlg, IDC_SWDEINT_OUT_FILM, BM_SETCHECK, (m_SWDeintOutMode == DeintOutput_FramePer2Field),
                           0);
        SendDlgItemMessage(m_Dlg, IDC_SWDEINT_OUT_VIDEO, BM_SETCHECK, (m_SWDeintOutMode == DeintOutput_FramePerField),
                           0);
        SendDlgItemMessage(m_Dlg, IDC_DITHER_ORDERED, BM_SETCHECK, (m_DitherMode == LAVDither_Ordered), 0);
        SendDlgItemMessage(m_Dlg, IDC_DITHER_RANDOM, BM_SETCHECK, (m_DitherMode == LAVDither_Random), 0);
        SendDlgItemMessage(m_Dlg, IDC_TRAYICON, BM_SETCHECK, m_TrayIcon, 0);

        // Grey out controls that do not apply to the current selection.
        UpdateHWOptions();
        UpdateYADIFOptions();
    }

    // Live status: currently active decoder and hardware device.
    const WCHAR *decoder = m_pVideoStatus->GetActiveDecoderName();
    SendDlgItemMessage(m_Dlg, IDC_ACTIVE_DECODER, WM_SETTEXT, 0, (LPARAM)(decoder ? decoder : L"<inactive>"));
    BSTR bstrHWDevice = nullptr;
    if (SUCCEEDED(m_pVideoStatus->GetHWAccelActiveDevice(&bstrHWDevice)))
    {
        SendDlgItemMessage(m_Dlg, IDC_HWACCEL_DEVICE, WM_SETTEXT, 0, (LPARAM)bstrHWDevice);
        SysFreeString(bstrHWDevice);
    }
    else
    {
        SendDlgItemMessage(m_Dlg, IDC_HWACCEL_DEVICE, WM_SETTEXT, 0, (LPARAM)L"<none>");
    }
    return hr;
}
// Enable/disable the HW-acceleration sub-controls based on the selected
// backend and its availability, repopulate the device selection combo for
// that backend, and update the status/hint labels accordingly.
HRESULT CLAVVideoSettingsProp::UpdateHWOptions()
{
    // Resolve the selected backend through the combo's item data.
    LRESULT lValue = SendDlgItemMessage(m_Dlg, IDC_HWACCEL, CB_GETCURSEL, 0, 0);
    LAVHWAccel hwAccel = (LAVHWAccel)SendDlgItemMessage(m_Dlg, IDC_HWACCEL, CB_GETITEMDATA, lValue, 0);
    DWORD dwSupport = m_pVideoSettings->CheckHWAccelSupport(hwAccel);

    // Derived capability flags: which sub-options make sense for this backend.
    BOOL bEnabled = (hwAccel != HWAccel_None) && dwSupport;
    BOOL bHWDeint = bEnabled && (hwAccel == HWAccel_CUDA || hwAccel == HWAccel_QuickSync);
    BOOL bHWDeintEnabled = bHWDeint && (BOOL)SendDlgItemMessage(m_Dlg, IDC_HWDEINT_ENABLE, BM_GETCHECK, 0, 0);
    BOOL bCUDAOnly = bEnabled && (hwAccel == HWAccel_CUDA);
    // DVD decoding toggle only applies when MPEG2 HW decoding is on.
    BOOL bDVD = bEnabled && (BOOL)SendDlgItemMessage(m_Dlg, IDC_HWACCEL_MPEG2, BM_GETCHECK, 0, 0);
    BOOL bHEVC = bEnabled && (hwAccel != HWAccel_QuickSync);
    BOOL bVP9 = bEnabled && (hwAccel != HWAccel_QuickSync);
    BOOL bAV1 = bEnabled && (hwAccel != HWAccel_QuickSync) && (hwAccel != HWAccel_CUDA);

    // CUVID-DXVA hybrid checkbox is only relevant for CUDA on pre-Win10 systems.
    ShowWindow(GetDlgItem(m_Dlg, IDC_HWACCEL_CUVID_DXVA),
               (hwAccel == HWAccel_CUDA && !IsWindows10OrNewer()) ? SW_SHOW : SW_HIDE);
    EnableWindow(GetDlgItem(m_Dlg, IDC_HWACCEL_H264), bEnabled);
    EnableWindow(GetDlgItem(m_Dlg, IDC_HWACCEL_VC1), bEnabled);
    EnableWindow(GetDlgItem(m_Dlg, IDC_HWACCEL_MPEG2), bEnabled);
    EnableWindow(GetDlgItem(m_Dlg, IDC_HWACCEL_MPEG2_DVD), bDVD);
    EnableWindow(GetDlgItem(m_Dlg, IDC_HWACCEL_MPEG4), bCUDAOnly);
    EnableWindow(GetDlgItem(m_Dlg, IDC_HWACCEL_HEVC), bHEVC);
    EnableWindow(GetDlgItem(m_Dlg, IDC_HWACCEL_VP9), bVP9);
    EnableWindow(GetDlgItem(m_Dlg, IDC_HWACCEL_AV1), bAV1);
    EnableWindow(GetDlgItem(m_Dlg, IDC_HWRES_SD), bEnabled);
    EnableWindow(GetDlgItem(m_Dlg, IDC_HWRES_HD), bEnabled);
    EnableWindow(GetDlgItem(m_Dlg, IDC_HWRES_UHD), bEnabled);
    EnableWindow(GetDlgItem(m_Dlg, IDC_HWDEINT_ENABLE), bHWDeint);
    EnableWindow(GetDlgItem(m_Dlg, IDC_LBL_HWDEINT_MODE), bHWDeintEnabled);
    EnableWindow(GetDlgItem(m_Dlg, IDC_HWDEINT_OUT_FILM), bHWDeintEnabled);
    EnableWindow(GetDlgItem(m_Dlg, IDC_HWDEINT_OUT_VIDEO), bHWDeintEnabled);

    // Availability label: empty for "None", otherwise OK / N/A.
    WCHAR hwAccelEmpty[] = L"";
    WCHAR hwAccelUnavailable[] = L"N/A";
    WCHAR hwAccelAvailable[] = L"OK";
    SendDlgItemMessage(
        m_Dlg, IDC_HWACCEL_AVAIL, WM_SETTEXT, 0,
        (LPARAM)(hwAccel == HWAccel_None ? hwAccelEmpty : dwSupport == 0 ? hwAccelUnavailable : hwAccelAvailable));

    const WCHAR hwHintNoDeviceChoice[] = L"The selected Hardware Decoder does not support using a specific device.";
    const WCHAR hwHintDXVA2Display[] = L"DXVA2 requires an active display for GPUs to be available.\nNote that GPUs "
                                       L"are listed once for each connected display.";
    const WCHAR hwHintD3D11NotSupported[] = L"D3D11 requires Windows 8 or newer, and is not supported on this OS.";
    const WCHAR hwHintD3D11DeviceHint[] = L"Selecting a specific device for D3D11 disables Native mode and forces "
                                          L"Copy-Back, use Automatic for the best performance.";

    // Rebuild the device combo: entry 0 is always "Automatic", followed by
    // the devices this backend reports.
    SendDlgItemMessage(m_Dlg, IDC_HWACCEL_DEVICE_SELECT, CB_RESETCONTENT, 0, 0);
    SendDlgItemMessage(m_Dlg, IDC_HWACCEL_DEVICE_SELECT, CB_ADDSTRING, 0,
                       (hwAccel == HWAccel_D3D11) ? (LPARAM)L"Automatic (Native)" : (LPARAM)L"Automatic");
    DWORD dwnDevices = m_pVideoSettings->GetHWAccelNumDevices(hwAccel);
    for (DWORD dwDevice = 0; dwDevice < dwnDevices; dwDevice++)
    {
        BSTR bstrDeviceName = nullptr;
        HRESULT hr = m_pVideoSettings->GetHWAccelDeviceInfo(hwAccel, dwDevice, &bstrDeviceName, NULL);
        if (SUCCEEDED(hr))
        {
            SendDlgItemMessage(m_Dlg, IDC_HWACCEL_DEVICE_SELECT, CB_ADDSTRING, 0, (LPARAM)bstrDeviceName);
            SysFreeString(bstrDeviceName);
        }
    }
    // Select the current device and pick an appropriate hint text.
    if (hwAccel == HWAccel_D3D11 && !IsWindows8OrNewer())
    {
        m_HWDeviceIndex = 0;
        dwnDevices = 0;
        SendDlgItemMessage(m_Dlg, IDC_LBL_HWACCEL_DEVICE_HINT, WM_SETTEXT, 0, (LPARAM)hwHintD3D11NotSupported);
    }
    else if (dwnDevices == 0)
    {
        m_HWDeviceIndex = 0;
        SendDlgItemMessage(m_Dlg, IDC_LBL_HWACCEL_DEVICE_HINT, WM_SETTEXT, 0, (LPARAM)hwHintNoDeviceChoice);
    }
    else
    {
        // Combo index = stored device index + 1 ("Automatic" occupies slot 0;
        // LAVHWACCEL_DEVICE_DEFAULT maps back onto it).
        DWORD dwDeviceId = 0;
        m_HWDeviceIndex = m_pVideoSettings->GetHWAccelDeviceIndex(hwAccel, &dwDeviceId);
        if (m_HWDeviceIndex == LAVHWACCEL_DEVICE_DEFAULT)
            m_HWDeviceIndex = 0;
        else
            m_HWDeviceIndex++;
        if (hwAccel == HWAccel_DXVA2CopyBack)
            SendDlgItemMessage(m_Dlg, IDC_LBL_HWACCEL_DEVICE_HINT, WM_SETTEXT, 0, (LPARAM)hwHintDXVA2Display);
        else if (hwAccel == HWAccel_D3D11)
            SendDlgItemMessage(m_Dlg, IDC_LBL_HWACCEL_DEVICE_HINT, WM_SETTEXT, 0, (LPARAM)hwHintD3D11DeviceHint);
        else
            SendDlgItemMessage(m_Dlg, IDC_LBL_HWACCEL_DEVICE_HINT, WM_SETTEXT, 0, (LPARAM)L"");
    }
    EnableWindow(GetDlgItem(m_Dlg, IDC_LBL_HWACCEL_DEVICE_SELECT), (dwnDevices > 0));
    EnableWindow(GetDlgItem(m_Dlg, IDC_HWACCEL_DEVICE_SELECT), (dwnDevices > 0));
    SendDlgItemMessage(m_Dlg, IDC_HWACCEL_DEVICE_SELECT, CB_SETCURSEL, m_HWDeviceIndex, 0);
    return S_OK;
}
// Enable the software-deinterlacing sub-controls only when an actual
// SW deinterlacer (anything other than "None") is selected.
HRESULT CLAVVideoSettingsProp::UpdateYADIFOptions()
{
    const DWORD dwMode = (DWORD)SendDlgItemMessage(m_Dlg, IDC_SWDEINT_MODE, CB_GETCURSEL, 0, 0);
    const BOOL bActive = (dwMode != SWDeintMode_None);
    EnableWindow(GetDlgItem(m_Dlg, IDC_LBL_SWDEINT_MODE), bActive);
    EnableWindow(GetDlgItem(m_Dlg, IDC_SWDEINT_OUT_FILM), bActive);
    EnableWindow(GetDlgItem(m_Dlg, IDC_SWDEINT_OUT_VIDEO), bActive);
    return S_OK;
}
// Snapshot all current filter settings into the member cache. The cached
// values are both the source for initializing the controls (OnActivate)
// and the baseline for the dirty checks in OnReceiveMessage.
HRESULT CLAVVideoSettingsProp::LoadData()
{
    HRESULT hr = S_OK;
    m_dwNumThreads = m_pVideoSettings->GetNumThreads();
    m_StreamAR = m_pVideoSettings->GetStreamAR();
    m_DeintFieldOrder = m_pVideoSettings->GetDeintFieldOrder();
    m_DeintMode = m_pVideoSettings->GetDeinterlacingMode();
    m_dwRGBOutput = m_pVideoSettings->GetRGBOutputRange();
    for (int i = 0; i < LAVOutPixFmt_NB; ++i)
    {
        m_bPixFmts[i] = m_pVideoSettings->GetPixelFormat((LAVOutPixFmts)i);
    }
    m_HWAccel = m_pVideoSettings->GetHWAccel();
    for (int i = 0; i < HWCodec_NB; ++i)
    {
        m_HWAccelCodecs[i] = m_pVideoSettings->GetHWAccelCodec((LAVVideoHWCodec)i);
    }
    m_HWRes = m_pVideoSettings->GetHWAccelResolutionFlags();
    m_HWAccelCUVIDDXVA = m_pVideoSettings->GetHWAccelDeintHQ();
    m_HWDeintAlgo = m_pVideoSettings->GetHWAccelDeintMode();
    m_HWDeintOutMode = m_pVideoSettings->GetHWAccelDeintOutput();
    m_SWDeint = m_pVideoSettings->GetSWDeintMode();
    m_SWDeintOutMode = m_pVideoSettings->GetSWDeintOutput();
    m_DitherMode = m_pVideoSettings->GetDitherMode();
    m_TrayIcon = m_pVideoSettings->GetTrayIcon();
    return hr;
}
// Dialog message handler: dirty tracking. For every control change, the
// control's current state is compared against the cached baseline filled by
// LoadData(); SetDirty() enables the Apply button only on real changes.
// Controls that influence the availability of other controls additionally
// trigger UpdateHWOptions()/UpdateYADIFOptions().
INT_PTR CLAVVideoSettingsProp::OnReceiveMessage(HWND hwnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
    LRESULT lValue;
    BOOL bValue;
    switch (uMsg)
    {
    case WM_COMMAND:
        // Basic options.
        if (LOWORD(wParam) == IDC_STREAMAR && HIWORD(wParam) == BN_CLICKED)
        {
            lValue = SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (lValue != m_StreamAR)
            {
                SetDirty();
            }
        }
        else if (HIWORD(wParam) == CBN_SELCHANGE && LOWORD(wParam) == IDC_THREADS)
        {
            lValue = SendDlgItemMessage(m_Dlg, LOWORD(wParam), CB_GETCURSEL, 0, 0);
            if (lValue != m_dwNumThreads)
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_DEINT_FIELDORDER && HIWORD(wParam) == CBN_SELCHANGE)
        {
            lValue = SendDlgItemMessage(m_Dlg, LOWORD(wParam), CB_GETCURSEL, 0, 0);
            if (lValue != m_DeintFieldOrder)
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_DEINT_MODE && HIWORD(wParam) == CBN_SELCHANGE)
        {
            lValue = SendDlgItemMessage(m_Dlg, LOWORD(wParam), CB_GETCURSEL, 0, 0);
            if (lValue != m_DeintMode)
            {
                SetDirty();
            }
        }
        // Output pixel format checkboxes (one branch per format).
        else if (LOWORD(wParam) == IDC_OUT_YV12 && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_bPixFmts[LAVOutPixFmt_YV12])
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_OUT_NV12 && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_bPixFmts[LAVOutPixFmt_NV12])
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_OUT_P010 && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_bPixFmts[LAVOutPixFmt_P010])
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_OUT_P016 && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_bPixFmts[LAVOutPixFmt_P016])
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_OUT_YUY2 && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_bPixFmts[LAVOutPixFmt_YUY2])
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_OUT_UYVY && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_bPixFmts[LAVOutPixFmt_UYVY])
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_OUT_P210 && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_bPixFmts[LAVOutPixFmt_P210])
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_OUT_V210 && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_bPixFmts[LAVOutPixFmt_v210])
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_OUT_P216 && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_bPixFmts[LAVOutPixFmt_P216])
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_OUT_YV24 && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_bPixFmts[LAVOutPixFmt_YV24])
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_OUT_AYUV && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_bPixFmts[LAVOutPixFmt_AYUV])
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_OUT_Y410 && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_bPixFmts[LAVOutPixFmt_Y410])
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_OUT_V410 && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_bPixFmts[LAVOutPixFmt_v410])
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_OUT_Y416 && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_bPixFmts[LAVOutPixFmt_Y416])
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_OUT_RGB32 && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_bPixFmts[LAVOutPixFmt_RGB32])
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_OUT_RGB24 && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_bPixFmts[LAVOutPixFmt_RGB24])
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_OUT_RGB48 && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_bPixFmts[LAVOutPixFmt_RGB48])
            {
                SetDirty();
            }
        }
        // RGB output range radios: compare checked state against the cached
        // range value (0 = Auto, 1 = TV, 2 = PC).
        else if (LOWORD(wParam) == IDC_RGBOUT_AUTO && HIWORD(wParam) == BN_CLICKED)
        {
            lValue = SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (lValue != (m_dwRGBOutput == 0))
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_RGBOUT_TV && HIWORD(wParam) == BN_CLICKED)
        {
            lValue = SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (lValue != (m_dwRGBOutput == 1))
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_RGBOUT_PC && HIWORD(wParam) == BN_CLICKED)
        {
            lValue = SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (lValue != (m_dwRGBOutput == 2))
            {
                SetDirty();
            }
        }
        // HW acceleration backend: translate selection through item data,
        // then refresh the dependent controls.
        else if (HIWORD(wParam) == CBN_SELCHANGE && LOWORD(wParam) == IDC_HWACCEL)
        {
            lValue = SendDlgItemMessage(m_Dlg, LOWORD(wParam), CB_GETCURSEL, 0, 0);
            lValue = SendDlgItemMessage(m_Dlg, LOWORD(wParam), CB_GETITEMDATA, lValue, 0);
            if (lValue != m_HWAccel)
            {
                SetDirty();
            }
            UpdateHWOptions();
        }
        // Per-codec HW acceleration checkboxes.
        else if (LOWORD(wParam) == IDC_HWACCEL_H264 && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_HWAccelCodecs[HWCodec_H264])
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_HWACCEL_VC1 && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_HWAccelCodecs[HWCodec_VC1])
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_HWACCEL_MPEG2 && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_HWAccelCodecs[HWCodec_MPEG2])
            {
                SetDirty();
            }
            // MPEG2 toggles availability of the DVD checkbox.
            UpdateHWOptions();
        }
        else if (LOWORD(wParam) == IDC_HWACCEL_MPEG4 && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_HWAccelCodecs[HWCodec_MPEG4])
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_HWACCEL_MPEG2_DVD && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_HWAccelCodecs[HWCodec_MPEG2DVD])
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_HWACCEL_HEVC && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_HWAccelCodecs[HWCodec_HEVC])
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_HWACCEL_VP9 && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_HWAccelCodecs[HWCodec_VP9])
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_HWACCEL_AV1 && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_HWAccelCodecs[HWCodec_AV1])
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_HWACCEL_H264MVC && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_HWAccelCodecs[HWCodec_H264MVC])
            {
                SetDirty();
            }
        }
        else if (HIWORD(wParam) == CBN_SELCHANGE && LOWORD(wParam) == IDC_HWACCEL_DEVICE_SELECT)
        {
            lValue = SendDlgItemMessage(m_Dlg, LOWORD(wParam), CB_GETCURSEL, 0, 0);
            if (lValue != m_HWDeviceIndex)
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_HWACCEL_CUVID_DXVA && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_HWAccelCUVIDDXVA)
            {
                SetDirty();
            }
        }
        // Hardware deinterlacing: checkbox state is compared against the
        // cached mode/output enums, not a plain boolean member.
        else if (LOWORD(wParam) == IDC_HWDEINT_ENABLE && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != (m_HWDeintAlgo == HWDeintMode_Hardware))
            {
                SetDirty();
            }
            UpdateHWOptions();
        }
        else if (LOWORD(wParam) == IDC_HWDEINT_OUT_FILM && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != (m_HWDeintOutMode == DeintOutput_FramePer2Field))
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_HWDEINT_OUT_VIDEO && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != (m_HWDeintOutMode == DeintOutput_FramePerField))
            {
                SetDirty();
            }
        }
        // Software deinterlacing.
        else if (LOWORD(wParam) == IDC_SWDEINT_OUT_FILM && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != (m_SWDeintOutMode == DeintOutput_FramePer2Field))
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_SWDEINT_OUT_VIDEO && HIWORD(wParam) == BN_CLICKED)
        {
            bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != (m_SWDeintOutMode == DeintOutput_FramePerField))
            {
                SetDirty();
            }
        }
        else if (HIWORD(wParam) == CBN_SELCHANGE && LOWORD(wParam) == IDC_SWDEINT_MODE)
        {
            lValue = SendDlgItemMessage(m_Dlg, LOWORD(wParam), CB_GETCURSEL, 0, 0);
            if (lValue != m_SWDeint)
            {
                SetDirty();
            }
            // Mode selection gates the Film/Video sub-options.
            UpdateYADIFOptions();
        }
        // Dithering radios.
        else if (LOWORD(wParam) == IDC_DITHER_ORDERED && HIWORD(wParam) == BN_CLICKED)
        {
            lValue = SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (lValue != (m_DitherMode == LAVDither_Ordered))
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_DITHER_RANDOM && HIWORD(wParam) == BN_CLICKED)
        {
            lValue = SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (lValue != (m_DitherMode == LAVDither_Random))
            {
                SetDirty();
            }
        }
        // HW resolution flags: dirty when checked state equals the flag's
        // *absence* (i.e. checkbox no longer matches the cached mask).
        else if (LOWORD(wParam) == IDC_HWRES_SD && HIWORD(wParam) == BN_CLICKED)
        {
            lValue = SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (lValue == !(m_HWRes & LAVHWResFlag_SD))
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_HWRES_HD && HIWORD(wParam) == BN_CLICKED)
        {
            lValue = SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (lValue == !(m_HWRes & LAVHWResFlag_HD))
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_HWRES_UHD && HIWORD(wParam) == BN_CLICKED)
        {
            lValue = SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (lValue == !(m_HWRes & LAVHWResFlag_UHD))
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_TRAYICON && HIWORD(wParam) == BN_CLICKED)
        {
            lValue = SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (lValue != m_TrayIcon)
            {
                SetDirty();
            }
        }
        break;
    }
    // Let the parent class handle the message.
    return __super::OnReceiveMessage(hwnd, uMsg, wParam, lParam);
}
/////////////////////////////////////////////////////////////////////////////////////////////////////////
// Format Configurations
// Property page listing the per-codec enable switches of the video decoder.
CLAVVideoFormatsProp::CLAVVideoFormatsProp(LPUNKNOWN pUnk, HRESULT *phr)
    : CBaseDSPropPage(NAME("LAVVideoFormats"), pUnk, IDD_PROPPAGE_FORMATS, IDS_FORMATS)
{
}
// Nothing to release here; the settings interface is dropped in OnDisconnect().
CLAVVideoFormatsProp::~CLAVVideoFormatsProp()
{
}
// Called when the property page is attached to the filter; queries and caches
// the filter's settings interface in m_pVideoSettings.
HRESULT CLAVVideoFormatsProp::OnConnect(IUnknown *pUnk)
{
    if (!pUnk)
    {
        return E_POINTER;
    }

    // A previous connection must have been torn down via OnDisconnect() first.
    ASSERT(m_pVideoSettings == nullptr);

    return pUnk->QueryInterface(&m_pVideoSettings);
}
// Called when the page is detached; releases the cached settings interface.
HRESULT CLAVVideoFormatsProp::OnDisconnect()
{
    SafeRelease(&m_pVideoSettings);
    return S_OK;
}
// "Apply" handler: pushes the dialog's current control states down into the
// filter's settings interface, then re-reads them to refresh the cached
// baseline used for dirty tracking.
HRESULT CLAVVideoFormatsProp::OnApplyChanges()
{
    ASSERT(m_pVideoSettings != nullptr);

    HWND hlv = GetDlgItem(m_Dlg, IDC_CODECS);

    // One checkbox row per codec; the row index doubles as the codec id.
    for (int nItem = 0; nItem < ListView_GetItemCount(hlv); nItem++)
    {
        BOOL bChecked = ListView_GetCheckState(hlv, nItem);
        m_pVideoSettings->SetFormatConfiguration((LAVVideoCodec)nItem, bChecked);
    }

    // Stand-alone checkboxes outside the list view.
    m_pVideoSettings->SetUseMSWMV9Decoder(
        (BOOL)SendDlgItemMessage(m_Dlg, IDC_CODECS_MSWMVDMO, BM_GETCHECK, 0, 0));
    m_pVideoSettings->SetDVDVideoSupport(
        (BOOL)SendDlgItemMessage(m_Dlg, IDC_DVD_VIDEO, BM_GETCHECK, 0, 0));

    // Refresh the local snapshot so subsequent change detection compares
    // against the values that were just stored.
    LoadData();

    return S_OK;
}
// Called when the dialog is created: initializes the common controls,
// builds the codec list view (checkbox + name + description columns),
// and populates all controls from the filter's current settings.
HRESULT CLAVVideoFormatsProp::OnActivate()
{
    HRESULT hr = S_OK;

    // The list view requires the common controls library to be initialized.
    INITCOMMONCONTROLSEX icc;
    icc.dwSize = sizeof(INITCOMMONCONTROLSEX);
    icc.dwICC = ICC_BAR_CLASSES | ICC_STANDARD_CLASSES | ICC_LISTVIEW_CLASSES;
    if (InitCommonControlsEx(&icc) == FALSE)
    {
        return E_FAIL;
    }
    ASSERT(m_pVideoSettings != nullptr);

    // Setup ListView control for format configuration
    SendDlgItemMessage(m_Dlg, IDC_CODECS, CCM_DPISCALE, TRUE, 0);
    HWND hlv = GetDlgItem(m_Dlg, IDC_CODECS);
    ListView_SetExtendedListViewStyle(hlv, LVS_EX_CHECKBOXES | LVS_EX_FULLROWSELECT | LVS_EX_GRIDLINES);

    // Column 0 is a narrow spacer carrying the checkbox, then name/description.
    int nCol = 1;
    LVCOLUMN lvc = {LVCF_WIDTH, 0, 20, 0};
    ListView_InsertColumn(hlv, 0, &lvc);
    ListView_AddCol(hlv, nCol, 85, L"Codec", false);
    ListView_AddCol(hlv, nCol, 400, L"Description", false);

    ListView_DeleteAllItems(hlv);
    ListView_SetItemCount(hlv, Codec_VideoNB);

    // Create entries for the formats
    LVITEM lvi;
    memset(&lvi, 0, sizeof(lvi));
    lvi.mask = LVIF_TEXT | LVIF_PARAM;

    int nItem = 0;
    for (nItem = 0; nItem < Codec_VideoNB; ++nItem)
    {
        const codec_config_t *config = get_codec_config((LAVVideoCodec)nItem);

        // Create main entry
        lvi.iItem = nItem + 1;
        ListView_InsertItem(hlv, &lvi);

        // Set sub item texts (names/descriptions are ANSI, convert to wide)
        ATL::CA2W name(config->name);
        ListView_SetItemText(hlv, nItem, 1, (LPWSTR)name);

        ATL::CA2W desc(config->description);
        ListView_SetItemText(hlv, nItem, 2, (LPWSTR)desc);
    }

    hr = LoadData();
    if (SUCCEEDED(hr))
    {
        // Set checked state from the cached per-codec configuration
        for (nItem = 0; nItem < ListView_GetItemCount(hlv); nItem++)
        {
            ListView_SetCheckState(hlv, nItem, m_bFormats[nItem]);
        }
    }

    SendDlgItemMessage(m_Dlg, IDC_CODECS_MSWMVDMO, BM_SETCHECK, m_bWMVDMO, 0);
    SendDlgItemMessage(m_Dlg, IDC_DVD_VIDEO, BM_SETCHECK, m_bDVD, 0);

    return hr;
}
// Snapshots the filter's current format configuration into the local members
// (m_bFormats / m_bWMVDMO / m_bDVD) used by the dialog for change detection.
HRESULT CLAVVideoFormatsProp::LoadData()
{
    for (unsigned codec = 0; codec < Codec_VideoNB; ++codec)
    {
        m_bFormats[codec] = (m_pVideoSettings->GetFormatConfiguration((LAVVideoCodec)codec) != 0);
    }

    m_bWMVDMO = m_pVideoSettings->GetUseMSWMV9Decoder();
    m_bDVD = m_pVideoSettings->GetDVDVideoSupport();

    return S_OK;
}
// Dialog message hook: flags the page as dirty whenever a control's state
// no longer matches the values cached by LoadData().
INT_PTR CLAVVideoFormatsProp::OnReceiveMessage(HWND hwnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
    switch (uMsg)
    {
    case WM_COMMAND:
        // Stand-alone checkboxes: only dirty the page if the state actually changed.
        if (LOWORD(wParam) == IDC_CODECS_MSWMVDMO && HIWORD(wParam) == BN_CLICKED)
        {
            BOOL bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_bWMVDMO)
            {
                SetDirty();
            }
        }
        else if (LOWORD(wParam) == IDC_DVD_VIDEO && HIWORD(wParam) == BN_CLICKED)
        {
            BOOL bValue = (BOOL)SendDlgItemMessage(m_Dlg, LOWORD(wParam), BM_GETCHECK, 0, 0);
            if (bValue != m_bDVD)
            {
                SetDirty();
            }
        }
        break;
    case WM_NOTIFY:
        NMHDR *hdr = (LPNMHDR)lParam;
        if (hdr->idFrom == IDC_CODECS)
        {
            switch (hdr->code)
            {
            case LVN_ITEMCHANGED:
                // A checkbox row in the codec list view was toggled.
                LPNMLISTVIEW nmlv = (LPNMLISTVIEW)lParam;
                BOOL check = ListView_GetCheckState(hdr->hwndFrom, nmlv->iItem);
                if (check != m_bFormats[nmlv->iItem])
                {
                    SetDirty();
                }
                return TRUE;
            }
        }
        break;
    }
    // Let the parent class handle the message.
    return __super::OnReceiveMessage(hwnd, uMsg, wParam, lParam);
}
| 48,255
|
C++
|
.cpp
| 1,006
| 39.161034
| 120
| 0.633371
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| false
| true
| true
| false
|
22,157
|
CCOutputPin.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/CCOutputPin.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "CCOutputPin.h"
#include "LAVVideo.h"
// Output pin that delivers closed-caption (EIA-608) data as its own stream.
CCCOutputPin::CCCOutputPin(TCHAR *pObjectName, CLAVVideo *pFilter, CCritSec *pcsFilter, HRESULT *phr, LPWSTR pName)
    : CBaseOutputPin(pObjectName, pFilter, pcsFilter, phr, pName)
{
    // Build the single fixed media type this pin offers: DTVCC data carrying
    // EIA-608 CC side data, no format block, variable-size samples, 4 KiB default.
    m_CCmt.SetType(&MEDIATYPE_DTVCCData);
    m_CCmt.SetSubtype(&IID_MediaSideDataEIA608CC);
    m_CCmt.SetFormatType(&FORMAT_None);
    m_CCmt.SetVariableSize();
    m_CCmt.SetSampleSize(4096);
}
// Nothing pin-specific to release; the base class handles teardown.
CCCOutputPin::~CCCOutputPin()
{
}
// Pin activation; no extra work beyond the base class behavior.
HRESULT CCCOutputPin::Active(void)
{
    return __super::Active();
}
// COM QueryInterface: no pin-specific interfaces, defer to the base class.
STDMETHODIMP CCCOutputPin::NonDelegatingQueryInterface(REFIID riid, void **ppv)
{
    CheckPointer(ppv, E_POINTER);
    return __super::NonDelegatingQueryInterface(riid, ppv);
}
// Negotiates the sample allocator: requests at least one buffer of 4096
// bytes (keeping anything larger the downstream pin asked for) and verifies
// the allocator actually granted the requested buffer size.
HRESULT CCCOutputPin::DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *pProperties)
{
    CheckPointer(pAlloc, E_POINTER);
    CheckPointer(pProperties, E_POINTER);

    // Enforce minimums without shrinking an existing request.
    pProperties->cBuffers = max(pProperties->cBuffers, 1);
    pProperties->cbBuffer = max((ULONG)pProperties->cbBuffer, 4096);

    ALLOCATOR_PROPERTIES Actual;
    HRESULT hr = pAlloc->SetProperties(pProperties, &Actual);
    if (FAILED(hr))
        return hr;

    // Sanity checks on what the allocator actually provided.
    if (Actual.cbBuffer < pProperties->cbBuffer)
        return E_FAIL;
    ASSERT(Actual.cBuffers >= pProperties->cBuffers);

    return S_OK;
}
// Accepts only the single CC media type constructed in the pin's constructor.
HRESULT CCCOutputPin::CheckMediaType(const CMediaType *pmt)
{
    return (*pmt == m_CCmt) ? S_OK : E_INVALIDARG;
}
// Enumerates the pin's media types; exactly one (the fixed CC type) exists.
HRESULT CCCOutputPin::GetMediaType(int iPosition, CMediaType *pmt)
{
    // Original rejects both negative and out-of-range positions with E_INVALIDARG.
    if (iPosition != 0)
        return E_INVALIDARG;

    *pmt = m_CCmt;
    return S_OK;
}
// Delivers one block of closed-caption payload downstream as a media sample.
// pDataIn/size describe the raw CC bytes; rtTime is used as both start and
// stop timestamp, or no timestamp when it equals AV_NOPTS_VALUE.
// NOTE(review): CHECK_HR appears to jump to an error/exit path on failure
// (hence the trailing "done:" label) — confirm against the macro definition.
STDMETHODIMP CCCOutputPin::DeliverCCData(BYTE *pDataIn, size_t size, REFERENCE_TIME rtTime)
{
    HRESULT hr;
    IMediaSample *pSample = nullptr;
    CHECK_HR(hr = GetDeliveryBuffer(&pSample, nullptr, nullptr, 0));

    // Resize buffer if it is too small
    // This can cause a playback hick-up, we should avoid this if possible by setting a big enough buffer size
    if (size > (size_t)pSample->GetSize())
    {
        SafeRelease(&pSample);
        ALLOCATOR_PROPERTIES props, actual;
        CHECK_HR(hr = m_pAllocator->GetProperties(&props));
        // Give us 2 times the requested size, so we don't resize every time
        props.cbBuffer = (long)(size * 2);
        if (props.cBuffers > 1)
        {
            // Flush pending samples so the allocator can be re-committed safely.
            CHECK_HR(hr = __super::DeliverBeginFlush());
            CHECK_HR(hr = __super::DeliverEndFlush());
        }
        // Re-commit the allocator with the enlarged buffer, then fetch a new sample.
        CHECK_HR(hr = m_pAllocator->Decommit());
        CHECK_HR(hr = m_pAllocator->SetProperties(&props, &actual));
        CHECK_HR(hr = m_pAllocator->Commit());
        CHECK_HR(hr = GetDeliveryBuffer(&pSample, nullptr, nullptr, 0));
    }

    // Fill the sample
    BYTE *pData = nullptr;
    if (FAILED(hr = pSample->GetPointer(&pData)) || !pData)
        goto done;
    memcpy(pData, pDataIn, size);

    // set properties; timestamps only when a valid PTS was supplied
    CHECK_HR(hr = pSample->SetActualDataLength((long)size));
    CHECK_HR(hr = pSample->SetTime(rtTime != AV_NOPTS_VALUE ? &rtTime : nullptr,
                                   rtTime != AV_NOPTS_VALUE ? &rtTime : nullptr));
    CHECK_HR(hr = pSample->SetMediaTime(nullptr, nullptr));

    // Deliver
    CHECK_HR(hr = Deliver(pSample));

done:
    SafeRelease(&pSample);
    return hr;
}
| 4,139
|
C++
|
.cpp
| 111
| 32.540541
| 115
| 0.690809
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| false
| true
| true
| false
|
22,159
|
Media.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/Media.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "LAVVideo.h"
#include "Media.h"
#include <MMReg.h>
#include <Mfidl.h>
#include "moreuuids.h"
// One entry mapping a DirectShow media subtype GUID to its FFmpeg codec id.
typedef struct
{
    const CLSID *clsMinorType; // DirectShow media subtype (non-owning pointer)
    const enum AVCodecID nFFCodec; // matching FFmpeg codec identifier
} FFMPEG_SUBTYPE_MAP;
// clang-format off
// Map Media Subtype <> FFMPEG Codec Id
static const FFMPEG_SUBTYPE_MAP lavc_video_codecs[] = {
// H264
{ &MEDIASUBTYPE_H264, AV_CODEC_ID_H264 },
{ &MEDIASUBTYPE_h264, AV_CODEC_ID_H264 },
{ &MEDIASUBTYPE_X264, AV_CODEC_ID_H264 },
{ &MEDIASUBTYPE_x264, AV_CODEC_ID_H264 },
{ &MEDIASUBTYPE_AVC1, AV_CODEC_ID_H264 },
{ &MEDIASUBTYPE_avc1, AV_CODEC_ID_H264 },
{ &MEDIASUBTYPE_CCV1, AV_CODEC_ID_H264 }, // Used by Haali Splitter
{ &MEDIASUBTYPE_H264_bis, AV_CODEC_ID_H264}, // MainConcept specific
{ &MEDIASUBTYPE_AMVC, AV_CODEC_ID_H264_MVC },
{ &MEDIASUBTYPE_MVC1, AV_CODEC_ID_H264_MVC },
// HEVC
{ &MEDIASUBTYPE_HEVC, AV_CODEC_ID_HEVC },
{ &MEDIASUBTYPE_HVC1, AV_CODEC_ID_HEVC },
{ &MEDIASUBTYPE_HM10, AV_CODEC_ID_HEVC },
{ &MEDIASUBTYPE_H265, AV_CODEC_ID_HEVC },
// VVC
{ &MEDIASUBTYPE_VVC1, AV_CODEC_ID_VVC },
// MPEG1/2
{ &MEDIASUBTYPE_MPEG1Payload, AV_CODEC_ID_MPEG1VIDEO },
{ &MEDIASUBTYPE_MPEG1Video, AV_CODEC_ID_MPEG1VIDEO },
{ &MEDIASUBTYPE_MPEG2_VIDEO, AV_CODEC_ID_MPEG2VIDEO },
// MJPEG
{ &MEDIASUBTYPE_MJPG, AV_CODEC_ID_MJPEG },
{ &MEDIASUBTYPE_QTJpeg, AV_CODEC_ID_MJPEG },
{ &MEDIASUBTYPE_MJPGB, AV_CODEC_ID_MJPEGB },
// VC-1
{ &MEDIASUBTYPE_WVC1, AV_CODEC_ID_VC1 },
{ &MEDIASUBTYPE_wvc1, AV_CODEC_ID_VC1 },
{ &MEDIASUBTYPE_WMVA, AV_CODEC_ID_VC1 },
{ &MEDIASUBTYPE_wmva, AV_CODEC_ID_VC1 },
{ &MEDIASUBTYPE_WVP2, AV_CODEC_ID_VC1IMAGE },
{ &MEDIASUBTYPE_wvp2, AV_CODEC_ID_VC1IMAGE },
// WMV
{ &MEDIASUBTYPE_WMV1, AV_CODEC_ID_WMV1 },
{ &MEDIASUBTYPE_wmv1, AV_CODEC_ID_WMV1 },
{ &MEDIASUBTYPE_WMV2, AV_CODEC_ID_WMV2 },
{ &MEDIASUBTYPE_wmv2, AV_CODEC_ID_WMV2 },
{ &MEDIASUBTYPE_WMV3, AV_CODEC_ID_WMV3 },
{ &MEDIASUBTYPE_wmv3, AV_CODEC_ID_WMV3 },
{ &MEDIASUBTYPE_WMVP, AV_CODEC_ID_WMV3IMAGE },
{ &MEDIASUBTYPE_wmvp, AV_CODEC_ID_WMV3IMAGE },
// VP7/8/9
{ &MEDIASUBTYPE_VP70, AV_CODEC_ID_VP7 },
{ &MEDIASUBTYPE_VP80, AV_CODEC_ID_VP8 },
{ &MEDIASUBTYPE_VP90, AV_CODEC_ID_VP9 },
// AV1
{ &MEDIASUBTYPE_AV01, AV_CODEC_ID_AV1 },
// MPEG4 ASP
{ &MEDIASUBTYPE_XVID, AV_CODEC_ID_MPEG4 },
{ &MEDIASUBTYPE_xvid, AV_CODEC_ID_MPEG4 },
{ &MEDIASUBTYPE_DIVX, AV_CODEC_ID_MPEG4 },
{ &MEDIASUBTYPE_divx, AV_CODEC_ID_MPEG4 },
{ &MEDIASUBTYPE_Divx, AV_CODEC_ID_MPEG4 },
{ &MEDIASUBTYPE_DX50, AV_CODEC_ID_MPEG4 },
{ &MEDIASUBTYPE_dx50, AV_CODEC_ID_MPEG4 },
{ &MEDIASUBTYPE_MP4V, AV_CODEC_ID_MPEG4 },
{ &MEDIASUBTYPE_mp4v, AV_CODEC_ID_MPEG4 },
{ &MEDIASUBTYPE_M4S2, AV_CODEC_ID_MPEG4 },
{ &MEDIASUBTYPE_m4s2, AV_CODEC_ID_MPEG4 },
{ &MEDIASUBTYPE_MP4S, AV_CODEC_ID_MPEG4 },
{ &MEDIASUBTYPE_mp4s, AV_CODEC_ID_MPEG4 },
{ &MEDIASUBTYPE_FMP4, AV_CODEC_ID_MPEG4 },
{ &MEDIASUBTYPE_3IVX, AV_CODEC_ID_MPEG4 },
{ &MEDIASUBTYPE_3ivx, AV_CODEC_ID_MPEG4 },
{ &MEDIASUBTYPE_3IV1, AV_CODEC_ID_MPEG4 },
{ &MEDIASUBTYPE_3iv1, AV_CODEC_ID_MPEG4 },
{ &MEDIASUBTYPE_3IV2, AV_CODEC_ID_MPEG4 },
{ &MEDIASUBTYPE_3iv2, AV_CODEC_ID_MPEG4 },
{ &MEDIASUBTYPE_BLZ0, AV_CODEC_ID_MPEG4 },
{ &MEDIASUBTYPE_GEOV, AV_CODEC_ID_MPEG4 },
// MS-MPEG4
{ &MEDIASUBTYPE_MPG4, AV_CODEC_ID_MSMPEG4V1 },
{ &MEDIASUBTYPE_mpg4, AV_CODEC_ID_MSMPEG4V1 },
{ &MEDIASUBTYPE_MP41, AV_CODEC_ID_MSMPEG4V1 },
{ &MEDIASUBTYPE_mp41, AV_CODEC_ID_MSMPEG4V1 },
{ &MEDIASUBTYPE_DIV1, AV_CODEC_ID_MSMPEG4V1 },
{ &MEDIASUBTYPE_div1, AV_CODEC_ID_MSMPEG4V1 },
{ &MEDIASUBTYPE_MP42, AV_CODEC_ID_MSMPEG4V2 },
{ &MEDIASUBTYPE_mp42, AV_CODEC_ID_MSMPEG4V2 },
{ &MEDIASUBTYPE_DIV2, AV_CODEC_ID_MSMPEG4V2 },
{ &MEDIASUBTYPE_div2, AV_CODEC_ID_MSMPEG4V2 },
{ &MEDIASUBTYPE_MP43, AV_CODEC_ID_MSMPEG4V3 },
{ &MEDIASUBTYPE_mp43, AV_CODEC_ID_MSMPEG4V3 },
{ &MEDIASUBTYPE_DIV3, AV_CODEC_ID_MSMPEG4V3 },
{ &MEDIASUBTYPE_div3, AV_CODEC_ID_MSMPEG4V3 },
{ &MEDIASUBTYPE_MPG3, AV_CODEC_ID_MSMPEG4V3 },
{ &MEDIASUBTYPE_mpg3, AV_CODEC_ID_MSMPEG4V3 },
{ &MEDIASUBTYPE_DIV4, AV_CODEC_ID_MSMPEG4V3 },
{ &MEDIASUBTYPE_div4, AV_CODEC_ID_MSMPEG4V3 },
{ &MEDIASUBTYPE_DIV5, AV_CODEC_ID_MSMPEG4V3 },
{ &MEDIASUBTYPE_div5, AV_CODEC_ID_MSMPEG4V3 },
{ &MEDIASUBTYPE_DIV6, AV_CODEC_ID_MSMPEG4V3 },
{ &MEDIASUBTYPE_div6, AV_CODEC_ID_MSMPEG4V3 },
{ &MEDIASUBTYPE_DVX3, AV_CODEC_ID_MSMPEG4V3 },
{ &MEDIASUBTYPE_dvx3, AV_CODEC_ID_MSMPEG4V3 },
{ &MEDIASUBTYPE_3IVD, AV_CODEC_ID_MSMPEG4V3 },
// Flash
{ &MEDIASUBTYPE_FLV1, AV_CODEC_ID_FLV1 },
{ &MEDIASUBTYPE_flv1, AV_CODEC_ID_FLV1 },
{ &MEDIASUBTYPE_VP60, AV_CODEC_ID_VP6 },
{ &MEDIASUBTYPE_vp60, AV_CODEC_ID_VP6 },
{ &MEDIASUBTYPE_VP61, AV_CODEC_ID_VP6 },
{ &MEDIASUBTYPE_vp61, AV_CODEC_ID_VP6 },
{ &MEDIASUBTYPE_VP62, AV_CODEC_ID_VP6 },
{ &MEDIASUBTYPE_vp62, AV_CODEC_ID_VP6 },
{ &MEDIASUBTYPE_VP6A, AV_CODEC_ID_VP6A },
{ &MEDIASUBTYPE_vp6a, AV_CODEC_ID_VP6A },
{ &MEDIASUBTYPE_VP6F, AV_CODEC_ID_VP6F },
{ &MEDIASUBTYPE_vp6f, AV_CODEC_ID_VP6F },
{ &MEDIASUBTYPE_FLV4, AV_CODEC_ID_VP6F },
{ &MEDIASUBTYPE_flv4, AV_CODEC_ID_VP6F },
{ &MEDIASUBTYPE_FSV1, AV_CODEC_ID_FLASHSV },
// Real
{ &MEDIASUBTYPE_RV10, AV_CODEC_ID_RV10 },
{ &MEDIASUBTYPE_RV20, AV_CODEC_ID_RV20 },
{ &MEDIASUBTYPE_RV30, AV_CODEC_ID_RV30 },
{ &MEDIASUBTYPE_RV40, AV_CODEC_ID_RV40 },
// DV Video
{ &MEDIASUBTYPE_dvsd, AV_CODEC_ID_DVVIDEO },
{ &MEDIASUBTYPE_DVSD, AV_CODEC_ID_DVVIDEO },
{ &MEDIASUBTYPE_CDVH, AV_CODEC_ID_DVVIDEO },
{ &MEDIASUBTYPE_CDVC, AV_CODEC_ID_DVVIDEO },
{ &MEDIASUBTYPE_CDV5, AV_CODEC_ID_DVVIDEO },
{ &MEDIASUBTYPE_dv25, AV_CODEC_ID_DVVIDEO },
{ &MEDIASUBTYPE_DV25, AV_CODEC_ID_DVVIDEO },
{ &MEDIASUBTYPE_dv50, AV_CODEC_ID_DVVIDEO },
{ &MEDIASUBTYPE_DV50, AV_CODEC_ID_DVVIDEO },
{ &MEDIASUBTYPE_DVCP, AV_CODEC_ID_DVVIDEO },
{ &MEDIASUBTYPE_DV5P, AV_CODEC_ID_DVVIDEO },
{ &MEDIASUBTYPE_DV5N, AV_CODEC_ID_DVVIDEO },
{ &MEDIASUBTYPE_DVPP, AV_CODEC_ID_DVVIDEO },
{ &MEDIASUBTYPE_DVC, AV_CODEC_ID_DVVIDEO },
{ &MEDIASUBTYPE_DVH1, AV_CODEC_ID_DVVIDEO },
{ &MEDIASUBTYPE_DVH2, AV_CODEC_ID_DVVIDEO },
{ &MEDIASUBTYPE_DVH3, AV_CODEC_ID_DVVIDEO },
{ &MEDIASUBTYPE_DVH4, AV_CODEC_ID_DVVIDEO },
{ &MEDIASUBTYPE_DVH5, AV_CODEC_ID_DVVIDEO },
{ &MEDIASUBTYPE_DVH6, AV_CODEC_ID_DVVIDEO },
{ &MEDIASUBTYPE_DVHQ, AV_CODEC_ID_DVVIDEO },
{ &MEDIASUBTYPE_DVHP, AV_CODEC_ID_DVVIDEO },
{ &MEDIASUBTYPE_AVdv, AV_CODEC_ID_DVVIDEO },
{ &MEDIASUBTYPE_AVd1, AV_CODEC_ID_DVVIDEO },
// JPEG 2000
{ &MEDIASUBTYPE_mjp2, AV_CODEC_ID_JPEG2000 },
{ &MEDIASUBTYPE_MJ2C, AV_CODEC_ID_JPEG2000 },
{ &MEDIASUBTYPE_LJ2C, AV_CODEC_ID_JPEG2000 },
{ &MEDIASUBTYPE_LJ2K, AV_CODEC_ID_JPEG2000 },
{ &MEDIASUBTYPE_IPJ2, AV_CODEC_ID_JPEG2000 },
// Misc Formats
{ &MEDIASUBTYPE_SVQ1, AV_CODEC_ID_SVQ1 },
{ &MEDIASUBTYPE_SVQ3, AV_CODEC_ID_SVQ3 },
{ &MEDIASUBTYPE_H261, AV_CODEC_ID_H261 },
{ &MEDIASUBTYPE_h261, AV_CODEC_ID_H261 },
{ &MEDIASUBTYPE_H263, AV_CODEC_ID_H263 },
{ &MEDIASUBTYPE_h263, AV_CODEC_ID_H263 },
{ &MEDIASUBTYPE_S263, AV_CODEC_ID_H263 },
{ &MEDIASUBTYPE_s263, AV_CODEC_ID_H263 },
{ &MEDIASUBTYPE_I263, AV_CODEC_ID_H263I },
{ &MEDIASUBTYPE_i263, AV_CODEC_ID_H263I },
{ &MEDIASUBTYPE_THEORA, AV_CODEC_ID_THEORA },
{ &MEDIASUBTYPE_theora, AV_CODEC_ID_THEORA },
{ &MEDIASUBTYPE_TSCC, AV_CODEC_ID_TSCC },
{ &MEDIASUBTYPE_TSC2, AV_CODEC_ID_TSCC2 },
{ &MEDIASUBTYPE_IV50, AV_CODEC_ID_INDEO5 },
{ &MEDIASUBTYPE_IV41, AV_CODEC_ID_INDEO4 },
{ &MEDIASUBTYPE_IV31, AV_CODEC_ID_INDEO3 },
{ &MEDIASUBTYPE_IV32, AV_CODEC_ID_INDEO3 },
{ &MEDIASUBTYPE_FPS1, AV_CODEC_ID_FRAPS },
{ &MEDIASUBTYPE_HuffYUV, AV_CODEC_ID_HUFFYUV },
{ &MEDIASUBTYPE_Lagarith, AV_CODEC_ID_LAGARITH },
{ &MEDIASUBTYPE_CVID, AV_CODEC_ID_CINEPAK },
{ &MEDIASUBTYPE_QTRle, AV_CODEC_ID_QTRLE },
{ &MEDIASUBTYPE_VP30, AV_CODEC_ID_VP3 },
{ &MEDIASUBTYPE_VP31, AV_CODEC_ID_VP3 },
{ &MEDIASUBTYPE_CSCD, AV_CODEC_ID_CSCD },
{ &MEDIASUBTYPE_QPEG, AV_CODEC_ID_QPEG },
{ &MEDIASUBTYPE_QP10, AV_CODEC_ID_QPEG },
{ &MEDIASUBTYPE_QP11, AV_CODEC_ID_QPEG },
{ &MEDIASUBTYPE_MSZH, AV_CODEC_ID_MSZH },
{ &MEDIASUBTYPE_ZLIB, AV_CODEC_ID_ZLIB },
{ &MEDIASUBTYPE_QTRpza, AV_CODEC_ID_RPZA },
{ &MEDIASUBTYPE_PCM, AV_CODEC_ID_MSRLE }, // Yeah, PCM. Its the same FourCC as used by MS-RLE
{ &MEDIASUBTYPE_apch, AV_CODEC_ID_PRORES },
{ &MEDIASUBTYPE_apcn, AV_CODEC_ID_PRORES },
{ &MEDIASUBTYPE_apcs, AV_CODEC_ID_PRORES },
{ &MEDIASUBTYPE_apco, AV_CODEC_ID_PRORES },
{ &MEDIASUBTYPE_ap4h, AV_CODEC_ID_PRORES },
{ &MEDIASUBTYPE_ap4x, AV_CODEC_ID_PRORES },
{ &MEDIASUBTYPE_ULRA, AV_CODEC_ID_UTVIDEO },
{ &MEDIASUBTYPE_ULRG, AV_CODEC_ID_UTVIDEO },
{ &MEDIASUBTYPE_ULY0, AV_CODEC_ID_UTVIDEO },
{ &MEDIASUBTYPE_ULY2, AV_CODEC_ID_UTVIDEO },
{ &MEDIASUBTYPE_ULY4, AV_CODEC_ID_UTVIDEO },
{ &MEDIASUBTYPE_UQY2, AV_CODEC_ID_UTVIDEO },
{ &MEDIASUBTYPE_UQRG, AV_CODEC_ID_UTVIDEO },
{ &MEDIASUBTYPE_UQRA, AV_CODEC_ID_UTVIDEO },
{ &MEDIASUBTYPE_ULH0, AV_CODEC_ID_UTVIDEO },
{ &MEDIASUBTYPE_ULH2, AV_CODEC_ID_UTVIDEO },
{ &MEDIASUBTYPE_ULH4, AV_CODEC_ID_UTVIDEO },
{ &MEDIASUBTYPE_UMY2, AV_CODEC_ID_UTVIDEO },
{ &MEDIASUBTYPE_UMH2, AV_CODEC_ID_UTVIDEO },
{ &MEDIASUBTYPE_UMY4, AV_CODEC_ID_UTVIDEO },
{ &MEDIASUBTYPE_UMH4, AV_CODEC_ID_UTVIDEO },
{ &MEDIASUBTYPE_UMRG, AV_CODEC_ID_UTVIDEO },
{ &MEDIASUBTYPE_UMRA, AV_CODEC_ID_UTVIDEO },
{ &MEDIASUBTYPE_AMVV, AV_CODEC_ID_AMV },
{ &MEDIASUBTYPE_AMVF, AV_CODEC_ID_AMV },
{ &MEDIASUBTYPE_DiracVideo, AV_CODEC_ID_DIRAC },
{ &MEDIASUBTYPE_DRAC, AV_CODEC_ID_DIRAC },
{ &MEDIASUBTYPE_AVdn, AV_CODEC_ID_DNXHD },
{ &MEDIASUBTYPE_AVdh, AV_CODEC_ID_DNXHD },
{ &MEDIASUBTYPE_CRAM, AV_CODEC_ID_MSVIDEO1 },
{ &MEDIASUBTYPE_MSVC, AV_CODEC_ID_MSVIDEO1 },
{ &MEDIASUBTYPE_WHAM, AV_CODEC_ID_MSVIDEO1 },
{ &MEDIASUBTYPE_8BPS, AV_CODEC_ID_8BPS },
{ &MEDIASUBTYPE_LOCO, AV_CODEC_ID_LOCO },
{ &MEDIASUBTYPE_ZMBV, AV_CODEC_ID_ZMBV },
{ &MEDIASUBTYPE_VCR1, AV_CODEC_ID_VCR1 },
{ &MEDIASUBTYPE_AASC, AV_CODEC_ID_AASC },
{ &MEDIASUBTYPE_SNOW, AV_CODEC_ID_SNOW },
{ &MEDIASUBTYPE_FFV1, AV_CODEC_ID_FFV1 },
{ &MEDIASUBTYPE_FFVH, AV_CODEC_ID_FFVHUFF },
{ &MEDIASUBTYPE_VMNC, AV_CODEC_ID_VMNC },
{ &MEDIASUBTYPE_FLIC, AV_CODEC_ID_FLIC },
//{ &MEDIASUBTYPE_G2M2, AV_CODEC_ID_G2M },
//{ &MEDIASUBTYPE_G2M3, AV_CODEC_ID_G2M },
{ &MEDIASUBTYPE_G2M4, AV_CODEC_ID_G2M },
{ &MEDIASUBTYPE_icod, AV_CODEC_ID_AIC },
{ &MEDIASUBTYPE_DUCK, AV_CODEC_ID_TRUEMOTION1 },
{ &MEDIASUBTYPE_TM20, AV_CODEC_ID_TRUEMOTION2 },
{ &MEDIASUBTYPE_CFHD, AV_CODEC_ID_CFHD },
{ &MEDIASUBTYPE_MAGY, AV_CODEC_ID_MAGICYUV },
{ &MEDIASUBTYPE_FICV, AV_CODEC_ID_FIC },
{ &MEDIASUBTYPE_QTSmc, AV_CODEC_ID_SMC },
// Game Formats
{ &MEDIASUBTYPE_BIKI, AV_CODEC_ID_BINKVIDEO },
{ &MEDIASUBTYPE_BIKB, AV_CODEC_ID_BINKVIDEO },
{ &MEDIASUBTYPE_SMK2, AV_CODEC_ID_SMACKVIDEO },
{ &MEDIASUBTYPE_SMK4, AV_CODEC_ID_SMACKVIDEO },
{ &MEDIASUBTYPE_THPV, AV_CODEC_ID_THP },
{ &MEDIASUBTYPE_ROQV, AV_CODEC_ID_ROQ },
// Image Formats
{ &MEDIASUBTYPE_PNG, AV_CODEC_ID_PNG },
{ &MEDIASUBTYPE_TIFF, AV_CODEC_ID_TIFF },
{ &MEDIASUBTYPE_BMP, AV_CODEC_ID_BMP },
{ &MEDIASUBTYPE_GIF, AV_CODEC_ID_GIF },
{ &MEDIASUBTYPE_TGA, AV_CODEC_ID_TARGA },
// Special raw formats
{ &MEDIASUBTYPE_v210, AV_CODEC_ID_V210 },
{ &MEDIASUBTYPE_v410, AV_CODEC_ID_V410 },
{ &MEDIASUBTYPE_LAV_RAWVIDEO, AV_CODEC_ID_RAWVIDEO },
};
// Define Input Media Types
const AMOVIESETUP_MEDIATYPE CLAVVideo::sudPinTypesIn[] = {
// H264
{ &MEDIATYPE_Video, &MEDIASUBTYPE_H264 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_h264 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_X264 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_x264 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_AVC1 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_avc1 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_CCV1 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_H264_bis },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_AMVC },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_MVC1 },
// HEVC
{ &MEDIATYPE_Video, &MEDIASUBTYPE_HEVC },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_HVC1 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_HM10 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_H265 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_VVC1 },
// MPEG1/2
{ &MEDIATYPE_Video, &MEDIASUBTYPE_MPEG1Payload },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_MPEG1Video },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_MPEG2_VIDEO },
{ &MEDIATYPE_DVD_ENCRYPTED_PACK, &MEDIASUBTYPE_MPEG2_VIDEO },
{ &MEDIATYPE_MPEG2_PACK, &MEDIASUBTYPE_MPEG2_VIDEO },
{ &MEDIATYPE_MPEG2_PES, &MEDIASUBTYPE_MPEG2_VIDEO },
// MJPEG
{ &MEDIATYPE_Video, &MEDIASUBTYPE_MJPG },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_QTJpeg },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_MJPGB },
// VC-1
{ &MEDIATYPE_Video, &MEDIASUBTYPE_WVC1 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_wvc1 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_WMVA },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_wmva },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_WVP2 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_wvp2 },
// WMV
{ &MEDIATYPE_Video, &MEDIASUBTYPE_WMV1 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_wmv1 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_WMV2 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_wmv2 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_WMV3 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_wmv3 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_WMVP },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_wmvp },
// VP7/8/9
{ &MEDIATYPE_Video, &MEDIASUBTYPE_VP70 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_VP80 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_VP90 },
// AV1
{ &MEDIATYPE_Video, &MEDIASUBTYPE_AV01 },
// MPEG4 ASP
{ &MEDIATYPE_Video, &MEDIASUBTYPE_XVID },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_xvid },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DIVX },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_divx },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_Divx },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DX50 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_dx50 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_MP4V },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_mp4v },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_M4S2 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_m4s2 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_MP4S },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_mp4s },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_FMP4 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_3IVX },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_3ivx },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_3IV1 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_3iv1 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_3IV2 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_3iv2 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_BLZ0 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_GEOV },
// MS-MPEG4
{ &MEDIATYPE_Video, &MEDIASUBTYPE_MPG4 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_mpg4 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_MP41 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_mp41 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DIV1 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_div1 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_MP42 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_mp42 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DIV2 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_div2 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_MP43 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_mp43 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DIV3 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_div3 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_MPG3 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_mpg3 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DIV4 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_div4 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DIV5 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_div5 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DIV6 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_div6 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DVX3 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_dvx3 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_3IVD },
// Flash
{ &MEDIATYPE_Video, &MEDIASUBTYPE_FLV1 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_flv1 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_VP60 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_vp60 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_VP61 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_vp61 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_VP62 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_vp62 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_VP6A },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_vp6a },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_VP6F },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_vp6f },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_FLV4 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_flv4 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_FSV1 },
// Real
{ &MEDIATYPE_Video, &MEDIASUBTYPE_RV10 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_RV20 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_RV30 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_RV40 },
// DV Video
{ &MEDIATYPE_Video, &MEDIASUBTYPE_dvsd },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DVSD },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_CDVH },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_CDVC },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_CDV5 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_dv25 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DV25 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_dv50 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DV50 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DVCP },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DV5P },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DV5N },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DVPP },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DVC },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DVH1 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DVH2 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DVH3 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DVH4 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DVH5 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DVH6 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DVHQ },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DVHP },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_AVdv },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_AVd1 },
// JPEG 2000
{ &MEDIATYPE_Video, &MEDIASUBTYPE_mjp2 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_MJ2C },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_LJ2C },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_LJ2K },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_IPJ2 },
// Misc Formats
{ &MEDIATYPE_Video, &MEDIASUBTYPE_SVQ1 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_SVQ3 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_H261 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_h261 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_H263 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_h263 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_S263 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_s263 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_I263 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_i263 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_THEORA },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_theora },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_TSCC },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_TSC2 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_IV50 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_IV41 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_IV31 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_IV32 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_FPS1 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_HuffYUV },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_Lagarith },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_CVID },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_QTRle },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_VP30 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_VP31 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_CSCD },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_QPEG },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_QP10 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_QP11 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_MSZH },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_ZLIB },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_QTRpza },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_PCM }, // Yeah, PCM. Its the same FourCC as used by MS-RLE
{ &MEDIATYPE_Video, &MEDIASUBTYPE_apch },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_apcn },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_apcs },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_apco },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_ap4h },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_ap4x },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_ULRA },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_ULRG },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_ULY0 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_ULY2 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_ULY4 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_UQY2 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_UQRG },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_UQRA },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_ULH0 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_ULH2 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_ULH4 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_UMY2 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_UMH2 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_UMY4 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_UMH4 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_UMRG },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_UMRA },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_AMVV },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_AMVF },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DiracVideo },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DRAC },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_AVdn },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_AVdh },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_CRAM },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_MSVC },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_WHAM },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_8BPS },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_LOCO },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_ZMBV },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_VCR1 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_AASC },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_SNOW },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_FFV1 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_FFVH },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_VMNC },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_FLIC },
//{ &MEDIATYPE_Video, &MEDIASUBTYPE_G2M2 },
//{ &MEDIATYPE_Video, &MEDIASUBTYPE_G2M3 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_G2M4 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_icod },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_DUCK },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_TM20 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_CFHD },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_MAGY },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_FICV },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_QTSmc },
// Game Formats
{ &MEDIATYPE_Video, &MEDIASUBTYPE_BIKI },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_BIKB },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_SMK2 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_SMK4 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_THPV },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_ROQV },
// Image Formats
{ &MEDIATYPE_Video, &MEDIASUBTYPE_PNG },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_TIFF },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_BMP },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_GIF },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_TGA },
// Special raw formats
{ &MEDIATYPE_Video, &MEDIASUBTYPE_v210 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_v410 },
{ &MEDIATYPE_Video, &MEDIASUBTYPE_LAV_RAWVIDEO },
};
// Number of entries in sudPinTypesIn, used for filter/pin registration.
const UINT CLAVVideo::sudPinTypesInCount = countof(CLAVVideo::sudPinTypesIn);
// Define Output Media Types
// (raw pixel formats advertised on the decoder's output pin for registration)
const AMOVIESETUP_MEDIATYPE CLAVVideo::sudPinTypesOut[] = {
    { &MEDIATYPE_Video, &MEDIASUBTYPE_YV12 },
    { &MEDIATYPE_Video, &MEDIASUBTYPE_NV12 },
    { &MEDIATYPE_Video, &MEDIASUBTYPE_YUY2 },
    { &MEDIATYPE_Video, &MEDIASUBTYPE_UYVY },
    { &MEDIATYPE_Video, &MEDIASUBTYPE_RGB32 },
    { &MEDIATYPE_Video, &MEDIASUBTYPE_RGB24 },
};
// Number of entries in sudPinTypesOut, used for filter/pin registration.
const UINT CLAVVideo::sudPinTypesOutCount = countof(CLAVVideo::sudPinTypesOut);
// Looks up the FFmpeg codec id registered for the media type's subtype by
// scanning the lavc_video_codecs mapping table.
// Returns AV_CODEC_ID_NONE when the subtype is unknown.
AVCodecID FindCodecId(const CMediaType *mt)
{
    for (const auto &entry : lavc_video_codecs)
    {
        if (mt->subtype == *entry.clsMinorType)
            return entry.nFFCodec;
    }
    return AV_CODEC_ID_NONE;
}
// Strings will be filled in eventually.
// AV_CODEC_ID_NONE means there is some special handling going on.
// Order is Important, has to be the same as the CC Enum
// Also, the order is used for storage in the Registry
// Each entry: {number of codec ids, {codec id list}, optional short name, optional description};
// missing names/descriptions are filled lazily from the avcodec descriptor
// by get_codec_config() below.
static codec_config_t m_codec_config[] = {
    { 1, { AV_CODEC_ID_H264 }},                                                                  // Codec_H264
    { 2, { AV_CODEC_ID_VC1, AV_CODEC_ID_VC1IMAGE }},                                             // Codec_VC1
    { 1, { AV_CODEC_ID_MPEG1VIDEO }, "mpeg1"},                                                   // Codec_MPEG1
    { 1, { AV_CODEC_ID_MPEG2VIDEO }, "mpeg2"},                                                   // Codec_MPEG2
    { 1, { AV_CODEC_ID_MPEG4 }},                                                                 // Codec_MPEG4
    { 3, { AV_CODEC_ID_MSMPEG4V1, AV_CODEC_ID_MSMPEG4V2, AV_CODEC_ID_MSMPEG4V3 }, "msmpeg4", "MS-MPEG-4 (DIVX3)" }, // Codec_MSMPEG4
    { 1, { AV_CODEC_ID_VP8 }},                                                                   // Codec_VP8
    { 2, { AV_CODEC_ID_WMV3, AV_CODEC_ID_WMV3IMAGE }},                                           // Codec_WMV3
    { 2, { AV_CODEC_ID_WMV1, AV_CODEC_ID_WMV2 }, "wmv12", "Windows Media Video 7/8" },           // Codec_WMV12
    { 3, { AV_CODEC_ID_MJPEG, AV_CODEC_ID_MJPEGB, AV_CODEC_ID_AMV }},                            // Codec_MJPEG
    { 2, { AV_CODEC_ID_THEORA, AV_CODEC_ID_VP3 }},                                               // Codec_Theora
    { 2, { AV_CODEC_ID_FLV1, AV_CODEC_ID_FLASHSV }, "flash", "Flash Video (FLV1, FSV1)"},        // Codec_FLV1
    { 3, { AV_CODEC_ID_VP6, AV_CODEC_ID_VP6A, AV_CODEC_ID_VP6F }},                               // Codec_VP6
    { 2, { AV_CODEC_ID_SVQ1, AV_CODEC_ID_SVQ3 }, "svq", "SVQ 1 / SVQ 3"},                        // Codec_SVQ
    { 1, { AV_CODEC_ID_H261 }},                                                                  // Codec_H261
    { 2, { AV_CODEC_ID_H263, AV_CODEC_ID_H263I }},                                               // Codec_H263
    { 3, { AV_CODEC_ID_INDEO3, AV_CODEC_ID_INDEO4, AV_CODEC_ID_INDEO5 }, "indeo", "Intel Indeo 3/4/5"}, // Codec_Indeo
    { 2, { AV_CODEC_ID_TSCC, AV_CODEC_ID_TSCC2 }},                                               // Codec_TSCC
    { 1, { AV_CODEC_ID_FRAPS }},                                                                 // Codec_Fraps
    { 2, { AV_CODEC_ID_HUFFYUV, AV_CODEC_ID_FFVHUFF }},                                          // Codec_HuffYUV
    { 1, { AV_CODEC_ID_QTRLE }},                                                                 // Codec_QTRle
    { 1, { AV_CODEC_ID_DVVIDEO }},                                                               // Codec_DV
    { 1, { AV_CODEC_ID_BINKVIDEO }, "bink"},                                                     // Codec_Bink
    { 1, { AV_CODEC_ID_SMACKVIDEO }},                                                            // Codec_Smacker
    { 2, { AV_CODEC_ID_RV10, AV_CODEC_ID_RV20 }, "rv12", "RealVideo 1/2" },                      // Codev_RV12
    { 2, { AV_CODEC_ID_RV30, AV_CODEC_ID_RV40 }, "rv34", "RealVideo 3/4" },                      // Codec_RV34
    { 1, { AV_CODEC_ID_LAGARITH }},                                                              // Codec_Lagarith
    { 1, { AV_CODEC_ID_CINEPAK }},                                                               // Codec_Cinepak
    { 1, { AV_CODEC_ID_CSCD }},                                                                  // Codec_Camstudio
    { 1, { AV_CODEC_ID_QPEG }},                                                                  // Codec_QPEG
    { 2, { AV_CODEC_ID_ZLIB, AV_CODEC_ID_MSZH }, "zlib", "ZLIB/MSZH lossless" },                 // Codec_ZLIB
    { 1, { AV_CODEC_ID_RPZA }},                                                                  // Codec_QTRpza
    { 1, { AV_CODEC_ID_PNG }},                                                                   // Codec_PNG
    { 2, { AV_CODEC_ID_MSRLE, AV_CODEC_ID_AASC }},                                               // Codec_MSRLE
    { 1, { AV_CODEC_ID_PRORES }},                                                                // Codec_ProRes
    { 1, { AV_CODEC_ID_UTVIDEO }},                                                               // Codec_UtVideo
    { 1, { AV_CODEC_ID_DIRAC }},                                                                 // Codec_Dirac
    { 1, { AV_CODEC_ID_DNXHD }},                                                                 // Codec_DNxHD
    { 1, { AV_CODEC_ID_MSVIDEO1 }},                                                              // Codec_MSVideo1
    { 1, { AV_CODEC_ID_8BPS }},                                                                  // Codec_8BPS
    { 1, { AV_CODEC_ID_LOCO }},                                                                  // Codec_LOCO
    { 1, { AV_CODEC_ID_ZMBV }},                                                                  // Codec_ZMBV
    { 1, { AV_CODEC_ID_VCR1 }},                                                                  // Codec_VCR1
    { 1, { AV_CODEC_ID_SNOW }},                                                                  // Codec_Snow
    { 1, { AV_CODEC_ID_FFV1 }},                                                                  // Codec_FFV1
    { 2, { AV_CODEC_ID_V210, AV_CODEC_ID_V410 }, "v210/v410", "v210/v410 uncompressed"},         // Codec_v210
    { 1, { AV_CODEC_ID_JPEG2000 }},                                                              // Codec_JPEG2000
    { 1, { AV_CODEC_ID_VMNC }},                                                                  // Codec_VMNC
    { 1, { AV_CODEC_ID_FLIC }},                                                                  // Codec_FLIC
    { 1, { AV_CODEC_ID_G2M }},                                                                   // Codec_G2M
    { 1, { AV_CODEC_ID_AIC }, "icod", "Apple Intermediate Codec (ICOD)"},                        // Codec_ICOD
    { 1, { AV_CODEC_ID_THP }},                                                                   // Codec_THP
    { 1, { AV_CODEC_ID_HEVC }},                                                                  // Codec_HEVC
    { 1, { AV_CODEC_ID_VP9 }},                                                                   // Codec_VP9
    { 2, { AV_CODEC_ID_TRUEMOTION1, AV_CODEC_ID_TRUEMOTION2 }, "truemotion", "Duck TrueMotion 1/2"}, // Codec_TrueMotion
    { 1, { AV_CODEC_ID_VP7 }},                                                                   // Codec_VP7
    { 1, { AV_CODEC_ID_H264_MVC }, "h264mvc", "H.264 MVC 3D" },                                  // Codec_H264MVC
    { 1, { AV_CODEC_ID_CFHD }},                                                                  // Codec_CineformHD
    { 1, { AV_CODEC_ID_MAGICYUV }},                                                              // Codec_MagicYUV
    { 1, { AV_CODEC_ID_AV1 }},                                                                   // Codec_AV1
    { 1, { AV_CODEC_ID_VVC }},                                                                   // Codec_VVC (comment fixed: was mislabeled Codec_AV1)
};
// clang-format off
// Return the configuration entry for the given codec, lazily filling in a
// missing name/description from the avcodec descriptor of its first codec id.
// The index is not range-checked; callers must pass a valid LAVVideoCodec.
const codec_config_t *get_codec_config(LAVVideoCodec codec)
{
    codec_config_t *entry = &m_codec_config[codec];

    if (const AVCodecDescriptor *desc = avcodec_descriptor_get(entry->codecs[0]))
    {
        if (!entry->name)
            entry->name = desc->name;
        if (!entry->description)
            entry->description = desc->long_name;
    }

    return entry;
}
// Vertically flip a single image plane in place by swapping the top and
// bottom lines pairwise (the middle line of odd-height planes stays put).
// @param buffer start of the plane
// @param stride bytes per line; also the number of bytes copied per line
// @param height number of lines in the plane
// @return 0 on success, -1 if the temporary line buffer could not be allocated
int flip_plane(BYTE *buffer, int stride, int height)
{
    BYTE *line_buffer = (BYTE *)av_malloc(stride);
    // av_malloc can fail; previously this fell straight into memcpy on a
    // NULL pointer. Report the failure instead.
    if (!line_buffer)
        return -1;

    BYTE *cur_front = buffer;
    BYTE *cur_back = buffer + (stride * (height - 1));
    height /= 2;
    for (int i = 0; i < height; i++) {
        // three-way swap of line i and line (height-1-i) through the scratch line
        memcpy(line_buffer, cur_front, stride);
        memcpy(cur_front, cur_back, stride);
        memcpy(cur_back, line_buffer, stride);
        cur_front += stride;
        cur_back -= stride;
    }
    av_freep(&line_buffer);
    return 0;
}
// Fill a DXVA2_ExtendedFormat from avcodec color properties. Each parameter
// takes the corresponding AVCOL_*/AVCHROMA_* enum value; values without a
// case below intentionally leave the respective field untouched, so callers
// can pre-populate defaults. When bClear is set, the format is zeroed first.
// NOTE(review): "range" is treated as a plain boolean (any non-zero value
// maps to full range) - presumably callers pass 0/1 rather than AVColorRange
// constants; confirm at the call sites.
void fillDXVAExtFormat(DXVA2_ExtendedFormat &fmt, int range, int primaries, int matrix, int transfer, int chroma_sample_location, bool bClear)
{
    if (bClear)
        fmt.value = 0;
    // -1 means "unknown": keep whatever is already in the format
    if (range != -1)
        fmt.NominalRange = range ? DXVA2_NominalRange_0_255 : DXVA2_NominalRange_16_235;
    // Color Primaries
    switch(primaries) {
    case AVCOL_PRI_BT709:
        fmt.VideoPrimaries = DXVA2_VideoPrimaries_BT709;
        break;
    case AVCOL_PRI_BT470M:
        fmt.VideoPrimaries = DXVA2_VideoPrimaries_BT470_2_SysM;
        break;
    case AVCOL_PRI_BT470BG:
        fmt.VideoPrimaries = DXVA2_VideoPrimaries_BT470_2_SysBG;
        break;
    case AVCOL_PRI_SMPTE170M:
        fmt.VideoPrimaries = DXVA2_VideoPrimaries_SMPTE170M;
        break;
    case AVCOL_PRI_SMPTE240M:
        fmt.VideoPrimaries = DXVA2_VideoPrimaries_SMPTE240M;
        break;
    // Values from newer Windows SDK (MediaFoundation)
    case AVCOL_PRI_BT2020:
        fmt.VideoPrimaries = MFVideoPrimaries_BT2020;
        break;
    case AVCOL_PRI_SMPTE428:
        fmt.VideoPrimaries = MFVideoPrimaries_XYZ;
        break;
    case AVCOL_PRI_SMPTE431:
        fmt.VideoPrimaries = MFVideoPrimaries_DCI_P3;
        break;
    }
    // Color Space / Transfer Matrix
    switch (matrix) {
    case AVCOL_SPC_BT709:
        fmt.VideoTransferMatrix = DXVA2_VideoTransferMatrix_BT709;
        break;
    case AVCOL_SPC_BT470BG:
    case AVCOL_SPC_SMPTE170M:
        fmt.VideoTransferMatrix = DXVA2_VideoTransferMatrix_BT601;
        break;
    case AVCOL_SPC_SMPTE240M:
        fmt.VideoTransferMatrix = DXVA2_VideoTransferMatrix_SMPTE240M;
        break;
    // Values from newer Windows SDK (MediaFoundation)
    case AVCOL_SPC_BT2020_CL:
    case AVCOL_SPC_BT2020_NCL:
        fmt.VideoTransferMatrix = MFVideoTransferMatrix_BT2020_10;
        break;
    // Custom values, not official standard, but understood by madVR
    case AVCOL_SPC_FCC:
        fmt.VideoTransferMatrix = (DXVA2_VideoTransferMatrix)6;
        break;
    case AVCOL_SPC_YCGCO:
        fmt.VideoTransferMatrix = (DXVA2_VideoTransferMatrix)7;
        break;
    }
    // Color Transfer Function
    switch(transfer) {
    case AVCOL_TRC_BT709:
    case AVCOL_TRC_SMPTE170M:
        fmt.VideoTransferFunction = DXVA2_VideoTransFunc_709;
        break;
    case AVCOL_TRC_BT2020_10:
    case AVCOL_TRC_BT2020_12:
        // BT.2020 constant-luminance needs the dedicated constant-luma transfer function
        fmt.VideoTransferFunction = (matrix == AVCOL_SPC_BT2020_CL) ? MFVideoTransFunc_2020_const : MFVideoTransFunc_2020;
        break;
    case AVCOL_TRC_GAMMA22:
        fmt.VideoTransferFunction = DXVA2_VideoTransFunc_22;
        break;
    case AVCOL_TRC_GAMMA28:
        fmt.VideoTransferFunction = DXVA2_VideoTransFunc_28;
        break;
    case AVCOL_TRC_SMPTE240M:
        fmt.VideoTransferFunction = DXVA2_VideoTransFunc_240M;
        break;
    case AVCOL_TRC_LINEAR:
        fmt.VideoTransferFunction = DXVA2_VideoTransFunc_10;
        break;
    case AVCOL_TRC_LOG:
        fmt.VideoTransferFunction = MFVideoTransFunc_Log_100;
        break;
    case AVCOL_TRC_LOG_SQRT:
        fmt.VideoTransferFunction = MFVideoTransFunc_Log_316;
        break;
    // Values from newer Windows SDK (MediaFoundation)
    case AVCOL_TRC_SMPTEST2084:
        fmt.VideoTransferFunction = MFVideoTransFunc_2084;
        break;
    case AVCOL_TRC_ARIB_STD_B67:
        fmt.VideoTransferFunction = MFVideoTransFunc_HLG;
        break;
    }
    // Chroma location
    switch (chroma_sample_location) {
    case AVCHROMA_LOC_LEFT:
        fmt.VideoChromaSubsampling = DXVA2_VideoChromaSubsampling_MPEG2;
        break;
    case AVCHROMA_LOC_CENTER:
        fmt.VideoChromaSubsampling = DXVA2_VideoChromaSubsampling_MPEG1;
        break;
    case AVCHROMA_LOC_TOPLEFT:
        fmt.VideoChromaSubsampling = DXVA2_VideoChromaSubsampling_Cosited;
        break;
    }
}
// Copy FFmpeg mastering display metadata into the LAV HDR side data struct,
// converting the rational values to double. Either section (primaries or
// luminance) is skipped when the source does not carry it.
void processFFHDRData(MediaSideDataHDR *sd, AVMasteringDisplayMetadata *ff)
{
    if (!sd || !ff)
        return;

    if (ff->has_primaries) {
        // avcodec stores the display primaries in R,G,B order, while the
        // side data expects them in G,B,R order - remap while copying.
        static const int remap[3] = {1, 2, 0};
        for (int i = 0; i < 3; i++) {
            sd->display_primaries_x[i] = av_q2d(ff->display_primaries[remap[i]][0]);
            sd->display_primaries_y[i] = av_q2d(ff->display_primaries[remap[i]][1]);
        }
        sd->white_point_x = av_q2d(ff->white_point[0]);
        sd->white_point_y = av_q2d(ff->white_point[1]);
    }

    if (ff->has_luminance) {
        sd->max_display_mastering_luminance = av_q2d(ff->max_luminance);
        sd->min_display_mastering_luminance = av_q2d(ff->min_luminance);
    }
}
// Translate FFmpeg HDR10+ dynamic metadata (AVDynamicHDRPlus) into the LAV
// MediaSideDataHDR10Plus side data structure.
// @param sd     destination side data (holds at most 3 processing windows)
// @param ff     source dynamic metadata from avcodec
// @param width  frame width in pixels, used to scale relative coordinates
// @param height frame height in pixels
void processFFHDR10PlusData(MediaSideDataHDR10Plus *sd, AVDynamicHDRPlus *ff, int width, int height)
{
    if (!sd || !ff)
        return;
    // the side data struct can only hold three windows
    if (ff->num_windows > 3)
        return;
    sd->num_windows = ff->num_windows;
    for (int i = 0; i < ff->num_windows; i++)
    {
        // The window corners are relative coordinates in [0,1]; scale to
        // pixel positions first, then truncate. (Bugfix: the int cast used
        // to apply to the fraction itself, collapsing every coordinate to
        // 0 before the multiplication.)
        sd->windows[i].upper_left_corner_x = (int)(av_q2d(ff->params[i].window_upper_left_corner_x) * (width - 1));
        sd->windows[i].upper_left_corner_y = (int)(av_q2d(ff->params[i].window_upper_left_corner_y) * (height - 1));
        sd->windows[i].lower_right_corner_x = (int)(av_q2d(ff->params[i].window_lower_right_corner_x) * (width - 1));
        sd->windows[i].lower_right_corner_y = (int)(av_q2d(ff->params[i].window_lower_right_corner_y) * (height - 1));
        sd->windows[i].center_of_ellipse_x = ff->params[i].center_of_ellipse_x;
        sd->windows[i].center_of_ellipse_y = ff->params[i].center_of_ellipse_y;
        sd->windows[i].rotation_angle = ff->params[i].rotation_angle;
        sd->windows[i].semimajor_axis_internal_ellipse = ff->params[i].semimajor_axis_internal_ellipse;
        sd->windows[i].semimajor_axis_external_ellipse = ff->params[i].semimajor_axis_external_ellipse;
        sd->windows[i].semiminor_axis_external_ellipse = ff->params[i].semiminor_axis_external_ellipse;
        sd->windows[i].overlap_process_option = ff->params[i].overlap_process_option;
        for (int k = 0; k < 3; k++)
            sd->windows[i].maxscl[k] = av_q2d(ff->params[i].maxscl[k]);
        sd->windows[i].average_maxrgb = av_q2d(ff->params[i].average_maxrgb);
        sd->windows[i].num_distribution_maxrgb_percentiles = ff->params[i].num_distribution_maxrgb_percentiles;
        for (int k = 0; k < ff->params[i].num_distribution_maxrgb_percentiles; k++)
        {
            sd->windows[i].distribution_maxrgb_percentiles[k].percentage = ff->params[i].distribution_maxrgb[k].percentage;
            sd->windows[i].distribution_maxrgb_percentiles[k].percentile = av_q2d(ff->params[i].distribution_maxrgb[k].percentile);
        }
        sd->windows[i].fraction_bright_pixels = av_q2d(ff->params[i].fraction_bright_pixels);
        // tone mapping curve parameters are only valid when flagged
        sd->windows[i].tone_mapping_flag = ff->params[i].tone_mapping_flag;
        if (sd->windows[i].tone_mapping_flag)
        {
            sd->windows[i].knee_point_x = av_q2d(ff->params[i].knee_point_x);
            sd->windows[i].knee_point_y = av_q2d(ff->params[i].knee_point_y);
            sd->windows[i].num_bezier_curve_anchors = ff->params[i].num_bezier_curve_anchors;
            for (int k = 0; k < ff->params[i].num_bezier_curve_anchors; k++)
                sd->windows[i].bezier_curve_anchors[k] = av_q2d(ff->params[i].bezier_curve_anchors[k]);
        }
        sd->windows[i].color_saturation_mapping_flag = ff->params[i].color_saturation_mapping_flag;
        if (sd->windows[i].color_saturation_mapping_flag)
            sd->windows[i].color_saturation_weight = av_q2d(ff->params[i].color_saturation_weight);
    }
    sd->targeted_system_display_maximum_luminance = av_q2d(ff->targeted_system_display_maximum_luminance);
    // actual-peak-luminance matrices for the targeted and mastering displays
    sd->targeted_system_display_actual_peak_luminance_flag = ff->targeted_system_display_actual_peak_luminance_flag;
    sd->num_rows_targeted_system_display_actual_peak_luminance = ff->num_rows_targeted_system_display_actual_peak_luminance;
    sd->num_cols_targeted_system_display_actual_peak_luminance = ff->num_cols_targeted_system_display_actual_peak_luminance;
    for (int i = 0; i < ff->num_rows_targeted_system_display_actual_peak_luminance; i++)
        for (int j = 0; j < ff->num_cols_targeted_system_display_actual_peak_luminance; j++)
            sd->targeted_system_display_actual_peak_luminance[i][j] = av_q2d(ff->targeted_system_display_actual_peak_luminance[i][j]);
    sd->mastering_display_actual_peak_luminance_flag = ff->mastering_display_actual_peak_luminance_flag;
    sd->num_rows_mastering_display_actual_peak_luminance = ff->num_rows_mastering_display_actual_peak_luminance;
    sd->num_cols_mastering_display_actual_peak_luminance = ff->num_cols_mastering_display_actual_peak_luminance;
    for (int i = 0; i < ff->num_rows_mastering_display_actual_peak_luminance; i++)
        for (int j = 0; j < ff->num_cols_mastering_display_actual_peak_luminance; j++)
            sd->mastering_display_actual_peak_luminance[i][j] = av_q2d(ff->mastering_display_actual_peak_luminance[i][j]);
}
// Translate FFmpeg Dolby Vision metadata (AVDOVIMetadata) into the LAV
// MediaSideDataDOVIMetadata side data structure: the RPU header, the data
// mapping (reshaping curves + NLQ), the color metadata, and up to
// LAV_DOVI_MAX_EXTENSIONS recognized DM extension blocks.
void processFFDOVIData(MediaSideDataDOVIMetadata* sd, const AVDOVIMetadata* ff)
{
    const AVDOVIRpuDataHeader *header = av_dovi_get_header(ff);
    const AVDOVIDataMapping *mapping = av_dovi_get_mapping(ff);
    const AVDOVIColorMetadata *color = av_dovi_get_color(ff);
// Field-copy helpers for the three fixed sub-structures.
// NOTE(review): pasting '.' with an identifier via ## is not a valid
// preprocessing token in ISO C++ (MSVC accepts it); plain "sd->Header.name"
// would expand identically and be portable - confirm before relying on
// other compilers.
#define RPU_HDR(name) sd->Header.##name = header->##name;
#define RPU_MAP(name) sd->Mapping.##name = mapping->##name;
#define RPU_COLOR(name) sd->ColorMetadata.##name = color->##name;
    // --- RPU data header ---
    RPU_HDR(rpu_type);
    RPU_HDR(rpu_format);
    RPU_HDR(vdr_rpu_profile);
    RPU_HDR(vdr_rpu_level);
    RPU_HDR(chroma_resampling_explicit_filter_flag);
    RPU_HDR(coef_data_type);
    RPU_HDR(coef_log2_denom);
    RPU_HDR(vdr_rpu_normalized_idc);
    RPU_HDR(bl_video_full_range_flag);
    RPU_HDR(bl_bit_depth);
    RPU_HDR(el_bit_depth);
    RPU_HDR(vdr_bit_depth);
    RPU_HDR(spatial_resampling_filter_flag);
    RPU_HDR(el_spatial_resampling_filter_flag);
    RPU_HDR(disable_residual_flag);
    // --- Data mapping: per-component reshaping curves ---
    RPU_MAP(vdr_rpu_id);
    RPU_MAP(mapping_color_space);
    RPU_MAP(mapping_chroma_format_idc);
    for (int i = 0; i < 3; i++)
    {
        sd->Mapping.curves[i].num_pivots = mapping->curves[i].num_pivots;
        for (int j = 0; j < AV_DOVI_MAX_PIECES + 1; j++)
            sd->Mapping.curves[i].pivots[j] = mapping->curves[i].pivots[j];
        for (int j = 0; j < AV_DOVI_MAX_PIECES; j++)
        {
            sd->Mapping.curves[i].mapping_idc[j] = mapping->curves[i].mapping_idc[j];
            // poly
            sd->Mapping.curves[i].poly_order[j] = mapping->curves[i].poly_order[j];
            sd->Mapping.curves[i].poly_coef[j][0] = mapping->curves[i].poly_coef[j][0];
            sd->Mapping.curves[i].poly_coef[j][1] = mapping->curves[i].poly_coef[j][1];
            sd->Mapping.curves[i].poly_coef[j][2] = mapping->curves[i].poly_coef[j][2];
            // mmr
            sd->Mapping.curves[i].mmr_order[j] = mapping->curves[i].mmr_order[j];
            sd->Mapping.curves[i].mmr_constant[j] = mapping->curves[i].mmr_constant[j];
            for (int k = 0; k < 3; k++)
                for (int l = 0; l < 7; l++)
                    sd->Mapping.curves[i].mmr_coef[j][k][l] = mapping->curves[i].mmr_coef[j][k][l];
        }
    }
    // --- Data mapping: non-linear quantization (per component) ---
    RPU_MAP(nlq_method_idc);
    RPU_MAP(num_x_partitions);
    RPU_MAP(num_y_partitions);
    for (int i = 0; i < 3; i++)
    {
        sd->Mapping.nlq[i].nlq_offset = mapping->nlq[i].nlq_offset;
        sd->Mapping.nlq[i].vdr_in_max = mapping->nlq[i].vdr_in_max;
        sd->Mapping.nlq[i].linear_deadzone_slope = mapping->nlq[i].linear_deadzone_slope;
        sd->Mapping.nlq[i].linear_deadzone_threshold = mapping->nlq[i].linear_deadzone_threshold;
    }
    // --- Color metadata (3x3 matrices and offsets as doubles) ---
    RPU_COLOR(dm_metadata_id);
    RPU_COLOR(scene_refresh_flag);
    for (int i = 0; i < 9; i++)
    {
        sd->ColorMetadata.ycc_to_rgb_matrix[i] = av_q2d(color->ycc_to_rgb_matrix[i]);
        sd->ColorMetadata.rgb_to_lms_matrix[i] = av_q2d(color->rgb_to_lms_matrix[i]);
    }
    for (int i = 0; i < 3; i++)
    {
        sd->ColorMetadata.ycc_to_rgb_offset[i] = av_q2d(color->ycc_to_rgb_offset[i]);
    }
    RPU_COLOR(signal_eotf);
    RPU_COLOR(signal_eotf_param0);
    RPU_COLOR(signal_eotf_param1);
    RPU_COLOR(signal_eotf_param2);
    RPU_COLOR(signal_bit_depth);
    RPU_COLOR(signal_color_space);
    RPU_COLOR(signal_chroma_format);
    RPU_COLOR(signal_full_range_flag);
    RPU_COLOR(source_min_pq);
    RPU_COLOR(source_max_pq);
    RPU_COLOR(source_diagonal);
#undef RPU_HDR
#undef RPU_MAP
#undef RPU_COLOR
    // --- DM extension blocks ---
    // Copy each recognized extension level; unknown levels are dropped
    // (the destination slot is reused for the next recognized block).
    int LAVExtIdx = 0;
    for (int i = 0; i < ff->num_ext_blocks; i++)
    {
        AVDOVIDmData *ext = av_dovi_get_ext(ff, i);
        auto lavext = &sd->Extensions[LAVExtIdx];
        lavext->level = ext->level;
        switch (ext->level)
        {
        case 1:
            lavext->Level1.min_pq = ext->l1.min_pq;
            lavext->Level1.max_pq = ext->l1.max_pq;
            lavext->Level1.avg_pq = ext->l1.avg_pq;
            break;
        case 2:
            lavext->Level2.target_max_pq = ext->l2.target_max_pq;
            lavext->Level2.trim_slope = ext->l2.trim_slope;
            lavext->Level2.trim_offset = ext->l2.trim_offset;
            lavext->Level2.trim_power = ext->l2.trim_power;
            lavext->Level2.trim_chroma_weight = ext->l2.trim_chroma_weight;
            lavext->Level2.trim_saturation_gain = ext->l2.trim_saturation_gain;
            lavext->Level2.ms_weight = ext->l2.ms_weight;
            break;
        case 3:
            lavext->Level3.min_pq_offset = ext->l3.min_pq_offset;
            lavext->Level3.max_pq_offset = ext->l3.max_pq_offset;
            lavext->Level3.avg_pq_offset = ext->l3.avg_pq_offset;
            break;
        case 4:
            lavext->Level4.anchor_pq = ext->l4.anchor_pq;
            lavext->Level4.anchor_power = ext->l4.anchor_power;
            break;
        case 5:
            lavext->Level5.left_offset = ext->l5.left_offset;
            lavext->Level5.right_offset = ext->l5.right_offset;
            lavext->Level5.top_offset = ext->l5.top_offset;
            lavext->Level5.bottom_offset = ext->l5.bottom_offset;
            break;
        case 6:
            lavext->Level6.max_luminance = ext->l6.max_luminance;
            lavext->Level6.min_luminance = ext->l6.min_luminance;
            lavext->Level6.max_cll = ext->l6.max_cll;
            lavext->Level6.max_fall = ext->l6.max_fall;
            break;
        case 8:
            lavext->Level8.target_display_index = ext->l8.target_display_index;
            lavext->Level8.trim_slope = ext->l8.trim_slope;
            lavext->Level8.trim_offset = ext->l8.trim_offset;
            lavext->Level8.trim_power = ext->l8.trim_power;
            lavext->Level8.trim_chroma_weight = ext->l8.trim_chroma_weight;
            lavext->Level8.trim_saturation_gain = ext->l8.trim_saturation_gain;
            lavext->Level8.ms_weight = ext->l8.ms_weight;
            lavext->Level8.target_mid_contrast = ext->l8.target_mid_contrast;
            lavext->Level8.clip_trim = ext->l8.clip_trim;
            for (int j = 0; j < 6; j++) {
                lavext->Level8.saturation_vector_field[j] = ext->l8.saturation_vector_field[j];
                lavext->Level8.hue_vector_field[j] = ext->l8.hue_vector_field[j];
            }
            break;
        case 9:
            lavext->Level9.source_primary_index = ext->l9.source_primary_index;
            lavext->Level9.white_point_x = av_q2d(ext->l9.source_display_primaries.wp.x);
            lavext->Level9.white_point_y = av_q2d(ext->l9.source_display_primaries.wp.y);
            lavext->Level9.display_primaries_x[0] = av_q2d(ext->l9.source_display_primaries.prim.r.x);
            lavext->Level9.display_primaries_x[1] = av_q2d(ext->l9.source_display_primaries.prim.g.x);
            lavext->Level9.display_primaries_x[2] = av_q2d(ext->l9.source_display_primaries.prim.b.x);
            lavext->Level9.display_primaries_y[0] = av_q2d(ext->l9.source_display_primaries.prim.r.y);
            lavext->Level9.display_primaries_y[1] = av_q2d(ext->l9.source_display_primaries.prim.g.y);
            lavext->Level9.display_primaries_y[2] = av_q2d(ext->l9.source_display_primaries.prim.b.y);
            break;
        case 10:
            lavext->Level10.target_display_index = ext->l10.target_display_index;
            lavext->Level10.target_max_pq = ext->l10.target_max_pq;
            lavext->Level10.target_min_pq = ext->l10.target_min_pq;
            lavext->Level10.target_primary_index = ext->l10.target_primary_index;
            lavext->Level10.white_point_x = av_q2d(ext->l10.target_display_primaries.wp.x);
            lavext->Level10.white_point_y = av_q2d(ext->l10.target_display_primaries.wp.y);
            lavext->Level10.display_primaries_x[0] = av_q2d(ext->l10.target_display_primaries.prim.r.x);
            lavext->Level10.display_primaries_x[1] = av_q2d(ext->l10.target_display_primaries.prim.g.x);
            lavext->Level10.display_primaries_x[2] = av_q2d(ext->l10.target_display_primaries.prim.b.x);
            lavext->Level10.display_primaries_y[0] = av_q2d(ext->l10.target_display_primaries.prim.r.y);
            lavext->Level10.display_primaries_y[1] = av_q2d(ext->l10.target_display_primaries.prim.g.y);
            lavext->Level10.display_primaries_y[2] = av_q2d(ext->l10.target_display_primaries.prim.b.y);
            break;
        case 11:
            lavext->Level11.content_type = ext->l11.content_type;
            lavext->Level11.whitepoint = ext->l11.whitepoint;
            lavext->Level11.reference_mode_flag = ext->l11.reference_mode_flag;
            lavext->Level11.sharpness = ext->l11.sharpness;
            lavext->Level11.noise_reduction = ext->l11.noise_reduction;
            lavext->Level11.mpeg_noise_reduction = ext->l11.mpeg_noise_reduction;
            lavext->Level11.frame_rate_conversion = ext->l11.frame_rate_conversion;
            lavext->Level11.brightness = ext->l11.brightness;
            lavext->Level11.color = ext->l11.color;
            break;
        case 254:
            lavext->Level254.dm_mode = ext->l254.dm_mode;
            lavext->Level254.dm_version_index = ext->l254.dm_version_index;
            break;
        default:
            lavext->level = 0; /* reset level, unknown/not implemented extension */
            break;
        }
        /* if the block is valid/recognized, go to the next one */
        if (lavext->level > 0)
            LAVExtIdx++;
        /* only 32 blocks are allowed, sanity check here */
        if (LAVExtIdx >= LAV_DOVI_MAX_EXTENSIONS)
            break;
    }
}
extern "C" const uint8_t *avpriv_find_start_code(const uint8_t *p, const uint8_t *end, uint32_t *state);
// Scan an elementary stream buffer for sequence markers (MPEG-2 only).
// Returns a bitmask of STATE_EOS_FOUND / STATE_GOP_FOUND. When a sequence
// end code is found, scanning stops and *pos (if provided) receives the
// position just past the start code. "state" carries the start-code parser
// state across calls.
int CheckForSequenceMarkers(AVCodecID codec, const uint8_t *buf, long len, uint32_t *state, const uint8_t **pos)
{
    int status = 0;
    if (!buf || len <= 0)
        return status;

    if (codec == AV_CODEC_ID_MPEG2VIDEO) {
        const uint8_t *p = buf;
        const uint8_t *end = buf + len;
        while (p < end) {
            p = avpriv_find_start_code(p, end, state);
            if (*state == 0x000001b7) { // SEQ_END_CODE
                DbgLog((LOG_TRACE, 50, L"Found SEQ_END_CODE at %p (end: %p)", p, end));
                status |= STATE_EOS_FOUND;
                if (pos)
                    *pos = p;
                break;
            }
            if (*state == 0x000001b8) { // GROUP_START_CODE
                status |= STATE_GOP_FOUND;
            }
        }
    }
    return status;
}
// Thin wrapper around sws_scale that accepts ptrdiff_t strides and narrows
// them to the int strides libswscale expects. Returns -1 when no context
// is given, otherwise the sws_scale result.
int sws_scale2(struct SwsContext *c, const uint8_t *const srcSlice[], const ptrdiff_t srcStride[], int srcSliceY, int srcSliceH, uint8_t *const dst[], const ptrdiff_t dstStride[])
{
    if (!c)
        return -1;

    int srcStrideInt[4];
    int dstStrideInt[4];
    for (int plane = 0; plane < 4; ++plane) {
        srcStrideInt[plane] = static_cast<int>(srcStride[plane]);
        dstStrideInt[plane] = static_cast<int>(dstStride[plane]);
    }
    return sws_scale(c, srcSlice, srcStrideInt, srcSliceY, srcSliceH, dst, dstStrideInt);
}
| 50,406
|
C++
|
.cpp
| 1,076
| 42.310409
| 179
| 0.641326
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| true
| false
| false
|
22,161
|
yuv2rgb.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/pixconv/yuv2rgb.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include <emmintrin.h>
#include <ppl.h>
#include "pixconv_internal.h"
#include "pixconv_sse2_templates.h"
#pragma warning(push)
#pragma warning(disable : 4556)
#define DITHER_STEPS 3
// This function converts 4x2 pixels from the source into 4x2 RGB pixels in the destination
template <LAVPixelFormat inputFormat, int shift, int outFmt, int right_edge, int dithertype, int ycgco>
__forceinline static int yuv2rgb_convert_pixels(const uint8_t *&srcY, const uint8_t *&srcU, const uint8_t *&srcV,
uint8_t *&dst, ptrdiff_t srcStrideY, ptrdiff_t srcStrideUV,
ptrdiff_t dstStride, ptrdiff_t line, const RGBCoeffs *coeffs,
const uint16_t *&dithers, ptrdiff_t pos)
{
__m128i xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7;
xmm7 = _mm_setzero_si128();
// Shift > 0 is for 9/10 bit formats
if (inputFormat == LAVPixFmt_P016)
{
// Load 2 32-bit macro pixels from each line, which contain 4 UV at 16-bit each samples
PIXCONV_LOAD_PIXEL8(xmm0, srcU);
PIXCONV_LOAD_PIXEL8(xmm2, srcU + srcStrideUV);
}
else if (shift > 0)
{
// Load 4 U/V values from line 0/1 into registers
PIXCONV_LOAD_4PIXEL16(xmm1, srcU);
PIXCONV_LOAD_4PIXEL16(xmm3, srcU + srcStrideUV);
PIXCONV_LOAD_4PIXEL16(xmm0, srcV);
PIXCONV_LOAD_4PIXEL16(xmm2, srcV + srcStrideUV);
// Interleave U and V
xmm0 = _mm_unpacklo_epi16(xmm1, xmm0); /* 0V0U0V0U */
xmm2 = _mm_unpacklo_epi16(xmm3, xmm2); /* 0V0U0V0U */
}
else if (inputFormat == LAVPixFmt_NV12)
{
// Load 4 16-bit macro pixels, which contain 4 UV samples
PIXCONV_LOAD_4PIXEL16(xmm0, srcU);
PIXCONV_LOAD_4PIXEL16(xmm2, srcU + srcStrideUV);
// Expand to 16-bit
xmm0 = _mm_unpacklo_epi8(xmm0, xmm7); /* 0V0U0V0U */
xmm2 = _mm_unpacklo_epi8(xmm2, xmm7); /* 0V0U0V0U */
}
else
{
PIXCONV_LOAD_4PIXEL8(xmm1, srcU);
PIXCONV_LOAD_4PIXEL8(xmm3, srcU + srcStrideUV);
PIXCONV_LOAD_4PIXEL8(xmm0, srcV);
PIXCONV_LOAD_4PIXEL8(xmm2, srcV + srcStrideUV);
// Interleave U and V
xmm0 = _mm_unpacklo_epi8(xmm1, xmm0); /* VUVU0000 */
xmm2 = _mm_unpacklo_epi8(xmm3, xmm2); /* VUVU0000 */
// Expand to 16-bit
xmm0 = _mm_unpacklo_epi8(xmm0, xmm7); /* 0V0U0V0U */
xmm2 = _mm_unpacklo_epi8(xmm2, xmm7); /* 0V0U0V0U */
}
// xmm0/xmm2 contain 4 interleaved U/V samples from two lines each in the 16bit parts, still in their native
// bitdepth
// Chroma upsampling required
if (inputFormat == LAVPixFmt_YUV420 || inputFormat == LAVPixFmt_NV12 || inputFormat == LAVPixFmt_YUV422 ||
inputFormat == LAVPixFmt_P016)
{
if (inputFormat == LAVPixFmt_P016)
{
srcU += 8;
srcV += 8;
}
else if (shift > 0 || inputFormat == LAVPixFmt_NV12)
{
srcU += 4;
srcV += 4;
}
else
{
srcU += 2;
srcV += 2;
}
// Cut off the over-read into the stride and replace it with the last valid pixel
if (right_edge)
{
xmm6 = _mm_set_epi32(0, 0xffffffff, 0, 0);
// First line
xmm1 = xmm0;
xmm1 = _mm_slli_si128(xmm1, 4);
xmm1 = _mm_and_si128(xmm1, xmm6);
xmm0 = _mm_andnot_si128(xmm6, xmm0);
xmm0 = _mm_or_si128(xmm0, xmm1);
// Second line
xmm3 = xmm2;
xmm3 = _mm_slli_si128(xmm3, 4);
xmm3 = _mm_and_si128(xmm3, xmm6);
xmm2 = _mm_andnot_si128(xmm6, xmm2);
xmm2 = _mm_or_si128(xmm2, xmm3);
}
// 4:2:0 - upsample to 4:2:2 using 75:25
if (inputFormat == LAVPixFmt_YUV420 || inputFormat == LAVPixFmt_NV12 || inputFormat == LAVPixFmt_P016)
{
// Too high bitdepth, shift down to 14-bit
if (shift >= 7)
{
xmm0 = _mm_srli_epi16(xmm0, shift - 6);
xmm2 = _mm_srli_epi16(xmm2, shift - 6);
}
xmm1 = xmm0;
xmm1 = _mm_add_epi16(xmm1, xmm0); /* 2x line 0 */
xmm1 = _mm_add_epi16(xmm1, xmm0); /* 3x line 0 */
xmm1 = _mm_add_epi16(xmm1, xmm2); /* 3x line 0 + line 1 (10bit) */
xmm3 = xmm2;
xmm3 = _mm_add_epi16(xmm3, xmm2); /* 2x line 1 */
xmm3 = _mm_add_epi16(xmm3, xmm2); /* 3x line 1 */
xmm3 = _mm_add_epi16(xmm3, xmm0); /* 3x line 1 + line 0 (10bit) */
// If the bit depth is too high, we need to reduce it here (max 15bit)
// 14-16 bits need the reduction, because they all result in a 16-bit result
if (shift >= 6)
{
xmm1 = _mm_srli_epi16(xmm1, 1);
xmm3 = _mm_srli_epi16(xmm3, 1);
}
}
else
{
xmm1 = xmm0;
xmm3 = xmm2;
// Shift to maximum of 15-bit, if required
if (shift >= 8)
{
xmm1 = _mm_srli_epi16(xmm1, 1);
xmm3 = _mm_srli_epi16(xmm3, 1);
}
}
// After this step, xmm1 and xmm3 contain 8 16-bit values, V and U interleaved. For 4:2:2, filling 8 to 15 bits
// (original bit depth). For 4:2:0, filling input+2 bits (10 to 15).
// Upsample to 4:4:4 using 100:0, 50:50, 0:100 scheme (MPEG2 chroma siting)
// TODO: MPEG1 chroma siting, use 75:25
xmm0 = xmm1; /* UV UV UV UV */
xmm0 = _mm_unpacklo_epi32(xmm0, xmm7); /* UV 00 UV 00 */
xmm1 = _mm_srli_si128(xmm1, 4); /* UV UV UV 00 */
xmm1 = _mm_unpacklo_epi32(xmm7, xmm1); /* 00 UV 00 UV */
xmm1 = _mm_add_epi16(xmm1, xmm0); /* UV UV UV UV */
xmm1 = _mm_add_epi16(xmm1, xmm0); /* 2UV UV 2UV UV */
xmm0 = _mm_slli_si128(xmm0, 4); /* 00 UV 00 UV */
xmm1 = _mm_add_epi16(xmm1, xmm0); /* 2UV 2UV 2UV 2UV */
// Same for the second row
xmm2 = xmm3; /* UV UV UV UV */
xmm2 = _mm_unpacklo_epi32(xmm2, xmm7); /* UV 00 UV 00 */
xmm3 = _mm_srli_si128(xmm3, 4); /* UV UV UV 00 */
xmm3 = _mm_unpacklo_epi32(xmm7, xmm3); /* 00 UV 00 UV */
xmm3 = _mm_add_epi16(xmm3, xmm2); /* UV UV UV UV */
xmm3 = _mm_add_epi16(xmm3, xmm2); /* 2UV UV 2UV UV */
xmm2 = _mm_slli_si128(xmm2, 4); /* 00 UV 00 UV */
xmm3 = _mm_add_epi16(xmm3, xmm2); /* 2UV 2UV 2UV 2UV */
// Shift the result to 12 bit
// For 10-bit input, we need to shift one bit off, or we exceed the allowed processing depth
// For 8-bit, we need to add one bit
if ((inputFormat == LAVPixFmt_YUV420 && shift > 1) || inputFormat == LAVPixFmt_P016)
{
if (shift >= 5)
{
xmm1 = _mm_srli_epi16(xmm1, 4);
xmm3 = _mm_srli_epi16(xmm3, 4);
}
else
{
xmm1 = _mm_srli_epi16(xmm1, shift - 1);
xmm3 = _mm_srli_epi16(xmm3, shift - 1);
}
}
else if (inputFormat == LAVPixFmt_YUV422)
{
if (shift >= 7)
{
xmm1 = _mm_srli_epi16(xmm1, 4);
xmm3 = _mm_srli_epi16(xmm3, 4);
}
else if (shift > 3)
{
xmm1 = _mm_srli_epi16(xmm1, shift - 3);
xmm3 = _mm_srli_epi16(xmm3, shift - 3);
}
else if (shift < 3)
{
xmm1 = _mm_slli_epi16(xmm1, 3 - shift);
xmm3 = _mm_slli_epi16(xmm3, 3 - shift);
}
}
else if ((inputFormat == LAVPixFmt_YUV420 && shift == 0) || inputFormat == LAVPixFmt_NV12)
{
xmm1 = _mm_slli_epi16(xmm1, 1);
xmm3 = _mm_slli_epi16(xmm3, 1);
}
// 12-bit result, xmm1 & xmm3 with 4 UV combinations each
}
else if (inputFormat == LAVPixFmt_YUV444)
{
if (shift > 0)
{
srcU += 8;
srcV += 8;
}
else
{
srcU += 4;
srcV += 4;
}
// Shift to 12 bit
if (shift > 4)
{
xmm1 = _mm_srli_epi16(xmm0, shift - 4);
xmm3 = _mm_srli_epi16(xmm2, shift - 4);
}
else if (shift < 4)
{
xmm1 = _mm_slli_epi16(xmm0, 4 - shift);
xmm3 = _mm_slli_epi16(xmm2, 4 - shift);
}
else
{
xmm1 = xmm0;
xmm3 = xmm2;
}
}
// Load Y
if (shift > 0)
{
// Load 4 Y values from line 0/1 into registers
PIXCONV_LOAD_4PIXEL16(xmm5, srcY);
PIXCONV_LOAD_4PIXEL16(xmm0, srcY + srcStrideY);
srcY += 8;
}
else
{
PIXCONV_LOAD_4PIXEL8(xmm5, srcY);
PIXCONV_LOAD_4PIXEL8(xmm0, srcY + srcStrideY);
srcY += 4;
xmm5 = _mm_unpacklo_epi8(xmm5, xmm7); /* YYYY0000 (16-bit fields) */
xmm0 = _mm_unpacklo_epi8(xmm0, xmm7); /* YYYY0000 (16-bit fields)*/
}
xmm0 = _mm_unpacklo_epi64(xmm0, xmm5); /* YYYYYYYY */
// After this step, xmm1 & xmm3 contain 4 UV pairs, each in a 16-bit value, filling 12-bit.
if (!ycgco)
{
// YCbCr conversion
// Shift Y to 14 bits
if (shift < 6)
{
xmm0 = _mm_slli_epi16(xmm0, 6 - shift);
}
else if (shift > 6)
{
xmm0 = _mm_srli_epi16(xmm0, shift - 6);
}
xmm0 = _mm_subs_epu16(xmm0, coeffs->Ysub); /* Y-16 (in case of range expansion) */
xmm0 =
_mm_mulhi_epi16(xmm0, coeffs->cy); /* Y*cy (result is 28 bits, with 12 high-bits packed into the result) */
xmm0 = _mm_add_epi16(xmm0, coeffs->rgb_add); /* Y*cy + 16 (in case of range compression) */
xmm1 = _mm_subs_epi16(xmm1, coeffs->CbCr_center); /* move CbCr to proper range */
xmm3 = _mm_subs_epi16(xmm3, coeffs->CbCr_center);
xmm6 = xmm1;
xmm4 = xmm3;
xmm6 = _mm_madd_epi16(xmm6, coeffs->cR_Cr); /* Result is 25 bits (12 from chroma, 13 from coeff) */
xmm4 = _mm_madd_epi16(xmm4, coeffs->cR_Cr);
xmm6 = _mm_srai_epi32(xmm6, 13); /* Reduce to 12 bit */
xmm4 = _mm_srai_epi32(xmm4, 13);
xmm6 = _mm_packs_epi32(xmm6, xmm7); /* Pack back into 16 bit cells */
xmm4 = _mm_packs_epi32(xmm4, xmm7);
xmm6 = _mm_unpacklo_epi64(xmm4, xmm6); /* Interleave both parts */
xmm6 = _mm_add_epi16(xmm6, xmm0); /* R (12bit) */
xmm5 = xmm1;
xmm4 = xmm3;
xmm5 = _mm_madd_epi16(xmm5, coeffs->cG_Cb_cG_Cr); /* Result is 25 bits (12 from chroma, 13 from coeff) */
xmm4 = _mm_madd_epi16(xmm4, coeffs->cG_Cb_cG_Cr);
xmm5 = _mm_srai_epi32(xmm5, 13); /* Reduce to 12 bit */
xmm4 = _mm_srai_epi32(xmm4, 13);
xmm5 = _mm_packs_epi32(xmm5, xmm7); /* Pack back into 16 bit cells */
xmm4 = _mm_packs_epi32(xmm4, xmm7);
xmm5 = _mm_unpacklo_epi64(xmm4, xmm5); /* Interleave both parts */
xmm5 = _mm_add_epi16(xmm5, xmm0); /* G (12bit) */
xmm1 = _mm_madd_epi16(xmm1, coeffs->cB_Cb); /* Result is 25 bits (12 from chroma, 13 from coeff) */
xmm3 = _mm_madd_epi16(xmm3, coeffs->cB_Cb);
xmm1 = _mm_srai_epi32(xmm1, 13); /* Reduce to 12 bit */
xmm3 = _mm_srai_epi32(xmm3, 13);
xmm1 = _mm_packs_epi32(xmm1, xmm7); /* Pack back into 16 bit cells */
xmm3 = _mm_packs_epi32(xmm3, xmm7);
xmm1 = _mm_unpacklo_epi64(xmm3, xmm1); /* Interleave both parts */
xmm1 = _mm_add_epi16(xmm1, xmm0); /* B (12bit) */
}
else
{
// YCgCo conversion
// Shift Y to 12 bits
if (shift < 4)
{
xmm0 = _mm_slli_epi16(xmm0, 4 - shift);
}
else if (shift > 4)
{
xmm0 = _mm_srli_epi16(xmm0, shift - 4);
}
xmm7 = _mm_set1_epi32(0x0000FFFF);
xmm2 = xmm1;
xmm4 = xmm3;
xmm1 = _mm_and_si128(xmm1, xmm7); /* null out the high-order bytes to get the Cg values */
xmm4 = _mm_and_si128(xmm4, xmm7);
xmm3 = _mm_srli_epi32(xmm3, 16); /* right shift the Co values */
xmm2 = _mm_srli_epi32(xmm2, 16);
xmm1 = _mm_packs_epi32(xmm4, xmm1); /* Pack Cg into xmm1 */
xmm3 = _mm_packs_epi32(xmm3, xmm2); /* Pack Co into xmm3 */
xmm2 = coeffs->CbCr_center; /* move CgCo to proper range */
xmm1 = _mm_subs_epi16(xmm1, xmm2);
xmm3 = _mm_subs_epi16(xmm3, xmm2);
xmm2 = xmm0;
xmm2 = _mm_subs_epi16(xmm2, xmm1); /* tmp = Y - Cg */
xmm6 = _mm_adds_epi16(xmm2, xmm3); /* R = tmp + Co */
xmm5 = _mm_adds_epi16(xmm0, xmm1); /* G = Y + Cg */
xmm1 = _mm_subs_epi16(xmm2, xmm3); /* B = tmp - Co */
}
// Dithering
if (dithertype == LAVDither_Random)
{
/* Load random dithering coeffs from the dithers buffer */
int offset = (pos % (DITHER_STEPS * 4 * 2)) * 6;
xmm2 = _mm_load_si128((const __m128i *)(dithers + 0 + offset));
xmm3 = _mm_load_si128((const __m128i *)(dithers + 8 + offset));
xmm4 = _mm_load_si128((const __m128i *)(dithers + 16 + offset));
}
else
{
/* Load dithering coeffs and combine them for two lines */
const uint16_t *d1 = dither_8x8_256[line % 8];
xmm2 = _mm_load_si128((const __m128i *)d1);
const uint16_t *d2 = dither_8x8_256[(line + 1) % 8];
xmm3 = _mm_load_si128((const __m128i *)d2);
xmm4 = xmm2;
xmm2 = _mm_unpacklo_epi64(xmm2, xmm3);
xmm4 = _mm_unpackhi_epi64(xmm4, xmm3);
xmm2 = _mm_srli_epi16(xmm2, 4);
xmm4 = _mm_srli_epi16(xmm4, 4);
xmm3 = xmm4;
}
xmm6 = _mm_adds_epu16(xmm6, xmm2); /* Apply coefficients to the RGB values */
xmm5 = _mm_adds_epu16(xmm5, xmm3);
xmm1 = _mm_adds_epu16(xmm1, xmm4);
xmm6 = _mm_srai_epi16(xmm6, 4); /* Shift to 8 bit */
xmm5 = _mm_srai_epi16(xmm5, 4);
xmm1 = _mm_srai_epi16(xmm1, 4);
xmm2 = _mm_cmpeq_epi8(xmm2, xmm2); /* 0xffffffff,0xffffffff,0xffffffff,0xffffffff */
xmm6 = _mm_packus_epi16(xmm6, xmm7); /* R (lower 8bytes,8bit) * 8 */
xmm5 = _mm_packus_epi16(xmm5, xmm7); /* G (lower 8bytes,8bit) * 8 */
xmm1 = _mm_packus_epi16(xmm1, xmm7); /* B (lower 8bytes,8bit) * 8 */
xmm6 = _mm_unpacklo_epi8(xmm6, xmm2); // 0xff,R
xmm1 = _mm_unpacklo_epi8(xmm1, xmm5); // G,B
xmm2 = xmm1;
xmm1 = _mm_unpackhi_epi16(xmm1, xmm6); // 0xff,RGB * 4 (line 0)
xmm2 = _mm_unpacklo_epi16(xmm2, xmm6); // 0xff,RGB * 4 (line 1)
// TODO: RGB limiting
if (outFmt == 1)
{
_mm_stream_si128((__m128i *)(dst), xmm1);
_mm_stream_si128((__m128i *)(dst + dstStride), xmm2);
dst += 16;
}
else
{
// RGB 24 output is terribly inefficient due to the un-aligned size of 3 bytes per pixel
uint32_t eax;
__declspec(align(16)) uint8_t rgbbuf[32];
*(uint32_t *)rgbbuf = _mm_cvtsi128_si32(xmm1);
xmm1 = _mm_srli_si128(xmm1, 4);
*(uint32_t *)(rgbbuf + 3) = _mm_cvtsi128_si32(xmm1);
xmm1 = _mm_srli_si128(xmm1, 4);
*(uint32_t *)(rgbbuf + 6) = _mm_cvtsi128_si32(xmm1);
xmm1 = _mm_srli_si128(xmm1, 4);
*(uint32_t *)(rgbbuf + 9) = _mm_cvtsi128_si32(xmm1);
*(uint32_t *)(rgbbuf + 16) = _mm_cvtsi128_si32(xmm2);
xmm2 = _mm_srli_si128(xmm2, 4);
*(uint32_t *)(rgbbuf + 19) = _mm_cvtsi128_si32(xmm2);
xmm2 = _mm_srli_si128(xmm2, 4);
*(uint32_t *)(rgbbuf + 22) = _mm_cvtsi128_si32(xmm2);
xmm2 = _mm_srli_si128(xmm2, 4);
*(uint32_t *)(rgbbuf + 25) = _mm_cvtsi128_si32(xmm2);
xmm1 = _mm_loadl_epi64((const __m128i *)(rgbbuf));
xmm2 = _mm_loadl_epi64((const __m128i *)(rgbbuf + 16));
_mm_storel_epi64((__m128i *)(dst), xmm1);
eax = *(uint32_t *)(rgbbuf + 8);
*(uint32_t *)(dst + 8) = eax;
_mm_storel_epi64((__m128i *)(dst + dstStride), xmm2);
eax = *(uint32_t *)(rgbbuf + 24);
*(uint32_t *)(dst + dstStride + 8) = eax;
dst += 12;
}
return 0;
}
// Convert a horizontal slice of a planar YUV frame to packed RGB24/RGB32.
// The template parameters (input format, bit-depth shift, output format,
// dither mode, YCgCo flag) are baked in so the compiler emits one
// specialized loop per combination; the per-pixel work is done by
// yuv2rgb_convert_pixels, which processes 4 pixels on two lines at a time.
// sliceYStart/sliceYEnd delimit the lines handled by this call so the
// function can be used as a multi-threaded slice worker.
// Always returns 0 (worker convention).
template <LAVPixelFormat inputFormat, int shift, int outFmt, int dithertype, int ycgco>
static int __stdcall yuv2rgb_convert(const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV, uint8_t *dst,
                                     int width, int height, ptrdiff_t srcStrideY, ptrdiff_t srcStrideUV,
                                     ptrdiff_t dstStride, ptrdiff_t sliceYStart, ptrdiff_t sliceYEnd,
                                     const RGBCoeffs *coeffs, const uint16_t *dithers)
{
    const uint8_t *y = srcY;
    const uint8_t *u = srcU;
    const uint8_t *v = srcV;
    uint8_t *rgb = dst;
    ptrdiff_t line = sliceYStart;
    ptrdiff_t lastLine = sliceYEnd;
    bool lastLineInOddHeight = false;
    // Stop the inner loop 4 pixels early; the final 4 pixels of every line
    // are handled by the "endofline" kernel variant (4th template arg = 1).
    const ptrdiff_t endx = width - 4;
    const uint16_t *lineDither = dithers;
    _mm_sfence();
    // 4:2:0 needs special handling for the first and the last line
    if (inputFormat == LAVPixFmt_YUV420 || inputFormat == LAVPixFmt_NV12 || inputFormat == LAVPixFmt_P016)
    {
        if (line == 0)
        {
            // First image line: call the kernel with zero strides so it does
            // not read above the start of the planes.
            for (ptrdiff_t i = 0; i < endx; i += 4)
            {
                yuv2rgb_convert_pixels<inputFormat, shift, outFmt, 0, dithertype, ycgco>(y, u, v, rgb, 0, 0, 0, line,
                                                                                        coeffs, lineDither, i);
            }
            yuv2rgb_convert_pixels<inputFormat, shift, outFmt, 1, dithertype, ycgco>(y, u, v, rgb, 0, 0, 0, line,
                                                                                    coeffs, lineDither, 0);
            line = 1;
        }
        // Keep the last line for the epilogue below.
        if (lastLine == height)
            lastLine--;
    }
    else if (lastLine == height && (lastLine & 1))
    {
        // Odd-height non-4:2:0 image: the two-lines-at-a-time loop cannot
        // cover the final line, defer it to the epilogue.
        lastLine--;
        lastLineInOddHeight = true;
    }
    // Main loop: processes two lines per iteration.
    for (; line < lastLine; line += 2)
    {
        if (dithertype == LAVDither_Random)
            lineDither = dithers + (line * 24 * DITHER_STEPS);
        y = srcY + line * srcStrideY;
        if (inputFormat == LAVPixFmt_YUV420 || inputFormat == LAVPixFmt_NV12 || inputFormat == LAVPixFmt_P016)
        {
            // Vertically subsampled chroma: one chroma line per two luma lines
            u = srcU + (line >> 1) * srcStrideUV;
            v = srcV + (line >> 1) * srcStrideUV;
        }
        else
        {
            u = srcU + line * srcStrideUV;
            v = srcV + line * srcStrideUV;
        }
        rgb = dst + line * dstStride;
        for (ptrdiff_t i = 0; i < endx; i += 4)
        {
            yuv2rgb_convert_pixels<inputFormat, shift, outFmt, 0, dithertype, ycgco>(
                y, u, v, rgb, srcStrideY, srcStrideUV, dstStride, line, coeffs, lineDither, i);
        }
        yuv2rgb_convert_pixels<inputFormat, shift, outFmt, 1, dithertype, ycgco>(
            y, u, v, rgb, srcStrideY, srcStrideUV, dstStride, line, coeffs, lineDither, 0);
    }
    // Epilogue: the deferred last image line (only done by the slice that
    // actually ends at the image bottom), again with zero strides so the
    // kernel does not read past the end of the planes.
    if (inputFormat == LAVPixFmt_YUV420 || inputFormat == LAVPixFmt_NV12 || inputFormat == LAVPixFmt_P016 ||
        lastLineInOddHeight)
    {
        if (sliceYEnd == height)
        {
            if (dithertype == LAVDither_Random)
                lineDither = dithers + ((height - 2) * 24 * DITHER_STEPS);
            y = srcY + (height - 1) * srcStrideY;
            if (inputFormat == LAVPixFmt_YUV420 || inputFormat == LAVPixFmt_NV12 || inputFormat == LAVPixFmt_P016)
            {
                u = srcU + ((height >> 1) - 1) * srcStrideUV;
                v = srcV + ((height >> 1) - 1) * srcStrideUV;
            }
            else
            {
                u = srcU + (height - 1) * srcStrideUV;
                v = srcV + (height - 1) * srcStrideUV;
            }
            rgb = dst + (height - 1) * dstStride;
            for (ptrdiff_t i = 0; i < endx; i += 4)
            {
                yuv2rgb_convert_pixels<inputFormat, shift, outFmt, 0, dithertype, ycgco>(y, u, v, rgb, 0, 0, 0, line,
                                                                                        coeffs, lineDither, i);
            }
            yuv2rgb_convert_pixels<inputFormat, shift, outFmt, 1, dithertype, ycgco>(y, u, v, rgb, 0, 0, 0, line,
                                                                                    coeffs, lineDither, 0);
        }
    }
    return 0;
}
// Top-level YUV -> RGB conversion entry point.
// Computes/caches the RGB coefficients, selects the pre-instantiated
// conversion function from the dispatch table (output format x dither mode
// x YCgCo x input format x bit-depth shift), and runs it either directly or
// split into per-thread slices via Concurrency::parallel_for.
DECLARE_CONV_FUNC_IMPL(convert_yuv_rgb)
{
    const RGBCoeffs *coeffs = getRGBCoeffs(width, height);
    if (coeffs == nullptr)
        return E_OUTOFMEMORY;
    if (!m_bRGBConvInit)
    {
        // Lazily fill the dispatch table on first use
        m_bRGBConvInit = TRUE;
        InitRGBConvDispatcher();
    }
    // Transfer matrix 7 = YCgCo (uses a separate conversion path)
    BOOL bYCgCo = (m_ColorProps.VideoTransferMatrix == 7);
    // Number of bits above 8 in the source samples
    int shift = max(bpp - 8, 0);
    ASSERT(shift >= 0 && shift <= 8);
    int outFmt = -1;
    switch (outputFormat)
    {
    case LAVOutPixFmt_RGB24: outFmt = 0; break;
    case LAVOutPixFmt_RGB32: outFmt = 1; break;
    default: ASSERT(0);
    }
    LAVDitherMode ditherMode = m_pSettings->GetDitherMode();
    const uint16_t *dithers =
        (ditherMode == LAVDither_Random) ? GetRandomDitherCoeffs(height, DITHER_STEPS * 3, 4, 0) : nullptr;
    if (ditherMode == LAVDither_Random && dithers == nullptr)
    {
        // Could not allocate random dither coefficients, fall back to ordered
        ditherMode = LAVDither_Ordered;
    }
    // Map the bX formats to their normal counter part, the shift parameter controls this now
    // (relies on the LAVPixelFormat enum placing each bX variant directly after its 8-bit base)
    if (inputFormat == LAVPixFmt_YUV420bX || inputFormat == LAVPixFmt_YUV422bX || inputFormat == LAVPixFmt_YUV444bX)
        inputFormat = (LAVPixelFormat)(inputFormat - 1);
    // P010 has the data in the high bits, so set shift appropriately
    if (inputFormat == LAVPixFmt_P016)
        shift = 8;
    YUVRGBConversionFunc convFn = m_RGBConvFuncs[outFmt][ditherMode][bYCgCo][inputFormat][shift];
    if (convFn == nullptr)
    {
        ASSERT(0);
        return E_FAIL;
    }
    // run conversion, threaded
    if (m_NumThreads <= 1)
    {
        convFn(src[0], src[1], src[2], dst[0], width, height, srcStride[0], srcStride[1], dstStride[0], 0, height,
               coeffs, dithers);
    }
    else
    {
        // Slices overlap by one line for vertically subsampled formats so
        // the two-lines-per-iteration kernel covers every line exactly once.
        const int is_odd =
            (inputFormat == LAVPixFmt_YUV420 || inputFormat == LAVPixFmt_NV12 || inputFormat == LAVPixFmt_P016);
        const ptrdiff_t lines_per_thread = (height / m_NumThreads) & ~1; // even slice heights
        Concurrency::parallel_for(0, m_NumThreads, [&](int i) {
            const ptrdiff_t starty = (i * lines_per_thread);
            const ptrdiff_t endy = (i == (m_NumThreads - 1)) ? height : starty + lines_per_thread + is_odd;
            convFn(src[0], src[1], src[2], dst[0], width, height, srcStride[0], srcStride[1], dstStride[0],
                   starty + (i ? is_odd : 0), endy, coeffs, dithers);
        });
    }
    return S_OK;
}
// Helper macros that populate the m_RGBConvFuncs dispatch table with a
// yuv2rgb_convert template instantiation for every supported combination of
// output format (0 = RGB24, 1 = RGB32), dither mode, YCgCo flag, input
// format and bit-depth shift.
#define CONV_FUNC_INT2(out32, dither, ycgco, format, shift) \
    m_RGBConvFuncs[out32][dither][ycgco][format][shift] = yuv2rgb_convert<format, shift, out32, dither, ycgco>;
// Both output formats for one dither/ycgco/format/shift combination
#define CONV_FUNC_INT(dither, ycgco, format, shift) \
    CONV_FUNC_INT2(0, dither, ycgco, format, shift) \
    CONV_FUNC_INT2(1, dither, ycgco, format, shift)
// All dither modes and YCgCo variants for one format/shift combination
#define CONV_FUNC(format, shift) \
    CONV_FUNC_INT(LAVDither_Ordered, 0, format, shift) \
    CONV_FUNC_INT(LAVDither_Random, 0, format, shift) \
    CONV_FUNC_INT(LAVDither_Ordered, 1, format, shift) \
    CONV_FUNC_INT(LAVDither_Random, 1, format, shift)
// All supported bit-depth shifts for one format (shifts 3/5/7 are not
// generated; no common format needs them)
#define CONV_FUNCX(format) \
    CONV_FUNC(format, 0) \
    CONV_FUNC(format, 1) \
    CONV_FUNC(format, 2) \
    /* CONV_FUNC(format, 3) */ \
    CONV_FUNC(format, 4) \
    /* CONV_FUNC(format, 5) */ \
    CONV_FUNC(format, 6) \
    /* CONV_FUNC(format, 7) */ \
    CONV_FUNC(format, 8)
// Fill the YUV->RGB dispatch table.
// NV12 only exists at 8-bit (shift 0) and P016 carries its data in the high
// bits (shift 8); the planar formats get the full set of shifts.
void CLAVPixFmtConverter::InitRGBConvDispatcher()
{
    ZeroMemory(&m_RGBConvFuncs, sizeof(m_RGBConvFuncs));
    CONV_FUNC(LAVPixFmt_NV12, 0);
    CONV_FUNC(LAVPixFmt_P016, 8);
    CONV_FUNCX(LAVPixFmt_YUV420);
    CONV_FUNCX(LAVPixFmt_YUV422);
    CONV_FUNCX(LAVPixFmt_YUV444);
}
// Compute (and cache) the fixed-point YUV->RGB conversion coefficients for
// the current frame size and color properties. The coefficients are
// recomputed only when the frame dimensions change; the buffer is
// 16-byte-aligned because it is loaded with SSE2 instructions.
// Returns nullptr on allocation failure.
const RGBCoeffs *CLAVPixFmtConverter::getRGBCoeffs(int width, int height)
{
    if (!m_rgbCoeffs || width != swsWidth || height != swsHeight)
    {
        swsWidth = width;
        swsHeight = height;
        if (!m_rgbCoeffs)
        {
            m_rgbCoeffs = (RGBCoeffs *)_aligned_malloc(sizeof(RGBCoeffs), 16);
            if (m_rgbCoeffs == nullptr)
                return nullptr;
        }
        DXVA2_VideoTransferMatrix matrix = (DXVA2_VideoTransferMatrix)m_ColorProps.VideoTransferMatrix;
        if (matrix == DXVA2_VideoTransferMatrix_Unknown)
        {
            // Heuristic: HD-sized content defaults to BT.709, SD to BT.601
            matrix = (swsHeight > 576 || swsWidth > 1024) ? DXVA2_VideoTransferMatrix_BT709
                                                          : DXVA2_VideoTransferMatrix_BT601;
        }
        BOOL inFullRange = (m_ColorProps.NominalRange == DXVA2_NominalRange_0_255);
        // swsOutputRange: 0 = same as input, 1 = limited, 2 = full
        BOOL outFullRange = (swsOutputRange == 0) ? inFullRange : (swsOutputRange == 2);
        int inputWhite, inputBlack, inputChroma, outputWhite, outputBlack;
        if (inFullRange)
        {
            inputWhite = 255;
            inputBlack = 0;
            inputChroma = 1;
        }
        else
        {
            // Limited range: luma 16-235, chroma 16-240
            inputWhite = 235;
            inputBlack = 16;
            inputChroma = 16;
        }
        if (outFullRange)
        {
            outputWhite = 255;
            outputBlack = 0;
        }
        else
        {
            outputWhite = 235;
            outputBlack = 16;
        }
        // Luma weights of the selected RGB->YUV matrix (Kr + Kg + Kb = 1)
        double Kr, Kg, Kb;
        switch (matrix)
        {
        case DXVA2_VideoTransferMatrix_BT601:
            Kr = 0.299;
            Kg = 0.587;
            Kb = 0.114;
            break;
        case DXVA2_VideoTransferMatrix_SMPTE240M:
            Kr = 0.2120;
            Kg = 0.7010;
            Kb = 0.0870;
            break;
        case 6: // FCC
            Kr = 0.300;
            Kg = 0.590;
            Kb = 0.110;
            break;
        case 4: // BT.2020
            Kr = 0.2627;
            Kg = 0.6780;
            Kb = 0.0593;
            break;
        default: DbgLog((LOG_TRACE, 10, L"::getRGBCoeffs(): Unknown color space: %d - defaulting to BT709", matrix));
        // intentional fall-through to BT.709
        case DXVA2_VideoTransferMatrix_BT709:
            Kr = 0.2126;
            Kg = 0.7152;
            Kb = 0.0722;
            break;
        }
        double in_y_range = inputWhite - inputBlack;
        double chr_range = 128 - inputChroma;
        double cspOptionsRGBrange = outputWhite - outputBlack;
        double y_mul, vr_mul, ug_mul, vg_mul, ub_mul;
        y_mul = cspOptionsRGBrange / in_y_range;
        vr_mul = (cspOptionsRGBrange / chr_range) * (1.0 - Kr);
        ug_mul = (cspOptionsRGBrange / chr_range) * (1.0 - Kb) * Kb / Kg;
        vg_mul = (cspOptionsRGBrange / chr_range) * (1.0 - Kr) * Kr / Kg;
        ub_mul = (cspOptionsRGBrange / chr_range) * (1.0 - Kb);
        short sub = min(outputBlack, inputBlack);
        short Ysub = inputBlack - sub;
        short RGB_add1 = outputBlack - sub;
        // Fixed point: luma coefficient scaled by 2^14, chroma by 2^13
        // (matching the shifts applied in the conversion kernel); green
        // coefficients are stored negated for a single madd.
        short cy = short(y_mul * 16384 + 0.5);
        short crv = short(vr_mul * 8192 + 0.5);
        short cgu = short(-ug_mul * 8192 - 0.5);
        short cgv = short(-vg_mul * 8192 - 0.5);
        short cbu = short(ub_mul * 8192 + 0.5);
        m_rgbCoeffs->Ysub = _mm_set1_epi16(Ysub << 6);
        m_rgbCoeffs->cy = _mm_set1_epi16(cy);
        m_rgbCoeffs->CbCr_center = _mm_set1_epi16(128 << 4); // chroma midpoint at 12-bit
        m_rgbCoeffs->cR_Cr = _mm_set1_epi32(crv << 16);               // R
        m_rgbCoeffs->cG_Cb_cG_Cr = _mm_set1_epi32((cgv << 16) + cgu); // G
        m_rgbCoeffs->cB_Cb = _mm_set1_epi32(cbu);                     // B
        m_rgbCoeffs->rgb_add = _mm_set1_epi16(RGB_add1 << 4);
        // YCgCo
        if (matrix == 7)
        {
            m_rgbCoeffs->CbCr_center = _mm_set1_epi16(0x0800);
            // Other Coeffs are not used in YCgCo
        }
    }
    return m_rgbCoeffs;
}
#pragma warning(pop)
| 28,955
|
C++
|
.cpp
| 698
| 31.825215
| 119
| 0.539858
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| true
| true
| false
|
22,162
|
pixconv.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/pixconv/pixconv.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "pixconv_internal.h"
#include "pixconv_sse2_templates.h"
// 8x8 Bayes ordered dithering table, scaled to the 0-255 range for 16->8 conversion
// stored as 16-bit unsigned for optimized SIMD access
// clang-format off
// 8x8 Bayer matrix rows; each entry is the classic 0-63 ordered-dither
// value multiplied by 4 to cover the 0-255 range for 16->8 bit reduction.
__declspec(align(16)) const uint16_t dither_8x8_256[8][8] = {
    { 0, 192, 48, 240, 12, 204, 60, 252 },
    { 128, 64, 176, 112, 140, 76, 188, 124 },
    { 32, 224, 16, 208, 44, 236, 28, 220 },
    { 160, 96, 144, 80, 172, 108, 156, 92 },
    { 8, 200, 56, 248, 4, 196, 52, 244 },
    { 136, 72, 184, 120, 132, 68, 180, 116 },
    { 40, 232, 24, 216, 36, 228, 20, 212 },
    { 168, 104, 152, 88, 164, 100, 148, 84 }
};
// clang-format on
// Plain per-plane copy: duplicates every plane of the source image into the
// destination buffers row by row, honoring the (possibly differing) strides.
DECLARE_CONV_FUNC_IMPL(plane_copy)
{
    LAVOutPixFmtDesc fmtDesc = lav_pixfmt_desc[outputFormat];
    const int rowBytes = width * fmtDesc.codedbytes;
    const int planeCount = max(fmtDesc.planes, 1);
    for (ptrdiff_t p = 0; p < planeCount; p++)
    {
        // Per-plane byte width/height derived from the subsampling factors
        const int copyWidth = rowBytes / fmtDesc.planeWidth[p];
        const int rowCount = height / fmtDesc.planeHeight[p];
        const ptrdiff_t inPitch = srcStride[p];
        const ptrdiff_t outPitch = dstStride[p];
        const uint8_t *const in = src[p];
        uint8_t *const out = dst[p];
        for (ptrdiff_t row = 0; row < rowCount; ++row)
            memcpy(out + row * outPitch, in + row * inPitch, copyWidth);
    }
    return S_OK;
}
// Per-plane copy using SSE2 streaming stores when the destination rows are
// 16-byte aligned; otherwise falls back to plain memcpy per row.
DECLARE_CONV_FUNC_IMPL(plane_copy_sse2)
{
    LAVOutPixFmtDesc fmtDesc = lav_pixfmt_desc[outputFormat];
    const int rowBytes = width * fmtDesc.codedbytes;
    const int planeCount = max(fmtDesc.planes, 1);
    for (ptrdiff_t p = 0; p < planeCount; p++)
    {
        const int copyWidth = rowBytes / fmtDesc.planeWidth[p];
        const int rowCount = height / fmtDesc.planeHeight[p];
        const ptrdiff_t inPitch = srcStride[p];
        const ptrdiff_t outPitch = dstStride[p];
        const uint8_t *const in = src[p];
        uint8_t *const out = dst[p];
        // The aligned path needs both an aligned base pointer and an aligned
        // stride so every row start stays 16-byte aligned.
        const bool aligned = ((outPitch % 16) == 0 && ((intptr_t)out % 16u) == 0);
        if (aligned)
        {
            for (ptrdiff_t row = 0; row < rowCount; ++row)
            {
                PIXCONV_MEMCPY_ALIGNED(out + row * outPitch, in + row * inPitch, copyWidth);
            }
        }
        else
        {
            for (ptrdiff_t row = 0; row < rowCount; ++row)
            {
                memcpy(out + row * outPitch, in + row * inPitch, copyWidth);
            }
        }
    }
    return S_OK;
}
| 3,485
|
C++
|
.cpp
| 87
| 34.517241
| 115
| 0.638002
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| true
| true
| false
|
22,163
|
yuv2yuv_unscaled.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/pixconv/yuv2yuv_unscaled.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include <emmintrin.h>
#include "pixconv_internal.h"
#include "pixconv_sse2_templates.h"
// Convert high-bitdepth planar YUV (little-endian) down to 8-bit, with
// dithering. Template parameter selects the output layout:
//   nv12 = 1 -> NV12 (interleaved UV plane), nv12 = 0 -> YV12-style planar.
template <int nv12> DECLARE_CONV_FUNC_IMPL(convert_yuv_yv_nv12_dither_le)
{
    const ptrdiff_t inYStride = srcStride[0];
    const ptrdiff_t inUVStride = srcStride[1];
    const ptrdiff_t outYStride = dstStride[0];
    const ptrdiff_t outUVStride = dstStride[1];
    ptrdiff_t chromaWidth = width;
    ptrdiff_t chromaHeight = height;
    LAVDitherMode ditherMode = m_pSettings->GetDitherMode();
    const uint16_t *dithers = GetRandomDitherCoeffs(height, 4, 8, 0);
    if (dithers == nullptr)
        ditherMode = LAVDither_Ordered; // fall back if allocation failed
    // Adjust chroma plane dimensions for the subsampling of the input
    if (inputFormat == LAVPixFmt_YUV420bX)
        chromaHeight = chromaHeight >> 1;
    if (inputFormat == LAVPixFmt_YUV420bX || inputFormat == LAVPixFmt_YUV422bX)
        chromaWidth = (chromaWidth + 1) >> 1;
    ptrdiff_t line, i;
    __m128i xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7;
    _mm_sfence();
    // Process Y
    for (line = 0; line < height; ++line)
    {
        // Load dithering coefficients for this line
        if (ditherMode == LAVDither_Random)
        {
            xmm4 = _mm_load_si128((const __m128i *)(dithers + (line << 5) + 0));
            xmm5 = _mm_load_si128((const __m128i *)(dithers + (line << 5) + 8));
            xmm6 = _mm_load_si128((const __m128i *)(dithers + (line << 5) + 16));
            xmm7 = _mm_load_si128((const __m128i *)(dithers + (line << 5) + 24));
        }
        else
        {
            PIXCONV_LOAD_DITHER_COEFFS(xmm7, line, 8, dithers);
            xmm4 = xmm5 = xmm6 = xmm7;
        }
        const uint16_t *const y = (const uint16_t *)(src[0] + line * inYStride);
        uint16_t *const dy = (uint16_t *)(dst[0] + line * outYStride);
        // 32 luma pixels per iteration (4 x 8 16-bit samples -> 32 bytes out)
        for (i = 0; i < width; i += 32)
        {
            // Load pixels into registers, and apply dithering
            PIXCONV_LOAD_PIXEL16_DITHER(xmm0, xmm4, (y + i + 0), bpp);  /* Y0Y0Y0Y0 */
            PIXCONV_LOAD_PIXEL16_DITHER(xmm1, xmm5, (y + i + 8), bpp);  /* Y0Y0Y0Y0 */
            PIXCONV_LOAD_PIXEL16_DITHER(xmm2, xmm6, (y + i + 16), bpp); /* Y0Y0Y0Y0 */
            PIXCONV_LOAD_PIXEL16_DITHER(xmm3, xmm7, (y + i + 24), bpp); /* Y0Y0Y0Y0 */
            xmm0 = _mm_packus_epi16(xmm0, xmm1); /* YYYYYYYY */
            xmm2 = _mm_packus_epi16(xmm2, xmm3); /* YYYYYYYY */
            // Write data back
            PIXCONV_PUT_STREAM(dy + (i >> 1) + 0, xmm0);
            PIXCONV_PUT_STREAM(dy + (i >> 1) + 8, xmm2);
        }
        // Process U/V for chromaHeight lines
        if (line < chromaHeight)
        {
            const uint16_t *const u = (const uint16_t *)(src[1] + line * inUVStride);
            const uint16_t *const v = (const uint16_t *)(src[2] + line * inUVStride);
            // Planar output swaps the chroma planes (dst[1] = V, dst[2] = U,
            // matching the YV12 layout used by convert_yuv_yv)
            uint8_t *const duv = (uint8_t *)(dst[1] + line * outUVStride);
            uint8_t *const du = (uint8_t *)(dst[2] + line * outUVStride);
            uint8_t *const dv = (uint8_t *)(dst[1] + line * outUVStride);
            for (i = 0; i < chromaWidth; i += 16)
            {
                PIXCONV_LOAD_PIXEL16_DITHER(xmm0, xmm4, (u + i + 0), bpp); /* U0U0U0U0 */
                PIXCONV_LOAD_PIXEL16_DITHER(xmm1, xmm5, (u + i + 8), bpp); /* U0U0U0U0 */
                PIXCONV_LOAD_PIXEL16_DITHER(xmm2, xmm6, (v + i + 0), bpp); /* V0V0V0V0 */
                PIXCONV_LOAD_PIXEL16_DITHER(xmm3, xmm7, (v + i + 8), bpp); /* V0V0V0V0 */
                xmm0 = _mm_packus_epi16(xmm0, xmm1); /* UUUUUUUU */
                xmm2 = _mm_packus_epi16(xmm2, xmm3); /* VVVVVVVV */
                if (nv12)
                {
                    // Interleave U and V into the single NV12 chroma plane
                    xmm1 = xmm0;
                    xmm0 = _mm_unpacklo_epi8(xmm0, xmm2);
                    xmm1 = _mm_unpackhi_epi8(xmm1, xmm2);
                    PIXCONV_PUT_STREAM(duv + (i << 1) + 0, xmm0);
                    PIXCONV_PUT_STREAM(duv + (i << 1) + 16, xmm1);
                }
                else
                {
                    PIXCONV_PUT_STREAM(du + i, xmm0);
                    PIXCONV_PUT_STREAM(dv + i, xmm2);
                }
            }
        }
    }
    return S_OK;
}
// Force creation of these two variants
template HRESULT CLAVPixFmtConverter::convert_yuv_yv_nv12_dither_le<0> CONV_FUNC_PARAMS;
template HRESULT CLAVPixFmtConverter::convert_yuv_yv_nv12_dither_le<1> CONV_FUNC_PARAMS;
// Convert high-bitdepth planar 4:2:0/4:2:2 YUV to a P01x-style layout:
// 16-bit luma plane plus a 16-bit interleaved UV plane. The samples are
// only repacked/shifted, not reduced in depth.
DECLARE_CONV_FUNC_IMPL(convert_yuv420_px1x_le)
{
    const ptrdiff_t inYStride = srcStride[0];
    const ptrdiff_t inUVStride = srcStride[1];
    const ptrdiff_t outYStride = dstStride[0];
    const ptrdiff_t outUVStride = dstStride[1];
    // P010/P016 have vertically subsampled chroma; P210/P216-style outputs
    // keep full chroma height
    const ptrdiff_t uvHeight =
        (outputFormat == LAVOutPixFmt_P010 || outputFormat == LAVOutPixFmt_P016) ? (height >> 1) : height;
    const ptrdiff_t uvWidth = (width + 1) >> 1;
    ptrdiff_t line, i;
    __m128i xmm0, xmm1, xmm2;
    _mm_sfence();
    // Process Y
    for (line = 0; line < height; ++line)
    {
        const uint16_t *const y = (const uint16_t *)(src[0] + line * inYStride);
        uint16_t *const d = (uint16_t *)(dst[0] + line * outYStride);
        for (i = 0; i < width; i += 16)
        {
            // Load 2x8 pixels into registers
            PIXCONV_LOAD_PIXEL16X2(xmm0, xmm1, (y + i + 0), (y + i + 8), bpp);
            // and write them out
            PIXCONV_PUT_STREAM(d + i + 0, xmm0);
            PIXCONV_PUT_STREAM(d + i + 8, xmm1);
        }
    }
    // Process UV: interleave the separate U and V planes into one UVUV plane
    for (line = 0; line < uvHeight; ++line)
    {
        const uint16_t *const u = (const uint16_t *)(src[1] + line * inUVStride);
        const uint16_t *const v = (const uint16_t *)(src[2] + line * inUVStride);
        uint16_t *const d = (uint16_t *)(dst[1] + line * outUVStride);
        for (i = 0; i < uvWidth; i += 8)
        {
            // Load 8 pixels into register
            PIXCONV_LOAD_PIXEL16X2(xmm0, xmm1, (v + i), (u + i), bpp); // Load V and U
            xmm2 = xmm0;
            xmm0 = _mm_unpacklo_epi16(xmm1, xmm0); /* UVUV */
            xmm2 = _mm_unpackhi_epi16(xmm1, xmm2); /* UVUV */
            PIXCONV_PUT_STREAM(d + (i << 1) + 0, xmm0);
            PIXCONV_PUT_STREAM(d + (i << 1) + 8, xmm2);
        }
    }
    return S_OK;
}
// Copy planar 8-bit YUV into a YV12-style destination: luma is copied
// straight, while the chroma planes are swapped (source U -> dst[2],
// source V -> dst[1]). Aligned SSE2 copies are used when the destination
// stride and base pointer allow it.
DECLARE_CONV_FUNC_IMPL(convert_yuv_yv)
{
    const uint8_t *const srcLuma = src[0];
    const uint8_t *const srcU = src[1];
    const uint8_t *const srcV = src[2];
    const ptrdiff_t lumaPitchIn = srcStride[0];
    const ptrdiff_t chromaPitchIn = srcStride[1];
    const ptrdiff_t lumaPitchOut = dstStride[0];
    const ptrdiff_t chromaPitchOut = dstStride[1];
    // Chroma plane dimensions follow the input subsampling
    ptrdiff_t chromaW = width;
    ptrdiff_t chromaH = height;
    if (inputFormat == LAVPixFmt_YUV420)
        chromaH >>= 1;
    if (inputFormat == LAVPixFmt_YUV420 || inputFormat == LAVPixFmt_YUV422)
        chromaW = (chromaW + 1) >> 1;
    _mm_sfence();
    // Luma plane
    if ((lumaPitchOut % 16) == 0 && ((intptr_t)dst[0] % 16u) == 0)
    {
        for (ptrdiff_t row = 0; row < height; ++row)
            PIXCONV_MEMCPY_ALIGNED(dst[0] + lumaPitchOut * row, srcLuma + lumaPitchIn * row, width);
    }
    else
    {
        for (ptrdiff_t row = 0; row < height; ++row)
            memcpy(dst[0] + lumaPitchOut * row, srcLuma + lumaPitchIn * row, width);
    }
    // Chroma planes (swapped: U into plane 2, V into plane 1)
    if ((chromaPitchOut % 16) == 0 && ((intptr_t)dst[1] % 16u) == 0)
    {
        for (ptrdiff_t row = 0; row < chromaH; ++row)
        {
            PIXCONV_MEMCPY_ALIGNED_TWO(dst[2] + chromaPitchOut * row, srcU + chromaPitchIn * row,
                                       dst[1] + chromaPitchOut * row, srcV + chromaPitchIn * row, chromaW);
        }
    }
    else
    {
        for (ptrdiff_t row = 0; row < chromaH; ++row)
        {
            memcpy(dst[2] + chromaPitchOut * row, srcU + chromaPitchIn * row, chromaW);
            memcpy(dst[1] + chromaPitchOut * row, srcV + chromaPitchIn * row, chromaW);
        }
    }
    return S_OK;
}
// Convert planar 8-bit YUV 4:2:0 to NV12: straight luma copy, then
// interleave the U and V planes into the single NV12 chroma plane.
DECLARE_CONV_FUNC_IMPL(convert_yuv420_nv12)
{
    const ptrdiff_t inLumaStride = srcStride[0];
    const ptrdiff_t inChromaStride = srcStride[1];
    const ptrdiff_t outLumaStride = dstStride[0];
    const ptrdiff_t outChromaStride = dstStride[1];
    const ptrdiff_t chromaWidth = (width + 1) >> 1;
    const ptrdiff_t chromaHeight = height >> 1;
    ptrdiff_t line, i;
    __m128i xmm0, xmm1, xmm2, xmm3, xmm4;
    _mm_sfence();
    // Y
    for (line = 0; line < height; ++line)
    {
        PIXCONV_MEMCPY_ALIGNED(dst[0] + outLumaStride * line, src[0] + inLumaStride * line, width);
    }
    // U/V
    for (line = 0; line < chromaHeight; ++line)
    {
        const uint8_t *const u = src[1] + line * inChromaStride;
        const uint8_t *const v = src[2] + line * inChromaStride;
        uint8_t *const d = dst[1] + line * outChromaStride;
        // Unrolled path: 32 chroma samples per plane per iteration
        for (i = 0; i < (chromaWidth - 31); i += 32)
        {
            PIXCONV_LOAD_PIXEL8_ALIGNED(xmm0, v + i);
            PIXCONV_LOAD_PIXEL8_ALIGNED(xmm1, u + i);
            PIXCONV_LOAD_PIXEL8_ALIGNED(xmm2, v + i + 16);
            PIXCONV_LOAD_PIXEL8_ALIGNED(xmm3, u + i + 16);
            // Interleave to UVUV... order
            xmm4 = xmm0;
            xmm0 = _mm_unpacklo_epi8(xmm1, xmm0);
            xmm4 = _mm_unpackhi_epi8(xmm1, xmm4);
            xmm1 = xmm2;
            xmm2 = _mm_unpacklo_epi8(xmm3, xmm2);
            xmm1 = _mm_unpackhi_epi8(xmm3, xmm1);
            PIXCONV_PUT_STREAM(d + (i << 1) + 0, xmm0);
            PIXCONV_PUT_STREAM(d + (i << 1) + 16, xmm4);
            PIXCONV_PUT_STREAM(d + (i << 1) + 32, xmm2);
            PIXCONV_PUT_STREAM(d + (i << 1) + 48, xmm1);
        }
        // Remainder: 16 samples per iteration
        for (; i < chromaWidth; i += 16)
        {
            PIXCONV_LOAD_PIXEL8_ALIGNED(xmm0, v + i);
            PIXCONV_LOAD_PIXEL8_ALIGNED(xmm1, u + i);
            xmm2 = xmm0;
            xmm0 = _mm_unpacklo_epi8(xmm1, xmm0);
            xmm2 = _mm_unpackhi_epi8(xmm1, xmm2);
            PIXCONV_PUT_STREAM(d + (i << 1) + 0, xmm0);
            PIXCONV_PUT_STREAM(d + (i << 1) + 16, xmm2);
        }
    }
    return S_OK;
}
// Convert planar 8-bit YUV 4:2:2 to a packed 4:2:2 format.
// Template parameter selects the byte order: uyvy = 1 -> UYVY, 0 -> YUY2.
template <int uyvy> DECLARE_CONV_FUNC_IMPL(convert_yuv422_yuy2_uyvy)
{
    const ptrdiff_t inLumaStride = srcStride[0];
    const ptrdiff_t inChromaStride = srcStride[1];
    const ptrdiff_t outStride = dstStride[0];
    const ptrdiff_t chromaWidth = (width + 1) >> 1;
    ptrdiff_t line, i;
    __m128i xmm0, xmm1, xmm2, xmm3, xmm4, xmm5;
    _mm_sfence();
    for (line = 0; line < height; ++line)
    {
        const uint8_t *const y = src[0] + line * inLumaStride;
        const uint8_t *const u = src[1] + line * inChromaStride;
        const uint8_t *const v = src[2] + line * inChromaStride;
        uint8_t *const d = dst[0] + line * outStride;
        // 16 chroma samples (32 luma pixels / 64 output bytes) per iteration
        for (i = 0; i < chromaWidth; i += 16)
        {
            // Load pixels
            PIXCONV_LOAD_PIXEL8_ALIGNED(xmm0, (y + (i << 1) + 0));  /* YYYY */
            PIXCONV_LOAD_PIXEL8_ALIGNED(xmm1, (y + (i << 1) + 16)); /* YYYY */
            PIXCONV_LOAD_PIXEL8_ALIGNED(xmm2, (u + i));             /* UUUU */
            PIXCONV_LOAD_PIXEL8_ALIGNED(xmm3, (v + i));             /* VVVV */
            // Interleave Us and Vs
            xmm4 = xmm2;
            xmm4 = _mm_unpacklo_epi8(xmm4, xmm3);
            xmm2 = _mm_unpackhi_epi8(xmm2, xmm3);
            // Interlave those with the Ys (operand order decides the packing)
            if (uyvy)
            {
                xmm3 = xmm4;
                xmm3 = _mm_unpacklo_epi8(xmm3, xmm0);
                xmm4 = _mm_unpackhi_epi8(xmm4, xmm0);
                xmm5 = xmm2;
                xmm5 = _mm_unpacklo_epi8(xmm5, xmm1);
                xmm2 = _mm_unpackhi_epi8(xmm2, xmm1);
            }
            else
            {
                xmm3 = xmm0;
                xmm3 = _mm_unpacklo_epi8(xmm3, xmm4);
                xmm4 = _mm_unpackhi_epi8(xmm0, xmm4);
                xmm5 = xmm1;
                xmm5 = _mm_unpacklo_epi8(xmm5, xmm2);
                xmm2 = _mm_unpackhi_epi8(xmm1, xmm2);
            }
            PIXCONV_PUT_STREAM(d + (i << 2) + 0, xmm3);
            PIXCONV_PUT_STREAM(d + (i << 2) + 16, xmm4);
            PIXCONV_PUT_STREAM(d + (i << 2) + 32, xmm5);
            PIXCONV_PUT_STREAM(d + (i << 2) + 48, xmm2);
        }
    }
    return S_OK;
}
// Force creation of these two variants
template HRESULT CLAVPixFmtConverter::convert_yuv422_yuy2_uyvy<0> CONV_FUNC_PARAMS;
template HRESULT CLAVPixFmtConverter::convert_yuv422_yuy2_uyvy<1> CONV_FUNC_PARAMS;
// Convert high-bitdepth planar 4:2:2 YUV (little-endian) to packed 8-bit
// 4:2:2 with dithering. Template parameter: uyvy = 1 -> UYVY, 0 -> YUY2.
template <int uyvy> DECLARE_CONV_FUNC_IMPL(convert_yuv422_yuy2_uyvy_dither_le)
{
    const ptrdiff_t inLumaStride = srcStride[0];
    const ptrdiff_t inChromaStride = srcStride[1];
    const ptrdiff_t outStride = dstStride[0];
    const ptrdiff_t chromaWidth = (width + 1) >> 1;
    LAVDitherMode ditherMode = m_pSettings->GetDitherMode();
    const uint16_t *dithers = GetRandomDitherCoeffs(height, 4, 8, 0);
    if (dithers == nullptr)
        ditherMode = LAVDither_Ordered; // fall back if allocation failed
    ptrdiff_t line, i;
    __m128i xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7;
    _mm_sfence();
    for (line = 0; line < height; ++line)
    {
        const uint16_t *const y = (const uint16_t *)(src[0] + line * inLumaStride);
        const uint16_t *const u = (const uint16_t *)(src[1] + line * inChromaStride);
        const uint16_t *const v = (const uint16_t *)(src[2] + line * inChromaStride);
        uint16_t *const d = (uint16_t *)(dst[0] + line * outStride);
        // Load dithering coefficients for this line
        if (ditherMode == LAVDither_Random)
        {
            xmm4 = _mm_load_si128((const __m128i *)(dithers + (line << 5) + 0));
            xmm5 = _mm_load_si128((const __m128i *)(dithers + (line << 5) + 8));
            xmm6 = _mm_load_si128((const __m128i *)(dithers + (line << 5) + 16));
            xmm7 = _mm_load_si128((const __m128i *)(dithers + (line << 5) + 24));
        }
        else
        {
            PIXCONV_LOAD_DITHER_COEFFS(xmm7, line, 8, dithers);
            xmm4 = xmm5 = xmm6 = xmm7;
        }
        // 8 chroma samples (16 luma pixels / 32 output bytes) per iteration
        for (i = 0; i < chromaWidth; i += 8)
        {
            // Load pixels
            PIXCONV_LOAD_PIXEL16_DITHER(xmm0, xmm4, (y + (i * 2) + 0), bpp); /* YYYY */
            PIXCONV_LOAD_PIXEL16_DITHER(xmm1, xmm5, (y + (i * 2) + 8), bpp); /* YYYY */
            PIXCONV_LOAD_PIXEL16_DITHER(xmm2, xmm6, (u + i), bpp);           /* UUUU */
            PIXCONV_LOAD_PIXEL16_DITHER(xmm3, xmm7, (v + i), bpp);           /* VVVV */
            // Pack Ys
            xmm0 = _mm_packus_epi16(xmm0, xmm1);
            // Interleave Us and Vs
            xmm2 = _mm_packus_epi16(xmm2, xmm2);
            xmm3 = _mm_packus_epi16(xmm3, xmm3);
            xmm2 = _mm_unpacklo_epi8(xmm2, xmm3);
            // Interlave those with the Ys (operand order decides the packing)
            if (uyvy)
            {
                xmm3 = xmm2;
                xmm3 = _mm_unpacklo_epi8(xmm3, xmm0);
                xmm2 = _mm_unpackhi_epi8(xmm2, xmm0);
            }
            else
            {
                xmm3 = xmm0;
                xmm3 = _mm_unpacklo_epi8(xmm3, xmm2);
                xmm2 = _mm_unpackhi_epi8(xmm0, xmm2);
            }
            PIXCONV_PUT_STREAM(d + (i << 1) + 0, xmm3);
            PIXCONV_PUT_STREAM(d + (i << 1) + 8, xmm2);
        }
    }
    return S_OK;
}
// Force creation of these two variants
template HRESULT CLAVPixFmtConverter::convert_yuv422_yuy2_uyvy_dither_le<0> CONV_FUNC_PARAMS;
template HRESULT CLAVPixFmtConverter::convert_yuv422_yuy2_uyvy_dither_le<1> CONV_FUNC_PARAMS;
// Convert NV12 to YV12: copy the luma plane, then de-interleave the NV12
// chroma plane into separate planes (U -> dst[2], V -> dst[1], matching the
// YV12 plane order used elsewhere in this file).
DECLARE_CONV_FUNC_IMPL(convert_nv12_yv12)
{
    const ptrdiff_t inLumaStride = srcStride[0];
    const ptrdiff_t inChromaStride = srcStride[1];
    const ptrdiff_t outLumaStride = dstStride[0];
    const ptrdiff_t outChromaStride = dstStride[1];
    const ptrdiff_t chromaHeight = height >> 1;
    ptrdiff_t line, i;
    __m128i xmm0, xmm1, xmm2, xmm3, xmm7;
    // Mask for the low (U) byte of each 16-bit UV pair
    xmm7 = _mm_set1_epi16(0x00FF);
    _mm_sfence();
    // Copy the y
    for (line = 0; line < height; line++)
    {
        PIXCONV_MEMCPY_ALIGNED(dst[0] + outLumaStride * line, src[0] + inLumaStride * line, width);
    }
    for (line = 0; line < chromaHeight; line++)
    {
        const uint8_t *const uv = src[1] + line * inChromaStride;
        uint8_t *const dv = dst[1] + outChromaStride * line;
        uint8_t *const du = dst[2] + outChromaStride * line;
        // 32 bytes of interleaved UV (16 U + 16 V samples) per iteration
        for (i = 0; i < width; i += 32)
        {
            PIXCONV_LOAD_PIXEL8_ALIGNED(xmm0, uv + i + 0);
            PIXCONV_LOAD_PIXEL8_ALIGNED(xmm1, uv + i + 16);
            xmm2 = xmm0;
            xmm3 = xmm1;
            // null out the high-order bytes to get the U values
            xmm0 = _mm_and_si128(xmm0, xmm7);
            xmm1 = _mm_and_si128(xmm1, xmm7);
            // right shift the V values
            xmm2 = _mm_srli_epi16(xmm2, 8);
            xmm3 = _mm_srli_epi16(xmm3, 8);
            // unpack the values
            xmm0 = _mm_packus_epi16(xmm0, xmm1);
            xmm2 = _mm_packus_epi16(xmm2, xmm3);
            PIXCONV_PUT_STREAM(du + (i >> 1), xmm0);
            PIXCONV_PUT_STREAM(dv + (i >> 1), xmm2);
        }
    }
    return S_OK;
}
// Convert P010/P016 (16-bit samples, data in the high bits) to NV12 by
// reducing every sample to 8 bits with dithering. The plane layouts match
// (luma plane + interleaved UV plane), so only depth reduction is needed.
DECLARE_CONV_FUNC_IMPL(convert_p010_nv12_sse2)
{
    const ptrdiff_t inStride = srcStride[0];
    const ptrdiff_t outStride = dstStride[0];
    const ptrdiff_t chromaHeight = (height >> 1);
    // Source samples are 16-bit, so each row spans width*2 bytes
    const ptrdiff_t byteWidth = width << 1;
    LAVDitherMode ditherMode = m_pSettings->GetDitherMode();
    const uint16_t *dithers = GetRandomDitherCoeffs(height, 2, 8, 0);
    if (dithers == nullptr)
        ditherMode = LAVDither_Ordered; // fall back if allocation failed
    __m128i xmm0, xmm1, xmm2, xmm3;
    _mm_sfence();
    ptrdiff_t line, i;
    // Luma plane
    for (line = 0; line < height; line++)
    {
        // Load dithering coefficients for this line
        if (ditherMode == LAVDither_Random)
        {
            xmm2 = _mm_load_si128((const __m128i *)(dithers + (line << 4) + 0));
            xmm3 = _mm_load_si128((const __m128i *)(dithers + (line << 4) + 8));
        }
        else
        {
            PIXCONV_LOAD_DITHER_COEFFS(xmm2, line, 8, dithers);
            xmm3 = xmm2;
        }
        const uint8_t *y = (src[0] + line * inStride);
        uint8_t *dy = (dst[0] + line * outStride);
        for (i = 0; i < byteWidth; i += 32)
        {
            PIXCONV_LOAD_ALIGNED(xmm0, y + i + 0);
            PIXCONV_LOAD_ALIGNED(xmm1, y + i + 16);
            // apply dithering coeffs
            xmm0 = _mm_adds_epu16(xmm0, xmm2);
            xmm1 = _mm_adds_epu16(xmm1, xmm3);
            // shift and pack to 8-bit
            xmm0 = _mm_packus_epi16(_mm_srli_epi16(xmm0, 8), _mm_srli_epi16(xmm1, 8));
            PIXCONV_PUT_STREAM(dy + (i >> 1), xmm0);
        }
    }
    // Interleaved UV plane, same reduction
    for (line = 0; line < chromaHeight; line++)
    {
        // Load dithering coefficients for this line
        if (ditherMode == LAVDither_Random)
        {
            xmm2 = _mm_load_si128((const __m128i *)(dithers + (line << 4) + 0));
            xmm3 = _mm_load_si128((const __m128i *)(dithers + (line << 4) + 8));
        }
        else
        {
            PIXCONV_LOAD_DITHER_COEFFS(xmm2, line, 8, dithers);
            xmm3 = xmm2;
        }
        const uint8_t *uv = (src[1] + line * inStride);
        uint8_t *duv = (dst[1] + line * outStride);
        for (i = 0; i < byteWidth; i += 32)
        {
            PIXCONV_LOAD_ALIGNED(xmm0, uv + i + 0);
            PIXCONV_LOAD_ALIGNED(xmm1, uv + i + 16);
            // apply dithering coeffs
            xmm0 = _mm_adds_epu16(xmm0, xmm2);
            xmm1 = _mm_adds_epu16(xmm1, xmm3);
            // shift and pack to 8-bit
            xmm0 = _mm_packus_epi16(_mm_srli_epi16(xmm0, 8), _mm_srli_epi16(xmm1, 8));
            PIXCONV_PUT_STREAM(duv + (i >> 1), xmm0);
        }
    }
    return S_OK;
}
| 20,522
|
C++
|
.cpp
| 492
| 32.589431
| 112
| 0.548198
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| true
| true
| true
| false
|
22,164
|
convert_direct.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/pixconv/convert_direct.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include <emmintrin.h>
#include "pixconv_internal.h"
#include "pixconv_sse2_templates.h"
// This function is only designed for NV12-like pixel formats, like NV12, P010, P016, ...
// Direct plane copy for NV12-like formats (NV12, P010, P016, ...): Y plane and
// interleaved UV plane are copied unchanged, using streaming loads/stores for
// the bulk (presumably movntdqa, which requires SSE4.1 and aligned addresses —
// hence the _sse4 suffix) and aligned loads for the tail.
DECLARE_CONV_FUNC_IMPL(plane_copy_direct_sse4)
{
    const ptrdiff_t inStride = srcStride[0];
    const ptrdiff_t outStride = dstStride[0];
    const ptrdiff_t chromaHeight = (height >> 1); // 4:2:0 chroma plane is half the luma height
    // P010/P016 store 2 bytes per sample, everything else 1 byte
    const ptrdiff_t byteWidth =
        (outputFormat == LAVOutPixFmt_P010 || outputFormat == LAVOutPixFmt_P016) ? width << 1 : width;
    // Limit the 64-byte streaming loop so it never overruns either buffer
    const ptrdiff_t stride = min(FFALIGN(byteWidth, 64), min(inStride, outStride));
    __m128i xmm0, xmm1, xmm2, xmm3;
    _mm_sfence();
    ptrdiff_t line, i;
    // Luma plane
    for (line = 0; line < height; line++)
    {
        const uint8_t *y = (src[0] + line * inStride);
        uint8_t *dy = (dst[0] + line * outStride);
        // Bulk copy, 64 bytes per iteration
        for (i = 0; i < (stride - 63); i += 64)
        {
            PIXCONV_STREAM_LOAD(xmm0, y + i + 0);
            PIXCONV_STREAM_LOAD(xmm1, y + i + 16);
            PIXCONV_STREAM_LOAD(xmm2, y + i + 32);
            PIXCONV_STREAM_LOAD(xmm3, y + i + 48);
            _ReadWriteBarrier();
            PIXCONV_PUT_STREAM(dy + i + 0, xmm0);
            PIXCONV_PUT_STREAM(dy + i + 16, xmm1);
            PIXCONV_PUT_STREAM(dy + i + 32, xmm2);
            PIXCONV_PUT_STREAM(dy + i + 48, xmm3);
        }
        // Tail, 16 bytes per iteration
        for (; i < byteWidth; i += 16)
        {
            PIXCONV_LOAD_ALIGNED(xmm0, y + i);
            PIXCONV_PUT_STREAM(dy + i, xmm0);
        }
    }
    // Interleaved chroma plane
    for (line = 0; line < chromaHeight; line++)
    {
        const uint8_t *uv = (src[1] + line * inStride);
        uint8_t *duv = (dst[1] + line * outStride);
        for (i = 0; i < (stride - 63); i += 64)
        {
            PIXCONV_STREAM_LOAD(xmm0, uv + i + 0);
            PIXCONV_STREAM_LOAD(xmm1, uv + i + 16);
            PIXCONV_STREAM_LOAD(xmm2, uv + i + 32);
            PIXCONV_STREAM_LOAD(xmm3, uv + i + 48);
            _ReadWriteBarrier();
            PIXCONV_PUT_STREAM(duv + i + 0, xmm0);
            PIXCONV_PUT_STREAM(duv + i + 16, xmm1);
            PIXCONV_PUT_STREAM(duv + i + 32, xmm2);
            PIXCONV_PUT_STREAM(duv + i + 48, xmm3);
        }
        for (; i < byteWidth; i += 16)
        {
            PIXCONV_LOAD_ALIGNED(xmm0, uv + i);
            PIXCONV_PUT_STREAM(duv + i, xmm0);
        }
    }
    return S_OK;
}
// Convert NV12 to YV12: the Y plane is copied as-is, the interleaved UV plane is
// split into separate planes. NV12 chroma bytes alternate U,V — the even bytes
// (U) are extracted with the 0x00FF word mask into du = dst[2], the odd bytes
// (V) via a right shift into dv = dst[1] (YV12 plane order is Y, V, U).
DECLARE_CONV_FUNC_IMPL(convert_nv12_yv12_direct_sse4)
{
    const ptrdiff_t inStride = srcStride[0];
    const ptrdiff_t outStride = dstStride[0];
    const ptrdiff_t outChromaStride = dstStride[1];
    const ptrdiff_t chromaHeight = (height >> 1); // 4:2:0 chroma plane is half the luma height
    // Limit the 64-byte streaming loop so it never overruns either buffer
    const ptrdiff_t stride = min(FFALIGN(width, 64), min(inStride, outStride));
    __m128i xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm7;
    xmm7 = _mm_set1_epi16(0x00FF); // mask selecting the low (U) byte of every 16-bit UV pair
    _mm_sfence();
    ptrdiff_t line, i;
    // Luma plane: straight copy
    for (line = 0; line < height; line++)
    {
        const uint8_t *y = (src[0] + line * inStride);
        uint8_t *dy = (dst[0] + line * outStride);
        for (i = 0; i < (stride - 63); i += 64)
        {
            PIXCONV_STREAM_LOAD(xmm0, y + i + 0);
            PIXCONV_STREAM_LOAD(xmm1, y + i + 16);
            PIXCONV_STREAM_LOAD(xmm2, y + i + 32);
            PIXCONV_STREAM_LOAD(xmm3, y + i + 48);
            _ReadWriteBarrier();
            PIXCONV_PUT_STREAM(dy + i + 0, xmm0);
            PIXCONV_PUT_STREAM(dy + i + 16, xmm1);
            PIXCONV_PUT_STREAM(dy + i + 32, xmm2);
            PIXCONV_PUT_STREAM(dy + i + 48, xmm3);
        }
        // Tail copy, 16 bytes at a time
        for (; i < width; i += 16)
        {
            PIXCONV_LOAD_ALIGNED(xmm0, y + i);
            PIXCONV_PUT_STREAM(dy + i, xmm0);
        }
    }
    // Chroma: de-interleave UV into planar U and V
    for (line = 0; line < chromaHeight; line++)
    {
        const uint8_t *uv = (src[1] + line * inStride);
        uint8_t *dv = (dst[1] + line * outChromaStride);
        uint8_t *du = (dst[2] + line * outChromaStride);
        // 64 interleaved input bytes produce 32 U bytes and 32 V bytes
        for (i = 0; i < (stride - 63); i += 64)
        {
            PIXCONV_STREAM_LOAD(xmm0, uv + i + 0);
            PIXCONV_STREAM_LOAD(xmm1, uv + i + 16);
            PIXCONV_STREAM_LOAD(xmm2, uv + i + 32);
            PIXCONV_STREAM_LOAD(xmm3, uv + i + 48);
            _ReadWriteBarrier();
            // process first pair
            xmm4 = _mm_srli_epi16(xmm0, 8);    // odd bytes (V) into low byte position
            xmm5 = _mm_srli_epi16(xmm1, 8);
            xmm0 = _mm_and_si128(xmm0, xmm7);  // keep even bytes (U)
            xmm1 = _mm_and_si128(xmm1, xmm7);
            xmm0 = _mm_packus_epi16(xmm0, xmm1);
            xmm4 = _mm_packus_epi16(xmm4, xmm5);
            PIXCONV_PUT_STREAM(du + (i >> 1) + 0, xmm0);
            PIXCONV_PUT_STREAM(dv + (i >> 1) + 0, xmm4);
            // and second pair
            xmm4 = _mm_srli_epi16(xmm2, 8);
            xmm5 = _mm_srli_epi16(xmm3, 8);
            xmm2 = _mm_and_si128(xmm2, xmm7);
            xmm3 = _mm_and_si128(xmm3, xmm7);
            xmm2 = _mm_packus_epi16(xmm2, xmm3);
            xmm4 = _mm_packus_epi16(xmm4, xmm5);
            PIXCONV_PUT_STREAM(du + (i >> 1) + 16, xmm2);
            PIXCONV_PUT_STREAM(dv + (i >> 1) + 16, xmm4);
        }
        // Tail: same split, 32 input bytes at a time
        for (; i < width; i += 32)
        {
            PIXCONV_LOAD_ALIGNED(xmm0, uv + i + 0);
            PIXCONV_LOAD_ALIGNED(xmm1, uv + i + 16);
            xmm4 = _mm_srli_epi16(xmm0, 8);
            xmm5 = _mm_srli_epi16(xmm1, 8);
            xmm0 = _mm_and_si128(xmm0, xmm7);
            xmm1 = _mm_and_si128(xmm1, xmm7);
            xmm0 = _mm_packus_epi16(xmm0, xmm1);
            xmm4 = _mm_packus_epi16(xmm4, xmm5);
            PIXCONV_PUT_STREAM(du + (i >> 1), xmm0);
            PIXCONV_PUT_STREAM(dv + (i >> 1), xmm4);
        }
    }
    return S_OK;
}
// Convert P010/P016 to 8-bit NV12 using streaming loads for the bulk: each
// 16-bit sample gets a dither offset added (saturating), is shifted right by 8
// and packed to a byte. Layout (interleaved UV) is unchanged.
DECLARE_CONV_FUNC_IMPL(convert_p010_nv12_direct_sse4)
{
    const ptrdiff_t inStride = srcStride[0];
    const ptrdiff_t outStride = dstStride[0];
    const ptrdiff_t chromaHeight = (height >> 1); // 4:2:0 chroma plane is half the luma height
    const ptrdiff_t byteWidth = width << 1;       // 2 bytes per input sample
    // Output bytes are half the input bytes, so compare against outStride << 1
    const ptrdiff_t stride = min(FFALIGN(byteWidth, 64), min(inStride, outStride << 1));
    LAVDitherMode ditherMode = m_pSettings->GetDitherMode();
    // 4 coefficient sets of 8 entries per line -> 32 uint16 per line (hence "line << 5" below)
    const uint16_t *dithers = GetRandomDitherCoeffs(height, 4, 8, 0);
    if (dithers == nullptr)
        ditherMode = LAVDither_Ordered; // fall back to ordered dithering when no random coeffs exist
    __m128i xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7;
    _mm_sfence();
    ptrdiff_t line, i;
    // Luma plane
    for (line = 0; line < height; line++)
    {
        // Load dithering coefficients for this line
        if (ditherMode == LAVDither_Random)
        {
            xmm4 = _mm_load_si128((const __m128i *)(dithers + (line << 5) + 0));
            xmm5 = _mm_load_si128((const __m128i *)(dithers + (line << 5) + 8));
            xmm6 = _mm_load_si128((const __m128i *)(dithers + (line << 5) + 16));
            xmm7 = _mm_load_si128((const __m128i *)(dithers + (line << 5) + 24));
        }
        else
        {
            PIXCONV_LOAD_DITHER_COEFFS(xmm7, line, 8, dithers);
            xmm4 = xmm5 = xmm6 = xmm7;
        }
        const uint8_t *y = (src[0] + line * inStride);
        uint8_t *dy = (dst[0] + line * outStride);
        // Bulk: 64 input bytes (32 samples) per iteration -> 32 output bytes
        for (i = 0; i < (stride - 63); i += 64)
        {
            PIXCONV_STREAM_LOAD(xmm0, y + i + 0);
            PIXCONV_STREAM_LOAD(xmm1, y + i + 16);
            PIXCONV_STREAM_LOAD(xmm2, y + i + 32);
            PIXCONV_STREAM_LOAD(xmm3, y + i + 48);
            _ReadWriteBarrier();
            // apply dithering coeffs
            xmm0 = _mm_adds_epu16(xmm0, xmm4);
            xmm1 = _mm_adds_epu16(xmm1, xmm5);
            xmm2 = _mm_adds_epu16(xmm2, xmm6);
            xmm3 = _mm_adds_epu16(xmm3, xmm7);
            // shift and pack to 8-bit
            xmm0 = _mm_packus_epi16(_mm_srli_epi16(xmm0, 8), _mm_srli_epi16(xmm1, 8));
            xmm2 = _mm_packus_epi16(_mm_srli_epi16(xmm2, 8), _mm_srli_epi16(xmm3, 8));
            PIXCONV_PUT_STREAM(dy + (i >> 1) + 0, xmm0);
            PIXCONV_PUT_STREAM(dy + (i >> 1) + 16, xmm2);
        }
        // Tail: 32 input bytes per iteration
        for (; i < byteWidth; i += 32)
        {
            PIXCONV_LOAD_ALIGNED(xmm0, y + i + 0);
            PIXCONV_LOAD_ALIGNED(xmm1, y + i + 16);
            // apply dithering coeffs
            xmm0 = _mm_adds_epu16(xmm0, xmm4);
            xmm1 = _mm_adds_epu16(xmm1, xmm5);
            // shift and pack to 8-bit
            xmm0 = _mm_packus_epi16(_mm_srli_epi16(xmm0, 8), _mm_srli_epi16(xmm1, 8));
            PIXCONV_PUT_STREAM(dy + (i >> 1), xmm0);
        }
    }
    // Interleaved chroma plane, identical processing
    for (line = 0; line < chromaHeight; line++)
    {
        // Load dithering coefficients for this line
        if (ditherMode == LAVDither_Random)
        {
            xmm4 = _mm_load_si128((const __m128i *)(dithers + (line << 5) + 0));
            xmm5 = _mm_load_si128((const __m128i *)(dithers + (line << 5) + 8));
            xmm6 = _mm_load_si128((const __m128i *)(dithers + (line << 5) + 16));
            xmm7 = _mm_load_si128((const __m128i *)(dithers + (line << 5) + 24));
        }
        else
        {
            PIXCONV_LOAD_DITHER_COEFFS(xmm7, line, 8, dithers);
            xmm4 = xmm5 = xmm6 = xmm7;
        }
        const uint8_t *uv = (src[1] + line * inStride);
        uint8_t *duv = (dst[1] + line * outStride);
        for (i = 0; i < (stride - 63); i += 64)
        {
            PIXCONV_STREAM_LOAD(xmm0, uv + i + 0);
            PIXCONV_STREAM_LOAD(xmm1, uv + i + 16);
            PIXCONV_STREAM_LOAD(xmm2, uv + i + 32);
            PIXCONV_STREAM_LOAD(xmm3, uv + i + 48);
            _ReadWriteBarrier();
            // apply dithering coeffs
            xmm0 = _mm_adds_epu16(xmm0, xmm4);
            xmm1 = _mm_adds_epu16(xmm1, xmm5);
            xmm2 = _mm_adds_epu16(xmm2, xmm6);
            xmm3 = _mm_adds_epu16(xmm3, xmm7);
            // shift and pack to 8-bit
            xmm0 = _mm_packus_epi16(_mm_srli_epi16(xmm0, 8), _mm_srli_epi16(xmm1, 8));
            xmm2 = _mm_packus_epi16(_mm_srli_epi16(xmm2, 8), _mm_srli_epi16(xmm3, 8));
            PIXCONV_PUT_STREAM(duv + (i >> 1) + 0, xmm0);
            PIXCONV_PUT_STREAM(duv + (i >> 1) + 16, xmm2);
        }
        for (; i < byteWidth; i += 32)
        {
            PIXCONV_LOAD_ALIGNED(xmm0, uv + i + 0);
            PIXCONV_LOAD_ALIGNED(xmm1, uv + i + 16);
            // apply dithering coeffs
            xmm0 = _mm_adds_epu16(xmm0, xmm4);
            xmm1 = _mm_adds_epu16(xmm1, xmm5);
            // shift and pack to 8-bit
            xmm0 = _mm_packus_epi16(_mm_srli_epi16(xmm0, 8), _mm_srli_epi16(xmm1, 8));
            PIXCONV_PUT_STREAM(duv + (i >> 1), xmm0);
        }
    }
    return S_OK;
}
| 11,269
|
C++
|
.cpp
| 270
| 32.27037
| 102
| 0.532925
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| false
| true
| true
| false
|
22,165
|
rgb2rgb_unscaled.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/pixconv/rgb2rgb_unscaled.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include <emmintrin.h>
#include "pixconv_internal.h"
#include "pixconv_sse2_templates.h"
// Convert 48-bit RGB (16 bits per component) to 32-bit RGB: dither each 16-bit
// component down to 8 bits and repack. Processes 8 pixels (24 components, three
// 16-byte loads) per iteration; the shuffle mask reverses each pixel's component
// order into BGR and leaves a zeroed word for the fourth (X/alpha) byte, so the
// alpha byte of the output is written as 0.
DECLARE_CONV_FUNC_IMPL(convert_rgb48_rgb32_ssse3)
{
    const uint16_t *rgb = (const uint16_t *)src[0];
    const ptrdiff_t inStride = srcStride[0] >> 1; // stride in 16-bit units
    const ptrdiff_t outStride = dstStride[0];
    ptrdiff_t line, i;
    int processWidth = width * 3; // 3 components per pixel
    LAVDitherMode ditherMode = m_pSettings->GetDitherMode();
    // 4 coefficient sets of 8 entries per line -> 32 uint16 per line ("line << 5" below)
    const uint16_t *dithers = GetRandomDitherCoeffs(height, 4, 8, 0);
    if (dithers == nullptr)
        ditherMode = LAVDither_Ordered; // fall back to ordered dithering when no random coeffs exist
    __m128i xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7;
    // Per-pixel word shuffle: reverse R,G,B -> B,G,R and zero the alpha slot (-1 bytes)
    __m128i mask = _mm_setr_epi8(4, 5, 2, 3, 0, 1, -1, -1, 10, 11, 8, 9, 6, 7, -1, -1);
    _mm_sfence();
    for (line = 0; line < height; line++)
    {
        __m128i *dst128 = (__m128i *)(dst[0] + line * outStride);
        // Load dithering coefficients for this line
        if (ditherMode == LAVDither_Random)
        {
            xmm5 = _mm_load_si128((const __m128i *)(dithers + (line << 5) + 0));
            xmm6 = _mm_load_si128((const __m128i *)(dithers + (line << 5) + 8));
            xmm7 = _mm_load_si128((const __m128i *)(dithers + (line << 5) + 16));
        }
        else
        {
            PIXCONV_LOAD_DITHER_COEFFS(xmm7, line, 8, dithers);
            xmm5 = xmm6 = xmm7;
        }
        for (i = 0; i < processWidth; i += 24)
        {
            PIXCONV_LOAD_ALIGNED(xmm0, (rgb + i)); /* load */
            PIXCONV_LOAD_ALIGNED(xmm1, (rgb + i + 8));
            PIXCONV_LOAD_ALIGNED(xmm2, (rgb + i + 16));
            xmm0 = _mm_adds_epu16(xmm0, xmm5); /* apply dithering coefficients */
            xmm1 = _mm_adds_epu16(xmm1, xmm6);
            xmm2 = _mm_adds_epu16(xmm2, xmm7);
            xmm0 = _mm_srli_epi16(xmm0, 8); /* shift to 8-bit */
            xmm1 = _mm_srli_epi16(xmm1, 8);
            xmm2 = _mm_srli_epi16(xmm2, 8);
            // Re-align pixel boundaries across the three loads (alignr) and
            // shuffle each 2-pixel group into BGR0 word layout
            xmm3 = _mm_shuffle_epi8(xmm0, mask);
            xmm4 = _mm_shuffle_epi8(_mm_alignr_epi8(xmm1, xmm0, 12), mask);
            xmm0 = _mm_shuffle_epi8(_mm_alignr_epi8(xmm2, xmm1, 8), mask);
            xmm1 = _mm_shuffle_epi8(_mm_alignr_epi8(xmm2, xmm2, 4), mask);
            // Pack the word-sized components down to bytes (8 pixels -> 32 bytes)
            xmm3 = _mm_packus_epi16(xmm3, xmm4);
            xmm0 = _mm_packus_epi16(xmm0, xmm1);
            _mm_stream_si128(dst128++, xmm3);
            _mm_stream_si128(dst128++, xmm0);
        }
        rgb += inStride;
    }
    return S_OK;
}
// Convert 48-bit RGB to RGB24 (out32 == 0) or RGB32 (out32 == 1) in two steps:
// 1) byte-swap/reorder into BGR48LE using swscale (into a temporary buffer),
// 2) dither down to 8-bit with SSE2, then (for out32) expand the 24-bit line
//    buffer into 32-bit pixels with plain integer repacking.
template <int out32> DECLARE_CONV_FUNC_IMPL(convert_rgb48_rgb)
{
    // Byte Swap to BGR layout
    uint8_t *dstBS[4] = {nullptr};
    dstBS[0] = (BYTE *)av_malloc(height * srcStride[0]);
    if (dstBS[0] == nullptr)
        return E_OUTOFMEMORY;
    SwsContext *ctx = GetSWSContext(width, height, GetFFInput(), AV_PIX_FMT_BGR48LE, SWS_POINT);
    sws_scale2(ctx, src, srcStride, 0, height, dstBS, srcStride);
    // Dither to RGB24/32 with SSE2
    const uint16_t *rgb = (const uint16_t *)dstBS[0];
    const ptrdiff_t inStride = srcStride[0] >> 1; // stride in 16-bit units
    const ptrdiff_t outStride = dstStride[0];
    ptrdiff_t line, i;
    int processWidth = width * 3; // 3 components per pixel
    LAVDitherMode ditherMode = m_pSettings->GetDitherMode();
    // 2 coefficient sets of 8 entries per line -> 16 uint16 per line ("line << 4" below)
    const uint16_t *dithers = GetRandomDitherCoeffs(height, 2, 8, 0);
    if (dithers == nullptr)
        ditherMode = LAVDither_Ordered; // fall back to ordered dithering when no random coeffs exist
    __m128i xmm0, xmm1, xmm6, xmm7;
    // For RGB32 output, dither into a single-line RGB24 scratch buffer first
    uint8_t *rgb24buffer = nullptr;
    if (out32)
    {
        rgb24buffer = (uint8_t *)av_malloc(outStride + AV_INPUT_BUFFER_PADDING_SIZE);
        if (rgb24buffer == nullptr)
        {
            av_freep(&dstBS[0]);
            return E_OUTOFMEMORY;
        }
    }
    _mm_sfence();
    for (line = 0; line < height; line++)
    {
        __m128i *dst128 = nullptr;
        if (out32)
        {
            dst128 = (__m128i *)rgb24buffer;
        }
        else
        {
            dst128 = (__m128i *)(dst[0] + line * outStride);
        }
        // Load dithering coefficients for this line
        if (ditherMode == LAVDither_Random)
        {
            xmm6 = _mm_load_si128((const __m128i *)(dithers + (line << 4) + 0));
            xmm7 = _mm_load_si128((const __m128i *)(dithers + (line << 4) + 8));
        }
        else
        {
            PIXCONV_LOAD_DITHER_COEFFS(xmm7, line, 8, dithers);
            xmm6 = xmm7;
        }
        for (i = 0; i < processWidth; i += 16)
        {
            PIXCONV_LOAD_ALIGNED(xmm0, (rgb + i)); /* load */
            PIXCONV_LOAD_ALIGNED(xmm1, (rgb + i + 8));
            xmm0 = _mm_adds_epu16(xmm0, xmm6); /* apply dithering coefficients */
            xmm1 = _mm_adds_epu16(xmm1, xmm7);
            xmm0 = _mm_srli_epi16(xmm0, 8); /* shift to 8-bit */
            xmm1 = _mm_srli_epi16(xmm1, 8);
            xmm0 = _mm_packus_epi16(xmm0, xmm1);
            _mm_stream_si128(dst128++, xmm0);
        }
        rgb += inStride;
        if (out32)
        {
            // Expand the packed 24-bit line into 32-bit pixels: 3 source dwords
            // (4 pixels) become 4 destination dwords. The X/alpha byte is not
            // cleared — it carries whatever byte follows in the 24-bit stream.
            uint32_t *src24 = (uint32_t *)rgb24buffer;
            uint32_t *dst32 = (uint32_t *)(dst[0] + line * outStride);
            for (i = 0; i < width; i += 4)
            {
                uint32_t sa = src24[0];
                uint32_t sb = src24[1];
                uint32_t sc = src24[2];
                dst32[i + 0] = sa;
                dst32[i + 1] = (sa >> 24) | (sb << 8);
                dst32[i + 2] = (sb >> 16) | (sc << 16);
                dst32[i + 3] = sc >> 8;
                src24 += 3;
            }
        }
    }
    if (out32)
        av_freep(&rgb24buffer);
    av_freep(&dstBS[0]);
    return S_OK;
}
// Force creation of both output variants (RGB24 and RGB32)
template HRESULT CLAVPixFmtConverter::convert_rgb48_rgb<0> CONV_FUNC_PARAMS;
template HRESULT CLAVPixFmtConverter::convert_rgb48_rgb<1> CONV_FUNC_PARAMS;
| 6,463
|
C++
|
.cpp
| 164
| 31.176829
| 96
| 0.56252
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| true
| true
| true
| false
|
22,166
|
yuv420_yuy2.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/pixconv/yuv420_yuy2.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include <emmintrin.h>
#include <ppl.h>
#include "pixconv_internal.h"
#include "pixconv_sse2_templates.h"
#define DITHER_STEPS 2
// This function converts 8x2 pixels from the source into 8x2 YUY2 pixels in the destination
// Converts one 8x2 pixel group per call and advances srcY/srcU/srcV/dst past the
// consumed data (all pointer parameters are references). Chroma is upsampled
// vertically with 3:1 weighting between the two lines, everything is dithered
// down to 8 bit and interleaved as YUY2 (or UYVY when uyvy != 0).
// BUGFIX: the Y dithering previously computed xmm7 from the already-shifted
// xmm6 (a total shift of 4), discarding the second line's coefficients entirely;
// it now shifts xmm7 by 2 like xmm6, symmetric for both lines.
template <LAVPixelFormat inputFormat, int shift, int uyvy, int dithertype>
__forceinline static int yuv420yuy2_convert_pixels(const uint8_t *&srcY, const uint8_t *&srcU, const uint8_t *&srcV,
                                                   uint8_t *&dst, ptrdiff_t srcStrideY, ptrdiff_t srcStrideUV,
                                                   ptrdiff_t dstStride, ptrdiff_t line, const uint16_t *&dithers,
                                                   ptrdiff_t pos)
{
    __m128i xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7;
    xmm7 = _mm_setzero_si128();
    // Shift > 0 is for 9/10 bit formats
    if (shift > 0)
    {
        // Load 4 U/V values from line 0/1 into registers
        PIXCONV_LOAD_4PIXEL16(xmm1, srcU);
        PIXCONV_LOAD_4PIXEL16(xmm3, srcU + srcStrideUV);
        PIXCONV_LOAD_4PIXEL16(xmm0, srcV);
        PIXCONV_LOAD_4PIXEL16(xmm2, srcV + srcStrideUV);
        // Interleave U and V
        xmm0 = _mm_unpacklo_epi16(xmm1, xmm0); /* 0V0U0V0U */
        xmm2 = _mm_unpacklo_epi16(xmm3, xmm2); /* 0V0U0V0U */
    }
    else if (inputFormat == LAVPixFmt_NV12)
    {
        // Load 4 16-bit macro pixels, which contain 4 UV samples
        PIXCONV_LOAD_4PIXEL16(xmm0, srcU);
        PIXCONV_LOAD_4PIXEL16(xmm2, srcU + srcStrideUV);
        // Expand to 16-bit
        xmm0 = _mm_unpacklo_epi8(xmm0, xmm7); /* 0V0U0V0U */
        xmm2 = _mm_unpacklo_epi8(xmm2, xmm7); /* 0V0U0V0U */
    }
    else
    {
        PIXCONV_LOAD_4PIXEL8(xmm1, srcU);
        PIXCONV_LOAD_4PIXEL8(xmm3, srcU + srcStrideUV);
        PIXCONV_LOAD_4PIXEL8(xmm0, srcV);
        PIXCONV_LOAD_4PIXEL8(xmm2, srcV + srcStrideUV);
        // Interleave U and V
        xmm0 = _mm_unpacklo_epi8(xmm1, xmm0); /* VUVU0000 */
        xmm2 = _mm_unpacklo_epi8(xmm3, xmm2); /* VUVU0000 */
        // Expand to 16-bit
        xmm0 = _mm_unpacklo_epi8(xmm0, xmm7); /* 0V0U0V0U */
        xmm2 = _mm_unpacklo_epi8(xmm2, xmm7); /* 0V0U0V0U */
    }
    // xmm0/xmm2 contain 4 interleaved U/V samples from two lines each in the 16bit parts, still in their native
    // bitdepth
    // Chroma upsampling
    if (shift > 0 || inputFormat == LAVPixFmt_NV12)
    {
        srcU += 8;
        srcV += 8;
    }
    else
    {
        srcU += 4;
        srcV += 4;
    }
    // Vertical chroma interpolation: 3x the nearer line + 1x the farther line
    xmm1 = xmm0;
    xmm1 = _mm_add_epi16(xmm1, xmm0); /* 2x line 0 */
    xmm1 = _mm_add_epi16(xmm1, xmm0); /* 3x line 0 */
    xmm1 = _mm_add_epi16(xmm1, xmm2); /* 3x line 0 + line 1 (10bit) */
    xmm3 = xmm2;
    xmm3 = _mm_add_epi16(xmm3, xmm2); /* 2x line 1 */
    xmm3 = _mm_add_epi16(xmm3, xmm2); /* 3x line 1 */
    xmm3 = _mm_add_epi16(xmm3, xmm0); /* 3x line 1 + line 0 (10bit) */
    // After this step, xmm1 and xmm3 contain 8 16-bit values, V and U interleaved. For 4:2:0, filling input+2 bits (10,
    // 11, 12). Load Y
    if (shift > 0)
    {
        // Load 8 Y values from line 0/1 into registers
        PIXCONV_LOAD_PIXEL8_ALIGNED(xmm0, srcY);
        PIXCONV_LOAD_PIXEL8_ALIGNED(xmm5, srcY + srcStrideY);
        srcY += 16;
    }
    else
    {
        PIXCONV_LOAD_4PIXEL16(xmm0, srcY);
        PIXCONV_LOAD_4PIXEL16(xmm5, srcY + srcStrideY);
        srcY += 8;
        xmm0 = _mm_unpacklo_epi8(xmm0, xmm7); /* YYYYYYYY (16-bit fields)*/
        xmm5 = _mm_unpacklo_epi8(xmm5, xmm7); /* YYYYYYYY (16-bit fields) */
    }
    // Dither everything to 8-bit
    // Dithering
    if (dithertype == LAVDither_Random)
    {
        /* Load random dithering coeffs from the dithers buffer */
        int offset = (pos % (DITHER_STEPS * 8 * 2)) * 2;
        xmm6 = _mm_load_si128((const __m128i *)(dithers + 0 + offset));
        xmm7 = _mm_load_si128((const __m128i *)(dithers + 8 + offset));
    }
    else
    {
        PIXCONV_LOAD_DITHER_COEFFS(xmm6, line + 0, shift + 2, odithers);
        PIXCONV_LOAD_DITHER_COEFFS(xmm7, line + 1, shift + 2, odithers2);
    }
    // Dither UV (coeffs are at strength shift+2, matching the upsampled chroma)
    xmm1 = _mm_adds_epu16(xmm1, xmm6);
    xmm3 = _mm_adds_epu16(xmm3, xmm7);
    xmm1 = _mm_srai_epi16(xmm1, shift + 2);
    xmm3 = _mm_srai_epi16(xmm3, shift + 2);
    if (shift)
    { /* Y only needs to be dithered if it was > 8 bit */
        xmm6 = _mm_srli_epi16(xmm6, 2); /* Shift dithering coeffs to proper strength */
        xmm7 = _mm_srli_epi16(xmm7, 2); /* BUGFIX: was _mm_srli_epi16(xmm6, 2), dropping line 1's coeffs */
        xmm0 = _mm_adds_epu16(xmm0, xmm6); /* Apply dithering coeffs */
        xmm0 = _mm_srai_epi16(xmm0, shift); /* Shift to 8 bit */
        xmm5 = _mm_adds_epu16(xmm5, xmm7); /* Apply dithering coeffs */
        xmm5 = _mm_srai_epi16(xmm5, shift); /* Shift to 8 bit */
    }
    // Pack into 8-bit containers
    xmm0 = _mm_packus_epi16(xmm0, xmm5);
    xmm1 = _mm_packus_epi16(xmm1, xmm3);
    // Interleave U/V with Y
    if (uyvy)
    {
        xmm3 = xmm1;
        xmm3 = _mm_unpacklo_epi8(xmm3, xmm0);
        xmm4 = _mm_unpackhi_epi8(xmm1, xmm0);
    }
    else
    {
        xmm3 = xmm0;
        xmm3 = _mm_unpacklo_epi8(xmm3, xmm1);
        xmm4 = _mm_unpackhi_epi8(xmm0, xmm1);
    }
    // Write back into the target memory
    _mm_stream_si128((__m128i *)(dst), xmm3);
    _mm_stream_si128((__m128i *)(dst + dstStride), xmm4);
    dst += 16;
    return 0;
}
// Walks the image two lines at a time, converting 8x2 pixel groups via
// yuv420yuy2_convert_pixels. The first and last line are handled separately:
// passing 0 for all strides makes the converter read the same line twice,
// which compensates for the vertical chroma offset of 4:2:0 at the borders.
template <LAVPixelFormat inputFormat, int shift, int uyvy, int dithertype>
static int __stdcall yuv420yuy2_process_lines(const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
                                              uint8_t *dst, int width, int height, ptrdiff_t srcStrideY,
                                              ptrdiff_t srcStrideUV, ptrdiff_t dstStride, const uint16_t *dithers)
{
    const uint8_t *y = srcY;
    const uint8_t *u = srcU;
    const uint8_t *v = srcV;
    uint8_t *yuy2 = dst;
    // Processing starts at line 1, and ends at height - 1. The first and last line have special handling
    ptrdiff_t line = 1;
    const ptrdiff_t lastLine = height - 1;
    const uint16_t *lineDither = dithers;
    _mm_sfence();
    // Process first line
    // This needs special handling because of the chroma offset of YUV420
    for (ptrdiff_t i = 0; i < width; i += 8)
    {
        yuv420yuy2_convert_pixels<inputFormat, shift, uyvy, dithertype>(y, u, v, yuy2, 0, 0, 0, 0, lineDither, i);
    }
    // Main loop: two output lines per iteration
    for (; line < lastLine; line += 2)
    {
        if (dithertype == LAVDither_Random)
            lineDither = dithers + (line * 16 * DITHER_STEPS); // per-line-pair dither coefficients
        y = srcY + line * srcStrideY;
        u = srcU + (line >> 1) * srcStrideUV; // chroma advances at half rate (4:2:0)
        v = srcV + (line >> 1) * srcStrideUV;
        yuy2 = dst + line * dstStride;
        for (int i = 0; i < width; i += 8)
        {
            yuv420yuy2_convert_pixels<inputFormat, shift, uyvy, dithertype>(y, u, v, yuy2, srcStrideY, srcStrideUV,
                                                                            dstStride, line, lineDither, i);
        }
    }
    // Process last line
    // This needs special handling because of the chroma offset of YUV420
    if (dithertype == LAVDither_Random)
        lineDither = dithers + ((height - 2) * 16 * DITHER_STEPS);
    y = srcY + (height - 1) * srcStrideY;
    u = srcU + ((height >> 1) - 1) * srcStrideUV;
    v = srcV + ((height >> 1) - 1) * srcStrideUV;
    yuy2 = dst + (height - 1) * dstStride;
    for (ptrdiff_t i = 0; i < width; i += 8)
    {
        yuv420yuy2_convert_pixels<inputFormat, shift, uyvy, dithertype>(y, u, v, yuy2, 0, 0, 0, line, lineDither, i);
    }
    return 0;
}
template <int uyvy, int dithertype>
static int __stdcall yuv420yuy2_dispatch(LAVPixelFormat inputFormat, int bpp, const uint8_t *srcY, const uint8_t *srcU,
const uint8_t *srcV, uint8_t *dst, int width, int height, ptrdiff_t srcStrideY,
ptrdiff_t srcStrideUV, ptrdiff_t dstStride, const uint16_t *dithers)
{
// Wrap the input format into template args
switch (inputFormat)
{
case LAVPixFmt_YUV420:
return yuv420yuy2_process_lines<LAVPixFmt_YUV420, 0, uyvy, dithertype>(
srcY, srcU, srcV, dst, width, height, srcStrideY, srcStrideUV, dstStride, dithers);
case LAVPixFmt_NV12:
return yuv420yuy2_process_lines<LAVPixFmt_NV12, 0, uyvy, dithertype>(
srcY, srcU, srcV, dst, width, height, srcStrideY, srcStrideUV, dstStride, dithers);
case LAVPixFmt_YUV420bX:
if (bpp == 9)
return yuv420yuy2_process_lines<LAVPixFmt_YUV420, 1, uyvy, dithertype>(
srcY, srcU, srcV, dst, width, height, srcStrideY, srcStrideUV, dstStride, dithers);
else if (bpp == 10)
return yuv420yuy2_process_lines<LAVPixFmt_YUV420, 2, uyvy, dithertype>(
srcY, srcU, srcV, dst, width, height, srcStrideY, srcStrideUV, dstStride, dithers);
/*else if (bpp == 11)
return yuv420yuy2_process_lines<LAVPixFmt_YUV420, 3, uyvy, dithertype>(srcY, srcU, srcV, dst, width, height,
srcStrideY, srcStrideUV, dstStride, dithers);*/
else if (bpp == 12)
return yuv420yuy2_process_lines<LAVPixFmt_YUV420, 4, uyvy, dithertype>(
srcY, srcU, srcV, dst, width, height, srcStrideY, srcStrideUV, dstStride, dithers);
/*else if (bpp == 13)
return yuv420yuy2_process_lines<LAVPixFmt_YUV420, 5, uyvy, dithertype>(srcY, srcU, srcV, dst, width, height,
srcStrideY, srcStrideUV, dstStride, dithers);*/
else if (bpp == 14)
return yuv420yuy2_process_lines<LAVPixFmt_YUV420, 6, uyvy, dithertype>(
srcY, srcU, srcV, dst, width, height, srcStrideY, srcStrideUV, dstStride, dithers);
else
ASSERT(0);
break;
default: ASSERT(0);
}
return 0;
}
// Entry point for the YUV420 -> YUY2/UYVY conversion: selects random vs ordered
// dithering (falling back to ordered when no random coefficients are available)
// and forwards to the matching dispatch instantiation.
template <int uyvy> DECLARE_CONV_FUNC_IMPL(convert_yuv420_yuy2)
{
    const LAVDitherMode mode = m_pSettings->GetDitherMode();
    const uint16_t *coeffs =
        (mode == LAVDither_Random) ? GetRandomDitherCoeffs(height, DITHER_STEPS * 2, bpp - 8 + 2, 0) : nullptr;
    const bool useRandom = (mode == LAVDither_Random) && (coeffs != nullptr);
    if (useRandom)
    {
        yuv420yuy2_dispatch<uyvy, 1>(inputFormat, bpp, src[0], src[1], src[2], dst[0], width, height, srcStride[0],
                                     srcStride[1], dstStride[0], coeffs);
    }
    else
    {
        yuv420yuy2_dispatch<uyvy, 0>(inputFormat, bpp, src[0], src[1], src[2], dst[0], width, height, srcStride[0],
                                     srcStride[1], dstStride[0], nullptr);
    }
    return S_OK;
}
// Force creation of these two variants (YUY2 and UYVY)
template HRESULT CLAVPixFmtConverter::convert_yuv420_yuy2<0> CONV_FUNC_PARAMS;
template HRESULT CLAVPixFmtConverter::convert_yuv420_yuy2<1> CONV_FUNC_PARAMS;
| 11,686
|
C++
|
.cpp
| 263
| 36.441065
| 120
| 0.605994
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| true
| true
| true
| false
|
22,167
|
interleave.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/pixconv/interleave.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include <emmintrin.h>
#include "pixconv_internal.h"
#include "pixconv_sse2_templates.h"
// Convert planar 16-bit YUV 4:4:4 (bpp significant bits) to packed Y410:
// each output dword holds U (bits 0-9), Y (bits 10-19), V (bits 20-29) and
// a 2-bit alpha forced to 3 (the 0xC0000000 constant). Samples are shifted
// left by (10 - bpp) to scale up to 10 bit.
DECLARE_CONV_FUNC_IMPL(convert_yuv444_y410)
{
    const uint16_t *y = (const uint16_t *)src[0];
    const uint16_t *u = (const uint16_t *)src[1];
    const uint16_t *v = (const uint16_t *)src[2];
    const ptrdiff_t inStride = srcStride[0] >> 1; // stride in 16-bit units
    const ptrdiff_t outStride = dstStride[0];
    int shift = 10 - bpp; // scale input up to 10 bit
    ptrdiff_t line, i;
    __m128i xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7;
    xmm7 = _mm_set1_epi32(0xC0000000); // alpha bits (topmost 2 bits of each dword)
    xmm6 = _mm_setzero_si128();
    _mm_sfence();
    for (line = 0; line < height; ++line)
    {
        __m128i *dst128 = (__m128i *)(dst[0] + line * outStride);
        // 8 pixels per iteration -> two 128-bit stores of 4 dwords each
        for (i = 0; i < width; i += 8)
        {
            PIXCONV_LOAD_PIXEL8_ALIGNED(xmm0, (y + i));
            xmm0 = _mm_slli_epi16(xmm0, shift);
            PIXCONV_LOAD_PIXEL8_ALIGNED(xmm1, (u + i));
            xmm1 = _mm_slli_epi16(xmm1, shift);
            PIXCONV_LOAD_PIXEL8_ALIGNED(xmm2, (v + i));
            xmm2 = _mm_slli_epi16(xmm2, shift + 4); // +4 so its directly aligned properly (data from bit 14 to bit 4)
            xmm3 = _mm_unpacklo_epi16(xmm1, xmm2); // 0VVVVV00000UUUUU
            xmm4 = _mm_unpackhi_epi16(xmm1, xmm2); // 0VVVVV00000UUUUU
            xmm3 = _mm_or_si128(xmm3, xmm7); // AVVVVV00000UUUUU
            xmm4 = _mm_or_si128(xmm4, xmm7); // AVVVVV00000UUUUU
            xmm5 = _mm_unpacklo_epi16(xmm0, xmm6); // 00000000000YYYYY
            xmm2 = _mm_unpackhi_epi16(xmm0, xmm6); // 00000000000YYYYY
            xmm5 = _mm_slli_epi32(xmm5, 10); // 000000YYYYY00000
            xmm2 = _mm_slli_epi32(xmm2, 10); // 000000YYYYY00000
            xmm3 = _mm_or_si128(xmm3, xmm5); // AVVVVVYYYYYUUUUU
            xmm4 = _mm_or_si128(xmm4, xmm2); // AVVVVVYYYYYUUUUU
            // Write data back
            _mm_stream_si128(dst128++, xmm3);
            _mm_stream_si128(dst128++, xmm4);
        }
        y += inStride;
        u += inStride;
        v += inStride;
    }
    return S_OK;
}
| 2,898
|
C++
|
.cpp
| 66
| 37.19697
| 118
| 0.617188
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| true
| true
| true
| false
|
22,168
|
convert_generic.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/pixconv/convert_generic.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "pixconv_internal.h"
extern "C"
{
#include "libavutil/intreadwrite.h"
};
#define ALIGN(x, a) (((x) + (a)-1UL) & ~((a)-1UL))
// Generic (swscale-backed) conversion fallback: dispatches to the converter
// matching the configured output pixel format.
// BUGFIX: the computed HRESULT (including E_FAIL from the default branch) was
// previously discarded and the function always returned S_OK, silently
// swallowing conversion failures; it now propagates hr to the caller.
DECLARE_CONV_FUNC_IMPL(convert_generic)
{
    HRESULT hr = S_OK;
    AVPixelFormat inputFmt = GetFFInput();
    switch (m_OutputPixFmt)
    {
    case LAVOutPixFmt_YV12:
        hr = swscale_scale(inputFmt, AV_PIX_FMT_YUV420P, src, srcStride, dst, width, height, dstStride,
                           lav_pixfmt_desc[m_OutputPixFmt], true);
        break;
    case LAVOutPixFmt_NV12:
        hr = swscale_scale(inputFmt, AV_PIX_FMT_NV12, src, srcStride, dst, width, height, dstStride,
                           lav_pixfmt_desc[m_OutputPixFmt]);
        break;
    case LAVOutPixFmt_YUY2: hr = ConvertTo422Packed(src, srcStride, dst, width, height, dstStride); break;
    case LAVOutPixFmt_UYVY: hr = ConvertTo422Packed(src, srcStride, dst, width, height, dstStride); break;
    case LAVOutPixFmt_AYUV: hr = ConvertToAYUV(src, srcStride, dst, width, height, dstStride); break;
    case LAVOutPixFmt_P010: hr = ConvertToPX1X(src, srcStride, dst, width, height, dstStride, 2); break;
    case LAVOutPixFmt_P016: hr = ConvertToPX1X(src, srcStride, dst, width, height, dstStride, 2); break;
    case LAVOutPixFmt_P210: hr = ConvertToPX1X(src, srcStride, dst, width, height, dstStride, 1); break;
    case LAVOutPixFmt_P216: hr = ConvertToPX1X(src, srcStride, dst, width, height, dstStride, 1); break;
    case LAVOutPixFmt_Y410: hr = ConvertToY410(src, srcStride, dst, width, height, dstStride); break;
    case LAVOutPixFmt_Y416: hr = ConvertToY416(src, srcStride, dst, width, height, dstStride); break;
    case LAVOutPixFmt_RGB32:
        hr = swscale_scale(inputFmt, AV_PIX_FMT_BGRA, src, srcStride, dst, width, height, dstStride,
                           lav_pixfmt_desc[m_OutputPixFmt]);
        break;
    case LAVOutPixFmt_RGB24:
        hr = swscale_scale(inputFmt, AV_PIX_FMT_BGR24, src, srcStride, dst, width, height, dstStride,
                           lav_pixfmt_desc[m_OutputPixFmt]);
        break;
    case LAVOutPixFmt_v210: hr = ConvertTov210(src, srcStride, dst, width, height, dstStride); break;
    case LAVOutPixFmt_v410: hr = ConvertTov410(src, srcStride, dst, width, height, dstStride); break;
    case LAVOutPixFmt_YV16:
        hr = swscale_scale(inputFmt, AV_PIX_FMT_YUV422P, src, srcStride, dst, width, height, dstStride,
                           lav_pixfmt_desc[m_OutputPixFmt], true);
        break;
    case LAVOutPixFmt_YV24:
        hr = swscale_scale(inputFmt, AV_PIX_FMT_YUV444P, src, srcStride, dst, width, height, dstStride,
                           lav_pixfmt_desc[m_OutputPixFmt], true);
        break;
    case LAVOutPixFmt_RGB48:
        hr = swscale_scale(inputFmt, AV_PIX_FMT_RGB48LE, src, srcStride, dst, width, height, dstStride,
                           lav_pixfmt_desc[m_OutputPixFmt]);
        break;
    default:
        ASSERT(0); // unknown output format
        hr = E_FAIL;
        break;
    }
    return hr; // propagate the conversion result (was: always S_OK)
}
// Returns a cached swscale context for the given dimensions and pixel formats,
// creating (or re-creating) it when the dimensions change, and configures the
// colorspace coefficients and range from the current video color properties.
// NOTE(review): the cache key only checks width/height — if srcPix/dstPix were
// to change while the dimensions stay the same, the stale context would be
// reused; presumably each converter instance only ever uses one format pair —
// verify against the callers.
inline SwsContext *CLAVPixFmtConverter::GetSWSContext(int width, int height, enum AVPixelFormat srcPix,
                                                      enum AVPixelFormat dstPix, int flags)
{
    if (!m_pSwsContext || swsWidth != width || swsHeight != height)
    {
        // Get context
        m_pSwsContext = sws_getCachedContext(m_pSwsContext, width, height, srcPix, width, height, dstPix,
                                             flags | SWS_PRINT_INFO, nullptr, nullptr, nullptr);
        int *inv_tbl = nullptr, *tbl = nullptr;
        int srcRange, dstRange, brightness, contrast, saturation;
        int ret = sws_getColorspaceDetails(m_pSwsContext, &inv_tbl, &srcRange, &tbl, &dstRange, &brightness, &contrast,
                                           &saturation);
        if (ret >= 0)
        {
            const int *rgbTbl = nullptr;
            if (m_ColorProps.VideoTransferMatrix != DXVA2_VideoTransferMatrix_Unknown)
            {
                // Explicit transfer matrix from the media type
                int colorspace = SWS_CS_ITU709;
                switch (m_ColorProps.VideoTransferMatrix)
                {
                case DXVA2_VideoTransferMatrix_BT709: colorspace = SWS_CS_ITU709; break;
                case DXVA2_VideoTransferMatrix_BT601: colorspace = SWS_CS_ITU601; break;
                case DXVA2_VideoTransferMatrix_SMPTE240M: colorspace = SWS_CS_SMPTE240M; break;
                }
                rgbTbl = sws_getCoefficients(colorspace);
            }
            else
            {
                // No explicit matrix: guess BT.709 for HD-sized video, BT.601 otherwise
                BOOL isHD = (height >= 720 || width >= 1280);
                rgbTbl = sws_getCoefficients(isHD ? SWS_CS_ITU709 : SWS_CS_ITU601);
            }
            // Full range in and out when the nominal range says 0-255
            srcRange = dstRange = (m_ColorProps.NominalRange == DXVA2_NominalRange_0_255);
            sws_setColorspaceDetails(m_pSwsContext, rgbTbl, srcRange, rgbTbl, dstRange, brightness, contrast,
                                     saturation);
        }
        swsWidth = width;
        swsHeight = height;
    }
    return m_pSwsContext;
}
/**
 * Convert the source image to dstPix using libswscale.
 *
 * The swscale context is obtained via GetSWSContext (cached between calls).
 * When swapPlanes12 is set, the two chroma destination plane pointers are
 * exchanged before scaling, so V-before-U planar outputs (e.g. YV16/YV24)
 * can be written directly by swscale.
 *
 * Returns S_OK on success, E_POINTER if no swscale context is available,
 * E_FAIL if the conversion itself failed.
 */
HRESULT CLAVPixFmtConverter::swscale_scale(enum AVPixelFormat srcPix, enum AVPixelFormat dstPix,
                                           const uint8_t *const src[], const ptrdiff_t srcStride[], uint8_t *dst[],
                                           int width, int height, const ptrdiff_t dstStride[],
                                           LAVOutPixFmtDesc pixFmtDesc, bool swapPlanes12)
{
    SwsContext *ctx = GetSWSContext(width, height, srcPix, dstPix, SWS_BILINEAR);
    CheckPointer(m_pSwsContext, E_POINTER);

    if (swapPlanes12)
    {
        // Exchange chroma planes for formats that store V before U
        uint8_t *tmp = dst[1];
        dst[1] = dst[2];
        dst[2] = tmp;
    }

    // sws_scale2 returns the number of output lines on success, or a negative
    // error code. The previous code discarded this value; propagate failures.
    int ret = sws_scale2(ctx, src, srcStride, 0, height, dst, dstStride);
    if (ret < 0)
        return E_FAIL;

    return S_OK;
}
// Convert the input frame to a packed 8-bit 4:2:2 format (YUY2 or UYVY,
// selected by m_OutputPixFmt). Inputs that are not already 8-bit planar
// 4:2:2 are first converted to YUV422P through swscale into a temporary
// buffer, then packed.
HRESULT CLAVPixFmtConverter::ConvertTo422Packed(const uint8_t *const src[4], const ptrdiff_t srcStride[4],
                                                uint8_t *dst[], int width, int height, const ptrdiff_t dstStride[])
{
    const BYTE *y = nullptr;
    const BYTE *u = nullptr;
    const BYTE *v = nullptr;
    ptrdiff_t line, i;
    ptrdiff_t sourceStride = 0;
    BYTE *pTmpBuffer = nullptr;

    if (m_InputPixFmt != LAVPixFmt_YUV422)
    {
        // Not 8-bit 4:2:2 planar yet: scale into a temporary YUV422P buffer
        uint8_t *tmp[4] = {nullptr};
        ptrdiff_t tmpStride[4] = {0};
        ptrdiff_t scaleStride = FFALIGN(width, 32);

        // Full-res luma plane plus two half-width chroma planes = 2x luma size
        pTmpBuffer = (BYTE *)av_malloc(height * scaleStride * 2);
        if (pTmpBuffer == nullptr)
            return E_OUTOFMEMORY;

        tmp[0] = pTmpBuffer;
        tmp[1] = tmp[0] + (height * scaleStride);
        tmp[2] = tmp[1] + (height * scaleStride / 2);
        tmp[3] = nullptr;
        tmpStride[0] = scaleStride;
        tmpStride[1] = scaleStride / 2;
        tmpStride[2] = scaleStride / 2;
        tmpStride[3] = 0;

        SwsContext *ctx = GetSWSContext(width, height, GetFFInput(), AV_PIX_FMT_YUV422P, SWS_BILINEAR);
        sws_scale2(ctx, src, srcStride, 0, height, tmp, tmpStride);

        y = tmp[0];
        u = tmp[1];
        v = tmp[2];
        sourceStride = scaleStride;
    }
    else
    {
        // Input already matches: pack directly from the source planes
        y = src[0];
        u = src[1];
        v = src[2];
        sourceStride = srcStride[0];
    }

// Pack one macropixel (two luma samples + one U + one V) as a single 32-bit write
#define YUV422_PACK_YUY2(offset) \
    *idst++ = y[(i + offset) * 2] | (u[i + offset] << 8) | (y[(i + offset) * 2 + 1] << 16) | (v[i + offset] << 24);
#define YUV422_PACK_UYVY(offset) \
    *idst++ = u[i + offset] | (y[(i + offset) * 2] << 8) | (v[i + offset] << 16) | (y[(i + offset) * 2 + 1] << 24);

    uint8_t *out = dst[0];
    int halfwidth = width >> 1;             // macropixels per line
    ptrdiff_t halfstride = sourceStride >> 1; // chroma plane stride

    if (m_OutputPixFmt == LAVOutPixFmt_YUY2)
    {
        for (line = 0; line < height; ++line)
        {
            uint32_t *idst = (uint32_t *)out;
            // Main loop unrolled by 8 macropixels (16 output pixels)
            for (i = 0; i < (halfwidth - 7); i += 8)
            {
                YUV422_PACK_YUY2(0)
                YUV422_PACK_YUY2(1)
                YUV422_PACK_YUY2(2)
                YUV422_PACK_YUY2(3)
                YUV422_PACK_YUY2(4)
                YUV422_PACK_YUY2(5)
                YUV422_PACK_YUY2(6)
                YUV422_PACK_YUY2(7)
            }
            // Tail loop for remaining macropixels
            for (; i < halfwidth; ++i)
            {
                YUV422_PACK_YUY2(0)
            }
            y += sourceStride;
            u += halfstride;
            v += halfstride;
            out += dstStride[0];
        }
    }
    else
    {
        // UYVY: same loop structure, different byte order
        for (line = 0; line < height; ++line)
        {
            uint32_t *idst = (uint32_t *)out;
            for (i = 0; i < (halfwidth - 7); i += 8)
            {
                YUV422_PACK_UYVY(0)
                YUV422_PACK_UYVY(1)
                YUV422_PACK_UYVY(2)
                YUV422_PACK_UYVY(3)
                YUV422_PACK_UYVY(4)
                YUV422_PACK_UYVY(5)
                YUV422_PACK_UYVY(6)
                YUV422_PACK_UYVY(7)
            }
            for (; i < halfwidth; ++i)
            {
                YUV422_PACK_UYVY(0)
            }
            y += sourceStride;
            u += halfstride;
            v += halfstride;
            out += dstStride[0];
        }
    }

    av_freep(&pTmpBuffer);
    return S_OK;
}
// Convert the input frame to packed AYUV (stored V,U,Y,A in each 32-bit
// word, alpha forced to 0xFF). Inputs that are not already 8-bit planar
// 4:4:4 are first converted to YUV444P through swscale.
HRESULT CLAVPixFmtConverter::ConvertToAYUV(const uint8_t *const src[4], const ptrdiff_t srcStride[4], uint8_t *dst[],
                                           int width, int height, const ptrdiff_t dstStride[])
{
    const BYTE *y = nullptr;
    const BYTE *u = nullptr;
    const BYTE *v = nullptr;
    ptrdiff_t line, i = 0;
    ptrdiff_t sourceStride = 0;
    BYTE *pTmpBuffer = nullptr;

    if (m_InputPixFmt != LAVPixFmt_YUV444)
    {
        // Convert to 8-bit 4:4:4 planar in a temporary buffer first
        uint8_t *tmp[4] = {nullptr};
        ptrdiff_t tmpStride[4] = {0};
        ptrdiff_t scaleStride = FFALIGN(width, 32);

        // Three full-resolution planes
        pTmpBuffer = (BYTE *)av_malloc(height * scaleStride * 3);
        if (pTmpBuffer == nullptr)
            return E_OUTOFMEMORY;

        tmp[0] = pTmpBuffer;
        tmp[1] = tmp[0] + (height * scaleStride);
        tmp[2] = tmp[1] + (height * scaleStride);
        tmp[3] = nullptr;
        tmpStride[0] = scaleStride;
        tmpStride[1] = scaleStride;
        tmpStride[2] = scaleStride;
        tmpStride[3] = 0;

        SwsContext *ctx = GetSWSContext(width, height, GetFFInput(), AV_PIX_FMT_YUV444P, SWS_BILINEAR);
        sws_scale2(ctx, src, srcStride, 0, height, tmp, tmpStride);

        y = tmp[0];
        u = tmp[1];
        v = tmp[2];
        sourceStride = scaleStride;
    }
    else
    {
        y = src[0];
        u = src[1];
        v = src[2];
        sourceStride = srcStride[0];
    }

// Pack one pixel with opaque alpha as a single 32-bit write
#define YUV444_PACK_AYUV(offset) *idst++ = v[i + offset] | (u[i + offset] << 8) | (y[i + offset] << 16) | (0xff << 24);

    BYTE *out = dst[0];
    for (line = 0; line < height; ++line)
    {
        uint32_t *idst = (uint32_t *)out;
        // Main loop unrolled by 8 pixels, tail loop below
        for (i = 0; i < (width - 7); i += 8)
        {
            YUV444_PACK_AYUV(0)
            YUV444_PACK_AYUV(1)
            YUV444_PACK_AYUV(2)
            YUV444_PACK_AYUV(3)
            YUV444_PACK_AYUV(4)
            YUV444_PACK_AYUV(5)
            YUV444_PACK_AYUV(6)
            YUV444_PACK_AYUV(7)
        }
        for (; i < width; ++i)
        {
            YUV444_PACK_AYUV(0)
        }
        y += sourceStride;
        u += sourceStride;
        v += sourceStride;
        out += dstStride[0];
    }

    av_freep(&pTmpBuffer);
    return S_OK;
}
// Convert the input frame to a semi-planar 16-bit format of the P01x/P21x
// family (P010/P016 when chromaVertical == 2, P210/P216 when == 1):
// a 16-bit luma plane followed by one interleaved 16-bit U/V plane.
// If the input is not 16-bit planar YUV with matching chroma subsampling,
// it is first converted through swscale; otherwise samples are copied and
// shifted up from m_InBpp to 16 bits.
HRESULT CLAVPixFmtConverter::ConvertToPX1X(const uint8_t *const src[4], const ptrdiff_t srcStride[4], uint8_t *dst[],
                                           int width, int height, const ptrdiff_t dstStride[], int chromaVertical)
{
    const BYTE *y = nullptr;
    const BYTE *u = nullptr;
    const BYTE *v = nullptr;
    ptrdiff_t line, i = 0;
    ptrdiff_t sourceStride = 0;

    int shift = 0; // bits to shift samples up to reach full 16-bit range

    BYTE *pTmpBuffer = nullptr;

    if ((chromaVertical == 1 && m_InputPixFmt != LAVPixFmt_YUV422bX) ||
        (chromaVertical == 2 && m_InputPixFmt != LAVPixFmt_YUV420bX))
    {
        // Convert to 16-bit planar (422P16 or 420P16) in a temporary buffer
        uint8_t *tmp[4] = {nullptr};
        ptrdiff_t tmpStride[4] = {0};
        ptrdiff_t scaleStride = FFALIGN(width, 32) * 2; // stride in bytes, 2 bytes/sample

        pTmpBuffer = (BYTE *)av_malloc(height * scaleStride * 2);
        if (pTmpBuffer == nullptr)
            return E_OUTOFMEMORY;

        tmp[0] = pTmpBuffer;
        tmp[1] = tmp[0] + (height * scaleStride);
        tmp[2] = tmp[1] + ((height / chromaVertical) * (scaleStride / 2));
        tmp[3] = nullptr;
        tmpStride[0] = scaleStride;
        tmpStride[1] = scaleStride / 2;
        tmpStride[2] = scaleStride / 2;
        tmpStride[3] = 0;

        SwsContext *ctx =
            GetSWSContext(width, height, GetFFInput(),
                          chromaVertical == 1 ? AV_PIX_FMT_YUV422P16LE : AV_PIX_FMT_YUV420P16LE, SWS_BILINEAR);
        sws_scale2(ctx, src, srcStride, 0, height, tmp, tmpStride);

        y = tmp[0];
        u = tmp[1];
        v = tmp[2];
        sourceStride = scaleStride;
    }
    else
    {
        // Matching 16-bit planar input: copy directly, shifting up from m_InBpp
        y = src[0];
        u = src[1];
        v = src[2];
        sourceStride = srcStride[0];

        shift = (16 - m_InBpp);
    }

    // copy Y
    BYTE *pLineOut = dst[0];
    const BYTE *pLineIn = y;
    for (line = 0; line < height; ++line)
    {
        if (shift == 0)
        {
            // Samples already occupy the full 16 bits: straight copy
            memcpy(pLineOut, pLineIn, width * 2);
        }
        else
        {
            // Read little-endian samples and shift them into the high bits
            const uint16_t *yc = (uint16_t *)pLineIn;
            uint16_t *idst = (uint16_t *)pLineOut;
            for (i = 0; i < width; ++i)
            {
                uint16_t yv = AV_RL16(yc + i);
                if (shift)
                    yv <<= shift;
                *idst++ = yv;
            }
        }
        pLineOut += dstStride[0];
        pLineIn += sourceStride;
    }

    // Byte stride of the half-width chroma planes, expressed in uint16_t units
    sourceStride >>= 2;

    // Merge U/V
    BYTE *out = dst[1];
    const uint16_t *uc = (uint16_t *)u;
    const uint16_t *vc = (uint16_t *)v;
    for (line = 0; line < height / chromaVertical; ++line)
    {
        uint32_t *idst = (uint32_t *)out;
        for (i = 0; i < width / 2; ++i)
        {
            // Interleave one U and one V sample into a single 32-bit write
            uint16_t uv = AV_RL16(uc + i);
            uint16_t vv = AV_RL16(vc + i);
            if (shift)
            {
                uv <<= shift;
                vv <<= shift;
            }
            *idst++ = uv | (vv << 16);
        }
        uc += sourceStride;
        vc += sourceStride;
        out += dstStride[1];
    }

    av_freep(&pTmpBuffer);
    return S_OK;
}
#define YUV444_PACKED_LOOP_HEAD(width, height, y, u, v, out) \
for (int line = 0; line < height; ++line) \
{ \
uint32_t *idst = (uint32_t *)out; \
for (int i = 0; i < width; ++i) \
{ \
uint32_t yv, uv, vv;
#define YUV444_PACKED_LOOP_HEAD_LE(width, height, y, u, v, out) \
YUV444_PACKED_LOOP_HEAD(width, height, y, u, v, out) \
yv = AV_RL16(y + i); \
uv = AV_RL16(u + i); \
vv = AV_RL16(v + i);
#define YUV444_PACKED_LOOP_END(y, u, v, out, srcStride, dstStride) \
} \
y += srcStride; \
u += srcStride; \
v += srcStride; \
out += dstStride; \
}
// Convert the input frame to packed Y410: 10-bit U, Y, V plus a 2-bit alpha
// (set to 3, fully opaque) in each 32-bit word. Inputs that are not 16-bit
// planar 4:4:4 with <= 10 bits are first converted to YUV444P10 via swscale;
// 9-bit inputs are shifted up by one bit to reach 10-bit range.
HRESULT CLAVPixFmtConverter::ConvertToY410(const uint8_t *const src[4], const ptrdiff_t srcStride[4], uint8_t *dst[],
                                           int width, int height, const ptrdiff_t dstStride[])
{
    const uint16_t *y = nullptr;
    const uint16_t *u = nullptr;
    const uint16_t *v = nullptr;
    ptrdiff_t sourceStride = 0; // stride in uint16_t units
    bool b9Bit = false;

    BYTE *pTmpBuffer = nullptr;

    if (m_InputPixFmt != LAVPixFmt_YUV444bX || m_InBpp > 10)
    {
        // Convert to 10-bit (in 16-bit containers) 4:4:4 planar first
        uint8_t *tmp[4] = {nullptr};
        ptrdiff_t tmpStride[4] = {0};
        ptrdiff_t scaleStride = FFALIGN(width, 32);

        // Three planes at 2 bytes per sample
        pTmpBuffer = (BYTE *)av_malloc(height * scaleStride * 6);
        if (pTmpBuffer == nullptr)
            return E_OUTOFMEMORY;

        tmp[0] = pTmpBuffer;
        tmp[1] = tmp[0] + (height * scaleStride * 2);
        tmp[2] = tmp[1] + (height * scaleStride * 2);
        tmp[3] = nullptr;
        tmpStride[0] = scaleStride * 2;
        tmpStride[1] = scaleStride * 2;
        tmpStride[2] = scaleStride * 2;
        tmpStride[3] = 0;

        SwsContext *ctx = GetSWSContext(width, height, GetFFInput(), AV_PIX_FMT_YUV444P10LE, SWS_BILINEAR);
        sws_scale2(ctx, src, srcStride, 0, height, tmp, tmpStride);

        y = (uint16_t *)tmp[0];
        u = (uint16_t *)tmp[1];
        v = (uint16_t *)tmp[2];
        sourceStride = scaleStride;
    }
    else
    {
        y = (uint16_t *)src[0];
        u = (uint16_t *)src[1];
        v = (uint16_t *)src[2];
        sourceStride = srcStride[0] / 2;

        b9Bit = (m_InBpp == 9);
    }

// Pack 10-bit U/Y/V plus 2-bit alpha (0b11) into one 32-bit word
#define YUV444_Y410_PACK *idst++ = (uv & 0x3FF) | ((yv & 0x3FF) << 10) | ((vv & 0x3FF) << 20) | (3 << 30);

    BYTE *out = dst[0];
    YUV444_PACKED_LOOP_HEAD_LE(width, height, y, u, v, out)
        // Promote 9-bit samples to 10-bit range
        if (b9Bit)
        {
            yv <<= 1;
            uv <<= 1;
            vv <<= 1;
        }
        YUV444_Y410_PACK
    YUV444_PACKED_LOOP_END(y, u, v, out, sourceStride, dstStride[0])

    av_freep(&pTmpBuffer);
    return S_OK;
}
// Convert the input frame to packed Y416: 16-bit U, Y, V, A per pixel
// (alpha forced to 0xFFFF). Non-4:4:4 high-bitdepth inputs are converted
// to YUV444P16 via swscale; matching inputs are shifted up from m_InBpp.
HRESULT CLAVPixFmtConverter::ConvertToY416(const uint8_t *const src[4], const ptrdiff_t srcStride[4], uint8_t *dst[],
                                           int width, int height, const ptrdiff_t dstStride[])
{
    const uint16_t *y = nullptr;
    const uint16_t *u = nullptr;
    const uint16_t *v = nullptr;
    ptrdiff_t sourceStride = 0; // stride in uint16_t units

    BYTE *pTmpBuffer = nullptr;
    int shift = (16 - m_InBpp); // bits to reach full 16-bit range

    if (m_InputPixFmt != LAVPixFmt_YUV444bX)
    {
        // Convert to 16-bit 4:4:4 planar in a temporary buffer first
        uint8_t *tmp[4] = {nullptr};
        ptrdiff_t tmpStride[4] = {0};
        ptrdiff_t scaleStride = FFALIGN(width, 32);

        // Three planes at 2 bytes per sample
        pTmpBuffer = (BYTE *)av_malloc(height * scaleStride * 6);
        if (pTmpBuffer == nullptr)
            return E_OUTOFMEMORY;

        tmp[0] = pTmpBuffer;
        tmp[1] = tmp[0] + (height * scaleStride * 2);
        tmp[2] = tmp[1] + (height * scaleStride * 2);
        tmp[3] = nullptr;
        tmpStride[0] = scaleStride * 2;
        tmpStride[1] = scaleStride * 2;
        tmpStride[2] = scaleStride * 2;
        tmpStride[3] = 0;

        SwsContext *ctx = GetSWSContext(width, height, GetFFInput(), AV_PIX_FMT_YUV444P16LE, SWS_BILINEAR);
        sws_scale2(ctx, src, srcStride, 0, height, tmp, tmpStride);

        y = (uint16_t *)tmp[0];
        u = (uint16_t *)tmp[1];
        v = (uint16_t *)tmp[2];
        sourceStride = scaleStride;

        // swscale output is already full 16-bit
        shift = 0;
    }
    else
    {
        y = (uint16_t *)src[0];
        u = (uint16_t *)src[1];
        v = (uint16_t *)src[2];
        sourceStride = srcStride[0] / 2;
    }

    BYTE *out = dst[0];
    YUV444_PACKED_LOOP_HEAD_LE(width, height, y, u, v, out)
        // Write U, Y, V, A as four consecutive 16-bit values (8 bytes/pixel)
        uint16_t *p = (uint16_t *)idst;
        p[0] = (uv << shift);
        p[1] = (yv << shift);
        p[2] = (vv << shift);
        p[3] = 0xFFFF;
        idst += 2; // advance the 32-bit cursor by 8 bytes
    YUV444_PACKED_LOOP_END(y, u, v, out, sourceStride, dstStride[0])

    av_freep(&pTmpBuffer);
    return S_OK;
}
// Convert the input frame to v210: 10-bit 4:2:2 with three samples packed
// per 32-bit word, rows padded to 128-byte boundaries (48-pixel groups).
// Inputs that are not 10-bit planar 4:2:2 are first converted to
// YUV422P10 via swscale.
HRESULT CLAVPixFmtConverter::ConvertTov210(const uint8_t *const src[4], const ptrdiff_t srcStride[4], uint8_t *dst[],
                                           int width, int height, const ptrdiff_t dstStride[])
{
    const uint16_t *y = nullptr;
    const uint16_t *u = nullptr;
    const uint16_t *v = nullptr;
    ptrdiff_t srcyStride = 0;  // luma stride in uint16_t units
    ptrdiff_t srcuvStride = 0; // chroma stride in uint16_t units

    BYTE *pTmpBuffer = nullptr;

    if (m_InputPixFmt != LAVPixFmt_YUV422bX || m_InBpp != 10)
    {
        // Convert to 10-bit 4:2:2 planar in a temporary buffer first
        uint8_t *tmp[4] = {nullptr};
        ptrdiff_t tmpStride[4] = {0};
        ptrdiff_t scaleStride = FFALIGN(width, 32);

        pTmpBuffer = (BYTE *)av_malloc(height * scaleStride * 6);
        if (pTmpBuffer == nullptr)
            return E_OUTOFMEMORY;

        tmp[0] = pTmpBuffer;
        tmp[1] = tmp[0] + (height * scaleStride * 2);
        tmp[2] = tmp[1] + (height * scaleStride * 2);
        tmp[3] = nullptr;
        tmpStride[0] = scaleStride * 2; // bytes: full-width 16-bit luma
        tmpStride[1] = scaleStride;     // bytes: half-width 16-bit chroma
        tmpStride[2] = scaleStride;
        tmpStride[3] = 0;

        SwsContext *ctx = GetSWSContext(width, height, GetFFInput(), AV_PIX_FMT_YUV422P10LE, SWS_BILINEAR);
        sws_scale2(ctx, src, srcStride, 0, height, tmp, tmpStride);

        y = (uint16_t *)tmp[0];
        u = (uint16_t *)tmp[1];
        v = (uint16_t *)tmp[2];
        srcyStride = scaleStride;
        srcuvStride = scaleStride >> 1;
    }
    else
    {
        y = (uint16_t *)src[0];
        u = (uint16_t *)src[1];
        v = (uint16_t *)src[2];
        srcyStride = srcStride[0] >> 1;
        srcuvStride = srcStride[1] >> 1;
    }

    // Calculate v210 stride (rows are groups of 48 pixels = 128 bytes)
    ptrdiff_t outStride = (((dstStride[0] >> 2) + 47) / 48) * 128;

    // Align width to an even number for processing
    // This may read into the source stride, but otherwise the algorithm won't work.
    width = FFALIGN(width, 2);

    BYTE *pdst = dst[0];
    uint32_t *p = (uint32_t *)pdst;
    int w;

//#define CLIP(v) av_clip(v, 4, 1019)
#define CLIP(v) (v & 0x03FF)
// Pack three 10-bit samples (low to high) into one 32-bit word
#define WRITE_PIXELS(a, b, c) \
    do \
    { \
        val = CLIP(*a++); \
        val |= (CLIP(*b++) << 10) | (CLIP(*c++) << 20); \
        *p++ = val; \
    } while (0)

    for (int h = 0; h < height; h++)
    {
        uint32_t val;
        // Main loop: 6 pixels per iteration, in the fixed v210 sample order
        for (w = 0; w < width - 5; w += 6)
        {
            WRITE_PIXELS(u, y, v);
            WRITE_PIXELS(y, u, y);
            WRITE_PIXELS(v, y, u);
            WRITE_PIXELS(y, v, y);
        }
        // Handle the 2- or 4-pixel remainder of the row
        if (w < width - 1)
        {
            WRITE_PIXELS(u, y, v);

            val = CLIP(*y++);
            if (w == width - 2)
                *p++ = val;

            if (w < width - 3)
            {
                val |= (CLIP(*u++) << 10) | (CLIP(*y++) << 20);
                *p++ = val;

                val = CLIP(*v++) | (CLIP(*y++) << 10);
                *p++ = val;
            }
        }

        // Zero the padding up to the 128-byte-aligned row end
        pdst += outStride;
        memset(p, 0, pdst - (BYTE *)p);
        p = (uint32_t *)pdst;
        // The sample pointers already advanced by "width"; step to the next row
        y += srcyStride - width;
        u += srcuvStride - (width >> 1);
        v += srcuvStride - (width >> 1);
    }

    av_freep(&pTmpBuffer);
    return S_OK;
}
// Convert the input frame to v410: 10-bit 4:4:4 with U, Y, V packed into
// bits 2..31 of each 32-bit word (bits 0-1 unused). Inputs that are not
// 16-bit planar 4:4:4 with <= 10 bits are converted to YUV444P10 first;
// 9-bit inputs are shifted up by one bit.
HRESULT CLAVPixFmtConverter::ConvertTov410(const uint8_t *const src[4], const ptrdiff_t srcStride[4], uint8_t *dst[],
                                           int width, int height, const ptrdiff_t dstStride[])
{
    const uint16_t *y = nullptr;
    const uint16_t *u = nullptr;
    const uint16_t *v = nullptr;
    ptrdiff_t sourceStride = 0; // stride in uint16_t units
    bool b9Bit = false;

    BYTE *pTmpBuffer = nullptr;

    if (m_InputPixFmt != LAVPixFmt_YUV444bX || m_InBpp > 10)
    {
        // Convert to 10-bit (in 16-bit containers) 4:4:4 planar first
        uint8_t *tmp[4] = {nullptr};
        ptrdiff_t tmpStride[4] = {0};
        ptrdiff_t scaleStride = FFALIGN(width, 32);

        pTmpBuffer = (BYTE *)av_malloc(height * scaleStride * 6);
        if (pTmpBuffer == nullptr)
            return E_OUTOFMEMORY;

        tmp[0] = pTmpBuffer;
        tmp[1] = tmp[0] + (height * scaleStride * 2);
        tmp[2] = tmp[1] + (height * scaleStride * 2);
        tmp[3] = nullptr;
        tmpStride[0] = scaleStride * 2;
        tmpStride[1] = scaleStride * 2;
        tmpStride[2] = scaleStride * 2;
        tmpStride[3] = 0;

        SwsContext *ctx = GetSWSContext(width, height, GetFFInput(), AV_PIX_FMT_YUV444P10LE, SWS_BILINEAR);
        sws_scale2(ctx, src, srcStride, 0, height, tmp, tmpStride);

        y = (uint16_t *)tmp[0];
        u = (uint16_t *)tmp[1];
        v = (uint16_t *)tmp[2];
        sourceStride = scaleStride;
    }
    else
    {
        y = (uint16_t *)src[0];
        u = (uint16_t *)src[1];
        v = (uint16_t *)src[2];
        sourceStride = srcStride[0] / 2;

        b9Bit = (m_InBpp == 9);
    }

// Pack 10-bit U/Y/V into bits 2..31 of one 32-bit word
#define YUV444_v410_PACK *idst++ = ((uv & 0x3FF) << 2) | ((yv & 0x3FF) << 12) | ((vv & 0x3FF) << 22);

    BYTE *out = dst[0];
    YUV444_PACKED_LOOP_HEAD_LE(width, height, y, u, v, out)
        // Promote 9-bit samples to 10-bit range
        if (b9Bit)
        {
            yv <<= 1;
            uv <<= 1;
            vv <<= 1;
        }
        YUV444_v410_PACK
    YUV444_PACKED_LOOP_END(y, u, v, out, sourceStride, dstStride[0])

    av_freep(&pTmpBuffer);
    return S_OK;
}
| 25,359
|
C++
|
.cpp
| 666
| 29.07958
| 119
| 0.529017
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| false
| true
| true
| false
|
22,169
|
yuv444_ayuv.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/pixconv/yuv444_ayuv.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include <emmintrin.h>
#include "pixconv_internal.h"
#include "pixconv_sse2_templates.h"
#define PIXCONV_INTERLEAVE_AYUV(regY, regU, regV, regA, regOut1, regOut2) \
regY = _mm_unpacklo_epi8(regY, regA); /* YAYAYAYA */ \
regV = _mm_unpacklo_epi8(regV, regU); /* VUVUVUVU */ \
regOut1 = _mm_unpacklo_epi16(regV, regY); /* VUYAVUYA */ \
regOut2 = _mm_unpackhi_epi16(regV, regY); /* VUYAVUYA */
#define YUV444_PACK_AYUV(dst) *idst++ = v[i] | (u[i] << 8) | (y[i] << 16) | (0xff << 24);
// SSE2 conversion of 8-bit planar YUV 4:4:4 into packed AYUV (bytes stored
// V,U,Y,A with A = 0xFF), interleaving 16 pixels per loop iteration.
// NOTE(review): the loop advances i in steps of 16 with no tail handling,
// and uses aligned loads / streaming stores — the buffers are presumably
// padded/aligned accordingly by the caller; confirm against allocation code.
DECLARE_CONV_FUNC_IMPL(convert_yuv444_ayuv)
{
    const uint8_t *y = (const uint8_t *)src[0];
    const uint8_t *u = (const uint8_t *)src[1];
    const uint8_t *v = (const uint8_t *)src[2];
    const ptrdiff_t inStride = srcStride[0];
    const ptrdiff_t outStride = dstStride[0];

    ptrdiff_t line, i;

    __m128i xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6;
    xmm6 = _mm_set1_epi32(-1); // constant 0xFF alpha bytes

    _mm_sfence();

    for (line = 0; line < height; ++line)
    {
        __m128i *dst128 = (__m128i *)(dst[0] + line * outStride);

        for (i = 0; i < width; i += 16)
        {
            // Load pixels into registers
            PIXCONV_LOAD_PIXEL8_ALIGNED(xmm0, (y + i)); /* YYYYYYYY */
            PIXCONV_LOAD_PIXEL8_ALIGNED(xmm1, (u + i)); /* UUUUUUUU */
            PIXCONV_LOAD_PIXEL8_ALIGNED(xmm2, (v + i)); /* VVVVVVVV */

            // Interlave into AYUV: byte pairs first (Y+A, V+U), then 16-bit
            // pairs to produce the final V,U,Y,A byte order
            xmm4 = xmm0;
            xmm0 = _mm_unpacklo_epi8(xmm0, xmm6); /* YAYAYAYA */
            xmm4 = _mm_unpackhi_epi8(xmm4, xmm6); /* YAYAYAYA */

            xmm5 = xmm2;
            xmm2 = _mm_unpacklo_epi8(xmm2, xmm1); /* VUVUVUVU */
            xmm5 = _mm_unpackhi_epi8(xmm5, xmm1); /* VUVUVUVU */

            xmm1 = _mm_unpacklo_epi16(xmm2, xmm0); /* VUYAVUYA */
            xmm2 = _mm_unpackhi_epi16(xmm2, xmm0); /* VUYAVUYA */
            xmm0 = _mm_unpacklo_epi16(xmm5, xmm4); /* VUYAVUYA */
            xmm3 = _mm_unpackhi_epi16(xmm5, xmm4); /* VUYAVUYA */

            // Write data back (non-temporal stores, bypassing the cache)
            _mm_stream_si128(dst128++, xmm1);
            _mm_stream_si128(dst128++, xmm2);
            _mm_stream_si128(dst128++, xmm0);
            _mm_stream_si128(dst128++, xmm3);
        }

        y += inStride;
        u += inStride;
        v += inStride;
    }

    return S_OK;
}
// SSE2 conversion of high-bitdepth (little-endian 16-bit) planar YUV 4:4:4
// into packed 8-bit AYUV, applying either random or ordered dithering while
// reducing the bit depth. Processes 8 pixels per loop iteration.
// NOTE(review): like the 8-bit path, the loop has no width tail handling
// and uses streaming stores; padding/alignment is presumably guaranteed by
// the caller.
DECLARE_CONV_FUNC_IMPL(convert_yuv444_ayuv_dither_le)
{
    const uint16_t *y = (const uint16_t *)src[0];
    const uint16_t *u = (const uint16_t *)src[1];
    const uint16_t *v = (const uint16_t *)src[2];
    const ptrdiff_t inStride = srcStride[0] >> 1; // in uint16_t units
    const ptrdiff_t outStride = dstStride[0];

    // Fall back to ordered dithering when no random coefficients are available
    LAVDitherMode ditherMode = m_pSettings->GetDitherMode();
    const uint16_t *dithers = GetRandomDitherCoeffs(height, 3, 8, 0);
    if (dithers == nullptr)
        ditherMode = LAVDither_Ordered;

    ptrdiff_t line, i;

    __m128i xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7;
    xmm7 = _mm_set1_epi16(-256); /* 0xFF00 - 0A0A0A0A */

    _mm_sfence();

    for (line = 0; line < height; ++line)
    {
        // Load dithering coefficients for this line (one set per plane)
        if (ditherMode == LAVDither_Random)
        {
            xmm4 = _mm_load_si128((const __m128i *)(dithers + (line * 24) + 0));
            xmm5 = _mm_load_si128((const __m128i *)(dithers + (line * 24) + 8));
            xmm6 = _mm_load_si128((const __m128i *)(dithers + (line * 24) + 16));
        }
        else
        {
            PIXCONV_LOAD_DITHER_COEFFS(xmm6, line, 8, dithers);
            xmm4 = xmm5 = xmm6;
        }

        __m128i *dst128 = (__m128i *)(dst[0] + line * outStride);

        for (i = 0; i < width; i += 8)
        {
            // Load pixels into registers, and apply dithering
            PIXCONV_LOAD_PIXEL16_DITHER(xmm0, xmm4, (y + i), bpp);      /* Y0Y0Y0Y0 */
            PIXCONV_LOAD_PIXEL16_DITHER_HIGH(xmm1, xmm5, (u + i), bpp); /* U0U0U0U0 */
            PIXCONV_LOAD_PIXEL16_DITHER(xmm2, xmm6, (v + i), bpp);      /* V0V0V0V0 */

            // Interlave into AYUV (V,U,Y,A byte order, alpha = 0xFF)
            xmm0 = _mm_or_si128(xmm0, xmm7);  /* YAYAYAYA */
            xmm1 = _mm_and_si128(xmm1, xmm7); /* clear out clobbered low-bytes */
            xmm2 = _mm_or_si128(xmm2, xmm1);  /* VUVUVUVU */

            xmm3 = xmm2;
            xmm2 = _mm_unpacklo_epi16(xmm2, xmm0); /* VUYAVUYA */
            xmm3 = _mm_unpackhi_epi16(xmm3, xmm0); /* VUYAVUYA */

            // Write data back (non-temporal stores)
            _mm_stream_si128(dst128++, xmm2);
            _mm_stream_si128(dst128++, xmm3);
        }

        y += inStride;
        u += inStride;
        v += inStride;
    }

    return S_OK;
}
| 5,424
|
C++
|
.cpp
| 124
| 36.064516
| 89
| 0.573219
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| true
| true
| true
| false
|
22,170
|
LAVSubtitleProvider.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/subtitles/LAVSubtitleProvider.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "LAVSubtitleProvider.h"
#include "moreuuids.h"
#include "libavutil/colorspace.h"
#include "LAVVideo.h"
#include "version.h"
#define FAST_DIV255(x) ((((x) + 128) * 257) >> 16)
#define SUBTITLE_PTS_TIMEOUT (AV_NOPTS_VALUE + 1)
#define OFFSET(x) offsetof(LAVSubtitleProviderContext, x)
// clang-format off
// Option table consumed by CSubRenderOptionsImpl: maps each named
// SubRenderIntf option to a field offset inside LAVSubtitleProviderContext.
static const SubRenderOption options[] = {
    { "name", OFFSET(name), SROPT_TYPE_STRING, SROPT_FLAG_READONLY },
    { "version", OFFSET(version), SROPT_TYPE_STRING, SROPT_FLAG_READONLY },
    { "yuvMatrix", OFFSET(yuvMatrix), SROPT_TYPE_STRING, SROPT_FLAG_READONLY },
    { "outputLevels", OFFSET(outputLevels), SROPT_TYPE_STRING, SROPT_FLAG_READONLY },
    { "colorPrimaries", OFFSET(primaries), SROPT_TYPE_STRING, SROPT_FLAG_READONLY },
    { "isBitmap", OFFSET(isBitmap), SROPT_TYPE_BOOL, SROPT_FLAG_READONLY },
    { "isMovable", OFFSET(isMovable), SROPT_TYPE_BOOL, SROPT_FLAG_READONLY },
    { "combineBitmaps", OFFSET(combineBitmaps), SROPT_TYPE_BOOL, 0 },
    { 0 } // terminator
};
// clang-format on
#define COLOR_PRIM_NTSC _T("601_525")
#define COLOR_PRIM_PAL _T("601_625")
// Construct the subtitle provider: initialize the option context with
// defaults, take a self-reference (released when the consumer disconnects
// elsewhere) and connect to the given consumer.
CLAVSubtitleProvider::CLAVSubtitleProvider(CLAVVideo *pLAVVideo, ISubRenderConsumer *pConsumer)
    : CSubRenderOptionsImpl(::options, &context)
    , CUnknown(L"CLAVSubtitleProvider", nullptr)
    , m_pLAVVideo(pLAVVideo)
{
    m_ControlThread = new CLAVSubtitleProviderControlThread();

    ASSERT(pConsumer);
    ZeroMemory(&context, sizeof(context));
    context.name = TEXT(LAV_VIDEO);
    context.version = TEXT(LAV_VERSION_STR);
    context.yuvMatrix = _T("None");
    context.outputLevels = _T("PC");
    context.primaries = COLOR_PRIM_NTSC; // adjusted later based on video height
    context.isBitmap = true;
    context.isMovable = true;
    // Keep ourselves alive while connected to the consumer
    AddRef();
    SetConsumer(pConsumer);
}
// Tear down in dependency order: drop pending rects, close the decoder,
// disconnect from the consumer, then destroy the control thread.
CLAVSubtitleProvider::~CLAVSubtitleProvider(void)
{
    Flush();
    CloseDecoder();
    DisconnectConsumer();
    SAFE_DELETE(m_ControlThread);
}
// Release all FFmpeg decoding state: codec context (including extradata),
// parser, and the reusable subtitle packet. Safe to call repeatedly.
void CLAVSubtitleProvider::CloseDecoder()
{
    CAutoLock lock(this);
    m_pAVCodec = nullptr;
    if (m_pAVCtx) {
        // extradata was allocated by us; free it before the context
        if (m_pAVCtx->extradata)
            av_freep(&m_pAVCtx->extradata);
        avcodec_free_context(&m_pAVCtx);
    }
    if (m_pParser) {
        av_parser_close(m_pParser);
        m_pParser = nullptr;
    }
    av_packet_free(&m_pSubtitlePacket);
}
// Attach a new subtitle consumer, replacing any existing one. Takes a
// reference on the consumer and connects it; the optional ISubRenderConsumer2
// interface is forwarded to the control thread (nullptr if unsupported).
STDMETHODIMP CLAVSubtitleProvider::SetConsumer(ISubRenderConsumer *pConsumer)
{
    CAutoLock lock(this);
    if (m_pConsumer)
        DisconnectConsumer();

    CheckPointer(pConsumer, E_FAIL);
    m_pConsumer = pConsumer;
    m_pConsumer->AddRef();

    m_pConsumer->Connect(this);

    if (FAILED(m_pConsumer->QueryInterface(&m_pConsumer2)))
        m_pConsumer2 = nullptr;

    m_ControlThread->SetConsumer2(m_pConsumer2);

    return S_OK;
}
// Detach the current consumer (if any): notify it, then drop our references.
// Returns S_FALSE when no consumer was attached.
STDMETHODIMP CLAVSubtitleProvider::DisconnectConsumer(void)
{
    CAutoLock lock(this);
    CheckPointer(m_pConsumer, S_FALSE);
    m_ControlThread->SetConsumer2(nullptr);
    m_pConsumer->Disconnect();
    SafeRelease(&m_pConsumer);
    SafeRelease(&m_pConsumer2);

    return S_OK;
}
#define PTS2RT(pts) (10000i64 * pts / 90)
// Consumer callback: assemble and deliver the set of subtitle bitmaps active
// in the [start, stop) interval. A nullptr frame is delivered when no rect is
// active. NOTE: the "context" parameter is the consumer's opaque cookie and
// shadows the "context" options member of this class.
STDMETHODIMP CLAVSubtitleProvider::RequestFrame(REFERENCE_TIME start, REFERENCE_TIME stop, LPVOID context)
{
    ASSERT(m_pConsumer);

    // Create a new frame
    CLAVSubtitleFrame *subtitleFrame = new CLAVSubtitleFrame();
    subtitleFrame->AddRef();

    // 720x480 is the default DVD canvas; if the consumer reports a different
    // height for 720-wide video, adopt it (NTSC/PAL fixup)
    if (m_pAVCtx->width == 720 && m_pAVCtx->height == 480) {
        SIZE videoSize;
        m_pConsumer->GetSize("originalVideoSize", &videoSize);
        if (videoSize.cx == 720) {
            m_pAVCtx->height = videoSize.cy;
        }
    }

    // update primaries based on the video dimensions
    if (m_pAVCtx->height == 480)
        this->context.primaries = COLOR_PRIM_NTSC;
    else if (m_pAVCtx->height == 576)
        this->context.primaries = COLOR_PRIM_PAL;

    RECT outputRect;
    ::SetRect(&outputRect, 0, 0, m_pAVCtx->width, m_pAVCtx->height);
    subtitleFrame->SetOutputRect(outputRect);

    // Use the midpoint of the interval to decide which rects are visible
    REFERENCE_TIME mid = start + ((stop-start) >> 1);

    // Scope this so we limit the provider-lock to the part where its needed
    {
        CAutoLock lock(this);
        for (auto it = m_SubFrames.begin(); it != m_SubFrames.end(); it++) {
            CLAVSubRect *pRect = *it;
            // A rect is active if its start/stop bracket the midpoint (an
            // AV_NOPTS_VALUE bound is open-ended), and either compositing is
            // on or the rect is forced
            if ((pRect->rtStart == AV_NOPTS_VALUE || pRect->rtStart <= mid)
                && (pRect->rtStop == AV_NOPTS_VALUE || pRect->rtStop > mid)
                && (m_bComposit || pRect->forced)) {
                // Apply DVD highlight (menu button) info when active
                if (m_pHLI && PTS2RT(m_pHLI->StartPTM) <= mid && PTS2RT(m_pHLI->EndPTM) >= mid) {
                    pRect = ProcessDVDHLI(pRect);
                }
                subtitleFrame->AddBitmap(pRect);
            }
        }
        m_rtLastFrame = start;
    }

    // An empty frame is delivered as nullptr (SafeRelease nulls the pointer)
    if (subtitleFrame->Empty()) {
        SafeRelease(&subtitleFrame);
    }

    // Deliver Frame
    m_pConsumer->DeliverFrame(start, stop, context, subtitleFrame);
    SafeRelease(&subtitleFrame);

    // Drop rects that expired well in the past
    TimeoutSubtitleRects(stop);

    return S_OK;
}
// Consumer-initiated disconnect: just drop our reference to the consumer.
STDMETHODIMP CLAVSubtitleProvider::Disconnect(void)
{
    SafeRelease(&m_pConsumer);
    return S_OK;
}
/**
 * Initialize the FFmpeg subtitle decoder for the given codec.
 *
 * Finds and opens the decoder, sets up an (optional) parser, copies any
 * codec extradata from the media type, and derives the subtitle canvas
 * dimensions from the video format header when available.
 *
 * Returns VFW_E_TYPE_NOT_ACCEPTED when the codec is unsupported or fails
 * to open, E_POINTER / E_OUTOFMEMORY on allocation failures.
 */
STDMETHODIMP CLAVSubtitleProvider::InitDecoder(const CMediaType *pmt, AVCodecID codecId)
{
    CAutoLock lock(this);
    m_pAVCodec = avcodec_find_decoder(codecId);
    CheckPointer(m_pAVCodec, VFW_E_TYPE_NOT_ACCEPTED);

    m_pAVCtx = avcodec_alloc_context3(m_pAVCodec);
    CheckPointer(m_pAVCtx, E_POINTER);

    m_pParser = av_parser_init(codecId);

    size_t extralen = 0;
    getExtraData((const BYTE *)pmt->Format(), pmt->FormatType(), pmt->FormatLength(), nullptr, &extralen);

    if (extralen > 0) {
        // Just copy extradata (with the padding FFmpeg requires)
        BYTE *extra = (uint8_t *)av_mallocz(extralen + AV_INPUT_BUFFER_PADDING_SIZE);
        if (extra == nullptr) {
            // previously unchecked: av_mallocz can fail
            CloseDecoder();
            return E_OUTOFMEMORY;
        }
        getExtraData((const BYTE *)pmt->Format(), pmt->FormatType(), pmt->FormatLength(), extra, nullptr);

        m_pAVCtx->extradata = extra;
        m_pAVCtx->extradata_size = (int)extralen;
    }

    if (pmt->formattype == FORMAT_SubtitleInfo) {
        // Not much info in here
    } else {
        // Try video info for the subtitle canvas dimensions
        BITMAPINFOHEADER *bmi = nullptr;
        videoFormatTypeHandler(*pmt, &bmi, nullptr, nullptr, nullptr);
        if (bmi) { // guard against unrecognized format types
            m_pAVCtx->width = bmi->biWidth;
            m_pAVCtx->height = bmi->biHeight;
        }
    }

    int ret = avcodec_open2(m_pAVCtx, m_pAVCodec, nullptr);
    if (ret < 0) {
        // typo fix: message previously read "avocdec_open2"
        DbgLog((LOG_TRACE, 10, L"CLAVSubtitleProvider::InitDecoder(): avcodec_open2 failed with %d", ret));
        CloseDecoder();
        return VFW_E_TYPE_NOT_ACCEPTED;
    }

    return S_OK;
}
// Discard all queued subtitle state (rects, DVD highlight, cached packet)
// and reset timing/menu flags; called on seek and teardown.
STDMETHODIMP CLAVSubtitleProvider::Flush()
{
    CAutoLock lock(this);
    ClearSubtitleRects();
    SAFE_DELETE(m_pHLI);
    m_rtLastFrame = AV_NOPTS_VALUE;
    context.isMovable = true;
    m_pLAVVideo->SetInDVDMenu(false);

    av_packet_free(&m_pSubtitlePacket);

    return S_OK;
}
// Release every queued subtitle rect and empty the queue.
void CLAVSubtitleProvider::ClearSubtitleRects()
{
    CAutoLock lock(this);
    // Drop our reference on each rect before discarding the list
    for (auto *pRect : m_SubFrames)
        pRect->Release();
    m_SubFrames.clear();
}
// Remove subtitle rects whose stop time lies more than 10 seconds before
// the given reference time (open-ended rects are kept).
void CLAVSubtitleProvider::TimeoutSubtitleRects(REFERENCE_TIME rt)
{
    CAutoLock lock(this);
    REFERENCE_TIME timestamp = rt - 10 * 10000000; // Timeout all subs 10 seconds in the past
    auto it = m_SubFrames.begin();
    // erase() returns the next iterator, so only advance manually when keeping
    while (it != m_SubFrames.end()) {
        if ((*it)->rtStop != AV_NOPTS_VALUE && (*it)->rtStop < timestamp) {
            DbgLog((LOG_TRACE, 10, L"Timed out subtitle at %I64d", (*it)->rtStart));
            (*it)->Release();
            it = m_SubFrames.erase(it);
        } else {
            it++;
        }
    }
}
// Feed a raw subtitle buffer through the (optional) parser and decoder.
// The parser may split one input buffer into multiple frames or hold data
// back; m_rtStartCache carries the start timestamp across those splits so
// each assembled frame gets the timestamp of the buffer that started it.
STDMETHODIMP CLAVSubtitleProvider::Decode(BYTE *buf, int buflen, REFERENCE_TIME rtStartIn, REFERENCE_TIME rtStopIn)
{
    ASSERT(m_pAVCtx);

    // Lazily allocate the reusable packet (freed in CloseDecoder/Flush)
    if (m_pSubtitlePacket == nullptr)
        m_pSubtitlePacket = av_packet_alloc();

    AVSubtitle sub;
    memset(&sub, 0, sizeof(sub));

    if (!buflen || !buf) {
        return S_OK;
    }

    while (buflen > 0) {
        REFERENCE_TIME rtStart = rtStartIn, rtStop = rtStopIn;
        int used_bytes = 0;
        int got_sub = 0;
        if (m_pParser) {
            uint8_t *pOut = nullptr;
            int pOut_size = 0;
            used_bytes = av_parser_parse2(m_pParser, m_pAVCtx, &pOut, &pOut_size, buf, buflen, AV_NOPTS_VALUE, AV_NOPTS_VALUE, 0);
            if (used_bytes == 0 && pOut_size == 0) {
                DbgLog((LOG_TRACE, 50, L"CLAVSubtitleProvider::Decode - could not process buffer"));
                break;
            }

            // Timestamp bookkeeping for split/merged frames:
            if (used_bytes > pOut_size) {
                // parser consumed more than it emitted: remember this start
                if (rtStartIn != AV_NOPTS_VALUE)
                    m_rtStartCache = rtStartIn;
            } else if (used_bytes == pOut_size) {
                // frame boundary aligned with buffer: timestamps consumed
                m_rtStartCache = rtStartIn = AV_NOPTS_VALUE;
            } else if (pOut_size > used_bytes) {
                // frame completed from earlier data: use the cached start
                rtStart = m_rtStartCache;
                m_rtStartCache = rtStartIn;
                // The value was used once, don't use it for multiple frames, that ends up in weird timings
                rtStartIn = AV_NOPTS_VALUE;
            }

            if (pOut_size > 0) {
                m_pSubtitlePacket->data = pOut;
                m_pSubtitlePacket->size = pOut_size;
                m_pSubtitlePacket->pts = rtStart;
                m_pSubtitlePacket->duration = 0;

                int ret = avcodec_decode_subtitle2(m_pAVCtx, &sub, &got_sub, m_pSubtitlePacket);
                if (ret < 0) {
                    DbgLog((LOG_TRACE, 50, L"CLAVSubtitleProvider::Decode - decoding failed despite successful parsing"));
                    got_sub = 0;
                }
            } else {
                got_sub = 0;
            }
        }

        if (used_bytes < 0) {
            return S_OK;
        }

        // Without a parser and without progress, stop to avoid looping forever
        if (!m_pParser && (!got_sub && used_bytes == 0)) {
            buflen = 0;
        } else {
            buf += used_bytes;
            buflen -= used_bytes;
        }

        if (got_sub) {
            ProcessSubtitleFrame(&sub, rtStart);
        }
        avsubtitle_free(&sub);
    }

    return S_OK;
}
// Turn a decoded AVSubtitle into one or more queued subtitle rects,
// converting FFmpeg display times (ms) into reference time and applying
// DVD-specific rules (one-sub-at-a-time, fade palettes).
void CLAVSubtitleProvider::ProcessSubtitleFrame(AVSubtitle *sub, REFERENCE_TIME rtStart)
{
    DbgLog((LOG_TRACE, 10, L"Decoded Sub: rtStart: %I64d, start_display_time: %d, end_display_time: %d, num_rects: %u, num_dvd_palette: %d", rtStart, sub->start_display_time, sub->end_display_time, sub->num_rects, sub->num_dvd_palette));
    if (sub->num_rects > 0) {
        if (m_pAVCtx->codec_id == AV_CODEC_ID_DVD_SUBTITLE) {
            CAutoLock lock(this);
            // DVD subs have the limitation that only one subtitle can be shown at a given time,
            // so we need to timeout unlimited subs when a new one appears, as well as limit the duration of timed subs
            // to prevent overlapping subtitles
            REFERENCE_TIME rtSubTimeout = (rtStart != AV_NOPTS_VALUE) ? rtStart - 1 : SUBTITLE_PTS_TIMEOUT;
            for (auto it = m_SubFrames.begin(); it != m_SubFrames.end(); it++) {
                if ((*it)->rtStop == AV_NOPTS_VALUE || rtStart == AV_NOPTS_VALUE || (*it)->rtStop > rtStart) {
                    (*it)->rtStop = rtSubTimeout;
                }
            }

            // Override subtitle timestamps if we have a timeout, and are not in a menu
            if (rtStart == AV_NOPTS_VALUE && sub->end_display_time > 0 && !(sub->rects[0]->flags & AV_SUBTITLE_FLAG_FORCED)) {
                DbgLog((LOG_TRACE, 10, L" -> Overriding subtitle timestamp to %I64d", m_rtLastFrame));
                rtStart = m_rtLastFrame;
            }
        }

        // end_display_time == 0 means "no stop time": leave rtStop open-ended
        REFERENCE_TIME rtStop = AV_NOPTS_VALUE;
        if (rtStart != AV_NOPTS_VALUE) {
            if (sub->end_display_time > 0) {
                rtStop = rtStart + (sub->end_display_time * 10000i64); // ms -> 100ns
            }
            rtStart += sub->start_display_time * 10000i64;
        }

        for (unsigned i = 0; i < sub->num_rects; i++) {
            if (sub->num_dvd_palette > 1 && rtStart != AV_NOPTS_VALUE) {
                // DVD fade effect: split the rect into consecutive sub-intervals,
                // each with the alpha values of its palette entry
                REFERENCE_TIME rtStartRect = rtStart - (sub->start_display_time * 10000i64);
                REFERENCE_TIME rtStopRect = rtStart;
                for (unsigned k = 0; k < sub->num_dvd_palette; k++) {
                    // Start is the stop of the previous part
                    rtStartRect = rtStopRect;

                    // Stop is either the start of the next part, or the final stop
                    if (k < (sub->num_dvd_palette-1))
                        rtStopRect = rtStart + (sub->dvd_palette[k+1]->start_display_time * 10000i64);
                    else
                        rtStopRect = rtStop;

                    // Update palette with new alpha values (4-bit alpha scaled to 8-bit)
                    for (unsigned j = 0; j < 4; j++)
                        sub->rects[i]->data[1][(j << 2) + 3] = sub->dvd_palette[k]->alpha[j] * 17;

                    ProcessSubtitleRect(sub->rects[i], rtStartRect, rtStopRect);
                }
            } else {
                ProcessSubtitleRect(sub->rects[i], rtStart, rtStop);
            }
        }
    }
}
// Converts one paletted AVSubtitleRect into a pre-multiplied RGBA CLAVSubRect
// (padded to even position/size so later chroma subsampling is safe) and adds
// it to the active subtitle list. For DVD subs the original paletted pixels
// are kept alongside, so menu highlights can be re-colored later.
// rtStart/rtStop may be AV_NOPTS_VALUE for "unbounded" display times.
void CLAVSubtitleProvider::ProcessSubtitleRect(AVSubtitleRect *rect, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
    DbgLog((LOG_TRACE, 10, L"Subtitle Rect, start: %I64d, stop: %I64d", rtStart, rtStop));

    // Skip zero-length subs
    if (rtStart != AV_NOPTS_VALUE && rtStart == rtStop)
        return;

    // Pad odd position to an even one, and round size up to even values.
    int hpad = rect->x & 1;
    int vpad = rect->y & 1;

    int width = rect->w + hpad;
    if (width & 1)
        width++;

    int height = rect->h + vpad;
    if (height & 1)
        height++;

    int rgbStride = FFALIGN(width, 16);

    // RGBA destination buffer, 4 bytes per pixel.
    BYTE *rgbSub = (BYTE *)CoTaskMemAlloc(rgbStride * height * 4);
    if (!rgbSub)
        return;
    BYTE *rgbSubStart = rgbSub;
    const BYTE *palSub = rect->data[0];  // paletted source pixels
    const BYTE *palette = rect->data[1]; // BGRA palette entries

    memset(rgbSub, 0, rgbStride * height * 4);

    // Skip the padding rows/columns; they stay fully transparent.
    rgbSub += (rgbStride * vpad + hpad) * 4;

    for (int y = 0; y < rect->h; y++) {
        for (int x = 0; x < rect->w; x++) {
            // Read paletted value
            int idx = palSub[x];

            // Skip invalid values
            if (idx >= rect->nb_colors)
                continue;

            // Read RGB values from palette
            BYTE b = palette[(idx << 2) + 0];
            BYTE g = palette[(idx << 2) + 1];
            BYTE r = palette[(idx << 2) + 2];
            BYTE a = palette[(idx << 2) + 3];

            // Store as RGBA pixel, pre-multiplied
            rgbSub[(x << 2) + 0] = FAST_DIV255(b * a);
            rgbSub[(x << 2) + 1] = FAST_DIV255(g * a);
            rgbSub[(x << 2) + 2] = FAST_DIV255(r * a);
            rgbSub[(x << 2) + 3] = a;
        }
        palSub += rect->linesize[0];
        rgbSub += rgbStride * 4;
    }

    // Store the rect
    POINT position = { rect->x - hpad, rect->y - vpad };
    SIZE size = { width, height };
    CLAVSubRect *lavRect = new CLAVSubRect();
    if (!lavRect) {
        // Fix: free the RGBA buffer instead of leaking it. (Note: plain `new`
        // throws on failure, so this branch is effectively defensive only.)
        CoTaskMemFree(rgbSubStart);
        return;
    }
    lavRect->id = m_SubPicId++;
    lavRect->pitch = rgbStride;
    lavRect->pixels = rgbSubStart;
    lavRect->position = position;
    lavRect->size = size;
    lavRect->rtStart = rtStart;
    lavRect->rtStop = rtStop;
    lavRect->forced = rect->flags & AV_SUBTITLE_FLAG_FORCED;

    if (m_pAVCtx->codec_id == AV_CODEC_ID_DVD_SUBTITLE) {
        // Keep the raw paletted pixels so ProcessDVDHLI can re-color them.
        lavRect->pixelsPal = CoTaskMemAlloc(lavRect->pitch * lavRect->size.cy);
        if (!lavRect->pixelsPal) {
            // NOTE(review): lavRect and its pixel buffer are abandoned here;
            // presumably CLAVSubRect's destructor would free `pixels` if the
            // object were released — confirm ownership before changing this.
            return;
        }
        // Pad with the first fully transparent palette index, if any.
        int paletteTransparent = 0;
        for (int i = 0; i < rect->nb_colors; i++) {
            if (palette[(i << 2) + 3] == 0) {
                paletteTransparent = i;
                break;
            }
        }
        memset(lavRect->pixelsPal, paletteTransparent, lavRect->pitch * lavRect->size.cy);
        BYTE *palPixels = (BYTE *)lavRect->pixelsPal;
        palSub = rect->data[0];
        palPixels += lavRect->pitch * vpad + hpad;
        for (int y = 0; y < rect->h; y++) {
            memcpy(palPixels, palSub, rect->w);
            palPixels += lavRect->pitch;
            palSub += rect->linesize[0];
        }
    }

    // Ensure the width/height in avctx are valid
    m_pAVCtx->width = FFMAX(m_pAVCtx->width, position.x + size.cx);
    m_pAVCtx->height = FFMAX(m_pAVCtx->height, position.y + size.cy);

    // HACK: Since we're only dealing with DVDs so far, do some trickery here
    if (m_pAVCtx->height > 480 && m_pAVCtx->height < 576)
        m_pAVCtx->height = 576;

    AddSubtitleRect(lavRect);
}
// Appends a decoded subtitle rect to the active list, taking a reference on it.
void CLAVSubtitleProvider::AddSubtitleRect(CLAVSubRect *rect)
{
    CAutoLock lock(this);
    rect->AddRef();
    m_SubFrames.push_back(rect);
}
// Mirror of ffmpeg's internal dvdsub decoder context (dvdsubdec.c).
// SetDVDPalette() below casts avctx->priv_data to this type to inject the
// palette, so the layout must stay in sync with the ffmpeg build in use.
typedef struct DVDSubContext
{
    AVClass *avclass;
    uint32_t palette[16];  // CLUT in ARGB; only honored when has_palette is set
    char *palette_str;
    char *ifo_str;
    int has_palette;
    uint8_t colormap[4];
    uint8_t alpha[256];
    uint8_t buf[0x10000];
    int buf_size;
    int forced_subs_only;
} DVDSubContext;

// ff_crop_tab is ffmpeg's clipping table, used by the YUV_TO_RGB*_CCIR macros;
// MAX_NEG_CROP must match the value ffmpeg was built with.
#define MAX_NEG_CROP 1024
extern "C" __declspec(dllimport) uint8_t ff_crop_tab[256 + 2 * MAX_NEG_CROP];
// Installs the 16-entry DVD subpicture palette (sent by the DVD navigator as
// YUV) into the ffmpeg dvdsub decoder by writing its private context directly.
// Fails with E_FAIL unless a DVD subtitle decoder is currently open.
STDMETHODIMP CLAVSubtitleProvider::SetDVDPalette(AM_PROPERTY_SPPAL *pPal)
{
    DbgLog((LOG_TRACE, 10, L"CLAVSubtitleProvider(): Setting new DVD Palette"));
    CAutoLock lock(this);
    if (!m_pAVCtx || m_pAVCtx->codec_id != AV_CODEC_ID_DVD_SUBTITLE || !pPal) {
        return E_FAIL;
    }

    DVDSubContext *ctx = (DVDSubContext *)m_pAVCtx->priv_data;
    ctx->has_palette = 1;

    uint8_t r,g,b;
    int i, y, cb, cr;
    int r_add, g_add, b_add;  // outputs of YUV_TO_RGB1_CCIR
    uint8_t *cm = ff_crop_tab + MAX_NEG_CROP;  // clip table used by the macros
    for (i = 0; i < 16; i++) {
        y = pPal->sppal[i].Y;
        // NOTE(review): cb is fed from .V and cr from .U — this cross mapping
        // looks deliberate (matching the macro operand order), but confirm
        // against the AM_PROPERTY_SPPAL channel definitions before touching.
        cb = pPal->sppal[i].V;
        cr = pPal->sppal[i].U;
        YUV_TO_RGB1_CCIR(cb, cr);
        YUV_TO_RGB2_CCIR(r, g, b, y);
        // Stored as fully opaque ARGB; alpha comes from the subpicture stream.
        ctx->palette[i] = (0xFF << 24) | (r << 16) | (g << 8) | b;
    }
    return S_OK;
}
// Receives DVD menu highlight (HLI) events. Stores a copy of the new highlight
// when it differs from the current one and triggers a redraw; a nullptr clears
// the highlight. Also marks the stream as being in a DVD menu.
STDMETHODIMP CLAVSubtitleProvider::SetDVDHLI(struct _AM_PROPERTY_SPHLI *pHLI)
{
    bool redraw = false;

    // Scoped lock so the lock is lifted when the redraw is issued
    // Otherwise we can deadlock in the decoder - this one holding the provider lock, the decoder holding the decoder lock...
    {
        CAutoLock lock(this);
        if (pHLI) {
            // Only treat it as a change when position or color/contrast differ.
#define DHLI(var) (pHLI->var != m_pHLI->var)
            if (!m_pHLI || DHLI(StartX) || DHLI(StopX) || DHLI(StartY) || DHLI(StopY) || memcmp(&pHLI->ColCon, &m_pHLI->ColCon, sizeof(pHLI->ColCon)) != 0) {
                DbgLog((LOG_TRACE, 10, L"CLAVSubtitleProvider(): DVD HLI event. HLISS: %u, x: %u->%u, y: %u->%u, StartPTM: %u, EndPTM: %u", pHLI->HLISS, pHLI->StartX, pHLI->StopX, pHLI->StartY, pHLI->StopY, pHLI->StartPTM, pHLI->EndPTM));
                SAFE_DELETE(m_pHLI);
                m_pHLI = new AM_PROPERTY_SPHLI(*pHLI);
                redraw = true;
            }
            context.isMovable = false;
            m_pLAVVideo->SetInDVDMenu(true);
        } else {
            SAFE_DELETE(m_pHLI);
        }
    }

    // Issue the flush outside the lock (see deadlock note above).
    if (redraw)
        ControlCmd(CNTRL_FLUSH);

    return S_OK;
}
// Applies the active DVD menu highlight to a subtitle rect: pixels inside the
// highlight rectangle are re-colored/re-alphaed from the decoder palette using
// the highlight's color/contrast selection. Returns the input rect unchanged
// when there is nothing to do, otherwise a newly created modified copy.
CLAVSubRect* CLAVSubtitleProvider::ProcessDVDHLI(CLAVSubRect *rect)
{
    DVDSubContext *ctx = (DVDSubContext *)m_pAVCtx->priv_data;
    // Requires an active highlight, the original paletted pixels, and a palette.
    if (!m_pHLI || !rect->pixelsPal || !ctx->has_palette)
        return rect;

    LPVOID newPixels = CoTaskMemAlloc(rect->pitch * rect->size.cy * 4);
    if (!newPixels) return rect;

    // copy pixels before modification
    memcpy(newPixels, rect->pixels, rect->pitch * rect->size.cy * 4);

    uint8_t *originalPalPixels = (uint8_t *)rect->pixelsPal;

    // create new object
    rect = new CLAVSubRect(*rect);
    rect->ResetRefCount();
    rect->pixels = newPixels;
    rect->pixelsPal = nullptr;

    // Need to assign a new Id since we're modifying it here..
    rect->id = m_SubPicId++;

    uint8_t *palette = (uint8_t *)ctx->palette;
    for (int y = 0; y < rect->size.cy; y++) {
        // Rows outside the highlight rectangle stay untouched.
        if (y+rect->position.y < m_pHLI->StartY || y+rect->position.y > m_pHLI->StopY)
            continue;
        uint8_t *pixelsPal = originalPalPixels + rect->pitch * y;
        uint8_t *pixels = ((uint8_t *)rect->pixels) + rect->pitch * y * 4;
        for (int x = 0; x < rect->size.cx; x++) {
            // Columns outside the highlight rectangle stay untouched.
            if (x+rect->position.x < m_pHLI->StartX || x+rect->position.x > m_pHLI->StopX)
                continue;
            uint8_t idx = pixelsPal[x];
            uint8_t alpha = 0;
            // Map the four DVD subpicture color classes to the highlight's
            // color index and contrast value.
            switch (idx) {
            case 0:
                idx = m_pHLI->ColCon.backcol;
                alpha = m_pHLI->ColCon.backcon;
                break;
            case 1:
                idx = m_pHLI->ColCon.patcol;
                alpha = m_pHLI->ColCon.patcon;
                break;
            case 2:
                idx = m_pHLI->ColCon.emph1col;
                alpha = m_pHLI->ColCon.emph1con;
                break;
            case 3:
                idx = m_pHLI->ColCon.emph2col;
                alpha = m_pHLI->ColCon.emph2con;
                break;
            }
            // Read RGB values from palette
            BYTE b = palette[(idx << 2) + 0];
            BYTE g = palette[(idx << 2) + 1];
            BYTE r = palette[(idx << 2) + 2];
            // Presumably a 4-bit contrast value scaled into the high nibble
            // (0..15 -> 0..240) — confirm against the SPHLI spec.
            BYTE a = alpha << 4;

            // Store as RGBA pixel, pre-multiplied
            pixels[(x << 2) + 0] = FAST_DIV255(b * a);
            pixels[(x << 2) + 1] = FAST_DIV255(g * a);
            pixels[(x << 2) + 2] = FAST_DIV255(r * a);
            pixels[(x << 2) + 3] = a;
        }
    }
    return rect;
}
// Toggles DVD subpicture compositing (the navigator's "composit on/off" event).
STDMETHODIMP CLAVSubtitleProvider::SetDVDComposit(BOOL bComposit)
{
    CAutoLock lock(this);
    m_bComposit = bComposit;
    return S_OK;
}
// Worker thread used to issue control commands (flush, exit) to the consumer
// without holding the provider lock on the caller's thread.
CLAVSubtitleProviderControlThread::CLAVSubtitleProviderControlThread()
    : CAMThread()
{
    // Start the worker immediately; commands are delivered via CallWorker.
    Create();
}

CLAVSubtitleProviderControlThread::~CLAVSubtitleProviderControlThread()
{
    // Tell the worker loop to exit, then tear the thread down.
    CallWorker(CLAVSubtitleProvider::CNTRL_EXIT);
    Close();
}

void CLAVSubtitleProviderControlThread::SetConsumer2(ISubRenderConsumer2 * pConsumer2)
{
    // NOTE(review): stores a raw pointer without AddRef — the owner must keep
    // the consumer alive while this thread can still call Clear() on it.
    CAutoLock lock(this);
    m_pConsumer2 = pConsumer2;
}
// Worker loop: waits for control requests and services them until told to exit.
// CNTRL_EXIT terminates the loop; CNTRL_FLUSH clears the connected consumer.
// Each request is acknowledged (Reply) before the actual work is performed.
DWORD CLAVSubtitleProviderControlThread::ThreadProc()
{
    SetThreadName(-1, "LAV Subtitle Control Thread");
    for (;;)
    {
        const DWORD request = GetRequest();
        if (request == CLAVSubtitleProvider::CNTRL_EXIT)
        {
            Reply(S_OK);
            return 0;
        }
        if (request == CLAVSubtitleProvider::CNTRL_FLUSH)
        {
            // Acknowledge first so the requester is not blocked on the clear.
            Reply(S_OK);
            {
                CAutoLock lock(this);
                if (m_pConsumer2)
                    m_pConsumer2->Clear();
            }
        }
    }
    return 1; // unreachable; kept for parity with the thread signature
}
| 21,247
|
C++
|
.cpp
| 594
| 31.186869
| 235
| 0.648296
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| true
| true
| false
|
22,171
|
LAVSubtitleConsumer.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/subtitles/LAVSubtitleConsumer.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "LAVSubtitleConsumer.h"
#include "LAVVideo.h"
#include "Media.h"
#include "version.h"
// Byte offset of a member within LAVSubtitleConsumerContext, for the table below.
#define OFFSET(x) offsetof(LAVSubtitleConsumerContext, x)
// clang-format off
// Options exposed via ISubRenderOptions; the table is null-terminated.
static const SubRenderOption options[] = {
    { "name", OFFSET(name), SROPT_TYPE_STRING, SROPT_FLAG_READONLY },
    { "version", OFFSET(version), SROPT_TYPE_STRING, SROPT_FLAG_READONLY },
    { "originalVideoSize", OFFSET(originalVideoSize), SROPT_TYPE_SIZE, SROPT_FLAG_READONLY },
    { 0 }
};
// clang-format on

// Rounded integer x/255, exact for products of two 8-bit values.
#define FAST_DIV255(x) ((((x) + 128) * 257) >> 16)

CLAVSubtitleConsumer::CLAVSubtitleConsumer(CLAVVideo *pLAVVideo)
    : CSubRenderOptionsImpl(::options, &context)
    , CUnknown(L"CLAVSubtitleConsumer", nullptr)
    , m_pLAVVideo(pLAVVideo)
{
    // Publish static identification options; start with no pending frame event.
    ZeroMemory(&context, sizeof(context));
    context.name = TEXT(LAV_VIDEO);
    context.version = TEXT(LAV_VERSION_STR);
    m_evFrame.Reset();
}
CLAVSubtitleConsumer::~CLAVSubtitleConsumer(void)
{
    // Detach the provider side first so it stops delivering frames to us,
    // then drop our own reference and conversion context.
    if (m_pProvider)
    {
        m_pProvider->Disconnect();
    }
    Disconnect();
}

STDMETHODIMP CLAVSubtitleConsumer::NonDelegatingQueryInterface(REFIID riid, void **ppv)
{
    // Standard COM QI chain: both consumer interface revisions are exposed.
    CheckPointer(ppv, E_POINTER);
    *ppv = nullptr;
    return QI(ISubRenderConsumer) QI(ISubRenderConsumer2) __super::NonDelegatingQueryInterface(riid, ppv);
}
// Attach a new subtitle provider, replacing any previous one.
// NOTE(review): the old provider is Release'd but the new one is stored
// without AddRef — presumably the provider holds its own lifetime; confirm
// against the SubRenderIntf contract before changing.
STDMETHODIMP CLAVSubtitleConsumer::Connect(ISubRenderProvider *subtitleRenderer)
{
    SafeRelease(&m_pProvider);
    m_pProvider = subtitleRenderer;
    return S_OK;
}

// Drop the provider and free the cached swscale conversion context.
STDMETHODIMP CLAVSubtitleConsumer::Disconnect(void)
{
    SafeRelease(&m_pProvider);
    if (m_pSwsContext)
    {
        sws_freeContext(m_pSwsContext);
        m_pSwsContext = nullptr;
    }
    return S_OK;
}

// Called by the provider with the rendered subtitle frame for an earlier
// RequestFrame(); stores it and wakes the thread waiting in ProcessFrame().
STDMETHODIMP CLAVSubtitleConsumer::DeliverFrame(REFERENCE_TIME start, REFERENCE_TIME stop, LPVOID context,
                                                ISubRenderFrame *subtitleFrame)
{
    ASSERT(m_SubtitleFrame == nullptr);
    if (subtitleFrame)
        subtitleFrame->AddRef();
    m_SubtitleFrame = subtitleFrame;
    m_evFrame.Set();
    return S_OK;
}

// Subtitles changed; force a repaint of a possibly-still video image.
// (clearNewerThan is ignored — everything is redrawn.)
STDMETHODIMP CLAVSubtitleConsumer::Clear(REFERENCE_TIME clearNewerThan)
{
    m_pLAVVideo->RedrawStillImage();
    return S_OK;
}

// Ask the provider to render subtitles for the given time span; the result
// arrives asynchronously via DeliverFrame().
STDMETHODIMP CLAVSubtitleConsumer::RequestFrame(REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
    CheckPointer(m_pProvider, E_FAIL);
    return m_pProvider->RequestFrame(rtStart, rtStop, nullptr);
}
// Blends the pending subtitle frame (delivered after an earlier RequestFrame)
// into the given video frame. DXVA2 surfaces are copied to a writable buffer
// and locked for CPU access first; system-memory frames are copied in place
// unless already marked writable. Returns S_OK when something was blended,
// S_FALSE when there was no subtitle to blend, E_FAIL on surface errors.
STDMETHODIMP CLAVSubtitleConsumer::ProcessFrame(LAVFrame *pFrame)
{
    CheckPointer(m_pProvider, E_FAIL);
    HRESULT hr = S_OK;
    LPDIRECT3DSURFACE9 pSurface = nullptr;

    // Wait for the requested frame
    m_evFrame.Wait();

    if (m_SubtitleFrame != nullptr)
    {
        int count = 0;
        if (FAILED(m_SubtitleFrame->GetBitmapCount(&count)))
        {
            count = 0;
        }
        if (count == 0)
        {
            // Nothing to blend for this frame.
            SafeRelease(&m_SubtitleFrame);
            return S_FALSE;
        }

        BYTE *data[4] = {0};
        ptrdiff_t stride[4] = {0};
        LAVPixelFormat format = pFrame->format;
        int bpp = pFrame->bpp;

        if (pFrame->format == LAVPixFmt_DXVA2)
        {
            // Copy the surface, if required
            if (!(pFrame->flags & LAV_FRAME_FLAG_BUFFER_MODIFY))
            {
                IMediaSample *pOrigSample = (IMediaSample *)pFrame->data[0];
                LPDIRECT3DSURFACE9 pOrigSurface = (LPDIRECT3DSURFACE9)pFrame->data[3];

                hr = m_pLAVVideo->GetD3DBuffer(pFrame);
                if (FAILED(hr))
                {
                    DbgLog((LOG_TRACE, 10, L"CLAVSubtitleConsumer::ProcessFrame: getting a new D3D buffer failed"));
                }
                else
                {
                    IMediaSample *pNewSample = (IMediaSample *)pFrame->data[0];
                    pSurface = (LPDIRECT3DSURFACE9)pFrame->data[3];
                    IDirect3DDevice9 *pDevice = nullptr;
                    if (SUCCEEDED(hr = pSurface->GetDevice(&pDevice)))
                    {
                        // Blit the original decode surface into our writable copy.
                        hr = pDevice->StretchRect(pOrigSurface, nullptr, pSurface, nullptr, D3DTEXF_NONE);
                        if (SUCCEEDED(hr))
                        {
                            pFrame->flags |= LAV_FRAME_FLAG_BUFFER_MODIFY | LAV_FRAME_FLAG_DXVA_NOADDREF;
                            pOrigSurface = nullptr;

                            // Release the surface, we only want to hold a ref on the media buffer
                            pSurface->Release();
                        }
                        SafeRelease(&pDevice);
                    }
                    if (FAILED(hr))
                    {
                        DbgLog((LOG_TRACE, 10,
                                L"CLAVSubtitleConsumer::ProcessFrame: processing d3d buffer failed, restoring previous "
                                L"buffer"));
                        // Roll back to the original sample/surface pair.
                        pNewSample->Release();
                        pSurface->Release();
                        pFrame->data[0] = (BYTE *)pOrigSample;
                        pFrame->data[3] = (BYTE *)pOrigSurface;
                    }
                }
            }
            pSurface = (LPDIRECT3DSURFACE9)pFrame->data[3];
            D3DSURFACE_DESC surfaceDesc;
            pSurface->GetDesc(&surfaceDesc);

            D3DLOCKED_RECT LockedRect;
            hr = pSurface->LockRect(&LockedRect, nullptr, 0);
            if (FAILED(hr))
            {
                DbgLog((LOG_TRACE, 10, L"pSurface->LockRect failed (hr: %X)", hr));
                SafeRelease(&m_SubtitleFrame);
                return E_FAIL;
            }

            // Treat the locked surface as NV12: luma plane followed by
            // interleaved chroma at the same pitch.
            data[0] = (BYTE *)LockedRect.pBits;
            data[1] = data[0] + (surfaceDesc.Height * LockedRect.Pitch);
            stride[0] = LockedRect.Pitch;
            stride[1] = LockedRect.Pitch;

            format = LAVPixFmt_NV12;
            bpp = 8;
        }
        else if (pFrame->format == LAVPixFmt_D3D11)
        {
            // TODO D3D11
            SafeRelease(&m_SubtitleFrame);
            return E_FAIL;
        }
        else
        {
            // Make the frame writable before blending into it.
            if (!(pFrame->flags & LAV_FRAME_FLAG_BUFFER_MODIFY))
            {
                CopyLAVFrameInPlace(pFrame);
            }
            memcpy(&data, &pFrame->data, sizeof(pFrame->data));
            memcpy(&stride, &pFrame->stride, sizeof(pFrame->stride));
        }

        RECT videoRect;
        ::SetRect(&videoRect, 0, 0, pFrame->width, pFrame->height);

        RECT subRect;
        m_SubtitleFrame->GetOutputRect(&subRect);

        ULONGLONG id;
        POINT position;
        SIZE size;
        const uint8_t *rgbData;
        int pitch;
        // Blend every bitmap of the subtitle frame into the video planes.
        for (int i = 0; i < count; i++)
        {
            if (FAILED(m_SubtitleFrame->GetBitmap(i, &id, &position, &size, (LPCVOID *)&rgbData, &pitch)))
            {
                DbgLog((LOG_TRACE, 10, L"GetBitmap() failed on index %d", i));
                break;
            }
            ProcessSubtitleBitmap(format, bpp, videoRect, data, stride, subRect, position, size, rgbData, pitch);
        }

        if (pSurface)
            pSurface->UnlockRect();

        SafeRelease(&m_SubtitleFrame);
        return S_OK;
    }
    return S_FALSE;
}
// Maps each LAV video pixel format to the ffmpeg format the subtitle bitmap is
// converted to before blending (alpha-capable YUV of matching subsampling, or
// BGRA for RGB targets).
static struct
{
    LAVPixelFormat pixfmt;
    AVPixelFormat ffpixfmt;
} lav_ff_subtitle_pixfmt_map[] = {
    {LAVPixFmt_YUV420, AV_PIX_FMT_YUVA420P}, {LAVPixFmt_YUV420bX, AV_PIX_FMT_YUVA420P},
    {LAVPixFmt_YUV422, AV_PIX_FMT_YUVA422P}, {LAVPixFmt_YUV422bX, AV_PIX_FMT_YUVA422P},
    {LAVPixFmt_YUV444, AV_PIX_FMT_YUVA444P}, {LAVPixFmt_YUV444bX, AV_PIX_FMT_YUVA444P},
    {LAVPixFmt_NV12, AV_PIX_FMT_YUVA420P},   {LAVPixFmt_P016, AV_PIX_FMT_YUVA420P},
    {LAVPixFmt_YUY2, AV_PIX_FMT_YUVA422P},   {LAVPixFmt_RGB24, AV_PIX_FMT_BGRA},
    {LAVPixFmt_RGB32, AV_PIX_FMT_BGRA},      {LAVPixFmt_ARGB32, AV_PIX_FMT_BGRA},
};

// Plane layout descriptors for the four conversion target formats above
// (codedbytes, planes, per-plane width/height divisors).
static LAVPixFmtDesc ff_sub_pixfmt_desc[] = {
    {1, 4, {1, 2, 2, 1}, {1, 2, 2, 1}}, ///< PIX_FMT_YUVA420P
    {1, 4, {1, 2, 2, 1}, {1, 1, 1, 1}}, ///< PIX_FMT_YUVA422P
    {1, 4, {1, 1, 1, 1}, {1, 1, 1, 1}}, ///< PIX_FMT_YUVA444P
    {4, 1, {1}, {1}},                   ///< PIX_FMT_BGRA
};
// Returns the plane descriptor for one of the four supported subtitle
// conversion targets; asserts (and falls back to the first entry) otherwise.
static LAVPixFmtDesc getFFSubPixelFormatDesc(AVPixelFormat pixFmt)
{
    int index = 0;
    if (pixFmt == AV_PIX_FMT_YUVA420P)
        index = 0;
    else if (pixFmt == AV_PIX_FMT_YUVA422P)
        index = 1;
    else if (pixFmt == AV_PIX_FMT_YUVA444P)
        index = 2;
    else if (pixFmt == AV_PIX_FMT_BGRA)
        index = 3;
    else
        ASSERT(0);
    return ff_sub_pixfmt_desc[index];
}
// Looks up the ffmpeg conversion target for the given LAV pixel format in
// lav_ff_subtitle_pixfmt_map; asserts and returns AV_PIX_FMT_NONE on a miss.
// (The previously-declared local `fmt` was never read and has been removed.)
static AVPixelFormat getFFPixFmtForSubtitle(LAVPixelFormat pixFmt)
{
    for (int i = 0; i < countof(lav_ff_subtitle_pixfmt_map); i++)
    {
        if (lav_ff_subtitle_pixfmt_map[i].pixfmt == pixFmt)
        {
            return lav_ff_subtitle_pixfmt_map[i].ffpixfmt;
        }
    }
    ASSERT(0);
    return AV_PIX_FMT_NONE;
}
// Picks the blend routine matching the current video pixel format: RGB path,
// or the YUV template instantiated per sample width (8/16-bit) and chroma
// layout (interleaved NV12/P016 vs planar). Unknown formats disable blending.
STDMETHODIMP CLAVSubtitleConsumer::SelectBlendFunction()
{
    switch (m_PixFmt)
    {
    case LAVPixFmt_RGB32:
    case LAVPixFmt_RGB24: blend = &CLAVSubtitleConsumer::blend_rgb_c; break;
    case LAVPixFmt_NV12: blend = &CLAVSubtitleConsumer::blend_yuv_c<uint8_t, 1>; break;
    case LAVPixFmt_P016: blend = &CLAVSubtitleConsumer::blend_yuv_c<uint16_t, 1>; break;
    case LAVPixFmt_YUV420:
    case LAVPixFmt_YUV422:
    case LAVPixFmt_YUV444: blend = &CLAVSubtitleConsumer::blend_yuv_c<uint8_t, 0>; break;
    case LAVPixFmt_YUV420bX:
    case LAVPixFmt_YUV422bX:
    case LAVPixFmt_YUV444bX: blend = &CLAVSubtitleConsumer::blend_yuv_c<uint16_t, 0>; break;
    default: DbgLog((LOG_ERROR, 10, L"ProcessSubtitleBitmap(): No Blend function available")); blend = nullptr;
    }

    return S_OK;
}
// Blends one subtitle bitmap (pre-multiplied BGRA) into the video frame.
// When the subtitle coordinate space differs from the video, or the video is
// not RGB, the bitmap is first scaled/converted with swscale into per-plane
// buffers matching the video format, then handed to the selected blend routine.
STDMETHODIMP CLAVSubtitleConsumer::ProcessSubtitleBitmap(LAVPixelFormat pixFmt, int bpp, RECT videoRect,
                                                         BYTE *videoData[4], ptrdiff_t videoStride[4], RECT subRect,
                                                         POINT subPosition, SIZE subSize, const uint8_t *rgbData,
                                                         ptrdiff_t pitch)
{
    if (subRect.left != 0 || subRect.top != 0)
    {
        DbgLog((LOG_ERROR, 10, L"ProcessSubtitleBitmap(): Left/Top in SubRect non-zero"));
    }

    BOOL bNeedScaling = FALSE;

    // We need scaling if the width is not the same, or the subtitle rect is higher then the video rect
    if (subRect.right != videoRect.right || subRect.bottom > videoRect.bottom)
    {
        bNeedScaling = TRUE;
    }

    // Non-RGB targets always go through swscale for the pixel format conversion.
    if (pixFmt != LAVPixFmt_RGB32 && pixFmt != LAVPixFmt_RGB24)
    {
        bNeedScaling = TRUE;
    }

    if (m_PixFmt != pixFmt)
    {
        m_PixFmt = pixFmt;
        SelectBlendFunction();
    }

    // P010/P016 is always handled like its 16 bpp to compensate for having the data in the high bits
    if (pixFmt == LAVPixFmt_P016)
        bpp = 16;

    BYTE *subData[4] = {nullptr, nullptr, nullptr, nullptr};
    ptrdiff_t subStride[4] = {0, 0, 0, 0};

    // If we need scaling (either scaling or pixel conversion), do it here before starting the blend process
    if (bNeedScaling)
    {
        uint8_t *tmpBuf = nullptr;
        const AVPixelFormat avPixFmt = getFFPixFmtForSubtitle(pixFmt);

        // Calculate scaled size
        // We must ensure that the scaled subs still fit into the video

        // HACK: Scale to video size. In the future, we should take AR and the likes into account
        RECT newRect = videoRect;
        /*
        float subAR = (float)subRect.right / (float)subRect.bottom;
        if (newRect.right != videoRect.right) {
          newRect.right = videoRect.right;
          newRect.bottom = (LONG)(newRect.right / subAR);
        }
        if (newRect.bottom > videoRect.bottom) {
          newRect.bottom = videoRect.bottom;
          newRect.right = (LONG)(newRect.bottom * subAR);
        }*/

        SIZE newSize;
        newSize.cx = (LONG)av_rescale(subSize.cx, newRect.right, subRect.right);
        newSize.cy = (LONG)av_rescale(subSize.cy, newRect.bottom, subRect.bottom);

        // And scaled position
        subPosition.x = (LONG)av_rescale(subPosition.x, newSize.cx, subSize.cx);
        subPosition.y = (LONG)av_rescale(subPosition.y, newSize.cy, subSize.cy);

        m_pSwsContext =
            sws_getCachedContext(m_pSwsContext, subSize.cx, subSize.cy, AV_PIX_FMT_BGRA, newSize.cx, newSize.cy,
                                 avPixFmt, SWS_BILINEAR | SWS_FULL_CHR_H_INP, nullptr, nullptr, nullptr);

        const uint8_t *src[4] = {(const uint8_t *)rgbData, nullptr, nullptr, nullptr};
        const ptrdiff_t srcStride[4] = {pitch, 0, 0, 0};

        // Allocate the destination planes according to the target layout.
        const LAVPixFmtDesc desc = getFFSubPixelFormatDesc(avPixFmt);
        const ptrdiff_t stride = FFALIGN(newSize.cx, 64) * desc.codedbytes;

        for (int plane = 0; plane < desc.planes; plane++)
        {
            subStride[plane] = stride / desc.planeWidth[plane];
            const size_t size = subStride[plane] * FFALIGN(newSize.cy, 2) / desc.planeHeight[plane];
            subData[plane] = (BYTE *)av_mallocz(size + AV_INPUT_BUFFER_PADDING_SIZE);
            if (subData[plane] == nullptr)
                goto fail;
        }

        // Un-pre-multiply alpha for YUV formats
        // TODO: Can we SIMD this? See ARGBUnattenuateRow_C/SSE2 in libyuv
        if (avPixFmt != AV_PIX_FMT_BGRA)
        {
            tmpBuf = (uint8_t *)av_malloc(pitch * subSize.cy);
            if (tmpBuf == nullptr)
                goto fail;
            memcpy(tmpBuf, rgbData, pitch * subSize.cy);
            for (int line = 0; line < subSize.cy; line++)
            {
                uint8_t *p = tmpBuf + line * pitch;
                for (int col = 0; col < subSize.cx; col++)
                {
                    if (p[3] != 0 && p[3] != 255)
                    {
                        p[0] = av_clip_uint8(p[0] * 255 / p[3]);
                        p[1] = av_clip_uint8(p[1] * 255 / p[3]);
                        p[2] = av_clip_uint8(p[2] * 255 / p[3]);
                    }
                    p += 4;
                }
            }
            src[0] = tmpBuf;
        }

        // NOTE(review): the sws_scale2 return value is not checked.
        int ret = sws_scale2(m_pSwsContext, src, srcStride, 0, subSize.cy, subData, subStride);

        subSize = newSize;

        if (tmpBuf)
            av_free(tmpBuf);
    }
    else
    {
        // No conversion required — blend straight from the source bitmap.
        subData[0] = (BYTE *)rgbData;
        subStride[0] = pitch;
    }

    ASSERT((subPosition.x + subSize.cx) <= videoRect.right);
    ASSERT((subPosition.y + subSize.cy) <= videoRect.bottom);

    if (blend)
        (this->*blend)(videoData, videoStride, videoRect, subData, subStride, subPosition, subSize, pixFmt, bpp);

    // Free the temporary planes (only allocated on the scaling path).
    if (bNeedScaling)
    {
        for (int i = 0; i < 4; i++)
        {
            av_freep(&subData[i]);
        }
    }
    return S_OK;
fail:
    for (int i = 0; i < 4; i++)
    {
        av_freep(&subData[i]);
    }
    return E_OUTOFMEMORY;
}
| 15,678
|
C++
|
.cpp
| 399
| 30.122807
| 120
| 0.588525
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| false
| true
| true
| false
|
22,172
|
LAVVideoSubtitleInputPin.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/subtitles/LAVVideoSubtitleInputPin.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "LAVVideoSubtitleInputPin.h"
#include "LAVVideo.h"
#include "moreuuids.h"
// (majortype, subtype) -> ffmpeg codec mapping for the subtitle input pin.
typedef struct
{
    const CLSID *clsMajorType;
    const CLSID *clsMinorType;
    const enum AVCodecID nFFCodec;
} LAV_TYPE_MAP;

// All accepted media types; currently everything maps to the DVD sub decoder.
static const LAV_TYPE_MAP lav_subtitle_codecs[] = {
    {&MEDIATYPE_DVD_ENCRYPTED_PACK, &MEDIASUBTYPE_DVD_SUBPICTURE, AV_CODEC_ID_DVD_SUBTITLE},
    {&MEDIATYPE_MPEG2_PACK, &MEDIASUBTYPE_DVD_SUBPICTURE, AV_CODEC_ID_DVD_SUBTITLE},
    {&MEDIATYPE_MPEG2_PES, &MEDIASUBTYPE_DVD_SUBPICTURE, AV_CODEC_ID_DVD_SUBTITLE},
    {&MEDIATYPE_Video, &MEDIASUBTYPE_DVD_SUBPICTURE, AV_CODEC_ID_DVD_SUBTITLE}};
// Subtitle input pin of the LAV Video filter; feeds compressed subtitle data
// into a CLAVSubtitleProvider and handles DVD CSS decryption via the helper.
CLAVVideoSubtitleInputPin::CLAVVideoSubtitleInputPin(TCHAR *pObjectName, CLAVVideo *pFilter, CCritSec *pcsFilter,
                                                    HRESULT *phr, LPWSTR pName)
    : CBaseInputPin(pObjectName, pFilter, pcsFilter, phr, pName)
    , m_pLAVVideo(pFilter)
    , CDeCSSPinHelper()
{
}

CLAVVideoSubtitleInputPin::~CLAVVideoSubtitleInputPin(void)
{
    // Drop our references on the consumer and provider.
    SafeRelease(&m_pConsumer);
    SafeRelease(&m_pProvider);
}

STDMETHODIMP CLAVVideoSubtitleInputPin::NonDelegatingQueryInterface(REFIID riid, void **ppv)
{
    // Expose IKsPropertySet so the DVD navigator can push palette/HLI events.
    CheckPointer(ppv, E_POINTER);
    return QI(IKsPropertySet) __super::NonDelegatingQueryInterface(riid, ppv);
}
// Accepts a media type when its GUID pair appears in the codec table and its
// format block is one of the two formats we understand.
HRESULT CLAVVideoSubtitleInputPin::CheckMediaType(const CMediaType *mtIn)
{
    const bool bKnownFormat =
        (mtIn->formattype == FORMAT_SubtitleInfo || mtIn->formattype == FORMAT_MPEG2_VIDEO);
    for (const auto &entry : lav_subtitle_codecs)
    {
        const bool bGuidMatch =
            (*entry.clsMajorType == mtIn->majortype && *entry.clsMinorType == mtIn->subtype);
        if (bGuidMatch && bKnownFormat)
            return S_OK;
    }
    return VFW_E_TYPE_NOT_ACCEPTED;
}
// Called when a media type is agreed on: creates a fresh subtitle provider for
// the matching codec and initializes its decoder.
HRESULT CLAVVideoSubtitleInputPin::SetMediaType(const CMediaType *pmt)
{
    CAutoLock lock(&m_csReceive);
    SetCSSMediaType(pmt);

    ASSERT(m_pConsumer);
    SafeRelease(&m_pProvider);

    AVCodecID codecId = AV_CODEC_ID_NONE;
    // No break: the last matching entry wins (all current entries map to the
    // same codec, so this is equivalent to first-match).
    for (int i = 0; i < countof(lav_subtitle_codecs); i++)
    {
        if (*lav_subtitle_codecs[i].clsMajorType == pmt->majortype &&
            *lav_subtitle_codecs[i].clsMinorType == pmt->subtype)
        {
            codecId = lav_subtitle_codecs[i].nFFCodec;
        }
    }

    if (codecId == AV_CODEC_ID_NONE)
        return VFW_E_TYPE_NOT_ACCEPTED;

    m_pProvider = new CLAVSubtitleProvider(m_pLAVVideo, m_pConsumer);
    m_pProvider->AddRef();
    HRESULT hr = m_pProvider->InitDecoder(pmt, codecId);
    if (FAILED(hr))
    {
        // NOTE(review): the failed provider stays referenced in m_pProvider;
        // it is released on the next SetMediaType/BreakConnect.
        DbgLog((LOG_TRACE, 10, L"Subtitle Decoder Init failed...."));
        return hr;
    }

    return __super::SetMediaType(pmt);
}
// Pin disconnect: detach the provider from its consumer and release it.
HRESULT CLAVVideoSubtitleInputPin::BreakConnect()
{
    if (m_pProvider)
    {
        m_pProvider->DisconnectConsumer();
        SafeRelease(&m_pProvider);
    }
    return __super::BreakConnect();
}

STDMETHODIMP CLAVVideoSubtitleInputPin::BeginFlush()
{
    // Nothing pin-specific to do at flush start.
    return __super::BeginFlush();
}

STDMETHODIMP CLAVVideoSubtitleInputPin::EndFlush()
{
    // Clear any queued subtitle state in the provider when the flush completes.
    CAutoLock lock(&m_csReceive);
    DbgLog((LOG_TRACE, 10, L"CLAVVideoSubtitleInputPin::EndFlush()"));
    if (m_pProvider)
    {
        m_pProvider->Flush();
    }
    return __super::EndFlush();
}
// Installs the consumer that rendered subtitles are delivered to. Takes a
// reference on the new consumer; if a provider already exists it is re-wired
// to the new consumer. Passing the currently-set consumer is a no-op.
// Returns E_POINTER for a null consumer (previously this would have crashed
// on the unconditional AddRef below).
HRESULT CLAVVideoSubtitleInputPin::SetSubtitleConsumer(ISubRenderConsumer *pConsumer)
{
    CheckPointer(pConsumer, E_POINTER);
    if (pConsumer == m_pConsumer)
        return S_OK;

    SafeRelease(&m_pConsumer);
    m_pConsumer = pConsumer;
    m_pConsumer->AddRef();

    if (m_pProvider)
    {
        m_pProvider->DisconnectConsumer();
        m_pProvider->SetConsumer(pConsumer);
    }

    return S_OK;
}
// Receives one compressed subtitle sample: CSS-decrypts it, strips the pack
// headers, extracts the timestamps and hands the payload to the provider.
// Always returns S_OK once the base-class Receive accepted the sample, so a
// bad sample does not stall the graph.
STDMETHODIMP CLAVVideoSubtitleInputPin::Receive(IMediaSample *pSample)
{
    CAutoLock lock(&m_csReceive);
    HRESULT hr = S_OK;

    Decrypt(pSample);

    ASSERT(m_pProvider);

    hr = CBaseInputPin::Receive(pSample);
    if (hr == S_OK)
    {
        long len = pSample->GetActualDataLength();

        BYTE *pBuffer = nullptr;
        if (FAILED(hr = pSample->GetPointer(&pBuffer)))
        {
            DbgLog((LOG_TRACE, 10, L"CLAVVideoSubtitleInputPin::Receive() GetPointer failed"));
            return S_OK;
        }

        StripPacket(pBuffer, len);

        // Missing stop (or both) timestamps are normalized to AV_NOPTS_VALUE.
        REFERENCE_TIME rtStart, rtStop;
        hr = pSample->GetTime(&rtStart, &rtStop);
        if (hr == VFW_S_NO_STOP_TIME)
        {
            rtStop = AV_NOPTS_VALUE;
        }
        else if (FAILED(hr))
        {
            rtStart = rtStop = AV_NOPTS_VALUE;
        }

        m_pProvider->Decode(pBuffer, len, rtStart, rtStop);
    }

    return S_OK;
}
// IKsPropertySet
STDMETHODIMP CLAVVideoSubtitleInputPin::Set(REFGUID PropSet, ULONG Id, LPVOID pInstanceData, ULONG InstanceLength,
LPVOID pPropertyData, ULONG DataLength)
{
if (PropSet != AM_KSPROPSETID_DvdSubPic)
{
return __super::Set(PropSet, Id, pInstanceData, InstanceLength, pPropertyData, DataLength);
}
switch (Id)
{
case AM_PROPERTY_DVDSUBPIC_PALETTE: {
AM_PROPERTY_SPPAL *pSPPAL = (AM_PROPERTY_SPPAL *)pPropertyData;
if (m_pProvider)
{
m_pProvider->SetDVDPalette(pSPPAL);
}
}
break;
case AM_PROPERTY_DVDSUBPIC_HLI: {
AM_PROPERTY_SPHLI *pSPHLI = (AM_PROPERTY_SPHLI *)pPropertyData;
if (pSPHLI->HLISS)
{
if (m_pProvider)
{
m_pProvider->SetDVDHLI(pSPHLI);
}
}
else
{
if (m_pProvider)
{
m_pProvider->SetDVDHLI(nullptr);
}
}
}
break;
case AM_PROPERTY_DVDSUBPIC_COMPOSIT_ON: {
AM_PROPERTY_COMPOSIT_ON *pCompositOn = (AM_PROPERTY_COMPOSIT_ON *)pPropertyData;
DbgLog((LOG_TRACE, 10, L"Composit Event - on: %d", *pCompositOn));
m_pProvider->SetDVDComposit(*pCompositOn);
}
break;
default: return E_PROP_ID_UNSUPPORTED;
}
return S_OK;
}
// IKsPropertySet::QuerySupported — every DVD subpicture property we handle is
// set-only; other property sets are delegated to the base class.
STDMETHODIMP CLAVVideoSubtitleInputPin::QuerySupported(REFGUID PropSet, ULONG Id, ULONG *pTypeSupport)
{
    if (PropSet != AM_KSPROPSETID_DvdSubPic)
    {
        return __super::QuerySupported(PropSet, Id, pTypeSupport);
    }
    if (Id == AM_PROPERTY_DVDSUBPIC_PALETTE || Id == AM_PROPERTY_DVDSUBPIC_HLI ||
        Id == AM_PROPERTY_DVDSUBPIC_COMPOSIT_ON)
    {
        *pTypeSupport = KSPROPERTY_SUPPORT_SET;
        return S_OK;
    }
    return E_PROP_ID_UNSUPPORTED;
}
| 7,308
|
C++
|
.cpp
| 219
| 27.347032
| 114
| 0.662084
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| true
| true
| true
| false
|
22,173
|
LAVSubtitleFrame.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/subtitles/LAVSubtitleFrame.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "LAVSubtitleFrame.h"
// Container for one rendered subtitle frame: an output/clip rect plus a
// CoTaskMem-managed array of refcounted bitmaps.
CLAVSubtitleFrame::CLAVSubtitleFrame(void)
    : CUnknown(L"CLAVSubtitleFrame", nullptr)
{
    ZeroMemory(&m_outputRect, sizeof(m_outputRect));
    ZeroMemory(&m_clipRect, sizeof(m_clipRect));
}

CLAVSubtitleFrame::~CLAVSubtitleFrame(void)
{
    // Release the reference taken in AddBitmap on each rect, then the array.
    for (int i = 0; i < m_NumBitmaps; i++)
    {
        m_Bitmaps[i]->Release();
    }
    SAFE_CO_FREE(m_Bitmaps);
}
// Sets the output rect; the clip rect is reset to the same area and can be
// narrowed afterwards via SetClipRect.
STDMETHODIMP CLAVSubtitleFrame::SetOutputRect(RECT outputRect)
{
    m_outputRect = m_clipRect = outputRect;
    return S_OK;
}

STDMETHODIMP CLAVSubtitleFrame::SetClipRect(RECT clipRect)
{
    m_clipRect = clipRect;
    return S_OK;
}
// Appends a subtitle rect to the frame, growing the bitmap array by one and
// taking a reference on the rect. On allocation failure the existing array is
// left untouched (realloc result is checked before being assigned).
STDMETHODIMP CLAVSubtitleFrame::AddBitmap(CLAVSubRect *subRect)
{
    // Allocate memory for the new block
    void *mem = CoTaskMemRealloc(m_Bitmaps, sizeof(*m_Bitmaps) * (m_NumBitmaps + 1));
    if (!mem)
    {
        return E_OUTOFMEMORY;
    }

    m_Bitmaps = (CLAVSubRect **)mem;
    m_Bitmaps[m_NumBitmaps] = subRect;
    m_NumBitmaps++;

    // Hold reference on the subtitle rect
    subRect->AddRef();
    return S_OK;
}
// Simple accessors for the frame's geometry and bitmap count.
STDMETHODIMP CLAVSubtitleFrame::GetOutputRect(RECT *outputRect)
{
    CheckPointer(outputRect, E_POINTER);
    *outputRect = m_outputRect;
    return S_OK;
}

STDMETHODIMP CLAVSubtitleFrame::GetClipRect(RECT *clipRect)
{
    CheckPointer(clipRect, E_POINTER);
    *clipRect = m_clipRect;
    return S_OK;
}

STDMETHODIMP CLAVSubtitleFrame::GetBitmapCount(int *count)
{
    CheckPointer(count, E_POINTER);
    *count = m_NumBitmaps;
    return S_OK;
}
// Returns the metadata and pixel pointer of one bitmap. The pixels remain
// owned by the stored rect; the internal pitch is in pixels, so it is
// multiplied by 4 here to report a byte pitch (RGBA) to the caller.
STDMETHODIMP CLAVSubtitleFrame::GetBitmap(int index, ULONGLONG *id, POINT *position, SIZE *size, LPCVOID *pixels,
                                          int *pitch)
{
    if (index < 0 || index >= m_NumBitmaps)
        return E_INVALIDARG;

    CheckPointer(id, E_POINTER);
    CheckPointer(position, E_POINTER);
    CheckPointer(size, E_POINTER);
    CheckPointer(pixels, E_POINTER);
    CheckPointer(pitch, E_POINTER);

    *id = m_Bitmaps[index]->id;
    *position = m_Bitmaps[index]->position;
    *size = m_Bitmaps[index]->size;
    *pixels = m_Bitmaps[index]->pixels;
    *pitch = m_Bitmaps[index]->pitch * 4;
    return S_OK;
}
| 2,987
|
C++
|
.cpp
| 94
| 27.904255
| 113
| 0.702571
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| true
| true
| true
| false
|
22,174
|
SubRenderOptionsImpl.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/subtitles/SubRenderOptionsImpl.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "SubRenderOptionsImpl.h"
// Iterates a null-terminated SubRenderOption table: pass nullptr to get the
// first entry, the previous entry to get its successor; returns nullptr at
// the terminator (an entry whose name is null).
static const SubRenderOption *sropt_next(const SubRenderOption *options, const SubRenderOption *last)
{
    if (last == nullptr)
        return options[0].name ? options : nullptr;
    const SubRenderOption *next = last + 1;
    return next->name ? next : nullptr;
}
// Finds an option by case-insensitive name, skipping entries that carry any of
// the given flags (used to exclude read-only options from setters). Returns
// nullptr when no matching entry exists. The previous assignment-inside-while
// condition is rewritten as an explicit for loop for clarity.
static const SubRenderOption *sropt_find_option(const SubRenderOption *options, LPCSTR name, int flags)
{
    for (const SubRenderOption *o = sropt_next(options, nullptr); o != nullptr; o = sropt_next(options, o))
    {
        if (!_stricmp(o->name, name) && !(o->flags & flags))
        {
            return o;
        }
    }
    return nullptr;
}
// Getter prologue: looks up `field` in the option table, validates the out
// pointer and the option type; declares `o` in the enclosing function scope.
#define GET_OPT_AND_VALIDATE(t)                                          \
    const SubRenderOption *o = sropt_find_option(options, field, 0);     \
    CheckPointer(value, E_POINTER);                                      \
    CheckPointer(o, E_INVALIDARG);                                       \
    if (o->type != t)                                                    \
        return E_INVALIDARG;

// Copies the option's storage (at o->offset inside the context) into *value.
#define GET_VALUE memcpy(value, ((uint8_t *)context) + o->offset, sizeof(*value));

// Setter prologue: like GET_OPT_AND_VALIDATE, but rejects read-only options
// (they are invisible to the lookup) and takes `value` by value.
#define SET_OPT_AND_VALIDATE(t)                                                        \
    const SubRenderOption *o = sropt_find_option(options, field, SROPT_FLAG_READONLY); \
    CheckPointer(o, E_INVALIDARG);                                                     \
    if (o->type != t)                                                                  \
        return E_INVALIDARG;

// Writes `value` into the option's storage and notifies the implementation.
#define SET_VALUE                                                        \
    memcpy(((uint8_t *)context) + o->offset, &value, sizeof(value));     \
    OnSubOptionSet(field);
STDMETHODIMP CSubRenderOptionsImpl::GetBool(LPCSTR field, bool *value)
{
GET_OPT_AND_VALIDATE(SROPT_TYPE_BOOL)
GET_VALUE
return S_OK;
}
STDMETHODIMP CSubRenderOptionsImpl::GetInt(LPCSTR field, int *value)
{
GET_OPT_AND_VALIDATE(SROPT_TYPE_INT)
GET_VALUE
return S_OK;
}
STDMETHODIMP CSubRenderOptionsImpl::GetSize(LPCSTR field, SIZE *value)
{
GET_OPT_AND_VALIDATE(SROPT_TYPE_SIZE)
GET_VALUE
return S_OK;
}
STDMETHODIMP CSubRenderOptionsImpl::GetRect(LPCSTR field, RECT *value)
{
GET_OPT_AND_VALIDATE(SROPT_TYPE_RECT)
GET_VALUE
return S_OK;
}
STDMETHODIMP CSubRenderOptionsImpl::GetUlonglong(LPCSTR field, ULONGLONG *value)
{
GET_OPT_AND_VALIDATE(SROPT_TYPE_ULONGLONG)
GET_VALUE
return S_OK;
}
STDMETHODIMP CSubRenderOptionsImpl::GetDouble(LPCSTR field, double *value)
{
GET_OPT_AND_VALIDATE(SROPT_TYPE_DOUBLE)
GET_VALUE
return S_OK;
}
STDMETHODIMP CSubRenderOptionsImpl::GetString(LPCSTR field, LPWSTR *value, int *chars)
{
GET_OPT_AND_VALIDATE(SROPT_TYPE_STRING)
CheckPointer(chars, E_POINTER);
const LPWSTR string = *(LPWSTR *)(((uint8_t *)context) + o->offset);
if (!string)
{
*value = nullptr;
*chars = 0;
return S_OK;
}
*chars = (int)wcslen(string);
*value = (LPWSTR)LocalAlloc(0, sizeof(WCHAR) * (*chars + 1));
CheckPointer(*value, E_OUTOFMEMORY);
wcscpy_s(*value, *chars + 1, string);
return S_OK;
}
STDMETHODIMP CSubRenderOptionsImpl::GetBin(LPCSTR field, LPVOID *value, int *size)
{
GET_OPT_AND_VALIDATE(SROPT_TYPE_BIN)
CheckPointer(size, E_POINTER);
return E_NOTIMPL;
}
STDMETHODIMP CSubRenderOptionsImpl::SetBool(LPCSTR field, bool value)
{
SET_OPT_AND_VALIDATE(SROPT_TYPE_BOOL)
SET_VALUE
return S_OK;
}
STDMETHODIMP CSubRenderOptionsImpl::SetInt(LPCSTR field, int value)
{
SET_OPT_AND_VALIDATE(SROPT_TYPE_INT)
SET_VALUE
return S_OK;
}
STDMETHODIMP CSubRenderOptionsImpl::SetSize(LPCSTR field, SIZE value)
{
SET_OPT_AND_VALIDATE(SROPT_TYPE_SIZE)
SET_VALUE
return S_OK;
}
STDMETHODIMP CSubRenderOptionsImpl::SetRect(LPCSTR field, RECT value)
{
SET_OPT_AND_VALIDATE(SROPT_TYPE_RECT)
SET_VALUE
return S_OK;
}
STDMETHODIMP CSubRenderOptionsImpl::SetUlonglong(LPCSTR field, ULONGLONG value)
{
SET_OPT_AND_VALIDATE(SROPT_TYPE_ULONGLONG)
SET_VALUE
return S_OK;
}
STDMETHODIMP CSubRenderOptionsImpl::SetDouble(LPCSTR field, double value)
{
SET_OPT_AND_VALIDATE(SROPT_TYPE_DOUBLE)
SET_VALUE
return S_OK;
}
STDMETHODIMP CSubRenderOptionsImpl::SetString(LPCSTR field, LPWSTR value, int chars)
{
SET_OPT_AND_VALIDATE(SROPT_TYPE_STRING)
return E_NOTIMPL;
}
STDMETHODIMP CSubRenderOptionsImpl::SetBin(LPCSTR field, LPVOID value, int size)
{
SET_OPT_AND_VALIDATE(SROPT_TYPE_BIN)
return E_NOTIMPL;
}
| 5,306
|
C++
|
.cpp
| 160
| 29.54375
| 103
| 0.663672
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| false
| true
| true
| false
|
22,175
|
blend_generic.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/subtitles/blend/blend_generic.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "../LAVSubtitleConsumer.h"
#define FAST_DIV255(x) ((((x) + 128) * 257) >> 16)
DECLARE_BLEND_FUNC_IMPL(blend_rgb_c)
{
ASSERT(pixFmt == LAVPixFmt_RGB32 || pixFmt == LAVPixFmt_RGB24);
BYTE *rgbOut = video[0];
const BYTE *subIn = subData[0];
const ptrdiff_t outStride = videoStride[0];
const ptrdiff_t inStride = subStride[0];
const ptrdiff_t dstep = (pixFmt == LAVPixFmt_RGB24) ? 3 : 4;
for (int y = 0; y < size.cy; y++)
{
BYTE *dstLine = rgbOut + ((y + position.y) * outStride) + (position.x * dstep);
const BYTE *srcLine = subIn + (y * inStride);
for (int x = 0; x < size.cx; x++)
{
const BYTE a = srcLine[3];
switch (a)
{
case 0: break;
case 255:
dstLine[0] = srcLine[0];
dstLine[1] = srcLine[1];
dstLine[2] = srcLine[2];
break;
default:
dstLine[0] = av_clip_uint8(FAST_DIV255(dstLine[0] * (255 - a)) + srcLine[0]);
dstLine[1] = av_clip_uint8(FAST_DIV255(dstLine[1] * (255 - a)) + srcLine[1]);
dstLine[2] = av_clip_uint8(FAST_DIV255(dstLine[2] * (255 - a)) + srcLine[2]);
break;
}
dstLine += dstep;
srcLine += 4;
}
}
return S_OK;
}
template <class pixT, int nv12> DECLARE_BLEND_FUNC_IMPL(blend_yuv_c)
{
ASSERT(pixFmt == LAVPixFmt_YUV420 || pixFmt == LAVPixFmt_NV12 || pixFmt == LAVPixFmt_YUV422 ||
pixFmt == LAVPixFmt_YUV444 || pixFmt == LAVPixFmt_YUV420bX || pixFmt == LAVPixFmt_YUV422bX ||
pixFmt == LAVPixFmt_YUV444bX || pixFmt == LAVPixFmt_P016);
BYTE *y = video[0];
BYTE *u = video[1];
BYTE *v = video[2];
const BYTE *subY = subData[0];
const BYTE *subU = subData[1];
const BYTE *subV = subData[2];
const BYTE *subA = subData[3];
const ptrdiff_t outStride = videoStride[0];
const ptrdiff_t outStrideUV = videoStride[1];
const ptrdiff_t inStride = subStride[0];
const ptrdiff_t inStrideUV = subStride[1];
int line, col;
int w = size.cx, h = size.cy;
int yPos = position.y;
int xPos = position.x;
const int hsub = nv12 || (pixFmt == LAVPixFmt_YUV420 || pixFmt == LAVPixFmt_YUV420bX || pixFmt == LAVPixFmt_NV12);
const int vsub = nv12 || (pixFmt != LAVPixFmt_YUV444 && pixFmt != LAVPixFmt_YUV444bX);
const int shift = sizeof(pixT) > 1 ? bpp - 8 : 0;
for (line = 0; line < h; line++)
{
pixT *dstY = (pixT *)(y + ((line + yPos) * outStride)) + xPos;
const BYTE *srcY = subY + (line * inStride);
const BYTE *srcA = subA + (line * inStride);
for (col = 0; col < w; col++)
{
switch (srcA[col])
{
case 0: break;
case 255: dstY[col] = srcY[col] << shift; break;
default: dstY[col] = FAST_DIV255(dstY[col] * (255 - srcA[col]) + (srcY[col] << shift) * srcA[col]); break;
}
}
}
if (hsub)
{
w >>= 1;
xPos >>= 1;
}
if (vsub)
{
h >>= 1;
yPos >>= 1;
}
for (line = 0; line < h; line++)
{
pixT *dstUV = (pixT *)(u + (line + yPos) * outStrideUV) + (xPos << 1);
pixT *dstU = (pixT *)(u + (line + yPos) * outStrideUV) + xPos;
const BYTE *srcU = subU + line * inStrideUV;
pixT *dstV = (pixT *)(v + (line + yPos) * outStrideUV) + xPos;
const BYTE *srcV = subV + line * inStrideUV;
const BYTE *srcA = subA + (line * inStride * (ptrdiff_t(1) << hsub));
for (col = 0; col < w; col++)
{
// Average Alpha
int alpha;
if (hsub && vsub && col + 1 < w && line + 1 < h)
{
alpha = (srcA[0] + srcA[inStride] + srcA[1] + srcA[inStride + 1]) >> 2;
}
else if (hsub || vsub)
{
int alpha_h = hsub && col + 1 < w ? (srcA[0] + srcA[1]) >> 1 : srcA[0];
int alpha_v = vsub && line + 1 < h ? (srcA[0] + srcA[inStride]) >> 1 : srcA[0];
alpha = (alpha_h + alpha_v) >> 1;
}
else
{
alpha = srcA[0];
}
if (nv12)
{
switch (alpha)
{
case 0: break;
case 255:
dstUV[(col << 1) + 0] = srcU[col] << shift;
dstUV[(col << 1) + 1] = srcV[col] << shift;
break;
default:
dstUV[(col << 1) + 0] =
FAST_DIV255(dstUV[(col << 1) + 0] * (255 - alpha) + (srcU[col] << shift) * alpha);
dstUV[(col << 1) + 1] =
FAST_DIV255(dstUV[(col << 1) + 1] * (255 - alpha) + (srcV[col] << shift) * alpha);
break;
}
}
else
{
switch (alpha)
{
case 0: break;
case 255:
dstU[col] = srcU[col] << shift;
dstV[col] = srcV[col] << shift;
break;
default:
dstU[col] = FAST_DIV255(dstU[col] * (255 - alpha) + (srcU[col] << shift) * alpha);
dstV[col] = FAST_DIV255(dstV[col] * (255 - alpha) + (srcV[col] << shift) * alpha);
break;
}
}
srcA += ptrdiff_t(1) << vsub;
}
}
return S_OK;
}
template HRESULT CLAVSubtitleConsumer::blend_yuv_c<uint8_t, 1> BLEND_FUNC_PARAMS;
template HRESULT CLAVSubtitleConsumer::blend_yuv_c<uint8_t, 0> BLEND_FUNC_PARAMS;
template HRESULT CLAVSubtitleConsumer::blend_yuv_c<uint16_t, 0> BLEND_FUNC_PARAMS;
template HRESULT CLAVSubtitleConsumer::blend_yuv_c<uint16_t, 1> BLEND_FUNC_PARAMS;
| 6,753
|
C++
|
.cpp
| 171
| 29.690058
| 118
| 0.514177
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| true
| true
| true
| false
|
22,176
|
avcodec.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/decoders/avcodec.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "avcodec.h"
#include "moreuuids.h"
#include "parsers/MPEG2HeaderParser.h"
#include "parsers/H264SequenceParser.h"
#include "parsers/VC1HeaderParser.h"
#include "Media.h"
#include "IMediaSideData.h"
#include "IMediaSideDataFFmpeg.h"
#include "ByteParser.h"
#ifdef DEBUG
#include "lavf_log.h"
#endif
extern "C"
{
#include "libavutil/pixdesc.h"
#include "libavutil/mastering_display_metadata.h"
#include "libavutil/hdr_dynamic_metadata.h"
#include "libavutil/dovi_meta.h"
};
////////////////////////////////////////////////////////////////////////////////
// Constructor
////////////////////////////////////////////////////////////////////////////////
ILAVDecoder *CreateDecoderAVCodec()
{
return new CDecAvcodec();
}
////////////////////////////////////////////////////////////////////////////////
// Create DXVA2 Extended Flags from a AVFrame and AVCodecContext
////////////////////////////////////////////////////////////////////////////////
static DXVA2_ExtendedFormat GetDXVA2ExtendedFlags(AVCodecContext *ctx, AVFrame *frame)
{
DXVA2_ExtendedFormat fmt;
ZeroMemory(&fmt, sizeof(fmt));
fillDXVAExtFormat(fmt, -1, ctx->color_primaries, ctx->colorspace, ctx->color_trc, ctx->chroma_sample_location);
if (frame->format == AV_PIX_FMT_XYZ12LE || frame->format == AV_PIX_FMT_XYZ12BE)
fmt.VideoPrimaries = DXVA2_VideoPrimaries_BT709;
// Color Range, 0-255 or 16-235
BOOL ffFullRange = (ctx->color_range == AVCOL_RANGE_JPEG) || frame->format == AV_PIX_FMT_YUVJ420P ||
frame->format == AV_PIX_FMT_YUVJ422P || frame->format == AV_PIX_FMT_YUVJ444P ||
frame->format == AV_PIX_FMT_YUVJ440P || frame->format == AV_PIX_FMT_YUVJ411P;
fmt.NominalRange =
ffFullRange ? DXVA2_NominalRange_0_255
: (ctx->color_range == AVCOL_RANGE_MPEG) ? DXVA2_NominalRange_16_235 : DXVA2_NominalRange_Unknown;
return fmt;
}
////////////////////////////////////////////////////////////////////////////////
// avcodec -> LAV codec mappings
////////////////////////////////////////////////////////////////////////////////
// This mapping table should contain all pixel formats, except hardware formats (VDPAU, XVMC, DXVA, etc)
// A format that is not listed will be converted to YUV420
static struct PixelFormatMapping
{
AVPixelFormat ffpixfmt;
LAVPixelFormat lavpixfmt;
BOOL conversion;
int bpp;
} ff_pix_map[] = {
{AV_PIX_FMT_YUV420P, LAVPixFmt_YUV420, FALSE},
{AV_PIX_FMT_YUYV422, LAVPixFmt_YUY2, FALSE},
{AV_PIX_FMT_RGB24, LAVPixFmt_RGB32, TRUE},
{AV_PIX_FMT_BGR24, LAVPixFmt_RGB24, FALSE},
{AV_PIX_FMT_YUV422P, LAVPixFmt_YUV422, FALSE},
{AV_PIX_FMT_YUV444P, LAVPixFmt_YUV444, FALSE},
{AV_PIX_FMT_YUV410P, LAVPixFmt_YUV420, TRUE},
{AV_PIX_FMT_YUV411P, LAVPixFmt_YUV422, TRUE},
{AV_PIX_FMT_GRAY8, LAVPixFmt_YUV420, TRUE},
{AV_PIX_FMT_MONOWHITE, LAVPixFmt_YUV420, TRUE},
{AV_PIX_FMT_MONOBLACK, LAVPixFmt_YUV420, TRUE},
{AV_PIX_FMT_PAL8, LAVPixFmt_RGB32, TRUE},
{AV_PIX_FMT_YUVJ420P, LAVPixFmt_YUV420, FALSE},
{AV_PIX_FMT_YUVJ422P, LAVPixFmt_YUV422, FALSE},
{AV_PIX_FMT_YUVJ444P, LAVPixFmt_YUV444, FALSE},
{AV_PIX_FMT_UYVY422, LAVPixFmt_YUV422, TRUE},
{AV_PIX_FMT_UYYVYY411, LAVPixFmt_YUV422, TRUE},
{AV_PIX_FMT_BGR8, LAVPixFmt_RGB32, TRUE},
{AV_PIX_FMT_BGR4, LAVPixFmt_RGB32, TRUE},
{AV_PIX_FMT_BGR4_BYTE, LAVPixFmt_RGB32, TRUE},
{AV_PIX_FMT_RGB8, LAVPixFmt_RGB32, TRUE},
{AV_PIX_FMT_RGB4, LAVPixFmt_RGB32, TRUE},
{AV_PIX_FMT_RGB4_BYTE, LAVPixFmt_RGB32, TRUE},
{AV_PIX_FMT_NV12, LAVPixFmt_NV12, FALSE},
{AV_PIX_FMT_NV21, LAVPixFmt_NV12, TRUE},
{AV_PIX_FMT_ARGB, LAVPixFmt_ARGB32, TRUE},
{AV_PIX_FMT_RGBA, LAVPixFmt_ARGB32, TRUE},
{AV_PIX_FMT_ABGR, LAVPixFmt_ARGB32, TRUE},
{AV_PIX_FMT_BGRA, LAVPixFmt_ARGB32, FALSE},
{AV_PIX_FMT_GRAY16BE, LAVPixFmt_YUV420, TRUE},
{AV_PIX_FMT_GRAY16LE, LAVPixFmt_YUV420, TRUE},
{AV_PIX_FMT_YUV440P, LAVPixFmt_YUV444, TRUE},
{AV_PIX_FMT_YUVJ440P, LAVPixFmt_YUV444, TRUE},
{AV_PIX_FMT_YUVA420P, LAVPixFmt_YUV420, TRUE},
{AV_PIX_FMT_RGB48BE, LAVPixFmt_RGB48, TRUE},
{AV_PIX_FMT_RGB48LE, LAVPixFmt_RGB48, FALSE},
{AV_PIX_FMT_RGB565BE, LAVPixFmt_RGB32, TRUE},
{AV_PIX_FMT_RGB565LE, LAVPixFmt_RGB32, TRUE},
{AV_PIX_FMT_RGB555BE, LAVPixFmt_RGB32, TRUE},
{AV_PIX_FMT_RGB555LE, LAVPixFmt_RGB32, TRUE},
{AV_PIX_FMT_BGR565BE, LAVPixFmt_RGB32, TRUE},
{AV_PIX_FMT_BGR565LE, LAVPixFmt_RGB32, TRUE},
{AV_PIX_FMT_BGR555BE, LAVPixFmt_RGB32, TRUE},
{AV_PIX_FMT_BGR555LE, LAVPixFmt_RGB32, TRUE},
{AV_PIX_FMT_YUV420P16LE, LAVPixFmt_YUV420bX, FALSE, 16},
{AV_PIX_FMT_YUV420P16BE, LAVPixFmt_YUV420bX, TRUE, 16},
{AV_PIX_FMT_YUV422P16LE, LAVPixFmt_YUV422bX, FALSE, 16},
{AV_PIX_FMT_YUV422P16BE, LAVPixFmt_YUV422bX, TRUE, 16},
{AV_PIX_FMT_YUV444P16LE, LAVPixFmt_YUV444bX, FALSE, 16},
{AV_PIX_FMT_YUV444P16BE, LAVPixFmt_YUV444bX, TRUE, 16},
{AV_PIX_FMT_RGB444LE, LAVPixFmt_RGB32, TRUE},
{AV_PIX_FMT_RGB444BE, LAVPixFmt_RGB32, TRUE},
{AV_PIX_FMT_BGR444LE, LAVPixFmt_RGB32, TRUE},
{AV_PIX_FMT_BGR444BE, LAVPixFmt_RGB32, TRUE},
{AV_PIX_FMT_YA8, LAVPixFmt_YUV420, TRUE},
{AV_PIX_FMT_BGR48BE, LAVPixFmt_RGB48, TRUE},
{AV_PIX_FMT_BGR48LE, LAVPixFmt_RGB48, TRUE},
{AV_PIX_FMT_YUV420P9BE, LAVPixFmt_YUV420bX, TRUE, 9},
{AV_PIX_FMT_YUV420P9LE, LAVPixFmt_YUV420bX, FALSE, 9},
{AV_PIX_FMT_YUV420P10BE, LAVPixFmt_YUV420bX, TRUE, 10},
{AV_PIX_FMT_YUV420P10LE, LAVPixFmt_YUV420bX, FALSE, 10},
{AV_PIX_FMT_YUV422P10BE, LAVPixFmt_YUV422bX, TRUE, 10},
{AV_PIX_FMT_YUV422P10LE, LAVPixFmt_YUV422bX, FALSE, 10},
{AV_PIX_FMT_YUV444P9BE, LAVPixFmt_YUV444bX, TRUE, 9},
{AV_PIX_FMT_YUV444P9LE, LAVPixFmt_YUV444bX, FALSE, 9},
{AV_PIX_FMT_YUV444P10BE, LAVPixFmt_YUV444bX, TRUE, 10},
{AV_PIX_FMT_YUV444P10LE, LAVPixFmt_YUV444bX, FALSE, 10},
{AV_PIX_FMT_YUV422P9BE, LAVPixFmt_YUV422bX, TRUE, 9},
{AV_PIX_FMT_YUV422P9LE, LAVPixFmt_YUV422bX, FALSE, 9},
{AV_PIX_FMT_GBRP, LAVPixFmt_RGB32, TRUE},
{AV_PIX_FMT_GBRP9BE, LAVPixFmt_RGB48, TRUE},
{AV_PIX_FMT_GBRP9LE, LAVPixFmt_RGB48, TRUE},
{AV_PIX_FMT_GBRP10BE, LAVPixFmt_RGB48, TRUE},
{AV_PIX_FMT_GBRP10LE, LAVPixFmt_RGB48, TRUE},
{AV_PIX_FMT_GBRP16BE, LAVPixFmt_RGB48, TRUE},
{AV_PIX_FMT_GBRP16LE, LAVPixFmt_RGB48, TRUE},
{AV_PIX_FMT_YUVA422P, LAVPixFmt_YUV422, TRUE},
{AV_PIX_FMT_YUVA444P, LAVPixFmt_YUV444, TRUE},
{AV_PIX_FMT_YUVA420P9BE, LAVPixFmt_YUV420bX, TRUE, 9},
{AV_PIX_FMT_YUVA420P9LE, LAVPixFmt_YUV420bX, FALSE, 9},
{AV_PIX_FMT_YUVA422P9BE, LAVPixFmt_YUV422bX, TRUE, 9},
{AV_PIX_FMT_YUVA422P9LE, LAVPixFmt_YUV422bX, FALSE, 9},
{AV_PIX_FMT_YUVA444P9BE, LAVPixFmt_YUV444bX, TRUE, 9},
{AV_PIX_FMT_YUVA444P9LE, LAVPixFmt_YUV444bX, FALSE, 9},
{AV_PIX_FMT_YUVA420P10BE, LAVPixFmt_YUV420bX, TRUE, 10},
{AV_PIX_FMT_YUVA420P10LE, LAVPixFmt_YUV420bX, FALSE, 10},
{AV_PIX_FMT_YUVA422P10BE, LAVPixFmt_YUV422bX, TRUE, 10},
{AV_PIX_FMT_YUVA422P10LE, LAVPixFmt_YUV422bX, FALSE, 10},
{AV_PIX_FMT_YUVA444P10BE, LAVPixFmt_YUV444bX, TRUE, 10},
{AV_PIX_FMT_YUVA444P10LE, LAVPixFmt_YUV444bX, FALSE, 10},
{AV_PIX_FMT_YUVA420P16BE, LAVPixFmt_YUV420bX, TRUE, 16},
{AV_PIX_FMT_YUVA420P16LE, LAVPixFmt_YUV420bX, FALSE, 16},
{AV_PIX_FMT_YUVA422P16BE, LAVPixFmt_YUV422bX, TRUE, 16},
{AV_PIX_FMT_YUVA422P16LE, LAVPixFmt_YUV422bX, FALSE, 16},
{AV_PIX_FMT_YUVA444P16BE, LAVPixFmt_YUV444bX, TRUE, 16},
{AV_PIX_FMT_YUVA444P16LE, LAVPixFmt_YUV444bX, FALSE, 16},
{AV_PIX_FMT_XYZ12LE, LAVPixFmt_RGB48, TRUE},
{AV_PIX_FMT_XYZ12BE, LAVPixFmt_RGB48, TRUE},
{AV_PIX_FMT_NV16, LAVPixFmt_YUV422, TRUE},
{AV_PIX_FMT_NV20LE, LAVPixFmt_YUV422bX, TRUE, 10},
{AV_PIX_FMT_NV20BE, LAVPixFmt_YUV422bX, TRUE, 10},
{AV_PIX_FMT_RGBA64BE, LAVPixFmt_RGB48, TRUE},
{AV_PIX_FMT_RGBA64LE, LAVPixFmt_RGB48, TRUE},
{AV_PIX_FMT_BGRA64BE, LAVPixFmt_RGB48, TRUE},
{AV_PIX_FMT_BGRA64LE, LAVPixFmt_RGB48, TRUE},
{AV_PIX_FMT_NV16, LAVPixFmt_YUV422, TRUE},
{AV_PIX_FMT_YA16BE, LAVPixFmt_YUV420bX, TRUE, 16},
{AV_PIX_FMT_YA16LE, LAVPixFmt_YUV420bX, TRUE, 16},
{AV_PIX_FMT_GBRAP, LAVPixFmt_ARGB32, TRUE},
{AV_PIX_FMT_GBRAP16BE, LAVPixFmt_RGB48, TRUE},
{AV_PIX_FMT_GBRAP16LE, LAVPixFmt_RGB48, TRUE},
{AV_PIX_FMT_0RGB, LAVPixFmt_RGB32, TRUE},
{AV_PIX_FMT_RGB0, LAVPixFmt_RGB32, TRUE},
{AV_PIX_FMT_0BGR, LAVPixFmt_RGB32, TRUE},
{AV_PIX_FMT_BGR0, LAVPixFmt_RGB32, FALSE},
{AV_PIX_FMT_YUV420P12BE, LAVPixFmt_YUV420bX, TRUE, 12},
{AV_PIX_FMT_YUV420P12LE, LAVPixFmt_YUV420bX, FALSE, 12},
{AV_PIX_FMT_YUV420P14BE, LAVPixFmt_YUV420bX, TRUE, 14},
{AV_PIX_FMT_YUV420P14LE, LAVPixFmt_YUV420bX, FALSE, 14},
{AV_PIX_FMT_YUV422P12BE, LAVPixFmt_YUV422bX, TRUE, 12},
{AV_PIX_FMT_YUV422P12LE, LAVPixFmt_YUV422bX, FALSE, 12},
{AV_PIX_FMT_YUV422P14BE, LAVPixFmt_YUV422bX, TRUE, 14},
{AV_PIX_FMT_YUV422P14LE, LAVPixFmt_YUV422bX, FALSE, 14},
{AV_PIX_FMT_YUV444P12BE, LAVPixFmt_YUV444bX, TRUE, 12},
{AV_PIX_FMT_YUV444P12LE, LAVPixFmt_YUV444bX, FALSE, 12},
{AV_PIX_FMT_YUV444P14BE, LAVPixFmt_YUV444bX, TRUE, 14},
{AV_PIX_FMT_YUV444P14LE, LAVPixFmt_YUV444bX, FALSE, 14},
{AV_PIX_FMT_GBRP12BE, LAVPixFmt_RGB48, TRUE},
{AV_PIX_FMT_GBRP12LE, LAVPixFmt_RGB48, TRUE},
{AV_PIX_FMT_GBRP14BE, LAVPixFmt_RGB48, TRUE},
{AV_PIX_FMT_GBRP14LE, LAVPixFmt_RGB48, TRUE},
{AV_PIX_FMT_YUVJ411P, LAVPixFmt_YUV422, TRUE},
{AV_PIX_FMT_YUV440P10LE, LAVPixFmt_YUV444bX, TRUE, 10},
{AV_PIX_FMT_YUV440P10BE, LAVPixFmt_YUV444bX, TRUE, 10},
{AV_PIX_FMT_YUV440P12LE, LAVPixFmt_YUV444bX, TRUE, 12},
{AV_PIX_FMT_YUV440P12BE, LAVPixFmt_YUV444bX, TRUE, 12},
{AV_PIX_FMT_P010LE, LAVPixFmt_P016, FALSE, 10},
{AV_PIX_FMT_P016LE, LAVPixFmt_P016, FALSE, 16},
{AV_PIX_FMT_DXVA2_VLD, LAVPixFmt_DXVA2, FALSE},
{AV_PIX_FMT_D3D11, LAVPixFmt_D3D11, FALSE},
};
static AVCodecID ff_interlace_capable[] = {AV_CODEC_ID_DNXHD, AV_CODEC_ID_DVVIDEO, AV_CODEC_ID_FRWU,
AV_CODEC_ID_MJPEG, AV_CODEC_ID_MPEG4, AV_CODEC_ID_MPEG2VIDEO,
AV_CODEC_ID_H264, AV_CODEC_ID_VC1, AV_CODEC_ID_PNG,
AV_CODEC_ID_PRORES, AV_CODEC_ID_RAWVIDEO, AV_CODEC_ID_UTVIDEO};
static struct PixelFormatMapping getPixFmtMapping(AVPixelFormat pixfmt)
{
const PixelFormatMapping def = {pixfmt, LAVPixFmt_YUV420, TRUE, 8};
PixelFormatMapping result = def;
for (int i = 0; i < countof(ff_pix_map); i++)
{
if (ff_pix_map[i].ffpixfmt == pixfmt)
{
result = ff_pix_map[i];
break;
}
}
if (result.lavpixfmt != LAVPixFmt_YUV420bX && result.lavpixfmt != LAVPixFmt_YUV422bX &&
result.lavpixfmt != LAVPixFmt_YUV444bX)
result.bpp = 8;
return result;
}
////////////////////////////////////////////////////////////////////////////////
// AVCodec decoder implementation
////////////////////////////////////////////////////////////////////////////////
CDecAvcodec::CDecAvcodec(void)
: CDecBase()
{
}
CDecAvcodec::~CDecAvcodec(void)
{
DestroyDecoder();
}
// ILAVDecoder
STDMETHODIMP CDecAvcodec::Init()
{
#ifdef DEBUG
DbgSetModuleLevel(LOG_CUSTOM1, DWORD_MAX); // FFMPEG messages use custom1
av_log_set_callback(lavf_log_callback);
#else
av_log_set_callback(nullptr);
#endif
return S_OK;
}
STDMETHODIMP CDecAvcodec::InitDecoder(AVCodecID codec, const CMediaType *pmt)
{
DestroyDecoder();
DbgLog((LOG_TRACE, 10, L"Initializing ffmpeg for codec %S", avcodec_get_name(codec)));
BITMAPINFOHEADER *pBMI = nullptr;
videoFormatTypeHandler((const BYTE *)pmt->Format(), pmt->FormatType(), &pBMI);
if (codec == AV_CODEC_ID_AV1 && IsHardwareAccelerator())
{
m_pAVCodec = avcodec_find_decoder_by_name("av1");
}
else
{
m_pAVCodec = avcodec_find_decoder(codec);
}
CheckPointer(m_pAVCodec, VFW_E_UNSUPPORTED_VIDEO);
m_pAVCtx = avcodec_alloc_context3(m_pAVCodec);
CheckPointer(m_pAVCtx, E_POINTER);
DWORD dwDecFlags = m_pCallback->GetDecodeFlags();
// Use parsing for mpeg1/2 at all times, or H264/HEVC when its not from LAV Splitter
if (codec == AV_CODEC_ID_MPEG1VIDEO || codec == AV_CODEC_ID_MPEG2VIDEO ||
(!(dwDecFlags & LAV_VIDEO_DEC_FLAG_LAVSPLITTER) &&
(pmt->subtype == MEDIASUBTYPE_H264 || pmt->subtype == MEDIASUBTYPE_h264 || pmt->subtype == MEDIASUBTYPE_X264 ||
pmt->subtype == MEDIASUBTYPE_x264 || pmt->subtype == MEDIASUBTYPE_H264_bis ||
pmt->subtype == MEDIASUBTYPE_HEVC || pmt->subtype == MEDIASUBTYPE_H265)))
{
m_pParser = av_parser_init(codec);
}
LONG biRealWidth = pBMI->biWidth, biRealHeight = pBMI->biHeight;
if (pmt->formattype == FORMAT_VideoInfo || pmt->formattype == FORMAT_MPEGVideo)
{
VIDEOINFOHEADER *vih = (VIDEOINFOHEADER *)pmt->Format();
if (vih->rcTarget.right != 0 && vih->rcTarget.bottom != 0)
{
biRealWidth = vih->rcTarget.right;
biRealHeight = vih->rcTarget.bottom;
}
}
else if (pmt->formattype == FORMAT_VideoInfo2 || pmt->formattype == FORMAT_MPEG2Video)
{
VIDEOINFOHEADER2 *vih2 = (VIDEOINFOHEADER2 *)pmt->Format();
if (vih2->rcTarget.right != 0 && vih2->rcTarget.bottom != 0)
{
biRealWidth = vih2->rcTarget.right;
biRealHeight = vih2->rcTarget.bottom;
}
}
m_pAVCtx->codec_id = codec;
m_pAVCtx->codec_tag = pBMI->biCompression;
m_pAVCtx->coded_width = pBMI->biWidth;
m_pAVCtx->coded_height = abs(pBMI->biHeight);
m_pAVCtx->bits_per_coded_sample = pBMI->biBitCount;
m_pAVCtx->err_recognition = 0;
m_pAVCtx->workaround_bugs = FF_BUG_AUTODETECT;
// Setup threading
// Thread Count. 0 = auto detect
int thread_count = m_pSettings->GetNumThreads();
if (thread_count == 0)
{
thread_count = av_cpu_count();
}
m_pAVCtx->thread_count = max(1, min(thread_count, AVCODEC_MAX_THREADS));
if (dwDecFlags & LAV_VIDEO_DEC_FLAG_NO_MT || codec == AV_CODEC_ID_MPEG4)
{
m_pAVCtx->thread_count = 1;
}
m_pFrame = av_frame_alloc();
CheckPointer(m_pFrame, E_POINTER);
// Process Extradata
size_t extralen = 0;
getExtraData(*pmt, nullptr, &extralen);
BOOL bH264avc = FALSE;
if (pmt->formattype == FORMAT_MPEG2Video && (m_pAVCtx->codec_tag == MAKEFOURCC('a', 'v', 'c', '1') ||
m_pAVCtx->codec_tag == MAKEFOURCC('A', 'V', 'C', '1') ||
m_pAVCtx->codec_tag == MAKEFOURCC('C', 'C', 'V', '1')))
{
// Reconstruct AVC1 extradata format
DbgLog((LOG_TRACE, 10, L"-> Processing AVC1 extradata of %d bytes", extralen));
MPEG2VIDEOINFO *mp2vi = (MPEG2VIDEOINFO *)pmt->Format();
extralen += 7;
BYTE *extra = (uint8_t *)av_mallocz(extralen + AV_INPUT_BUFFER_PADDING_SIZE);
extra[0] = 1;
extra[1] = (BYTE)mp2vi->dwProfile;
extra[2] = 0;
extra[3] = (BYTE)mp2vi->dwLevel;
extra[4] = (BYTE)(mp2vi->dwFlags ? mp2vi->dwFlags : 4) - 1;
// only process extradata if available
uint8_t ps_count = 0;
if (extralen > 7)
{
// Actually copy the metadata into our new buffer
size_t actual_len;
getExtraData(*pmt, extra + 6, &actual_len);
// Count the number of SPS/PPS in them and set the length
// We'll put them all into one block and add a second block with 0 elements afterwards
// The parsing logic does not care what type they are, it just expects 2 blocks.
BYTE *p = extra + 6, *end = extra + 6 + actual_len;
BOOL bSPS = FALSE, bPPS = FALSE;
while (p + 1 < end)
{
unsigned len = (((unsigned)p[0] << 8) | p[1]) + 2;
if (p + len > end)
{
break;
}
if ((p[2] & 0x1F) == 7)
bSPS = TRUE;
if ((p[2] & 0x1F) == 8)
bPPS = TRUE;
ps_count++;
p += len;
}
}
extra[5] = ps_count;
extra[extralen - 1] = 0;
bH264avc = TRUE;
m_pAVCtx->extradata = extra;
m_pAVCtx->extradata_size = (int)extralen;
}
else if (extralen > 0)
{
DbgLog((LOG_TRACE, 10, L"-> Processing extradata of %d bytes", extralen));
BYTE *extra = nullptr;
if (pmt->subtype == MEDIASUBTYPE_LAV_RAWVIDEO)
{
if (extralen < sizeof(m_pAVCtx->pix_fmt))
{
DbgLog((LOG_TRACE, 10, L"-> LAV RAW Video extradata is missing.."));
}
else
{
extra = (uint8_t *)av_mallocz(extralen + AV_INPUT_BUFFER_PADDING_SIZE);
getExtraData(*pmt, extra, nullptr);
m_pAVCtx->pix_fmt = *(AVPixelFormat *)extra;
extralen -= sizeof(AVPixelFormat);
memmove(extra, extra + sizeof(AVPixelFormat), extralen);
}
}
else if (codec == AV_CODEC_ID_VP9)
{
// read custom vpcC headers
if (extralen >= 16)
{
extra = (uint8_t *)av_mallocz(extralen + AV_INPUT_BUFFER_PADDING_SIZE);
getExtraData(*pmt, extra, nullptr);
if (AV_RB32(extra) == MKBETAG('v', 'p', 'c', 'C') && AV_RB8(extra + 4) == 1)
{
m_pAVCtx->profile = AV_RB8(extra + 8);
m_pAVCtx->color_primaries = (AVColorPrimaries)AV_RB8(extra + 11);
m_pAVCtx->color_trc = (AVColorTransferCharacteristic)AV_RB8(extra + 12);
m_pAVCtx->colorspace = (AVColorSpace)AV_RB8(extra + 13);
int bitdepth = AV_RB8(extra + 10) >> 4;
if (m_pAVCtx->profile == 2 && bitdepth == 10)
{
m_pAVCtx->pix_fmt = AV_PIX_FMT_YUV420P10;
}
else if (m_pAVCtx->profile == 2 && bitdepth == 12)
{
m_pAVCtx->pix_fmt = AV_PIX_FMT_YUV420P12;
}
}
av_freep(&extra);
extralen = 0;
}
}
else if (codec == AV_CODEC_ID_AV1)
{
extra = (uint8_t *)av_mallocz(extralen + AV_INPUT_BUFFER_PADDING_SIZE);
getExtraData(*pmt, extra, nullptr);
// ISOBMFF start marker, invalid in OBU syntax
if (extralen >= 4 && extra[0] == 0x81)
{
CByteParser av1(extra, extralen);
av1.BitSkip(8);
m_pAVCtx->profile = av1.BitRead(3);
av1.BitSkip(5); // level idx
av1.BitSkip(1); // tier
int high_bitdepth = av1.BitRead(1);
int twelve_bit = av1.BitRead(1);
int monochrome = av1.BitRead(1);
int chroma_x = av1.BitRead(1);
int chroma_y = av1.BitRead(1);
av1.BitSkip(2); // chroma sample position
// determine pixel format
if (m_pAVCtx->profile == FF_PROFILE_AV1_MAIN)
{
if (!monochrome)
{
if (chroma_x && chroma_y)
{
if (!high_bitdepth)
m_pAVCtx->pix_fmt = AV_PIX_FMT_YUV420P;
else if (!twelve_bit)
m_pAVCtx->pix_fmt = AV_PIX_FMT_YUV420P10;
else
m_pAVCtx->pix_fmt = AV_PIX_FMT_YUV420P12;
}
else if (chroma_x && !chroma_y)
{
if (!high_bitdepth)
m_pAVCtx->pix_fmt = AV_PIX_FMT_YUV422P;
else if (!twelve_bit)
m_pAVCtx->pix_fmt = AV_PIX_FMT_YUV422P10;
else
m_pAVCtx->pix_fmt = AV_PIX_FMT_YUV422P12;
}
else if (!chroma_x && !chroma_y)
{
if (!high_bitdepth)
m_pAVCtx->pix_fmt = AV_PIX_FMT_YUV444P;
else if (!twelve_bit)
m_pAVCtx->pix_fmt = AV_PIX_FMT_YUV444P10;
else
m_pAVCtx->pix_fmt = AV_PIX_FMT_YUV444P12;
}
}
else
{
if (!high_bitdepth)
m_pAVCtx->pix_fmt = AV_PIX_FMT_GRAY8;
else if (!twelve_bit)
m_pAVCtx->pix_fmt = AV_PIX_FMT_GRAY10;
else
m_pAVCtx->pix_fmt = AV_PIX_FMT_GRAY12;
}
}
}
}
else
{
// Just copy extradata for other formats
extra = (uint8_t *)av_mallocz(extralen + AV_INPUT_BUFFER_PADDING_SIZE);
getExtraData(*pmt, extra, nullptr);
}
// Hack to discard invalid MP4 metadata with AnnexB style video
if (codec == AV_CODEC_ID_H264 && !bH264avc && extra && extra[0] == 1)
{
av_freep(&extra);
extralen = 0;
}
m_pAVCtx->extradata = extra;
m_pAVCtx->extradata_size = (int)extralen;
}
else
{
if (codec == AV_CODEC_ID_VP6 || codec == AV_CODEC_ID_VP6A || codec == AV_CODEC_ID_VP6F)
{
int cropH = pBMI->biWidth - biRealWidth;
int cropV = pBMI->biHeight - biRealHeight;
if (cropH >= 0 && cropH <= 0x0f && cropV >= 0 && cropV <= 0x0f)
{
m_pAVCtx->extradata = (uint8_t *)av_mallocz(1 + AV_INPUT_BUFFER_PADDING_SIZE);
m_pAVCtx->extradata_size = 1;
m_pAVCtx->extradata[0] = (cropH << 4) | cropV;
}
}
}
m_CurrentThread = 0;
m_rtStartCache = AV_NOPTS_VALUE;
LAVPinInfo lavPinInfo = {0};
BOOL bLAVInfoValid = SUCCEEDED(m_pCallback->GetLAVPinInfo(lavPinInfo));
m_bInputPadded = (dwDecFlags & LAV_VIDEO_DEC_FLAG_LAVSPLITTER);
// Setup codec-specific timing logic
// MPEG-4 with VideoInfo/2 is from AVI, and only DTS
if (codec == AV_CODEC_ID_MPEG4 && pmt->formattype != FORMAT_MPEG2Video)
dwDecFlags |= LAV_VIDEO_DEC_FLAG_ONLY_DTS;
// RealVideo is only DTS
if (codec == AV_CODEC_ID_RV10 || codec == AV_CODEC_ID_RV20 || codec == AV_CODEC_ID_RV30 ||
codec == AV_CODEC_ID_RV40)
dwDecFlags |= LAV_VIDEO_DEC_FLAG_ONLY_DTS;
// H.264 without B frames should use reordering for proper delay handling
if (codec == AV_CODEC_ID_H264 && bLAVInfoValid && lavPinInfo.has_b_frames == 0)
{
dwDecFlags &= ~LAV_VIDEO_DEC_FLAG_ONLY_DTS;
}
// Use ffmpegs logic to reorder timestamps
// This is required for all codecs which use frame re-ordering or frame-threaded decoding (unless they specifically
// use DTS timestamps, ie. H264 in AVI)
m_bFFReordering = !(dwDecFlags & LAV_VIDEO_DEC_FLAG_ONLY_DTS) &&
(m_pAVCodec->capabilities & (AV_CODEC_CAP_DELAY | AV_CODEC_CAP_FRAME_THREADS));
// Stop time is unreliable, drop it and calculate it
m_bCalculateStopTime = (codec == AV_CODEC_ID_H264 || codec == AV_CODEC_ID_DIRAC ||
(codec == AV_CODEC_ID_MPEG4 && pmt->formattype == FORMAT_MPEG2Video) ||
(codec == AV_CODEC_ID_VC1 && !(dwDecFlags & LAV_VIDEO_DEC_FLAG_ONLY_DTS)));
// Real Video content has some odd timestamps
// LAV Splitter does them allright with RV30/RV40, everything else screws them up
m_bRVDropBFrameTimings = (codec == AV_CODEC_ID_RV10 || codec == AV_CODEC_ID_RV20 ||
((codec == AV_CODEC_ID_RV30 || codec == AV_CODEC_ID_RV40) &&
(!(dwDecFlags & LAV_VIDEO_DEC_FLAG_LAVSPLITTER) ||
(bLAVInfoValid && (lavPinInfo.flags & LAV_STREAM_FLAG_RV34_MKV)))));
// Enable B-Frame delay handling
m_bBFrameDelay = !m_bFFReordering && !m_bRVDropBFrameTimings;
m_bWaitingForKeyFrame = TRUE;
m_bResumeAtKeyFrame = codec == AV_CODEC_ID_MPEG2VIDEO || codec == AV_CODEC_ID_VC1 ||
codec == AV_CODEC_ID_VC1IMAGE || codec == AV_CODEC_ID_WMV3 ||
codec == AV_CODEC_ID_WMV3IMAGE || codec == AV_CODEC_ID_RV30 || codec == AV_CODEC_ID_RV40 ||
codec == AV_CODEC_ID_VP3 || codec == AV_CODEC_ID_THEORA || codec == AV_CODEC_ID_MPEG4;
m_bHasPalette = m_pAVCtx->bits_per_coded_sample <= 8 && m_pAVCtx->extradata_size &&
!(dwDecFlags & LAV_VIDEO_DEC_FLAG_LAVSPLITTER) &&
(codec == AV_CODEC_ID_MSVIDEO1 || codec == AV_CODEC_ID_MSRLE || codec == AV_CODEC_ID_CINEPAK ||
codec == AV_CODEC_ID_8BPS || codec == AV_CODEC_ID_QPEG || codec == AV_CODEC_ID_QTRLE ||
codec == AV_CODEC_ID_TSCC);
if (FAILED(AdditionaDecoderInit()))
{
return E_FAIL;
}
if (bLAVInfoValid)
{
// Use strict decoding with LAV Splitter and non-live sources
if (codec == AV_CODEC_ID_H264 && !(dwDecFlags & LAV_VIDEO_DEC_FLAG_LIVE) && m_bFFReordering &&
!m_pAVCtx->hwaccel_context)
{
m_pAVCtx->strict_std_compliance = FF_COMPLIANCE_STRICT;
}
// Try to set the has_b_frames info if available
else if (lavPinInfo.has_b_frames >= 0)
{
DbgLog((LOG_TRACE, 10, L"-> Setting has_b_frames to %d", lavPinInfo.has_b_frames));
m_pAVCtx->has_b_frames = lavPinInfo.has_b_frames;
// Set H264 to at least 2 B-Frames, which is the most common for broadcasts
if (codec == AV_CODEC_ID_H264 && m_pAVCtx->has_b_frames == 1)
m_pAVCtx->has_b_frames = 2;
}
}
// codec-specific options
AVDictionary *options = nullptr;
// workaround for old/broken x264 streams
int nX264Build = m_pCallback->GetX264Build();
if (codec == AV_CODEC_ID_H264 && nX264Build != -1)
{
av_dict_set_int(&options, "x264_build", nX264Build, 0);
}
// Open the decoder
m_bInInit = TRUE;
int ret = avcodec_open2(m_pAVCtx, m_pAVCodec, &options);
m_bInInit = FALSE;
// the dict now contains all options that could not be applied
if (options)
{
av_dict_free(&options);
}
if (ret >= 0)
{
DbgLog((LOG_TRACE, 10, L"-> ffmpeg codec opened successfully (ret: %d)", ret));
m_nCodecId = codec;
}
else
{
DbgLog((LOG_TRACE, 10, L"-> ffmpeg codec failed to open (ret: %d)", ret));
DestroyDecoder();
return VFW_E_UNSUPPORTED_VIDEO;
}
m_iInterlaced = 0;
for (int i = 0; i < countof(ff_interlace_capable); i++)
{
if (codec == ff_interlace_capable[i])
{
m_iInterlaced = -1;
break;
}
}
// Detect chroma and interlaced
if (m_pAVCtx->extradata && m_pAVCtx->extradata_size)
{
if (codec == AV_CODEC_ID_MPEG2VIDEO)
{
CMPEG2HeaderParser mpeg2Parser(m_pAVCtx->extradata, m_pAVCtx->extradata_size);
if (mpeg2Parser.hdr.valid)
{
if (mpeg2Parser.hdr.chroma < 2)
{
m_pAVCtx->pix_fmt = AV_PIX_FMT_YUV420P;
}
else if (mpeg2Parser.hdr.chroma == 2)
{
m_pAVCtx->pix_fmt = AV_PIX_FMT_YUV422P;
}
m_iInterlaced = mpeg2Parser.hdr.interlaced;
}
}
else if (codec == AV_CODEC_ID_H264)
{
CH264SequenceParser h264parser;
if (bH264avc)
h264parser.ParseNALs(m_pAVCtx->extradata + 6, m_pAVCtx->extradata_size - 6, 2);
else
h264parser.ParseNALs(m_pAVCtx->extradata, m_pAVCtx->extradata_size, 0);
if (h264parser.sps.valid)
m_iInterlaced = h264parser.sps.interlaced;
}
else if (codec == AV_CODEC_ID_VC1)
{
CVC1HeaderParser vc1parser(m_pAVCtx->extradata, m_pAVCtx->extradata_size);
if (vc1parser.hdr.valid)
m_iInterlaced = (vc1parser.hdr.interlaced ? -1 : 0);
}
}
if (codec == AV_CODEC_ID_DNXHD)
m_pAVCtx->pix_fmt = AV_PIX_FMT_YUV422P10;
else if (codec == AV_CODEC_ID_FRAPS)
m_pAVCtx->pix_fmt = AV_PIX_FMT_BGR24;
if (bLAVInfoValid && lavPinInfo.pix_fmt != AV_PIX_FMT_NONE && codec != AV_CODEC_ID_FRAPS)
{
if (m_pAVCtx->pix_fmt != AV_PIX_FMT_DXVA2_VLD && m_pAVCtx->pix_fmt != AV_PIX_FMT_D3D11)
m_pAVCtx->pix_fmt = lavPinInfo.pix_fmt;
if (m_pAVCtx->sw_pix_fmt == AV_PIX_FMT_NONE)
m_pAVCtx->sw_pix_fmt = lavPinInfo.pix_fmt;
}
// Ensure software pixfmt is set, so hardware accels can use it immediately
if (m_pAVCtx->sw_pix_fmt == AV_PIX_FMT_NONE && m_pAVCtx->pix_fmt != AV_PIX_FMT_DXVA2_VLD && m_pAVCtx->pix_fmt != AV_PIX_FMT_D3D11)
m_pAVCtx->sw_pix_fmt = m_pAVCtx->pix_fmt;
DbgLog((LOG_TRACE, 10, L"AVCodec init successfull. interlaced: %d", m_iInterlaced));
return S_OK;
}
// Tear down all avcodec-related state: parser, codec context, cached decode frame,
// parser scratch buffer and swscale context. Safe to call on a partially
// initialized decoder; all members are reset so a later re-init starts clean.
STDMETHODIMP CDecAvcodec::DestroyDecoder()
{
    m_pAVCodec = nullptr;

    if (m_pParser)
    {
        av_parser_close(m_pParser);
        m_pParser = nullptr;
    }

    if (m_pAVCtx)
    {
        // Preserve the x264 build number detected by the decoder across re-inits,
        // it is fed back in on the next InitDecoder (see the "x264_build" option there).
        if (m_pAVCtx->codec_id == AV_CODEC_ID_H264)
        {
            int64_t x264build = -1;
            if (av_opt_get_int(m_pAVCtx->priv_data, "x264_build", 0, &x264build) >= 0)
                m_pCallback->SetX264Build((int)x264build);
        }

        // hwaccel_context and extradata are owned by us, free them before the context
        av_freep(&m_pAVCtx->hwaccel_context);
        av_freep(&m_pAVCtx->extradata);
        avcodec_free_context(&m_pAVCtx);
    }
    av_frame_free(&m_pFrame);

    av_freep(&m_pFFBuffer);
    m_nFFBufferSize = 0;

    if (m_pSwsContext)
    {
        sws_freeContext(m_pSwsContext);
        m_pSwsContext = nullptr;
    }

    m_nCodecId = AV_CODEC_ID_NONE;

    return S_OK;
}
// LAVFrame destructor callback: releases the AVFrame reference that was stored
// in priv_data when the frame was delivered without a pixel format conversion.
static void lav_avframe_free(LAVFrame *frame)
{
    ASSERT(frame->priv_data);
    av_frame_free((AVFrame **)&frame->priv_data);
}
// AVBuffer free callback: drops the IMediaSample reference that backs a
// zero-copy AVPacket (see FillAVPacketData, which AddRef's the sample).
static void avpacket_mediasample_free(void *opaque, uint8_t *buffer)
{
    IMediaSample *pSample = (IMediaSample *)opaque;
    SafeRelease(&pSample);
}
// Fill an AVPacket with the given input data.
//
// If the input is already padded and no parser is active, the packet references
// the caller's buffer directly (zero-copy); with bRefCounting and a dynamic input
// allocator, the backing IMediaSample is kept alive via an AVBuffer whose free
// callback releases it. Otherwise the data is copied into a freshly allocated,
// padded packet. FFmpeg side data attached to the IMediaSample is copied into
// the packet in either case.
//
// Returns E_OUTOFMEMORY when packet/buffer allocation fails, S_OK otherwise.
STDMETHODIMP CDecAvcodec::FillAVPacketData(AVPacket *avpkt, const uint8_t *buffer, int buflen, IMediaSample *pSample,
                                           bool bRefCounting)
{
    if (m_bInputPadded && (m_pParser == nullptr))
    {
        // zero-copy: point the packet at the input buffer directly
        avpkt->data = (uint8_t *)buffer;
        avpkt->size = buflen;

        if (pSample && bRefCounting && m_pCallback->HasDynamicInputAllocator())
        {
            // tie the packet lifetime to the media sample; the AVBuffer free
            // callback (avpacket_mediasample_free) releases the reference below
            avpkt->buf =
                av_buffer_create(avpkt->data, avpkt->size, avpacket_mediasample_free, pSample, AV_BUFFER_FLAG_READONLY);
            if (!avpkt->buf)
            {
                return E_OUTOFMEMORY;
            }
            pSample->AddRef();
        }
    }
    else
    {
        // create fresh packet (allocates with the required padding)
        if (av_new_packet(avpkt, buflen) < 0)
            return E_OUTOFMEMORY;

        // copy data over
        memcpy(avpkt->data, buffer, buflen);
    }

    // copy side-data from input sample
    if (pSample)
    {
        IMediaSideData *pSideData = nullptr;
        if (SUCCEEDED(pSample->QueryInterface(&pSideData)))
        {
            size_t nFFSideDataSize = 0;
            const MediaSideDataFFMpeg *pFFSideData = nullptr;
            // only accept side data of the exact expected size
            if (FAILED(
                    pSideData->GetSideData(IID_MediaSideDataFFMpeg, (const BYTE **)&pFFSideData, &nFFSideDataSize)) ||
                nFFSideDataSize != sizeof(MediaSideDataFFMpeg))
            {
                pFFSideData = nullptr;
            }

            SafeRelease(&pSideData);
            CopyMediaSideDataFF(avpkt, &pFFSideData);
        }
    }

    return S_OK;
}
// Decode one input buffer.
//
// Depending on the timing mode, timestamps are stashed into the per-thread or
// B-frame delay caches before decoding. If a parser is active, the data is routed
// through ParsePacket (which re-assembles frames); otherwise it is wrapped into
// an AVPacket and sent to DecodePacket directly. A null buffer triggers a
// decoder flush/drain.
//
// Fix vs. previous revision: the AVPacket is no longer leaked when
// FillAVPacketData fails, and av_packet_alloc is checked for failure.
STDMETHODIMP CDecAvcodec::Decode(const BYTE *buffer, int buflen, REFERENCE_TIME rtStartIn, REFERENCE_TIME rtStopIn,
                                 BOOL bSyncPoint, BOOL bDiscontinuity, IMediaSample *pSample)
{
    CheckPointer(m_pAVCtx, E_UNEXPECTED);

    // Put timestamps into the buffers if appropriate
    if (m_pAVCtx->active_thread_type & FF_THREAD_FRAME)
    {
        if (!m_bFFReordering)
        {
            m_tcThreadBuffer[m_CurrentThread].rtStart = rtStartIn;
            m_tcThreadBuffer[m_CurrentThread].rtStop = rtStopIn;
        }
        m_CurrentThread = (m_CurrentThread + 1) % m_pAVCtx->thread_count;
    }
    else if (m_bBFrameDelay)
    {
        m_tcBFrameDelay[m_nBFramePos].rtStart = rtStartIn;
        m_tcBFrameDelay[m_nBFramePos].rtStop = rtStopIn;
        m_nBFramePos = !m_nBFramePos;
    }

    // if we have a parser, it'll handle calling the decode function
    if (m_pParser)
    {
        return ParsePacket(buffer, buflen, rtStartIn, rtStopIn, pSample);
    }
    else
    {
        // Flush the decoder if appropriate
        if (buffer == nullptr)
        {
            return DecodePacket(nullptr, AV_NOPTS_VALUE, AV_NOPTS_VALUE);
        }

        // build an AVPacket
        AVPacket *avpkt = av_packet_alloc();
        if (!avpkt)
            return E_OUTOFMEMORY;

        // set data pointers
        if (FAILED(FillAVPacketData(avpkt, buffer, buflen, pSample, true)))
        {
            // free the packet, it would leak otherwise
            av_packet_free(&avpkt);
            return E_OUTOFMEMORY;
        }

        // timestamps
        avpkt->pts = rtStartIn;
        if (rtStartIn != AV_NOPTS_VALUE && rtStopIn != AV_NOPTS_VALUE)
            avpkt->duration = (int)(rtStopIn - rtStartIn);

        // flags
        avpkt->flags = bSyncPoint ? AV_PKT_FLAG_KEY : 0;

        // perform decoding
        HRESULT hr = DecodePacket(avpkt, rtStartIn, rtStopIn);

        // free packet after
        av_packet_free(&avpkt);

        // forward decoding failures, should only happen when a hardware decoder fails
        if (FAILED(hr))
        {
            return hr;
        }
    }

    return S_OK;
}
// Feed raw input through the AV parser, assembling complete frames and decoding
// each via DecodePacket. A null buffer flushes the parser and drains the decoder.
//
// Start-time handling: since the parser may consume several input samples before
// completing a frame, the timestamp of the sample that started a frame is cached
// in m_rtStartCache and applied when the frame is finally emitted.
//
// Fix vs. previous revision: the AVPacket is no longer leaked when
// FillAVPacketData fails, and av_packet_alloc is checked for failure.
STDMETHODIMP CDecAvcodec::ParsePacket(const BYTE *buffer, int buflen, REFERENCE_TIME rtStartIn, REFERENCE_TIME rtStopIn,
                                      IMediaSample *pSample)
{
    BOOL bFlush = (buffer == NULL);
    int used_bytes = 0;
    uint8_t *pDataBuffer = (uint8_t *)buffer;
    HRESULT hr = S_OK;

    // re-allocate with padding, if needed
    if (m_bInputPadded == false && buflen > 0)
    {
        // re-allocate buffer to have enough space
        BYTE *pBuf = (BYTE *)av_fast_realloc(m_pFFBuffer, &m_nFFBufferSize, buflen + AV_INPUT_BUFFER_PADDING_SIZE);
        if (!pBuf)
            return E_FAIL;

        m_pFFBuffer = pBuf;

        // copy data to buffer
        memcpy(m_pFFBuffer, buffer, buflen);
        memset(m_pFFBuffer + buflen, 0, AV_INPUT_BUFFER_PADDING_SIZE);
        pDataBuffer = m_pFFBuffer;
    }

    // loop over the data buffer until the parser has consumed all data
    while (buflen > 0 || bFlush)
    {
        REFERENCE_TIME rtStart = rtStartIn, rtStop = rtStopIn;

        uint8_t *pOutBuffer = nullptr;
        int pOutLen = 0;

        used_bytes = av_parser_parse2(m_pParser, m_pAVCtx, &pOutBuffer, &pOutLen, pDataBuffer, buflen, AV_NOPTS_VALUE,
                                      AV_NOPTS_VALUE, 0);

        if (used_bytes == 0 && pOutLen == 0 && !bFlush)
        {
            DbgLog((LOG_TRACE, 50, L"::Decode() - could not process buffer, starving?"));
            break;
        }
        else if (used_bytes > 0)
        {
            buflen -= used_bytes;
            pDataBuffer += used_bytes;
        }

        // Update start time cache
        // If more data was read then output, update the cache (incomplete frame)
        // If output is bigger, a frame was completed, update the actual rtStart with the cached value, and then
        // overwrite the cache
        if (used_bytes > pOutLen)
        {
            if (rtStartIn != AV_NOPTS_VALUE)
                m_rtStartCache = rtStartIn;
        }
        else if (used_bytes == pOutLen || ((used_bytes + 9) == pOutLen))
        {
            // Why +9 above?
            // Well, apparently there are some broken MKV muxers that like to mux the MPEG-2 PICTURE_START_CODE block
            // (which is 9 bytes) in the package with the previous frame. This would cause the frame timestamps to be
            // delayed by one frame exactly, and cause timestamp reordering to go wrong. So instead of failing on those
            // samples, lets just assume that 9 bytes are that case exactly.
            m_rtStartCache = rtStartIn = AV_NOPTS_VALUE;
        }
        else if (pOutLen > used_bytes)
        {
            rtStart = m_rtStartCache;
            m_rtStartCache = rtStartIn;
            // The value was used once, don't use it for multiple frames, that ends up in weird timings
            rtStartIn = AV_NOPTS_VALUE;
        }

        // decode any parsed data
        if (pOutLen > 0)
        {
            AVPacket *avpkt = av_packet_alloc();
            if (!avpkt)
                return E_OUTOFMEMORY;

            // set data pointers
            if (FAILED(FillAVPacketData(avpkt, pOutBuffer, pOutLen, pSample, false)))
            {
                // free the packet, it would leak otherwise
                av_packet_free(&avpkt);
                return E_OUTOFMEMORY;
            }

            // timestamp
            avpkt->pts = rtStart;

            // decode the parsed packet
            hr = DecodePacket(avpkt, rtStart, rtStop);

            // and free it after
            av_packet_free(&avpkt);

            if (FAILED(hr))
            {
                return hr;
            }
        }
        // or perform a flush at the end
        else if (bFlush)
        {
            hr = DecodePacket(nullptr, AV_NOPTS_VALUE, AV_NOPTS_VALUE);
            if (FAILED(hr))
            {
                return hr;
            }
            break;
        }
    }

    return S_OK;
}
// Send a single packet (or nullptr to drain) to the avcodec decoder and deliver
// every frame it produces.
//
// Responsibilities: MPEG-2 EOS/GOP marker detection, keyframe-wait after seeks
// (VP8/VP9 parse the packet header directly), palette side-data injection for
// old pal8 codecs, timestamp reconstruction for the various timing modes,
// interlacing detection, and attaching HDR / Dolby Vision / closed-caption side
// data to the delivered LAVFrames.
//
// Fixes vs. previous revision: the av_packet_new_side_data return value is
// checked before writing the palette (it returns NULL on allocation failure),
// and the alpha mask uses an unsigned literal to avoid shifting into the sign
// bit of a signed int.
STDMETHODIMP CDecAvcodec::DecodePacket(AVPacket *avpkt, REFERENCE_TIME rtStartIn, REFERENCE_TIME rtStopIn)
{
    int ret = 0;
    BOOL bEndOfSequence = FALSE;
    BOOL bDeliverFirst = FALSE;
    REFERENCE_TIME rtStart = rtStartIn, rtStop = rtStopIn;

    // packet pre-processing
    if (avpkt)
    {
        // EOS/GOP detection for mpeg2 video streams
        if (m_nCodecId == AV_CODEC_ID_MPEG2VIDEO)
        {
            int state = CheckForSequenceMarkers(m_nCodecId, avpkt->data, avpkt->size, &m_MpegParserState);
            if (state & STATE_EOS_FOUND)
            {
                bEndOfSequence = TRUE;
            }
            if (state & STATE_GOP_FOUND && m_nCodecId == AV_CODEC_ID_MPEG2VIDEO)
            {
                m_bWaitingForKeyFrame = FALSE;
            }
        }

        // Check for VP8 keyframes
        if (m_nCodecId == AV_CODEC_ID_VP8 && m_bWaitingForKeyFrame)
        {
            // lowest bit of the first byte is set on inter frames, clear on keyframes
            if (!(avpkt->data[0] & 1))
            {
                DbgLog((LOG_TRACE, 10, L"::Decode(): Found VP8 key-frame, resuming decoding"));
                m_bWaitingForKeyFrame = FALSE;
            }
            else
            {
                return S_OK;
            }
        }
        // Check for VP9 keyframes
        else if (m_nCodecId == AV_CODEC_ID_VP9 && m_bWaitingForKeyFrame)
        {
            CByteParser VP9Header(avpkt->data, avpkt->size);
            // check vp9 frame marker
            if (VP9Header.BitRead(2) != 0x2)
                return E_FAIL;

            int profile = VP9Header.BitRead(1) | (VP9Header.BitRead(1) << 1);
            if (profile == 3)
                profile += VP9Header.BitRead(1);

            // two zero bits after the profile indicate a displayable keyframe
            // (presumably show_existing_frame and frame_type — verify against the VP9 spec)
            if (VP9Header.BitRead(1) == 0 && VP9Header.BitRead(1) == 0)
            {
                DbgLog((LOG_TRACE, 10, L"::Decode(): Found VP9 key-frame, resuming decoding"));
                m_bWaitingForKeyFrame = FALSE;
            }
            else
            {
                return S_OK;
            }
        }

        // Add a palette from extradata, if any
        if (m_bHasPalette)
        {
            m_bHasPalette = FALSE;
            uint32_t *pal = (uint32_t *)av_packet_new_side_data(avpkt, AV_PKT_DATA_PALETTE, AVPALETTE_SIZE);
            if (pal)
            {
                int pal_size = FFMIN((1 << m_pAVCtx->bits_per_coded_sample) << 2, m_pAVCtx->extradata_size);
                uint8_t *pal_src = m_pAVCtx->extradata + m_pAVCtx->extradata_size - pal_size;
                // force the alpha channel opaque while copying the palette entries
                for (int i = 0; i < pal_size / 4; i++)
                    pal[i] = 0xFFu << 24 | AV_RL32(pal_src + 4 * i);
            }
        }
    }

send_packet:
    // send packet to the decoder
    ret = avcodec_send_packet(m_pAVCtx, avpkt);
    if (ret < 0)
    {
        // Check if post-decoding checks failed
        if (FAILED(PostDecode()))
        {
            return E_FAIL;
        }

        // EAGAIN: the decoder wants frames drained first; remember to re-send
        // this packet once after the receive loop below
        if (ret == AVERROR(EAGAIN))
        {
            if (bDeliverFirst)
            {
                DbgLog((LOG_ERROR, 10, L"::Decode(): repeated packet submission to the decoder failed"));
                ASSERT(0);
                return E_FAIL;
            }
            bDeliverFirst = TRUE;
        }
        else
            return S_FALSE;
    }
    else
    {
        bDeliverFirst = FALSE;
    }

    // loop over available frames
    while (1)
    {
        ret = avcodec_receive_frame(m_pAVCtx, m_pFrame);

        if (FAILED(PostDecode()))
        {
            av_frame_unref(m_pFrame);
            return E_FAIL;
        }

        // Decoding of this frame failed ... oh well!
        if (ret < 0 && ret != AVERROR(EAGAIN))
        {
            av_frame_unref(m_pFrame);
            return S_FALSE;
        }

        // Judge frame usability
        // This determines if a frame is artifact free and can be delivered.
        if (m_bResumeAtKeyFrame && m_bWaitingForKeyFrame && ret >= 0)
        {
            if (m_pFrame->flags & AV_FRAME_FLAG_KEY)
            {
                DbgLog((LOG_TRACE, 50, L"::Decode() - Found Key-Frame, resuming decoding at %I64d", m_pFrame->pts));
                m_bWaitingForKeyFrame = FALSE;
            }
            else
            {
                // drop the frame, pretend nothing was decoded
                ret = AVERROR(EAGAIN);
            }
        }

        // Handle B-frame delay for frame threading codecs
        if ((m_pAVCtx->active_thread_type & FF_THREAD_FRAME) && m_bBFrameDelay)
        {
            m_tcBFrameDelay[m_nBFramePos] = m_tcThreadBuffer[m_CurrentThread];
            m_nBFramePos = !m_nBFramePos;
        }

        // no frame was decoded, bail out here
        if (ret < 0 || !m_pFrame->data[0])
        {
            av_frame_unref(m_pFrame);
            break;
        }

        ///////////////////////////////////////////////////////////////////////////////////////////////
        // Determine the proper timestamps for the frame, based on different possible flags.
        ///////////////////////////////////////////////////////////////////////////////////////////////
        if (m_bFFReordering)
        {
            rtStart = m_pFrame->pts;
            if (m_pFrame->duration)
                rtStop = m_pFrame->pts + m_pFrame->duration;
            else
                rtStop = AV_NOPTS_VALUE;
        }
        else if (m_bBFrameDelay && m_pAVCtx->has_b_frames)
        {
            rtStart = m_tcBFrameDelay[m_nBFramePos].rtStart;
            rtStop = m_tcBFrameDelay[m_nBFramePos].rtStop;
        }
        else if (m_pAVCtx->active_thread_type & FF_THREAD_FRAME)
        {
            unsigned index = m_CurrentThread;
            rtStart = m_tcThreadBuffer[index].rtStart;
            rtStop = m_tcThreadBuffer[index].rtStop;
        }

        if (m_bRVDropBFrameTimings && m_pFrame->pict_type == AV_PICTURE_TYPE_B)
        {
            rtStart = AV_NOPTS_VALUE;
        }

        if (m_bCalculateStopTime)
            rtStop = AV_NOPTS_VALUE;

        ///////////////////////////////////////////////////////////////////////////////////////////////
        // All required values collected, deliver the frame
        ///////////////////////////////////////////////////////////////////////////////////////////////
        LAVFrame *pOutFrame = nullptr;
        AllocateFrame(&pOutFrame);

        AVRational display_aspect_ratio;
        int64_t num = (int64_t)m_pFrame->sample_aspect_ratio.num * m_pFrame->width;
        int64_t den = (int64_t)m_pFrame->sample_aspect_ratio.den * m_pFrame->height;
        av_reduce(&display_aspect_ratio.num, &display_aspect_ratio.den, num, den, INT_MAX);

        pOutFrame->width = m_pFrame->width;
        pOutFrame->height = m_pFrame->height;
        pOutFrame->aspect_ratio = display_aspect_ratio;
        pOutFrame->repeat = m_pFrame->repeat_pict;
        pOutFrame->key_frame = !!(m_pFrame->flags & AV_FRAME_FLAG_KEY);
        pOutFrame->frame_type = av_get_picture_type_char(m_pFrame->pict_type);
        pOutFrame->ext_format = GetDXVA2ExtendedFlags(m_pAVCtx, m_pFrame);

        // track soft telecine (repeated fields) over the last few frames
        if ((m_nCodecId == AV_CODEC_ID_H264 || m_nCodecId == AV_CODEC_ID_MPEG2VIDEO) && m_pFrame->repeat_pict)
            m_nSoftTelecine = 2;
        else if (m_nSoftTelecine > 0)
            m_nSoftTelecine--;

        // detect interlaced content; H.264/MPEG-2 streams without a progressive
        // sequence flag are treated as interlaced unless soft-telecined
        if ((m_pFrame->flags & AV_FRAME_FLAG_INTERLACED) || (!m_pAVCtx->progressive_sequence &&
            (m_nCodecId == AV_CODEC_ID_H264 || m_nCodecId == AV_CODEC_ID_MPEG2VIDEO)))
        {
            if (!m_nSoftTelecine)
                m_iInterlaced = 1;
        }
        else if (m_pAVCtx->progressive_sequence)
            m_iInterlaced = 0;

        // Don't apply aggressive deinterlacing to content that looks soft-telecined, as it would destroy the content
        bool bAggressiveFlag =
            (m_iInterlaced == 1 && m_pSettings->GetDeinterlacingMode() == DeintMode_Aggressive) && !m_nSoftTelecine;

        pOutFrame->interlaced = ((m_pFrame->flags & AV_FRAME_FLAG_INTERLACED) || bAggressiveFlag ||
                                 m_pSettings->GetDeinterlacingMode() == DeintMode_Force) &&
                                !(m_pSettings->GetDeinterlacingMode() == DeintMode_Disable);

        LAVDeintFieldOrder fo = m_pSettings->GetDeintFieldOrder();
        pOutFrame->tff =
            (fo == DeintFieldOrder_Auto) ? !!(m_pFrame->flags & AV_FRAME_FLAG_TOP_FIELD_FIRST) : (fo == DeintFieldOrder_TopFieldFirst);

        pOutFrame->rtStart = rtStart;
        pOutFrame->rtStop = rtStop;

        PixelFormatMapping map = getPixFmtMapping((AVPixelFormat)m_pFrame->format);
        pOutFrame->format = map.lavpixfmt;
        pOutFrame->bpp = map.bpp;

        if (m_nCodecId == AV_CODEC_ID_MPEG2VIDEO || m_nCodecId == AV_CODEC_ID_MPEG1VIDEO)
            pOutFrame->avgFrameDuration = GetFrameDuration();

        // HDR mastering display metadata
        AVFrameSideData *sdHDR = av_frame_get_side_data(m_pFrame, AV_FRAME_DATA_MASTERING_DISPLAY_METADATA);
        if (sdHDR)
        {
            if (sdHDR->size == sizeof(AVMasteringDisplayMetadata))
            {
                AVMasteringDisplayMetadata *metadata = (AVMasteringDisplayMetadata *)sdHDR->data;
                MediaSideDataHDR *hdr =
                    (MediaSideDataHDR *)AddLAVFrameSideData(pOutFrame, IID_MediaSideDataHDR, sizeof(MediaSideDataHDR));
                processFFHDRData(hdr, metadata);
            }
            else
            {
                DbgLog((LOG_TRACE, 10, L"::Decode(): Found HDR data of an unexpected size (%d)", sdHDR->size));
            }
        }

        // HDR content light level metadata
        AVFrameSideData *sdHDRContentLightLevel = av_frame_get_side_data(m_pFrame, AV_FRAME_DATA_CONTENT_LIGHT_LEVEL);
        if (sdHDRContentLightLevel)
        {
            if (sdHDRContentLightLevel->size == sizeof(AVContentLightMetadata))
            {
                AVContentLightMetadata *metadata = (AVContentLightMetadata *)sdHDRContentLightLevel->data;
                MediaSideDataHDRContentLightLevel *hdr = (MediaSideDataHDRContentLightLevel *)AddLAVFrameSideData(
                    pOutFrame, IID_MediaSideDataHDRContentLightLevel, sizeof(MediaSideDataHDRContentLightLevel));
                hdr->MaxCLL = metadata->MaxCLL;
                hdr->MaxFALL = metadata->MaxFALL;
            }
            else
            {
                DbgLog((LOG_TRACE, 10, L"::Decode(): Found HDR Light Level data of an unexpected size (%d)",
                        sdHDRContentLightLevel->size));
            }
        }

        // HDR10+ dynamic metadata
        AVFrameSideData *sdHDR10Plus = av_frame_get_side_data(m_pFrame, AV_FRAME_DATA_DYNAMIC_HDR_PLUS);
        if (sdHDR10Plus)
        {
            if (sdHDR10Plus->size == sizeof(AVDynamicHDRPlus))
            {
                AVDynamicHDRPlus *metadata = (AVDynamicHDRPlus *)sdHDR10Plus->data;
                MediaSideDataHDR10Plus *hdr = (MediaSideDataHDR10Plus *)AddLAVFrameSideData(
                    pOutFrame, IID_MediaSideDataHDR10Plus, sizeof(MediaSideDataHDR10Plus));
                processFFHDR10PlusData(hdr, metadata, m_pFrame->width, m_pFrame->height);
            }
            else
            {
                DbgLog((LOG_TRACE, 10, L"::Decode(): Found HDR10+ data of an unexpected size (%d)", sdHDR10Plus->size));
            }
        }

        // Dolby Vision metadata (new V2 layout plus the truncated legacy layout)
        AVFrameSideData *sdDOVI = av_frame_get_side_data(m_pFrame, AV_FRAME_DATA_DOVI_METADATA);
        if (sdDOVI)
        {
            AVDOVIMetadata *metadata = (AVDOVIMetadata *)sdDOVI->data;
            MediaSideDataDOVIMetadata *hdr = (MediaSideDataDOVIMetadata *)AddLAVFrameSideData(
                pOutFrame, IID_MediaSideDataDOVIMetadataV2, sizeof(MediaSideDataDOVIMetadata));
            processFFDOVIData(hdr, metadata);

#pragma warning(push)
#pragma warning(disable: 4996)
            // legacy format without extensions
            BYTE *hdrLegacy = (BYTE *)AddLAVFrameSideData(
                pOutFrame, IID_MediaSideDataDOVIMetadata, offsetof(MediaSideDataDOVIMetadata, Extensions));
            memcpy(hdrLegacy, hdr, offsetof(MediaSideDataDOVIMetadata, Extensions));
#pragma warning(pop)
        }

        // raw Dolby Vision RPU
        AVFrameSideData *sdDOVIRPU = av_frame_get_side_data(m_pFrame, AV_FRAME_DATA_DOVI_RPU_BUFFER);
        if (sdDOVIRPU)
        {
            BYTE *pDVRPU = AddLAVFrameSideData(pOutFrame, IID_MediaSideDataDOVIRPU, sdDOVIRPU->size);
            memcpy(pDVRPU, sdDOVIRPU->data, sdDOVIRPU->size);
        }

        // A53 closed captions
        AVFrameSideData *sdCC = av_frame_get_side_data(m_pFrame, AV_FRAME_DATA_A53_CC);
        if (sdCC)
        {
            BYTE *CC = AddLAVFrameSideData(pOutFrame, IID_MediaSideDataEIA608CC, sdCC->size);
            memcpy(CC, sdCC->data, sdCC->size);
        }

        if (map.conversion)
        {
            ConvertPixFmt(m_pFrame, pOutFrame);
        }
        else
        {
            // zero-copy delivery: reference the AVFrame, freed via lav_avframe_free
            AVFrame *pFrameRef = av_frame_alloc();
            av_frame_ref(pFrameRef, m_pFrame);

            for (int i = 0; i < 4; i++)
            {
                pOutFrame->data[i] = pFrameRef->data[i];
                pOutFrame->stride[i] = pFrameRef->linesize[i];
            }

            pOutFrame->priv_data = pFrameRef;
            pOutFrame->destruct = lav_avframe_free;

            // Check alignment on rawvideo, which can be off depending on the source file
            if (m_nCodecId == AV_CODEC_ID_RAWVIDEO)
            {
                for (int i = 0; i < 4; i++)
                {
                    if ((intptr_t)pOutFrame->data[i] % 16u || pOutFrame->stride[i] % 16u)
                    {
                        // copy the frame, its not aligned properly and would crash later
                        CopyLAVFrameInPlace(pOutFrame);
                        break;
                    }
                }
            }
        }

        if (bEndOfSequence)
            pOutFrame->flags |= LAV_FRAME_FLAG_END_OF_SEQUENCE;

        if (pOutFrame->format == LAVPixFmt_DXVA2)
        {
            pOutFrame->data[0] = m_pFrame->data[4];
            HandleDXVA2Frame(pOutFrame);
        }
        else if (pOutFrame->format == LAVPixFmt_D3D11)
        {
            HandleDXVA2Frame(pOutFrame);
        }
        else
        {
            Deliver(pOutFrame);
        }

        // after an MPEG-2 end-of-sequence, push a flush frame downstream
        if (bEndOfSequence)
        {
            bEndOfSequence = FALSE;
            if (pOutFrame->format == LAVPixFmt_DXVA2 || pOutFrame->format == LAVPixFmt_D3D11)
            {
                HandleDXVA2Frame(m_pCallback->GetFlushFrame());
            }
            else
            {
                Deliver(m_pCallback->GetFlushFrame());
            }
        }

        // increase thread count when flushing
        if (avpkt == nullptr)
        {
            m_CurrentThread = (m_CurrentThread + 1) % m_pAVCtx->thread_count;
        }

        av_frame_unref(m_pFrame);
    }

    // repeat sending the packet to the decoder if it failed first
    if (bDeliverFirst)
    {
        goto send_packet;
    }

    return S_OK;
}
// Flush all buffered decoder state after a seek or discontinuity: avcodec
// buffers, the parser, timestamp caches, and the keyframe-wait flag.
STDMETHODIMP CDecAvcodec::Flush()
{
    if (m_pAVCtx && avcodec_is_open(m_pAVCtx))
    {
        avcodec_flush_buffers(m_pAVCtx);
    }

    // the parser has no flush API, re-create it instead
    if (m_pParser)
    {
        av_parser_close(m_pParser);
        m_pParser = av_parser_init(m_nCodecId);
    }

    m_CurrentThread = 0;
    m_rtStartCache = AV_NOPTS_VALUE;
    m_bWaitingForKeyFrame = TRUE;
    m_nSoftTelecine = 0;

    m_nBFramePos = 0;
    m_tcBFrameDelay[0].rtStart = m_tcBFrameDelay[0].rtStop = AV_NOPTS_VALUE;
    m_tcBFrameDelay[1].rtStart = m_tcBFrameDelay[1].rtStop = AV_NOPTS_VALUE;

    // Fully re-initialize H.264/MPEG-2 decoders on flush, except in DVD mode
    // NOTE(review): DVD playback appears to depend on decoder state surviving a flush — confirm
    if (!(m_pCallback->GetDecodeFlags() & LAV_VIDEO_DEC_FLAG_DVD) &&
        (m_nCodecId == AV_CODEC_ID_H264 || m_nCodecId == AV_CODEC_ID_MPEG2VIDEO))
    {
        CDecAvcodec::InitDecoder(m_nCodecId, &m_pCallback->GetInputMediaType());
    }

    return __super::Flush();
}
// Signal end-of-stream: send a null buffer through Decode, which drains any
// frames still buffered in the parser and the decoder.
STDMETHODIMP CDecAvcodec::EndOfStream()
{
    Decode(nullptr, 0, AV_NOPTS_VALUE, AV_NOPTS_VALUE, FALSE, FALSE, nullptr);
    return S_OK;
}
// Report the current output pixel format and bit depth, mapped from the codec
// context's pixel format. Either out-parameter may be null if not needed.
STDMETHODIMP CDecAvcodec::GetPixelFormat(LAVPixelFormat *pPix, int *pBpp)
{
    AVPixelFormat avFmt = AV_PIX_FMT_NONE;
    if (m_pAVCtx)
        avFmt = m_pAVCtx->pix_fmt;

    const PixelFormatMapping map = getPixFmtMapping(avFmt);

    if (pPix)
        *pPix = map.lavpixfmt;
    if (pBpp)
        *pBpp = map.bpp;

    return S_OK;
}
// Convert the decoded AVFrame into the negotiated output pixel format using
// swscale, writing into freshly allocated LAVFrame buffers.
STDMETHODIMP CDecAvcodec::ConvertPixFmt(AVFrame *pFrame, LAVFrame *pOutFrame)
{
    // Allocate the buffers to write into
    HRESULT hr = AllocLAVFrameBuffers(pOutFrame);
    if (FAILED(hr))
        return hr;

    // Map to swscale compatible format
    AVPixelFormat dstFormat = getFFPixelFormatFromLAV(pOutFrame->format, pOutFrame->bpp);

    // Get a context (cached between calls, only re-created when parameters change)
    m_pSwsContext = sws_getCachedContext(m_pSwsContext, pFrame->width, pFrame->height, (AVPixelFormat)pFrame->format,
                                         pFrame->width, pFrame->height, dstFormat,
                                         SWS_BILINEAR | SWS_FULL_CHR_H_INT | SWS_PRINT_INFO, nullptr, nullptr, nullptr);

    // widen the AVFrame's int linesizes to the ptrdiff_t strides sws_scale2 expects
    ptrdiff_t linesize[4];
    for (int i = 0; i < 4; i++)
        linesize[i] = pFrame->linesize[i];

    // Perform conversion
    sws_scale2(m_pSwsContext, pFrame->data, linesize, 0, pFrame->height, pOutFrame->data, pOutFrame->stride);

    return S_OK;
}
// Compute the average frame duration in 100ns units from the codec context.
// Prefers the framerate field; falls back to the time base (doubled for
// field-based codecs), and returns 0 when neither is available.
STDMETHODIMP_(REFERENCE_TIME) CDecAvcodec::GetFrameDuration()
{
    const AVRational fps = m_pAVCtx->framerate;
    if (fps.den && fps.num)
        return (REF_SECOND_MULT * fps.den / fps.num);

    const AVRational tb = m_pAVCtx->time_base;
    if (tb.den && tb.num)
    {
        const bool bFieldBased =
            m_pAVCtx->codec_descriptor && (m_pAVCtx->codec_descriptor->props & AV_CODEC_PROP_FIELDS);
        return (REF_SECOND_MULT * tb.num / tb.den) * (bFieldBased ? 2 : 1);
    }

    return 0;
}
// Report whether the stream should be treated as interlaced.
// m_iInterlaced: 1 = confirmed interlaced, -1 = possibly interlaced (codec is
// capable, not yet confirmed; only counts when guessing is allowed), 0 = progressive.
// Forced deinterlacing always reports interlaced.
STDMETHODIMP_(BOOL) CDecAvcodec::IsInterlaced(BOOL bAllowGuess)
{
    if (bAllowGuess && m_iInterlaced)
        return TRUE;

    if (m_iInterlaced > 0)
        return TRUE;

    return m_pSettings->GetDeinterlacingMode() == DeintMode_Force;
}
| 56,782
|
C++
|
.cpp
| 1,335
| 32.665169
| 135
| 0.573761
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| true
| true
| false
|
22,179
|
d3d11va.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/decoders/d3d11va.cpp
|
/*
* Copyright (C) 2017-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "d3d11va.h"
#include "ID3DVideoMemoryConfiguration.h"
#include "dxva2/dxva_common.h"
#include <d3d11_1.h>
// Factory function: create a new D3D11 hardware decoder instance.
ILAVDecoder *CreateDecoderD3D11()
{
    return new CDecD3D11();
}
// Verify that the DXGI adapter at dwIndex matches the given device id; if it
// does not, scan all adapters for one that does and update dwIndex accordingly.
// Returns S_OK when a matching adapter was found, E_FAIL otherwise.
HRESULT VerifyD3D11Device(DWORD &dwIndex, DWORD dwDeviceId)
{
    HRESULT hr = S_OK;
    DXGI_ADAPTER_DESC desc;

    HMODULE dxgi = LoadLibrary(L"dxgi.dll");
    if (dxgi == nullptr)
    {
        hr = E_FAIL;
        goto done;
    }

    PFN_CREATE_DXGI_FACTORY1 mCreateDXGIFactory1 = (PFN_CREATE_DXGI_FACTORY1)GetProcAddress(dxgi, "CreateDXGIFactory1");
    if (mCreateDXGIFactory1 == nullptr)
    {
        hr = E_FAIL;
        goto done;
    }

    IDXGIAdapter *pDXGIAdapter = nullptr;
    IDXGIFactory1 *pDXGIFactory = nullptr;

    hr = mCreateDXGIFactory1(IID_IDXGIFactory1, (void **)&pDXGIFactory);
    if (FAILED(hr))
        goto done;

    // check the adapter specified by dwIndex
    hr = pDXGIFactory->EnumAdapters(dwIndex, &pDXGIAdapter);
    if (FAILED(hr))
        goto done;

    // if it matches the device id, then all is well and we're done
    pDXGIAdapter->GetDesc(&desc);
    if (desc.DeviceId == dwDeviceId)
        goto done;

    SafeRelease(&pDXGIAdapter);

    // try to find a device that matches this device id
    UINT i = 0;
    while (SUCCEEDED(pDXGIFactory->EnumAdapters(i, &pDXGIAdapter)))
    {
        pDXGIAdapter->GetDesc(&desc);
        SafeRelease(&pDXGIAdapter);

        if (desc.DeviceId == dwDeviceId)
        {
            dwIndex = i;
            goto done;
        }
        i++;
    }

    // if none is found, fail
    hr = E_FAIL;

done:
    // FreeLibrary(nullptr) on the early-failure path is harmless (it just fails)
    SafeRelease(&pDXGIAdapter);
    SafeRelease(&pDXGIFactory);
    FreeLibrary(dxgi);
    return hr;
}
////////////////////////////////////////////////////////////////////////////////
// D3D11 decoder implementation
////////////////////////////////////////////////////////////////////////////////
// Supported D3D feature levels, ordered from highest to lowest.
// s_GetD3D11FeatureLevels relies on this descending order.
static const D3D_FEATURE_LEVEL s_D3D11Levels[] = {
    D3D_FEATURE_LEVEL_12_1,
    D3D_FEATURE_LEVEL_12_0,
    D3D_FEATURE_LEVEL_11_1,
    D3D_FEATURE_LEVEL_11_0,
    D3D_FEATURE_LEVEL_10_1,
    D3D_FEATURE_LEVEL_10_0,
};
static int s_GetD3D11FeatureLevels(int max_fl, const D3D_FEATURE_LEVEL **out)
{
static const int levels_len = countof(s_D3D11Levels);
int start = 0;
for (; start < levels_len; start++)
{
if (s_D3D11Levels[start] <= max_fl)
break;
}
*out = &s_D3D11Levels[start];
return levels_len - start;
}
// Map a software pixel format to the matching DXGI surface format.
// 10-bit formats use P010; everything else falls back to NV12.
static DXGI_FORMAT d3d11va_map_sw_to_hw_format(enum AVPixelFormat pix_fmt)
{
    if (pix_fmt == AV_PIX_FMT_YUV420P10 || pix_fmt == AV_PIX_FMT_P010)
        return DXGI_FORMAT_P010;

    return DXGI_FORMAT_NV12;
}
// Construct the D3D11 decoder; starts with an empty output frame queue.
CDecD3D11::CDecD3D11(void)
    : CDecAvcodec()
{
    ZeroMemory(&m_FrameQueue, sizeof(m_FrameQueue));
}
// Full teardown: destroy all decoding resources, notify the surface allocator
// that its decoder is going away, then drop our reference on it.
CDecD3D11::~CDecD3D11(void)
{
    DestroyDecoder(true);

    if (m_pAllocator)
        m_pAllocator->DecoderDestruct();
    SafeRelease(&m_pAllocator);
}
// Release all D3D11 decoding resources: queued frames, output views, the video
// decoder, the staging texture and the frames context, plus the underlying
// avcodec decoder. With bFull, also drop the device context and unload the
// runtime DLLs (complete teardown, used from the destructor).
STDMETHODIMP CDecD3D11::DestroyDecoder(bool bFull)
{
    for (int i = 0; i < D3D11_QUEUE_SURFACES; i++)
    {
        ReleaseFrame(&m_FrameQueue[i]);
    }

    if (m_pOutputViews)
    {
        for (int i = 0; i < m_nOutputViews; i++)
        {
            SafeRelease(&m_pOutputViews[i]);
        }
        av_freep(&m_pOutputViews);
        m_nOutputViews = 0;
    }

    SafeRelease(&m_pDecoder);
    SafeRelease(&m_pD3D11StagingTexture);
    av_buffer_unref(&m_pFramesCtx);

    // tear down the underlying avcodec decoder as well
    CDecAvcodec::DestroyDecoder();

    if (bFull)
    {
        av_buffer_unref(&m_pDevCtx);

        if (dx.d3d11lib)
        {
            FreeLibrary(dx.d3d11lib);
            dx.d3d11lib = nullptr;
        }

        if (dx.dxgilib)
        {
            FreeLibrary(dx.dxgilib);
            dx.dxgilib = nullptr;
        }
    }

    return S_OK;
}
// ILAVDecoder
// Load the D3D11/DXGI runtime DLLs and resolve the entry points needed later
// (D3D11CreateDevice, CreateDXGIFactory1). Fails on systems without them.
STDMETHODIMP CDecD3D11::Init()
{
    // D3D11 decoding requires Windows 8 or newer
    if (!IsWindows8OrNewer())
        return E_NOINTERFACE;

    dx.d3d11lib = LoadLibrary(L"d3d11.dll");
    if (dx.d3d11lib == nullptr)
    {
        DbgLog((LOG_TRACE, 10, L"Cannot open d3d11.dll"));
        return E_FAIL;
    }

    dx.mD3D11CreateDevice = (PFN_D3D11_CREATE_DEVICE)GetProcAddress(dx.d3d11lib, "D3D11CreateDevice");
    if (dx.mD3D11CreateDevice == nullptr)
    {
        DbgLog((LOG_TRACE, 10, L"D3D11CreateDevice not available"));
        return E_FAIL;
    }

    dx.dxgilib = LoadLibrary(L"dxgi.dll");
    if (dx.dxgilib == nullptr)
    {
        DbgLog((LOG_TRACE, 10, L"Cannot open dxgi.dll"));
        return E_FAIL;
    }

    dx.mCreateDXGIFactory1 = (PFN_CREATE_DXGI_FACTORY1)GetProcAddress(dx.dxgilib, "CreateDXGIFactory1");
    if (dx.mCreateDXGIFactory1 == nullptr)
    {
        DbgLog((LOG_TRACE, 10, L"CreateDXGIFactory1 not available"));
        return E_FAIL;
    }

    return S_OK;
}
// Capability probe: can a hardware D3D11 device with video support be created?
STDMETHODIMP CDecD3D11::Check()
{
    // attempt creating a hardware device with video support
    // by passing nullptr to the device parameter, no actual device will be created and only support will be checked
    // do probing against level 11.1 only, to avoid complex checking logic here
    const D3D_FEATURE_LEVEL *levels = NULL;
    int level_count = s_GetD3D11FeatureLevels(D3D_FEATURE_LEVEL_11_1, &levels);
    HRESULT hr =
        dx.mD3D11CreateDevice(nullptr, D3D_DRIVER_TYPE_HARDWARE, nullptr, D3D11_CREATE_DEVICE_VIDEO_SUPPORT,
                              levels, level_count, D3D11_SDK_VERSION, nullptr, nullptr, nullptr);
    return hr;
}
// Create (once) the custom D3D11 surface allocator used for native-mode output
// and hand its IMemAllocator interface to the caller. In read-back fallback
// mode no custom allocator is used (E_NOTIMPL tells the caller to use a default one).
STDMETHODIMP CDecD3D11::InitAllocator(IMemAllocator **ppAlloc)
{
    HRESULT hr = S_OK;
    if (m_bReadBackFallback)
        return E_NOTIMPL;

    if (m_pAllocator == nullptr)
    {
        m_pAllocator = new CD3D11SurfaceAllocator(this, &hr);
        if (!m_pAllocator)
        {
            return E_OUTOFMEMORY;
        }
        if (FAILED(hr))
        {
            SAFE_DELETE(m_pAllocator);
            return hr;
        }

        // Hold a reference on the allocator
        m_pAllocator->AddRef();
    }

    // return the proper interface
    return m_pAllocator->QueryInterface(__uuidof(IMemAllocator), (void **)ppAlloc);
}
// Create a D3D11 device with video support on the adapter at nDeviceIndex,
// retrying on the default adapter if the requested one is unavailable.
// Multithread protection is enabled on the device, and the adapter description
// is returned via pDesc when requested. The caller owns the returned device.
STDMETHODIMP CDecD3D11::CreateD3D11Device(UINT nDeviceIndex, ID3D11Device **ppDevice, DXGI_ADAPTER_DESC *pDesc)
{
    ID3D11Device *pD3D11Device = nullptr;

    // create DXGI factory
    IDXGIAdapter *pDXGIAdapter = nullptr;
    IDXGIFactory1 *pDXGIFactory = nullptr;
    HRESULT hr = dx.mCreateDXGIFactory1(IID_IDXGIFactory1, (void **)&pDXGIFactory);
    if (FAILED(hr))
    {
        DbgLog((LOG_ERROR, 10, L"-> DXGIFactory creation failed"));
        goto fail;
    }

    // find the adapter
enum_adapter:
    hr = pDXGIFactory->EnumAdapters(nDeviceIndex, &pDXGIAdapter);
    if (FAILED(hr))
    {
        if (nDeviceIndex != 0)
        {
            DbgLog(
                (LOG_ERROR, 10, L"-> Requested DXGI device %d not available, falling back to default", nDeviceIndex));
            nDeviceIndex = 0;
            hr = pDXGIFactory->EnumAdapters(0, &pDXGIAdapter);
        }

        if (FAILED(hr))
        {
            DbgLog((LOG_ERROR, 10, L"-> Failed to enumerate a valid DXGI device"));
            goto fail;
        }
    }

    // Create a device with video support, and BGRA support for Direct2D interoperability (drawing UI, etc)
    UINT nCreationFlags = D3D11_CREATE_DEVICE_VIDEO_SUPPORT | D3D11_CREATE_DEVICE_BGRA_SUPPORT;
    D3D_FEATURE_LEVEL max_level = D3D_FEATURE_LEVEL_12_1;
    D3D_FEATURE_LEVEL d3dFeatureLevel;

    // retry with progressively lower maximum feature levels until creation succeeds
    do
    {
        const D3D_FEATURE_LEVEL *levels = NULL;
        int level_count = s_GetD3D11FeatureLevels(max_level, &levels);
        hr = dx.mD3D11CreateDevice(pDXGIAdapter, D3D_DRIVER_TYPE_UNKNOWN, nullptr, nCreationFlags, levels, level_count,
                                   D3D11_SDK_VERSION, &pD3D11Device, &d3dFeatureLevel, nullptr);
        if (SUCCEEDED(hr))
            break;

        // 12.0+ devices fail on Windows 8.1, try without it
        if (max_level >= D3D_FEATURE_LEVEL_12_0)
        {
            max_level = D3D_FEATURE_LEVEL_11_1;
            continue;
        }
        break;
    } while (true);

    if (FAILED(hr))
    {
        if (nDeviceIndex != 0)
        {
            DbgLog((
                LOG_ERROR, 10,
                L"-> Failed to create a D3D11 device with video support on requested device %d, re-trying with default",
                nDeviceIndex));
            SafeRelease(&pDXGIAdapter);
            nDeviceIndex = 0;
            goto enum_adapter;
        }

        DbgLog((LOG_ERROR, 10, L"-> Failed to create a D3D11 device with video support"));
        goto fail;
    }

    DbgLog((LOG_TRACE, 10, L"-> Created D3D11 device with feature level %d.%d", d3dFeatureLevel >> 12,
            (d3dFeatureLevel >> 8) & 0xF));

    // enable multithreaded protection
    ID3D10Multithread *pMultithread = nullptr;
    hr = pD3D11Device->QueryInterface(&pMultithread);
    if (SUCCEEDED(hr))
    {
        pMultithread->SetMultithreadProtected(TRUE);
        SafeRelease(&pMultithread);
    }

    // store adapter info
    if (pDesc)
    {
        ZeroMemory(pDesc, sizeof(*pDesc));
        pDXGIAdapter->GetDesc(pDesc);
    }

    // return device
    *ppDevice = pD3D11Device;

fail:
    SafeRelease(&pDXGIFactory);
    SafeRelease(&pDXGIAdapter);
    return hr;
}
// Called after the output pin connects. (Re-)creates the D3D11 device for the
// chosen adapter, probes the connection for native D3D11 texture delivery and
// verifies the hardware can actually decode the current stream. On success,
// m_bReadBackFallback reflects whether we deliver textures (false) or copy
// back to system memory (true).
STDMETHODIMP CDecD3D11::PostConnect(IPin *pPin)
{
    DbgLog((LOG_TRACE, 10, L"CDecD3D11::PostConnect()"));
    HRESULT hr = S_OK;

    // Ask the connected pin for native D3D11 decoding support.
    // Failure is not fatal - we fall back to read-back mode.
    ID3D11DecoderConfiguration *pD3D11DecoderConfiguration = nullptr;
    hr = pPin->QueryInterface(&pD3D11DecoderConfiguration);
    if (FAILED(hr))
    {
        DbgLog((LOG_ERROR, 10, L"-> ID3D11DecoderConfiguration not available, using fallback mode"));
    }

    // Release old D3D resources, we're about to re-init
    m_pCallback->ReleaseAllDXVAResources();

    // free the decoder to force a re-init down the line
    SafeRelease(&m_pDecoder);

    // and the old device
    av_buffer_unref(&m_pDevCtx);

    // device id
    UINT nDevice = m_pSettings->GetHWAccelDeviceIndex(HWAccel_D3D11, nullptr);

    // in automatic mode use the device the renderer gives us
    if (nDevice == LAVHWACCEL_DEVICE_DEFAULT && pD3D11DecoderConfiguration)
    {
        nDevice = pD3D11DecoderConfiguration->GetD3D11AdapterIndex();
    }
    else
    {
        // if a device is specified manually, fallback to copy-back and use the selected device
        SafeRelease(&pD3D11DecoderConfiguration);

        // use the configured device
        if (nDevice == LAVHWACCEL_DEVICE_DEFAULT)
            nDevice = 0;
    }

    // create the device
    ID3D11Device *pD3D11Device = nullptr;
    hr = CreateD3D11Device(nDevice, &pD3D11Device, &m_AdapterDesc);
    if (FAILED(hr))
    {
        goto fail;
    }

    // allocate and fill device context
    // NOTE(review): the av_hwdevice_ctx_alloc() result is dereferenced without
    // a null check - an allocation failure here would crash.
    m_pDevCtx = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_D3D11VA);
    AVD3D11VADeviceContext *pDeviceContext = (AVD3D11VADeviceContext *)((AVHWDeviceContext *)m_pDevCtx->data)->hwctx;
    // ownership of pD3D11Device transfers to the ffmpeg device context here
    pDeviceContext->device = pD3D11Device;

    // finalize the context
    int ret = av_hwdevice_ctx_init(m_pDevCtx);
    if (ret < 0)
    {
        av_buffer_unref(&m_pDevCtx);
        goto fail;
    }

    // enable multithreaded protection
    ID3D10Multithread *pMultithread = nullptr;
    hr = pDeviceContext->device_context->QueryInterface(&pMultithread);
    if (SUCCEEDED(hr))
    {
        pMultithread->SetMultithreadProtected(TRUE);
        SafeRelease(&pMultithread);
    }

    // check if the connection supports native mode
    // (the negotiated subtype must match the decode surface format)
    if (pD3D11DecoderConfiguration)
    {
        CMediaType mt = m_pCallback->GetOutputMediaType();
        if ((m_SurfaceFormat == DXGI_FORMAT_NV12 && mt.subtype != MEDIASUBTYPE_NV12) ||
            (m_SurfaceFormat == DXGI_FORMAT_P010 && mt.subtype != MEDIASUBTYPE_P010) ||
            (m_SurfaceFormat == DXGI_FORMAT_P016 && mt.subtype != MEDIASUBTYPE_P016))
        {
            DbgLog((LOG_ERROR, 10, L"-> Connection is not the appropriate pixel format for D3D11 Native"));
            SafeRelease(&pD3D11DecoderConfiguration);
        }
    }

    // verify hardware support
    {
        // find a decoder profile GUID for codec/profile/surface format
        GUID guidConversion = GUID_NULL;
        hr = FindVideoServiceConversion(m_pAVCtx->codec_id, m_pAVCtx->profile, m_SurfaceFormat, &guidConversion);
        if (FAILED(hr))
        {
            goto fail;
        }

        // get decoder configuration
        D3D11_VIDEO_DECODER_DESC desc = {0};
        desc.Guid = guidConversion;
        desc.OutputFormat = m_SurfaceFormat;
        desc.SampleWidth = m_pAVCtx->coded_width;
        desc.SampleHeight = m_pAVCtx->coded_height;

        D3D11_VIDEO_DECODER_CONFIG decoder_config = {0};
        hr = FindDecoderConfiguration(&desc, &decoder_config);
        if (FAILED(hr))
        {
            goto fail;
        }

        // test creating a texture; with a null output pointer CreateTexture2D
        // only validates the description (S_FALSE on success, not a failure)
        D3D11_TEXTURE2D_DESC texDesc = {0};
        texDesc.Width = m_pAVCtx->coded_width;
        texDesc.Height = m_pAVCtx->coded_height;
        texDesc.MipLevels = 1;
        texDesc.ArraySize = GetBufferCount();
        texDesc.Format = m_SurfaceFormat;
        texDesc.SampleDesc.Count = 1;
        texDesc.Usage = D3D11_USAGE_DEFAULT;
        texDesc.BindFlags = D3D11_BIND_DECODER | D3D11_BIND_SHADER_RESOURCE;
        texDesc.MiscFlags = D3D11_RESOURCE_MISC_SHARED;

        hr = pD3D11Device->CreateTexture2D(&texDesc, nullptr, nullptr);
        if (FAILED(hr))
        {
            goto fail;
        }
    }

    // Notice the connected pin that we're sending D3D11 textures
    if (pD3D11DecoderConfiguration)
    {
        hr = pD3D11DecoderConfiguration->ActivateD3D11Decoding(pDeviceContext->device, pDeviceContext->device_context,
                                                               pDeviceContext->lock_ctx, 0);
        SafeRelease(&pD3D11DecoderConfiguration);

        // if activation failed, we still decode, but copy back to system memory
        m_bReadBackFallback = FAILED(hr);
    }
    else
    {
        m_bReadBackFallback = true;
    }

    return S_OK;

fail:
    SafeRelease(&pD3D11DecoderConfiguration);
    return E_FAIL;
}
// Tear down DXVA state when the output connection is broken.
STDMETHODIMP CDecD3D11::BreakConnect()
{
    // nothing to tear down when decoding into system memory (read-back mode)
    if (m_bReadBackFallback)
        return S_FALSE;

    // release any resources held by the core
    m_pCallback->ReleaseAllDXVAResources();

    // flush all buffers out of the decoder to ensure the allocator can be properly de-allocated
    const bool bDecoderOpen = (m_pAVCtx != nullptr) && avcodec_is_open(m_pAVCtx);
    if (bDecoderOpen)
        avcodec_flush_buffers(m_pAVCtx);

    return S_OK;
}
// Initialize the avcodec side of the decoder for the given codec/media type
// and pre-compute the surface format and aligned dimensions, so the default
// output media type is correct before the hardware decoder exists.
STDMETHODIMP CDecD3D11::InitDecoder(AVCodecID codec, const CMediaType *pmt)
{
    HRESULT hr = S_OK;
    DbgLog((LOG_TRACE, 10, L"CDecD3D11::InitDecoder(): Initializing D3D11 decoder"));

    // Destroy old decoder
    DestroyDecoder(false);

    // reset stream compatibility
    m_bFailHWDecode = false;

    m_DisplayDelay = D3D11_QUEUE_SURFACES;

    // Reduce display delay for DVD decoding for lower decode latency
    if (m_pCallback->GetDecodeFlags() & LAV_VIDEO_DEC_FLAG_DVD)
        m_DisplayDelay /= 2;

    // Initialize ffmpeg
    hr = CDecAvcodec::InitDecoder(codec, pmt);
    if (FAILED(hr))
        return hr;

    // bail out early on codec profiles known to be incompatible with D3D11VA
    if (check_dxva_codec_profile(m_pAVCtx, AV_PIX_FMT_D3D11))
    {
        DbgLog((LOG_TRACE, 10, L"-> Incompatible profile detected, falling back to software decoding"));
        return E_FAIL;
    }

    // initialize surface format to ensure the default media type is set properly
    m_SurfaceFormat = d3d11va_map_sw_to_hw_format(m_pAVCtx->sw_pix_fmt);
    m_dwSurfaceWidth = dxva_align_dimensions(m_pAVCtx->codec_id, m_pAVCtx->coded_width);
    m_dwSurfaceHeight = dxva_align_dimensions(m_pAVCtx->codec_id, m_pAVCtx->coded_height);

    return S_OK;
}
// Hook the D3D11VA hwaccel into the avcodec context: allocate the hwaccel
// context struct and register the get_format/get_buffer2 callbacks. The
// decoder/device references inside the context are filled in later by
// FillHWContext() once the D3D11 decoder object exists.
HRESULT CDecD3D11::AdditionaDecoderInit()
{
    AVD3D11VAContext *ctx = av_d3d11va_alloc_context();
    if (ctx == nullptr)
        return E_OUTOFMEMORY; // allocation failure was previously unchecked

    if (m_pDecoder)
    {
        FillHWContext(ctx);
    }

    // hwaccel decoding must run single-threaded
    m_pAVCtx->thread_count = 1;
    m_pAVCtx->thread_type = 0;
    m_pAVCtx->hwaccel_context = ctx;
    m_pAVCtx->get_format = get_d3d11_format;
    m_pAVCtx->get_buffer2 = get_d3d11_buffer;
    m_pAVCtx->opaque = this;
    m_pAVCtx->slice_flags |= SLICE_FLAG_ALLOW_FIELD;

    // disable error concealment in hwaccel mode, it doesn't work either way
    m_pAVCtx->error_concealment = 0;
    av_opt_set_int(m_pAVCtx, "enable_er", 0, AV_OPT_SEARCH_CHILDREN);

    return S_OK;
}
// Populate the ffmpeg D3D11VA hwaccel context with the current decoder,
// configuration, output views and device synchronization objects.
STDMETHODIMP CDecD3D11::FillHWContext(AVD3D11VAContext *ctx)
{
    AVHWDeviceContext *pHWDevCtx = (AVHWDeviceContext *)m_pDevCtx->data;
    AVD3D11VADeviceContext *pDeviceContext = (AVD3D11VADeviceContext *)pHWDevCtx->hwctx;

    // decoder object and its negotiated configuration
    ctx->decoder = m_pDecoder;
    ctx->cfg = &m_DecoderConfig;

    // output surface views
    ctx->surface = m_pOutputViews;
    ctx->surface_count = m_nOutputViews;

    // device-level state
    ctx->video_context = pDeviceContext->video_context;
    ctx->context_mutex = pDeviceContext->lock_ctx;
    ctx->workaround = 0;

    return S_OK;
}
// Compute how many decode surfaces to allocate, and optionally report the
// upper limit the caller may request (pMaxBuffers).
STDMETHODIMP_(long) CDecD3D11::GetBufferCount(long *pMaxBuffers)
{
    long buffers;

    if (!m_bReadBackFallback)
    {
        // Native decoding should use 16 buffers to enable seamless codec changes
        buffers = 16;
    }
    else
    {
        // Buffers based on max ref frames
        switch (m_nCodecId)
        {
        case AV_CODEC_ID_H264:
        case AV_CODEC_ID_HEVC:
            buffers = 16;
            break;
        case AV_CODEC_ID_VP9:
        case AV_CODEC_ID_AV1:
            buffers = 8;
            break;
        default:
            buffers = 2;
            break;
        }
    }

    // 4 extra buffers for handling and safety
    buffers += 4;

    // in read-back mode the display queue holds additional frames
    if (m_bReadBackFallback)
        buffers += m_DisplayDelay;

    // DVD decoding needs extra headroom
    if (m_pCallback->GetDecodeFlags() & LAV_VIDEO_DEC_FLAG_DVD)
        buffers += 4;

    if (pMaxBuffers)
    {
        // cap at 127, because it needs to fit into the 7-bit DXVA structs
        *pMaxBuffers = 127;

        // VC-1 and VP9 decoding has stricter requirements (decoding flickers otherwise)
        const bool bStrictCodec =
            (m_nCodecId == AV_CODEC_ID_VC1 || m_nCodecId == AV_CODEC_ID_VP9 || m_nCodecId == AV_CODEC_ID_AV1);
        if (bStrictCodec)
            *pMaxBuffers = 32;
    }

    return buffers;
}
// Empty the display ring buffer: deliver every queued frame downstream when
// bDeliver is TRUE, otherwise just release them.
STDMETHODIMP CDecD3D11::FlushDisplayQueue(BOOL bDeliver)
{
    // walk the ring exactly once, starting from the current position
    for (int n = 0; n < m_DisplayDelay; ++n)
    {
        LAVFrame *&pQueued = m_FrameQueue[m_FrameQueuePosition];
        if (pQueued)
        {
            if (bDeliver)
            {
                DeliverD3D11Frame(pQueued);
                pQueued = nullptr;
            }
            else
            {
                ReleaseFrame(&pQueued);
            }
        }
        m_FrameQueuePosition = (m_FrameQueuePosition + 1) % m_DisplayDelay;
    }
    return S_OK;
}
// Full flush: reset the avcodec state, then discard all queued output frames.
STDMETHODIMP CDecD3D11::Flush()
{
    CDecAvcodec::Flush();

    // drop (do not deliver) everything still in the display queue
    FlushDisplayQueue(FALSE);

    return S_OK;
}
// End of stream: drain avcodec, then deliver every frame still held in the
// display queue downstream.
STDMETHODIMP CDecD3D11::EndOfStream()
{
    CDecAvcodec::EndOfStream();

    // deliver (not discard) the queued frames
    FlushDisplayQueue(TRUE);

    return S_OK;
}
// Called after each decode cycle; reports failure when hardware decoding
// broke down mid-stream so the caller can fall back to software decoding.
HRESULT CDecD3D11::PostDecode()
{
    if (!m_bFailHWDecode)
        return S_OK;

    DbgLog((LOG_TRACE, 10, L"::PostDecode(): HW Decoder failed, falling back to software decoding"));
    return E_FAIL;
}
// avcodec get_format callback: pick AV_PIX_FMT_D3D11 from the offered list
// and (re)create the D3D11 decoder for it. If decoder creation fails, the
// stream is flagged HW-incompatible and scanning continues, eventually
// returning a software format (the fallback is then handled in PostDecode).
enum AVPixelFormat CDecD3D11::get_d3d11_format(struct AVCodecContext *s, const enum AVPixelFormat *pix_fmts)
{
    CDecD3D11 *pDec = (CDecD3D11 *)s->opaque;
    const enum AVPixelFormat *p;
    for (p = pix_fmts; *p != -1; p++)
    {
        const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(*p);
        // stop at the first software (non-hwaccel) format in the list
        if (!desc || !(desc->flags & AV_PIX_FMT_FLAG_HWACCEL))
            break;

        if (*p == AV_PIX_FMT_D3D11)
        {
            HRESULT hr = pDec->ReInitD3D11Decoder(s);
            if (FAILED(hr))
            {
                // remember the failure, keep scanning for a usable format
                pDec->m_bFailHWDecode = TRUE;
                continue;
            }
            else
            {
                break;
            }
        }
    }
    // *p is either AV_PIX_FMT_D3D11, the first software format, or the -1 terminator
    return *p;
}
// avcodec get_buffer2 callback: hand a D3D11 surface to the decoder.
// Read-back mode allocates from our own frames context; native mode borrows
// a sample from the downstream D3D11 allocator. Returns 0 on success, -1 on
// failure (which also flags the stream for software fallback where relevant).
int CDecD3D11::get_d3d11_buffer(struct AVCodecContext *c, AVFrame *frame, int flags)
{
    CDecD3D11 *pDec = (CDecD3D11 *)c->opaque;
    HRESULT hr = S_OK;

    if (frame->format != AV_PIX_FMT_D3D11)
    {
        DbgLog((LOG_ERROR, 10, L"D3D11 buffer request, but not D3D11 pixfmt"));
        pDec->m_bFailHWDecode = TRUE;
        return -1;
    }

    // make sure the decoder matches the current stream properties
    hr = pDec->ReInitD3D11Decoder(c);
    if (FAILED(hr))
    {
        pDec->m_bFailHWDecode = TRUE;
        return -1;
    }

    if (pDec->m_bReadBackFallback && pDec->m_pFramesCtx)
    {
        // read-back mode: allocate from our own frames context
        int ret = av_hwframe_get_buffer(pDec->m_pFramesCtx, frame, 0);
        frame->width = c->coded_width;
        frame->height = c->coded_height;
        return ret;
    }
    else if (pDec->m_bReadBackFallback == false && pDec->m_pAllocator)
    {
        // native mode: obtain a media sample from the custom D3D11 allocator
        IMediaSample *pSample = nullptr;
        hr = pDec->m_pAllocator->GetBuffer(&pSample, nullptr, nullptr, 0);
        if (SUCCEEDED(hr))
        {
            CD3D11MediaSample *pD3D11Sample = dynamic_cast<CD3D11MediaSample *>(pSample);
            if (pD3D11Sample == nullptr)
            {
                // the allocator handed out an unexpected sample type;
                // previously this pointer was dereferenced without a check
                pSample->Release();
                return -1;
            }

            // fill the frame from the sample, including a reference to the sample
            if (FAILED(pD3D11Sample->GetAVFrameBuffer(frame)))
            {
                pD3D11Sample->Release();
                return -1;
            }
            frame->width = c->coded_width;
            frame->height = c->coded_height;

            // the frame holds the sample now, can release the direct interface
            pD3D11Sample->Release();
            return 0;
        }
    }

    return -1;
}
// (Re-)create the D3D11 decoder when it doesn't exist yet, or when the
// stream dimensions or surface format changed. In native mode this also
// decommits/recommits the allocator so its samples pick up the new frames
// context.
STDMETHODIMP CDecD3D11::ReInitD3D11Decoder(AVCodecContext *c)
{
    HRESULT hr = S_OK;

    // Don't allow decoder creation during first init
    if (m_bInInit)
        return S_FALSE;

    // sanity check that we have a device
    if (m_pDevCtx == nullptr)
        return E_FAIL;

    // we need an allocator at this point
    if (m_bReadBackFallback == false && m_pAllocator == nullptr)
        return E_FAIL;

    if (m_pDecoder == nullptr || m_dwSurfaceWidth != dxva_align_dimensions(c->codec_id, c->coded_width) ||
        m_dwSurfaceHeight != dxva_align_dimensions(c->codec_id, c->coded_height) ||
        m_SurfaceFormat != d3d11va_map_sw_to_hw_format(c->sw_pix_fmt))
    {
        AVD3D11VADeviceContext *pDeviceContext =
            (AVD3D11VADeviceContext *)((AVHWDeviceContext *)m_pDevCtx->data)->hwctx;
        DbgLog((LOG_TRACE, 10, L"No D3D11 Decoder or image dimensions changed -> Re-Allocating resources"));

        // if we're not in readback mode, we need to flush all the frames;
        // in readback mode, deliver what is still queued for display.
        // FIX: braces added - previously the "else" bound to the inner
        // "if (m_pDecoder)" (dangling else), so in readback mode the display
        // queue was never flushed before re-allocating the surfaces.
        if (m_bReadBackFallback == false)
        {
            if (m_pDecoder)
                avcodec_flush_buffers(c);
        }
        else
        {
            FlushDisplayQueue(TRUE);
        }

        // decoder (re)creation must happen under the device lock
        pDeviceContext->lock(pDeviceContext->lock_ctx);
        hr = CreateD3D11Decoder();
        pDeviceContext->unlock(pDeviceContext->lock_ctx);
        if (FAILED(hr))
            return hr;

        // Update the frames context in the allocator
        if (m_bReadBackFallback == false)
        {
            // decommit the allocator
            m_pAllocator->Decommit();

            // verify we were able to decommit all its resources
            if (m_pAllocator->DecommitInProgress())
            {
                DbgLog((LOG_TRACE, 10, L"WARNING! D3D11 Allocator is still busy, trying to flush downstream"));
                m_pCallback->ReleaseAllDXVAResources();
                m_pCallback->GetOutputPin()->GetConnected()->BeginFlush();
                m_pCallback->GetOutputPin()->GetConnected()->EndFlush();
                if (m_pAllocator->DecommitInProgress())
                {
                    DbgLog(
                        (LOG_TRACE, 10, L"WARNING! Flush had no effect, decommit of the allocator still not complete"));
                    m_pAllocator->ForceDecommit();
                }
                else
                {
                    DbgLog((LOG_TRACE, 10, L"Flush was successful, decommit completed!"));
                }
            }

            // re-commit it to update its frame reference
            m_pAllocator->Commit();
        }
    }

    return S_OK;
}
// Find a decoder profile GUID supported by the device for the given codec,
// codec profile and output surface format. Iterates our priority-ordered
// dxva_modes list against the device's advertised profiles.
// @param input  receives the selected profile GUID on S_OK
STDMETHODIMP CDecD3D11::FindVideoServiceConversion(AVCodecID codec, int profile, DXGI_FORMAT surface_format,
                                                   GUID *input)
{
    AVD3D11VADeviceContext *pDeviceContext = (AVD3D11VADeviceContext *)((AVHWDeviceContext *)m_pDevCtx->data)->hwctx;
    HRESULT hr = S_OK;

    // enumerate all decoder profiles the device supports
    UINT nProfiles = pDeviceContext->video_device->GetVideoDecoderProfileCount();
    GUID *guid_list = (GUID *)av_malloc_array(nProfiles, sizeof(*guid_list));
    if (guid_list == nullptr)
        return E_OUTOFMEMORY; // allocation failure was previously unchecked

    DbgLog((LOG_TRACE, 10, L"-> Enumerating supported D3D11 modes (count: %d)", nProfiles));
    for (UINT i = 0; i < nProfiles; i++)
    {
        hr = pDeviceContext->video_device->GetVideoDecoderProfile(i, &guid_list[i]);
        if (FAILED(hr))
        {
            DbgLog((LOG_ERROR, 10, L"Error retrieving decoder profile"));
            av_free(guid_list);
            return hr;
        }

#ifdef DEBUG
        const dxva_mode_t *mode = get_dxva_mode_from_guid(&guid_list[i]);
        if (mode)
        {
            DbgLog((LOG_TRACE, 10, L" -> %S", mode->name));
        }
        else
        {
            DbgLog((LOG_TRACE, 10, L" -> Unknown GUID (%s)", WStringFromGUID(guid_list[i]).c_str()));
        }
#endif
    }

    /* Iterate over our priority list */
    for (unsigned i = 0; dxva_modes[i].name; i++)
    {
        const dxva_mode_t *mode = &dxva_modes[i];
        if (!check_dxva_mode_compatibility(mode, codec, profile, (surface_format == DXGI_FORMAT_NV12)))
            continue;

        // is this mode among the device's profiles?
        BOOL supported = FALSE;
        for (UINT g = 0; !supported && g < nProfiles; g++)
        {
            supported = IsEqualGUID(*mode->guid, guid_list[g]);
        }
        if (!supported)
            continue;

        // verify the profile can decode into the requested surface format
        DbgLog((LOG_TRACE, 10, L"-> Trying to use '%S'", mode->name));
        hr = pDeviceContext->video_device->CheckVideoDecoderFormat(mode->guid, surface_format, &supported);
        if (SUCCEEDED(hr) && supported)
        {
            *input = *mode->guid;
            av_free(guid_list);
            return S_OK;
        }
    }

    av_free(guid_list);
    return E_FAIL;
}
// Select the best decoder configuration for the given decoder description.
// Scoring: ConfigBitstreamRaw == 2 (H.264 short slice) beats == 1, a VP9
// workaround accepts score 0, and unencrypted configurations get a bonus.
STDMETHODIMP CDecD3D11::FindDecoderConfiguration(const D3D11_VIDEO_DECODER_DESC *desc,
                                                 D3D11_VIDEO_DECODER_CONFIG *pConfig)
{
    AVD3D11VADeviceContext *pDeviceContext = (AVD3D11VADeviceContext *)((AVHWDeviceContext *)m_pDevCtx->data)->hwctx;
    HRESULT hr = S_OK;

    UINT nConfig = 0;
    hr = pDeviceContext->video_device->GetVideoDecoderConfigCount(desc, &nConfig);
    if (FAILED(hr))
    {
        DbgLog((LOG_ERROR, 10, L"Unable to retrieve decoder configuration count"));
        return E_FAIL;
    }

    int best_score = -1;
    D3D11_VIDEO_DECODER_CONFIG best_config = {0}; // zero-init to avoid uninitialized reads
    for (UINT i = 0; i < nConfig; i++)
    {
        D3D11_VIDEO_DECODER_CONFIG config = {0};
        hr = pDeviceContext->video_device->GetVideoDecoderConfig(desc, i, &config);
        if (FAILED(hr))
            continue;

        // FIX: the format string was a narrow literal in a wide-char log call
        DbgLog((LOG_ERROR, 10, L"-> Configuration Record %d: ConfigBitstreamRaw = %d", i, config.ConfigBitstreamRaw));

        int score;
        if (config.ConfigBitstreamRaw == 1)
            score = 1;
        else if (m_pAVCtx->codec_id == AV_CODEC_ID_H264 && config.ConfigBitstreamRaw == 2)
            score = 2;
        else if (m_pAVCtx->codec_id == AV_CODEC_ID_VP9) // hack for broken AMD drivers
            score = 0;
        else
            continue;

        // prefer configurations without bitstream encryption
        if (IsEqualGUID(config.guidConfigBitstreamEncryption, DXVA2_NoEncrypt))
            score += 16;

        if (score > best_score)
        {
            best_score = score;
            best_config = config;
        }
    }

    if (best_score < 0)
    {
        DbgLog((LOG_TRACE, 10, L"-> No matching configuration available"));
        return E_FAIL;
    }

    *pConfig = best_config;
    return S_OK;
}
// Create the D3D11 video decoder for the current stream parameters:
// pick a profile and configuration, allocate the texture-array frames
// context plus one decoder output view per surface, clear the surfaces to
// black, and finally create the ID3D11VideoDecoder itself.
STDMETHODIMP CDecD3D11::CreateD3D11Decoder()
{
    HRESULT hr = S_OK;
    AVD3D11VADeviceContext *pDeviceContext = (AVD3D11VADeviceContext *)((AVHWDeviceContext *)m_pDevCtx->data)->hwctx;

    // release the old decoder, it needs to be re-created
    SafeRelease(&m_pDecoder);

    // find a decoder configuration
    GUID profileGUID = GUID_NULL;
    DXGI_FORMAT surface_format = d3d11va_map_sw_to_hw_format(m_pAVCtx->sw_pix_fmt);
    hr = FindVideoServiceConversion(m_pAVCtx->codec_id, m_pAVCtx->profile, surface_format, &profileGUID);
    if (FAILED(hr))
    {
        DbgLog((LOG_ERROR, 10, L"-> No video service profile found"));
        return hr;
    }

    // get decoder configuration
    D3D11_VIDEO_DECODER_DESC desc = {0};
    desc.Guid = profileGUID;
    desc.OutputFormat = surface_format;
    desc.SampleWidth = m_pAVCtx->coded_width;
    desc.SampleHeight = m_pAVCtx->coded_height;

    D3D11_VIDEO_DECODER_CONFIG decoder_config = {0};
    hr = FindDecoderConfiguration(&desc, &decoder_config);
    if (FAILED(hr))
    {
        DbgLog((LOG_ERROR, 10, L"-> No valid video decoder configuration found"));
        return hr;
    }

    m_DecoderConfig = decoder_config;

    // update surface properties
    m_dwSurfaceWidth = dxva_align_dimensions(m_pAVCtx->codec_id, m_pAVCtx->coded_width);
    m_dwSurfaceHeight = dxva_align_dimensions(m_pAVCtx->codec_id, m_pAVCtx->coded_height);
    m_SurfaceFormat = surface_format;

    // surface count: native mode uses the allocator's negotiated buffer
    // count, read-back mode uses our own heuristic
    if (m_bReadBackFallback == false && m_pAllocator)
    {
        ALLOCATOR_PROPERTIES properties;
        hr = m_pAllocator->GetProperties(&properties);
        if (FAILED(hr))
            return hr;
        m_dwSurfaceCount = properties.cBuffers;
    }
    else
    {
        m_dwSurfaceCount = GetBufferCount();
    }

    // allocate a new frames context for the dimensions and format
    hr = AllocateFramesContext(m_dwSurfaceWidth, m_dwSurfaceHeight, m_pAVCtx->sw_pix_fmt, m_dwSurfaceCount,
                               &m_pFramesCtx);
    if (FAILED(hr))
    {
        DbgLog((LOG_ERROR, 10, L"-> Error allocating frames context"));
        return hr;
    }

    // release any old output views and allocate memory for the new ones
    if (m_pOutputViews)
    {
        for (int i = 0; i < m_nOutputViews; i++)
        {
            SafeRelease(&m_pOutputViews[i]);
        }
        av_freep(&m_pOutputViews);
    }

    m_pOutputViews = (ID3D11VideoDecoderOutputView **)av_calloc(m_dwSurfaceCount, sizeof(*m_pOutputViews));
    m_nOutputViews = m_dwSurfaceCount;

    // allocate output views for the frames
    // (one view per array slice of the frames-context texture)
    AVD3D11VAFramesContext *pFramesContext = (AVD3D11VAFramesContext *)((AVHWFramesContext *)m_pFramesCtx->data)->hwctx;
    for (int i = 0; i < m_nOutputViews; i++)
    {
        D3D11_VIDEO_DECODER_OUTPUT_VIEW_DESC viewDesc = {0};
        viewDesc.DecodeProfile = profileGUID;
        viewDesc.ViewDimension = D3D11_VDOV_DIMENSION_TEXTURE2D;
        viewDesc.Texture2D.ArraySlice = i;

        hr = pDeviceContext->video_device->CreateVideoDecoderOutputView(pFramesContext->texture, &viewDesc,
                                                                        &m_pOutputViews[i]);
        if (FAILED(hr))
        {
            DbgLog((LOG_ERROR, 10, L"-> Failed to create video decoder output views"));
            return E_FAIL;
        }
    }

    // flush textures to black
    if (surface_format == DXGI_FORMAT_NV12 || surface_format == DXGI_FORMAT_P010 || surface_format == DXGI_FORMAT_P016)
    {
        D3D11_FEATURE_DATA_D3D11_OPTIONS d3d11Options{};
        pDeviceContext->device->CheckFeatureSupport(D3D11_FEATURE_D3D11_OPTIONS, &d3d11Options,
                                                    sizeof(D3D11_FEATURE_DATA_D3D11_OPTIONS));

        // XXX: The ClearView path does not function properly on Intel GPUs for P010
        // Investigation has shown that contrary to the documentation, Intel transforms the color information,
        // instead of treating the values like integral floats, as required.
        ID3D11DeviceContext1 *pDeviceContext1 = nullptr;
        if (m_AdapterDesc.VendorId != VEND_ID_INTEL && d3d11Options.ClearView &&
            SUCCEEDED(hr = pDeviceContext->device_context->QueryInterface(&pDeviceContext1)))
        {
            // fast path: ClearView on each decoder output view directly
            for (int i = 0; i < m_nOutputViews; i++)
            {
                // clear the Luma channel to zero and Chroma channel to half
                // for both P010/P016, the full 16-bit range value needs to be used due to the nature of their memory layout
                float fChromaBlack = (surface_format == DXGI_FORMAT_NV12) ? 128.0f : 32768.0f;
                const FLOAT ClearYUV[4] = {0.0f, fChromaBlack, fChromaBlack, 0.0f};
                pDeviceContext1->ClearView(m_pOutputViews[i], ClearYUV, nullptr, 0);
            }
            SafeRelease(&pDeviceContext1);
        }
        else
        {
            // fallback path: clear a single helper texture through render
            // target views, then copy it into every array slice
            D3D11_TEXTURE2D_DESC FlushTexDesc{};
            FlushTexDesc.Width = m_dwSurfaceWidth;
            FlushTexDesc.Height = m_dwSurfaceHeight;
            FlushTexDesc.MipLevels = 1;
            FlushTexDesc.ArraySize = 1;
            FlushTexDesc.Format = surface_format;
            FlushTexDesc.SampleDesc.Count = 1;
            FlushTexDesc.Usage = D3D11_USAGE_DEFAULT;
            FlushTexDesc.BindFlags = D3D11_BIND_RENDER_TARGET;
            FlushTexDesc.CPUAccessFlags = 0;
            FlushTexDesc.MiscFlags = 0;

            ID3D11Texture2D *pFlushTexture = NULL;
            if (SUCCEEDED(pDeviceContext->device->CreateTexture2D(&FlushTexDesc, NULL, &pFlushTexture)))
            {
                D3D11_RENDER_TARGET_VIEW_DESC rtvDesc{};
                rtvDesc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
                rtvDesc.Format = surface_format;

                ID3D11RenderTargetView *pRTV = nullptr;

                // clear the Luma channel to zero
                rtvDesc.Format = (surface_format == DXGI_FORMAT_NV12) ? DXGI_FORMAT_R8_UNORM : DXGI_FORMAT_R16_UNORM;
                if (SUCCEEDED(hr = pDeviceContext->device->CreateRenderTargetView(pFlushTexture, &rtvDesc, &pRTV)))
                {
                    const FLOAT ClearYUV[4] = { 0.0f };
                    pDeviceContext->device_context->ClearRenderTargetView(pRTV, ClearYUV);
                    SafeRelease(&pRTV);
                }

                // clear the Chroma channel to half
                rtvDesc.Format = (surface_format == DXGI_FORMAT_NV12) ? DXGI_FORMAT_R8G8_UNORM : DXGI_FORMAT_R16G16_UNORM;
                if (SUCCEEDED(hr = pDeviceContext->device->CreateRenderTargetView(pFlushTexture, &rtvDesc, &pRTV)))
                {
                    const FLOAT ClearYUV[4] = { 0.5f, 0.5f, 0.0f, 0.0f };
                    pDeviceContext->device_context->ClearRenderTargetView(pRTV, ClearYUV);
                    SafeRelease(&pRTV);
                }

                // update all surfaces with the flush color
                for (unsigned i = 0; i < m_dwSurfaceCount; i++)
                {
                    pDeviceContext->device_context->CopySubresourceRegion(pFramesContext->texture, i, 0, 0, 0,
                                                                          pFlushTexture, 0, NULL);
                }

                SafeRelease(&pFlushTexture);
            }
        }

        // flush all pending work
        pDeviceContext->device_context->Flush();
    }

    // create the decoder
    hr = pDeviceContext->video_device->CreateVideoDecoder(&desc, &decoder_config, &m_pDecoder);
    if (FAILED(hr))
    {
        DbgLog((LOG_ERROR, 10, L"-> Failed to create video decoder object"));
        return E_FAIL;
    }

    // refresh the ffmpeg hwaccel context with the new decoder state
    FillHWContext((AVD3D11VAContext *)m_pAVCtx->hwaccel_context);

    return S_OK;
}
// Allocate an ffmpeg AVHWFramesContext describing the decode texture array.
// @param width,height  aligned surface dimensions
// @param format        the decoder's software pixel format (drives sw_format)
// @param nSurfaces     number of array slices to pre-allocate
// @param ppFramesCtx   receives the new frames context; any previous one is unref'ed
STDMETHODIMP CDecD3D11::AllocateFramesContext(int width, int height, AVPixelFormat format, int nSurfaces,
                                              AVBufferRef **ppFramesCtx)
{
    ASSERT(m_pAVCtx);
    ASSERT(m_pDevCtx);
    ASSERT(ppFramesCtx);

    // unref any old buffer
    av_buffer_unref(ppFramesCtx);
    // the staging texture matched the old surfaces, drop it as well
    SafeRelease(&m_pD3D11StagingTexture);

    // allocate a new frames context for the device context
    *ppFramesCtx = av_hwframe_ctx_alloc(m_pDevCtx);
    if (*ppFramesCtx == nullptr)
        return E_OUTOFMEMORY;

    AVHWFramesContext *pFrames = (AVHWFramesContext *)(*ppFramesCtx)->data;
    pFrames->format = AV_PIX_FMT_D3D11;
    // NOTE(review): only 10-bit YUV420 maps to P010; every other sw format
    // silently becomes NV12 - confirm that is intended for 12-bit content.
    pFrames->sw_format = (format == AV_PIX_FMT_YUV420P10) ? AV_PIX_FMT_P010 : AV_PIX_FMT_NV12;
    pFrames->width = width;
    pFrames->height = height;
    pFrames->initial_pool_size = nSurfaces;

    AVD3D11VAFramesContext *pFramesHWContext = (AVD3D11VAFramesContext *)pFrames->hwctx;
    // decoder output that can also be bound as a shader resource, and is
    // created as a shared resource
    pFramesHWContext->BindFlags |= D3D11_BIND_DECODER | D3D11_BIND_SHADER_RESOURCE;
    pFramesHWContext->MiscFlags |= D3D11_RESOURCE_MISC_SHARED;

    int ret = av_hwframe_ctx_init(*ppFramesCtx);
    if (ret < 0)
    {
        av_buffer_unref(ppFramesCtx);
        return E_FAIL;
    }

    return S_OK;
}
// Receive a decoded D3D11 frame. In read-back mode frames pass through a
// small ring buffer ("display queue") before delivery; in native mode (or
// with zero delay) they are delivered immediately.
HRESULT CDecD3D11::HandleDXVA2Frame(LAVFrame *pFrame)
{
    // flush marker: drain the queue first, then pass the marker downstream
    if (pFrame->flags & LAV_FRAME_FLAG_FLUSH)
    {
        if (m_bReadBackFallback)
        {
            FlushDisplayQueue(TRUE);
        }
        Deliver(pFrame);
        return S_OK;
    }

    ASSERT(pFrame->format == LAVPixFmt_D3D11);

    if (m_bReadBackFallback == false || m_DisplayDelay == 0)
    {
        DeliverD3D11Frame(pFrame);
    }
    else
    {
        // swap the new frame into the current ring slot and deliver the
        // frame that previously occupied it (if any)
        LAVFrame *pQueuedFrame = m_FrameQueue[m_FrameQueuePosition];
        m_FrameQueue[m_FrameQueuePosition] = pFrame;
        m_FrameQueuePosition = (m_FrameQueuePosition + 1) % m_DisplayDelay;

        if (pQueuedFrame)
        {
            DeliverD3D11Frame(pQueuedFrame);
        }
    }
    return S_OK;
}
// Dispatch one decoded frame: native texture delivery, or one of the two
// read-back paths (direct staging copy vs. full hwframe transfer).
HRESULT CDecD3D11::DeliverD3D11Frame(LAVFrame *pFrame)
{
    if (!m_bReadBackFallback)
    {
        // native mode: hand the texture (stored in AVFrame data[3]) downstream
        AVFrame *pAVFrame = (AVFrame *)pFrame->priv_data;
        pFrame->data[0] = pAVFrame->data[3];
        pFrame->data[1] = nullptr;
        pFrame->data[2] = nullptr;
        pFrame->data[3] = nullptr;
        GetPixelFormat(&pFrame->format, &pFrame->bpp);
        Deliver(pFrame);
    }
    else if (m_bDirect)
    {
        DeliverD3D11ReadbackDirect(pFrame);
    }
    else
    {
        DeliverD3D11Readback(pFrame);
    }
    return S_OK;
}
// Non-direct read-back: copy the GPU frame into a new system-memory AVFrame
// via av_hwframe_transfer_data(), then deliver that frame downstream.
HRESULT CDecD3D11::DeliverD3D11Readback(LAVFrame *pFrame)
{
    AVFrame *src = (AVFrame *)pFrame->priv_data;
    AVFrame *dst = av_frame_alloc();
    if (dst == nullptr)
    {
        // allocation failure was previously unchecked and would have been
        // passed into av_hwframe_transfer_data
        ReleaseFrame(&pFrame);
        return E_OUTOFMEMORY;
    }

    int ret = av_hwframe_transfer_data(dst, src, 0);
    if (ret < 0)
    {
        ReleaseFrame(&pFrame);
        av_frame_free(&dst);
        return E_FAIL;
    }

    // free the source frame
    av_frame_free(&src);

    // and store the dst frame in LAVFrame
    pFrame->priv_data = dst;
    GetPixelFormat(&pFrame->format, &pFrame->bpp);

    ASSERT((dst->format == AV_PIX_FMT_NV12 && pFrame->format == LAVPixFmt_NV12) ||
           (dst->format == AV_PIX_FMT_P010 && pFrame->format == LAVPixFmt_P016));

    // expose the plane pointers and strides of the CPU frame
    for (int i = 0; i < 4; i++)
    {
        pFrame->data[i] = dst->data[i];
        pFrame->stride[i] = dst->linesize[i];
    }

    return Deliver(pFrame);
}
// Private payload attached to a LAVFrame delivered in "direct" read-back
// mode: a reference on the ffmpeg device context (keeps the D3D11 device
// alive) and on the staging texture that d3d11_direct_lock() maps for CPU
// access. NOTE(review): "pDeviceContex" is missing the trailing 't'; kept
// as-is because the lock/unlock/free callbacks use this name.
struct D3D11DirectPrivate
{
    AVBufferRef *pDeviceContex;
    ID3D11Texture2D *pStagingTexture;
};
// LAVFrame direct-lock callback: map the staging texture for CPU reads and
// expose the plane pointers/strides. The device lock taken here is held
// until d3d11_direct_unlock().
// NOTE(review): the plane layout assumes a bi-planar surface (NV12/P010
// style) with chroma following luma after Height rows - confirm before
// using with any other surface format.
static bool d3d11_direct_lock(LAVFrame *pFrame, LAVDirectBuffer *pBuffer)
{
    D3D11DirectPrivate *c = (D3D11DirectPrivate *)pFrame->priv_data;
    AVD3D11VADeviceContext *pDeviceContext =
        (AVD3D11VADeviceContext *)((AVHWDeviceContext *)c->pDeviceContex->data)->hwctx;
    D3D11_TEXTURE2D_DESC desc;
    D3D11_MAPPED_SUBRESOURCE map;

    ASSERT(pFrame && pBuffer);

    // lock the device context
    pDeviceContext->lock(pDeviceContext->lock_ctx);

    c->pStagingTexture->GetDesc(&desc);

    // map
    HRESULT hr = pDeviceContext->device_context->Map(c->pStagingTexture, 0, D3D11_MAP_READ, 0, &map);
    if (FAILED(hr))
    {
        // release the device lock again before bailing
        pDeviceContext->unlock(pDeviceContext->lock_ctx);
        return false;
    }

    // luma plane at the start, chroma plane after Height rows
    pBuffer->data[0] = (BYTE *)map.pData;
    pBuffer->data[1] = pBuffer->data[0] + desc.Height * map.RowPitch;

    pBuffer->stride[0] = map.RowPitch;
    pBuffer->stride[1] = map.RowPitch;
    return true;
}
// LAVFrame direct-unlock callback: undo the Map and the device lock taken
// in d3d11_direct_lock().
static void d3d11_direct_unlock(LAVFrame *pFrame)
{
    D3D11DirectPrivate *priv = (D3D11DirectPrivate *)pFrame->priv_data;
    AVHWDeviceContext *pHWDevCtx = (AVHWDeviceContext *)priv->pDeviceContex->data;
    AVD3D11VADeviceContext *pDeviceContext = (AVD3D11VADeviceContext *)pHWDevCtx->hwctx;

    pDeviceContext->device_context->Unmap(priv->pStagingTexture, 0);
    pDeviceContext->unlock(pDeviceContext->lock_ctx);
}
// LAVFrame destructor for direct read-back frames: drop the device-context
// reference and the staging texture, then the private struct itself.
static void d3d11_direct_free(LAVFrame *pFrame)
{
    D3D11DirectPrivate *priv = (D3D11DirectPrivate *)pFrame->priv_data;

    av_buffer_unref(&priv->pDeviceContex);
    priv->pStagingTexture->Release();
    delete priv;
}
// "Direct" read-back delivery: copy the decoded array slice into a reusable
// CPU-readable staging texture and hand the frame downstream with lazy
// lock/unlock callbacks, so the Map() only happens when the consumer
// actually touches the pixels.
HRESULT CDecD3D11::DeliverD3D11ReadbackDirect(LAVFrame *pFrame)
{
    AVD3D11VADeviceContext *pDeviceContext = (AVD3D11VADeviceContext *)((AVHWDeviceContext *)m_pDevCtx->data)->hwctx;
    AVFrame *src = (AVFrame *)pFrame->priv_data;

    // lazily create the staging texture, matching the decode texture
    // (it is dropped whenever the frames context is re-allocated)
    if (m_pD3D11StagingTexture == nullptr)
    {
        D3D11_TEXTURE2D_DESC texDesc = {0};
        // AVFrame data[0] holds the ID3D11Texture2D of the decoded frame
        ((ID3D11Texture2D *)src->data[0])->GetDesc(&texDesc);
        texDesc.ArraySize = 1;
        texDesc.Usage = D3D11_USAGE_STAGING;
        texDesc.BindFlags = 0;
        texDesc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
        texDesc.MiscFlags = 0;

        HRESULT hr = pDeviceContext->device->CreateTexture2D(&texDesc, nullptr, &m_pD3D11StagingTexture);
        if (FAILED(hr))
        {
            ReleaseFrame(&pFrame);
            return E_FAIL;
        }
    }

    // copy the decoded slice (array index in data[1]) into the staging texture
    pDeviceContext->lock(pDeviceContext->lock_ctx);
    pDeviceContext->device_context->CopySubresourceRegion(
        m_pD3D11StagingTexture, 0, 0, 0, 0, (ID3D11Texture2D *)src->data[0], (UINT)(intptr_t)src->data[1], nullptr);
    pDeviceContext->unlock(pDeviceContext->lock_ctx);

    // the GPU-side frame is no longer needed after the copy
    av_frame_free(&src);

    // attach the private payload: a device-context ref plus a shared
    // reference on the staging texture (freed in d3d11_direct_free)
    D3D11DirectPrivate *c = new D3D11DirectPrivate;
    c->pDeviceContex = av_buffer_ref(m_pDevCtx);
    c->pStagingTexture = m_pD3D11StagingTexture;
    m_pD3D11StagingTexture->AddRef();
    pFrame->priv_data = c;
    pFrame->destruct = d3d11_direct_free;

    GetPixelFormat(&pFrame->format, &pFrame->bpp);

    pFrame->direct = true;
    pFrame->direct_lock = d3d11_direct_lock;
    pFrame->direct_unlock = d3d11_direct_unlock;

    return Deliver(pFrame);
}
// Report the delivered pixel format and bit depth.
// Output is always NV12 or P010
STDMETHODIMP CDecD3D11::GetPixelFormat(LAVPixelFormat *pPix, int *pBpp)
{
    const bool bHighBitDepth = (m_SurfaceFormat == DXGI_FORMAT_P010 || m_SurfaceFormat == DXGI_FORMAT_P016);

    if (pPix)
    {
        if (m_bReadBackFallback == false)
            *pPix = LAVPixFmt_D3D11;
        else
            *pPix = bHighBitDepth ? LAVPixFmt_P016 : LAVPixFmt_NV12;
    }

    if (pBpp)
    {
        if (m_SurfaceFormat == DXGI_FORMAT_P016)
            *pBpp = 16;
        else if (m_SurfaceFormat == DXGI_FORMAT_P010)
            *pBpp = 10;
        else
            *pBpp = 8;
    }

    return S_OK;
}
// Count the usable DXGI adapters, stopping at the Microsoft software
// adapter (vendor 0x1414, device 0x8c), which always enumerates last.
STDMETHODIMP_(DWORD) CDecD3D11::GetHWAccelNumDevices()
{
    IDXGIFactory1 *pDXGIFactory = nullptr;
    HRESULT hr = dx.mCreateDXGIFactory1(IID_IDXGIFactory1, (void **)&pDXGIFactory);
    if (FAILED(hr))
        return 0;

    UINT i = 0;
    IDXGIAdapter *pDXGIAdapter = nullptr;
    DXGI_ADAPTER_DESC desc;
    while (SUCCEEDED(pDXGIFactory->EnumAdapters(i, &pDXGIAdapter)))
    {
        pDXGIAdapter->GetDesc(&desc);
        SafeRelease(&pDXGIAdapter);

        // stop when we hit the MS software device
        if (desc.VendorId == 0x1414 && desc.DeviceId == 0x8c)
            break;
        i++;
    }

    SafeRelease(&pDXGIFactory);
    return i;
}
// Report the display name and device id of the DXGI adapter at dwIndex.
// Returns E_INVALIDARG for the Microsoft software adapter, or the DXGI
// error when the index is out of range. The BSTR is owned by the caller.
STDMETHODIMP CDecD3D11::GetHWAccelDeviceInfo(DWORD dwIndex, BSTR *pstrDeviceName, DWORD *dwDeviceIdentifier)
{
    IDXGIAdapter *pDXGIAdapter = nullptr;
    IDXGIFactory1 *pDXGIFactory = nullptr;

    HRESULT hr = dx.mCreateDXGIFactory1(IID_IDXGIFactory1, (void **)&pDXGIFactory);
    if (FAILED(hr))
        goto fail;

    hr = pDXGIFactory->EnumAdapters(dwIndex, &pDXGIAdapter);
    if (FAILED(hr))
        goto fail;

    DXGI_ADAPTER_DESC desc;
    pDXGIAdapter->GetDesc(&desc);

    // stop when we hit the MS software device
    if (desc.VendorId == 0x1414 && desc.DeviceId == 0x8c)
    {
        hr = E_INVALIDARG;
        goto fail;
    }

    if (pstrDeviceName)
        *pstrDeviceName = SysAllocString(desc.Description);
    if (dwDeviceIdentifier)
        *dwDeviceIdentifier = desc.DeviceId;

fail:
    SafeRelease(&pDXGIFactory);
    SafeRelease(&pDXGIAdapter);
    return hr;
}
// Report the description of the adapter currently in use (captured in
// m_AdapterDesc during device creation). The BSTR is owned by the caller.
STDMETHODIMP CDecD3D11::GetHWAccelActiveDevice(BSTR *pstrDeviceName)
{
    CheckPointer(pstrDeviceName, E_POINTER);

    // an empty description means no device has been created yet
    const WCHAR *pszDescription = m_AdapterDesc.Description;
    if (pszDescription[0] == 0)
        return E_UNEXPECTED;

    *pstrDeviceName = SysAllocString(pszDescription);
    return S_OK;
}
| 45,157
|
C++
|
.cpp
| 1,228
| 29.112378
| 124
| 0.628383
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| true
| true
| false
|
22,180
|
quicksync.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/decoders/quicksync.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "quicksync.h"
#include "moreuuids.h"
#include "parsers/H264SequenceParser.h"
#include "parsers/MPEG2HeaderParser.h"
#include "parsers/VC1HeaderParser.h"
#include "Media.h"
#include <Shlwapi.h>
#include <dxva2api.h>
#include <evr.h>
// Timestamp padding to avoid an issue with negative timestamps
// 10 hours should be enough padding to take care of all eventualities
#define RTPADDING 360000000000i64
////////////////////////////////////////////////////////////////////////////////
// Constructor
////////////////////////////////////////////////////////////////////////////////
// Factory function: creates a new Intel QuickSync decoder instance.
// The caller takes ownership of the returned object.
ILAVDecoder *CreateDecoderQuickSync()
{
    return new CDecQuickSync();
}
////////////////////////////////////////////////////////////////////////////////
// Codec FourCC map
////////////////////////////////////////////////////////////////////////////////
static const FOURCC FourCC_MPG1 = mmioFOURCC('M', 'P', 'G', '1');
static const FOURCC FourCC_MPG2 = mmioFOURCC('M', 'P', 'G', '2');
static const FOURCC FourCC_VC1 = mmioFOURCC('W', 'V', 'C', '1');
static const FOURCC FourCC_WMV3 = mmioFOURCC('W', 'M', 'V', '3');
static const FOURCC FourCC_H264 = mmioFOURCC('H', '2', '6', '4');
static const FOURCC FourCC_AVC1 = mmioFOURCC('A', 'V', 'C', '1');
// Map of ffmpeg codec IDs to the FourCC codes used by the QuickSync SDK.
// NOTE(review): FourCC_MPG1 and FourCC_AVC1 are declared above but have no
// entry here - confirm whether they are used elsewhere in this file.
static struct
{
    AVCodecID ffcodec;
    FOURCC fourCC;
} quicksync_codecs[] = {
    {AV_CODEC_ID_MPEG2VIDEO, FourCC_MPG2},
    {AV_CODEC_ID_VC1, FourCC_VC1},
    {AV_CODEC_ID_WMV3, FourCC_WMV3},
    {AV_CODEC_ID_H264, FourCC_H264},
};
////////////////////////////////////////////////////////////////////////////////
// CQSMediaSample Implementation
////////////////////////////////////////////////////////////////////////////////
// Minimal IMediaSample implementation wrapping an externally-owned buffer,
// used to feed compressed data into the QuickSync decoder. It does NOT own
// or free the buffer, only tracks timestamps/flags and its own lifetime via
// COM reference counting (starts with a refcount of 1).
class CQSMediaSample : public IMediaSample
{
  public:
    // pBuffer: externally-owned data pointer; len: its size in bytes
    CQSMediaSample(BYTE *pBuffer, long len)
        : m_pBuffer(pBuffer)
        , m_lLen(len)
        , m_lActualLen(len)
    {
    }

    // IUnknown
    // NOTE(review): ppvObject is dereferenced without a null check - most
    // COM implementations return E_POINTER for a null out pointer.
    STDMETHODIMP QueryInterface(REFIID riid, void **ppvObject)
    {
        if (riid == IID_IUnknown)
        {
            AddRef();
            *ppvObject = (IUnknown *)this;
        }
        else if (riid == IID_IMediaSample)
        {
            AddRef();
            *ppvObject = (IMediaSample *)this;
        }
        else
        {
            return E_NOINTERFACE;
        }
        return S_OK;
    }
    STDMETHODIMP_(ULONG) AddRef()
    {
        LONG lRef = InterlockedIncrement(&m_cRef);
        // never report 0 to the caller
        return max(ULONG(lRef), 1ul);
    }
    STDMETHODIMP_(ULONG) Release()
    {
        LONG lRef = InterlockedDecrement(&m_cRef);
        if (lRef == 0)
        {
            // bump the count before delete so re-entrant Release calls from
            // the destructor cannot trigger double deletion
            m_cRef++;
            delete this;
            return 0;
        }
        return max(ULONG(lRef), 1ul);
    }

    // IMediaSample
    STDMETHODIMP GetPointer(BYTE **ppBuffer)
    {
        CheckPointer(ppBuffer, E_POINTER);
        *ppBuffer = m_pBuffer;
        return S_OK;
    }
    STDMETHODIMP_(long) GetSize(void) { return m_lLen; }
    // Reports the stored start/stop times; when only the start time is set,
    // a 1-tick stop time is synthesized and VFW_S_NO_STOP_TIME is returned.
    STDMETHODIMP GetTime(REFERENCE_TIME *pTimeStart, REFERENCE_TIME *pTimeEnd)
    {
        CheckPointer(pTimeStart, E_POINTER);
        CheckPointer(pTimeEnd, E_POINTER);

        if (m_rtStart != AV_NOPTS_VALUE)
        {
            *pTimeStart = m_rtStart;
            if (m_rtStop != AV_NOPTS_VALUE)
            {
                *pTimeEnd = m_rtStop;
                return S_OK;
            }

            *pTimeEnd = m_rtStart + 1;
            return VFW_S_NO_STOP_TIME;
        }
        return VFW_E_SAMPLE_TIME_NOT_SET;
    }
    // A null pTimeStart clears both timestamps (per IMediaSample semantics)
    STDMETHODIMP SetTime(REFERENCE_TIME *pTimeStart, REFERENCE_TIME *pTimeEnd)
    {
        if (!pTimeStart)
        {
            m_rtStart = m_rtStop = AV_NOPTS_VALUE;
        }
        else
        {
            m_rtStart = *pTimeStart;
            if (!pTimeEnd)
                m_rtStop = AV_NOPTS_VALUE;
            else
                m_rtStop = *pTimeEnd;
        }
        return S_OK;
    }
    STDMETHODIMP IsSyncPoint(void) { return m_bSyncPoint ? S_OK : S_FALSE; }
    STDMETHODIMP SetSyncPoint(BOOL bIsSyncPoint)
    {
        m_bSyncPoint = bIsSyncPoint;
        return S_OK;
    }
    // preroll and media-time bookkeeping are intentionally unsupported
    STDMETHODIMP IsPreroll(void) { return E_NOTIMPL; }
    STDMETHODIMP SetPreroll(BOOL bIsPreroll) { return E_NOTIMPL; }
    STDMETHODIMP_(long) GetActualDataLength(void) { return m_lActualLen; }
    STDMETHODIMP SetActualDataLength(long length)
    {
        m_lActualLen = length;
        return S_OK;
    }
    // no per-sample media type changes are tracked
    STDMETHODIMP GetMediaType(AM_MEDIA_TYPE **ppMediaType) { return S_FALSE; }
    STDMETHODIMP SetMediaType(AM_MEDIA_TYPE *pMediaType) { return E_NOTIMPL; }
    STDMETHODIMP IsDiscontinuity(void) { return m_bDiscontinuity ? S_OK : S_FALSE; }
    STDMETHODIMP SetDiscontinuity(BOOL bDiscontinuity)
    {
        m_bDiscontinuity = bDiscontinuity;
        return S_OK;
    }
    STDMETHODIMP GetMediaTime(LONGLONG *pTimeStart, LONGLONG *pTimeEnd) { return E_NOTIMPL; }
    STDMETHODIMP SetMediaTime(LONGLONG *pTimeStart, LONGLONG *pTimeEnd) { return E_NOTIMPL; }

  private:
    BYTE *m_pBuffer = nullptr;         // external data buffer (not owned)
    long m_lLen = 0;                   // total buffer size
    long m_lActualLen = 0;             // valid data length within the buffer
    REFERENCE_TIME m_rtStart = AV_NOPTS_VALUE;
    REFERENCE_TIME m_rtStop = AV_NOPTS_VALUE;
    BOOL m_bSyncPoint = FALSE;
    BOOL m_bDiscontinuity = FALSE;
    ULONG m_cRef = 1;                  // COM refcount, starts at 1
};
// Proxy that forwards IDirect3DDeviceManager9 calls to the manager exposed by
// the connected downstream pin. The real manager is acquired lazily on first
// use via IMFGetService (MR_VIDEO_ACCELERATION_SERVICE).
class CIDirect3DDeviceManager9Proxy : public IDirect3DDeviceManager9
{
  public:
    CIDirect3DDeviceManager9Proxy(IPin *pPin)
        : m_pPin(pPin)
    {
    }
    ~CIDirect3DDeviceManager9Proxy() { SafeRelease(&m_D3DManager); }
// Lazily create the real device manager before forwarding any call.
#define CREATE_DEVICE \
    if (!m_D3DManager) \
    { \
        if (FAILED(CreateDeviceManager())) \
            return E_FAIL; \
    }
    // IUnknown
    HRESULT STDMETHODCALLTYPE QueryInterface(REFIID riid, void **ppvObject)
    {
        CREATE_DEVICE;
        return m_D3DManager->QueryInterface(riid, ppvObject);
    }
    ULONG STDMETHODCALLTYPE AddRef(void)
    {
        ULONG lRef = InterlockedIncrement(&m_cRef);
        return max(ULONG(lRef), 1ul);
    }
    ULONG STDMETHODCALLTYPE Release(void)
    {
        ULONG lRef = InterlockedDecrement(&m_cRef);
        if (lRef == 0)
        {
            // Guard against re-entrant Release during destruction.
            m_cRef++;
            delete this;
            return 0;
        }
        return max(ULONG(lRef), 1ul);
    }
    // IDirect3DDeviceManager9 - all calls forward to the lazily-created manager.
    HRESULT STDMETHODCALLTYPE ResetDevice(IDirect3DDevice9 *pDevice, UINT resetToken)
    {
        CREATE_DEVICE;
        return m_D3DManager->ResetDevice(pDevice, resetToken);
    }
    HRESULT STDMETHODCALLTYPE OpenDeviceHandle(HANDLE *phDevice)
    {
        CREATE_DEVICE;
        return m_D3DManager->OpenDeviceHandle(phDevice);
    }
    HRESULT STDMETHODCALLTYPE CloseDeviceHandle(HANDLE hDevice)
    {
        CREATE_DEVICE;
        return m_D3DManager->CloseDeviceHandle(hDevice);
    }
    HRESULT STDMETHODCALLTYPE TestDevice(HANDLE hDevice)
    {
        CREATE_DEVICE;
        return m_D3DManager->TestDevice(hDevice);
    }
    HRESULT STDMETHODCALLTYPE LockDevice(HANDLE hDevice, IDirect3DDevice9 **ppDevice, BOOL fBlock)
    {
        CREATE_DEVICE;
        return m_D3DManager->LockDevice(hDevice, ppDevice, fBlock);
    }
    HRESULT STDMETHODCALLTYPE UnlockDevice(HANDLE hDevice, BOOL fSaveState)
    {
        CREATE_DEVICE;
        return m_D3DManager->UnlockDevice(hDevice, fSaveState);
    }
    HRESULT STDMETHODCALLTYPE GetVideoService(HANDLE hDevice, REFIID riid, void **ppService)
    {
        CREATE_DEVICE;
        return m_D3DManager->GetVideoService(hDevice, riid, ppService);
    }
  private:
    // Query the pin for its D3D device manager and cache it in m_D3DManager.
    HRESULT STDMETHODCALLTYPE CreateDeviceManager()
    {
        DbgLog((LOG_TRACE, 10, L"CIDirect3DDeviceManager9Proxy::CreateDeviceManager()"));
        HRESULT hr = S_OK;
        IMFGetService *pGetService = nullptr;
        // Declared before the first goto: jumping over an initialized
        // declaration with "goto done" is ill-formed in standard C++.
        IDirect3DDeviceManager9 *pDevMgr = nullptr;
        hr = m_pPin->QueryInterface(__uuidof(IMFGetService), (void **)&pGetService);
        if (FAILED(hr))
        {
            DbgLog((LOG_ERROR, 10, L"-> IMFGetService not available"));
            goto done;
        }
        // Get the Direct3D device manager.
        hr = pGetService->GetService(MR_VIDEO_ACCELERATION_SERVICE, __uuidof(IDirect3DDeviceManager9),
                                     (void **)&pDevMgr);
        if (FAILED(hr))
        {
            DbgLog((LOG_ERROR, 10, L"-> D3D Device Manager not available"));
            goto done;
        }
        m_D3DManager = pDevMgr;
    done:
        SafeRelease(&pGetService);
        return hr;
    }
  private:
    IDirect3DDeviceManager9 *m_D3DManager = nullptr;
    IPin *m_pPin = nullptr;
    ULONG m_cRef = 1;
};
////////////////////////////////////////////////////////////////////////////////
// QuickSync decoder implementation
////////////////////////////////////////////////////////////////////////////////
// Zero-initialize the function-pointer table and cached DXVA format.
CDecQuickSync::CDecQuickSync(void)
    : CDecBase()
{
    ZeroMemory(&qs, sizeof(qs));
    ZeroMemory(&m_DXVAExtendedFormat, sizeof(m_DXVAExtendedFormat));
}
// Full teardown: destroys the decoder and unloads the QuickSync library.
CDecQuickSync::~CDecQuickSync(void)
{
    DestroyDecoder(true);
}
// Tear down the decoder instance; with bFull additionally release the D3D
// device-manager proxy and unload the QuickSync wrapper library.
STDMETHODIMP CDecQuickSync::DestroyDecoder(bool bFull)
{
    if (m_pDecoder)
    {
        qs.destroy(m_pDecoder);
        m_pDecoder = nullptr;
    }
    if (bFull)
    {
        SafeRelease(&m_pD3DDevMngr);
        FreeLibrary(qs.quickSyncLib);
        // Clear the stale module handle and function pointers so a later
        // Init() reloads the library instead of reusing freed state
        // (mirrors CDecCuvid::DestroyDecoder, which zeroes its table).
        ZeroMemory(&qs, sizeof(qs));
    }
    return S_OK;
}
// ILAVDecoder
// ILAVDecoder
// Load the QuickSync wrapper DLL (shipped next to this module) and resolve
// its entry points. Idempotent: a previously loaded library is reused.
STDMETHODIMP CDecQuickSync::Init()
{
    DbgLog((LOG_TRACE, 10, L"CDecQuickSync::Init(): Trying to open QuickSync decoder"));
    // QuickSync requires SSE 4.1 capable CPUs; bail out early otherwise.
    int flags = av_get_cpu_flags();
    if (!(flags & AV_CPU_FLAG_SSE4))
    {
        DbgLog((LOG_TRACE, 10, L"-> CPU is not SSE 4.1 capable, this is not even worth a try...."));
        return E_FAIL;
    }
    if (!qs.quickSyncLib)
    {
        // Build the DLL path relative to this module's own location.
        WCHAR wModuleFile[1024];
        GetModuleFileName(g_hInst, wModuleFile, 1024);
        PathRemoveFileSpecW(wModuleFile);
        wcscat_s(wModuleFile, TEXT("\\") TEXT(QS_DEC_DLL_NAME));
        qs.quickSyncLib = LoadLibrary(wModuleFile);
        if (qs.quickSyncLib == nullptr)
        {
            DWORD dwError = GetLastError();
            DbgLog((LOG_ERROR, 10, L"-> Loading of " TEXT(QS_DEC_DLL_NAME) L" failed (%d)", dwError));
            return E_FAIL;
        }
        // Resolve the three required entry points; all must be present.
        qs.create = (pcreateQuickSync *)GetProcAddress(qs.quickSyncLib, "createQuickSync");
        if (qs.create == nullptr)
        {
            DbgLog((LOG_ERROR, 10, L"-> Failed to load function \"createQuickSync\""));
            return E_FAIL;
        }
        qs.destroy = (pdestroyQuickSync *)GetProcAddress(qs.quickSyncLib, "destroyQuickSync");
        if (qs.destroy == nullptr)
        {
            DbgLog((LOG_ERROR, 10, L"-> Failed to load function \"destroyQuickSync\""));
            return E_FAIL;
        }
        qs.check = (pcheck *)GetProcAddress(qs.quickSyncLib, "check");
        if (qs.check == nullptr)
        {
            DbgLog((LOG_ERROR, 10, L"-> Failed to load function \"check\""));
            return E_FAIL;
        }
    }
    return S_OK;
}
// Called after the output pin is connected: hand the decoder a proxy device
// manager so it can obtain the renderer's D3D device on demand.
STDMETHODIMP CDecQuickSync::PostConnect(IPin *pPin)
{
    DbgLog((LOG_TRACE, 10, L"CDecQuickSync::PostConnect()"));
    // The decoder instance must exist before we can configure it.
    if (!m_pDecoder)
        return E_UNEXPECTED;
    // Release the previous manager (if any)
    SafeRelease(&m_pD3DDevMngr);
    // Create our proxy object; the real manager is acquired lazily from pPin.
    m_pD3DDevMngr = new CIDirect3DDeviceManager9Proxy(pPin);
    // Tell the QuickSync decoder about it
    m_pDecoder->SetD3DDeviceManager(m_pD3DDevMngr);
    return S_OK;
}
// Probe the QuickSync runtime for hardware-acceleration support.
STDMETHODIMP CDecQuickSync::Check()
{
    if (!qs.check)
        return E_FAIL;
    const DWORD dwCaps = qs.check();
    if ((dwCaps & QS_CAP_HW_ACCELERATION) == 0)
    {
        DbgLog((LOG_TRACE, 10, L"-> Decoder records no HW acceleration"));
        return E_FAIL;
    }
    return S_OK;
}
// Scan an H.264 bitstream chunk for an SPS and validate it against QuickSync
// limits (<= High profile, 4:2:0 chroma, 8-bit). Returns S_OK if a compatible
// SPS was found, S_FALSE if no SPS was present, E_FAIL if incompatible.
// Optionally reports ref-frame count, profile and level from the SPS.
STDMETHODIMP CDecQuickSync::CheckH264Sequence(const BYTE *buffer, size_t buflen, int nal_size, int *pRefFrames,
                                              int *pProfile, int *pLevel)
{
    DbgLog((LOG_TRACE, 10, L"CDecQuickSync::CheckH264Sequence(): Checking H264 frame for SPS"));
    CH264SequenceParser h264parser;
    h264parser.ParseNALs(buffer, buflen, nal_size);
    if (h264parser.sps.valid)
    {
        // Cache stream properties gleaned from the SPS for later use.
        m_bInterlaced = h264parser.sps.interlaced;
        fillDXVAExtFormat(m_DXVAExtendedFormat, h264parser.sps.full_range, h264parser.sps.primaries,
                          h264parser.sps.colorspace, h264parser.sps.trc);
        if (pRefFrames)
            *pRefFrames = h264parser.sps.ref_frames;
        if (pProfile)
            *pProfile = h264parser.sps.profile;
        if (pLevel)
            *pLevel = h264parser.sps.level;
        DbgLog((LOG_TRACE, 10, L"-> SPS found"));
        // profile > 100 (High) or non-4:2:0/8-bit streams are unsupported.
        if (h264parser.sps.profile > 100 || h264parser.sps.chroma != 1 || h264parser.sps.luma_bitdepth != 8 ||
            h264parser.sps.chroma_bitdepth != 8)
        {
            DbgLog((LOG_TRACE, 10,
                    L"  -> SPS indicates video incompatible with QuickSync, aborting (profile: %d, chroma: %d, "
                    L"bitdepth: %d/%d)",
                    h264parser.sps.profile, h264parser.sps.chroma, h264parser.sps.luma_bitdepth,
                    h264parser.sps.chroma_bitdepth));
            return E_FAIL;
        }
        DbgLog((LOG_TRACE, 10, L"-> Video seems compatible with QuickSync"));
        return S_OK;
    }
    return S_FALSE;
}
// Create and configure a QuickSync decoder instance for the given codec and
// media type. Validates bitstream compatibility (via extradata parsing where
// available), configures deinterlacing/queueing, and massages the media type
// so the QuickSync library accepts it.
STDMETHODIMP CDecQuickSync::InitDecoder(AVCodecID codec, const CMediaType *pmt)
{
    HRESULT hr = S_OK;
    DbgLog((LOG_TRACE, 10, L"CDecQuickSync::InitDecoder(): Initializing QuickSync decoder"));
    DestroyDecoder(false);
    // Map the FFmpeg codec id to the QuickSync FourCC.
    FOURCC fourCC = (FOURCC)0;
    for (int i = 0; i < countof(quicksync_codecs); i++)
    {
        if (quicksync_codecs[i].ffcodec == codec)
        {
            fourCC = quicksync_codecs[i].fourCC;
            break;
        }
    }
    if (fourCC == 0)
    {
        DbgLog((LOG_TRACE, 10, L"-> Codec id %d does not map to a QuickSync FourCC codec", codec));
        return E_FAIL;
    }
    m_pDecoder = qs.create();
    if (!m_pDecoder || !m_pDecoder->getOK())
    {
        DbgLog((LOG_TRACE, 10, L"-> Decoder creation failed"));
        return E_FAIL;
    }
    // Decoded frames arrive through QS_DeliverSurfaceCallback.
    m_pDecoder->SetDeliverSurfaceCallback(this, &QS_DeliverSurfaceCallback);
    m_nAVCNalSize = 0;
    // AVC1-style (length-prefixed) H.264 needs the NAL size from the format.
    if (pmt->subtype == MEDIASUBTYPE_AVC1 || pmt->subtype == MEDIASUBTYPE_avc1 || pmt->subtype == MEDIASUBTYPE_CCV1)
    {
        if (pmt->formattype == FORMAT_MPEG2Video)
        {
            MPEG2VIDEOINFO *mp2vi = (MPEG2VIDEOINFO *)pmt->pbFormat;
            fourCC = FourCC_AVC1;
            m_bAVC1 = TRUE;
            m_nAVCNalSize = mp2vi->dwFlags;
        }
        else
        {
            DbgLog((LOG_TRACE, 10, L"-> AVC1 without MPEG2VIDEOINFO not supported"));
            return E_FAIL;
        }
    }
    // Pull the codec extradata out of the media type, if present.
    BYTE *extradata = nullptr;
    size_t extralen = 0;
    getExtraData(*pmt, nullptr, &extralen);
    if (extralen > 0)
    {
        extradata = (BYTE *)av_malloc(extralen + AV_INPUT_BUFFER_PADDING_SIZE);
        if (extradata == nullptr)
            return E_OUTOFMEMORY;
        getExtraData(*pmt, extradata, nullptr);
    }
    m_bNeedSequenceCheck = FALSE;
    m_bInterlaced = TRUE;
    m_bUseTimestampQueue = m_pCallback->GetDecodeFlags() & LAV_VIDEO_DEC_FLAG_ONLY_DTS;
    int ref_frames = 0;
    int profile = 0;
    int level = 0;
    // Parse the extradata (codec-specific) to validate compatibility and to
    // learn interlacing / ref-frame properties up front.
    if (extralen > 0)
    {
        if (fourCC == FourCC_AVC1 || fourCC == FourCC_H264)
        {
            hr = CheckH264Sequence(extradata, extralen, m_bAVC1 ? 2 : 0, &ref_frames, &profile, &level);
            if (FAILED(hr))
            {
                return VFW_E_UNSUPPORTED_VIDEO;
            }
            else if (hr == S_FALSE)
            {
                // No SPS in the extradata: re-check on the first frames.
                m_bNeedSequenceCheck = TRUE;
            }
        }
        else if (fourCC == FourCC_MPG2)
        {
            DbgLog((LOG_TRACE, 10, L"-> Scanning extradata for MPEG2 sequence header"));
            CMPEG2HeaderParser mpeg2parser(extradata, extralen);
            if (mpeg2parser.hdr.valid)
            {
                if (mpeg2parser.hdr.chroma >= 2)
                {
                    DbgLog((LOG_TRACE, 10, L"  -> Sequence header indicates incompatible chroma sampling (chroma: %d)",
                            mpeg2parser.hdr.chroma));
                    return VFW_E_UNSUPPORTED_VIDEO;
                }
                m_bInterlaced = mpeg2parser.hdr.interlaced;
            }
        }
        else if (fourCC == FourCC_VC1)
        {
            CVC1HeaderParser vc1Parser(extradata, extralen);
            m_bInterlaced = vc1Parser.hdr.interlaced;
        }
    }
    else
    {
        m_bNeedSequenceCheck = (fourCC == FourCC_H264);
    }
    // Done with the extradata
    if (extradata)
        av_freep(&extradata);
    // Configure QuickSync decoder
    CQsConfig qsConfig;
    m_pDecoder->GetConfig(&qsConfig);
    // Timestamp correction is only used for VC-1 codecs which send PTS
    // because this is not handled properly by the API (it expects DTS)
    qsConfig.bTimeStampCorrection =
        1; //(codec == AV_CODEC_ID_VC1 && !(m_pCallback->GetDecodeFlags() & LAV_VIDEO_DEC_FLAG_ONLY_DTS));
    // Configure number of buffers (dependant on ref_frames)
    // MPEG2 and VC1 always use "low latency" mode
    if (ref_frames > 8 || qsConfig.bTimeStampCorrection)
        qsConfig.nOutputQueueLength = 8;
    else
        qsConfig.nOutputQueueLength = 0;
    // Disallow software fallback
    qsConfig.bEnableSwEmulation = false;
    // Enable DVD support
    qsConfig.bEnableDvdDecoding = true;
    // We want the pure image, no mod-16 padding
    qsConfig.bMod16Width = false;
    // Configure threading
    qsConfig.bEnableMultithreading = true;
    qsConfig.bEnableMtCopy = true;
    // Configure video processing
    qsConfig.vpp = 0;
    m_bDI = m_pSettings->GetHWAccelDeintMode() == HWDeintMode_Hardware && !m_pSettings->GetDeintTreatAsProgressive();
    qsConfig.bEnableVideoProcessing = m_bDI ? true : false;
    qsConfig.bVppEnableDeinterlacing = m_bDI ? true : false;
    qsConfig.bVppEnableFullRateDI = m_pSettings->GetHWAccelDeintOutput() == DeintOutput_FramePerField;
    qsConfig.bVppEnableDITimeStampsInterpolation = true;
    qsConfig.bVppEnableForcedDeinterlacing =
        m_bDI && ((m_bInterlaced && m_pSettings->GetDeinterlacingMode() == DeintMode_Aggressive) ||
                  m_pSettings->GetDeinterlacingMode() == DeintMode_Force);
    qsConfig.bForceFieldOrder = m_pSettings->GetDeintFieldOrder() != DeintFieldOrder_Auto;
    qsConfig.eFieldOrder = (QsFieldOrder)m_pSettings->GetDeintFieldOrder();
    // Save!
    m_pDecoder->SetConfig(&qsConfig);
    CMediaType mt = *pmt;
    // Fixup media type - the QuickSync decoder is a bit picky about this.
    // We usually do not trust the media type information and instead scan the bitstream.
    // This ensures that we only ever send valid and supported data to the decoder,
    // so with this we try to circumvent the checks in the QuickSync decoder
    mt.SetType(&MEDIATYPE_Video);
    MPEG2VIDEOINFO *mp2vi = (*mt.FormatType() == FORMAT_MPEG2Video) ? (MPEG2VIDEOINFO *)mt.Format() : nullptr;
    BITMAPINFOHEADER *bmi = nullptr;
    videoFormatTypeHandler(mt.Format(), mt.FormatType(), &bmi);
    switch (fourCC)
    {
    case FourCC_MPG2:
        mt.SetSubtype(&MEDIASUBTYPE_MPEG2_VIDEO);
        if (mp2vi)
            mp2vi->dwProfile = 4;
        break;
    case FourCC_AVC1:
    case FourCC_H264:
        if (mp2vi)
        {
            // Fall back to High@4.1 when no SPS values were parsed.
            mp2vi->dwProfile = profile ? profile : 100;
            mp2vi->dwLevel = level ? level : 41;
        }
        break;
    case FourCC_VC1:
        if (mp2vi)
            mp2vi->dwProfile = 3;
        bmi->biCompression = fourCC;
        break;
    case FourCC_WMV3:
        mt.SetSubtype(&MEDIASUBTYPE_WMV3);
        if (mp2vi)
            mp2vi->dwProfile = 0;
        bmi->biCompression = fourCC;
        break;
    }
    hr = m_pDecoder->TestMediaType(&mt, fourCC);
    if (hr != S_OK)
    {
        DbgLog((LOG_TRACE, 10, L"-> TestMediaType failed"));
        return E_FAIL;
    }
    hr = m_pDecoder->InitDecoder(&mt, fourCC);
    if (hr != S_OK)
    {
        DbgLog((LOG_TRACE, 10, L"-> InitDecoder failed"));
        return E_FAIL;
    }
    m_Codec = fourCC;
    return S_OK;
}
// Feed one compressed buffer to the decoder. Handles deferred H.264 SPS
// validation, MPEG-2 end-of-sequence splitting, timestamp offsetting
// (RTPADDING keeps timestamps non-negative for the QuickSync API), and the
// DTS-only timestamp queue. pMediaSample is unused; the data is rewrapped
// in a lightweight CQSMediaSample.
STDMETHODIMP CDecQuickSync::Decode(const BYTE *buffer, int buflen, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop,
                                   BOOL bSyncPoint, BOOL bDiscontinuity, IMediaSample *pMediaSample)
{
    HRESULT hr;
    // If no SPS was seen yet, keep scanning incoming frames for one.
    if (m_bNeedSequenceCheck && (m_Codec == FourCC_H264 || m_Codec == FourCC_AVC1))
    {
        hr = CheckH264Sequence(buffer, buflen, m_nAVCNalSize);
        if (FAILED(hr))
        {
            return E_FAIL;
        }
        else if (hr == S_OK)
        {
            m_bNeedSequenceCheck = FALSE;
        }
    }
    if (m_Codec == FourCC_MPG2)
    {
        const uint8_t *eosmarker = nullptr;
        const uint8_t *end = buffer + buflen;
        int status = CheckForSequenceMarkers(AV_CODEC_ID_MPEG2VIDEO, buffer, buflen, &m_MpegParserState, &eosmarker);
        // If we found a EOS marker, but its not at the end of the packet, then split the packet
        // to be able to individually decode the frame before the EOS, and then decode the remainder
        if (status & STATE_EOS_FOUND && eosmarker && eosmarker != end)
        {
            Decode(buffer, (int)(eosmarker - buffer), rtStart, rtStop, bSyncPoint, bDiscontinuity, nullptr);
            rtStart = rtStop = AV_NOPTS_VALUE;
            buffer = eosmarker;
            buflen = (int)(end - eosmarker);
        }
        else if (eosmarker)
        {
            m_bEndOfSequence = TRUE;
        }
    }
    // Shift timestamps into the non-negative range expected by QuickSync;
    // the callback subtracts RTPADDING again on delivery.
    if (rtStart != AV_NOPTS_VALUE)
    {
        rtStart += RTPADDING;
        if (rtStart < 0)
            rtStart = 0;
        if (rtStop != AV_NOPTS_VALUE)
        {
            rtStop += RTPADDING;
            if (rtStop < 0)
                rtStop = AV_NOPTS_VALUE;
        }
    }
    if (m_bUseTimestampQueue)
    {
        m_timestampQueue.push(rtStart);
    }
    // Wrap the input in a ref-counted sample (created with refcount 1,
    // released after the decode call).
    IMediaSample *pSample = new CQSMediaSample(const_cast<BYTE *>(buffer), buflen);
    pSample->SetTime(&rtStart, &rtStop);
    pSample->SetDiscontinuity(bDiscontinuity);
    pSample->SetSyncPoint(bSyncPoint);
    hr = m_pDecoder->Decode(pSample);
    SafeRelease(&pSample);
    // An EOS marker at the packet end: drain the decoder and emit a flush frame.
    if (m_bEndOfSequence)
    {
        m_bEndOfSequence = FALSE;
        EndOfStream();
        Deliver(m_pCallback->GetFlushFrame());
    }
    return hr;
}
// Static callback invoked by the QuickSync library for each decoded surface.
// Restores timestamps (queue-based in DTS-only mode, otherwise undoes the
// RTPADDING shift applied in Decode) and forwards the frame to HandleFrame.
HRESULT CDecQuickSync::QS_DeliverSurfaceCallback(void *obj, QsFrameData *data)
{
    CDecQuickSync *filter = (CDecQuickSync *)obj;
    if (filter->m_bUseTimestampQueue)
    {
        // DTS-only mode: timestamps come from our FIFO, not the decoder.
        if (filter->m_timestampQueue.empty())
        {
            data->rtStart = AV_NOPTS_VALUE;
        }
        else
        {
            data->rtStart = filter->m_timestampQueue.front();
            filter->m_timestampQueue.pop();
        }
        data->rtStop = AV_NOPTS_VALUE;
    }
    if (data->rtStart != AV_NOPTS_VALUE && data->rtStart > 0)
    {
        data->rtStart -= RTPADDING;
        if (data->rtStop != AV_NOPTS_VALUE)
            data->rtStop -= RTPADDING;
    }
    else
    {
        data->rtStop = AV_NOPTS_VALUE;
    }
    filter->HandleFrame(data);
    return S_OK;
}
// Convert a decoded QuickSync surface into a LAVFrame (NV12) and deliver it.
STDMETHODIMP CDecQuickSync::HandleFrame(QsFrameData *data)
{
    // Setup the LAVFrame
    LAVFrame *pFrame = nullptr;
    AllocateFrame(&pFrame);
    pFrame->format = LAVPixFmt_NV12;
    // rcClip appears to use inclusive coordinates (hence the +1).
    pFrame->width = data->rcClip.right - data->rcClip.left + 1;
    pFrame->height = data->rcClip.bottom - data->rcClip.top + 1;
    pFrame->rtStart = data->rtStart;
    // Only trust the stop time if it lies meaningfully after the start time.
    pFrame->rtStop = (data->rtStop - 1 > data->rtStart) ? data->rtStop : AV_NOPTS_VALUE;
    pFrame->repeat = !!(data->dwInterlaceFlags & AM_VIDEO_FLAG_REPEAT_FIELD);
    pFrame->aspect_ratio.num = data->dwPictAspectRatioX;
    pFrame->aspect_ratio.den = data->dwPictAspectRatioY;
    pFrame->ext_format = m_DXVAExtendedFormat;
    pFrame->interlaced = !(data->dwInterlaceFlags & AM_VIDEO_FLAG_WEAVE);
    pFrame->avgFrameDuration = GetFrameDuration();
    LAVDeintFieldOrder fo = m_pSettings->GetDeintFieldOrder();
    pFrame->tff = (fo == DeintFieldOrder_Auto) ? !!(data->dwInterlaceFlags & AM_VIDEO_FLAG_FIELD1FIRST)
                                               : (fo == DeintFieldOrder_TopFieldFirst);
    // Assign the buffer to the LAV Frame bufers
    pFrame->data[0] = data->y;
    pFrame->data[1] = data->u;
    pFrame->stride[0] = pFrame->stride[1] = data->dwStride;
    // Latch interlaced state once any interlaced frame is seen.
    if (!m_bInterlaced && pFrame->interlaced)
        m_bInterlaced = TRUE;
    // Apply the user's deinterlacing mode; hardware DI (m_bDI) means frames
    // are already deinterlaced and must not be flagged interlaced downstream.
    pFrame->interlaced =
        (pFrame->interlaced || (m_bInterlaced && m_pSettings->GetDeinterlacingMode() == DeintMode_Aggressive) ||
         m_pSettings->GetDeinterlacingMode() == DeintMode_Force) &&
        !(m_pSettings->GetDeinterlacingMode() == DeintMode_Disable) && !m_bDI;
    if (m_bEndOfSequence)
        pFrame->flags |= LAV_FRAME_FLAG_END_OF_SEQUENCE;
    m_pCallback->Deliver(pFrame);
    return S_OK;
}
// Flush the decoder on a seek: discard pending output and queued timestamps.
STDMETHODIMP CDecQuickSync::Flush()
{
    DbgLog((LOG_TRACE, 10, L"CDecQuickSync::Flush(): Flushing QuickSync decoder"));
    m_pDecoder->BeginFlush();
    m_pDecoder->OnSeek(0);
    m_pDecoder->EndFlush();
    // Clear timestamp queue
    std::queue<REFERENCE_TIME>().swap(m_timestampQueue);
    return __super::Flush();
}
// Drain the decoder: deliver all pending frames, then reset its position.
STDMETHODIMP CDecQuickSync::EndOfStream()
{
    m_pDecoder->Flush(true);
    m_pDecoder->OnSeek(0);
    return S_OK;
}
// The QuickSync decoder always delivers NV12 at 8 bits per component.
STDMETHODIMP CDecQuickSync::GetPixelFormat(LAVPixelFormat *pPix, int *pBpp)
{
    if (pPix != nullptr)
        *pPix = LAVPixFmt_NV12;
    if (pBpp != nullptr)
        *pBpp = 8;
    return S_OK;
}
// Average frame duration from the input media type; halved when hardware
// deinterlacing produces one frame per field.
STDMETHODIMP_(REFERENCE_TIME) CDecQuickSync::GetFrameDuration()
{
    CMediaType &mt = m_pCallback->GetInputMediaType();
    REFERENCE_TIME rtDuration = 0;
    videoFormatTypeHandler(mt.Format(), mt.FormatType(), nullptr, &rtDuration, nullptr, nullptr);
    return (m_bInterlaced && m_bDI && m_pSettings->GetHWAccelDeintOutput() == DeintOutput_FramePerField)
               ? rtDuration / 2
               : rtDuration;
}
// Report interlaced output; always FALSE when hardware DI already
// deinterlaces. bAllowGuess is unused by this decoder.
STDMETHODIMP_(BOOL) CDecQuickSync::IsInterlaced(BOOL bAllowGuess)
{
    return (m_bInterlaced || m_pSettings->GetDeinterlacingMode() == DeintMode_Force) && !m_bDI;
}
| 26,932
|
C++
|
.cpp
| 763
| 28.131062
| 119
| 0.608542
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| false
| true
| true
| false
|
22,181
|
cuvid.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/decoders/cuvid.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "cuvid.h"
#include "moreuuids.h"
#include "parsers/H264SequenceParser.h"
#include "parsers/MPEG2HeaderParser.h"
#include "parsers/VC1HeaderParser.h"
#include "parsers/HEVCSequenceParser.h"
#include "Media.h"
////////////////////////////////////////////////////////////////////////////////
// Constructor
////////////////////////////////////////////////////////////////////////////////
// Factory: create a new CUVID decoder instance (caller takes ownership).
ILAVDecoder *CreateDecoderCUVID()
{
    return new CDecCuvid();
}
////////////////////////////////////////////////////////////////////////////////
// CUVID codec map
////////////////////////////////////////////////////////////////////////////////
// Mapping from FFmpeg codec ids to the corresponding CUVID codec enum.
static struct
{
    AVCodecID ffcodec;
    cudaVideoCodec cudaCodec;
} cuda_codecs[] = {
    {AV_CODEC_ID_MPEG1VIDEO, cudaVideoCodec_MPEG1}, {AV_CODEC_ID_MPEG2VIDEO, cudaVideoCodec_MPEG2},
    {AV_CODEC_ID_VC1, cudaVideoCodec_VC1},          {AV_CODEC_ID_H264, cudaVideoCodec_H264},
    {AV_CODEC_ID_MPEG4, cudaVideoCodec_MPEG4},      {AV_CODEC_ID_HEVC, cudaVideoCodec_HEVC},
    {AV_CODEC_ID_VP9, cudaVideoCodec_VP9},
};
////////////////////////////////////////////////////////////////////////////////
// Compatibility tables
////////////////////////////////////////////////////////////////////////////////
// PCI device ids at or above this value are assumed "Level C" capable unless
// blacklisted below; older devices must appear in the whitelist instead.
#define LEVEL_C_LOW_LIMIT 0x0A20
static DWORD LevelCBlacklist[] = {
    0x0A22, 0x0A67, // Geforce 315, no VDPAU at all
    0x0A68, 0x0A69, // Geforce G105M, only B
    0x0CA0, 0x0CA7, // Geforce GT 330, only A
    0x0CAC,         // Geforce GT 220, no VDPAU
    0x10C3          // Geforce 8400GS, only A
};
static DWORD LevelCWhitelist[] = {
    0x06C0,         // Geforce GTX 480
    0x06C4,         // Geforce GTX 465
    0x06CA,         // Geforce GTX 480M
    0x06CD,         // Geforce GTX 470
    0x08A5,         // Geforce 320M
    0x06D8, 0x06DC, // Quadro 6000
    0x06D9,         // Quadro 5000
    0x06DA,         // Quadro 5000M
    0x06DD,         // Quadro 4000
    0x06D1,         // Tesla C2050 / C2070
    0x06D2,         // Tesla M2070
    0x06DE,         // Tesla T20 Processor
    0x06DF,         // Tesla M2070-Q
};
// Determine whether a GPU (by PCI device id) supports VDPAU feature set C:
// modern ids pass unless blacklisted, older ids only pass when whitelisted.
static BOOL IsLevelC(DWORD deviceId)
{
    if (deviceId >= LEVEL_C_LOW_LIMIT)
    {
        for (int i = 0; i < countof(LevelCBlacklist); i++)
        {
            if (LevelCBlacklist[i] == deviceId)
                return FALSE;
        }
        return TRUE;
    }
    for (int i = 0; i < countof(LevelCWhitelist); i++)
    {
        if (LevelCWhitelist[i] == deviceId)
            return TRUE;
    }
    return FALSE;
}
////////////////////////////////////////////////////////////////////////////////
// CUVID decoder implementation
////////////////////////////////////////////////////////////////////////////////
// Zero-initialize the CUDA function table and cached format/decoder state.
CDecCuvid::CDecCuvid(void)
    : CDecBase()
{
    ZeroMemory(&cuda, sizeof(cuda));
    ZeroMemory(&m_VideoFormat, sizeof(m_VideoFormat));
    ZeroMemory(&m_DXVAExtendedFormat, sizeof(m_DXVAExtendedFormat));
    ZeroMemory(&m_VideoDecoderInfo, sizeof(m_VideoDecoderInfo));
}
// Full teardown: destroys decoder/parser, CUDA context and unloads the DLLs.
CDecCuvid::~CDecCuvid(void)
{
    DestroyDecoder(true);
}
// Destroy the decoder, parser and host buffer (under the context lock).
// With bFull, also tear down the CUDA context, D3D9 objects and unload the
// CUDA/CUVID libraries, zeroing the function table for a clean re-Init().
STDMETHODIMP CDecCuvid::DestroyDecoder(bool bFull)
{
    if (m_cudaCtxLock)
        cuda.cuvidCtxLock(m_cudaCtxLock, 0);
    if (m_AnnexBConverter)
    {
        SAFE_DELETE(m_AnnexBConverter);
    }
    if (m_hDecoder)
    {
        cuda.cuvidDestroyDecoder(m_hDecoder);
        m_hDecoder = 0;
    }
    if (m_hParser)
    {
        cuda.cuvidDestroyVideoParser(m_hParser);
        m_hParser = 0;
    }
    if (m_pbRawNV12)
    {
        cuda.cuMemFreeHost(m_pbRawNV12);
        m_pbRawNV12 = nullptr;
        m_cRawNV12 = 0;
    }
    if (m_cudaCtxLock)
        cuda.cuvidCtxUnlock(m_cudaCtxLock, 0);
    if (bFull)
    {
        if (m_cudaCtxLock)
        {
            cuda.cuvidCtxLockDestroy(m_cudaCtxLock);
            m_cudaCtxLock = 0;
        }
        if (m_cudaContext)
        {
            cuda.cuCtxDestroy(m_cudaContext);
            m_cudaContext = 0;
        }
        SafeRelease(&m_pD3DDevice9);
        SafeRelease(&m_pD3D9);
        FreeLibrary(cuda.cudaLib);
        FreeLibrary(cuda.cuvidLib);
        ZeroMemory(&cuda, sizeof(cuda));
    }
    return S_OK;
}
// Helpers for resolving CUDA/CUVID entry points by name. The _OPT variants
// tolerate a missing symbol; the strict variants log and fail. The _V2
// variants resolve the "<name>_v2" export under the plain name.
#define STRINGIFY(X) #X
#define GET_PROC_EX_OPT(name, lib) cuda.name = (t##name *)GetProcAddress(lib, #name);
#define GET_PROC_EX(name, lib) \
    GET_PROC_EX_OPT(name, lib) \
    if (cuda.name == nullptr) \
    { \
        DbgLog((LOG_ERROR, 10, L"-> Failed to load function \"%s\"", TEXT(#name))); \
        return E_FAIL; \
    }
#define GET_PROC_EX_OPT_V2(name, lib) cuda.name = (t##name *)GetProcAddress(lib, STRINGIFY(name##_v2));
#define GET_PROC_EX_V2(name, lib) \
    GET_PROC_EX_OPT_V2(name, lib) \
    if (cuda.name == nullptr) \
    { \
        DbgLog((LOG_ERROR, 10, L"-> Failed to load function \"%s\"", TEXT(STRINGIFY(name##_v2)))); \
        return E_FAIL; \
    }
#define GET_PROC_CUDA(name) GET_PROC_EX(name, cuda.cudaLib)
#define GET_PROC_CUDA_V2(name) GET_PROC_EX_V2(name, cuda.cudaLib)
#define GET_PROC_CUVID(name) GET_PROC_EX(name, cuda.cuvidLib)
#define GET_PROC_CUVID_V2(name) GET_PROC_EX_V2(name, cuda.cuvidLib)
// Load nvcuda.dll and nvcuvid.dll and resolve all required entry points into
// the `cuda` function table. Fails if either DLL or any symbol is missing.
STDMETHODIMP CDecCuvid::LoadCUDAFuncRefs()
{
    // Load CUDA functions
    cuda.cudaLib = LoadLibrary(L"nvcuda.dll");
    if (cuda.cudaLib == nullptr)
    {
        DbgLog((LOG_TRACE, 10, L"-> Loading nvcuda.dll failed"));
        return E_FAIL;
    }
    GET_PROC_CUDA(cuInit);
    GET_PROC_CUDA_V2(cuCtxCreate);
    GET_PROC_CUDA_V2(cuCtxDestroy);
    GET_PROC_CUDA_V2(cuCtxPushCurrent);
    GET_PROC_CUDA_V2(cuCtxPopCurrent);
    GET_PROC_CUDA_V2(cuD3D9CtxCreate);
    GET_PROC_CUDA_V2(cuMemAllocHost);
    GET_PROC_CUDA(cuMemFreeHost);
    GET_PROC_CUDA_V2(cuMemcpyDtoH);
    GET_PROC_CUDA(cuDeviceGetCount);
    GET_PROC_CUDA(cuDriverGetVersion);
    GET_PROC_CUDA(cuDeviceGetName);
    GET_PROC_CUDA(cuDeviceComputeCapability);
    GET_PROC_CUDA(cuDeviceGetAttribute);
    // Load CUVID function
    cuda.cuvidLib = LoadLibrary(L"nvcuvid.dll");
    if (cuda.cuvidLib == nullptr)
    {
        DbgLog((LOG_TRACE, 10, L"-> Loading nvcuvid.dll failed"));
        return E_FAIL;
    }
    GET_PROC_CUVID(cuvidCtxLockCreate);
    GET_PROC_CUVID(cuvidCtxLockDestroy);
    GET_PROC_CUVID(cuvidCtxLock);
    GET_PROC_CUVID(cuvidCtxUnlock);
    GET_PROC_CUVID(cuvidCreateVideoParser);
    GET_PROC_CUVID(cuvidParseVideoData);
    GET_PROC_CUVID(cuvidDestroyVideoParser);
    GET_PROC_CUVID(cuvidCreateDecoder);
    GET_PROC_CUVID(cuvidDecodePicture);
    GET_PROC_CUVID(cuvidDestroyDecoder);
#ifdef _M_AMD64
    // 64-bit builds export the map/unmap functions under *64 names.
    GET_PROC_CUVID(cuvidMapVideoFrame64);
    GET_PROC_CUVID(cuvidUnmapVideoFrame64);
    cuda.cuvidMapVideoFrame = cuda.cuvidMapVideoFrame64;
    cuda.cuvidUnmapVideoFrame = cuda.cuvidUnmapVideoFrame64;
#else
    GET_PROC_CUVID(cuvidMapVideoFrame);
    GET_PROC_CUVID(cuvidUnmapVideoFrame);
#endif
    return S_OK;
}
// Send an end-of-stream packet to the CUVID parser to drain pending frames.
// The parser call is wrapped in SEH (__try) because buggy drivers have been
// observed to raise structured exceptions here.
STDMETHODIMP CDecCuvid::FlushParser()
{
    CUVIDSOURCEDATAPACKET pCuvidPacket;
    memset(&pCuvidPacket, 0, sizeof(pCuvidPacket));
    pCuvidPacket.flags |= CUVID_PKT_ENDOFSTREAM;
    CUresult result = CUDA_SUCCESS;
    cuda.cuvidCtxLock(m_cudaCtxLock, 0);
    __try
    {
        result = cuda.cuvidParseVideoData(m_hParser, &pCuvidPacket);
    }
    __except (1)
    {
        DbgLog((LOG_ERROR, 10, L"cuvidFlushParser(): cuvidParseVideoData threw an exception"));
        result = CUDA_ERROR_UNKNOWN;
    }
    cuda.cuvidCtxUnlock(m_cudaCtxLock, 0);
    return result;
}
// Beginning of GPU Architecture definitions
// Map an SM compute-capability version (major.minor) to the number of CUDA
// cores per streaming multiprocessor; returns -1 for unknown versions.
static int _ConvertSMVer2CoresDrvApi(int major, int minor)
{
    struct SMToCores
    {
        int SM; // 0xMm (hexadecimal notation), M = SM Major version, and m = SM minor version
        int Cores;
    };
    static const SMToCores kGpuArchCoresPerSM[] = {{0x10, 8},   {0x11, 8},   {0x12, 8},   {0x13, 8},   {0x20, 32},
                                                   {0x21, 48},  {0x30, 192}, {0x32, 192}, {0x35, 192}, {0x37, 192},
                                                   {0x50, 128}, {0x52, 128}, {0x61, 128}, {0x70, 64},  {0x75, 64},
                                                   {0x80, 128}, {0x86, 128}, {0x89, 128}};
    const int smVersion = (major << 4) + minor;
    for (const auto &entry : kGpuArchCoresPerSM)
    {
        if (entry.SM == smVersion)
            return entry.Cores;
    }
    DbgLog((LOG_ERROR, 10, L"MapSMtoCores undefined SMversion %d.%d!", major, minor));
    return -1;
}
// Pick the CUDA device with the highest estimated compute throughput
// (multiprocessors * cores/SM * clock) among non-TCC (graphics) devices,
// preferring devices of the newest major SM architecture present.
int CDecCuvid::GetMaxGflopsGraphicsDeviceId()
{
    CUdevice current_device = 0, max_perf_device = 0;
    int device_count = 0, sm_per_multiproc = 0;
    int best_SM_arch = 0;
    int64_t max_compute_perf = 0;
    int major = 0, minor = 0, multiProcessorCount, clockRate;
    int bTCC = 0, version;
    char deviceName[256];
    cuda.cuDeviceGetCount(&device_count);
    if (device_count <= 0)
        return -1;
    cuda.cuDriverGetVersion(&version);
    // Find the best major SM Architecture GPU device that are graphics devices
    while (current_device < device_count)
    {
        cuda.cuDeviceGetName(deviceName, 256, current_device);
        cuda.cuDeviceComputeCapability(&major, &minor, current_device);
        if (version >= 3020)
        {
            cuda.cuDeviceGetAttribute(&bTCC, CU_DEVICE_ATTRIBUTE_TCC_DRIVER, current_device);
        }
        else
        {
            // Assume a Tesla GPU is running in TCC if we are running CUDA 3.1
            if (deviceName[0] == 'T')
                bTCC = 1;
        }
        if (!bTCC)
        {
            if (major > 0 && major < 9999)
            {
                best_SM_arch = max(best_SM_arch, major);
            }
        }
        current_device++;
    }
    // Find the best CUDA capable GPU device
    current_device = 0;
    while (current_device < device_count)
    {
        cuda.cuDeviceGetAttribute(&multiProcessorCount, CU_DEVICE_ATTRIBUTE_MULTIPROCESSOR_COUNT, current_device);
        cuda.cuDeviceGetAttribute(&clockRate, CU_DEVICE_ATTRIBUTE_CLOCK_RATE, current_device);
        cuda.cuDeviceComputeCapability(&major, &minor, current_device);
        if (version >= 3020)
        {
            cuda.cuDeviceGetAttribute(&bTCC, CU_DEVICE_ATTRIBUTE_TCC_DRIVER, current_device);
        }
        else
        {
            // Fetch the name of the device actually being examined; the old
            // code reused the stale name left over from the first loop.
            cuda.cuDeviceGetName(deviceName, 256, current_device);
            // Assume a Tesla GPU is running in TCC if we are running CUDA 3.1
            if (deviceName[0] == 'T')
                bTCC = 1;
        }
        if (major == 9999 && minor == 9999)
        {
            sm_per_multiproc = 1;
        }
        else
        {
            sm_per_multiproc = _ConvertSMVer2CoresDrvApi(major, minor);
        }
        // If this is a Tesla based GPU and SM 2.0, and TCC is disabled, this is a contender
        if (!bTCC) // Is this GPU running the TCC driver? If so we pass on this
        {
            int64_t compute_perf = int64_t(multiProcessorCount * sm_per_multiproc) * clockRate;
            if (compute_perf > max_compute_perf)
            {
                // If we find GPU with SM major > 2, search only these
                if (best_SM_arch > 2)
                {
                    // If our device = dest_SM_arch, then we pick this one
                    if (major == best_SM_arch)
                    {
                        max_compute_perf = compute_perf;
                        max_perf_device = current_device;
                    }
                }
                else
                {
                    max_compute_perf = compute_perf;
                    max_perf_device = current_device;
                }
            }
#ifdef DEBUG
            cuda.cuDeviceGetName(deviceName, 256, current_device);
            DbgLog((LOG_TRACE, 10, L"CUDA Device (%d): %S, Compute: %d.%d, CUDA Cores: %d, Clock: %d MHz",
                    current_device, deviceName, major, minor, multiProcessorCount * sm_per_multiproc,
                    clockRate / 1000));
#endif
        }
        ++current_device;
    }
    return max_perf_device;
}
// ILAVDecoder
// ILAVDecoder
// Initialize CUDA: load the libraries, pick a device (user-selected index or
// fastest graphics device), create a context (D3D9-interop when possible),
// and convert it into a floating context guarded by a CUVID lock.
STDMETHODIMP CDecCuvid::Init()
{
    DbgLog((LOG_TRACE, 10, L"CDecCuvid::Init(): Trying to open CUVID device"));
    HRESULT hr = S_OK;
    CUresult cuStatus = CUDA_SUCCESS;
    hr = LoadCUDAFuncRefs();
    if (FAILED(hr))
    {
        DbgLog((LOG_ERROR, 10, L"-> Loading CUDA interfaces failed (hr: 0x%x)", hr));
        return hr;
    }
    cuStatus = cuda.cuInit(0);
    if (cuStatus != CUDA_SUCCESS)
    {
        DbgLog((LOG_ERROR, 10, L"-> cuInit failed (status: %d)", cuStatus));
        return E_FAIL;
    }
    // TODO: select best device
    int best_device = GetMaxGflopsGraphicsDeviceId();
    // A user-configured device index overrides the automatic choice.
    DWORD dwDeviceIndex = m_pCallback->GetGPUDeviceIndex();
    if (dwDeviceIndex != DWORD_MAX)
    {
        best_device = (int)dwDeviceIndex;
    }
select_device:
    // Try a D3D9-interop context first (needed for HQ deinterlacing);
    // fall back to a plain CUDA context if that is unavailable.
    hr = InitD3D9(best_device, dwDeviceIndex);
    if (hr != S_OK)
    {
        if (FAILED(hr))
        {
            DbgLog((LOG_TRACE, 10, L"-> No D3D device available, building non-D3D context on device %d", best_device));
        }
        cuStatus = cuda.cuCtxCreate(&m_cudaContext, CU_CTX_SCHED_BLOCKING_SYNC, best_device);
        if (cuStatus == CUDA_SUCCESS)
        {
            int major, minor;
            cuda.cuDeviceComputeCapability(&major, &minor, best_device);
            m_bVDPAULevelC = (major >= 2);
            cuda.cuDeviceGetName(m_cudaDeviceName, sizeof(m_cudaDeviceName), best_device);
            DbgLog((LOG_TRACE, 10, L"InitCUDA(): pure CUDA context of device with compute %d.%d", major, minor));
        }
    }
    // Requested device does not exist: retry with automatic selection.
    if (cuStatus == CUDA_ERROR_INVALID_DEVICE && dwDeviceIndex != DWORD_MAX)
    {
        DbgLog((LOG_TRACE, 10,
                L"-> Specific device was requested, but no match was found, re-trying automatic selection"));
        dwDeviceIndex = DWORD_MAX;
        best_device = GetMaxGflopsGraphicsDeviceId();
        goto select_device;
    }
    if (cuStatus == CUDA_SUCCESS)
    {
        // Switch to a floating context
        CUcontext curr_ctx = nullptr;
        cuStatus = cuda.cuCtxPopCurrent(&curr_ctx);
        if (cuStatus != CUDA_SUCCESS)
        {
            DbgLog((LOG_ERROR, 10, L"-> Storing context on the stack failed with error %d", cuStatus));
            return E_FAIL;
        }
        cuStatus = cuda.cuvidCtxLockCreate(&m_cudaCtxLock, m_cudaContext);
        if (cuStatus != CUDA_SUCCESS)
        {
            DbgLog((LOG_ERROR, 10, L"-> Creation of floating context failed with error %d", cuStatus));
            return E_FAIL;
        }
    }
    else
    {
        DbgLog((LOG_TRACE, 10, L"-> Creation of CUDA context failed with error %d", cuStatus));
        return E_FAIL;
    }
    return S_OK;
}
// Create a D3D9 device with CUDA interop for HQ deinterlacing, trying each
// adapter until one maps to the desired CUDA device. Returns S_FALSE when HQ
// mode is disabled (caller falls back to a plain context), E_FAIL when no
// suitable D3D device could be created.
STDMETHODIMP CDecCuvid::InitD3D9(int best_device, DWORD requested_device)
{
    HRESULT hr = S_OK;
    CUresult cuStatus = CUDA_SUCCESS;
    int device = 0;
    // D3D9 interop is unavailable on Windows 10 and newer.
    if (IsWindows10OrNewer())
    {
        DbgLog((LOG_ERROR, 10, L"-> D3D9 CUVID interop is not supported on Windows 10"));
        return E_FAIL;
    }
    // Check if D3D mode is enabled/wanted
    if (m_pSettings->GetHWAccelDeintHQ() == FALSE)
    {
        DbgLog((LOG_ERROR, 10, L"-> HQ mode is turned off, skipping D3D9 init"));
        return S_FALSE;
    }
    if (!m_pD3D9)
        m_pD3D9 = Direct3DCreate9(D3D_SDK_VERSION);
    if (!m_pD3D9)
    {
        DbgLog((LOG_ERROR, 10, L"-> Failed to acquire IDirect3D9"));
        return E_FAIL;
    }
    D3DADAPTER_IDENTIFIER9 d3dId;
    D3DPRESENT_PARAMETERS d3dpp;
    D3DDISPLAYMODE d3ddm;
    for (unsigned lAdapter = 0; lAdapter < m_pD3D9->GetAdapterCount(); lAdapter++)
    {
        DbgLog((LOG_TRACE, 10, L"-> Trying D3D Adapter %d..", lAdapter));
        // Minimal windowed device; the backbuffer is never presented.
        ZeroMemory(&d3dpp, sizeof(d3dpp));
        m_pD3D9->GetAdapterDisplayMode(lAdapter, &d3ddm);
        d3dpp.Windowed = TRUE;
        d3dpp.BackBufferWidth = 640;
        d3dpp.BackBufferHeight = 480;
        d3dpp.BackBufferCount = 1;
        d3dpp.BackBufferFormat = d3ddm.Format;
        d3dpp.SwapEffect = D3DSWAPEFFECT_DISCARD;
        d3dpp.Flags = D3DPRESENTFLAG_VIDEO;
        IDirect3DDevice9 *pDev = nullptr;
        CUcontext cudaCtx = 0;
        hr = m_pD3D9->CreateDevice(
            lAdapter, D3DDEVTYPE_HAL, GetShellWindow(),
            D3DCREATE_HARDWARE_VERTEXPROCESSING | D3DCREATE_MULTITHREADED | D3DCREATE_FPU_PRESERVE, &d3dpp, &pDev);
        if (SUCCEEDED(hr))
        {
            m_pD3D9->GetAdapterIdentifier(lAdapter, 0, &d3dId);
            cuStatus = cuda.cuD3D9CtxCreate(&cudaCtx, &device, CU_CTX_SCHED_BLOCKING_SYNC, pDev);
            if (cuStatus == CUDA_SUCCESS)
            {
                DbgLog((LOG_TRACE, 10, L"-> Created D3D Device on adapter %S (%d), using CUDA device %d",
                        d3dId.Description, lAdapter, device));
                // Prefer Level-C capable devices; never downgrade from one.
                BOOL isLevelC = IsLevelC(d3dId.DeviceId);
                DbgLog(
                    (LOG_TRACE, 10, L"InitCUDA(): D3D Device with Id 0x%x is level C: %d", d3dId.DeviceId, isLevelC));
                if (m_bVDPAULevelC && !isLevelC)
                {
                    DbgLog((LOG_TRACE, 10, L"InitCUDA(): We already had a Level C+ device, this one is not, skipping"));
                    cuda.cuCtxDestroy(cudaCtx);
                    SafeRelease(&pDev);
                    continue;
                }
                // Release old resources
                SafeRelease(&m_pD3DDevice9);
                if (m_cudaContext)
                    cuda.cuCtxDestroy(m_cudaContext);
                // Store resources
                m_pD3DDevice9 = pDev;
                m_cudaContext = cudaCtx;
                m_bVDPAULevelC = isLevelC;
                cuda.cuDeviceGetName(m_cudaDeviceName, sizeof(m_cudaDeviceName), best_device);
                // Is this the one we want?
                if (device == best_device)
                    break;
            }
            else
            {
                DbgLog((LOG_TRACE, 10, L"-> D3D Device on adapter %d is not CUDA capable", lAdapter));
                SafeRelease(&pDev);
            }
        }
    }
    // A specific device was requested but the loop settled on another one.
    if (requested_device != DWORD_MAX && device != best_device)
    {
        DbgLog((LOG_ERROR, 10, L"-> No D3D Device found matching the requested device"));
        SafeRelease(&m_pD3DDevice9);
        SafeRelease(&m_pD3D9);
        if (m_cudaContext)
        {
            cuda.cuCtxDestroy(m_cudaContext);
            m_cudaContext = 0;
        }
        return E_FAIL;
    }
    if (!m_pD3DDevice9)
    {
        SafeRelease(&m_pD3D9);
        return E_FAIL;
    }
    return S_OK;
}
// Initialize the CUVID decoder for the given codec: reset the display queue,
// map the ffmpeg codec id onto a CUVID codec, create the bitstream parser
// (including AnnexB conversion for AVC1/HVC1-style media types), pre-check the
// sequence header for hardware compatibility, and create the decoder object.
// Returns S_OK on success, VFW_E_UNSUPPORTED_VIDEO when the stream is known
// incompatible, E_FAIL otherwise.
STDMETHODIMP CDecCuvid::InitDecoder(AVCodecID codec, const CMediaType *pmt)
{
    DbgLog((LOG_TRACE, 10, L"CDecCuvid::InitDecoder(): Initializing CUVID decoder"));
    HRESULT hr = S_OK;

    // A CUDA context must have been established beforehand (InitD3D9/InitCUDA)
    if (!m_cudaContext)
    {
        DbgLog((LOG_TRACE, 10, L"-> InitDecoder called without a cuda context"));
        return E_FAIL;
    }

    // Free old device
    DestroyDecoder(false);

    // Flush Display Queue - mark every slot as unused
    memset(&m_DisplayQueue, 0, sizeof(m_DisplayQueue));
    for (int i = 0; i < DISPLAY_DELAY; i++)
        m_DisplayQueue[i].picture_index = -1;
    m_DisplayPos = 0;
    m_DisplayDelay = DISPLAY_DELAY;

    // Reduce display delay for DVD decoding for lower decode latency
    if (m_pCallback->GetDecodeFlags() & LAV_VIDEO_DEC_FLAG_DVD)
        m_DisplayDelay /= 2;

    // Map the ffmpeg codec id onto the CUVID codec enum
    cudaVideoCodec cudaCodec = (cudaVideoCodec)-1;
    for (int i = 0; i < countof(cuda_codecs); i++)
    {
        if (cuda_codecs[i].ffcodec == codec)
        {
            cudaCodec = cuda_codecs[i].cudaCodec;
            break;
        }
    }

    if (cudaCodec == -1)
    {
        DbgLog((LOG_TRACE, 10, L"-> Codec id %d does not map to a CUVID codec", codec));
        return E_FAIL;
    }

    // MPEG-4 ASP requires a "Level C" capable device
    if (cudaCodec == cudaVideoCodec_MPEG4 && !m_bVDPAULevelC)
    {
        DbgLog((LOG_TRACE, 10, L"-> Device is not capable to decode this format (not >= Level C)"));
        return E_FAIL;
    }

    // When only DTS is available (or for non-MPEG2-typed MPEG-4), timestamps
    // cannot be routed through the parser and go through a FIFO queue instead.
    m_bUseTimestampQueue = (m_pCallback->GetDecodeFlags() & LAV_VIDEO_DEC_FLAG_ONLY_DTS) ||
                           (cudaCodec == cudaVideoCodec_MPEG4 && pmt->formattype != FORMAT_MPEG2Video);
    m_bWaitForKeyframe = m_bUseTimestampQueue;
    m_bInterlaced = TRUE;
    m_bFormatIncompatible = FALSE;
    m_bTFF = TRUE;
    m_rtPrevDiff = AV_NOPTS_VALUE;
    m_bARPresent = TRUE;

    // Create the CUDA Video Parser
    CUVIDPARSERPARAMS oVideoParserParameters;
    ZeroMemory(&oVideoParserParameters, sizeof(CUVIDPARSERPARAMS));
    oVideoParserParameters.CodecType = cudaCodec;
    oVideoParserParameters.ulMaxNumDecodeSurfaces = MAX_DECODE_FRAMES;
    oVideoParserParameters.ulMaxDisplayDelay = m_DisplayDelay;
    oVideoParserParameters.pUserData = this;
    oVideoParserParameters.pfnSequenceCallback =
        CDecCuvid::HandleVideoSequence; // Called before decoding frames and/or whenever there is a format change
    oVideoParserParameters.pfnDecodePicture =
        CDecCuvid::HandlePictureDecode; // Called when a picture is ready to be decoded (decode order)
    oVideoParserParameters.pfnDisplayPicture =
        CDecCuvid::HandlePictureDisplay; // Called whenever a picture is ready to be displayed (display order)
    // In timestamp-queue mode, tolerate more bitstream errors before dropping
    oVideoParserParameters.ulErrorThreshold = m_bUseTimestampQueue ? 100 : 0;

    memset(&m_VideoParserExInfo, 0, sizeof(CUVIDEOFORMATEX));

    // Handle AnnexB conversion for H.264 and HEVC
    if (pmt->formattype == FORMAT_MPEG2Video &&
        (pmt->subtype == MEDIASUBTYPE_AVC1 || pmt->subtype == MEDIASUBTYPE_avc1 || pmt->subtype == MEDIASUBTYPE_CCV1 ||
         pmt->subtype == MEDIASUBTYPE_HVC1))
    {
        MPEG2VIDEOINFO *mp2vi = (MPEG2VIDEOINFO *)pmt->Format();
        m_AnnexBConverter = new CAnnexBConverter();
        // The avcC/hvcC extradata uses 2-byte length prefixes for the
        // parameter sets; the per-sample NALU size is set further below.
        m_AnnexBConverter->SetNALUSize(2);

        BYTE *annexBextra = nullptr;
        int size = 0;
        if (cudaCodec == cudaVideoCodec_H264)
        {
            m_AnnexBConverter->Convert(&annexBextra, &size, (BYTE *)mp2vi->dwSequenceHeader, mp2vi->cbSequenceHeader);
        }
        else if (cudaCodec == cudaVideoCodec_HEVC && mp2vi->cbSequenceHeader >= 23)
        {
            // Sanity check the NAL length size stored in the hvcC header
            BYTE *bHEVCHeader = (BYTE *)mp2vi->dwSequenceHeader;
            int nal_len_size = (bHEVCHeader[21] & 3) + 1;
            if (nal_len_size != mp2vi->dwFlags)
            {
                DbgLog((LOG_ERROR, 10, L"hvcC nal length size doesn't match media type"));
            }

            m_AnnexBConverter->ConvertHEVCExtradata(&annexBextra, &size, (BYTE *)mp2vi->dwSequenceHeader,
                                                    mp2vi->cbSequenceHeader);
        }

        // Stash the converted parameter sets so the parser sees them first
        if (annexBextra && size)
        {
            size = min(size, sizeof(m_VideoParserExInfo.raw_seqhdr_data));
            memcpy(m_VideoParserExInfo.raw_seqhdr_data, annexBextra, size);
            m_VideoParserExInfo.format.seqhdr_data_length = size;
            av_freep(&annexBextra);
        }

        m_AnnexBConverter->SetNALUSize(mp2vi->dwFlags);
    }
    else
    {
        // Plain extradata: copy it verbatim if it fits in the parser struct
        size_t hdr_len = 0;
        getExtraData(*pmt, nullptr, &hdr_len);
        if (hdr_len <= 1024)
        {
            getExtraData(*pmt, m_VideoParserExInfo.raw_seqhdr_data, &hdr_len);
            m_VideoParserExInfo.format.seqhdr_data_length = (unsigned int)hdr_len;
        }
    }

    int bitdepth = 8;
    // Pre-check the sequence header (if any) so we can reject incompatible
    // streams early; if no SPS is present yet, defer the check to Decode().
    m_bNeedSequenceCheck = FALSE;
    if (m_VideoParserExInfo.format.seqhdr_data_length)
    {
        if (cudaCodec == cudaVideoCodec_H264)
        {
            hr = CheckH264Sequence(m_VideoParserExInfo.raw_seqhdr_data, m_VideoParserExInfo.format.seqhdr_data_length);
            if (FAILED(hr))
            {
                return VFW_E_UNSUPPORTED_VIDEO;
            }
            else if (hr == S_FALSE)
            {
                m_bNeedSequenceCheck = TRUE;
            }
        }
        else if (cudaCodec == cudaVideoCodec_MPEG2)
        {
            DbgLog((LOG_TRACE, 10, L"-> Scanning extradata for MPEG2 sequence header"));
            CMPEG2HeaderParser mpeg2parser(m_VideoParserExInfo.raw_seqhdr_data,
                                           m_VideoParserExInfo.format.seqhdr_data_length);
            if (mpeg2parser.hdr.valid)
            {
                if (mpeg2parser.hdr.chroma >= 2)
                {
                    DbgLog((LOG_TRACE, 10, L" -> Sequence header indicates incompatible chroma sampling (chroma: %d)",
                            mpeg2parser.hdr.chroma));
                    return VFW_E_UNSUPPORTED_VIDEO;
                }
                m_bInterlaced = mpeg2parser.hdr.interlaced;
            }
        }
        else if (cudaCodec == cudaVideoCodec_VC1)
        {
            CVC1HeaderParser vc1Parser(m_VideoParserExInfo.raw_seqhdr_data,
                                       m_VideoParserExInfo.format.seqhdr_data_length);
            m_bInterlaced = vc1Parser.hdr.interlaced;
        }
        else if (cudaCodec == cudaVideoCodec_HEVC)
        {
            hr = CheckHEVCSequence(m_VideoParserExInfo.raw_seqhdr_data, m_VideoParserExInfo.format.seqhdr_data_length,
                                   &bitdepth);
            if (FAILED(hr))
            {
                return VFW_E_UNSUPPORTED_VIDEO;
            }
            else if (hr == S_FALSE)
            {
                m_bNeedSequenceCheck = TRUE;
            }
        }
    }
    else
    {
        m_bNeedSequenceCheck = (cudaCodec == cudaVideoCodec_H264 || cudaCodec == cudaVideoCodec_HEVC);
    }

    oVideoParserParameters.pExtVideoInfo = &m_VideoParserExInfo;
    CUresult oResult = cuda.cuvidCreateVideoParser(&m_hParser, &oVideoParserParameters);
    if (oResult != CUDA_SUCCESS)
    {
        DbgLog((LOG_ERROR, 10, L"-> Creating parser for type %d failed with code %d", cudaCodec, oResult));
        return E_FAIL;
    }

    BITMAPINFOHEADER *bmi = nullptr;
    videoFormatTypeHandler(pmt->Format(), pmt->FormatType(), &bmi);

    {
        hr = CreateCUVIDDecoder(cudaCodec, bmi->biWidth, bmi->biHeight, bitdepth, !m_bInterlaced);
        if (FAILED(hr))
        {
            DbgLog((LOG_ERROR, 10, L"-> Creating CUVID decoder failed"));
            return hr;
        }
    }

    // Push the stored sequence header through the parser immediately
    m_bForceSequenceUpdate = TRUE;

    DecodeSequenceData();

    return S_OK;
}
// Create (or re-create) the CUVID hardware decoder object for the given codec,
// dimensions and bit depth. When a D3D9 device exists, the DXVA decode path is
// tried first, falling back to pure CUVID on failure (via the retry label).
// Only 4:2:0 chroma is configured; output is NV12 (8-bit) or P016 (>8-bit).
STDMETHODIMP CDecCuvid::CreateCUVIDDecoder(cudaVideoCodec codec, DWORD dwWidth, DWORD dwHeight, int nBitdepth,
                                           bool bProgressiveSequence)
{
    DbgLog((LOG_TRACE, 10, L"CDecCuvid::CreateCUVIDDecoder(): Creating CUVID decoder instance"));
    HRESULT hr = S_OK;
    BOOL bDXVAMode = (m_pD3DDevice9 != nullptr);

    cuda.cuvidCtxLock(m_cudaCtxLock, 0);
    CUVIDDECODECREATEINFO *dci = &m_VideoDecoderInfo;

retry:
    // Destroy the previous decoder instance, if any
    if (m_hDecoder)
    {
        cuda.cuvidDestroyDecoder(m_hDecoder);
        m_hDecoder = 0;
    }

    ZeroMemory(dci, sizeof(*dci));
    dci->ulWidth = dwWidth;
    dci->ulHeight = dwHeight;
    dci->ulNumDecodeSurfaces = MAX_DECODE_FRAMES;
    dci->CodecType = codec;
    dci->bitDepthMinus8 = nBitdepth - 8;
    dci->ChromaFormat = cudaVideoChromaFormat_420;
    dci->OutputFormat = nBitdepth > 8 ? cudaVideoSurfaceFormat_P016 : cudaVideoSurfaceFormat_NV12;
    // Weave (no-op) deinterlacing for progressive content or when disabled,
    // otherwise the user-configured hardware deinterlacer
    dci->DeinterlaceMode = (bProgressiveSequence || (m_pSettings->GetDeinterlacingMode() == DeintMode_Disable))
                               ? cudaVideoDeinterlaceMode_Weave
                               : (cudaVideoDeinterlaceMode)m_pSettings->GetHWAccelDeintMode();
    dci->ulNumOutputSurfaces = 1;
    dci->ulTargetWidth = dwWidth;
    dci->ulTargetHeight = dwHeight;

    // can't provide the original values here, or the decoder starts doing weird things - scaling to the size and
    // cropping afterwards
    dci->display_area.right = (short)dwWidth;
    dci->display_area.bottom = (short)dwHeight;

    dci->ulCreationFlags = bDXVAMode ? cudaVideoCreate_PreferDXVA : cudaVideoCreate_PreferCUVID;
    dci->vidLock = m_cudaCtxLock;

    // create the decoder
    CUresult oResult = cuda.cuvidCreateDecoder(&m_hDecoder, dci);
    if (oResult != CUDA_SUCCESS)
    {
        DbgLog((LOG_ERROR, 10, L"-> Creation of decoder for type %d failed with code %d", dci->CodecType, oResult));
        if (bDXVAMode)
        {
            // DXVA path failed - retry once without it
            DbgLog((LOG_ERROR, 10, L" -> Retrying in pure CUVID mode"));
            bDXVAMode = FALSE;
            goto retry;
        }
        hr = E_FAIL;
    }
    cuda.cuvidCtxUnlock(m_cudaCtxLock, 0);

    return hr;
}
// Feed the stored sequence header (extradata) through the CUVID parser so the
// decoder picks up stream parameters before any real frame data arrives.
// Always returns S_OK; parse errors on the extradata are ignored here.
STDMETHODIMP CDecCuvid::DecodeSequenceData()
{
    CUVIDSOURCEDATAPACKET packet = {};
    packet.payload = m_VideoParserExInfo.raw_seqhdr_data;
    packet.payload_size = m_VideoParserExInfo.format.seqhdr_data_length;

    // Nothing to do when no sequence header data is stored
    if (packet.payload != nullptr && packet.payload_size != 0)
    {
        cuda.cuvidCtxLock(m_cudaCtxLock, 0);
        cuda.cuvidParseVideoData(m_hParser, &packet);
        cuda.cuvidCtxUnlock(m_cudaCtxLock, 0);
    }
    return S_OK;
}
// Peek at the display-queue entry following the current position, i.e. the
// frame that would be delivered after the one at m_DisplayPos.
CUVIDPARSERDISPINFO *CDecCuvid::GetNextFrame()
{
    const int nextPos = (m_DisplayPos + 1) % m_DisplayDelay;
    return &m_DisplayQueue[nextPos];
}
// Parser callback: invoked before decoding starts and on every format change.
// Re-creates the CUVID decoder when relevant stream parameters changed, and
// derives deinterlacing, frame-rate and colorimetry state from the new
// sequence information. Returns TRUE to continue parsing.
int CUDAAPI CDecCuvid::HandleVideoSequence(void *obj, CUVIDEOFORMAT *cuvidfmt)
{
    DbgLog((LOG_TRACE, 10, L"CDecCuvid::HandleVideoSequence(): New Video Sequence"));
    CDecCuvid *filter = static_cast<CDecCuvid *>(obj);

    CUVIDDECODECREATEINFO *dci = &filter->m_VideoDecoderInfo;

    // Check if we should be deinterlacing
    bool bShouldDeinterlace =
        (!cuvidfmt->progressive_sequence && filter->m_pSettings->GetDeinterlacingMode() != DeintMode_Disable &&
         filter->m_pSettings->GetHWAccelDeintMode() != HWDeintMode_Weave);

    // Re-initialize the decoder if any decoder-relevant parameter changed
    if ((cuvidfmt->codec != dci->CodecType) || (cuvidfmt->coded_width != dci->ulWidth) ||
        (cuvidfmt->coded_height != dci->ulHeight) || (cuvidfmt->chroma_format != dci->ChromaFormat) ||
        (cuvidfmt->bit_depth_luma_minus8 != dci->bitDepthMinus8) ||
        (bShouldDeinterlace != (dci->DeinterlaceMode != cudaVideoDeinterlaceMode_Weave)) ||
        filter->m_bForceSequenceUpdate)
    {
        filter->m_bForceSequenceUpdate = FALSE;
        HRESULT hr =
            filter->CreateCUVIDDecoder(cuvidfmt->codec, cuvidfmt->coded_width, cuvidfmt->coded_height,
                                       cuvidfmt->bit_depth_luma_minus8 + 8, cuvidfmt->progressive_sequence != 0);
        if (FAILED(hr))
            filter->m_bFormatIncompatible = TRUE;
    }

    filter->m_bInterlaced = !cuvidfmt->progressive_sequence;
    filter->m_bDoubleRateDeint =
        bShouldDeinterlace && (filter->m_pSettings->GetHWAccelDeintOutput() == DeintOutput_FramePerField);
    if (filter->m_bInterlaced && cuvidfmt->frame_rate.numerator && cuvidfmt->frame_rate.denominator)
    {
        double dFrameTime = 10000000.0 / ((double)cuvidfmt->frame_rate.numerator / cuvidfmt->frame_rate.denominator);
        // A frame time of ~41ms indicates 23.976/24 fps film content; doubling
        // that via per-field output is not useful, so disable double-rate
        // deinterlacing. (Previously this assigned TRUE, which was a no-op
        // since the condition already requires m_bDoubleRateDeint to be TRUE.)
        if (filter->m_bDoubleRateDeint && (int)(dFrameTime / 10000.0) == 41)
        {
            filter->m_bDoubleRateDeint = FALSE;
        }
        if (cuvidfmt->codec != cudaVideoCodec_MPEG4)
            filter->m_rtAvgTimePerFrame = REFERENCE_TIME(dFrameTime + 0.5);
        else
            filter->m_rtAvgTimePerFrame = AV_NOPTS_VALUE; // TODO: base on media type
    }
    else
    {
        filter->m_rtAvgTimePerFrame = AV_NOPTS_VALUE;
    }

    // Adjust frame time for double-rate deint (one output frame per field)
    if (filter->m_bDoubleRateDeint && filter->m_rtAvgTimePerFrame != AV_NOPTS_VALUE)
        filter->m_rtAvgTimePerFrame /= 2;

    filter->m_VideoFormat = *cuvidfmt;

    // Only 4:2:0 chroma is supported by the decoder configuration
    if (cuvidfmt->chroma_format != cudaVideoChromaFormat_420)
    {
        DbgLog((LOG_TRACE, 10, L"CDecCuvid::HandleVideoSequence(): Incompatible Chroma Format detected"));
        filter->m_bFormatIncompatible = TRUE;
    }

    fillDXVAExtFormat(filter->m_DXVAExtendedFormat,
                      (filter->m_iFullRange > 0 || cuvidfmt->video_signal_description.video_full_range_flag),
                      cuvidfmt->video_signal_description.color_primaries,
                      cuvidfmt->video_signal_description.matrix_coefficients,
                      cuvidfmt->video_signal_description.transfer_characteristics);

    return TRUE;
}
// Parser callback: a picture is ready to be decoded (decode order). Ensures
// the target surface is not still pending in the display queue, then submits
// the picture to the hardware decoder. Returns TRUE to continue, FALSE to
// drop the picture (flushing, or waiting for a keyframe).
int CUDAAPI CDecCuvid::HandlePictureDecode(void *obj, CUVIDPICPARAMS *cuvidpic)
{
    CDecCuvid *filter = reinterpret_cast<CDecCuvid *>(obj);

    if (filter->m_bFlushing)
        return FALSE;

    // In timestamp-queue mode, drop everything until the first keyframe,
    // keeping the timestamp queue in sync with the dropped pictures
    if (filter->m_bWaitForKeyframe)
    {
        if (cuvidpic->intra_pic_flag)
            filter->m_bWaitForKeyframe = FALSE;
        else
        {
            // Pop timestamp from the queue, drop frame
            if (!filter->m_timestampQueue.empty())
            {
                filter->m_timestampQueue.pop();
            }
            return FALSE;
        }
    }

    // Make sure the surface index we are about to decode into is not still
    // waiting in the display queue; if it is, flush entries (oldest first)
    // until it becomes free.
    int flush_pos = filter->m_DisplayPos;
    for (;;)
    {
        bool frame_in_use = false;
        for (int i = 0; i < filter->m_DisplayDelay; i++)
        {
            if (filter->m_DisplayQueue[i].picture_index == cuvidpic->CurrPicIdx)
            {
                frame_in_use = true;
                break;
            }
        }
        if (!frame_in_use)
        {
            // No problem: we're safe to use this frame
            break;
        }
        // The target frame is still pending in the display queue:
        // Flush the oldest entry from the display queue and repeat
        if (filter->m_DisplayQueue[flush_pos].picture_index >= 0)
        {
            filter->Display(&filter->m_DisplayQueue[flush_pos]);
            filter->m_DisplayQueue[flush_pos].picture_index = -1;
        }
        flush_pos = (flush_pos + 1) % filter->m_DisplayDelay;
    }

    filter->cuda.cuvidCtxLock(filter->m_cudaCtxLock, 0);
    // Keep the picture parameters for later (frame type lookup in Deliver)
    filter->m_PicParams[cuvidpic->CurrPicIdx] = *cuvidpic;
    // SEH guard: the driver call has been observed to throw; a failed decode
    // is logged but does not abort parsing
    __try
    {
        CUresult cuStatus = filter->cuda.cuvidDecodePicture(filter->m_hDecoder, cuvidpic);
#ifdef DEBUG
        if (cuStatus != CUDA_SUCCESS)
        {
            DbgLog((LOG_ERROR, 10, L"CDecCuvid::HandlePictureDecode(): cuvidDecodePicture returned error code %d",
                    cuStatus));
        }
#endif
    }
    __except (1)
    {
        DbgLog((LOG_ERROR, 10, L"CDecCuvid::HandlePictureDecode(): cuvidDecodePicture threw an exception"));
    }
    filter->cuda.cuvidCtxUnlock(filter->m_cudaCtxLock, 0);

    return TRUE;
}
// Parser callback: a picture is ready for display (display order). Attaches a
// timestamp (from the FIFO queue in timestamp-queue mode) and enqueues the
// picture into the display queue, delivering the slot's previous occupant
// first if needed. Returns TRUE to continue parsing.
int CUDAAPI CDecCuvid::HandlePictureDisplay(void *obj, CUVIDPARSERDISPINFO *cuviddisp)
{
    CDecCuvid *filter = reinterpret_cast<CDecCuvid *>(obj);

    if (filter->m_bFlushing)
        return FALSE;

    if (filter->m_bUseTimestampQueue)
    {
        if (filter->m_timestampQueue.empty())
        {
            cuviddisp->timestamp = AV_NOPTS_VALUE;
        }
        else
        {
            cuviddisp->timestamp = filter->m_timestampQueue.front();
            filter->m_timestampQueue.pop();
        }
    }

    // Drop samples with negative timestamps (preroll); TRUE keeps the parser
    // running without queueing the picture
    if (cuviddisp->timestamp != AV_NOPTS_VALUE && cuviddisp->timestamp < 0)
        return TRUE;

    // Deliver the picture currently occupying the slot before overwriting it
    if (filter->m_DisplayQueue[filter->m_DisplayPos].picture_index >= 0)
    {
        filter->Display(&filter->m_DisplayQueue[filter->m_DisplayPos]);
        filter->m_DisplayQueue[filter->m_DisplayPos].picture_index = -1;
    }
    filter->m_DisplayQueue[filter->m_DisplayPos] = *cuviddisp;
    filter->m_DisplayPos = (filter->m_DisplayPos + 1) % filter->m_DisplayDelay;

    return TRUE;
}
// Decide how a queued picture is output: apply the deinterlacing mode and
// soft-telecine handling, fix up the field order, and deliver either one
// frame, one frame marked double-duration, or two frames (one per field).
STDMETHODIMP CDecCuvid::Display(CUVIDPARSERDISPINFO *cuviddisp)
{
    BOOL bTreatAsProgressive = m_pSettings->GetDeinterlacingMode() == DeintMode_Disable;

    if (bTreatAsProgressive)
    {
        // Deinterlacing disabled - force everything through the progressive path
        cuviddisp->progressive_frame = TRUE;
        m_nSoftTelecine = FALSE;
    }
    else
    {
        // Soft telecine detection (MPEG-2/H.264): repeat_first_field starts a
        // 2-frame countdown during which the pre-telecine field order is kept
        if (m_VideoFormat.codec == cudaVideoCodec_MPEG2 || m_VideoFormat.codec == cudaVideoCodec_H264)
        {
            if (cuviddisp->repeat_first_field)
            {
                m_nSoftTelecine = 2;
            }
            else if (m_nSoftTelecine)
            {
                m_nSoftTelecine--;
            }
            if (!m_nSoftTelecine)
                m_bTFF = cuviddisp->top_field_first;
        }
        // Aggressive mode treats interlaced-stream frames as interlaced even
        // when flagged progressive (except VC-1 and during soft telecine);
        // Force mode always treats frames as interlaced
        cuviddisp->progressive_frame =
            (cuviddisp->progressive_frame &&
             !(m_bInterlaced && m_pSettings->GetDeinterlacingMode() == DeintMode_Aggressive &&
               m_VideoFormat.codec != cudaVideoCodec_VC1 && !m_nSoftTelecine) &&
             !(m_pSettings->GetDeinterlacingMode() == DeintMode_Force));
    }

    // Apply the configured field order (auto keeps the stream's order, with
    // the latched m_bTFF used while soft telecine is active)
    LAVDeintFieldOrder fo = m_pSettings->GetDeintFieldOrder();
    cuviddisp->top_field_first = (fo == DeintFieldOrder_Auto) ? (m_nSoftTelecine ? m_bTFF : cuviddisp->top_field_first)
                                                              : (fo == DeintFieldOrder_TopFieldFirst);

    if (m_bDoubleRateDeint)
    {
        if (cuviddisp->progressive_frame || m_nSoftTelecine)
        {
            // One output frame, flagged as spanning both field periods
            Deliver(cuviddisp, 2);
        }
        else
        {
            // Two output frames, one per field
            Deliver(cuviddisp, 0);
            Deliver(cuviddisp, 1);
        }
    }
    else
    {
        Deliver(cuviddisp);
    }
    return S_OK;
}
// Map a decoded picture from GPU memory, copy it into a host staging buffer
// and hand it to the callback as a LAVFrame.
// field: 0 = first field, 1 = second field (double-rate deint),
//        2 = full frame at double duration; any other value = normal frame.
// Returns S_OK, or E_FAIL on any CUDA failure (mapping, allocation, copy).
STDMETHODIMP CDecCuvid::Deliver(CUVIDPARSERDISPINFO *cuviddisp, int field)
{
    CUdeviceptr devPtr = 0;
    unsigned int pitch = 0;
    CUVIDPROCPARAMS vpp;
    CUresult cuStatus = CUDA_SUCCESS;

    memset(&vpp, 0, sizeof(vpp));
    vpp.progressive_frame = !m_nSoftTelecine && cuviddisp->progressive_frame;
    vpp.top_field_first = cuviddisp->top_field_first;
    vpp.second_field = (field == 1);

    cuda.cuvidCtxLock(m_cudaCtxLock, 0);
    cuStatus = cuda.cuvidMapVideoFrame(m_hDecoder, cuviddisp->picture_index, &devPtr, &pitch, &vpp);
    if (cuStatus != CUDA_SUCCESS)
    {
        DbgLog(
            (LOG_CUSTOM1, 1, L"CDecCuvid::Deliver(): cuvidMapVideoFrame failed on index %d", cuviddisp->picture_index));
        goto cuda_fail;
    }

    // 3/2 * pitch covers the luma plane plus the interleaved chroma plane
    int size = pitch * m_VideoDecoderInfo.ulTargetHeight * 3 / 2;
    // (Re-)allocate the host staging buffer if it's missing or too small
    if (!m_pbRawNV12 || size > m_cRawNV12)
    {
        if (m_pbRawNV12)
        {
            cuda.cuMemFreeHost(m_pbRawNV12);
            m_pbRawNV12 = nullptr;
            m_cRawNV12 = 0;
        }
        cuStatus = cuda.cuMemAllocHost((void **)&m_pbRawNV12, size);
        if (cuStatus != CUDA_SUCCESS)
        {
            DbgLog((LOG_CUSTOM1, 1, L"CDecCuvid::Deliver(): cuMemAllocHost failed to allocate %d bytes (%d)", size,
                    cuStatus));
            goto cuda_fail;
        }
        m_cRawNV12 = size;
    }
    // Copy memory from the device into the staging area
    if (m_pbRawNV12)
    {
        cuStatus = cuda.cuMemcpyDtoH(m_pbRawNV12, devPtr, size);
        if (cuStatus != CUDA_SUCCESS)
        {
            DbgLog((LOG_ERROR, 10, L"Memory Transfer failed (%d)", cuStatus));
            goto cuda_fail;
        }
    }
    else
    {
        // If we don't have our memory, this is bad.
        DbgLog((LOG_ERROR, 10, L"No Valid Staging Memory - failing"));
        goto cuda_fail;
    }
    cuda.cuvidUnmapVideoFrame(m_hDecoder, devPtr);
    cuda.cuvidCtxUnlock(m_cudaCtxLock, 0);

    // Setup the LAVFrame
    LAVFrame *pFrame = nullptr;
    AllocateFrame(&pFrame);

    if (m_rtAvgTimePerFrame != AV_NOPTS_VALUE)
    {
        pFrame->avgFrameDuration = m_rtAvgTimePerFrame;
        // field == 2 spans both field periods, so double the duration
        if (m_bDoubleRateDeint && field == 2)
            pFrame->avgFrameDuration *= 2;
    }

    // Derive start/stop times: the difference to the next queued frame's
    // timestamp gives the frame duration (halved per field when deinterlacing)
    REFERENCE_TIME rtStart = cuviddisp->timestamp, rtStop = AV_NOPTS_VALUE;
    if (rtStart != AV_NOPTS_VALUE)
    {
        CUVIDPARSERDISPINFO *next = GetNextFrame();
        if (next->picture_index != -1 && next->timestamp != AV_NOPTS_VALUE)
        {
            m_rtPrevDiff = next->timestamp - cuviddisp->timestamp;
        }

        if (m_rtPrevDiff != AV_NOPTS_VALUE)
        {
            REFERENCE_TIME rtHalfDiff = m_rtPrevDiff >> 1;
            if (field == 1)
                rtStart += rtHalfDiff;

            rtStop = rtStart + rtHalfDiff;
            if (field == 2 || !m_bDoubleRateDeint)
                rtStop += rtHalfDiff;
        }

        // Sanity check in case the duration is null
        if (rtStop <= rtStart)
            rtStop = AV_NOPTS_VALUE;
    }

    pFrame->format = (m_VideoDecoderInfo.OutputFormat == cudaVideoSurfaceFormat_P016) ? LAVPixFmt_P016 : LAVPixFmt_NV12;
    pFrame->bpp = m_VideoDecoderInfo.bitDepthMinus8 + 8;
    pFrame->width = m_VideoFormat.display_area.right;
    pFrame->height = m_VideoFormat.display_area.bottom;
    pFrame->rtStart = rtStart;
    pFrame->rtStop = rtStop;
    pFrame->repeat = cuviddisp->repeat_first_field;
    {
        // Only forward the aspect ratio when the stream signalled one, or it
        // differs from the plain frame dimensions
        AVRational ar = {m_VideoFormat.display_aspect_ratio.x, m_VideoFormat.display_aspect_ratio.y};
        AVRational arDim = {pFrame->width, pFrame->height};
        if (m_bARPresent || av_cmp_q(ar, arDim) != 0)
        {
            pFrame->aspect_ratio = ar;
        }
    }
    pFrame->ext_format = m_DXVAExtendedFormat;
    // Interlaced flag only when the hardware deinterlacer did NOT run (weave)
    pFrame->interlaced =
        !cuviddisp->progressive_frame && m_VideoDecoderInfo.DeinterlaceMode == cudaVideoDeinterlaceMode_Weave;
    pFrame->tff = cuviddisp->top_field_first;

    // TODO: This may be wrong for H264 where B-Frames can be references
    pFrame->frame_type = m_PicParams[cuviddisp->picture_index].intra_pic_flag
                             ? 'I'
                             : (m_PicParams[cuviddisp->picture_index].ref_pic_flag ? 'P' : 'B');

    // Assign the buffer to the LAV Frame bufers
    int Ysize = m_VideoDecoderInfo.ulTargetHeight * pitch;
    pFrame->data[0] = m_pbRawNV12;
    pFrame->data[1] = m_pbRawNV12 + Ysize;
    pFrame->stride[0] = pFrame->stride[1] = pitch;
    // The staging buffer is reused for the next frame, so downstream must copy
    pFrame->flags |= LAV_FRAME_FLAG_BUFFER_MODIFY;

    if (m_bEndOfSequence)
        pFrame->flags |= LAV_FRAME_FLAG_END_OF_SEQUENCE;

    m_pCallback->Deliver(pFrame);

    return S_OK;

cuda_fail:
    // Unified CUDA error path: unmap (no-op if mapping failed) and unlock
    cuda.cuvidUnmapVideoFrame(m_hDecoder, devPtr);
    cuda.cuvidCtxUnlock(m_cudaCtxLock, 0);
    return E_FAIL;
}
// Scan a H.264 bitstream chunk for a SPS NAL and verify the stream is within
// the CUVID decoder's capabilities, caching interlacing/range/AR flags.
// Returns S_OK for a compatible SPS, E_FAIL for an unsupported stream, and
// S_FALSE when no SPS was found in the chunk.
STDMETHODIMP CDecCuvid::CheckH264Sequence(const BYTE *buffer, int buflen)
{
    DbgLog((LOG_TRACE, 10, L"CDecCuvid::CheckH264Sequence(): Checking H264 frame for SPS"));
    CH264SequenceParser parser;
    parser.ParseNALs(buffer, buflen, 0);

    // No SPS in this chunk - the caller has to keep looking
    if (!parser.sps.valid)
        return S_FALSE;

    m_bInterlaced = parser.sps.interlaced;
    m_iFullRange = parser.sps.full_range;
    m_bARPresent = parser.sps.ar_present;
    DbgLog((LOG_TRACE, 10, L"-> SPS found"));

    // Supported: up to High profile (100), 4:2:0 chroma, 8-bit, below level 6
    const bool bUnsupported = parser.sps.profile > 100 || parser.sps.chroma != 1 || parser.sps.luma_bitdepth != 8 ||
                              parser.sps.chroma_bitdepth != 8 || parser.sps.level >= 60;
    if (bUnsupported)
    {
        DbgLog((LOG_TRACE, 10,
                L" -> SPS indicates video incompatible with CUVID, aborting (profile: %d, level: %d, chroma: %d, bitdepth: "
                L"%d/%d)",
                parser.sps.profile, parser.sps.level, parser.sps.chroma, parser.sps.luma_bitdepth,
                parser.sps.chroma_bitdepth));
        return E_FAIL;
    }

    DbgLog((LOG_TRACE, 10, L"-> Video seems compatible with CUVID"));
    return S_OK;
}
// Scan a HEVC bitstream chunk for a SPS NAL and verify CUVID compatibility.
// On success, *bitdepth (when non-null) receives the stream's bit depth.
// Returns S_OK for a compatible SPS, E_FAIL for an unsupported stream, and
// S_FALSE when no SPS was found in the chunk.
STDMETHODIMP CDecCuvid::CheckHEVCSequence(const BYTE *buffer, int buflen, int *bitdepth)
{
    DbgLog((LOG_TRACE, 10, L"CDecCuvid::CheckHEVCSequence(): Checking HEVC frame for SPS"));
    CHEVCSequenceParser parser;
    parser.ParseNALs(buffer, buflen, 0);

    // No SPS in this chunk - the caller has to keep looking
    if (!parser.sps.valid)
        return S_FALSE;

    DbgLog((LOG_TRACE, 10, L"-> SPS found"));

    // Supported: 4:2:0 chroma, up to 12-bit, Main/Main10 or RExt Main 12
    const bool bSupported =
        parser.sps.chroma <= 1 && parser.sps.bitdepth <= 12 &&
        (parser.sps.profile <= FF_PROFILE_HEVC_MAIN_10 ||
         (parser.sps.profile == FF_PROFILE_HEVC_REXT && parser.sps.rext_profile == HEVC_REXT_PROFILE_MAIN_12));
    if (!bSupported)
    {
        DbgLog((LOG_TRACE, 10, L" -> SPS indicates video incompatible with CUVID, aborting (profile: %d)",
                parser.sps.profile));
        return E_FAIL;
    }

    if (bitdepth)
        *bitdepth = parser.sps.bitdepth;

    DbgLog((LOG_TRACE, 10, L"-> Video seems compatible with CUVID"));
    return S_OK;
}
// Decode one input packet: optionally convert length-prefixed NALUs to AnnexB,
// handle MPEG-2 end-of-sequence markers (splitting the packet if needed), run
// a deferred sequence compatibility check, and feed the data into the CUVID
// parser. Delivery happens asynchronously through the parser callbacks.
STDMETHODIMP CDecCuvid::Decode(const BYTE *buffer, int buflen, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop,
                               BOOL bSyncPoint, BOOL bDiscontinuity, IMediaSample *pSample)
{
    CUresult result;
    HRESULT hr = S_OK;

    CUVIDSOURCEDATAPACKET pCuvidPacket;
    ZeroMemory(&pCuvidPacket, sizeof(pCuvidPacket));

    BYTE *pBuffer = nullptr;
    if (m_AnnexBConverter)
    {
        // AVC1/HVC1 input: convert length-prefixed NALUs to AnnexB start codes
        int size = 0;
        hr = m_AnnexBConverter->Convert(&pBuffer, &size, buffer, buflen);
        if (SUCCEEDED(hr))
        {
            pCuvidPacket.payload = pBuffer;
            pCuvidPacket.payload_size = size;
        }
    }
    else
    {
        pCuvidPacket.payload = buffer;
        pCuvidPacket.payload_size = buflen;

        if (m_VideoDecoderInfo.CodecType == cudaVideoCodec_MPEG2)
        {
            const uint8_t *eosmarker = nullptr;
            const uint8_t *end = buffer + buflen;
            int status =
                CheckForSequenceMarkers(AV_CODEC_ID_MPEG2VIDEO, buffer, buflen, &m_MpegParserState, &eosmarker);
            // If we found a EOS marker, but its not at the end of the packet, then split the packet
            // to be able to individually decode the frame before the EOS, and then decode the remainder
            if (status & STATE_EOS_FOUND && eosmarker && eosmarker != end)
            {
                // Recurse for the data before the EOS marker, then continue
                // with the remainder as an untimestamped packet
                Decode(buffer, (int)(eosmarker - buffer), rtStart, rtStop, bSyncPoint, bDiscontinuity, nullptr);

                rtStart = rtStop = AV_NOPTS_VALUE;
                pCuvidPacket.payload = eosmarker;
                pCuvidPacket.payload_size = (int)(end - eosmarker);
            }
            else if (eosmarker)
            {
                m_bEndOfSequence = TRUE;
            }
        }
    }

    // Deferred sequence check: keep probing packets until a SPS shows up
    if (m_bNeedSequenceCheck)
    {
        if (m_VideoDecoderInfo.CodecType == cudaVideoCodec_H264)
        {
            hr = CheckH264Sequence(pCuvidPacket.payload, pCuvidPacket.payload_size);
        }
        else if (m_VideoDecoderInfo.CodecType == cudaVideoCodec_HEVC)
        {
            hr = CheckHEVCSequence(pCuvidPacket.payload, pCuvidPacket.payload_size, nullptr);
        }

        if (FAILED(hr))
        {
            m_bFormatIncompatible = TRUE;
        }
        else if (hr == S_OK)
        {
            m_bNeedSequenceCheck = FALSE;
        }
    }

    if (rtStart != AV_NOPTS_VALUE)
    {
        pCuvidPacket.flags |= CUVID_PKT_TIMESTAMP;
        pCuvidPacket.timestamp = rtStart;
    }

    if (bDiscontinuity)
        pCuvidPacket.flags |= CUVID_PKT_DISCONTINUITY;

    // In timestamp-queue mode the timestamp travels alongside the bitstream
    if (m_bUseTimestampQueue)
        m_timestampQueue.push(rtStart);

    cuda.cuvidCtxLock(m_cudaCtxLock, 0);
    // SEH guard: the driver call has been observed to throw
    __try
    {
        result = cuda.cuvidParseVideoData(m_hParser, &pCuvidPacket);
    }
    __except (1)
    {
        DbgLog((LOG_ERROR, 10, L"CDecCuvid::Decode(): cuvidParseVideoData threw an exception"));
    }
    cuda.cuvidCtxUnlock(m_cudaCtxLock, 0);

    av_freep(&pBuffer);

    // Drain the decoder when an end-of-sequence marker was seen
    if (m_bEndOfSequence)
    {
        EndOfStream();
        m_pCallback->Deliver(m_pCallback->GetFlushFrame());
        m_bEndOfSequence = FALSE;
    }

    if (m_bFormatIncompatible)
    {
        DbgLog((LOG_ERROR, 10, L"CDecCuvid::Decode(): Incompatible format detected, indicating failure..."));
        return E_FAIL;
    }

    return S_OK;
}
// Flush the decoder: drop the parser state and all pending display-queue
// entries WITHOUT delivering them, then re-prime the parser with the stored
// sequence header and reset timestamp/telecine tracking.
STDMETHODIMP CDecCuvid::Flush()
{
    DbgLog((LOG_TRACE, 10, L"CDecCuvid::Flush(): Flushing CUVID decoder"));
    // m_bFlushing makes the parser callbacks drop everything in flight
    m_bFlushing = TRUE;

    FlushParser();

    // Flush display queue (entries are discarded, not displayed)
    for (int i = 0; i < m_DisplayDelay; ++i)
    {
        if (m_DisplayQueue[m_DisplayPos].picture_index >= 0)
        {
            m_DisplayQueue[m_DisplayPos].picture_index = -1;
        }
        m_DisplayPos = (m_DisplayPos + 1) % m_DisplayDelay;
    }

    m_bFlushing = FALSE;
    m_bWaitForKeyframe = m_bUseTimestampQueue;

    // Re-init decoder after flush
    DecodeSequenceData();

    // Clear timestamp queue
    std::queue<REFERENCE_TIME>().swap(m_timestampQueue);
    m_nSoftTelecine = 0;

    return __super::Flush();
}
// Drain the decoder: flush the parser and deliver every picture still waiting
// in the display queue, in order.
STDMETHODIMP CDecCuvid::EndOfStream()
{
    FlushParser();

    // Walk the display queue once, outputting each pending picture
    for (int n = 0; n < m_DisplayDelay; n++)
    {
        CUVIDPARSERDISPINFO *pDisp = &m_DisplayQueue[m_DisplayPos];
        if (pDisp->picture_index >= 0)
        {
            Display(pDisp);
            pDisp->picture_index = -1;
        }
        m_DisplayPos = (m_DisplayPos + 1) % m_DisplayDelay;
    }

    return S_OK;
}
// Report the decoder's output pixel format: P016 for high-bitdepth streams,
// NV12 otherwise (mirrors the surface format chosen in CreateCUVIDDecoder).
STDMETHODIMP CDecCuvid::GetPixelFormat(LAVPixelFormat *pPix, int *pBpp)
{
    if (pPix)
        *pPix = (m_VideoDecoderInfo.OutputFormat == cudaVideoSurfaceFormat_P016) ? LAVPixFmt_P016 : LAVPixFmt_NV12;
    if (pBpp)
        *pBpp = m_VideoDecoderInfo.bitDepthMinus8 + 8;
    return S_OK;
}
// No fixed frame duration is reported here; per-frame timing is derived from
// the display-queue timestamps in Deliver().
STDMETHODIMP_(REFERENCE_TIME) CDecCuvid::GetFrameDuration()
{
    return 0;
}
// Output is interlaced when the stream is interlaced (or deinterlacing is
// forced) AND the hardware deinterlacer is not active (weave mode leaves the
// fields intact). Note: bAllowGuess is not used by this implementation.
STDMETHODIMP_(BOOL) CDecCuvid::IsInterlaced(BOOL bAllowGuess)
{
    return (m_bInterlaced || m_pSettings->GetDeinterlacingMode() == DeintMode_Force) &&
           (m_VideoDecoderInfo.DeinterlaceMode == cudaVideoDeinterlaceMode_Weave);
}
// Return the name of the active CUDA device as a newly allocated BSTR
// (UTF-8 -> UTF-16 conversion). The caller owns the returned string.
// Returns E_UNEXPECTED when no device name was recorded yet.
STDMETHODIMP CDecCuvid::GetHWAccelActiveDevice(BSTR *pstrDeviceName)
{
    CheckPointer(pstrDeviceName, E_POINTER);

    if (strlen(m_cudaDeviceName) == 0)
        return E_UNEXPECTED;

    // Query the required length; cbMultiByte == -1 makes the count include
    // the terminating null character
    int len = MultiByteToWideChar(CP_UTF8, MB_ERR_INVALID_CHARS, m_cudaDeviceName, -1, nullptr, 0);
    if (len == 0)
        return E_FAIL;

    *pstrDeviceName = SysAllocStringLen(nullptr, len);
    // Guard against allocation failure before writing into the buffer
    if (*pstrDeviceName == nullptr)
        return E_OUTOFMEMORY;
    MultiByteToWideChar(CP_UTF8, MB_ERR_INVALID_CHARS, m_cudaDeviceName, -1, *pstrDeviceName, len);

    return S_OK;
}
| 51,176
|
C++
|
.cpp
| 1,332
| 30.271021
| 129
| 0.600584
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| true
| true
| false
|
22,182
|
wmv9mft.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/decoders/wmv9mft.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "wmv9mft.h"
#include <mfapi.h>
#include <mferror.h>
#include <wmcodecdsp.h>
#include <propvarutil.h>
#include "parsers/VC1HeaderParser.h"
EXTERN_GUID(CLSID_CWMVDecMediaObject, 0x82d353df, 0x90bd, 0x4382, 0x8b, 0xc2, 0x3f, 0x61, 0x92, 0xb7, 0x6e, 0x34);
////////////////////////////////////////////////////////////////////////////////
// Constructor
////////////////////////////////////////////////////////////////////////////////
// Factory function: create a new WMV9 MFT decoder instance
ILAVDecoder *CreateDecoderWMV9MFT()
{
    return new CDecWMV9MFT();
}
////////////////////////////////////////////////////////////////////////////////
// WMV9 MFT decoder implementation
////////////////////////////////////////////////////////////////////////////////
CDecWMV9MFT::CDecWMV9MFT(void)
    : CDecBase()
{
    // The mfplat.dll function pointer table is populated later in Init()
    memset(&MF, 0, sizeof(MF));
}
CDecWMV9MFT::~CDecWMV9MFT(void)
{
    // Full teardown: releases the MFT, shuts down MF and unloads mfplat.dll
    DestroyDecoder(true);
}
// Release decoder resources. bFull additionally tears down the MFT instance,
// shuts down Media Foundation and unloads mfplat.dll; bFull == false only
// clears per-stream state (header parser, buffer pool).
STDMETHODIMP CDecWMV9MFT::DestroyDecoder(bool bFull)
{
    SAFE_DELETE(m_vc1Header);

    // Drain the buffer pool while holding the lock
    {
        CAutoLock lock(&m_BufferCritSec);
        for (auto *pEntry : m_BufferQueue)
        {
            SafeRelease(&pEntry->pBuffer);
            delete pEntry;
        }
        m_BufferQueue.clear();
    }

    if (bFull)
    {
        SafeRelease(&m_pMFT);
        if (MF.Shutdown)
            MF.Shutdown();
        FreeLibrary(MF.mfplat);
    }

    return S_OK;
}
#define GET_PROC_MF(name) \
MF.name = (tMF##name *)GetProcAddress(MF.mfplat, "MF" #name); \
if (MF.name == nullptr) \
{ \
DbgLog((LOG_ERROR, 10, L"-> Failed to load function \"%s\"", TEXT("MF") TEXT(#name))); \
return E_FAIL; \
}
// ILAVDecoder
// One-time decoder setup: load mfplat.dll and resolve the MF entry points,
// start Media Foundation, create the WMV decoder MFT, and disable its
// built-in deinterlacing, DXVA and frame interpolation.
STDMETHODIMP CDecWMV9MFT::Init()
{
    DbgLog((LOG_TRACE, 10, L"CDecWMV9MFT::Init(): Trying to open WMV9 MFT decoder"));
    HRESULT hr = S_OK;

    // Load and initialize Media Foundation dynamically
    MF.mfplat = LoadLibrary(L"mfplat.dll");
    if (!MF.mfplat)
    {
        DbgLog((LOG_TRACE, 10, L"-> Failed to load mfplat.dll"));
        return E_FAIL;
    }

    // Resolve the required MF entry points (each macro fails with E_FAIL if
    // the export is missing)
    GET_PROC_MF(Startup);
    GET_PROC_MF(Shutdown);
    GET_PROC_MF(CreateMediaType);
    GET_PROC_MF(CreateSample);
    GET_PROC_MF(CreateAlignedMemoryBuffer);
    GET_PROC_MF(AverageTimePerFrameToFrameRate);

    MF.Startup(MF_VERSION, MFSTARTUP_LITE);

    hr = CoCreateInstance(CLSID_CWMVDecMediaObject, nullptr, CLSCTX_INPROC_SERVER, IID_IMFTransform, (void **)&m_pMFT);
    if (FAILED(hr))
    {
        DbgLog((LOG_TRACE, 10, L"-> Failed to create MFT object"));
        return hr;
    }

    // Force decoder deinterlacing, dxva and FI to off
    IPropertyStore *pProp = nullptr;
    hr = m_pMFT->QueryInterface(&pProp);
    if (SUCCEEDED(hr))
    {
        PROPVARIANT variant;
        InitPropVariantFromBoolean(FALSE, &variant);
        pProp->SetValue(MFPKEY_DECODER_DEINTERLACING, variant);
        pProp->SetValue(MFPKEY_DXVA_ENABLED, variant);
        pProp->SetValue(MFPKEY_FI_ENABLED, variant);
        SafeRelease(&pProp);
    }

    return S_OK;
}
// Map the codec/subtype combination onto the subtype GUID the WMV decoder MFT
// expects: WMV3 always maps to MEDIASUBTYPE_WMV3; for VC-1, recognized
// subtypes are normalized, with WVC1 used as the fallback.
static GUID VerifySubtype(AVCodecID codec, GUID subtype)
{
    if (codec == AV_CODEC_ID_WMV3)
        return MEDIASUBTYPE_WMV3;

    if (subtype == MEDIASUBTYPE_WVC1 || subtype == MEDIASUBTYPE_wvc1)
        return MEDIASUBTYPE_WVC1;
    if (subtype == MEDIASUBTYPE_WMVA || subtype == MEDIASUBTYPE_wmva)
        return MEDIASUBTYPE_WMVA;

    // Unrecognized advanced-profile subtype - default to WVC1
    return MEDIASUBTYPE_WVC1;
}
// Initialize the WMV9/VC-1 MFT decoder from the given media type: normalize
// the VC-1 extradata (start code at offset 0 or 1), configure the MFT input
// type (size, frame rate, PAR, extradata), and select a compatible output
// type. Returns S_OK on success or the failing HRESULT.
STDMETHODIMP CDecWMV9MFT::InitDecoder(AVCodecID codec, const CMediaType *pmt)
{
    HRESULT hr = S_OK;
    DbgLog((LOG_TRACE, 10, L"CDecWMV9MFT::InitDecoder(): Initializing WMV9 MFT decoder"));

    DestroyDecoder(false);

    BITMAPINFOHEADER *pBMI = nullptr;
    REFERENCE_TIME rtAvg = 0;
    DWORD dwARX = 0, dwARY = 0;
    videoFormatTypeHandler(*pmt, &pBMI, &rtAvg, &dwARX, &dwARY);

    size_t extralen = 0;
    BYTE *extra = nullptr;
    getExtraData(*pmt, nullptr, &extralen);
    if (extralen > 0)
    {
        extra = (BYTE *)av_mallocz(extralen + AV_INPUT_BUFFER_PADDING_SIZE);
        getExtraData(*pmt, extra, &extralen);
    }

    // VC-1 advanced profile: locate the 0x000001xx start code in the
    // extradata; if it sits at offset 0, prepend a zero byte so it ends up at
    // offset 1 as the MFT expects
    if (codec == AV_CODEC_ID_VC1 && extralen)
    {
        size_t i = 0;
        for (i = 0; i < (extralen - 4); i++)
        {
            uint32_t code = AV_RB32(extra + i);
            if ((code & ~0xFF) == 0x00000100)
                break;
        }
        if (i == 0)
        {
            memmove(extra + 1, extra, extralen);
            *extra = 0;
            extralen++;
        }
        else if (i > 1)
        {
            DbgLog((LOG_TRACE, 10, L"-> VC-1 Header at position %u (should be 0 or 1)", i));
        }
    }

    if (extralen > 0)
    {
        m_vc1Header = new CVC1HeaderParser(extra, extralen, codec);
    }

    /* Create input type */
    m_nCodecId = codec;

    IMFMediaType *pMTIn = nullptr;
    // Check the creation result to avoid dereferencing a null media type
    hr = MF.CreateMediaType(&pMTIn);
    if (FAILED(hr))
    {
        DbgLog((LOG_TRACE, 10, L"-> Failed to create input media type"));
        av_freep(&extra);
        return hr;
    }

    pMTIn->SetUINT32(MF_MT_COMPRESSED, TRUE);
    pMTIn->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, FALSE);
    pMTIn->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, FALSE);

    pMTIn->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
    pMTIn->SetGUID(MF_MT_SUBTYPE, VerifySubtype(codec, pmt->subtype));

    MFSetAttributeSize(pMTIn, MF_MT_FRAME_SIZE, pBMI->biWidth, pBMI->biHeight);

    UINT32 rateNum = 0, rateDen = 0;
    MF.AverageTimePerFrameToFrameRate(rtAvg, &rateNum, &rateDen);
    MFSetAttributeRatio(pMTIn, MF_MT_FRAME_RATE, rateNum, rateDen);

    // Convert the display aspect ratio into a pixel aspect ratio
    if (dwARX != 0 && dwARY != 0)
    {
        int uParX = 1, uParY = 1;
        av_reduce(&uParX, &uParY, dwARX * pBMI->biHeight, dwARY * pBMI->biWidth, INT_MAX);
        MFSetAttributeRatio(pMTIn, MF_MT_PIXEL_ASPECT_RATIO, uParX, uParY);
    }

    pMTIn->SetBlob(MF_MT_USER_DATA, extra, (UINT32)extralen);
    av_freep(&extra);

    hr = m_pMFT->SetInputType(0, pMTIn, 0);
    if (FAILED(hr))
    {
        DbgLog((LOG_TRACE, 10, L"-> Failed to set input type on MFT"));
        // Release the media type on the failure path too (it used to leak here)
        SafeRelease(&pMTIn);
        return hr;
    }

    /* Create output type */
    hr = SelectOutputType();
    SafeRelease(&pMTIn);

    if (FAILED(hr))
    {
        DbgLog((LOG_TRACE, 10, L"-> Failed to set output type on MFT"));
        return hr;
    }

    // Query the negotiated output type for the interlacing mode
    IMFMediaType *pMTOut = nullptr;
    m_pMFT->GetOutputCurrentType(0, &pMTOut);
    m_bInterlaced =
        MFGetAttributeUINT32(pMTOut, MF_MT_INTERLACE_MODE, MFVideoInterlace_Unknown) > MFVideoInterlace_Progressive;
    SafeRelease(&pMTOut);

    // VC-1 with PTS timestamps requires manual output re-ordering
    m_bManualReorder = (codec == AV_CODEC_ID_VC1) && !(m_pCallback->GetDecodeFlags() & LAV_VIDEO_DEC_FLAG_ONLY_DTS);

    return S_OK;
}
// Pick a supported output type from the MFT's available list, preferring the
// first NV12 or YV12 type offered, and activate it via SetOutputType.
// Returns the SetOutputType result on success, or the (failing) result of
// GetOutputAvailableType once the list is exhausted.
// Fix: the original `break`ed out of the loop after SetOutputType without
// releasing pMTOut, leaking one media-type reference on every successful
// negotiation. The loop is restructured so SafeRelease always runs.
STDMETHODIMP CDecWMV9MFT::SelectOutputType()
{
    HRESULT hr = S_OK;
    m_OutPixFmt = LAVPixFmt_None;

    IMFMediaType *pMTOut = nullptr;
    for (int idx = 0; m_OutPixFmt == LAVPixFmt_None; idx++)
    {
        hr = m_pMFT->GetOutputAvailableType(0, idx, &pMTOut);
        if (FAILED(hr))
            break; // no more types to enumerate

        GUID outSubtype;
        if (SUCCEEDED(pMTOut->GetGUID(MF_MT_SUBTYPE, &outSubtype)))
        {
            if (outSubtype == MEDIASUBTYPE_NV12)
            {
                hr = m_pMFT->SetOutputType(0, pMTOut, 0);
                m_OutPixFmt = LAVPixFmt_NV12;
            }
            else if (outSubtype == MEDIASUBTYPE_YV12)
            {
                hr = m_pMFT->SetOutputType(0, pMTOut, 0);
                m_OutPixFmt = LAVPixFmt_YUV420;
            }
        }
        SafeRelease(&pMTOut); // release in all cases (fixes leak on match)
    }

    return hr;
}
// Allocate a 16-byte-aligned MF media buffer and fill it with a copy of the
// given data. Returns nullptr on allocation or lock failure.
// Fix: the original used `goto done`, which jumped over the initialization
// of `pOutBuffer` (ill-formed per [stmt.dcl]); restructured with early
// returns — behaviour is identical (nullptr on both failure paths).
IMFMediaBuffer *CDecWMV9MFT::CreateMediaBuffer(const BYTE *pData, DWORD dwDataLen)
{
    IMFMediaBuffer *pBuffer = nullptr;
    HRESULT hr = MF.CreateAlignedMemoryBuffer(dwDataLen, MF_16_BYTE_ALIGNMENT, &pBuffer);
    if (FAILED(hr))
    {
        DbgLog((LOG_ERROR, 10, L"Unable to allocate MF Media Buffer, hr: 0x%x", hr));
        return nullptr;
    }

    BYTE *pOutBuffer = nullptr;
    hr = pBuffer->Lock(&pOutBuffer, NULL, NULL);
    if (FAILED(hr))
    {
        SafeRelease(&pBuffer);
        DbgLog((LOG_ERROR, 10, L"Unable to lock MF Media Buffer, hr: 0x%x", hr));
        return nullptr;
    }

    memcpy(pOutBuffer, pData, dwDataLen);
    pBuffer->Unlock();
    pBuffer->SetCurrentLength(dwDataLen);

    return pBuffer;
}
// Feed one compressed frame to the MFT and drain any resulting output.
// When m_bManualReorder is set (VC-1 with PTS), a private timestamp queue
// mirrors the decoder's B-frame reordering: reference frames (I/P) are
// delayed by one slot via m_rtReorderBuffer, B-frames pass straight through.
// While m_bNeedKeyFrame is set (after a flush), frames are dropped until the
// first I-frame. Returns S_FALSE when the MFT cannot accept input.
STDMETHODIMP CDecWMV9MFT::Decode(const BYTE *buffer, int buflen, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop,
                                 BOOL bSyncPoint, BOOL bDiscontinuity, IMediaSample *pMediaSample)
{
    HRESULT hr = S_OK;
    DWORD dwStatus = 0;

    // Check whether the MFT is ready for input at all
    hr = m_pMFT->GetInputStatus(0, &dwStatus);
    if (FAILED(hr))
    {
        DbgLog((LOG_TRACE, 10, L"-> GetInputStatus() failed with hr: 0x%x", hr));
        return S_FALSE;
    }
    if (!(dwStatus & MFT_INPUT_STATUS_ACCEPT_DATA))
        return S_FALSE;

    // Picture-type inspection is only needed for reordering/keyframe logic
    if (m_vc1Header && (m_bManualReorder || m_bNeedKeyFrame))
    {
        AVPictureType pictype = m_vc1Header->ParseVC1PictureType(buffer, buflen);
        if (m_bManualReorder)
        {
            if (pictype == AV_PICTURE_TYPE_I || pictype == AV_PICTURE_TYPE_P)
            {
                // Reference frame: flush the previously held reference
                // timestamp and hold this one back by one slot
                if (m_bReorderBufferValid)
                    m_timestampQueue.push(m_rtReorderBuffer);
                m_rtReorderBuffer = rtStart;
                m_bReorderBufferValid = TRUE;
            }
            else
            {
                // B-frames are output immediately, keep their PTS in order
                m_timestampQueue.push(rtStart);
            }
        }

        if (m_bNeedKeyFrame)
        {
            if (pictype != AV_PICTURE_TYPE_I)
            {
                // Still waiting for a keyframe: drop this frame, and undo
                // the queue entry made above so timestamps stay in sync
                if (m_bManualReorder)
                    m_timestampQueue.pop();
                return S_OK;
            }
            else
            {
                m_bNeedKeyFrame = FALSE;
                bSyncPoint = TRUE;
            }
        }
    }

    // Wrap the input data into an MF sample
    IMFSample *pSample = nullptr;
    hr = MF.CreateSample(&pSample);
    if (FAILED(hr))
    {
        DbgLog((LOG_ERROR, 10, L"Unable to allocate MF Sample, hr: 0x%x", hr));
        return E_FAIL;
    }

    IMFMediaBuffer *pMFBuffer = CreateMediaBuffer(buffer, buflen);
    if (!pMFBuffer)
    {
        DbgLog((LOG_TRACE, 10, L"Unable to allocate media buffer"));
        SafeRelease(&pSample);
        return E_FAIL;
    }
    pSample->AddBuffer(pMFBuffer);

    // Attach timestamps when valid
    if (rtStart != AV_NOPTS_VALUE)
    {
        pSample->SetSampleTime(rtStart);
        if (rtStop != AV_NOPTS_VALUE && rtStop > (rtStart - 1))
            pSample->SetSampleDuration(rtStop - rtStart);
    }

    pSample->SetUINT32(MFSampleExtension_CleanPoint, bSyncPoint);
    pSample->SetUINT32(MFSampleExtension_Discontinuity, bDiscontinuity);

    hr = m_pMFT->ProcessInput(0, pSample, 0);
    if (hr == MF_E_NOTACCEPTING)
    {
        // Not accepting data right now, try to process output and try again
        ProcessOutput();
        hr = m_pMFT->ProcessInput(0, pSample, 0);
    }
    // The sample holds its own reference to the buffer; drop ours
    SafeRelease(&pMFBuffer);
    SafeRelease(&pSample);

    if (FAILED(hr))
    {
        DbgLog((LOG_TRACE, 10, L"-> ProcessInput failed with hr: 0x%x", hr));
        return E_FAIL;
    }

    return ProcessOutput();
}
// Copy a tightly packed plane (source row pitch == width) into a destination
// whose rows are `stride` bytes apart.
static inline void memcpy_plane(BYTE *dst, const BYTE *src, ptrdiff_t width, ptrdiff_t stride, int height)
{
    BYTE *out = dst;
    const BYTE *in = src;
    for (int row = 0; row < height; ++row, out += stride, in += width)
        memcpy(out, in, width);
}
// Fetch an output buffer of at least dwRequiredSize bytes from the pool,
// growing an existing free entry or appending a new one as needed.
// The returned buffer has an extra reference; ReleaseBuffer() returns it.
IMFMediaBuffer *CDecWMV9MFT::GetBuffer(DWORD dwRequiredSize)
{
    CAutoLock lock(&m_BufferCritSec);
    HRESULT hr;

    // Find the first unused pool entry
    Buffer *entry = nullptr;
    for (auto candidate : m_BufferQueue)
    {
        if (!candidate->used)
        {
            entry = candidate;
            break;
        }
    }

    if (!entry)
    {
        // Nothing free: create and register a new pool entry
        DbgLog((LOG_TRACE, 10, L"Allocating new buffer for WMV9 MFT"));
        entry = new Buffer();
        hr = MF.CreateAlignedMemoryBuffer(dwRequiredSize, MF_32_BYTE_ALIGNMENT, &entry->pBuffer);
        if (FAILED(hr))
        {
            delete entry;
            return nullptr;
        }
        entry->size = dwRequiredSize;
        m_BufferQueue.push_back(entry);
    }
    else if (entry->size < dwRequiredSize || !entry->pBuffer)
    {
        // Existing entry is too small (or lost its buffer): re-allocate
        SafeRelease(&entry->pBuffer);
        hr = MF.CreateAlignedMemoryBuffer(dwRequiredSize, MF_32_BYTE_ALIGNMENT, &entry->pBuffer);
        if (FAILED(hr))
            return nullptr;
        entry->size = dwRequiredSize;
    }

    entry->used = 1;
    entry->pBuffer->AddRef();
    entry->pBuffer->SetCurrentLength(0);
    return entry->pBuffer;
}
// Return a buffer obtained from GetBuffer() to the pool: mark the matching
// pool entry as free again and drop the reference taken by GetBuffer().
void CDecWMV9MFT::ReleaseBuffer(IMFMediaBuffer *pBuffer)
{
    CAutoLock lock(&m_BufferCritSec);
    for (auto entry : m_BufferQueue)
    {
        if (entry->pBuffer == pBuffer)
        {
            entry->used = 0;
            break;
        }
    }
    pBuffer->Release();
}
// LAVFrame destructor callback for zero-copy frames delivered out of
// ProcessOutput(): unlocks the MF buffer (stashed in data[3]) and returns
// it to the decoder's buffer pool (decoder instance stashed in priv_data).
void CDecWMV9MFT::wmv9_buffer_destruct(LAVFrame *pFrame)
{
    CDecWMV9MFT *pDec = (CDecWMV9MFT *)pFrame->priv_data;
    IMFMediaBuffer *pMFBuffer = (IMFMediaBuffer *)pFrame->data[3];
    pMFBuffer->Unlock();
    pDec->ReleaseBuffer(pMFBuffer);
}
// Pull decoded frames out of the MFT and deliver them downstream.
// Handles dynamic output-format changes, timestamp assignment (manual
// reorder queue for VC-1, otherwise MFT-provided timestamps) and, when the
// frame width is properly aligned, zero-copy delivery of the locked MF
// buffer. Returns S_OK after delivering, S_FALSE when the MFT needs more
// input, or an error code.
STDMETHODIMP CDecWMV9MFT::ProcessOutput()
{
    HRESULT hr = S_OK;
    DWORD dwStatus = 0;

    MFT_OUTPUT_STREAM_INFO outputInfo = {0};
    m_pMFT->GetOutputStreamInfo(0, &outputInfo);

    IMFMediaBuffer *pMFBuffer = nullptr;
    // The WMV9 MFT is expected to require caller-provided samples.
    // NOTE(review): if the MFT ever did provide its own samples, pMFBuffer
    // would stay null and the ReleaseBuffer() calls below would dereference
    // it — this is only guarded by the ASSERT.
    ASSERT(!(outputInfo.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES));

    MFT_OUTPUT_DATA_BUFFER OutputBuffer = {0};
    if (!(outputInfo.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES))
    {
        // Wrap a pooled buffer into a sample for the MFT to decode into
        pMFBuffer = GetBuffer(outputInfo.cbSize);
        if (!pMFBuffer)
        {
            DbgLog((LOG_TRACE, 10, L"Unable to allocate media buffere"));
            return E_FAIL;
        }

        IMFSample *pSampleOut = nullptr;
        hr = MF.CreateSample(&pSampleOut);
        if (FAILED(hr))
        {
            DbgLog((LOG_TRACE, 10, L"Unable to allocate MF sample, hr: 0x%x", hr));
            ReleaseBuffer(pMFBuffer);
            return E_FAIL;
        }

        pSampleOut->AddBuffer(pMFBuffer);
        OutputBuffer.pSample = pSampleOut;
    }
    hr = m_pMFT->ProcessOutput(0, 1, &OutputBuffer, &dwStatus);

    // We don't process events, just release them
    SafeRelease(&OutputBuffer.pEvents);

    // handle stream format changes: re-negotiate the output type and retry
    if (hr == MF_E_TRANSFORM_STREAM_CHANGE || OutputBuffer.dwStatus == MFT_OUTPUT_DATA_BUFFER_FORMAT_CHANGE)
    {
        SafeRelease(&OutputBuffer.pSample);
        ReleaseBuffer(pMFBuffer);
        hr = SelectOutputType();
        if (FAILED(hr))
        {
            DbgLog((LOG_TRACE, 10, L"-> Failed to handle stream change, hr: %x", hr));
            return E_FAIL;
        }
        // try again with the new type, it should work now!
        return ProcessOutput();
    }

    // the MFT generated no output, discard the sample and return
    if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT || OutputBuffer.dwStatus == MFT_OUTPUT_DATA_BUFFER_NO_SAMPLE)
    {
        SafeRelease(&OutputBuffer.pSample);
        ReleaseBuffer(pMFBuffer);
        return S_FALSE;
    }

    // unknown error condition
    if (FAILED(hr))
    {
        DbgLog((LOG_TRACE, 10, L"-> ProcessOutput failed with hr: %x", hr));
        SafeRelease(&OutputBuffer.pSample);
        ReleaseBuffer(pMFBuffer);
        return E_FAIL;
    }

    LAVFrame *pFrame = nullptr;
    AllocateFrame(&pFrame);

    // Frame geometry and aspect ratio from the current output type
    IMFMediaType *pMTOut = nullptr;
    m_pMFT->GetOutputCurrentType(0, &pMTOut);
    MFGetAttributeSize(pMTOut, MF_MT_FRAME_SIZE, (UINT32 *)&pFrame->width, (UINT32 *)&pFrame->height);
    pFrame->format = m_OutPixFmt;

    AVRational pixel_aspect_ratio = {1, 1};
    MFGetAttributeRatio(pMTOut, MF_MT_PIXEL_ASPECT_RATIO, (UINT32 *)&pixel_aspect_ratio.num,
                        (UINT32 *)&pixel_aspect_ratio.den);

    // Convert pixel aspect ratio to display aspect ratio
    AVRational display_aspect_ratio = {0, 0};
    av_reduce(&display_aspect_ratio.num, &display_aspect_ratio.den, (int64_t)pixel_aspect_ratio.num * pFrame->width,
              (int64_t)pixel_aspect_ratio.den * pFrame->height, INT_MAX);
    pFrame->aspect_ratio = display_aspect_ratio;

    // Interlacing flags come per-sample; field order may be user-overridden
    pFrame->interlaced = MFGetAttributeUINT32(OutputBuffer.pSample, MFSampleExtension_Interlaced, FALSE);
    pFrame->repeat = MFGetAttributeUINT32(OutputBuffer.pSample, MFSampleExtension_RepeatFirstField, FALSE);

    LAVDeintFieldOrder fo = m_pSettings->GetDeintFieldOrder();
    pFrame->tff = (fo == DeintFieldOrder_Auto)
                      ? !MFGetAttributeUINT32(OutputBuffer.pSample, MFSampleExtension_BottomFieldFirst, FALSE)
                      : (fo == DeintFieldOrder_TopFieldFirst);

    // Once one interlaced frame was seen, remember the stream as interlaced
    if (pFrame->interlaced && !m_bInterlaced)
        m_bInterlaced = TRUE;

    // Combine per-frame flag with the user's deinterlacing mode
    pFrame->interlaced =
        (pFrame->interlaced || (m_bInterlaced && m_pSettings->GetDeinterlacingMode() == DeintMode_Aggressive) ||
         m_pSettings->GetDeinterlacingMode() == DeintMode_Force) &&
        !(m_pSettings->GetDeinterlacingMode() == DeintMode_Disable);

    // Extended color information from the negotiated output type
    pFrame->ext_format.VideoPrimaries = MFGetAttributeUINT32(pMTOut, MF_MT_VIDEO_PRIMARIES, MFVideoPrimaries_Unknown);
    pFrame->ext_format.VideoTransferFunction =
        MFGetAttributeUINT32(pMTOut, MF_MT_TRANSFER_FUNCTION, MFVideoTransFunc_Unknown);
    pFrame->ext_format.VideoTransferMatrix =
        MFGetAttributeUINT32(pMTOut, MF_MT_YUV_MATRIX, MFVideoTransferMatrix_Unknown);
    pFrame->ext_format.VideoChromaSubsampling =
        MFGetAttributeUINT32(pMTOut, MF_MT_VIDEO_CHROMA_SITING, MFVideoChromaSubsampling_Unknown);
    pFrame->ext_format.NominalRange = MFGetAttributeUINT32(pMTOut, MF_MT_VIDEO_NOMINAL_RANGE, MFNominalRange_Unknown);

    // HACK: don't flag range=limited if its the only value set, since its also the implied default, this helps to avoid
    // a reconnect The MFT always sets this value, even if the bitstream says nothing about it, causing a reconnect on
    // every vc1/wmv3 file
    if (pFrame->ext_format.value == 0x2000)
        pFrame->ext_format.value = 0;

    // Timestamps
    if (m_bManualReorder)
    {
        // Use our own PTS queue (filled in Decode) instead of MFT timestamps
        if (!m_timestampQueue.empty())
        {
            pFrame->rtStart = m_timestampQueue.front();
            m_timestampQueue.pop();

            LONGLONG llDuration = 0;
            hr = OutputBuffer.pSample->GetSampleDuration(&llDuration);
            if (SUCCEEDED(hr) && llDuration > 0)
            {
                pFrame->rtStop = pFrame->rtStart + llDuration;
            }
        }
    }
    else
    {
        // Trust the MFT-supplied sample time/duration
        LONGLONG llTimestamp = 0;
        hr = OutputBuffer.pSample->GetSampleTime(&llTimestamp);
        if (SUCCEEDED(hr))
        {
            pFrame->rtStart = llTimestamp;

            LONGLONG llDuration = 0;
            hr = OutputBuffer.pSample->GetSampleDuration(&llDuration);
            if (SUCCEEDED(hr) && llDuration > 0)
            {
                pFrame->rtStop = pFrame->rtStart + llDuration;
            }
        }
    }

    SafeRelease(&pMTOut);

    // Lock memory in the buffer
    BYTE *pBuffer = nullptr;
    pMFBuffer->Lock(&pBuffer, NULL, NULL);

    // Check alignment
    // If not properly aligned, we need to make the data aligned.
    int alignment = (m_OutPixFmt == LAVPixFmt_NV12) ? 16 : 32;
    if ((pFrame->width % alignment) != 0)
    {
        // Misaligned: copy plane-by-plane into freshly allocated, aligned
        // frame buffers and return the MF buffer to the pool right away
        hr = AllocLAVFrameBuffers(pFrame);
        if (FAILED(hr))
        {
            pMFBuffer->Unlock();
            ReleaseBuffer(pMFBuffer);
            SafeRelease(&OutputBuffer.pSample);
            return hr;
        }
        size_t ySize = pFrame->width * pFrame->height;

        memcpy_plane(pFrame->data[0], pBuffer, pFrame->width, pFrame->stride[0], pFrame->height);
        if (m_OutPixFmt == LAVPixFmt_NV12)
        {
            // Interleaved UV plane: full width, half height
            memcpy_plane(pFrame->data[1], pBuffer + ySize, pFrame->width, pFrame->stride[1], pFrame->height / 2);
        }
        else if (m_OutPixFmt == LAVPixFmt_YUV420)
        {
            // YV12 source layout is Y-V-U; LAVFrame wants data[1]=U, data[2]=V
            size_t uvSize = ySize / 4;
            memcpy_plane(pFrame->data[2], pBuffer + ySize, pFrame->width / 2, pFrame->stride[2], pFrame->height / 2);
            memcpy_plane(pFrame->data[1], pBuffer + ySize + uvSize, pFrame->width / 2, pFrame->stride[1],
                         pFrame->height / 2);
        }
        pMFBuffer->Unlock();
        ReleaseBuffer(pMFBuffer);
    }
    else
    {
        // Aligned: hand the locked MF buffer to the frame directly
        // (zero-copy); wmv9_buffer_destruct unlocks/returns it later
        if (m_OutPixFmt == LAVPixFmt_NV12)
        {
            pFrame->data[0] = pBuffer;
            pFrame->data[1] = pBuffer + pFrame->width * pFrame->height;
            pFrame->stride[0] = pFrame->stride[1] = pFrame->width;
        }
        else if (m_OutPixFmt == LAVPixFmt_YUV420)
        {
            // YV12 layout: Y plane, then V, then U
            pFrame->data[0] = pBuffer;
            pFrame->data[2] = pBuffer + pFrame->width * pFrame->height;
            pFrame->data[1] = pFrame->data[2] + (pFrame->width / 2) * (pFrame->height / 2);
            pFrame->stride[0] = pFrame->width;
            pFrame->stride[1] = pFrame->stride[2] = pFrame->width / 2;
        }
        pFrame->data[3] = (BYTE *)pMFBuffer;
        pFrame->destruct = wmv9_buffer_destruct;
        pFrame->priv_data = this;
    }
    pFrame->flags |= LAV_FRAME_FLAG_BUFFER_MODIFY;
    Deliver(pFrame);

    SafeRelease(&OutputBuffer.pSample);

    // More output still pending inside the MFT: recurse to drain it
    if (OutputBuffer.dwStatus == MFT_OUTPUT_DATA_BUFFER_INCOMPLETE)
        return ProcessOutput();
    return hr;
}
// Flush the MFT and reset all timestamp/reorder state; decoding resumes
// only at the next keyframe.
STDMETHODIMP CDecWMV9MFT::Flush()
{
    DbgLog((LOG_TRACE, 10, L"CDecWMV9MFT::Flush(): Flushing WMV9 decoder"));

    m_pMFT->ProcessMessage(MFT_MESSAGE_COMMAND_FLUSH, 0);

    // Drop all queued timestamps and the pending reorder slot
    while (!m_timestampQueue.empty())
        m_timestampQueue.pop();
    m_rtReorderBuffer = AV_NOPTS_VALUE;
    m_bReorderBufferValid = FALSE;
    m_bNeedKeyFrame = TRUE;

    return __super::Flush();
}
// Drain the decoder at end of stream: flush the held reorder timestamp into
// the queue so the last reference frame gets its PTS, then drain the MFT
// and deliver whatever it still holds.
STDMETHODIMP CDecWMV9MFT::EndOfStream()
{
    if (m_bReorderBufferValid)
        m_timestampQueue.push(m_rtReorderBuffer);
    m_rtReorderBuffer = AV_NOPTS_VALUE;
    m_bReorderBufferValid = FALSE;

    m_pMFT->ProcessMessage(MFT_MESSAGE_COMMAND_DRAIN, 0);
    ProcessOutput();
    return S_OK;
}
// Report the negotiated output pixel format (NV12 as the default before
// negotiation) and its bit depth (always 8 bit for this decoder).
STDMETHODIMP CDecWMV9MFT::GetPixelFormat(LAVPixelFormat *pPix, int *pBpp)
{
    if (pPix)
    {
        *pPix = (m_OutPixFmt == LAVPixFmt_None) ? LAVPixFmt_NV12 : m_OutPixFmt;
    }
    if (pBpp)
        *pBpp = 8;
    return S_OK;
}
// Report whether the stream should be treated as interlaced: either the MFT
// output (or a decoded frame) flagged interlacing, or deinterlacing is
// forced via user settings. bAllowGuess is unused by this implementation.
STDMETHODIMP_(BOOL) CDecWMV9MFT::IsInterlaced(BOOL bAllowGuess)
{
    return (m_bInterlaced || m_pSettings->GetDeinterlacingMode() == DeintMode_Force);
}
| 23,173
|
C++
|
.cpp
| 641
| 28.954758
| 120
| 0.603407
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| false
| true
| true
| false
|
22,185
|
dxva_common.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/decoders/dxva2/dxva_common.cpp
|
/*
* Copyright (C) 2011-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "dxva_common.h"
#include "moreuuids.h"
#define DXVA_SURFACE_BASE_ALIGN 16
// Round a surface dimension up to the alignment required for DXVA decoding.
// 16 pixels by default; MPEG-2 uses 32 (needed on Intel cards, harmless on
// all others), HEVC and AV1 use 128.
DWORD dxva_align_dimensions(AVCodecID codec, DWORD dim)
{
    int align;
    switch (codec)
    {
    case AV_CODEC_ID_MPEG2VIDEO: align = DXVA_SURFACE_BASE_ALIGN << 1; break;
    case AV_CODEC_ID_HEVC:
    case AV_CODEC_ID_AV1: align = 128; break;
    default: align = DXVA_SURFACE_BASE_ALIGN; break;
    }
    return FFALIGN(dim, align);
}
////////////////////////////////////////////////////////////////////////////////
// Codec Maps
////////////////////////////////////////////////////////////////////////////////
/*
DXVA2 Codec Mappings, as defined by VLC
*/
static const int prof_mpeg2_main[] = { FF_PROFILE_MPEG2_SIMPLE, FF_PROFILE_MPEG2_MAIN, FF_PROFILE_UNKNOWN };
static const int prof_h264_high[] = { FF_PROFILE_H264_CONSTRAINED_BASELINE, FF_PROFILE_H264_MAIN, FF_PROFILE_H264_HIGH, FF_PROFILE_UNKNOWN };
static const int prof_hevc_main[] = { FF_PROFILE_HEVC_MAIN, FF_PROFILE_UNKNOWN };
static const int prof_hevc_main10[] = { FF_PROFILE_HEVC_MAIN_10, FF_PROFILE_UNKNOWN };
static const int prof_vp9_0[] = { FF_PROFILE_VP9_0, FF_PROFILE_UNKNOWN };
static const int prof_vp9_2_10bit[] = { FF_PROFILE_VP9_2, FF_PROFILE_UNKNOWN };
static const int prof_av1_0[] = {FF_PROFILE_AV1_MAIN, FF_PROFILE_UNKNOWN};
static const int prof_av1_1[] = {FF_PROFILE_AV1_HIGH, FF_PROFILE_UNKNOWN};
static const int prof_av1_2[] = {FF_PROFILE_AV1_PROFESSIONAL, FF_PROFILE_UNKNOWN};
/* XXX Preferred modes must come first */
// clang-format off
const dxva_mode_t dxva_modes[] = {
/* MPEG-1/2 */
{ "MPEG-2 variable-length decoder", &DXVA2_ModeMPEG2_VLD, AV_CODEC_ID_MPEG2VIDEO, prof_mpeg2_main },
{ "MPEG-2 & MPEG-1 variable-length decoder", &DXVA2_ModeMPEG2and1_VLD, AV_CODEC_ID_MPEG2VIDEO, prof_mpeg2_main },
{ "MPEG-2 motion compensation", &DXVA2_ModeMPEG2_MoComp, 0 },
{ "MPEG-2 inverse discrete cosine transform", &DXVA2_ModeMPEG2_IDCT, 0 },
{ "MPEG-1 variable-length decoder", &DXVA2_ModeMPEG1_VLD, 0 },
/* H.264 */
{ "H.264 variable-length decoder, film grain technology", &DXVA2_ModeH264_F, AV_CODEC_ID_H264, prof_h264_high },
{ "H.264 variable-length decoder, no film grain technology", &DXVA2_ModeH264_E, AV_CODEC_ID_H264, prof_h264_high },
{ "H.264 variable-length decoder, no film grain technology, FMO/ASO", &DXVA_ModeH264_VLD_WithFMOASO_NoFGT, AV_CODEC_ID_H264, prof_h264_high },
{ "H.264 variable-length decoder, no film grain technology, Flash", &DXVA_ModeH264_VLD_NoFGT_Flash, AV_CODEC_ID_H264, prof_h264_high },
{ "H.264 inverse discrete cosine transform, film grain technology", &DXVA2_ModeH264_D, 0 },
{ "H.264 inverse discrete cosine transform, no film grain technology", &DXVA2_ModeH264_C, 0 },
{ "H.264 motion compensation, film grain technology", &DXVA2_ModeH264_B, 0 },
{ "H.264 motion compensation, no film grain technology", &DXVA2_ModeH264_A, 0 },
/* WMV */
{ "Windows Media Video 8 motion compensation", &DXVA2_ModeWMV8_B, 0 },
{ "Windows Media Video 8 post processing", &DXVA2_ModeWMV8_A, 0 },
{ "Windows Media Video 9 IDCT", &DXVA2_ModeWMV9_C, 0 },
{ "Windows Media Video 9 motion compensation", &DXVA2_ModeWMV9_B, 0 },
{ "Windows Media Video 9 post processing", &DXVA2_ModeWMV9_A, 0 },
/* VC-1 */
{ "VC-1 variable-length decoder (2010)", &DXVA2_ModeVC1_D2010, AV_CODEC_ID_VC1 },
{ "VC-1 variable-length decoder (2010)", &DXVA2_ModeVC1_D2010, AV_CODEC_ID_WMV3 },
{ "VC-1 variable-length decoder", &DXVA2_ModeVC1_D, AV_CODEC_ID_VC1 },
{ "VC-1 variable-length decoder", &DXVA2_ModeVC1_D, AV_CODEC_ID_WMV3 },
{ "VC-1 inverse discrete cosine transform", &DXVA2_ModeVC1_C, 0 },
{ "VC-1 motion compensation", &DXVA2_ModeVC1_B, 0 },
{ "VC-1 post processing", &DXVA2_ModeVC1_A, 0 },
/* MPEG4-ASP */
{ "MPEG-4 Part 2 nVidia bitstream decoder", &DXVA_nVidia_MPEG4_ASP, 0 },
{ "MPEG-4 Part 2 variable-length decoder, Simple Profile", &DXVA_ModeMPEG4pt2_VLD_Simple, 0 },
{ "MPEG-4 Part 2 variable-length decoder, Simple&Advanced Profile, no GMC", &DXVA_ModeMPEG4pt2_VLD_AdvSimple_NoGMC, 0 },
{ "MPEG-4 Part 2 variable-length decoder, Simple&Advanced Profile, GMC", &DXVA_ModeMPEG4pt2_VLD_AdvSimple_GMC, 0 },
{ "MPEG-4 Part 2 variable-length decoder, Simple&Advanced Profile, Avivo", &DXVA_ModeMPEG4pt2_VLD_AdvSimple_Avivo, 0 },
/* H.264 MVC */
{ "H.264 MVC variable-length decoder, stereo, progressive", &DXVA_ModeH264_VLD_Stereo_Progressive_NoFGT, 0 },
{ "H.264 MVC variable-length decoder, stereo", &DXVA_ModeH264_VLD_Stereo_NoFGT, 0 },
{ "H.264 MVC variable-length decoder, multiview", &DXVA_ModeH264_VLD_Multiview_NoFGT, 0 },
/* H.264 SVC */
{ "H.264 SVC variable-length decoder, baseline", &DXVA_ModeH264_VLD_SVC_Scalable_Baseline, 0 },
{ "H.264 SVC variable-length decoder, constrained baseline", &DXVA_ModeH264_VLD_SVC_Restricted_Scalable_Baseline, 0 },
{ "H.264 SVC variable-length decoder, high", &DXVA_ModeH264_VLD_SVC_Scalable_High, 0 },
{ "H.264 SVC variable-length decoder, constrained high progressive", &DXVA_ModeH264_VLD_SVC_Restricted_Scalable_High_Progressive, 0 },
/* HEVC / H.265 */
{ "HEVC / H.265 variable-length decoder, main", &DXVA_ModeHEVC_VLD_Main, AV_CODEC_ID_HEVC, prof_hevc_main },
{ "HEVC / H.265 variable-length decoder, main10", &DXVA_ModeHEVC_VLD_Main10, AV_CODEC_ID_HEVC, prof_hevc_main10 },
{ "HEVC / H.265 variable-length decoder, main12", &DXVA_ModeHEVC_VLD_Main12, 0 },
{ "HEVC / H.265 variable-length decoder, main10 422", &DXVA_ModeHEVC_VLD_Main10_422, 0 },
{ "HEVC / H.265 variable-length decoder, main12 422", &DXVA_ModeHEVC_VLD_Main12_422, 0 },
{ "HEVC / H.265 variable-length decoder, main 444", &DXVA_ModeHEVC_VLD_Main_444, 0 },
{ "HEVC / H.265 variable-length decoder, main10 extended", &DXVA_ModeHEVC_VLD_Main10_Ext, 0 },
{ "HEVC / H.265 variable-length decoder, main10 444", &DXVA_ModeHEVC_VLD_Main10_444, 0 },
{ "HEVC / H.265 variable-length decoder, main12 444", &DXVA_ModeHEVC_VLD_Main12_444, 0 },
{ "HEVC / H.265 variable-length decoder, main16", &DXVA_ModeHEVC_VLD_Main16, 0 },
{ "HEVC / H.265 variable-length decoder, monochrome", &DXVA_ModeHEVC_VLD_Monochrome, 0 },
{ "HEVC / H.265 variable-length decoder, monochrome10", &DXVA_ModeHEVC_VLD_Monochrome10, 0 },
/* VP8/9 */
{ "VP9 variable-length decoder, profile 0", &DXVA_ModeVP9_VLD_Profile0, AV_CODEC_ID_VP9, prof_vp9_0 },
{ "VP9 variable-length decoder, 10bit, profile 2", &DXVA_ModeVP9_VLD_10bit_Profile2, AV_CODEC_ID_VP9, prof_vp9_2_10bit },
{ "VP8 variable-length decoder", &DXVA_ModeVP8_VLD, 0 },
/* AV1 */
{ "AV1 variable-length decoder, profile 0", &DXVA_ModeAV1_VLD_Profile0, AV_CODEC_ID_AV1, prof_av1_0 },
{ "AV1 variable-length decoder, profile 1", &DXVA_ModeAV1_VLD_Profile1, 0 },
{ "AV1 variable-length decoder, profile 2", &DXVA_ModeAV1_VLD_Profile2, 0 },
{ "AV1 variable-length decoder, profile 2 12-bit", &DXVA_ModeAV1_VLD_12bit_Profile2, 0 },
{ "AV1 variable-length decoder, profile 2 12-bit 4:2:0", &DXVA_ModeAV1_VLD_12bit_Profile2_420, 0 },
/* Intel specific modes (only useful on older GPUs) */
{ "H.264 variable-length decoder, no film grain technology (Intel ClearVideo)", &DXVADDI_Intel_ModeH264_E, AV_CODEC_ID_H264, prof_h264_high },
{ "H.264 inverse discrete cosine transform, no film grain technology (Intel)", &DXVADDI_Intel_ModeH264_C, 0 },
{ "H.264 motion compensation, no film grain technology (Intel)", &DXVADDI_Intel_ModeH264_A, 0 },
{ "VC-1 variable-length decoder 2 (Intel)", &DXVA_Intel_VC1_ClearVideo_2, 0 },
{ "VC-1 variable-length decoder (Intel)", &DXVA_Intel_VC1_ClearVideo, 0 },
{ nullptr, nullptr, 0 }
};
// clang-format on
// Look up the mode-table entry for a decoder GUID.
// The table is terminated by an entry with a null name; returns nullptr
// when the GUID is not known.
const dxva_mode_t *get_dxva_mode_from_guid(const GUID *guid)
{
    for (const dxva_mode_t *mode = dxva_modes; mode->name; ++mode)
    {
        if (IsEqualGUID(*mode->guid, *guid))
            return mode;
    }
    return nullptr;
}
// Check whether a DXVA mode can decode the given codec/profile.
// Returns 1 when compatible, 0 otherwise. Modes without a profile list, or
// streams with an unknown profile, are accepted on a codec match alone.
int check_dxva_mode_compatibility(const dxva_mode_t *mode, int codec, int profile, bool b8Bit)
{
    if (mode->codec != codec)
        return 0;

    // No profile restrictions, or the stream's profile is unknown: accept
    if (!mode->profiles || profile == FF_PROFILE_UNKNOWN)
        return 1;

    for (const int *p = mode->profiles; *p != FF_PROFILE_UNKNOWN; ++p)
    {
        if (*p == profile)
            return 1;
    }

    /* hevc main and main10 are very similar, and in some cases streams can be flagged as main10, but actually contain 8-bit content */
    if (codec == AV_CODEC_ID_HEVC && mode->profiles[0] == FF_PROFILE_HEVC_MAIN && profile == FF_PROFILE_HEVC_MAIN_10 && b8Bit)
        return 1;

    return 0;
}
// Maximum number of H.264 reference frames the DPB can hold at the given
// macroblock count, capped at dpblimit. The ctx argument is unused by the
// macro itself (kept for call-site readability).
#define H264_MAX_REF_DPB(ctx, dpbsize, dpblimit, mbcount) (min(dpblimit, (dpbsize) / (mbcount)))
// Decide whether a stream is unsuitable for DXVA hardware decoding.
// Returns 1 when software decoding should be used instead (unsupported
// profile, pixel format, or reference-frame count), 0 when the stream looks
// hardware-compatible. AV_PIX_FMT_NONE and the hw pixfmt itself are always
// accepted since the actual format may not be known yet.
int check_dxva_codec_profile(const AVCodecContext *ctx, int hwpixfmt)
{
    AVCodecID codec = ctx->codec_id;
    AVPixelFormat pix_fmt = ctx->pix_fmt;
    int profile = ctx->profile;
    int level = ctx->level;
    // check mpeg2 pixfmt (hardware only does 4:2:0)
    if (codec == AV_CODEC_ID_MPEG2VIDEO && pix_fmt != AV_PIX_FMT_YUV420P && pix_fmt != AV_PIX_FMT_YUVJ420P && pix_fmt != hwpixfmt && pix_fmt != AV_PIX_FMT_NONE)
        return 1;
    // check h264 pixfmt (rejects 4:2:2 / 4:4:4 / 10-bit content)
    if (codec == AV_CODEC_ID_H264 && pix_fmt != AV_PIX_FMT_YUV420P && pix_fmt != AV_PIX_FMT_YUVJ420P && pix_fmt != hwpixfmt && pix_fmt != AV_PIX_FMT_NONE)
        return 1;
    // check h264 profile
    if (codec == AV_CODEC_ID_H264 && profile != FF_PROFILE_UNKNOWN && !H264_CHECK_PROFILE(profile))
        return 1;
    // H.264 Level 5.1 ref frame limits: reject streams that need more DPB
    // references than the hardware limit for this resolution
    const int h264mb_count = (ctx->coded_width / 16) * (ctx->coded_height / 16);
    if (codec == AV_CODEC_ID_H264 && h264mb_count > 0 && ctx->refs > H264_MAX_REF_DPB(ctx, 184320, 16, h264mb_count))
        return 1;
    // check wmv/vc1 profile (complex profile has no DXVA support)
    if ((codec == AV_CODEC_ID_WMV3 || codec == AV_CODEC_ID_VC1) && profile == FF_PROFILE_VC1_COMPLEX)
        return 1;
    // check hevc profile/pixfmt (main / main10 with 4:2:0 only)
    if (codec == AV_CODEC_ID_HEVC && (!HEVC_CHECK_PROFILE(profile) || (pix_fmt != AV_PIX_FMT_YUV420P && pix_fmt != AV_PIX_FMT_YUVJ420P && pix_fmt != AV_PIX_FMT_YUV420P10 && pix_fmt != hwpixfmt && pix_fmt != AV_PIX_FMT_NONE)))
        return 1;
    // check vp9 profile/pixfmt (profile 0, or profile 2 in 10-bit)
    if (codec == AV_CODEC_ID_VP9 && (!VP9_CHECK_PROFILE(profile) || (pix_fmt != AV_PIX_FMT_YUV420P && pix_fmt != AV_PIX_FMT_YUV420P10 && pix_fmt != hwpixfmt && pix_fmt != AV_PIX_FMT_NONE)))
        return 1;
    // check av1 profile/pixfmt (main profile only)
    if (codec == AV_CODEC_ID_AV1 && (profile != FF_PROFILE_AV1_MAIN || (pix_fmt != AV_PIX_FMT_YUV420P && pix_fmt != AV_PIX_FMT_YUV420P10 && pix_fmt != hwpixfmt && pix_fmt != AV_PIX_FMT_NONE)))
        return 1;
    return 0;
}
| 14,142
|
C++
|
.cpp
| 186
| 72.688172
| 225
| 0.548297
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| true
| true
| false
|
22,188
|
VC1HeaderParser.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/parsers/VC1HeaderParser.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "VC1HeaderParser.h"
#pragma warning(push)
#pragma warning(disable : 4101)
#pragma warning(disable : 5033)
extern "C"
{
#define AVCODEC_X86_MATHOPS_H
#include "libavcodec/get_bits.h"
#include "libavcodec/unary.h"
extern __declspec(dllimport) const AVRational ff_vc1_pixel_aspect[16];
};
#pragma warning(pop)
/** Markers used in VC-1 AP frame data */
//@{
enum VC1Code
{
VC1_CODE_RES0 = 0x00000100,
VC1_CODE_ENDOFSEQ = 0x0000010A,
VC1_CODE_SLICE,
VC1_CODE_FIELD,
VC1_CODE_FRAME,
VC1_CODE_ENTRYPOINT,
VC1_CODE_SEQHDR,
};
//@}
/** Available Profiles */
//@{
enum Profile
{
PROFILE_SIMPLE,
PROFILE_MAIN,
PROFILE_COMPLEX, ///< TODO: WMV9 specific
PROFILE_ADVANCED
};
//@}
enum FrameCodingMode
{
PROGRESSIVE = 0, ///< in the bitstream is reported as 00b
ILACE_FRAME, ///< in the bitstream is reported as 10b
ILACE_FIELD ///< in the bitstream is reported as 11b
};
#define IS_MARKER(x) (((x) & ~0xFF) == VC1_CODE_RES0)
/** Find VC-1 marker in buffer
* @return position where next marker starts or end of buffer if no marker found
*/
/** Find the next VC-1 startcode (0x000001xx) in [src, end).
 * @return pointer to the first byte of the marker, or `end` if none found
 */
static inline const uint8_t *find_next_marker(const uint8_t *src, const uint8_t *end)
{
    if (end - src < 4)
        return end;
    // Sliding 32-bit window over the buffer, seeded with a non-marker value
    uint32_t state = 0xFFFFFFFF;
    for (const uint8_t *p = src; p < end;)
    {
        state = (state << 8) | *p++;
        if ((state & ~0xFF) == 0x00000100)
            return p - 4;
    }
    return end;
}
/** Remove VC-1 startcode-emulation-prevention bytes.
 * A 0x03 byte following 0x00 0x00 is dropped when the next byte is < 4.
 * @return number of bytes written to dst (never more than size)
 */
static inline int vc1_unescape_buffer(const uint8_t *src, int size, uint8_t *dst)
{
    // Buffers shorter than a full escape sequence are copied verbatim
    if (size < 4)
    {
        memcpy(dst, src, size);
        return size;
    }
    int written = 0;
    for (int i = 0; i < size; i++, src++)
    {
        const bool is_escape =
            src[0] == 3 && i >= 2 && src[-1] == 0 && src[-2] == 0 && i < size - 1 && src[1] < 4;
        if (is_escape)
        {
            // Drop the 0x03 and emit the byte it protected
            dst[written++] = src[1];
            ++src;
            ++i;
        }
        else
        {
            dst[written++] = *src;
        }
    }
    return written;
}
// Construct the parser and immediately parse the sequence header out of the
// given codec extradata; results land in the public `hdr` struct.
CVC1HeaderParser::CVC1HeaderParser(const BYTE *pData, size_t length, AVCodecID codec)
{
    memset(&hdr, 0, sizeof(hdr));
    ParseVC1Header(pData, length, codec);
}

// No owned resources to free; hdr is a plain member struct.
CVC1HeaderParser::~CVC1HeaderParser(void)
{
}
// Parse codec extradata and extract the sequence header into `hdr`.
// VC-1 (Advanced Profile) extradata is a chain of 0x000001xx startcode
// units whose payloads must be unescaped before bit parsing; WMV3 extradata
// is the raw sequence header bits.
void CVC1HeaderParser::ParseVC1Header(const BYTE *pData, size_t length, AVCodecID codec)
{
    GetBitContext gb;
    if (codec == AV_CODEC_ID_VC1)
    {
        if (length < 16)
            return;
        const uint8_t *start = pData;
        const uint8_t *end = start + length;
        const uint8_t *next = nullptr;
        int size, buf2_size;
        uint8_t *buf2;
        // Scratch buffer for the unescaped payload (worst case: same size).
        // NOTE(review): av_mallocz can return null; buf2 is used unchecked.
        buf2 = (uint8_t *)av_mallocz(length + AV_INPUT_BUFFER_PADDING_SIZE);

        start = find_next_marker(start, end);
        next = start;
        // Walk every startcode-delimited unit in the extradata
        for (; next < end; start = next)
        {
            next = find_next_marker(start + 4, end);
            size = (int)(next - start - 4); // payload bytes after the marker
            if (size <= 0)
                continue;
            buf2_size = vc1_unescape_buffer(start + 4, size, buf2);
            init_get_bits(&gb, buf2, buf2_size * 8);
            switch (AV_RB32(start))
            {
            case VC1_CODE_SEQHDR: VC1ParseSequenceHeader(&gb); break;
            }
        }
        av_freep(&buf2);
    }
    else if (codec == AV_CODEC_ID_WMV3)
    {
        // WMV3: extradata is the sequence header itself, no escaping
        if (length < 4)
            return;
        init_get_bits8(&gb, pData, (int)length);
        VC1ParseSequenceHeader(&gb);
    }
}
// Decode the sequence header bits into `hdr`. The Advanced Profile (VC-1)
// and Simple/Main (WMV3) layouts differ; both paths set hdr.valid once the
// fixed fields have been read.
void CVC1HeaderParser::VC1ParseSequenceHeader(GetBitContext *gb)
{
    hdr.profile = get_bits(gb, 2);
    if (hdr.profile == PROFILE_ADVANCED)
    {
        hdr.valid = 1;
        hdr.level = get_bits(gb, 3);
        skip_bits(gb, 2); // Chroma Format, only 1 should be set for 4:2:0
        skip_bits(gb, 3); // frmrtq_postproc
        skip_bits(gb, 5); // bitrtq_postproc
        skip_bits1(gb);   // postprocflag
        // Coded size is stored as (dimension / 2) - 1
        hdr.width = (get_bits(gb, 12) + 1) << 1;
        hdr.height = (get_bits(gb, 12) + 1) << 1;
        hdr.broadcast = get_bits1(gb);  // broadcast
        hdr.interlaced = get_bits1(gb); // interlaced
        skip_bits1(gb); // tfcntrflag
        skip_bits1(gb); // finterpflag
        skip_bits1(gb); // reserved
        skip_bits1(gb); // psf
        if (get_bits1(gb))
        { // Display Info
            int w, h, ar = 0;
            w = get_bits(gb, 14) + 1;
            h = get_bits(gb, 14) + 1;
            if (get_bits1(gb))
                ar = get_bits(gb, 4);
            if (ar && ar < 14)
            {
                // Indexed pixel aspect ratio from the fixed VC-1 table
                hdr.ar = ff_vc1_pixel_aspect[ar];
            }
            else if (ar == 15)
            {
                // Explicit pixel aspect ratio as 8-bit w/h
                w = get_bits(gb, 8) + 1;
                h = get_bits(gb, 8) + 1;
                hdr.ar.num = w;
                hdr.ar.den = h;
            }
            else
            {
                // No AR code (or reserved value 14): derive the PAR from
                // coded size vs display size — TODO confirm handling of the
                // reserved value against SMPTE 421M
                av_reduce(&hdr.ar.num, &hdr.ar.den, (int64_t)hdr.height * w, (int64_t)hdr.width * h, INT_MAX);
            }
        }
        // TODO: add other fields
    }
    else
    {
        // Simple/Main profile (WMV3) sequence header; only the fields needed
        // for picture-type parsing are kept, the rest is skipped
        hdr.valid = 1;
        hdr.old_interlaced = get_bits1(gb); // res_y411
        skip_bits1(gb);   // res_sprite
        skip_bits(gb, 3); // frmrtq_postproc
        skip_bits(gb, 5); // bitrtq_postproc
        skip_bits1(gb);   // loop_filter
        skip_bits1(gb);   // res_x8
        skip_bits1(gb);   // multires
        skip_bits1(gb);   // rest_fasttx
        skip_bits1(gb);   // fastuvmc
        skip_bits1(gb);   // extended_mv
        skip_bits(gb, 2); // dquant
        skip_bits1(gb);   // vstransform
        skip_bits1(gb);   // res_transtab
        skip_bits1(gb);   // overlap
        skip_bits1(gb);   // resync marker
        hdr.rangered = get_bits1(gb);
        hdr.bframes = get_bits(gb, 3);
        skip_bits(gb, 2); // quant mode
        hdr.finterp = get_bits1(gb);
    }
}
// Determine the picture type (I/P/B/BI) of a single compressed frame.
// Advanced Profile buffers may be startcode-wrapped, in which case the
// frame unit is located first. Relies on the previously parsed sequence
// header (hdr) for profile, interlacing and B-frame context.
// Returns AV_PICTURE_TYPE_NONE when no frame unit was found.
AVPictureType CVC1HeaderParser::ParseVC1PictureType(const uint8_t *buf, int buflen)
{
    AVPictureType pictype = AV_PICTURE_TYPE_NONE;
    int skipped = 0; // NOTE(review): set for skipped pictures but never read
    const BYTE *framestart = buf;
    if (IS_MARKER(AV_RB32(buf)))
    {
        // Startcode-wrapped stream: locate the frame unit
        framestart = nullptr;
        const BYTE *start, *end, *next;
        next = buf;
        for (start = buf, end = buf + buflen; next < end; start = next)
        {
            if (AV_RB32(start) == VC1_CODE_FRAME)
            {
                framestart = start + 4;
                break;
            }
            next = find_next_marker(start + 4, end);
        }
    }
    if (framestart)
    {
        GetBitContext gb;
        init_get_bits8(&gb, framestart, (int)(buflen - (framestart - buf)));
        if (hdr.profile == PROFILE_ADVANCED)
        {
            int fcm = PROGRESSIVE;
            if (hdr.interlaced)
                fcm = decode012(&gb);
            if (fcm == ILACE_FIELD)
            {
                // Field-interlaced: 3-bit field picture type
                int fptype = get_bits(&gb, 3);
                pictype = (fptype & 2) ? AV_PICTURE_TYPE_P : AV_PICTURE_TYPE_I;
                if (fptype & 4) // B-picture
                    pictype = (fptype & 2) ? AV_PICTURE_TYPE_BI : AV_PICTURE_TYPE_B;
            }
            else
            {
                // Progressive / frame-interlaced: unary-coded picture type
                switch (get_unary(&gb, 0, 4))
                {
                case 0: pictype = AV_PICTURE_TYPE_P; break;
                case 1: pictype = AV_PICTURE_TYPE_B; break;
                case 2: pictype = AV_PICTURE_TYPE_I; break;
                case 3: pictype = AV_PICTURE_TYPE_BI; break;
                case 4:
                    pictype = AV_PICTURE_TYPE_P; // skipped pic
                    skipped = 1;
                    break;
                }
            }
        }
        else
        {
            // Simple/Main profile frame header
            if (hdr.finterp)
                skip_bits1(&gb);
            skip_bits(&gb, 2); // framecnt
            if (hdr.rangered)
                skip_bits1(&gb);
            int pic = get_bits1(&gb);
            if (hdr.bframes)
            {
                // With B-frames present, a zero bit needs a second bit to
                // distinguish I from B
                if (!pic)
                {
                    if (get_bits1(&gb))
                    {
                        pictype = AV_PICTURE_TYPE_I;
                    }
                    else
                    {
                        pictype = AV_PICTURE_TYPE_B;
                    }
                }
                else
                {
                    pictype = AV_PICTURE_TYPE_P;
                }
            }
            else
            {
                pictype = pic ? AV_PICTURE_TYPE_P : AV_PICTURE_TYPE_I;
            }
        }
    }
    return pictype;
}
| 9,181
|
C++
|
.cpp
| 298
| 22.181208
| 110
| 0.51892
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| true
| true
| false
|
22,190
|
MPEG2HeaderParser.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/parsers/MPEG2HeaderParser.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "MPEG2HeaderParser.h"
#pragma warning(push)
#pragma warning(disable : 4101)
#pragma warning(disable : 5033)
#define AVCODEC_X86_MATHOPS_H
#include "libavcodec/get_bits.h"
#pragma warning(pop)
#define SEQ_START_CODE 0x000001b3
#define EXT_START_CODE 0x000001b5
// Scan [src, end) for an MPEG start-code prefix (00 00 01 xx) and return a
// pointer to its first byte, or 'end' when no start code is present.
// Buffers shorter than four bytes cannot contain a start code.
static inline const uint8_t *find_next_marker(const uint8_t *src, const uint8_t *end)
{
    if (end - src < 4)
        return end;
    uint32_t window = 0xFFFFFFFF;
    for (const uint8_t *p = src; p < end; ++p)
    {
        window = (window << 8) | *p;
        // Top three bytes equal 00 00 01 -> the code started three bytes back.
        if ((window & ~0xFF) == 0x00000100)
            return p - 3;
    }
    return end;
}
// Construct the parser and immediately extract header fields from the
// supplied MPEG-2 elementary stream bytes.
CMPEG2HeaderParser::CMPEG2HeaderParser(const BYTE *pData, size_t length)
{
    // Start from a fully cleared header so unparsed fields read as zero.
    memset(&hdr, 0, sizeof hdr);
    ParseMPEG2Header(pData, length);
}
CMPEG2HeaderParser::~CMPEG2HeaderParser()
{
    // No dynamically allocated state to release.
}
// Walk the stream, locate each start code, and dispatch the payload of
// sequence / extension headers to their dedicated parsers.
// Buffers under 16 bytes are rejected outright as too small to be useful.
void CMPEG2HeaderParser::ParseMPEG2Header(const BYTE *pData, size_t length)
{
    if (length < 16)
        return;

    GetBitContext gb;
    const uint8_t *end = pData + length;
    const uint8_t *chunk = find_next_marker(pData, end);

    while (chunk < end)
    {
        const uint8_t *following = find_next_marker(chunk + 4, end);
        const int payload = (int)(following - chunk - 4);
        if (payload > 0)
        {
            // NOTE(review): the bit reader is sized as (payload - 4) * 8,
            // i.e. four bytes fewer than the start-code payload; preserved
            // as-is from the original — confirm against upstream intent.
            init_get_bits(&gb, chunk + 4, (payload - 4) * 8);
            switch (AV_RB32(chunk))
            {
                case SEQ_START_CODE: MPEG2ParseSequenceHeader(&gb); break;
                case EXT_START_CODE: MPEG2ParseExtHeader(&gb); break;
            }
        }
        chunk = following;
    }
}
// Intentionally empty: no fields are currently extracted from the sequence
// header itself — everything of interest comes from the sequence extension.
void CMPEG2HeaderParser::MPEG2ParseSequenceHeader(GetBitContext *gb)
{
}
// Parse an MPEG-2 extension header. Only the sequence extension
// (extension_start_code_identifier == 1) carries the fields we record.
void CMPEG2HeaderParser::MPEG2ParseExtHeader(GetBitContext *gb)
{
    const int ext_id = get_bits(gb, 4); // extension_start_code_identifier
    if (ext_id != 1)
        return; // not a sequence extension

    hdr.valid = 1;
    skip_bits(gb, 1); // profile and level esc
    hdr.profile = get_bits(gb, 3);
    hdr.level = get_bits(gb, 4);
    hdr.interlaced = !get_bits1(gb); // progressive_sequence, inverted
    hdr.chroma = get_bits(gb, 2); // chroma_format
    // TODO: Fill in other fields, if needed
}
| 2,885
|
C++
|
.cpp
| 91
| 27.263736
| 85
| 0.664023
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| false
| false
| false
| true
| true
| false
|
22,191
|
H264SequenceParser.cpp
|
Nevcairiel_LAVFilters/decoder/LAVVideo/parsers/H264SequenceParser.cpp
|
/*
* Copyright (C) 2010-2021 Hendrik Leppkes
* http://www.1f0.de
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "stdafx.h"
#include "H264SequenceParser.h"
#include "ByteParser.h"
#include "H264Nalu.h"
// Start from fully zeroed SPS/PPS records; they are populated later by
// ParseNALs() / ParseSPS().
CH264SequenceParser::CH264SequenceParser(void)
{
    ZeroMemory(&pps, sizeof(pps));
    ZeroMemory(&sps, sizeof(sps));
}
CH264SequenceParser::~CH264SequenceParser()
{
    // No heap-allocated members; nothing to tear down.
}
// Iterate over the NAL units in 'buffer' and parse the first SPS found;
// all other NAL types are skipped. 'nal_size' selects annex-B vs.
// length-prefixed framing as understood by CH264Nalu.
HRESULT CH264SequenceParser::ParseNALs(const BYTE *buffer, size_t buflen, int nal_size)
{
    CH264Nalu nalu;
    nalu.SetBuffer(buffer, buflen, nal_size);

    while (nalu.ReadNext())
    {
        if (nalu.GetType() != NALU_TYPE_SPS)
            continue;
        // Skip the one-byte NAL header before handing off the RBSP payload.
        ParseSPS(nalu.GetDataBuffer() + 1, nalu.GetDataLength() - 1);
        break;
    }
    return S_OK;
}
// Consume one scaling list ('size' coefficients) from the bitstream. The
// values are discarded — parsing only needs to advance the bit position
// past the list so subsequent SPS fields are read from the right offset.
static void SPSDecodeScalingList(CByteParser &parser, int size)
{
    if (!parser.BitRead(1)) // scaling_list_present_flag
        return;

    int lastScale = 8, nextScale = 8;
    for (int idx = 0; idx < size; idx++)
    {
        if (nextScale)
            nextScale = (lastScale + parser.SExpGolombRead()) & 0xff;
        if (idx == 0 && nextScale == 0)
            break; // "use default matrix" signal — matrix not written
        if (nextScale)
            lastScale = nextScale;
    }
}
// Parse an H.264 sequence parameter set RBSP (NAL header byte already
// stripped by the caller) and populate the 'sps' member: profile/level,
// chroma format, bit depths, reference frame count, interlacing, and —
// when a VUI is present — aspect-ratio presence, full-range flag and
// colorimetry. Syntax elements that are merely skipped are annotated with
// their spec names. Every read below must stay in exact stream order.
// Returns S_OK unconditionally; fields absent from the stream keep the
// defaults set at the top.
HRESULT CH264SequenceParser::ParseSPS(const BYTE *buffer, size_t buflen)
{
    CByteParser parser(buffer, buflen);
    int i;
    ZeroMemory(&sps, sizeof(sps));
    // Defaults
    sps.valid = 1;
    sps.primaries = AVCOL_PRI_UNSPECIFIED;
    sps.trc = AVCOL_TRC_UNSPECIFIED;
    sps.colorspace = AVCOL_SPC_UNSPECIFIED;
    sps.full_range = -1; // -1 = not signalled in the stream
    // Parse
    sps.profile = parser.BitRead(8);
    parser.BitRead(4); // constraint flags
    parser.BitRead(4); // reserved
    sps.level = parser.BitRead(8);
    parser.UExpGolombRead(); // sps id
    // High / fidelity-range profiles signal explicit chroma format, bit
    // depths and optional scaling matrices; baseline-style profiles fall
    // through to the 8-bit 4:2:0 defaults in the else branch.
    if (sps.profile == 100 || sps.profile == 110 || sps.profile == 122 || sps.profile == 244 || sps.profile == 44 ||
        sps.profile == 83 || sps.profile == 86 || sps.profile == 118 || sps.profile == 128 || sps.profile == 144)
    {
        sps.chroma = (int)parser.UExpGolombRead();
        if (sps.chroma == 3) // 4:4:4 adds separate_colour_plane_flag
            parser.BitRead(1);
        sps.luma_bitdepth = (int)parser.UExpGolombRead() + 8;
        sps.chroma_bitdepth = (int)parser.UExpGolombRead() + 8;
        parser.BitRead(1); // transform_bypass
        // decode scaling matrices (contents are discarded; parsing only
        // advances the bit position past them)
        int scaling = parser.BitRead(1);
        if (scaling)
        {
            // Decode scaling lists
            SPSDecodeScalingList(parser, 16); // Intra, Y
            SPSDecodeScalingList(parser, 16); // Intra, Cr
            SPSDecodeScalingList(parser, 16); // Intra, Cb
            SPSDecodeScalingList(parser, 16); // Inter, Y
            SPSDecodeScalingList(parser, 16); // Inter, Cr
            SPSDecodeScalingList(parser, 16); // Inter, Cb
            SPSDecodeScalingList(parser, 64); // Intra, Y
            SPSDecodeScalingList(parser, 64); // Inter, Y
            if (sps.chroma == 3) // 4:4:4 carries the extra 8x8 chroma lists
            {
                SPSDecodeScalingList(parser, 64); // Intra, Cr
                SPSDecodeScalingList(parser, 64); // Inter, Cr
                SPSDecodeScalingList(parser, 64); // Intra, Cb
                SPSDecodeScalingList(parser, 64); // Inter, Cb
            }
        }
    }
    else
    {
        // Profiles without explicit signalling imply 8-bit 4:2:0.
        sps.chroma = 1;
        sps.luma_bitdepth = 8;
        sps.chroma_bitdepth = 8;
    }
    parser.UExpGolombRead(); // log2_max_frame_num
    int poc_type = (int)parser.UExpGolombRead(); // poc_type
    if (poc_type == 0)
        parser.UExpGolombRead(); // log2_max_poc_lsb
    else if (poc_type == 1)
    {
        parser.BitRead(1); // delta_pic_order_always_zero_flag
        parser.SExpGolombRead(); // offset_for_non_ref_pic
        parser.SExpGolombRead(); // offset_for_top_to_bottom_field
        int cyclen = (int)parser.UExpGolombRead(); // poc_cycle_length
        for (i = 0; i < cyclen; i++)
            parser.SExpGolombRead(); // offset_for_ref_frame[i]
    }
    sps.ref_frames = parser.UExpGolombRead(); // ref_frame_count
    parser.BitRead(1); // gaps_in_frame_num_allowed_flag
    parser.UExpGolombRead(); // mb_width
    parser.UExpGolombRead(); // mb_height
    sps.interlaced = !parser.BitRead(1); // frame_mbs_only_flag
    if (sps.interlaced)
        parser.BitRead(1); // mb_aff
    parser.BitRead(1); // direct_8x8_inference_flag
    int crop = parser.BitRead(1); // crop
    if (crop)
    {
        parser.UExpGolombRead(); // crop_left
        parser.UExpGolombRead(); // crop_right
        parser.UExpGolombRead(); // crop_top
        parser.UExpGolombRead(); // crop_bottom
    }
    int vui_present = parser.BitRead(1); // vui_parameters_present_flag
    if (vui_present)
    {
        sps.ar_present = parser.BitRead(1); // aspect_ratio_info_present_flag
        if (sps.ar_present)
        {
            int ar_idc = parser.BitRead(8); // aspect_ratio_idc
            if (ar_idc == 255) // Extended_SAR: explicit numerator/denominator
            {
                parser.BitRead(16); // sar.num
                parser.BitRead(16); // sar.den
            }
        }
        int overscan = parser.BitRead(1); // overscan_info_present_flag
        if (overscan)
            parser.BitRead(1); // overscan_appropriate_flag
        int vid_sig_type = parser.BitRead(1); // video_signal_type_present_flag
        if (vid_sig_type)
        {
            parser.BitRead(3); // video_format
            sps.full_range = parser.BitRead(1); // video_full_range_flag
            int colorinfo = parser.BitRead(1); // colour_description_present_flag
            if (colorinfo)
            {
                sps.primaries = parser.BitRead(8);
                sps.trc = parser.BitRead(8);
                sps.colorspace = parser.BitRead(8);
            }
        }
    }
    return S_OK;
}
| 6,655
|
C++
|
.cpp
| 179
| 29.659218
| 116
| 0.585064
|
Nevcairiel/LAVFilters
| 7,362
| 788
| 84
|
GPL-2.0
|
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
| false
| false
| true
| false
| true
| true
| true
| false
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.