Yay, encoding works!!!!
Implemented a universal encoder based on FFmpeg. I'll get it to work with GIF too.
Adding other formats should be easy. In fact, RecordingFormats can be made into something much shorter. Soon™️
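Since the new Encoder picks the muxer and codec from the target file's name via libavformat, wiring up another recording format should mostly come down to handing it a different output path. A minimal usage sketch (not part of this commit; recordClip(), grabFrame(), and the hard-coded path and resolution are hypothetical stand-ins):

#include <QImage>
#include <QSize>
#include <QString>
#include "recording/encoders/encoder.hpp"

QImage grabFrame(); // hypothetical frame source, e.g. whatever grabs the screen

void recordClip(int frameCount) {
    QString target("/tmp/clip.webm");      // the file extension selects the container and codec
    Encoder enc(target, QSize(1280, 720)); // opens the output and writes the container header
    if (!enc.isRunning()) return;          // setup failed; the constructor can also throw std::runtime_error
    for (int i = 0; i < frameCount; ++i)
        enc.addFrame(grabFrame());         // frames are cropped/converted to RGB888, encoded, and muxed
    // the destructor calls end(), which writes the trailer and frees the FFmpeg contexts
}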
parent 630e9492ec
commit 22ab688a79
@@ -53,7 +53,7 @@ SOURCES += main.cpp\
    recording/recordingcontroller.cpp \
    recording/recordingformats.cpp \
    formats.cpp \
    recording/encoders/webmencoder.cpp
    recording/encoders/encoder.cpp

HEADERS += mainwindow.hpp \
    cropeditor/cropeditor.hpp \
@@ -88,7 +88,7 @@ HEADERS += mainwindow.hpp \
    recording/recordingcontroller.hpp \
    recording/recordingformats.hpp \
    formats.hpp \
    recording/encoders/webmencoder.hpp
    recording/encoders/encoder.hpp

LIBS += -lavcodec -lavformat -lavutil -lswscale -lavutil

@@ -96,6 +96,7 @@ mac {
    SOURCES += $$PWD/platformspecifics/mac/macbackend.cpp
    HEADERS += $$PWD/platformspecifics/mac/macbackend.hpp
    LIBS += -framework Carbon
    warning(Mac is on TODO);
} else:win32 {
    SOURCES += $$PWD/platformspecifics/u32/u32backend.cpp
    HEADERS += $$PWD/platformspecifics/u32/u32backend.hpp
@@ -16,6 +16,10 @@ See the [wiki](https://github.com/ArsenArsen/KShare/wiki).
* Qt 5 GUI
* Qt 5 Network
* [QHotkey](https://github.com/Skycoder42/QHotkey)
* libavformat
* libavcodec
* libavutil
* libswscale
* From the above, we are required to have Qt 5 x11extras on Linux.

Despite the name implying so, this project does not depend on the KDE API at all.
1 main.cpp
@@ -38,7 +38,6 @@ void handler(QtMsgType type, const QMessageLogContext &, const QString &msg) {
}

int main(int argc, char *argv[]) {
    avcodec_register_all();
    av_register_all();
    qInstallMessageHandler(handler);
    QApplication a(argc, argv);
@@ -223,9 +223,9 @@ void MainWindow::on_captureCursor_clicked(bool checked) {
}

void MainWindow::on_formatBox_currentIndexChanged(int index) {
    settings::settings().setValue("recording/format", index);
    if (isVisible()) settings::settings().setValue("recording/format", index);
}

void MainWindow::on_imageFormatBox_currentIndexChanged(const QString &arg1) {
    settings::settings().setValue("imageformat", arg1);
void MainWindow::on_imageFormatBox_currentIndexChanged(int index) {
    if (isVisible()) settings::settings().setValue("imageformat", index);
}
@@ -34,7 +34,7 @@ private slots:
    void on_actionColor_Picker_triggered();
    void on_captureCursor_clicked(bool checked);
    void on_formatBox_currentIndexChanged(int index);
    void on_imageFormatBox_currentIndexChanged(const QString &arg1);
    void on_imageFormatBox_currentIndexChanged(int index);

public:
    explicit MainWindow(QWidget *parent = 0);
138 recording/encoders/encoder.cpp (new file)
@@ -0,0 +1,138 @@
#include "encoder.hpp"

#include <settings.hpp>
extern "C" {
#include <libavutil/imgutils.h>
#include <libavutil/opt.h>
}

inline void throwAVErr(int ret, std::string section) {
    char err[AV_ERROR_MAX_STRING_SIZE];
    av_make_error_string(err, AV_ERROR_MAX_STRING_SIZE, ret);
    std::string newString(err);
    throw std::runtime_error("Error during: " + section + ": " + newString);
}

Encoder::Encoder(QString &targetFile, QSize res) {
    int ret;
    // Format
    ret = avformat_alloc_output_context2(&fc, NULL, NULL, targetFile.toLocal8Bit().constData());
    if (ret < 0) throwAVErr(ret, "format alloc");

    // Stream
    out->st = avformat_new_stream(fc, NULL);
    if (!out->st) throw std::runtime_error("Unable to allocate video context");
    out->st->id = fc->nb_streams - 1;
    if (fc->oformat->flags & AVFMT_GLOBALHEADER) fc->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
    av_dump_format(fc, 0, targetFile.toLocal8Bit().constData(), 1);

    // Codec
    if (!fc->oformat->video_codec) throw std::runtime_error("Video codec not found");
    codec = avcodec_find_encoder(fc->oformat->video_codec);
    if (!codec) throw std::runtime_error("Video codec not found");
    out->enc = avcodec_alloc_context3(codec);
    if (!out->enc) throw std::runtime_error("Unable to allocate video context");

    int fps = settings::settings().value("recording/framerate", 30).toInt();

    out->enc->codec_id = codec->id;

    out->enc->bit_rate = 400000;
    out->enc->width = res.width() % 2 ? res.width() - 1 : res.width();
    out->enc->height = res.height() % 2 ? res.height() - 1 : res.height();
    size = QSize(out->enc->width, out->enc->height);
    out->st->time_base = { 1, fps };
    out->enc->time_base = out->st->time_base;

    out->enc->gop_size = 12;
    out->enc->pix_fmt = AV_PIX_FMT_YUV420P; // blaze it
    if (out->enc->codec_id == AV_CODEC_ID_MPEG2VIDEO)
        out->enc->max_b_frames = 2;
    else if (out->enc->codec_id == AV_CODEC_ID_MPEG1VIDEO)
        out->enc->mb_decision = 2;


    ret = avcodec_open2(out->enc, codec, NULL);
    if (ret < 0) throwAVErr(ret, "codec open");

    ret = avcodec_parameters_from_context(out->st->codecpar, out->enc);
    if (ret < 0) throwAVErr(ret, "stream opt copy");

    // Frames
    out->frame = av_frame_alloc();
    if (!out->frame) {
        throw std::runtime_error("frame alloc");
    }
    out->frame->format = out->enc->pix_fmt;
    out->frame->width = out->enc->width;
    out->frame->height = out->enc->height;
    ret = av_frame_get_buffer(out->frame, 32);
    if (ret < 0) throwAVErr(ret, "frame buffer alloc");

    // Writer
    ret = avio_open(&fc->pb, targetFile.toLocal8Bit().constData(), AVIO_FLAG_WRITE);
    if (ret < 0) throwAVErr(ret, "writer open");
    ret = avformat_write_header(fc, NULL);
    if (ret < 0) throwAVErr(ret, "write header");

    success = true;
}

void Encoder::setFrameRGB(uint8_t *rgb) {
    int ret = av_frame_make_writable(out->frame);
    if (ret < 0) throwAVErr(ret, "setFrameRGB");
    int lineSize[1] = { 3 * out->enc->width };
    out->sws = sws_getCachedContext(out->sws, out->enc->width, out->enc->height, AV_PIX_FMT_RGB24, out->enc->width,
                                    out->enc->height, (AVPixelFormat)out->frame->format, 0, 0, 0, 0);
    sws_scale(out->sws, (const uint8_t *const *)&rgb, lineSize, 0, out->enc->height, out->frame->data, out->frame->linesize);
    out->frame->pts = out->nextPts++;
}

Encoder::~Encoder() {
    end();
}

bool Encoder::addFrame(QImage frm) {
    if (!success) return false;
    if (frm.size() != size) frm = frm.copy(QRect(QPoint(0, 0), size));
    if (frm.format() != QImage::Format_RGB888) frm = frm.convertToFormat(QImage::Format_RGB888);
    uint8_t *frameData = (uint8_t *)frm.bits();
    setFrameRGB(frameData);
    av_init_packet(&pkt);
    int gotPack = 0;
    int ret = avcodec_encode_video2(out->enc, &pkt, out->frame, &gotPack);
    if (ret < 0) {
        av_packet_unref(&pkt);
        throwAVErr(ret, "encode video");
    }

    if (gotPack) {
        av_packet_rescale_ts(&pkt, out->enc->time_base, out->st->time_base);
        pkt.stream_index = out->st->index;
        ret = av_interleaved_write_frame(fc, &pkt);
    } else
        ret = 0;
    av_packet_unref(&pkt);
    if (ret < 0) throwAVErr(ret, "write frame");
    return true;
}

bool Encoder::isRunning() {
    return success;
}

bool Encoder::end() {
    if (!success) {
        goto cleanup;
    }

    av_write_trailer(fc);
cleanup:
    avcodec_free_context(&out->enc);
    av_frame_free(&out->frame);
    sws_freeContext(out->sws);
    delete out;
    if (!(fc->oformat->flags & AVFMT_NOFILE)) avio_closep(&fc->pb);
    avformat_free_context(fc);
    return success;
}
@@ -1,5 +1,5 @@
#ifndef WEBMENCODER_HPP
#define WEBMENCODER_HPP
#ifndef ENCODER_HPP
#define ENCODER_HPP

#include <QImage>
#include <QSize>
@@ -10,30 +10,37 @@ extern "C" {
#include <libswscale/swscale.h>
}

class WebMEncoder {
public:
    static constexpr AVCodecID CODEC = AV_CODEC_ID_VP8;
    static constexpr formats::Recording FORMAT = formats::Recording::WebM;
struct OutputStream {
    AVStream *st = NULL;
    AVCodecContext *enc = NULL;

    WebMEncoder(QSize res);
    ~WebMEncoder();
    int64_t nextPts = 0;

    AVFrame *frame = NULL;

    SwsContext *sws = NULL;
};

class Encoder {
public:
    Encoder(QString &targetFile, QSize res);
    ~Encoder();
    bool addFrame(QImage frm);
    bool isRunning();
    QByteArray end();
    bool end();

private:
    AVCodec *codec = NULL;
    AVCodecContext *c = NULL;
    AVFrame *frame = NULL;

    OutputStream *out = new OutputStream;
    AVFormatContext *fc = NULL;
    AVPacket pkt;

    bool success = false;

    QByteArray video;
    QSize size;
    struct SwsContext *sws_context = NULL;

    void setFrameRGB(uint8_t *rgb);
};

#endif // WEBMENCODER_HPP
#endif // ENCODER_HPP
@@ -1,114 +0,0 @@
#include "webmencoder.hpp"

#include <settings.hpp>
extern "C" {
#include <libavutil/imgutils.h>
#include <libavutil/opt.h>
}

inline void throwAVErr(int ret) {
    char err[AV_ERROR_MAX_STRING_SIZE];
    av_make_error_string(err, AV_ERROR_MAX_STRING_SIZE, ret);
    std::string newString(err);
    throw std::runtime_error(newString);
}

WebMEncoder::WebMEncoder(QSize res) {
    codec = avcodec_find_encoder(CODEC);
    if (!codec) throw std::runtime_error("Codec not found");
    c = avcodec_alloc_context3(codec);
    if (!c) throw std::runtime_error("Unable to allocate video context");
    c->bit_rate = 400000;

    c->width = res.width() % 2 ? res.width() - 1 : res.width();
    c->height = res.height() % 2 ? res.height() - 1 : res.height();
    size = QSize(c->width, c->height);
    int fps = settings::settings().value("recording/framerate", 30).toInt();
    c->time_base = { 1, fps };
    c->framerate = { fps, 1 };
    c->gop_size = 10;
    c->max_b_frames = 1;
    c->pix_fmt = AV_PIX_FMT_YUV420P;

    int ret = avcodec_open2(c, codec, NULL);
    if (ret < 0) throwAVErr(ret);

    frame = av_frame_alloc();
    if (!frame) {
        fprintf(stderr, "Could not allocate video frame\n");
        exit(1);
    }
    frame->format = c->pix_fmt;
    frame->width = c->width;
    frame->height = c->height;
    ret = av_image_alloc(frame->data, frame->linesize, c->width, c->height, c->pix_fmt, 32);
    if (!ret) throwAVErr(ret);

    success = true;
}

void WebMEncoder::setFrameRGB(uint8_t *rgb) {
    int lineSize[1] = { 3 * c->width };
    sws_context = sws_getCachedContext(sws_context, c->width, c->height, AV_PIX_FMT_RGB24, c->width, c->height,
                                       AV_PIX_FMT_YUV420P, 0, 0, 0, 0);
    sws_scale(sws_context, (const uint8_t *const *)&rgb, lineSize, 0, c->height, frame->data, frame->linesize);
}

WebMEncoder::~WebMEncoder() {
    end();
}

bool WebMEncoder::addFrame(QImage frm) {
    if (!success) return false;
    if (frm.size() != size) frm = frm.copy(QRect(QPoint(0, 0), size));
    if (frm.format() != QImage::Format_RGB888) frm = frm.convertToFormat(QImage::Format_RGB888);
    uint8_t *frameData = (uint8_t *)frm.bits();
    setFrameRGB(frameData);
    av_init_packet(&pkt);
    pkt.data = NULL;
    pkt.size = 0;
    int ret = avcodec_send_frame(c, frame);
    if (ret < 0) return false;
    while (ret >= 0) {
        ret = avcodec_receive_packet(c, &pkt);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            return true;
        else if (ret < 0) {
            return false;
        }
        video.append((const char *)pkt.data, pkt.size);
        av_packet_unref(&pkt);
    }
    return true;
}

bool WebMEncoder::isRunning() {
    return success;
}

QByteArray WebMEncoder::end() {
    int ret;
    uint8_t endcode[] = { 0, 0, 1, 0xb7 };
    if (!success) {
        goto cleanup;
    }
    ret = avcodec_send_frame(c, frame);
    while (ret >= 0) {
        ret = avcodec_receive_packet(c, &pkt);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            break;
        else if (ret < 0)
            break;
        video.append((const char *)pkt.data, pkt.size);
        av_packet_unref(&pkt);
    }
    video.append((const char *)endcode, sizeof(endcode));
cleanup:
    if (c) {
        avcodec_close(c);
        avcodec_free_context(&c);
    }
    if (frame) av_frame_free(&frame);
    av_packet_unref(&pkt);
    return video;
}
@@ -60,7 +60,7 @@ void RecordingController::queue(_QueueContext arr) {

void RecordingController::timeout() {
    if (isRunning()) {
        if (!_context->validator()) {
        if (!_context->validator(area.size())) {
            if (preview) {
                preview->close();
                preview->deleteLater();
@@ -15,7 +15,7 @@
struct RecordingContext {
    QImage::Format format;
    std::function<void(QImage)> consumer;
    std::function<bool()> validator;
    std::function<bool(QSize)> validator;
    std::function<QByteArray()> finalizer;
    QString anotherFormat;
};
@@ -18,7 +18,7 @@ RecordingFormats::RecordingFormats(formats::Recording f) {
    QString path = QStandardPaths::writableLocation(QStandardPaths::TempLocation);

    if (path.isEmpty()) {
        validator = [] { return false; };
        validator = [](QSize) { return false; };
        return;
    }
    tmpDir = QDir(path);
@@ -29,7 +29,7 @@ RecordingFormats::RecordingFormats(formats::Recording f) {
    switch (f) {
    case formats::Recording::GIF: {
        iFormat = QImage::Format_RGBA8888;
        validator = [] { return true; };
        validator = [](QSize) { return true; };
        consumer = [&](QImage img) { frames.push_back(img); };
        finalizer = [&] {
            if (frames.size() == 0) return QByteArray();
@@ -53,11 +53,40 @@ RecordingFormats::RecordingFormats(formats::Recording f) {
        anotherFormat = formats::recordingFormatName(f);
        break;
    }
    case formats::Recording::WebM: {
        iFormat = QImage::Format_RGB888;
        finalizer = [&] {
            delete enc;
            QFile res(tmpDir.absoluteFilePath("res.webm"));
            if (!res.open(QFile::ReadOnly)) {
                return QByteArray();
            }
            QByteArray data = res.readAll();
            return data;
        };
        validator = [&](QSize s) {
            if (!enc) {
                QString path = tmpDir.absoluteFilePath("res.webm");
                enc = new Encoder(path, s);
                if (!enc->isRunning()) {
                    delete enc;
                    return false;
                }
            }
            return true;
        };
        consumer = [&](QImage img) { enc->addFrame(img); };
        break;
    }
    default:
        break;
    }
}

RecordingFormats::~RecordingFormats() {
    tmpDir.removeRecursively();
}

std::function<void(QImage)> RecordingFormats::getConsumer() {
    return consumer;
}
@@ -66,7 +95,7 @@ std::function<QByteArray()> RecordingFormats::getFinalizer() {
    return finalizer;
}

std::function<bool()> RecordingFormats::getValidator() {
std::function<bool(QSize)> RecordingFormats::getValidator() {
    return validator;
}

@@ -8,22 +8,26 @@
#include <formats.hpp>
#include <functional>

#include <recording/encoders/encoder.hpp>

class RecordingFormats {
public:
    RecordingFormats(formats::Recording f);
    ~RecordingFormats();
    std::function<void(QImage)> getConsumer();
    std::function<bool(QSize)> getValidator();
    std::function<QByteArray()> getFinalizer();
    std::function<bool()> getValidator();
    QImage::Format getFormat();
    QString getAnotherFormat();

private:
    std::function<void(QImage)> consumer;
    std::function<bool()> validator;
    std::function<bool(QSize)> validator;
    std::function<QByteArray()> finalizer;
    std::vector<QImage> frames;
    QImage::Format iFormat;
    QDir tmpDir;
    Encoder *enc = NULL;
    QString anotherFormat;
};

@@ -18,8 +18,14 @@ void ImgurUploader::doUpload(QByteArray byteArray, QString) {
          << QPair<QString, QString>("Authorization", "Client-ID 8a98f183fc895da"),
      byteArray, [](QJsonDocument res, QByteArray, QNetworkReply *) {
          QString result = res.object()["data"].toObject()["link"].toString();
          screenshotutil::toClipboard(result);
          notifications::notify("KShare imgur Uploader ",
                                result.isEmpty() ? "Failed upload!" : "Uploaded to imgur!");
          if (!result.isEmpty()) {
              screenshotutil::toClipboard(result);
              notifications::notify("KShare imgur Uploader ", "Uploaded to imgur!");
          } else {
              notifications::notify("KShare imgur Uploader ",
                                    QString("Failed upload! imgur said: HTTP %2: %1")
                                        .arg(res.object()["data"].toObject()["error"].toString())
                                        .arg(QString::number(res.object()["status"].toInt())));
          }
      });
}