Merge branch 'record' into dev (#292)

Record screen to file
Branch: pull/332/head
Author: Romain Vimont
Commit: 77b620e1d0

@ -12,6 +12,7 @@ src = [
'src/input_manager.c',
'src/lock_util.c',
'src/net.c',
'src/recorder.c',
'src/scrcpy.c',
'src/screen.c',
'src/server.c',

@ -15,4 +15,14 @@ static inline void buffer_write32be(Uint8 *buf, Uint32 value) {
buf[3] = value;
}
static inline Uint32 buffer_read32be(Uint8 *buf) {
return (buf[0] << 24) | (buf[1] << 16) | (buf[2] << 8) | buf[3];
}
static inline Uint64 buffer_read64be(Uint8 *buf) {
Uint32 msb = buffer_read32be(buf);
Uint32 lsb = buffer_read32be(&buf[4]);
return ((Uint64) msb << 32) | lsb;
}
#endif
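For illustration, here is a minimal, self-contained sketch (not part of the patch) of how these big-endian readers decode the 12-byte frame meta header introduced below: 8 bytes of PTS followed by 4 bytes of packet size, both big-endian, which matches the Java side since ByteBuffer defaults to big-endian order. The sample header bytes are made up.

#include <stdint.h>
#include <stdio.h>

// standalone equivalents of the helpers above (the real ones use SDL's Uint* types)
static uint32_t read32be(const uint8_t *buf) {
    return ((uint32_t) buf[0] << 24) | (buf[1] << 16) | (buf[2] << 8) | buf[3];
}

static uint64_t read64be(const uint8_t *buf) {
    return ((uint64_t) read32be(buf) << 32) | read32be(&buf[4]);
}

#define NO_PTS UINT64_C(-1)

int main(void) {
    // hypothetical 12-byte meta header: PTS = 40000 us, packet size = 1500 bytes
    uint8_t header[12] = {
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x9C, 0x40, // PTS (big-endian)
        0x00, 0x00, 0x05, 0xDC,                         // packet size (big-endian)
    };
    uint64_t pts = read64be(header);
    uint32_t packet_size = read32be(&header[8]);
    if (pts == NO_PTS) {
        // codec config packet: the decoder does not queue a meta entry for it
        printf("config packet, %u bytes\n", packet_size);
    } else {
        printf("pts=%llu us, %u bytes\n", (unsigned long long) pts, packet_size);
    }
    return 0;
}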

@ -1,20 +1,119 @@
#include "decoder.h"
#include <libavformat/avformat.h>
#include <libavutil/time.h>
#include <SDL2/SDL_assert.h>
#include <SDL2/SDL_events.h>
#include <SDL2/SDL_mutex.h>
#include <SDL2/SDL_thread.h>
#include <unistd.h>
#include "config.h"
#include "buffer_util.h"
#include "events.h"
#include "frames.h"
#include "lock_util.h"
#include "log.h"
#include "recorder.h"
#define BUFSIZE 0x10000
static int read_packet(void *opaque, uint8_t *buf, int buf_size) {
#define HEADER_SIZE 12
#define NO_PTS UINT64_C(-1)
static struct frame_meta *frame_meta_new(uint64_t pts) {
struct frame_meta *meta = malloc(sizeof(*meta));
if (!meta) {
return meta;
}
meta->pts = pts;
meta->next = NULL;
return meta;
}
static void frame_meta_delete(struct frame_meta *frame_meta) {
free(frame_meta);
}
static SDL_bool receiver_state_push_meta(struct receiver_state *state,
uint64_t pts) {
struct frame_meta *frame_meta = frame_meta_new(pts);
if (!frame_meta) {
return SDL_FALSE;
}
// append to the list
// (iterate to find the last item, in practice the list should be tiny)
struct frame_meta **p = &state->frame_meta_queue;
while (*p) {
p = &(*p)->next;
}
*p = frame_meta;
return SDL_TRUE;
}
static uint64_t receiver_state_take_meta(struct receiver_state *state) {
struct frame_meta *frame_meta = state->frame_meta_queue; // first item
SDL_assert(frame_meta); // must not be empty
uint64_t pts = frame_meta->pts;
state->frame_meta_queue = frame_meta->next; // remove the item
frame_meta_delete(frame_meta);
return pts;
}
static int read_packet_with_meta(void *opaque, uint8_t *buf, int buf_size) {
struct decoder *decoder = opaque;
struct receiver_state *state = &decoder->receiver_state;
// The video stream contains raw packets, without time information. When we
// record, we retrieve the timestamps separately, from a "meta" header
// added by the server before each raw packet.
//
// The "meta" header length is 12 bytes:
// [. . . . . . . .|. . . .]. . . . . . . . . . . . . . . ...
//  <-------------> <-----> <-----------------------------...
//        PTS        packet        raw packet
//                    size
//
// It is followed by <packet_size> bytes containing the packet/frame.
if (!state->remaining) {
#define HEADER_SIZE 12
uint8_t header[HEADER_SIZE];
ssize_t ret = net_recv_all(decoder->video_socket, header, HEADER_SIZE);
if (ret <= 0) {
return ret;
}
// no partial read (net_recv_all())
SDL_assert_release(ret == HEADER_SIZE);
uint64_t pts = buffer_read64be(header);
state->remaining = buffer_read32be(&header[8]);
if (pts != NO_PTS && !receiver_state_push_meta(state, pts)) {
LOGE("Could not store PTS for recording");
// we cannot save the PTS, the recording would be broken
return -1;
}
}
SDL_assert(state->remaining);
if (buf_size > state->remaining)
buf_size = state->remaining;
ssize_t ret = net_recv(decoder->video_socket, buf, buf_size);
if (ret <= 0) {
return ret;
}
SDL_assert(state->remaining >= ret);
state->remaining -= ret;
return ret;
}
static int read_raw_packet(void *opaque, uint8_t *buf, int buf_size) {
struct decoder *decoder = opaque;
return net_recv(decoder->video_socket, buf, buf_size);
}
@ -70,7 +169,15 @@ static int run_decoder(void *data) {
goto run_finally_free_format_ctx;
}
AVIOContext *avio_ctx = avio_alloc_context(buffer, BUFSIZE, 0, decoder, read_packet, NULL, NULL);
// initialize the receiver state
decoder->receiver_state.frame_meta_queue = NULL;
decoder->receiver_state.remaining = 0;
// if recording is enabled, a "header" is sent between raw packets
int (*read_packet)(void *, uint8_t *, int) =
decoder->recorder ? read_packet_with_meta : read_raw_packet;
AVIOContext *avio_ctx = avio_alloc_context(buffer, BUFSIZE, 0, decoder,
read_packet, NULL, NULL);
if (!avio_ctx) {
LOGC("Could not allocate avio context");
// avformat_open_input takes ownership of 'buffer'
@ -86,6 +193,12 @@ static int run_decoder(void *data) {
goto run_finally_free_avio_ctx;
}
if (decoder->recorder &&
!recorder_open(decoder->recorder, codec)) {
LOGE("Could not open recorder");
goto run_finally_close_input;
}
AVPacket packet;
av_init_packet(&packet);
packet.data = NULL;
@ -125,6 +238,23 @@ static int run_decoder(void *data) {
packet.data += len;
}
#endif
if (decoder->recorder) {
// we retrieve the PTS in the order they were received, so they will
// be assigned to the correct frame
uint64_t pts = receiver_state_take_meta(&decoder->receiver_state);
packet.pts = pts;
packet.dts = pts;
// no need to rescale with av_packet_rescale_ts(), the timestamps
// are in microseconds both in input and output
if (!recorder_write(decoder->recorder, &packet)) {
LOGE("Could not write frame to output file");
av_packet_unref(&packet);
goto run_quit;
}
}
av_packet_unref(&packet);
if (avio_ctx->eof_reached) {
@ -135,6 +265,10 @@ static int run_decoder(void *data) {
LOGD("End of frames");
run_quit:
if (decoder->recorder) {
recorder_close(decoder->recorder);
}
run_finally_close_input:
avformat_close_input(&format_ctx);
run_finally_free_avio_ctx:
av_freep(&avio_ctx);
@ -149,9 +283,11 @@ run_end:
return 0;
}
void decoder_init(struct decoder *decoder, struct frames *frames, socket_t video_socket) {
void decoder_init(struct decoder *decoder, struct frames *frames,
socket_t video_socket, struct recorder *recorder) {
decoder->frames = frames;
decoder->video_socket = video_socket;
decoder->recorder = recorder;
}
SDL_bool decoder_start(struct decoder *decoder) {
@ -162,7 +298,6 @@ SDL_bool decoder_start(struct decoder *decoder) {
LOGC("Could not start decoder thread");
return SDL_FALSE;
}
return SDL_TRUE;
}

@ -4,18 +4,31 @@
#include <SDL2/SDL_stdinc.h>
#include <SDL2/SDL_thread.h>
#include "common.h"
#include "net.h"
struct frames;
struct frame_meta {
uint64_t pts;
struct frame_meta *next;
};
struct decoder {
struct frames *frames;
socket_t video_socket;
SDL_Thread *thread;
SDL_mutex *mutex;
struct recorder *recorder;
struct receiver_state {
// meta (in order) for frames not consumed yet
struct frame_meta *frame_meta_queue;
size_t remaining; // remaining bytes to receive for the current frame
} receiver_state;
};
void decoder_init(struct decoder *decoder, struct frames *frames, socket_t video_socket);
void decoder_init(struct decoder *decoder, struct frames *frames,
socket_t video_socket, struct recorder *recorder);
SDL_bool decoder_start(struct decoder *decoder);
void decoder_stop(struct decoder *decoder);
void decoder_join(struct decoder *decoder);

@ -11,6 +11,7 @@
struct args {
const char *serial;
const char *crop;
const char *record_filename;
SDL_bool fullscreen;
SDL_bool help;
SDL_bool version;
@ -53,6 +54,9 @@ static void usage(const char *arg0) {
" Set the TCP port the client listens on.\n"
" Default is %d.\n"
"\n"
" -r, --record file.mp4\n"
" Record screen to file.\n"
"\n"
" -s, --serial\n"
" The device serial number. Mandatory only if several devices\n"
" are connected to adb.\n"
@ -208,13 +212,14 @@ static SDL_bool parse_args(struct args *args, int argc, char *argv[]) {
{"help", no_argument, NULL, 'h'},
{"max-size", required_argument, NULL, 'm'},
{"port", required_argument, NULL, 'p'},
{"record", required_argument, NULL, 'r'},
{"serial", required_argument, NULL, 's'},
{"show-touches", no_argument, NULL, 't'},
{"version", no_argument, NULL, 'v'},
{NULL, 0, NULL, 0 },
};
int c;
while ((c = getopt_long(argc, argv, "b:c:fhm:p:s:tv", long_options, NULL)) != -1) {
while ((c = getopt_long(argc, argv, "b:c:fhm:p:r:s:tv", long_options, NULL)) != -1) {
switch (c) {
case 'b':
if (!parse_bit_rate(optarg, &args->bit_rate)) {
@ -240,6 +245,9 @@ static SDL_bool parse_args(struct args *args, int argc, char *argv[]) {
return SDL_FALSE;
}
break;
case 'r':
args->record_filename = optarg;
break;
case 's':
args->serial = optarg;
break;
@ -273,6 +281,7 @@ int main(int argc, char *argv[]) {
struct args args = {
.serial = NULL,
.crop = NULL,
.record_filename = NULL,
.help = SDL_FALSE,
.version = SDL_FALSE,
.show_touches = SDL_FALSE,
@ -310,6 +319,7 @@ int main(int argc, char *argv[]) {
.serial = args.serial,
.crop = args.crop,
.port = args.port,
.record_filename = args.record_filename,
.max_size = args.max_size,
.bit_rate = args.bit_rate,
.show_touches = args.show_touches,

@ -0,0 +1,118 @@
#include "recorder.h"
#include <libavutil/time.h>
#include "config.h"
#include "log.h"
static const AVOutputFormat *find_mp4_muxer(void) {
#if LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(58, 9, 100)
void *opaque = NULL;
#endif
const AVOutputFormat *oformat = NULL;
do {
#if LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(58, 9, 100)
oformat = av_muxer_iterate(&opaque);
#else
oformat = av_oformat_next(oformat);
#endif
// iterate until NULL or until a muxer named "mp4" is found
} while (oformat && strcmp(oformat->name, "mp4"));
return oformat;
}
SDL_bool recorder_init(struct recorder *recorder, const char *filename,
struct size declared_frame_size) {
recorder->filename = SDL_strdup(filename);
if (!recorder->filename) {
LOGE("Cannot strdup filename");
return SDL_FALSE;
}
recorder->declared_frame_size = declared_frame_size;
return SDL_TRUE;
}
void recorder_destroy(struct recorder *recorder) {
SDL_free(recorder->filename);
}
SDL_bool recorder_open(struct recorder *recorder, AVCodec *input_codec) {
const AVOutputFormat *mp4 = find_mp4_muxer();
if (!mp4) {
LOGE("Could not find mp4 muxer");
return SDL_FALSE;
}
recorder->ctx = avformat_alloc_context();
if (!recorder->ctx) {
LOGE("Could not allocate output context");
return SDL_FALSE;
}
// contrary to the deprecated API (av_oformat_next()), av_muxer_iterate()
// returns (on purpose) a pointer-to-const, but AVFormatContext.oformat
// still expects a pointer-to-non-const (it has not been updated accordingly)
// <https://github.com/FFmpeg/FFmpeg/commit/0694d8702421e7aff1340038559c438b61bb30dd>
recorder->ctx->oformat = (AVOutputFormat *) mp4;
AVStream *ostream = avformat_new_stream(recorder->ctx, input_codec);
if (!ostream) {
avformat_free_context(recorder->ctx);
return SDL_FALSE;
}
// In ffmpeg/doc/APIchanges:
// 2016-04-11 - 6f69f7a / 9200514 - lavf 57.33.100 / 57.5.0 - avformat.h
// Add AVStream.codecpar, deprecate AVStream.codec.
#if (LIBAVFORMAT_VERSION_MICRO >= 100 /* FFmpeg */ && \
LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(57, 33, 100)) \
|| (LIBAVFORMAT_VERSION_MICRO < 100 && /* Libav */ \
LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(57, 5, 0))
ostream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
ostream->codecpar->codec_id = input_codec->id;
ostream->codecpar->format = AV_PIX_FMT_YUV420P;
ostream->codecpar->width = recorder->declared_frame_size.width;
ostream->codecpar->height = recorder->declared_frame_size.height;
#else
ostream->codec->codec_type = AVMEDIA_TYPE_VIDEO;
ostream->codec->codec_id = input_codec->id;
ostream->codec->pix_fmt = AV_PIX_FMT_YUV420P;
ostream->codec->width = recorder->declared_frame_size.width;
ostream->codec->height = recorder->declared_frame_size.height;
#endif
ostream->time_base = (AVRational) {1, 1000000}; // timestamps in us
int ret = avio_open(&recorder->ctx->pb, recorder->filename,
AVIO_FLAG_WRITE);
if (ret < 0) {
LOGE("Failed to open output file: %s", recorder->filename);
// ostream will be freed when the context is freed
avformat_free_context(recorder->ctx);
return SDL_FALSE;
}
ret = avformat_write_header(recorder->ctx, NULL);
if (ret < 0) {
LOGE("Failed to write header to %s", recorder->filename);
avio_closep(&recorder->ctx->pb);
avformat_free_context(recorder->ctx);
return SDL_FALSE;
}
return SDL_TRUE;
}
void recorder_close(struct recorder *recorder) {
int ret = av_write_trailer(recorder->ctx);
if (ret < 0) {
LOGE("Failed to write trailer to %s", recorder->filename);
}
avio_close(recorder->ctx->pb);
avformat_free_context(recorder->ctx);
}
SDL_bool recorder_write(struct recorder *recorder, AVPacket *packet) {
return av_write_frame(recorder->ctx, packet) >= 0;
}
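One remark worth spelling out (a hedged sketch, not part of the patch): ostream->time_base is set to {1, 1000000} and the PTS carried by the meta header is already in microseconds (MediaCodec's presentationTimeUs), so the decoder can copy it straight into packet.pts/dts. If the output time base ever differed from the input's, the packet would need rescaling first, along these lines:

#include <libavformat/avformat.h>

// hypothetical helper: rescale only when the muxer's time base differs from the
// microsecond base used by the device-side encoder; with the {1, 1000000} time
// base chosen above, this is a no-op, which is why the decoder skips it
static void rescale_if_needed(AVPacket *packet, const AVStream *ostream) {
    AVRational us = (AVRational) {1, 1000000};
    if (av_cmp_q(us, ostream->time_base) != 0) {
        av_packet_rescale_ts(packet, us, ostream->time_base);
    }
}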

@ -0,0 +1,24 @@
#ifndef RECORDER_H
#define RECORDER_H
#include <libavformat/avformat.h>
#include <SDL2/SDL_stdinc.h>
#include "common.h"
struct recorder {
char *filename;
AVFormatContext *ctx;
struct size declared_frame_size;
};
SDL_bool recorder_init(struct recorder *recorder, const char *filename,
struct size declared_frame_size);
void recorder_destroy(struct recorder *recorder);
SDL_bool recorder_open(struct recorder *recorder, AVCodec *input_codec);
void recorder_close(struct recorder *recorder);
SDL_bool recorder_write(struct recorder *recorder, AVPacket *packet);
#endif
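As a usage note (a hedged sketch mirroring the call sites added in scrcpy.c and decoder.c, with placeholder parameters; not part of the patch), the API above is driven in this order: init, open once the codec is known, write one packet per frame, close, destroy:

#include "recorder.h"

// hypothetical driver illustrating the expected call order; in scrcpy itself,
// recorder_init()/recorder_destroy() are called from scrcpy.c and
// open/write/close from the decoder thread
static SDL_bool record_packets(const char *filename, struct size frame_size,
                               AVCodec *codec, AVPacket *packets, int count) {
    struct recorder recorder;
    if (!recorder_init(&recorder, filename, frame_size)) {
        return SDL_FALSE;
    }
    if (!recorder_open(&recorder, codec)) {
        recorder_destroy(&recorder);
        return SDL_FALSE;
    }
    SDL_bool ok = SDL_TRUE;
    for (int i = 0; i < count; i++) {
        // each packet must carry pts/dts in microseconds (see decoder.c)
        if (!recorder_write(&recorder, &packets[i])) {
            ok = SDL_FALSE;
            break;
        }
    }
    recorder_close(&recorder);   // writes the mp4 trailer
    recorder_destroy(&recorder); // frees the filename copy
    return ok;
}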

@ -20,6 +20,7 @@
#include "log.h"
#include "lock_util.h"
#include "net.h"
#include "recorder.h"
#include "screen.h"
#include "server.h"
#include "tiny_xpm.h"
@ -30,6 +31,7 @@ static struct frames frames;
static struct decoder decoder;
static struct controller controller;
static struct file_handler file_handler;
static struct recorder recorder;
static struct input_manager input_manager = {
.controller = &controller,
@ -138,8 +140,10 @@ static void wait_show_touches(process_t process) {
}
SDL_bool scrcpy(const struct scrcpy_options *options) {
SDL_bool send_frame_meta = !!options->record_filename;
if (!server_start(&server, options->serial, options->port,
options->max_size, options->bit_rate, options->crop)) {
options->max_size, options->bit_rate, options->crop,
send_frame_meta)) {
return SDL_FALSE;
}
@ -153,10 +157,6 @@ SDL_bool scrcpy(const struct scrcpy_options *options) {
SDL_bool ret = SDL_TRUE;
if (!SDL_SetHint(SDL_HINT_NO_SIGNAL_HANDLERS, "1")) {
LOGW("Cannot request to keep default signal handlers");
}
if (!sdl_init_and_configure()) {
ret = SDL_FALSE;
goto finally_destroy_server;
@ -193,14 +193,24 @@ SDL_bool scrcpy(const struct scrcpy_options *options) {
goto finally_destroy_frames;
}
decoder_init(&decoder, &frames, device_socket);
struct recorder *rec = NULL;
if (options->record_filename) {
if (!recorder_init(&recorder, options->record_filename, frame_size)) {
ret = SDL_FALSE;
server_stop(&server);
goto finally_destroy_file_handler;
}
rec = &recorder;
}
decoder_init(&decoder, &frames, device_socket, rec);
// now that the header values have been consumed, the socket receives the video stream
// start the decoder
if (!decoder_start(&decoder)) {
ret = SDL_FALSE;
server_stop(&server);
goto finally_destroy_file_handler;
goto finally_destroy_recorder;
}
if (!controller_init(&controller, device_socket)) {
@ -246,6 +256,10 @@ finally_destroy_file_handler:
file_handler_stop(&file_handler);
file_handler_join(&file_handler);
file_handler_destroy(&file_handler);
finally_destroy_recorder:
if (options->record_filename) {
recorder_destroy(&recorder);
}
finally_destroy_frames:
frames_destroy(&frames);
finally_destroy_server:

@ -6,6 +6,7 @@
struct scrcpy_options {
const char *serial;
const char *crop;
const char *record_filename;
Uint16 port;
Uint16 max_size;
Uint32 bit_rate;

@ -78,7 +78,8 @@ static SDL_bool disable_tunnel(struct server *server) {
static process_t execute_server(const char *serial,
Uint16 max_size, Uint32 bit_rate,
const char *crop, SDL_bool tunnel_forward) {
SDL_bool tunnel_forward, const char *crop,
SDL_bool send_frame_meta) {
char max_size_string[6];
char bit_rate_string[11];
sprintf(max_size_string, "%"PRIu16, max_size);
@ -92,7 +93,8 @@ static process_t execute_server(const char *serial,
max_size_string,
bit_rate_string,
tunnel_forward ? "true" : "false",
crop ? crop : "",
crop ? crop : "''",
send_frame_meta ? "true" : "false",
};
return adb_execute(serial, cmd, sizeof(cmd) / sizeof(cmd[0]));
}
@ -148,8 +150,9 @@ void server_init(struct server *server) {
*server = (struct server) SERVER_INITIALIZER;
}
SDL_bool server_start(struct server *server, const char *serial, Uint16 local_port,
Uint16 max_size, Uint32 bit_rate, const char *crop) {
SDL_bool server_start(struct server *server, const char *serial,
Uint16 local_port, Uint16 max_size, Uint32 bit_rate,
const char *crop, SDL_bool send_frame_meta) {
server->local_port = local_port;
if (serial) {
@ -190,8 +193,10 @@ SDL_bool server_start(struct server *server, const char *serial, Uint16 local_po
}
// server will connect to our server socket
server->process = execute_server(serial, max_size, bit_rate, crop,
server->tunnel_forward);
server->process = execute_server(serial, max_size, bit_rate,
server->tunnel_forward, crop,
send_frame_meta);
if (server->process == PROCESS_NONE) {
if (!server->tunnel_forward) {
close_socket(&server->server_socket);

@ -12,6 +12,7 @@ struct server {
Uint16 local_port;
SDL_bool tunnel_enabled;
SDL_bool tunnel_forward; // use "adb forward" instead of "adb reverse"
SDL_bool send_frame_meta; // request frame PTS to be able to record properly
SDL_bool server_copied_to_device;
};
@ -23,6 +24,7 @@ struct server {
.local_port = 0, \
.tunnel_enabled = SDL_FALSE, \
.tunnel_forward = SDL_FALSE, \
.send_frame_meta = SDL_FALSE, \
.server_copied_to_device = SDL_FALSE, \
}
@ -30,8 +32,9 @@ struct server {
void server_init(struct server *server);
// push, enable tunnel and start the server
SDL_bool server_start(struct server *server, const char *serial, Uint16 local_port,
Uint16 max_size, Uint32 bit_rate, const char *crop);
SDL_bool server_start(struct server *server, const char *serial,
Uint16 local_port, Uint16 max_size, Uint32 bit_rate,
const char *crop, SDL_bool send_frame_meta);
// block until the communication with the server is established
socket_t server_connect_to(struct server *server);

@ -15,3 +15,4 @@ org.gradle.jvmargs=-Xmx1536m
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true

@ -7,6 +7,7 @@ public class Options {
private int bitRate;
private boolean tunnelForward;
private Rect crop;
private boolean sendFrameMeta; // send PTS so that the client may record properly
public int getMaxSize() {
return maxSize;
@ -39,4 +40,12 @@ public class Options {
public void setCrop(Rect crop) {
this.crop = crop;
}
public boolean getSendFrameMeta() {
return sendFrameMeta;
}
public void setSendFrameMeta(boolean sendFrameMeta) {
this.sendFrameMeta = sendFrameMeta;
}
}

@ -3,6 +3,7 @@ package com.genymobile.scrcpy;
import com.genymobile.scrcpy.wrappers.SurfaceControl;
import android.graphics.Rect;
import android.media.MediaMuxer;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
@ -22,21 +23,26 @@ public class ScreenEncoder implements Device.RotationListener {
private static final int REPEAT_FRAME_DELAY = 6; // repeat after 6 frames
private static final int MICROSECONDS_IN_ONE_SECOND = 1_000_000;
private static final int NO_PTS = -1;
private final AtomicBoolean rotationChanged = new AtomicBoolean();
private final ByteBuffer headerBuffer = ByteBuffer.allocate(12);
private int bitRate;
private int frameRate;
private int iFrameInterval;
private boolean sendFrameMeta;
private long ptsOrigin;
public ScreenEncoder(int bitRate, int frameRate, int iFrameInterval) {
public ScreenEncoder(boolean sendFrameMeta, int bitRate, int frameRate, int iFrameInterval) {
this.sendFrameMeta = sendFrameMeta;
this.bitRate = bitRate;
this.frameRate = frameRate;
this.iFrameInterval = iFrameInterval;
}
public ScreenEncoder(int bitRate) {
this(bitRate, DEFAULT_FRAME_RATE, DEFAULT_I_FRAME_INTERVAL);
public ScreenEncoder(boolean sendFrameMeta, int bitRate) {
this(sendFrameMeta, bitRate, DEFAULT_FRAME_RATE, DEFAULT_I_FRAME_INTERVAL);
}
@Override
@ -80,6 +86,8 @@ public class ScreenEncoder implements Device.RotationListener {
private boolean encode(MediaCodec codec, FileDescriptor fd) throws IOException {
boolean eof = false;
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
while (!consumeRotationChange() && !eof) {
int outputBufferId = codec.dequeueOutputBuffer(bufferInfo, -1);
eof = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
@ -90,6 +98,11 @@ public class ScreenEncoder implements Device.RotationListener {
}
if (outputBufferId >= 0) {
ByteBuffer codecBuffer = codec.getOutputBuffer(outputBufferId);
if (sendFrameMeta) {
writeFrameMeta(fd, bufferInfo, codecBuffer.remaining());
}
IO.writeFully(fd, codecBuffer);
}
} finally {
@ -102,6 +115,25 @@ public class ScreenEncoder implements Device.RotationListener {
return !eof;
}
private void writeFrameMeta(FileDescriptor fd, MediaCodec.BufferInfo bufferInfo, int packetSize) throws IOException {
headerBuffer.clear();
long pts;
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
pts = NO_PTS; // non-media data packet
} else {
if (ptsOrigin == 0) {
ptsOrigin = bufferInfo.presentationTimeUs;
}
pts = bufferInfo.presentationTimeUs - ptsOrigin;
}
headerBuffer.putLong(pts);
headerBuffer.putInt(packetSize);
headerBuffer.flip();
IO.writeFully(fd, headerBuffer);
}
private static MediaCodec createCodec() throws IOException {
return MediaCodec.createEncoderByType("video/avc");
}

@ -3,6 +3,7 @@ package com.genymobile.scrcpy;
import android.graphics.Rect;
import java.io.IOException;
import java.util.Arrays;
public final class Server {
@ -14,7 +15,7 @@ public final class Server {
final Device device = new Device(options);
boolean tunnelForward = options.isTunnelForward();
try (DesktopConnection connection = DesktopConnection.open(device, tunnelForward)) {
ScreenEncoder screenEncoder = new ScreenEncoder(options.getBitRate());
ScreenEncoder screenEncoder = new ScreenEncoder(options.getSendFrameMeta(), options.getBitRate());
// asynchronous
startEventController(device, connection);
@ -71,6 +72,12 @@ public final class Server {
Rect crop = parseCrop(args[3]);
options.setCrop(crop);
if (args.length < 5) {
return options;
}
boolean sendFrameMeta = Boolean.parseBoolean(args[4]);
options.setSendFrameMeta(sendFrameMeta);
return options;
}
