The audio code still needs debugging; it currently panics.
parent 7ef721afb7
commit edbe406f5b
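For context on the audio path touched below (the wavcapture command, the 44-byte header skip, and the WavHeader fields): QEMU's wavcapture human-monitor command emits an ordinary PCM WAV stream into the named pipe, so a reader has to consume a 44-byte header before the interleaved little-endian samples begin. The following standalone sketch only illustrates that layout; it is not code from this repository, and the FIFO path and constant names are illustrative assumptions (the diff's audiodriver offset constants are assumed to add up to the same 44 bytes).

// Standalone sketch (assumption, not code from this commit): skip the
// 44-byte WAV header that QEMU's "wavcapture" command writes before the
// raw PCM samples, and read the format fields from it.
package main

import (
    "encoding/binary"
    "fmt"
    "io"
    "os"
)

const (
    riffHeaderSize   = 12 // "RIFF" + chunk size + "WAVE"
    fmtChunkIDSize   = 4  // "fmt "
    fmtChunkLenSize  = 4  // uint32 size of the fmt chunk body
    fmtChunkBodySize = 16 // PCM fmt chunk body (the diff's waveHeaderSize)
    dataChunkIDSize  = 4  // "data"
    dataChunkLenSize = 4  // uint32 size of the data chunk
    wavHeaderSize    = riffHeaderSize + fmtChunkIDSize + fmtChunkLenSize +
        fmtChunkBodySize + dataChunkIDSize + dataChunkLenSize // 44 bytes
)

func main() {
    f, err := os.Open("/tmp/qemu-audio.fifo") // hypothetical FIFO path
    if err != nil {
        fmt.Fprintln(os.Stderr, err)
        os.Exit(1)
    }
    defer f.Close()

    // Consume the header once; everything after it is interleaved
    // little-endian PCM.
    var header [wavHeaderSize]byte
    if _, err := io.ReadFull(f, header[:]); err != nil {
        fmt.Fprintln(os.Stderr, err)
        os.Exit(1)
    }
    channels := binary.LittleEndian.Uint16(header[22:24])
    sampleRate := binary.LittleEndian.Uint32(header[24:28])
    fmt.Printf("skipped %d header bytes: %d Hz, %d channel(s)\n",
        wavHeaderSize, sampleRate, channels)
}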
@@ -54,6 +54,7 @@ func (s *Server) Run() error {
             logrus.Fatal("cannot run qemuserver with error: ", err)
         }
     }()
+    logrus.Debug("qemu server running")

     return webServer.Run()
 }
@@ -23,7 +23,7 @@ const BitsPerByte = 8

 type WavFIFODriver struct {
     PCM        <-chan [BufferSize]byte
-    WaveHeader <-chan *WavHeader
+    WaveHeader *WavHeader
     closed     <-chan struct{}
     cancel     func()
 }
@@ -42,19 +42,19 @@ func (w *WavFIFODriver) Open() error {
 }

 func (w *WavFIFODriver) Close() error {
-    defer w.cancel()
+    w.cancel()
     return nil
 }

 func (w *WavFIFODriver) Properties() []prop.Media {
-    waveHeader := <-w.WaveHeader
-    logrus.Debugf("wave header: %v", waveHeader)
+    logrus.Debugf("wave header: %v", w.WaveHeader)
     return []prop.Media{
         {
             Audio: prop.Audio{
-                SampleRate:    int(waveHeader.SampleRate),
-                ChannelCount:  int(waveHeader.NumChannels),
-                Latency:       waveHeader.GetLatnecy(),
+                SampleRate:    int(w.WaveHeader.SampleRate),
+                ChannelCount:  int(w.WaveHeader.NumChannels),
+                SampleSize:    int(w.WaveHeader.BitsPerSample),
+                Latency:       w.WaveHeader.GetLatnecy(),
                 IsFloat:       false, // just 8bit or 16bit with qemu
                 IsBigEndian:   false, // qemu should be little endian
                 IsInterleaved: true,
@@ -64,13 +64,14 @@ func (w *WavFIFODriver) Properties() []prop.Media {
 }

 func (w *WavFIFODriver) AudioRecord(p prop.Media) (audio.Reader, error) {
-    a := wave.NewInt16Interleaved(wave.ChunkInfo{
-        Len:          BufferSize / int(p.SampleSize/BitsPerByte),
-        Channels:     p.ChannelCount,
-        SamplingRate: p.SampleRate,
-    })
+    logrus.Debug(p)

     reader := func() (wave.Audio, func(), error) {
+        a := wave.NewInt16Interleaved(wave.ChunkInfo{
+            Len:          BufferSize / int(p.SampleSize/BitsPerByte),
+            Channels:     p.ChannelCount,
+            SamplingRate: p.SampleRate,
+        })
         select {
         case <-w.closed:
             return nil, func() {}, io.EOF
@@ -29,7 +29,7 @@ type CommandLine struct {
 }

 func ParseEvent(b []byte) (*Event, error) {
-    var event *Event
+    event := &Event{}
     if err := json.Unmarshal(b, event); err != nil {
         return nil, err
     }
@@ -6,6 +6,7 @@ import (
     "github.com/pion/mediadevices/pkg/codec/opus"
     "github.com/pion/mediadevices/pkg/codec/x264"
     "github.com/pion/mediadevices/pkg/driver"
+    "github.com/pion/mediadevices/pkg/prop"
     "github.com/pion/webrtc/v3"
     "github.com/sirupsen/logrus"
 )
@@ -45,7 +46,10 @@ func New(o *Options) (*Connection, error) {
     }

     s, err := mediadevices.GetUserMedia(mediadevices.MediaStreamConstraints{
-        Video: func(mtc *mediadevices.MediaTrackConstraints) {},
+        Video: func(mtc *mediadevices.MediaTrackConstraints) {
+            mtc.Height = prop.Int(o.Video.Height)
+            mtc.Width = prop.Int(o.Video.Width)
+        },
         Audio: func(mtc *mediadevices.MediaTrackConstraints) {},
         Codec: codecSelector,
     })
@@ -84,7 +88,7 @@ func (c *Connection) Regist(offer *webrtc.SessionDescription) (*webrtc.SessionDe

     for _, track := range c.stream.GetTracks() {
         track.OnEnded(func(err error) {
-            logrus.Errorf("Track (ID: %s) ended with error: %v", track.ID(), err)
+            logrus.Errorf("Track (ID: %s, kind: %s) ended with error: %v", track.ID(), track.Kind().String(), err)
         })
         _, err := rtc.AddTransceiverFromTrack(track, webrtc.RTPTransceiverInit{
             Direction: webrtc.RTPTransceiverDirectionSendonly,
@@ -15,12 +15,19 @@ import (
     "github.com/sirupsen/logrus"
 )

+const waveHeaderSize = 16
+
+var waveHeader = &audiodriver.WavHeader{
+    Size:          waveHeaderSize,
+    AudioFormat:   1,
+    NumChannels:   2,
+    SampleRate:    44100,
+    BlockAlign:    4,
+    BitsPerSample: 16,
+}
+
 type Server struct {
-    options      *Options
-    QmpConnector struct {
-        RX chan *qemuconnection.Event
-        TX chan qemu.Status
-    }
+    options     *Options
     qemu        *qemu.Domain
     audioHeader chan *audiodriver.WavHeader
     pcm         chan [audiodriver.BufferSize]byte
@@ -38,8 +45,6 @@ func NewServer(o *Options) (*Server, error) {
         audioHeader: make(chan *audiodriver.WavHeader, 1),
         pcm:         make(chan [audiodriver.BufferSize]byte),
     }
-    server.QmpConnector.RX = make(chan *qemuconnection.Event)
-    server.QmpConnector.TX = make(chan qemu.Status)

     u, err := url.Parse(o.QmpAddress)
     if err != nil {
@@ -80,7 +85,7 @@ func NewServer(o *Options) (*Server, error) {
         return nil, err
     }
     audio.PCM = server.pcm
-    audio.WaveHeader = server.audioHeader
+    audio.WaveHeader = waveHeader

     if err := driver.GetManager().Register(
         vncdriver.NewVnc(o.VNCAddress),
@@ -98,84 +103,26 @@ func NewServer(o *Options) (*Server, error) {

func (s *Server) Run() error {
    logrus.Debug("qemu server running")
    defer logrus.Debug("qemu server exit")
    defer s.qemu.Close()

    s.startCapture()
    logrus.Debug("qemu capture start")

    for ev := range s.QmpConnector.RX {
        if ev.Type == qemuconnection.QueryStatusEvent {
            status, err := s.qemu.Status()
            if err != nil {
                logrus.Error("get qemu status error: ", err)
                continue
            }
            s.QmpConnector.TX <- status
            continue
        }
        for _, cmd := range ev.ToQemuCommand() {
            _, err := s.qemu.Run(cmd)
            if err != nil {
                logrus.Error("run command error: ", err)
            }
        }
    }
    return nil
}

func (s *Server) startCapture() {
    const waveHeaderSize = 16
    waveHeader := &audiodriver.WavHeader{
        Size:          waveHeaderSize,
        AudioFormat:   1,
        NumChannels:   2,
        SampleRate:    44100,
        BlockAlign:    4,
        BitsPerSample: 16,
    f, err := os.Open(s.options.AudioPipe)
    if err != nil {
        logrus.Fatal(err)
    }
    defer f.Close()
    logrus.Debug("start reading fifo")

    go func() {
        f, err := os.Open(s.options.AudioPipe)
        if err != nil {
            logrus.Fatal(err)
        }
        defer f.Close()

        logrus.Debug("start reading from fifo")

        s.audioHeader <- waveHeader
        close(s.audioHeader) // only once

        // skip to pcm data, for 44 bytes.
        var _dataChunkHeader [audiodriver.FmtHeaderOffset +
            audiodriver.FmtHeaderIDSize + audiodriver.FmtHeaderChunkSizeSize + waveHeaderSize +
            audiodriver.DataChunkIDSize + audiodriver.DataChunkSizeSize]byte
        if _, err := f.Read(_dataChunkHeader[:]); err != nil {
            logrus.Fatal(err)
        }

        defer close(s.pcm)
        for {
            var b [audiodriver.BufferSize]byte
            if _, err := f.Read(b[:]); err != nil {
                logrus.Error(err)
            }
            select {
            case s.pcm <- b:
            case <-time.After(waveHeader.GetLatnecy()):
            }
        }
    }()

    go func() {
        logrus.Debug("setting audio capture")
        if _, err := s.qemu.Run(qmp.Command{
            Execute: "human-monitor-command",
            Args: map[string]string{
                "command-line": fmt.Sprintf(
                    "wavcapture %s %s",
                    "wavcapture %s %s %d %d %d",
                    s.options.AudioPipe,
                    s.options.AudioDevice,
                    waveHeader.SampleRate,
                    waveHeader.BitsPerSample,
                    waveHeader.NumChannels,
                ),
            },
        }); err != nil {
@@ -183,6 +130,27 @@ func (s *Server) startCapture() {
         }
         logrus.Debug("audio capture set")
     }()
+
+    logrus.Debug("skip wave headers, to the PCM!")
+    // skip to pcm data, for 44 bytes.
+    var _dataChunkHeader [audiodriver.FmtHeaderOffset +
+        audiodriver.FmtHeaderIDSize + audiodriver.FmtHeaderChunkSizeSize + waveHeaderSize +
+        audiodriver.DataChunkIDSize + audiodriver.DataChunkSizeSize]byte
+    if _, err := f.Read(_dataChunkHeader[:]); err != nil {
+        logrus.Fatal(err)
+    }
+
+    defer close(s.pcm)
+    for {
+        var b [audiodriver.BufferSize]byte
+        if _, err := f.Read(b[:]); err != nil {
+            logrus.Error(err)
+        }
+        select {
+        case s.pcm <- b:
+        case <-time.After(waveHeader.GetLatnecy()):
+        }
+    }
 }

 func (s *Server) SendEvent(b []byte) error {
@@ -190,13 +158,19 @@ func (s *Server) SendEvent(b []byte) error {
     if err != nil {
         return err
     }
-    s.QmpConnector.RX <- ev
+    for _, cmd := range ev.ToQemuCommand() {
+        _, err := s.qemu.Run(cmd)
+        if err != nil {
+            return err
+        }
+    }
     return nil
 }

 func (s *Server) GetStatus() qemu.Status {
-    s.QmpConnector.RX <- &qemuconnection.Event{
-        Type: qemuconnection.QueryStatusEvent,
+    status, err := s.qemu.Status()
+    if err != nil {
+        return qemu.StatusIOError
     }
-    return <-s.QmpConnector.TX
+    return status
 }
@@ -1,5 +1,8 @@
 <template>
-  <video autoplay muted id="video"></video>
+  <div>
+    <canvas id="vnc" />
+    <div id="data"></div>
+  </div>
 </template>

 <script setup>
@@ -25,6 +28,8 @@ const makeEvent = (evType, args) => ({
   args: args,
 });

+let dataChannel;
+
 // eslint-disable-next-line
 const sendSpecialKey = (key) => {
   console.log(key);
@@ -50,8 +55,6 @@ const resetVM = () => {
   );
 };

-let dataChannel;
-
 onMounted(() => {
   console.log(props.methods);
   // eslint-disable-next-line
@@ -59,8 +62,7 @@ onMounted(() => {
   // eslint-disable-next-line
   props.methods.resetVM = resetVM;

-  const video = document.querySelector("video#video");
-  video.oncontextmenu = () => false;
+  const video = document.querySelector("canvas#vnc");

   store.getICEServers().then((servers) => {
     const pc = new RTCPeerConnection({
@@ -71,19 +73,29 @@ onMounted(() => {
       ],
     });
     pc.oniceconnectionstatechange = () => console.log(pc.iceConnectionState);
-    pc.addTransceiver("video");
-    pc.addTransceiver("audio");
+    pc.addTransceiver("video", {
+      direction: "recvonly",
+    });
+    pc.addTransceiver("audio", {
+      direction: "recvonly",
+    });

-    const dataChannel = pc.createDataChannel("control");
+    dataChannel = pc.createDataChannel("control");
     dataChannel.onmessage = (e) => {
-      const d = JSON.parse(e.data);
+      const enc = new TextDecoder("utf-8");
+      const buf = new Uint8Array(e.data);
+      const d = JSON.parse(enc.decode(buf));
       store.delay = +new Date() - d.server_time;
       store.qemuStatus = d.qemu_status;
     };
     pc.ontrack = (ev) => {
-      video.srcObject = ev.streams[0];
-      video.autoplay = true;
-      video.controls = false;
+      console.log(ev);
+      const el = document.createElement(ev.track.kind);
+      el.id = ev.track.kind;
+      el.srcObject = ev.streams[0];
+      el.autoplay = true;
+      el.controls = true;
+      document.getElementById("data").appendChild(el);
     };

     dataChannel.onopen = () => {
@@ -109,16 +121,12 @@ onMounted(() => {
     };
     //video.onmousewheel = (ev) => {};
     window.onkeydown = (ev) => {
-      let key = "";
-      if (ev.ctrlKey && ev.which !== 17) key = "ctrl-" + ev.key;
-      else key = "0x" + ev.which.toString(16);
-      if (ev.shiftKey && ev.which !== 16) key = "shift-" + ev.key;
-      else key = "0x" + ev.which.toString(16);
-      if (ev.altKey && ev.which !== 18) key = "alt-" + ev.key;
-      else key = "0x" + ev.which.toString(16);
+      let key = ev.key;
+      if (ev.ctrlKey && ev.which !== 17) key = "ctrl-" + key;
+      if (ev.shiftKey && ev.which !== 16) key = "shift-" + key;
+      if (ev.altKey && ev.which !== 18) key = "alt-" + key;
       if (ev.metaKey && ev.which !== 91 && ev.which !== 93)
-        key = "meta-" + ev.key;
-      else key = "0x" + ev.which.toString(16);
+        key = "meta-" + key;
+      if (!ev.altKey && !ev.shiftKey && !ev.ctrlKey && !ev.metaKey)
+        key = "0x" + ev.which.toString(16);