feat(web): add voice message recording, chunking, and playback

pull/14/head
alimu 2 weeks ago
parent 8fd031d9fc
commit b1b86487b7

@ -23,6 +23,15 @@ const STORAGE_CURRENT_SERVER_URL_KEY = "oms_current_server_url";
const MAX_SERVER_URLS = 8;
const CHANNEL_BROADCAST = "broadcast";
const CHANNEL_PRIVATE = "private";
// Message content kinds stored on each chat item.
const CONTENT_TEXT = "text";
const CONTENT_AUDIO = "audio";
// Wire-format markers prepended to single-shot and chunked voice payloads (v1).
const AUDIO_MESSAGE_PREFIX = "[[OMS_AUDIO_V1]]";
const AUDIO_CHUNK_MESSAGE_PREFIX = "[[OMS_AUDIO_CHUNK_V1]]";
// Max base64 characters per chunk and max chunks per voice message;
// together these bound the largest sendable recording.
const AUDIO_CHUNK_BASE64_SIZE = 20_000;
const MAX_AUDIO_CHUNK_COUNT = 30;
// How long a partially received chunked message is buffered before purge.
const AUDIO_CHUNK_BUFFER_TTL_MS = 180_000;
// Recordings shorter than this are discarded as accidental taps.
const MIN_AUDIO_DURATION_MS = 350;
// Upward slide distance (px) that turns release into "cancel recording".
const AUDIO_CANCEL_TRIGGER_PX = 96;
function isLikelyLocalHost(host) {
const value = (host || "").toLowerCase();
@ -156,6 +165,118 @@ function summarizeKey(key = "") {
return `${key.slice(0, 8)}...${key.slice(-8)}`;
}
/**
 * Normalize an inbound message `data` field into plain text.
 * Accepts raw strings, envelope objects carrying a string `payload`,
 * and anything else (stringified). null/undefined become "".
 * @param {unknown} data
 * @returns {string}
 */
function asPayloadText(data) {
  if (data == null) {
    return "";
  }
  if (typeof data === "string") {
    return data;
  }
  const payload = typeof data === "object" ? data.payload : undefined;
  return typeof payload === "string" ? payload : String(data);
}
/**
 * Split a base64 string into fixed-size slices for chunked transport.
 * Returns [] for empty input or a non-positive chunk size; returns the
 * whole string as a single element when it already fits in one chunk.
 * @param {string} base64
 * @param {number} chunkSize
 * @returns {string[]}
 */
function splitAudioBase64(base64, chunkSize) {
  if (!base64 || chunkSize <= 0) return [];
  if (base64.length <= chunkSize) return [base64];
  const chunkCount = Math.ceil(base64.length / chunkSize);
  return Array.from({ length: chunkCount }, (_, index) =>
    base64.slice(index * chunkSize, (index + 1) * chunkSize)
  );
}
/**
 * Decode a single-shot voice message payload.
 * Wire format: AUDIO_MESSAGE_PREFIX + JSON of
 * { encoding: "base64", mimeType, durationMillis, data }.
 * Returns null for anything that is not a valid base64 audio payload.
 * @param {string} payloadText
 * @returns {{mimeType: string, durationMillis: number, data: string} | null}
 */
function parseAudioPayload(payloadText) {
  if (!payloadText.startsWith(AUDIO_MESSAGE_PREFIX)) return null;
  const body = payloadText.slice(AUDIO_MESSAGE_PREFIX.length).trim();
  if (!body) return null;
  const envelope = safeJsonParse(body);
  if (!envelope?.data) return null;
  // Only base64 transport is supported; reject unknown encodings.
  const encoding = String(envelope.encoding || "base64").toLowerCase();
  if (encoding !== "base64") return null;
  return {
    mimeType: envelope.mimeType || "audio/mp4",
    durationMillis: Number(envelope.durationMillis) || 0,
    data: String(envelope.data || "")
  };
}
/**
 * Decode one chunk of a multi-part voice message.
 * Wire format: AUDIO_CHUNK_MESSAGE_PREFIX + JSON of
 * { encoding, mimeType, messageId, index, total, durationMillis, data }.
 * Validates base64 encoding, integer chunk coordinates, and the
 * 1..MAX_AUDIO_CHUNK_COUNT bound on `total`; returns null when invalid.
 * @param {string} payloadText
 * @returns {object | null}
 */
function parseAudioChunkPayload(payloadText) {
  if (!payloadText.startsWith(AUDIO_CHUNK_MESSAGE_PREFIX)) return null;
  const body = payloadText.slice(AUDIO_CHUNK_MESSAGE_PREFIX.length).trim();
  if (!body) return null;
  const envelope = safeJsonParse(body);
  if (!envelope) return null;
  const encoding = String(envelope.encoding || "base64").toLowerCase();
  if (encoding !== "base64") return null;
  const total = Number(envelope.total);
  const index = Number(envelope.index);
  const messageId = String(envelope.messageId || "");
  const data = String(envelope.data || "");
  const hasIdentity = Boolean(messageId) && Boolean(data);
  const hasValidShape =
    Number.isInteger(total) &&
    Number.isInteger(index) &&
    total >= 1 &&
    total <= MAX_AUDIO_CHUNK_COUNT &&
    index >= 0 &&
    index < total;
  if (!hasIdentity || !hasValidShape) return null;
  return {
    mimeType: envelope.mimeType || "audio/mp4",
    messageId,
    index,
    total,
    durationMillis: Number(envelope.durationMillis) || 0,
    data
  };
}
/** Render an audio clip duration: "M:SS" when >= 1 minute, otherwise "Ns". */
function formatAudioDuration(durationMillis) {
  const totalSeconds = Math.max(0, Math.floor((Number(durationMillis) || 0) / 1000));
  const seconds = totalSeconds % 60;
  const minutes = (totalSeconds - seconds) / 60;
  if (minutes === 0) {
    return `${seconds}s`;
  }
  return `${minutes}:${String(seconds).padStart(2, "0")}`;
}
/** Render the live recording timer with tenths of a second, e.g. "3.4s". */
function formatRecordingElapsed(durationMillis) {
  const millis = Math.max(0, Number(durationMillis) || 0);
  const wholeSeconds = Math.floor(millis / 1000);
  const tenths = Math.floor(millis / 100) % 10;
  return `${wholeSeconds}.${tenths}s`;
}
/**
 * Decode a base64 string into raw bytes.
 * Throws (via atob) when the input is not valid base64.
 * @param {string} base64
 * @returns {Uint8Array}
 */
function base64ToBytes(base64) {
  const binary = atob(base64);
  // atob yields Latin-1 only (code points 0..255), so mapping each
  // character's code point reproduces the original bytes exactly.
  return Uint8Array.from(binary, (char) => char.charCodeAt(0));
}
/**
 * Convert a Blob into its base64 payload — the body of the data URL
 * without the "data:<mime>;base64," header.
 * @param {Blob} blob
 * @returns {Promise<string>}
 */
function blobToBase64(blob) {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onerror = () => reject(reader.error || new Error("读取录音失败"));
    reader.onload = () => {
      const dataUrl = String(reader.result || "");
      const marker = "base64,";
      const markerIndex = dataUrl.indexOf(marker);
      // Strip the data-URL prefix when present; otherwise pass through as-is.
      resolve(markerIndex >= 0 ? dataUrl.slice(markerIndex + marker.length) : dataUrl);
    };
    reader.readAsDataURL(blob);
  });
}
/**
 * Choose a recording MIME type the current browser supports.
 * Prefers MP4/AAC (per the original's Android-playback comment);
 * returns "" when MediaRecorder or MP4 support is unavailable.
 * @returns {string}
 */
function pickRecordingMimeType() {
  const canProbe =
    typeof MediaRecorder !== "undefined" &&
    typeof MediaRecorder.isTypeSupported === "function";
  if (!canProbe) {
    return "";
  }
  for (const candidate of ["audio/mp4;codecs=mp4a.40.2", "audio/mp4"]) {
    if (MediaRecorder.isTypeSupported(candidate)) {
      return candidate;
    }
  }
  return "";
}
function createLocalId() {
const c = globalThis.crypto;
if (c?.randomUUID) {
@ -206,6 +327,14 @@ export default function App() {
const targetComposingRef = useRef(false);
const messageListRef = useRef(null);
const stickToBottomRef = useRef(true);
const incomingAudioChunkBuffersRef = useRef(new Map());
const audioPlayerRef = useRef(null);
const audioObjectUrlRef = useRef("");
const recordingSessionRef = useRef(null);
const recordingTimerRef = useRef(0);
const recordingStartedAtRef = useRef(0);
const recordPressDownYRef = useRef(0);
const transientStatusTimerRef = useRef(0);
const [status, setStatus] = useState("idle");
const [statusHint, setStatusHint] = useState("点击连接开始聊天");
@ -219,6 +348,11 @@ export default function App() {
const [messages, setMessages] = useState([]);
const [showSystemMessages, setShowSystemMessages] = useState(false);
const [sending, setSending] = useState(false);
const [inputMode, setInputMode] = useState(CONTENT_TEXT);
const [isRecording, setIsRecording] = useState(false);
const [cancelOnRelease, setCancelOnRelease] = useState(false);
const [recordingElapsedMillis, setRecordingElapsedMillis] = useState(0);
const [playingMessageId, setPlayingMessageId] = useState("");
const [certFingerprint, setCertFingerprint] = useState("");
const [myPublicKey, setMyPublicKey] = useState("");
const [publicKeyBusy, setPublicKeyBusy] = useState(false);
@ -230,6 +364,7 @@ export default function App() {
const canConnect = status === "idle" || status === "error";
const canDisconnect = status !== "idle" && status !== "error";
const canSend = isConnected && draft.trim().length > 0 && !sending;
const canHoldToRecord = isConnected && !sending && (!directMode || targetKey.trim().length > 0);
const activeChannel = directMode ? CHANNEL_PRIVATE : CHANNEL_BROADCAST;
const mobileConnectText = useMemo(() => {
if (status === "ready") return "已连接";
@ -309,6 +444,44 @@ export default function App() {
clearTimeout(messageCopyTimerRef.current);
messageCopyTimerRef.current = 0;
}
if (recordingTimerRef.current) {
clearInterval(recordingTimerRef.current);
recordingTimerRef.current = 0;
}
if (transientStatusTimerRef.current) {
clearTimeout(transientStatusTimerRef.current);
transientStatusTimerRef.current = 0;
}
if (recordingSessionRef.current) {
try {
if (recordingSessionRef.current.recorder?.state !== "inactive") {
recordingSessionRef.current.recorder.stop();
}
} catch {
// ignore
}
recordingSessionRef.current.stream?.getTracks?.().forEach((track) => {
try {
track.stop();
} catch {
// ignore
}
});
recordingSessionRef.current = null;
}
if (audioPlayerRef.current) {
try {
audioPlayerRef.current.pause();
} catch {
// ignore
}
audioPlayerRef.current.src = "";
audioPlayerRef.current = null;
}
if (audioObjectUrlRef.current) {
URL.revokeObjectURL(audioObjectUrlRef.current);
audioObjectUrlRef.current = "";
}
};
}, []);
@ -362,7 +535,7 @@ export default function App() {
]);
}
function pushIncoming(sender, text, subtitle = "", channel = CHANNEL_BROADCAST) {
function pushIncoming(sender, text, subtitle = "", channel = CHANNEL_BROADCAST, options = {}) {
setMessages((prev) => [
...prev,
{
@ -372,12 +545,16 @@ export default function App() {
subtitle,
channel,
content: text,
contentType: options.contentType || CONTENT_TEXT,
audioBase64: options.audioBase64 || "",
audioDurationMillis: Number(options.audioDurationMillis) || 0,
audioMimeType: options.audioMimeType || "",
ts: Date.now()
}
]);
}
function pushOutgoing(text, subtitle = "", channel = CHANNEL_BROADCAST) {
function pushOutgoing(text, subtitle = "", channel = CHANNEL_BROADCAST, options = {}) {
setMessages((prev) => [
...prev,
{
@ -387,11 +564,47 @@ export default function App() {
subtitle,
channel,
content: text,
contentType: options.contentType || CONTENT_TEXT,
audioBase64: options.audioBase64 || "",
audioDurationMillis: Number(options.audioDurationMillis) || 0,
audioMimeType: options.audioMimeType || "",
ts: Date.now()
}
]);
}
// Append an inbound voice message ("语音消息") to the chat list with
// the audio payload attached as message options.
function pushIncomingAudio(sender, subtitle, audioBase64, durationMillis, mimeType, channel) {
  const audioOptions = {
    contentType: CONTENT_AUDIO,
    audioBase64,
    audioDurationMillis: durationMillis,
    audioMimeType: mimeType
  };
  pushIncoming(sender, "语音消息", subtitle, channel, audioOptions);
}
// Append an outbound voice message ("语音消息") to the chat list with
// the audio payload attached as message options.
function pushOutgoingAudio(subtitle, audioBase64, durationMillis, mimeType, channel) {
  const audioOptions = {
    contentType: CONTENT_AUDIO,
    audioBase64,
    audioDurationMillis: durationMillis,
    audioMimeType: mimeType
  };
  pushOutgoing("语音消息", subtitle, channel, audioOptions);
}
// Show a temporary status hint, then restore the default "connected"
// hint after `durationMs` — but only if the connection is still ready.
function showTransientStatusHint(text, durationMs = 2200) {
  setStatusHint(text);
  const pendingTimer = transientStatusTimerRef.current;
  if (pendingTimer) {
    clearTimeout(pendingTimer);
    transientStatusTimerRef.current = 0;
  }
  const restoreDefaultHint = () => {
    transientStatusTimerRef.current = 0;
    if (statusRef.current === "ready") {
      setStatusHint("已连接,可以开始聊天");
    }
  };
  transientStatusTimerRef.current = window.setTimeout(restoreDefaultHint, durationMs);
}
async function ensureIdentity() {
if (identityRef.current) {
return identityRef.current;
@ -484,6 +697,399 @@ export default function App() {
}, 1600);
}
// Drop partially received audio chunk buffers whose TTL has elapsed,
// so abandoned transfers do not accumulate in memory.
function purgeExpiredAudioChunkBuffers(nowMillis = Date.now()) {
  const buffers = incomingAudioChunkBuffersRef.current;
  if (buffers.size === 0) return;
  for (const [bufferKey, buffer] of buffers) {
    const age = nowMillis - buffer.createdAtMillis;
    if (age >= AUDIO_CHUNK_BUFFER_TTL_MS) {
      buffers.delete(bufferKey);
    }
  }
}
// Buffer one chunk of a multi-part voice message and, once every slot
// for (channel, sender, messageId) is filled, merge the base64 parts
// and surface the completed audio message in the chat list.
function ingestIncomingAudioChunk(sender, subtitle, channel, chunk) {
  const now = Date.now();
  // Evict stale partial transfers before (possibly) allocating a new buffer.
  purgeExpiredAudioChunkBuffers(now);
  // Keyed per channel + sender + messageId so concurrent transfers from
  // different peers cannot collide.
  const key = `${channel}:${sender}:${chunk.messageId}`;
  const map = incomingAudioChunkBuffersRef.current;
  const existing = map.get(key);
  // Reuse the buffer only when its declared chunk count matches; a
  // mismatched `total` restarts the transfer with a fresh buffer.
  const active =
    !existing || existing.total !== chunk.total
      ? {
          sender,
          subtitle,
          channel,
          total: chunk.total,
          durationMillis: Math.max(0, Number(chunk.durationMillis) || 0),
          mimeType: chunk.mimeType || "audio/mp4",
          createdAtMillis: now,
          chunks: Array.from({ length: chunk.total }, () => "")
        }
      : existing;
  if (!existing || existing.total !== chunk.total) {
    map.set(key, active);
  } else if (existing.sender !== sender || existing.channel !== channel) {
    // Defensive: drop chunks whose origin no longer matches the buffer.
    return;
  }
  active.chunks[chunk.index] = chunk.data;
  // Wait until every slot is non-empty before assembling.
  if (active.chunks.some((part) => !part)) return;
  map.delete(key);
  const merged = active.chunks.join("");
  if (!merged) return;
  pushIncomingAudio(
    active.sender,
    active.subtitle,
    merged,
    active.durationMillis,
    active.mimeType,
    active.channel
  );
}
// Revoke and forget the object URL backing the most recent playback blob.
function releaseAudioObjectUrl() {
  const url = audioObjectUrlRef.current;
  if (!url) return;
  URL.revokeObjectURL(url);
  audioObjectUrlRef.current = "";
}
// Halt any in-progress voice playback: pause the shared player, rewind,
// detach the source, free the object URL, and clear the "playing" state.
function stopAudioPlayback() {
  const player = audioPlayerRef.current;
  if (player) {
    try {
      player.pause();
    } catch {
      // ignore — best-effort pause
    }
    player.currentTime = 0;
    player.src = "";
  }
  releaseAudioObjectUrl();
  setPlayingMessageId("");
}
// Toggle playback of a voice message bubble. Clicking the message that
// is already playing stops it; clicking another message stops the
// current clip and starts the new one from a freshly built object URL.
async function togglePlayAudioMessage(item) {
  if (!item?.audioBase64) return;
  if (playingMessageId === item.id) {
    stopAudioPlayback();
    return;
  }
  // Ensure any previous clip is fully stopped before switching sources.
  stopAudioPlayback();
  try {
    const bytes = base64ToBytes(item.audioBase64);
    if (!bytes.length) {
      pushSystem("语音播放失败:空数据");
      return;
    }
    const mimeType = item.audioMimeType || "audio/mp4";
    const blob = new Blob([bytes], { type: mimeType });
    const url = URL.createObjectURL(blob);
    // The ref was already cleared by stopAudioPlayback above; this
    // release is defensive before storing the new URL.
    releaseAudioObjectUrl();
    audioObjectUrlRef.current = url;
    // Lazily create one shared <audio> element reused by all messages.
    if (!audioPlayerRef.current) {
      audioPlayerRef.current = new Audio();
    }
    const player = audioPlayerRef.current;
    player.onended = () => {
      stopAudioPlayback();
    };
    player.onerror = () => {
      stopAudioPlayback();
      pushSystem("语音播放失败:浏览器不支持该音频格式");
    };
    player.src = url;
    await player.play();
    // Mark as playing only after play() resolves (autoplay may reject).
    setPlayingMessageId(item.id);
  } catch (error) {
    stopAudioPlayback();
    pushSystem(`语音播放失败:${error?.message || "unknown error"}`);
  }
}
// Sign `payloadText` with the local identity and send it to the server
// inside an RSA-encrypted envelope. The signature input is the
// newline-joined tuple [type, key, payload, timestamp, nonce] — the
// server must verify the same ordering. Throws when the socket,
// identity, or server public key is not ready.
async function sendSignedPayload(type, key, payloadText) {
  const ws = wsRef.current;
  if (!ws || ws.readyState !== WebSocket.OPEN) {
    throw new Error("连接不可用");
  }
  const identity = identityRef.current;
  const serverPublicKey = serverPublicKeyRef.current;
  if (!identity || !serverPublicKey) {
    throw new Error("身份或服务端公钥未就绪");
  }
  const timestamp = unixSecondsNow();
  // Fresh nonce per message — presumably for replay protection; verify
  // against the server's validation logic.
  const nonce = createNonce();
  const signInput = [type, key, payloadText, timestamp, nonce].join("\n");
  const signature = await signText(identity.signPrivateKey, signInput);
  const envelope = {
    type,
    key,
    data: {
      payload: payloadText,
      timestamp,
      nonce,
      signature
    }
  };
  // rsaEncryptChunked — presumably splits plaintext to fit RSA block
  // size limits; see its definition.
  const cipher = await rsaEncryptChunked(serverPublicKey, JSON.stringify(envelope));
  ws.send(cipher);
}
// Encrypt-and-send a recorded voice clip. Small clips go out as one
// AUDIO_MESSAGE_PREFIX payload; larger ones are split into at most
// MAX_AUDIO_CHUNK_COUNT AUDIO_CHUNK_MESSAGE_PREFIX payloads that share
// a messageId so the receiver can reassemble them.
async function sendAudioMessage(audioBase64, durationMillis, mimeType = "audio/mp4") {
  if (!isConnected || sending) return;
  const normalized = String(audioBase64 || "").trim();
  if (!normalized) return;
  // Private chat requires an explicit target key; broadcast uses "".
  const key = directMode ? targetKey.trim() : "";
  if (directMode && !key) {
    showTransientStatusHint("请先填写目标公钥,再发送私聊消息");
    return;
  }
  const type = key ? "forward" : "broadcast";
  const channel = key ? CHANNEL_PRIVATE : CHANNEL_BROADCAST;
  const subtitle = key ? `私聊 ${summarizeKey(key)}` : "";
  const safeDuration = Math.max(0, Number(durationMillis) || 0);
  const chunks = splitAudioBase64(normalized, AUDIO_CHUNK_BASE64_SIZE);
  // Hard cap: clips needing more than MAX_AUDIO_CHUNK_COUNT chunks are rejected.
  if (chunks.length > MAX_AUDIO_CHUNK_COUNT) {
    showTransientStatusHint("语音过长,超过可发送分片上限");
    pushSystem("语音过长,已取消发送");
    return;
  }
  setSending(true);
  try {
    if (chunks.length === 1) {
      // Single-shot path: whole clip in one signed payload.
      const payloadText =
        AUDIO_MESSAGE_PREFIX +
        JSON.stringify({
          version: 1,
          encoding: "base64",
          mimeType: mimeType || "audio/mp4",
          durationMillis: safeDuration,
          data: normalized
        });
      await sendSignedPayload(type, key, payloadText);
    } else {
      const messageId = createLocalId();
      // NOTE(review): chunks are sent sequentially; a mid-transfer failure
      // leaves the receiver with a partial buffer that expires via TTL.
      for (let i = 0; i < chunks.length; i += 1) {
        const payloadText =
          AUDIO_CHUNK_MESSAGE_PREFIX +
          JSON.stringify({
            version: 1,
            encoding: "base64",
            mimeType: mimeType || "audio/mp4",
            messageId,
            index: i,
            total: chunks.length,
            durationMillis: safeDuration,
            data: chunks[i]
          });
        await sendSignedPayload(type, key, payloadText);
      }
    }
    // Echo the full clip locally regardless of how it was transported.
    pushOutgoingAudio(subtitle, normalized, safeDuration, mimeType || "audio/mp4", channel);
  } catch (error) {
    const message = error?.message || "unknown error";
    showTransientStatusHint(`语音发送失败:${message}`);
    pushSystem(`语音发送失败:${message}`);
  } finally {
    setSending(false);
  }
}
// Stop the 100ms elapsed-time ticker if one is running.
function clearRecordingTick() {
  const timerId = recordingTimerRef.current;
  if (!timerId) return;
  clearInterval(timerId);
  recordingTimerRef.current = 0;
}
// Begin a hold-to-talk recording session: validates connection/target
// preconditions, requests the microphone, requires an MP4/AAC-capable
// MediaRecorder, then starts capture plus a 100ms elapsed-time ticker.
// The clip is assembled and sent later, in recorder.onstop.
async function startRecording() {
  // Re-entrancy guard: only one recording session at a time.
  if (recordingSessionRef.current || isRecording) return;
  if (!canHoldToRecord) {
    if (directMode && !targetKey.trim()) {
      showTransientStatusHint("请先填写目标公钥,再发送私聊消息");
    }
    return;
  }
  if (!navigator.mediaDevices?.getUserMedia || typeof MediaRecorder === "undefined") {
    showTransientStatusHint("当前浏览器不支持语音录制");
    pushSystem("语音录制失败:浏览器不支持 MediaRecorder");
    return;
  }
  let stream;
  try {
    stream = await navigator.mediaDevices.getUserMedia({ audio: true });
  } catch (error) {
    showTransientStatusHint("请先授予麦克风权限");
    pushSystem(`语音录制失败:${error?.message || "未获得权限"}`);
    return;
  }
  const mimeType = pickRecordingMimeType();
  if (!mimeType) {
    // Release the microphone before bailing out.
    stream.getTracks().forEach((track) => {
      try {
        track.stop();
      } catch {
        // ignore
      }
    });
    showTransientStatusHint("当前浏览器不支持 MP4 语音录制");
    pushSystem("语音录制失败:当前浏览器不支持 MP4/AAC 编码Android 端可能无法播放");
    return;
  }
  // NOTE(review): mimeType is always truthy here (guarded above), so the
  // fallback branch of this ternary is dead code.
  const recorder = mimeType ? new MediaRecorder(stream, { mimeType }) : new MediaRecorder(stream);
  const session = {
    stream,
    recorder,
    chunks: [], // raw Blob parts collected by ondataavailable
    mimeType: recorder.mimeType || "audio/mp4",
    sendOnStop: true // flipped to false by cancel gestures before stop()
  };
  recordingSessionRef.current = session;
  recordingStartedAtRef.current = Date.now();
  setRecordingElapsedMillis(0);
  setIsRecording(true);
  setCancelOnRelease(false);
  clearRecordingTick();
  // Drive the "recording N.Ns" label at 10 Hz.
  recordingTimerRef.current = window.setInterval(() => {
    setRecordingElapsedMillis(Math.max(0, Date.now() - recordingStartedAtRef.current));
  }, 100);
  recorder.ondataavailable = (event) => {
    if (event.data && event.data.size > 0) {
      session.chunks.push(event.data);
    }
  };
  // NOTE(review): onerror only reports; it does not stop the session —
  // confirm whether a recorder error should also abort the recording.
  recorder.onerror = () => {
    showTransientStatusHint("录音失败,请重试");
    pushSystem("语音录制失败MediaRecorder 发生错误");
  };
  // onstop owns all teardown (ticker, UI state, mic) and decides whether
  // to send the clip based on sendOnStop and the recorded duration.
  recorder.onstop = async () => {
    clearRecordingTick();
    const recordedDuration = Math.max(0, Date.now() - recordingStartedAtRef.current);
    recordingStartedAtRef.current = 0;
    setIsRecording(false);
    setCancelOnRelease(false);
    setRecordingElapsedMillis(0);
    recordingSessionRef.current = null;
    session.stream.getTracks().forEach((track) => {
      try {
        track.stop();
      } catch {
        // ignore
      }
    });
    if (!session.sendOnStop) {
      showTransientStatusHint("已取消语音发送", 1600);
      return;
    }
    // Discard accidental taps shorter than MIN_AUDIO_DURATION_MS.
    if (recordedDuration < MIN_AUDIO_DURATION_MS) {
      showTransientStatusHint("录音时间太短", 1800);
      return;
    }
    if (session.chunks.length === 0) {
      showTransientStatusHint("录音失败,请重试");
      pushSystem("语音录制失败:未采集到音频数据");
      return;
    }
    try {
      const blob = new Blob(session.chunks, { type: session.mimeType || "audio/webm" });
      const base64 = await blobToBase64(blob);
      await sendAudioMessage(base64, recordedDuration, session.mimeType || "audio/webm");
    } catch (error) {
      showTransientStatusHint("录音失败,请重试");
      pushSystem(`语音录制失败:${error?.message || "unknown error"}`);
    }
  };
  try {
    recorder.start();
  } catch (error) {
    // start() failed synchronously: undo all session state and free the mic.
    clearRecordingTick();
    recordingSessionRef.current = null;
    recordingStartedAtRef.current = 0;
    setIsRecording(false);
    setCancelOnRelease(false);
    setRecordingElapsedMillis(0);
    stream.getTracks().forEach((track) => {
      try {
        track.stop();
      } catch {
        // ignore
      }
    });
    showTransientStatusHint("录音失败,请重试");
    pushSystem(`语音录制失败:${error?.message || "unknown error"}`);
    return;
  }
}
// Stop the active recording session. `send` decides whether the clip
// is transmitted (true) or discarded (false, e.g. slide-up cancel).
// The actual assembly/teardown runs in the recorder's onstop handler.
function finishRecording(send) {
  const session = recordingSessionRef.current;
  if (!session) return;
  // onstop reads this flag to decide between sending and cancelling.
  session.sendOnStop = Boolean(send);
  clearRecordingTick();
  setIsRecording(false);
  setRecordingElapsedMillis(0);
  try {
    if (session.recorder.state !== "inactive") {
      session.recorder.stop();
    }
  } catch {
    // stop() threw, so onstop will never fire: release the microphone
    // and clear the session here instead.
    session.stream.getTracks().forEach((track) => {
      try {
        track.stop();
      } catch {
        // ignore
      }
    });
    recordingSessionRef.current = null;
    showTransientStatusHint("录音失败,请重试");
  }
}
// Pointer-down on the hold-to-talk button: remember the press origin
// (for slide-up-to-cancel), capture the pointer, and start recording.
async function onHoldToTalkPointerDown(event) {
  event.preventDefault();
  recordPressDownYRef.current = event.clientY;
  setCancelOnRelease(false);
  try {
    event.currentTarget.setPointerCapture?.(event.pointerId);
  } catch {
    // ignore — pointer capture is best-effort
  }
  await startRecording();
}
// While recording, flag cancel mode once the finger slides far enough up.
function onHoldToTalkPointerMove(event) {
  if (!isRecording) return;
  const upwardTravel = recordPressDownYRef.current - event.clientY;
  setCancelOnRelease(upwardTravel > AUDIO_CANCEL_TRIGGER_PX);
}
// Release: send the clip unless the user slid into the cancel zone.
function onHoldToTalkPointerUp(event) {
  if (!isRecording) return;
  finishRecording(!cancelOnRelease);
  try {
    event.currentTarget.releasePointerCapture?.(event.pointerId);
  } catch {
    // ignore — release is best-effort
  }
}
// System-cancelled pointer (e.g. scroll takeover): abort without sending.
function onHoldToTalkPointerCancel() {
  if (!isRecording) return;
  finishRecording(false);
}
async function connect() {
if (!canConnect) return;
const cryptoIssue = getCryptoIssueMessage();
@ -579,6 +1185,10 @@ export default function App() {
function disconnect() {
manualCloseRef.current = true;
if (isRecording) {
finishRecording(false);
}
stopAudioPlayback();
if (wsRef.current) {
wsRef.current.close();
wsRef.current = null;
@ -688,13 +1298,52 @@ export default function App() {
}
if (message.type === "broadcast") {
pushIncoming(message.key || "匿名用户", String(message.data ?? ""), "", CHANNEL_BROADCAST);
const sender = message.key || "匿名用户";
const payloadText = asPayloadText(message.data);
const audioChunk = parseAudioChunkPayload(payloadText);
if (audioChunk) {
ingestIncomingAudioChunk(sender, "", CHANNEL_BROADCAST, audioChunk);
return;
}
const audio = parseAudioPayload(payloadText);
if (audio) {
pushIncomingAudio(
sender,
"",
audio.data,
audio.durationMillis,
audio.mimeType || "audio/mp4",
CHANNEL_BROADCAST
);
} else {
pushIncoming(sender, payloadText, "", CHANNEL_BROADCAST);
}
return;
}
if (message.type === "forward") {
const sourceKey = String(message.key || "");
const sender = "私聊消息";
pushIncoming(sender, String(message.data ?? ""), "", CHANNEL_PRIVATE);
const subtitle = sourceKey ? `来自 ${summarizeKey(sourceKey)}` : "";
const payloadText = asPayloadText(message.data);
const audioChunk = parseAudioChunkPayload(payloadText);
if (audioChunk) {
ingestIncomingAudioChunk(sender, subtitle, CHANNEL_PRIVATE, audioChunk);
return;
}
const audio = parseAudioPayload(payloadText);
if (audio) {
pushIncomingAudio(
sender,
subtitle,
audio.data,
audio.durationMillis,
audio.mimeType || "audio/mp4",
CHANNEL_PRIVATE
);
} else {
pushIncoming(sender, payloadText, subtitle, CHANNEL_PRIVATE);
}
return;
}
@ -727,24 +1376,7 @@ export default function App() {
setSending(true);
try {
const timestamp = unixSecondsNow();
const nonce = createNonce();
const signInput = [type, key, text, timestamp, nonce].join("\n");
const signature = await signText(identity.signPrivateKey, signInput);
const envelope = {
type,
key,
data: {
payload: text,
timestamp,
nonce,
signature
}
};
const cipher = await rsaEncryptChunked(serverPublicKey, JSON.stringify(envelope));
ws.send(cipher);
await sendSignedPayload(type, key, text);
pushOutgoing(text, subtitle, channel);
setDraft("");
} catch (error) {
@ -921,17 +1553,43 @@ export default function App() {
</>
) : (
<>
<div className="msg-head">
<strong>{item.sender}</strong>
{item.subtitle ? <span>{item.subtitle}</span> : null}
<time>{formatTime(item.ts)}</time>
</div>
<p>{item.content}</p>
<div className="msg-actions">
<button className="btn btn-copy" onClick={() => copyMessageText(item.id, item.content)}>
{copiedMessageId === item.id ? "已复制" : "复制"}
</button>
</div>
{(() => {
const isAudioMessage = item.contentType === CONTENT_AUDIO && item.audioBase64;
return (
<>
<div className="msg-head">
<strong>{item.sender}</strong>
{item.subtitle ? <span>{item.subtitle}</span> : null}
<time>{formatTime(item.ts)}</time>
</div>
{isAudioMessage ? (
<button
className={`audio-message ${playingMessageId === item.id ? "playing" : ""}`}
type="button"
onClick={() => togglePlayAudioMessage(item)}
>
<span className="audio-icon">{playingMessageId === item.id ? "■" : "▶"}</span>
<span className="audio-waves" aria-hidden="true">
<i />
<i />
<i />
<i />
</span>
<span className="audio-duration">{formatAudioDuration(item.audioDurationMillis)}</span>
</button>
) : (
<p>{item.content}</p>
)}
{!isAudioMessage ? (
<div className="msg-actions">
<button className="btn btn-copy" onClick={() => copyMessageText(item.id, item.content)}>
{copiedMessageId === item.id ? "已复制" : "复制"}
</button>
</div>
) : null}
</>
);
})()}
</>
)}
</article>
@ -939,25 +1597,64 @@ export default function App() {
)}
</div>
<div className="composer">
<div className="composer-input-wrap">
<textarea
value={draft}
onChange={(event) => setDraft(event.target.value)}
onKeyDown={onDraftKeyDown}
onCompositionStart={() => {
draftComposingRef.current = true;
}}
onCompositionEnd={() => {
draftComposingRef.current = false;
}}
placeholder="输入消息"
rows={1}
/>
</div>
<button className="btn btn-main btn-send" onClick={sendMessage} disabled={!canSend}>
{sending ? "发送中..." : "发送"}
<div className={`composer ${inputMode === CONTENT_AUDIO ? "audio-mode" : ""}`}>
<button
className={`btn btn-ghost btn-input-switch ${inputMode === CONTENT_AUDIO ? "active" : ""}`}
type="button"
onClick={() => {
if (inputMode === CONTENT_AUDIO && isRecording) {
finishRecording(false);
}
setInputMode((prev) => (prev === CONTENT_TEXT ? CONTENT_AUDIO : CONTENT_TEXT));
}}
title={inputMode === CONTENT_TEXT ? "切换到语音输入" : "切换到文字输入"}
>
{inputMode === CONTENT_TEXT ? "语音" : "键盘"}
</button>
{inputMode === CONTENT_TEXT ? (
<>
<div className="composer-input-wrap">
<textarea
value={draft}
onChange={(event) => setDraft(event.target.value)}
onKeyDown={onDraftKeyDown}
onCompositionStart={() => {
draftComposingRef.current = true;
}}
onCompositionEnd={() => {
draftComposingRef.current = false;
}}
placeholder="输入消息"
rows={1}
/>
</div>
<button className="btn btn-main btn-send" onClick={sendMessage} disabled={!canSend}>
{sending ? "发送中..." : "发送"}
</button>
</>
) : (
<button
className={`hold-to-talk ${isRecording ? (cancelOnRelease ? "cancel" : "recording") : ""}`}
type="button"
disabled={!canHoldToRecord}
onPointerDown={onHoldToTalkPointerDown}
onPointerMove={onHoldToTalkPointerMove}
onPointerUp={onHoldToTalkPointerUp}
onPointerCancel={onHoldToTalkPointerCancel}
onPointerLeave={(event) => {
if (isRecording && (event.buttons & 1) === 0) {
onHoldToTalkPointerUp(event);
}
}}
>
{isRecording
? cancelOnRelease
? "松开取消"
: `录制中 ${formatRecordingElapsed(recordingElapsedMillis)}`
: "按住说话"}
</button>
)}
</div>
</section>

@ -326,6 +326,94 @@ body {
font-size: 14px;
}
/* Voice message bubble — the whole bubble is a click-to-play button. */
.audio-message {
  width: min(260px, 100%);
  border: 0;
  border-radius: 12px;
  padding: 8px 10px;
  margin-top: 2px;
  background: rgba(8, 27, 52, 0.08);
  color: #143556;
  display: inline-flex;
  align-items: center;
  gap: 10px;
  cursor: pointer;
}
/* Outgoing (sent) bubbles get the green accent tint. */
.msg.outgoing .audio-message {
  background: rgba(57, 139, 18, 0.14);
}
.audio-message:hover {
  filter: brightness(0.98);
}
/* Circular play/stop glyph at the left edge of the bubble. */
.audio-icon {
  width: 24px;
  height: 24px;
  border-radius: 50%;
  display: inline-grid;
  place-items: center;
  background: rgba(20, 53, 86, 0.14);
  font-size: 12px;
  font-weight: 700;
}
/* Decorative waveform made of four vertical bars. */
.audio-waves {
  flex: 1;
  min-width: 44px;
  display: inline-flex;
  align-items: center;
  gap: 3px;
}
.audio-waves i {
  width: 3px;
  border-radius: 999px;
  background: #1887ff;
  display: block;
  transform-origin: center bottom;
}
/* Staggered bar heights give the idle waveform its shape. */
.audio-waves i:nth-child(1) {
  height: 9px;
}
.audio-waves i:nth-child(2) {
  height: 15px;
}
.audio-waves i:nth-child(3) {
  height: 11px;
}
.audio-waves i:nth-child(4) {
  height: 13px;
  background: #13bca8;
}
/* While playing, the bars pulse with per-bar phase offsets. */
.audio-message.playing .audio-waves i {
  animation: audio-wave 0.75s ease-in-out infinite alternate;
}
.audio-message.playing .audio-waves i:nth-child(2) {
  animation-delay: 0.08s;
}
.audio-message.playing .audio-waves i:nth-child(3) {
  animation-delay: 0.16s;
}
.audio-message.playing .audio-waves i:nth-child(4) {
  animation-delay: 0.24s;
}
/* Clip length label, e.g. "0:42". */
.audio-duration {
  color: #4d6684;
  font-size: 12px;
  font-weight: 700;
}
.msg-actions {
margin-top: 0;
display: flex;
@ -361,12 +449,17 @@ body {
border-top: 1px solid var(--card-border);
padding: 10px 12px;
display: grid;
grid-template-columns: 1fr auto;
grid-template-columns: auto 1fr auto;
gap: 10px;
align-items: center;
background: rgba(255, 255, 255, 0.96);
box-shadow: inset 0 1px 0 rgba(19, 35, 58, 0.06);
}
/* Voice mode hides the send button, so drop from three grid columns to two. */
.composer.audio-mode {
  grid-template-columns: auto 1fr;
}
.composer-input-wrap {
border: 1px solid rgba(19, 35, 58, 0.18);
border-radius: 15px;
@ -374,6 +467,41 @@ body {
padding: 6px 10px;
}
/* Text/voice input-mode toggle at the left edge of the composer. */
.btn-input-switch {
  min-width: 56px;
  min-height: 38px;
  border-radius: 12px;
  padding: 0 12px;
  align-self: stretch;
}
/* Highlighted while voice mode is active. */
.btn-input-switch.active {
  background: #dff4ff;
  color: #145184;
}
/* Press-and-hold recording button shown in voice mode. */
.hold-to-talk {
  border: 1px solid rgba(19, 35, 58, 0.16);
  border-radius: 15px;
  min-height: 42px;
  background: #f6fbff;
  color: #1a426b;
  font-size: 14px;
  font-weight: 700;
}
/* Blue tint while actively recording. */
.hold-to-talk.recording {
  background: rgba(21, 132, 255, 0.14);
  border-color: rgba(21, 132, 255, 0.36);
  color: #0e4d84;
}
/* Red tint while the finger is inside the slide-up-to-cancel zone. */
.hold-to-talk.cancel {
  background: rgba(216, 56, 99, 0.14);
  border-color: rgba(216, 56, 99, 0.34);
  color: #8f1534;
}
.composer textarea {
width: 100%;
resize: none;
@ -600,6 +728,15 @@ select:focus {
}
}
/* Pulse used by the playing-state waveform bars (alternate direction). */
@keyframes audio-wave {
  from {
    transform: scaleY(0.62);
  }
  to {
    transform: scaleY(1.12);
  }
}
@media (max-width: 980px) {
html,
body,
@ -805,7 +942,7 @@ select:focus {
.composer {
position: sticky;
bottom: 56px;
grid-template-columns: 1fr auto;
grid-template-columns: auto 1fr auto;
align-items: center;
padding: 6px 8px;
gap: 6px;
@ -813,6 +950,10 @@ select:focus {
border-top: 1px solid rgba(19, 35, 58, 0.08);
}
/* Mobile voice mode: two columns (mode switch + hold-to-talk). */
.composer.audio-mode {
  grid-template-columns: auto 1fr;
}
.btn-send {
width: auto;
min-width: 56px;
@ -831,6 +972,20 @@ select:focus {
padding: 6px 12px;
}
/* Compact mobile sizing for the input-mode switch. */
.btn-input-switch {
  min-width: 52px;
  min-height: 38px;
  border-radius: 19px;
  font-size: 12px;
  padding: 0 10px;
}
/* Compact mobile sizing for the hold-to-talk button. */
.hold-to-talk {
  min-height: 40px;
  border-radius: 20px;
  font-size: 13px;
}
.composer textarea {
min-height: 20px;
max-height: 88px;

Loading…
Cancel
Save