1import React, { useState, useRef, useEffect, useCallback, useMemo } from "react";
2
3// Web Speech API types
/** Minimal typing for the Web Speech API `result` event (not yet in lib.dom). */
interface SpeechRecognitionEvent extends Event {
  results: SpeechRecognitionResultList;
  // Index of the first result that changed in this event; earlier entries
  // are unchanged and can be skipped when iterating.
  resultIndex: number;
}
8
/** Array-like collection of recognition results (indexable, with `item()`). */
interface SpeechRecognitionResultList {
  length: number;
  item(index: number): SpeechRecognitionResult;
  [index: number]: SpeechRecognitionResult;
}
14
/** One recognized utterance; array-like over alternative transcriptions. */
interface SpeechRecognitionResult {
  // True once the recognizer has committed this result (it will not change).
  isFinal: boolean;
  length: number;
  item(index: number): SpeechRecognitionAlternative;
  [index: number]: SpeechRecognitionAlternative;
}
21
/** A candidate transcription with the recognizer's confidence score (0-1 per spec). */
interface SpeechRecognitionAlternative {
  transcript: string;
  confidence: number;
}
26
/**
 * Minimal surface of the SpeechRecognition controller used by this component.
 * Only the members actually accessed below are declared.
 */
interface SpeechRecognition extends EventTarget {
  // Keep recognizing across pauses instead of stopping after one utterance.
  continuous: boolean;
  // Deliver provisional (non-final) results while the user is still speaking.
  interimResults: boolean;
  lang: string;
  onresult: ((event: SpeechRecognitionEvent) => void) | null;
  onerror: ((event: Event & { error: string }) => void) | null;
  onend: (() => void) | null;
  start(): void;
  stop(): void;
  // Stop immediately, discarding any pending results.
  abort(): void;
}
38
declare global {
  interface Window {
    // Standard and WebKit-prefixed constructors; Chrome and Safari still
    // expose the API only under the webkit prefix.
    SpeechRecognition: new () => SpeechRecognition;
    webkitSpeechRecognition: new () => SpeechRecognition;
  }
}
45
/** Props for the MessageInput composer. */
interface MessageInputProps {
  /** Called with the composed text on submit; a rejected promise keeps the draft for retry. */
  onSend: (message: string) => Promise<void>;
  /** Disable the whole composer (textarea and buttons). */
  disabled?: boolean;
  /** Focus the textarea after mount. */
  autoFocus?: boolean;
  /** Invoked when the textarea gains focus (deferred two animation frames). */
  onFocus?: () => void;
  /** Text to append to the draft programmatically (e.g. diff comments). */
  injectedText?: string;
  /** Called once injectedText has been consumed so the parent can reset it. */
  onClearInjectedText?: () => void;
  /** If set, persist draft message to localStorage under this key */
  persistKey?: string;
}
56
57const PERSIST_KEY_PREFIX = "shelley_draft_";
58
59function MessageInput({
60 onSend,
61 disabled = false,
62 autoFocus = false,
63 onFocus,
64 injectedText,
65 onClearInjectedText,
66 persistKey,
67}: MessageInputProps) {
68 const [message, setMessage] = useState(() => {
69 // Load persisted draft if persistKey is set
70 if (persistKey) {
71 return localStorage.getItem(PERSIST_KEY_PREFIX + persistKey) || "";
72 }
73 return "";
74 });
75 const [submitting, setSubmitting] = useState(false);
76 const [uploadsInProgress, setUploadsInProgress] = useState(0);
77 const [dragCounter, setDragCounter] = useState(0);
78 const [isListening, setIsListening] = useState(false);
79 const [isSmallScreen, setIsSmallScreen] = useState(() => {
80 if (typeof window === "undefined") return false;
81 return window.innerWidth < 480;
82 });
83 const textareaRef = useRef<HTMLTextAreaElement>(null);
84 const fileInputRef = useRef<HTMLInputElement>(null);
85 const recognitionRef = useRef<SpeechRecognition | null>(null);
86 // Track the base text (before speech recognition started) and finalized speech text
87 const baseTextRef = useRef<string>("");
88 const finalizedTextRef = useRef<string>("");
89
90 // Check if speech recognition is available
91 const speechRecognitionAvailable =
92 typeof window !== "undefined" && (window.SpeechRecognition || window.webkitSpeechRecognition);
93
94 // Responsive placeholder text
95 const placeholderText = useMemo(
96 () => (isSmallScreen ? "Message..." : "Message, paste image, or attach file..."),
97 [isSmallScreen],
98 );
99
100 // Track screen size for responsive placeholder
101 useEffect(() => {
102 const handleResize = () => {
103 setIsSmallScreen(window.innerWidth < 480);
104 };
105 window.addEventListener("resize", handleResize);
106 return () => window.removeEventListener("resize", handleResize);
107 }, []);
108
109 const stopListening = useCallback(() => {
110 if (recognitionRef.current) {
111 recognitionRef.current.stop();
112 recognitionRef.current = null;
113 }
114 setIsListening(false);
115 }, []);
116
117 const startListening = useCallback(() => {
118 if (!speechRecognitionAvailable) return;
119
120 const SpeechRecognitionClass = window.SpeechRecognition || window.webkitSpeechRecognition;
121 const recognition = new SpeechRecognitionClass();
122
123 recognition.continuous = true;
124 recognition.interimResults = true;
125 recognition.lang = navigator.language || "en-US";
126
127 // Capture current message as base text
128 setMessage((current) => {
129 baseTextRef.current = current;
130 finalizedTextRef.current = "";
131 return current;
132 });
133
134 recognition.onresult = (event: SpeechRecognitionEvent) => {
135 let finalTranscript = "";
136 let interimTranscript = "";
137
138 for (let i = event.resultIndex; i < event.results.length; i++) {
139 const transcript = event.results[i][0].transcript;
140 if (event.results[i].isFinal) {
141 finalTranscript += transcript;
142 } else {
143 interimTranscript += transcript;
144 }
145 }
146
147 // Accumulate finalized text
148 if (finalTranscript) {
149 finalizedTextRef.current += finalTranscript;
150 }
151
152 // Build the full message: base + finalized + interim
153 const base = baseTextRef.current;
154 const needsSpace = base.length > 0 && !/\s$/.test(base);
155 const spacer = needsSpace ? " " : "";
156 const fullText = base + spacer + finalizedTextRef.current + interimTranscript;
157
158 setMessage(fullText);
159 };
160
161 recognition.onerror = (event) => {
162 console.error("Speech recognition error:", event.error);
163 stopListening();
164 };
165
166 recognition.onend = () => {
167 setIsListening(false);
168 recognitionRef.current = null;
169 };
170
171 recognitionRef.current = recognition;
172 recognition.start();
173 setIsListening(true);
174 }, [speechRecognitionAvailable, stopListening]);
175
176 const toggleListening = useCallback(() => {
177 if (isListening) {
178 stopListening();
179 } else {
180 startListening();
181 }
182 }, [isListening, startListening, stopListening]);
183
184 // Cleanup on unmount
185 useEffect(() => {
186 return () => {
187 if (recognitionRef.current) {
188 recognitionRef.current.abort();
189 }
190 };
191 }, []);
192
193 const uploadFile = async (file: File) => {
194 // Add a loading indicator at the end of the current message
195 const loadingText = `[uploading ${file.name}...]`;
196 setMessage((prev) => (prev ? prev + " " : "") + loadingText);
197 setUploadsInProgress((prev) => prev + 1);
198
199 try {
200 const formData = new FormData();
201 formData.append("file", file);
202
203 const response = await fetch("/api/upload", {
204 method: "POST",
205 headers: { "X-Shelley-Request": "1" },
206 body: formData,
207 });
208
209 if (!response.ok) {
210 throw new Error(`Upload failed: ${response.statusText}`);
211 }
212
213 const data = await response.json();
214
215 // Replace the loading placeholder with the actual file path
216 setMessage((currentMessage) => currentMessage.replace(loadingText, `[${data.path}]`));
217 } catch (error) {
218 console.error("Failed to upload file:", error);
219 // Replace loading indicator with error message
220 const errorText = `[upload failed: ${error instanceof Error ? error.message : "unknown error"}]`;
221 setMessage((currentMessage) => currentMessage.replace(loadingText, errorText));
222 } finally {
223 setUploadsInProgress((prev) => prev - 1);
224 }
225 };
226
227 const handlePaste = async (event: React.ClipboardEvent) => {
228 // Check clipboard items (works on both desktop and mobile)
229 // Mobile browsers often don't populate clipboardData.files, but items works
230 const items = event.clipboardData?.items;
231 if (items) {
232 for (let i = 0; i < items.length; i++) {
233 const item = items[i];
234 if (item.kind === "file") {
235 const file = item.getAsFile();
236 if (file) {
237 event.preventDefault();
238 await uploadFile(file);
239 return;
240 }
241 }
242 }
243 }
244 };
245
246 const handleDragOver = (event: React.DragEvent) => {
247 event.preventDefault();
248 event.stopPropagation();
249 };
250
251 const handleDragEnter = (event: React.DragEvent) => {
252 event.preventDefault();
253 event.stopPropagation();
254 setDragCounter((prev) => prev + 1);
255 };
256
257 const handleDragLeave = (event: React.DragEvent) => {
258 event.preventDefault();
259 event.stopPropagation();
260 setDragCounter((prev) => prev - 1);
261 };
262
263 const handleDrop = async (event: React.DragEvent) => {
264 event.preventDefault();
265 event.stopPropagation();
266 setDragCounter(0);
267
268 if (event.dataTransfer && event.dataTransfer.files.length > 0) {
269 // Process all dropped files
270 for (let i = 0; i < event.dataTransfer.files.length; i++) {
271 const file = event.dataTransfer.files[i];
272 await uploadFile(file);
273 }
274 }
275 };
276
277 const handleAttachClick = () => {
278 fileInputRef.current?.click();
279 };
280
281 const handleFileSelect = async (event: React.ChangeEvent<HTMLInputElement>) => {
282 const files = event.target.files;
283 if (!files || files.length === 0) return;
284
285 for (let i = 0; i < files.length; i++) {
286 const file = files[i];
287 await uploadFile(file);
288 }
289
290 // Reset input so same file can be selected again
291 event.target.value = "";
292 };
293
294 // Auto-insert injected text (diff comments) directly into the textarea
295 useEffect(() => {
296 if (injectedText) {
297 setMessage((prev) => {
298 const needsNewline = prev.length > 0 && !prev.endsWith("\n");
299 return prev + (needsNewline ? "\n\n" : "") + injectedText;
300 });
301 onClearInjectedText?.();
302 // Focus the textarea after inserting
303 setTimeout(() => textareaRef.current?.focus(), 0);
304 }
305 }, [injectedText, onClearInjectedText]);
306
307 const handleSubmit = async (e: React.FormEvent) => {
308 e.preventDefault();
309 if (message.trim() && !disabled && !submitting && uploadsInProgress === 0) {
310 // Stop listening if we were recording
311 if (isListening) {
312 stopListening();
313 }
314
315 const messageToSend = message;
316 setSubmitting(true);
317 try {
318 await onSend(messageToSend);
319 // Only clear on success
320 setMessage("");
321 // Clear persisted draft on successful send
322 if (persistKey) {
323 localStorage.removeItem(PERSIST_KEY_PREFIX + persistKey);
324 }
325 } catch {
326 // Keep the message on error so user can retry
327 } finally {
328 setSubmitting(false);
329 }
330 }
331 };
332
333 const handleKeyDown = (e: React.KeyboardEvent) => {
334 // Don't submit while IME is composing (e.g., converting Japanese hiragana to kanji)
335 if (e.nativeEvent.isComposing) {
336 return;
337 }
338 if (e.key === "Enter" && !e.shiftKey) {
339 // On mobile, let Enter create newlines since there's a send button
340 // I'm not convinced the divergence from desktop is the correct answer,
341 // but we can try it and see how it feels.
342 const isMobile = "ontouchstart" in window;
343 if (isMobile) {
344 return;
345 }
346 e.preventDefault();
347 handleSubmit(e);
348 }
349 };
350
351 const adjustTextareaHeight = () => {
352 if (textareaRef.current) {
353 textareaRef.current.style.height = "auto";
354 const scrollHeight = textareaRef.current.scrollHeight;
355 const maxHeight = 200; // Maximum height in pixels
356 textareaRef.current.style.height = `${Math.min(scrollHeight, maxHeight)}px`;
357 }
358 };
359
360 useEffect(() => {
361 adjustTextareaHeight();
362 }, [message]);
363
364 // Persist draft to localStorage when persistKey is set
365 useEffect(() => {
366 if (persistKey) {
367 if (message) {
368 localStorage.setItem(PERSIST_KEY_PREFIX + persistKey, message);
369 } else {
370 localStorage.removeItem(PERSIST_KEY_PREFIX + persistKey);
371 }
372 }
373 }, [message, persistKey]);
374
375 useEffect(() => {
376 if (autoFocus && textareaRef.current) {
377 // Use setTimeout to ensure the component is fully rendered
378 setTimeout(() => {
379 textareaRef.current?.focus();
380 }, 0);
381 }
382 }, [autoFocus]);
383
384 // Handle virtual keyboard appearance on mobile (especially Android Firefox)
385 // The visualViewport API lets us detect when the keyboard shrinks the viewport
386 useEffect(() => {
387 if (typeof window === "undefined" || !window.visualViewport) {
388 return;
389 }
390
391 const handleViewportResize = () => {
392 // Only scroll if our textarea is focused (keyboard is for us)
393 if (document.activeElement === textareaRef.current) {
394 // Small delay to let the viewport settle after resize
395 requestAnimationFrame(() => {
396 textareaRef.current?.scrollIntoView({ behavior: "smooth", block: "center" });
397 });
398 }
399 };
400
401 window.visualViewport.addEventListener("resize", handleViewportResize);
402 return () => {
403 window.visualViewport?.removeEventListener("resize", handleViewportResize);
404 };
405 }, []);
406
407 const isDisabled = disabled || uploadsInProgress > 0;
408 const canSubmit = message.trim() && !isDisabled && !submitting;
409
410 const isDraggingOver = dragCounter > 0;
411 // Check if user is typing a shell command (starts with !)
412 const isShellMode = message.trimStart().startsWith("!");
413 // Note: injectedText is auto-inserted via useEffect, no manual UI needed
414
415 return (
416 <div
417 className={`message-input-container ${isDraggingOver ? "drag-over" : ""} ${isShellMode ? "shell-mode" : ""}`}
418 onDragOver={handleDragOver}
419 onDragEnter={handleDragEnter}
420 onDragLeave={handleDragLeave}
421 onDrop={handleDrop}
422 >
423 {isDraggingOver && (
424 <div className="drag-overlay">
425 <div className="drag-overlay-content">Drop files here</div>
426 </div>
427 )}
428 <form onSubmit={handleSubmit} className="message-input-form">
429 <input
430 type="file"
431 ref={fileInputRef}
432 onChange={handleFileSelect}
433 style={{ display: "none" }}
434 multiple
435 accept="image/*,video/*,audio/*,.pdf,.txt,.md,.json,.csv,.xml,.html,.css,.js,.ts,.tsx,.jsx,.py,.go,.rs,.java,.c,.cpp,.h,.hpp,.sh,.yaml,.yml,.toml,.sql,.log,*"
436 aria-hidden="true"
437 />
438 {isShellMode && (
439 <div className="shell-mode-indicator" title="This will run as a shell command">
440 <svg
441 width="16"
442 height="16"
443 viewBox="0 0 24 24"
444 fill="none"
445 stroke="currentColor"
446 strokeWidth="2"
447 >
448 <polyline points="4 17 10 11 4 5" />
449 <line x1="12" y1="19" x2="20" y2="19" />
450 </svg>
451 </div>
452 )}
453 <textarea
454 ref={textareaRef}
455 value={message}
456 onChange={(e) => setMessage(e.target.value)}
457 onKeyDown={handleKeyDown}
458 onPaste={handlePaste}
459 onFocus={() => {
460 // Scroll to bottom after keyboard animation settles
461 if (onFocus) {
462 requestAnimationFrame(() => requestAnimationFrame(onFocus));
463 }
464 }}
465 placeholder={placeholderText}
466 className="message-textarea"
467 disabled={isDisabled}
468 rows={1}
469 aria-label="Message input"
470 data-testid="message-input"
471 autoFocus={autoFocus}
472 />
473 <button
474 type="button"
475 onClick={handleAttachClick}
476 disabled={isDisabled}
477 className="message-attach-btn"
478 aria-label="Attach file"
479 data-testid="attach-button"
480 >
481 <svg
482 fill="none"
483 stroke="currentColor"
484 strokeWidth="2"
485 viewBox="0 0 24 24"
486 width="20"
487 height="20"
488 >
489 <path
490 strokeLinecap="round"
491 strokeLinejoin="round"
492 d="M15.172 7l-6.586 6.586a2 2 0 102.828 2.828l6.414-6.586a4 4 0 00-5.656-5.656l-6.415 6.585a6 6 0 108.486 8.486L20.5 13"
493 />
494 </svg>
495 </button>
496 {speechRecognitionAvailable && (
497 <button
498 type="button"
499 onClick={toggleListening}
500 disabled={isDisabled}
501 className={`message-voice-btn ${isListening ? "listening" : ""}`}
502 aria-label={isListening ? "Stop voice input" : "Start voice input"}
503 data-testid="voice-button"
504 >
505 {isListening ? (
506 <svg fill="currentColor" viewBox="0 0 24 24" width="20" height="20">
507 <circle cx="12" cy="12" r="6" />
508 </svg>
509 ) : (
510 <svg fill="currentColor" viewBox="0 0 24 24" width="20" height="20">
511 <path d="M12 14c1.66 0 3-1.34 3-3V5c0-1.66-1.34-3-3-3S9 3.34 9 5v6c0 1.66 1.34 3 3 3zm-1-9c0-.55.45-1 1-1s1 .45 1 1v6c0 .55-.45 1-1 1s-1-.45-1-1V5zm6 6c0 2.76-2.24 5-5 5s-5-2.24-5-5H5c0 3.53 2.61 6.43 6 6.92V21h2v-3.08c3.39-.49 6-3.39 6-6.92h-2z" />
512 </svg>
513 )}
514 </button>
515 )}
516 <button
517 type="submit"
518 disabled={!canSubmit}
519 className="message-send-btn"
520 aria-label="Send message"
521 data-testid="send-button"
522 >
523 {isDisabled || submitting ? (
524 <div className="flex items-center justify-center">
525 <div className="spinner spinner-small" style={{ borderTopColor: "white" }}></div>
526 </div>
527 ) : (
528 <svg fill="currentColor" viewBox="0 0 24 24" width="20" height="20">
529 <path d="M12 4l-1.41 1.41L16.17 11H4v2h12.17l-5.58 5.59L12 20l8-8z" />
530 </svg>
531 )}
532 </button>
533 </form>
534 </div>
535 );
536}
537
538export default MessageInput;