v0.50.25: mobile scroll, import timestamps, profile security, mic fallback (#404)

* fix: restore mobile chat scrolling and drawer close (#397)

- static/style.css: add min-height:0 to .layout and .main (flex shrink chain fix for mobile scroll)
- static/style.css: add -webkit-overflow-scrolling:touch, touch-action:pan-y, overscroll-behavior-y:contain to .messages
- static/boot.js: call closeMobileSidebar() on new-conversation button onclick and Ctrl+K shortcut
- tests/test_mobile_layout.py: 41 new lines covering all three CSS fixes and both JS call sites

Original PR by @Jordan-SkyLF

* fix: preserve imported session timestamps (#395)

- api/models.py: add touch_updated_at: bool = True param to Session.save(); import_cli_session() accepts created_at/updated_at kwargs and saves with touch_updated_at=False
- api/routes.py: extract created_at/updated_at from get_cli_sessions() metadata and forward to import_cli_session(); use touch_updated_at=False on post-import save
- tests/test_gateway_sync.py: +53 lines — integration test verifying imported session keeps original timestamp and sorts correctly vs newer sessions; also fix: add WebUI session file cleanup in finally block

Original PR by @Jordan-SkyLF

* fix(profiles): block path traversal in profile switch and delete flows (#399)

Master was vulnerable: switch_profile and delete_profile_api joined user-supplied profile
names directly into filesystem paths with no validation. An attacker could send
'../../etc/passwd' as a profile name to traverse outside the profiles directory.

- api/profiles.py: add _resolve_named_profile_home(name) — validates name with
  ^[a-z0-9][a-z0-9_-]{0,63}$ regex then enforces path containment via
  candidate.resolve().relative_to(profiles_root); use in switch_profile()
- api/profiles.py: add _validate_profile_name() call to delete_profile_api() entry
- api/routes.py: add _validate_profile_name() call at HTTP handler level for
  both /api/profile/switch and /api/profile/delete (fail-fast at API boundary)
- tests/test_profile_path_security.py: 3 tests — traversal rejected, valid name passes

Cherry-picked commit aae7a30 from @Hinotoi-agent (PR was 62 commits behind master)

* feat: add desktop microphone transcription fallback (#396)

Mic button now works in browsers that support getUserMedia/MediaRecorder but
lack SpeechRecognition (e.g. Firefox desktop, some Chromium builds).

- static/boot.js: detect _canRecordAudio (navigator.mediaDevices + getUserMedia + MediaRecorder);
  keep mic button enabled when either SpeechRecognition or MediaRecorder is available;
  MediaRecorder fallback records audio, sends blob to /api/transcribe, inserts transcript
  into the composer; _stopMic() handles all three states (recognition, mediaRecorder, neither)
- api/upload.py: add transcribe_audio() helper — saves uploaded blob to temp file, calls
  transcription_tools.transcribe_audio(), always cleans up temp file
- api/routes.py: add /api/transcribe POST handler — CSRF protected, auth-gated, 20MB limit,
  returns {text:...} or {error:...}
- api/helpers.py: change Permissions-Policy microphone=() to microphone=(self) (required to
  allow getUserMedia in the same origin)
- tests/test_voice_transcribe_endpoint.py: 87 new lines — 3 tests with mocked transcription
- tests/test_sprint19.py: +1 regression guard (microphone=(self) in Permissions-Policy)
- tests/test_sprint20.py: 3 updated tests for new fallback-capability checks

Original PR by @Jordan-SkyLF

* docs: v0.50.25 release — version badge and CHANGELOG

---------

Co-authored-by: Nathan Esquenazi <nesquena@gmail.com>
This commit is contained in:
nesquena-hermes
2026-04-13 22:11:45 -07:00
committed by GitHub
parent 2beebaa6a2
commit 04ed0ff43d
15 changed files with 589 additions and 81 deletions

View File

@@ -172,24 +172,32 @@ function mobileSwitchPanel(name){
});
}
// Send button: if the mic is live, queue the send and stop capture first —
// recognition.onend / mediaRecorder.onstop will dispatch send() once the
// transcript has been committed (see _micPendingSend handling below).
$('btnSend').onclick=()=>{
  if(window._micActive){
    window._micPendingSend=true;
    _stopMic();
    return;
  }
  send();
};
// Attach button proxies to the hidden file input.
$('btnAttach').onclick=()=>$('fileInput').click();
// ── Voice input (Web Speech API + MediaRecorder fallback) ───────────────────
// Live dictation via SpeechRecognition where supported; otherwise record with
// MediaRecorder and POST the audio blob to /api/transcribe for server-side
// transcription. The mic button stays hidden when neither path is available.
(function(){
  const SpeechRecognition=window.SpeechRecognition||window.webkitSpeechRecognition;
  // Fallback path needs getUserMedia + MediaRecorder (e.g. Firefox desktop).
  const _canRecordAudio=!!(navigator.mediaDevices&&navigator.mediaDevices.getUserMedia&&window.MediaRecorder);
  if(!SpeechRecognition&&!_canRecordAudio) return; // Browser unsupported — mic button stays hidden
  const btn=$('btnMic');
  const status=$('micStatus');
  const ta=$('msg');
  const statusText=status?status.querySelector('.status-text'):null;
  btn.style.display=''; // Show button — browser supports speech recognition or recording fallback
  let recognition=SpeechRecognition?new SpeechRecognition():null;
  let mediaRecorder=null; // active MediaRecorder while the fallback is capturing
  let mediaStream=null;   // getUserMedia stream; tracks must be stopped to release the mic
  let audioChunks=[];     // blobs accumulated by ondataavailable during one capture
  let _finalText='';      // finalized SpeechRecognition transcript so far
  let _prefix='';         // textarea content snapshotted when the mic started

  // Toggle the recording UI and the global mic flag; reset transcript state on stop.
  // NOTE(review): this opening line is reconstructed — the diff hunk began
  // mid-function; confirm the signature against the full file.
  function _setRecording(on){
    window._micActive=on;
    btn.classList.toggle('recording',on);
    status.style.display=on?'':'none';
    // NOTE(review): the original ternary had identical branches
    // (on?'Listening':'Listening') — simplified without behavior change;
    // possibly a second label was intended for the off state.
    if(statusText) statusText.textContent='Listening';
    if(!on){ _finalText=''; _prefix=''; }
  }

  // Merge a transcript into the snapshotted prefix (inserting a separating
  // space when needed) and, if a send was queued while recording, dispatch it.
  function _commitTranscript(text){
    const clean=(text||'').trim();
    const committed=clean
      ? (_prefix&&!_prefix.endsWith(' ')&&!_prefix.endsWith('\n')
        ? _prefix+' '+clean.trimStart()
        : _prefix+clean)
      : ta.value; // no speech detected — leave whatever is there
    ta.value=committed;
    autoResize();
    if(window._micPendingSend){
      window._micPendingSend=false;
      send();
    }
  }

  // Upload a recorded blob to the transcription endpoint and commit the result.
  async function _transcribeBlob(blob){
    const ext=(blob.type&&blob.type.includes('ogg'))?'ogg':'webm';
    const form=new FormData();
    form.append('file',new File([blob],`voice-input.${ext}`,{type:blob.type||`audio/${ext}`}));
    setComposerStatus('Transcribing…');
    try{
      const res=await fetch('/api/transcribe',{method:'POST',body:form});
      const data=await res.json().catch(()=>({}));
      if(!res.ok) throw new Error(data.error||'Transcription failed');
      // NOTE(review): the commit message says /api/transcribe returns
      // {text:...}, but this reads data.transcript — confirm the response
      // field name against api/routes.py.
      _commitTranscript(data.transcript||'');
    }catch(err){
      window._micPendingSend=false; // never auto-send after a failed transcription
      showToast(err.message||t('mic_network'));
    }finally{
      setComposerStatus('');
    }
  }

  // Release the microphone hardware (also clears the browser's recording indicator).
  function _stopTracks(){
    if(mediaStream){
      mediaStream.getTracks().forEach(track=>track.stop());
      mediaStream=null;
    }
  }

  // Stop whichever capture path is active. Teardown continues asynchronously in
  // recognition.onend / mediaRecorder.onstop; the final branch covers the state
  // where neither capture object is live.
  function _stopMic(){
    if(!window._micActive) return;
    if(recognition){
      recognition.stop(); // recognition.onend commits the transcript
      return;
    }
    if(mediaRecorder&&mediaRecorder.state!=='inactive'){
      mediaRecorder.stop(); // mediaRecorder.onstop uploads the blob
      return;
    }
    _setRecording(false);
    _stopTracks();
  }
  window._stopMic=_stopMic; // expose for send-guard above

  // SpeechRecognition path: wire handlers once, up front.
  if(recognition){
    recognition.continuous=false;
    recognition.interimResults=true;
    recognition.lang=(typeof _locale!=='undefined'&&_locale._speech)||'en-US';
    recognition.onstart=()=>{ _finalText=''; };
    recognition.onresult=(event)=>{
      let interim='';
      let final=_finalText;
      for(let i=event.resultIndex;i<event.results.length;i++){
        const t=event.results[i][0].transcript;
        if(event.results[i].isFinal){ final+=t; _finalText=final; }
        else{ interim+=t; }
      }
      // Append to whatever was already in the textarea before mic started
      ta.value=_prefix+(final||interim);
      autoResize();
    };
    recognition.onend=()=>{
      // Commit: prefix + final transcription, with a separating space when the
      // prefix does not already end in whitespace.
      const committed=_finalText
        ? (_prefix&&!_prefix.endsWith(' ')&&!_prefix.endsWith('\n')
          ? _prefix+' '+_finalText.trimStart()
          : _prefix+_finalText)
        : ta.value; // no speech detected — leave whatever is there
      _setRecording(false); // clears _prefix/_finalText — committed is computed above
      ta.value=committed;
      autoResize();
      if(window._micPendingSend){
        window._micPendingSend=false;
        send();
      }
    };
    recognition.onerror=(event)=>{
      _setRecording(false);
      window._micPendingSend=false; // never auto-send after an error
      const msgs={
        'not-allowed':t('mic_denied'),
        'no-speech':t('mic_no_speech'),
        'network':t('mic_network'),
      };
      showToast(msgs[event.error]||t('mic_error')+event.error);
    };
  }

  // Mic button: toggle off if active; otherwise start SpeechRecognition when
  // available, else fall back to MediaRecorder capture.
  btn.onclick=async()=>{
    if(window._micActive){
      _stopMic();
      return;
    }
    _finalText='';
    // Snapshot existing textarea content so we append rather than replace
    _prefix=ta.value;
    if(recognition){
      recognition.start();
      _setRecording(true);
      return;
    }
    if(!_canRecordAudio){
      showToast(t('mic_network'));
      return;
    }
    try{
      mediaStream=await navigator.mediaDevices.getUserMedia({audio:true});
      // Prefer opus-in-webm, then plain webm/ogg; ''=let the browser choose.
      const preferredTypes=['audio/webm;codecs=opus','audio/webm','audio/ogg;codecs=opus','audio/ogg'];
      const mimeType=preferredTypes.find(type=>window.MediaRecorder.isTypeSupported?.(type))||'';
      mediaRecorder=new MediaRecorder(mediaStream,mimeType?{mimeType}:undefined);
      audioChunks=[];
      mediaRecorder.ondataavailable=e=>{if(e.data&&e.data.size)audioChunks.push(e.data);};
      mediaRecorder.onerror=()=>{
        _setRecording(false);
        window._micPendingSend=false;
        _stopTracks();
        showToast(t('mic_network'));
      };
      mediaRecorder.onstop=async()=>{
        const blob=new Blob(audioChunks,{type:mediaRecorder.mimeType||mimeType||'audio/webm'});
        _setRecording(false);
        _stopTracks();
        if(blob.size){ await _transcribeBlob(blob); }
        else if(window._micPendingSend){
          window._micPendingSend=false; // nothing was recorded — drop the queued send
        }
      };
      mediaRecorder.start();
      _setRecording(true);
    }catch(err){
      // getUserMedia rejection (permission denied / no device) or recorder setup failure
      window._micPendingSend=false;
      _stopTracks();
      showToast(t('mic_denied'));
    }
  };
})();
// Mic state flags live on window so the send-guard and the voice IIFE share them;
// ||= pattern keeps any value set before this script section ran.
window._micActive=window._micActive||false;
window._micPendingSend=window._micPendingSend||false;
// Reset input value after reading so selecting the same file twice re-fires onchange.
$('fileInput').onchange=e=>{addFiles(Array.from(e.target.files));e.target.value='';};
// New chat: also close the mobile sidebar so the fresh conversation is visible (#397).
$('btnNewChat').onclick=async()=>{await newSession();await renderSessionList();closeMobileSidebar();$('msg').focus();};
$('btnDownload').onclick=()=>{
if(!S.session)return;
const blob=new Blob([transcript()],{type:'text/markdown'});
@@ -374,7 +477,7 @@ document.addEventListener('keydown',async e=>{
}
if((e.metaKey||e.ctrlKey)&&e.key==='k'){
e.preventDefault();
if(!S.busy){await newSession();await renderSessionList();$('msg').focus();}
if(!S.busy){await newSession();await renderSessionList();closeMobileSidebar();$('msg').focus();}
}
if(e.key==='Escape'){
// Close settings overlay if open