use std::fs::File;
use std::io::{BufRead, BufReader, Read, Seek, SeekFrom};
use std::ops::Index;
use std::path::{Path, PathBuf};
use std::sync::atomic::{self, AtomicBool, AtomicUsize, Ordering};
use std::sync::{Arc, Mutex, RwLock, Weak};

use base::generic_channel;
use base::threadpool::ThreadPool;
use embedder_traits::{
    EmbedderControlId, EmbedderControlResponse, EmbedderMsg, EmbedderProxy, FilePickerRequest,
    SelectedFile,
};
use headers::{ContentLength, ContentRange, ContentType, HeaderMap, HeaderMapExt, Range};
use http::header::{self, HeaderValue};
use ipc_channel::ipc::IpcSender;
use log::warn;
use mime::{self, Mime};
use net_traits::blob_url_store::{BlobBuf, BlobURLStoreError};
use net_traits::filemanager_thread::{
    FileManagerResult, FileManagerThreadError, FileManagerThreadMsg, FileOrigin, FileTokenCheck,
    ReadFileProgress, RelativePos,
};
use net_traits::http_percent_encode;
use net_traits::response::{Response, ResponseBody};
use rustc_hash::{FxHashMap, FxHashSet};
use servo_arc::Arc as ServoArc;
use tokio::sync::mpsc::UnboundedSender as TokioSender;
use url::Url;
use uuid::Uuid;

use crate::fetch::methods::{CancellationListener, Data, RangeRequestBounds};
use crate::protocols::get_range_request_bounds;

pub const FILE_CHUNK_SIZE: usize = 32768; // 32 KB

/// An entry in the [`FileManagerStore`].
struct FileStoreEntry {
    /// Origin of the entry's creator.
    origin: FileOrigin,
    /// Backing implementation of this entry.
    file_impl: FileImpl,
    /// Number of outstanding references that keep this entry alive.
    refs: AtomicUsize,
    /// Whether this entry's UUID is currently exposed as a valid blob URL.
    is_valid_url: AtomicBool,
    /// Tokens issued to fetches while the blob URL was valid.
    outstanding_tokens: FxHashSet<Uuid>,
}

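/// Metadata for an entry backed by a file on disk.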
#[derive(Clone)]
struct FileMetaData {
    path: PathBuf,
    size: u64,
}

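/// The backing data of a file store entry.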
#[derive(Clone)]
enum FileImpl {
    /// A file on disk, of which only the metadata is kept in memory.
    MetaDataOnly(FileMetaData),
    /// An in-memory blob buffer.
    Memory(BlobBuf),
    /// A slice of a parent entry, identified by the parent's id and a
    /// position relative to it.
    Sliced(Uuid, RelativePos),
}

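/// File management interface backed by a shared [`FileManagerStore`]; cloning a
/// `FileManager` shares the same store and thread-pool handle.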
#[derive(Clone)]
pub struct FileManager {
    embedder_proxy: EmbedderProxy,
    store: Arc<FileManagerStore>,
    thread_pool: Weak<ThreadPool>,
}

impl FileManager {
    pub fn new(embedder_proxy: EmbedderProxy, pool_handle: Weak<ThreadPool>) -> FileManager {
        FileManager {
            embedder_proxy,
            store: Arc::new(FileManagerStore::new()),
            thread_pool: pool_handle,
        }
    }

    pub fn read_file(
        &self,
        sender: IpcSender<FileManagerResult<ReadFileProgress>>,
        id: Uuid,
        origin: FileOrigin,
    ) {
        let store = self.store.clone();
        self.thread_pool
            .upgrade()
            .map(|pool| {
                pool.spawn(move || {
                    if let Err(e) = store.try_read_file(&sender, id, origin) {
                        let _ = sender.send(Err(FileManagerThreadError::BlobURLStoreError(e)));
                    }
                });
            })
            .unwrap_or_else(|| {
                warn!("FileManager tried to read a file after CoreResourceManager has exited.");
            });
    }

    pub fn get_token_for_file(&self, file_id: &Uuid) -> FileTokenCheck {
        self.store.get_token_for_file(file_id)
    }

    pub fn invalidate_token(&self, token: &FileTokenCheck, file_id: &Uuid) {
        self.store.invalidate_token(token, file_id);
    }

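    /// Fetch a stored file or blob into `response`, honouring an optional byte
    /// `range`, and stream its body through `done_sender`.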
    #[allow(clippy::too_many_arguments)]
    pub fn fetch_file(
        &self,
        done_sender: &mut TokioSender<Data>,
        cancellation_listener: Arc<CancellationListener>,
        id: Uuid,
        file_token: &FileTokenCheck,
        origin: FileOrigin,
        response: &mut Response,
        range: Option<Range>,
    ) -> Result<(), BlobURLStoreError> {
        self.fetch_blob_buf(
            done_sender,
            cancellation_listener,
            &id,
            file_token,
            &origin,
            BlobBounds::Unresolved(range),
            response,
        )
    }

    pub fn promote_memory(&self, id: Uuid, blob_buf: BlobBuf, set_valid: bool, origin: FileOrigin) {
        self.store.promote_memory(id, blob_buf, set_valid, origin);
    }

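    /// Dispatch a [`FileManagerThreadMsg`] to the appropriate handler.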
    pub fn handle(&self, msg: FileManagerThreadMsg) {
        match msg {
            FileManagerThreadMsg::SelectFiles(control_id, file_picker_request, response_sender) => {
                let store = self.store.clone();
                let embedder = self.embedder_proxy.clone();
                self.thread_pool
                    .upgrade()
                    .map(|pool| {
                        pool.spawn(move || {
                            store.select_files(
                                control_id,
                                file_picker_request,
                                response_sender,
                                embedder,
                            );
                        });
                    })
                    .unwrap_or_else(|| {
                        warn!(
                            "FileManager tried to select files after CoreResourceManager has exited."
                        );
                    });
            },
            FileManagerThreadMsg::ReadFile(sender, id, origin) => {
                self.read_file(sender, id, origin);
            },
            FileManagerThreadMsg::PromoteMemory(id, blob_buf, set_valid, origin) => {
                self.promote_memory(id, blob_buf, set_valid, origin);
            },
            FileManagerThreadMsg::AddSlicedURLEntry(id, rel_pos, sender, origin) => {
                self.store.add_sliced_url_entry(id, rel_pos, sender, origin);
            },
            FileManagerThreadMsg::DecRef(id, origin, sender) => {
                let _ = sender.send(self.store.dec_ref(&id, &origin));
            },
            FileManagerThreadMsg::RevokeBlobURL(id, origin, sender) => {
                let _ = sender.send(self.store.set_blob_url_validity(false, &id, &origin));
            },
            FileManagerThreadMsg::ActivateBlobURL(id, sender, origin) => {
                let _ = sender.send(self.store.set_blob_url_validity(true, &id, &origin));
            },
        }
    }

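    /// Stream the contents of `reader` into `res_body` and `done_sender` in
    /// chunks of at most [`FILE_CHUNK_SIZE`] bytes on a thread-pool worker,
    /// stopping early if the fetch is cancelled.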
    pub fn fetch_file_in_chunks(
        &self,
        done_sender: &mut TokioSender<Data>,
        mut reader: BufReader<File>,
        res_body: ServoArc<Mutex<ResponseBody>>,
        cancellation_listener: Arc<CancellationListener>,
        range: RelativePos,
    ) {
        let done_sender = done_sender.clone();
        self.thread_pool
            .upgrade()
            .map(|pool| {
                pool.spawn(move || {
                    loop {
                        if cancellation_listener.cancelled() {
                            *res_body.lock().unwrap() = ResponseBody::Done(vec![]);
                            let _ = done_sender.send(Data::Cancelled);
                            return;
                        }
                        let length = {
                            let buffer = reader.fill_buf().unwrap().to_vec();
                            let mut buffer_len = buffer.len();
                            if let ResponseBody::Receiving(ref mut body) = *res_body.lock().unwrap()
                            {
                                let offset = usize::min(
                                    {
                                        if let Some(end) = range.end {
                                            let remaining_bytes =
                                                end as usize - range.start as usize - body.len() +
                                                    1;
                                            if remaining_bytes <= FILE_CHUNK_SIZE {
                                                // Last chunk of the requested range: report a
                                                // length of 0 so the loop ends after this chunk.
                                                buffer_len = 0;
                                                remaining_bytes
                                            } else {
                                                FILE_CHUNK_SIZE
                                            }
                                        } else {
                                            FILE_CHUNK_SIZE
                                        }
                                    },
                                    buffer.len(),
                                );
                                let chunk = &buffer[0..offset];
                                body.extend_from_slice(chunk);
                                let _ = done_sender.send(Data::Payload(chunk.to_vec()));
                            }
                            buffer_len
                        };
                        if length == 0 {
                            let mut body = res_body.lock().unwrap();
                            let completed_body = match *body {
                                ResponseBody::Receiving(ref mut body) => std::mem::take(body),
                                _ => vec![],
                            };
                            *body = ResponseBody::Done(completed_body);
                            let _ = done_sender.send(Data::Done);
                            break;
                        }
                        reader.consume(length);
                    }
                });
            })
            .unwrap_or_else(|| {
                warn!("FileManager tried to fetch a file in chunks after CoreResourceManager has exited.");
            });
    }

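    /// Serve the entry identified by `id` into `response`: in-memory blobs are
    /// sent directly, on-disk files are streamed in chunks, and sliced entries
    /// recurse into their parent with resolved bounds.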
    #[allow(clippy::too_many_arguments)]
    fn fetch_blob_buf(
        &self,
        done_sender: &mut TokioSender<Data>,
        cancellation_listener: Arc<CancellationListener>,
        id: &Uuid,
        file_token: &FileTokenCheck,
        origin_in: &FileOrigin,
        bounds: BlobBounds,
        response: &mut Response,
    ) -> Result<(), BlobURLStoreError> {
        let file_impl = self.store.get_impl(id, file_token, origin_in)?;
        let mut is_range_requested = false;
        match file_impl {
            FileImpl::Memory(buf) => {
                let bounds = match bounds {
                    BlobBounds::Unresolved(range) => {
                        if range.is_some() {
                            is_range_requested = true;
                        }
                        get_range_request_bounds(range, buf.size)
                    },
                    BlobBounds::Resolved(bounds) => bounds,
                };
                let range = bounds
                    .get_final(Some(buf.size))
                    .map_err(|_| BlobURLStoreError::InvalidRange)?;

                let range = range.to_abs_blob_range(buf.size as usize);
                let len = range.len() as u64;
                let content_range = if is_range_requested {
                    ContentRange::bytes(range.start as u64..range.end as u64, buf.size).ok()
                } else {
                    None
                };

                set_headers(
                    &mut response.headers,
                    len,
                    buf.type_string.parse().unwrap_or(mime::TEXT_PLAIN),
                    None,
                    content_range,
                );

                let mut bytes = vec![];
                bytes.extend_from_slice(buf.bytes.index(range));

                let _ = done_sender.send(Data::Payload(bytes));
                let _ = done_sender.send(Data::Done);

                Ok(())
            },
            FileImpl::MetaDataOnly(metadata) => {
                let file = File::open(&metadata.path)
                    .map_err(|e| BlobURLStoreError::External(e.to_string()))?;
                let mut is_range_requested = false;
                let bounds = match bounds {
                    BlobBounds::Unresolved(range) => {
                        if range.is_some() {
                            is_range_requested = true;
                        }
                        get_range_request_bounds(range, metadata.size)
                    },
                    BlobBounds::Resolved(bounds) => bounds,
                };
                let range = bounds
                    .get_final(Some(metadata.size))
                    .map_err(|_| BlobURLStoreError::InvalidRange)?;

                let mut reader = BufReader::with_capacity(FILE_CHUNK_SIZE, file);
                if reader.seek(SeekFrom::Start(range.start as u64)).is_err() {
                    return Err(BlobURLStoreError::External(
                        "Unexpected method for blob".into(),
                    ));
                }

                let filename = metadata
                    .path
                    .file_name()
                    .and_then(|osstr| osstr.to_str())
                    .map(|s| s.to_string());

                let content_range = if is_range_requested {
                    let abs_range = range.to_abs_blob_range(metadata.size as usize);
                    ContentRange::bytes(abs_range.start as u64..abs_range.end as u64, metadata.size)
                        .ok()
                } else {
                    None
                };
                set_headers(
                    &mut response.headers,
                    metadata.size,
                    mime_guess::from_path(metadata.path)
                        .first()
                        .unwrap_or(mime::TEXT_PLAIN),
                    filename,
                    content_range,
                );

                self.fetch_file_in_chunks(
                    &mut done_sender.clone(),
                    reader,
                    response.body.clone(),
                    cancellation_listener,
                    range,
                );

                Ok(())
            },
            FileImpl::Sliced(parent_id, inner_rel_pos) => {
                // Range requests are not honoured for sliced blobs; the slice's
                // own bounds are used instead.
                let bounds = RangeRequestBounds::Final(
                    RelativePos::full_range().slice_inner(&inner_rel_pos),
                );
                self.fetch_blob_buf(
                    done_sender,
                    cancellation_listener,
                    &parent_id,
                    file_token,
                    origin_in,
                    BlobBounds::Resolved(bounds),
                    response,
                )
            },
        }
    }
}

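/// The byte bounds requested for a blob fetch: either a raw HTTP `Range` header
/// that still has to be resolved against the blob's size, or bounds that have
/// already been resolved (e.g. for a sliced blob).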
enum BlobBounds {
    Unresolved(Option<Range>),
    Resolved(RangeRequestBounds),
}

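/// The collection of file entries managed by the [`FileManager`], keyed by UUID
/// and shared between all of its clones.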
struct FileManagerStore {
    entries: RwLock<FxHashMap<Uuid, FileStoreEntry>>,
}

impl FileManagerStore {
    fn new() -> Self {
        FileManagerStore {
            entries: RwLock::new(FxHashMap::default()),
        }
    }

    pub fn get_impl(
        &self,
        id: &Uuid,
        file_token: &FileTokenCheck,
        origin_in: &FileOrigin,
    ) -> Result<FileImpl, BlobURLStoreError> {
        match self.entries.read().unwrap().get(id) {
            Some(entry) => {
                if *origin_in != *entry.origin {
                    Err(BlobURLStoreError::InvalidOrigin)
                } else {
                    match file_token {
                        FileTokenCheck::NotRequired => Ok(entry.file_impl.clone()),
                        FileTokenCheck::Required(token) => {
                            if entry.outstanding_tokens.contains(token) {
                                return Ok(entry.file_impl.clone());
                            }
                            Err(BlobURLStoreError::InvalidFileID)
                        },
                        FileTokenCheck::ShouldFail => Err(BlobURLStoreError::InvalidFileID),
                    }
                }
            },
            None => Err(BlobURLStoreError::InvalidFileID),
        }
    }

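    /// Remove `token` from the entry's outstanding tokens, and drop the entry
    /// entirely once it has no references, no outstanding tokens and its blob
    /// URL is no longer valid.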
    pub fn invalidate_token(&self, token: &FileTokenCheck, file_id: &Uuid) {
        if let FileTokenCheck::Required(token) = token {
            let mut entries = self.entries.write().unwrap();
            if let Some(entry) = entries.get_mut(file_id) {
                entry.outstanding_tokens.remove(token);

                let zero_refs = entry.refs.load(Ordering::Acquire) == 0;
                let no_outstanding_tokens = entry.outstanding_tokens.is_empty();
                let valid = entry.is_valid_url.load(Ordering::Acquire);
                let do_remove = zero_refs && no_outstanding_tokens && !valid;

                if do_remove {
                    entries.remove(file_id);
                }
            }
        }
    }

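    /// Issue a fetch token for `file_id` (or, for sliced entries, for its
    /// parent). Fails if the entry does not exist or its blob URL is no longer
    /// valid.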
    pub fn get_token_for_file(&self, file_id: &Uuid) -> FileTokenCheck {
        let mut entries = self.entries.write().unwrap();
        let parent_id = match entries.get(file_id) {
            Some(entry) => {
                if let FileImpl::Sliced(ref parent_id, _) = entry.file_impl {
                    Some(*parent_id)
                } else {
                    None
                }
            },
            None => return FileTokenCheck::ShouldFail,
        };
        let file_id = match parent_id.as_ref() {
            Some(id) => id,
            None => file_id,
        };
        if let Some(entry) = entries.get_mut(file_id) {
            if !entry.is_valid_url.load(Ordering::Acquire) {
                return FileTokenCheck::ShouldFail;
            }
            let token = Uuid::new_v4();
            entry.outstanding_tokens.insert(token);
            return FileTokenCheck::Required(token);
        }
        FileTokenCheck::ShouldFail
    }

    fn insert(&self, id: Uuid, entry: FileStoreEntry) {
        self.entries.write().unwrap().insert(id, entry);
    }

    fn remove(&self, id: &Uuid) {
        self.entries.write().unwrap().remove(id);
    }

    fn inc_ref(&self, id: &Uuid, origin_in: &FileOrigin) -> Result<(), BlobURLStoreError> {
        match self.entries.read().unwrap().get(id) {
            Some(entry) => {
                if entry.origin == *origin_in {
                    entry.refs.fetch_add(1, Ordering::Relaxed);
                    Ok(())
                } else {
                    Err(BlobURLStoreError::InvalidOrigin)
                }
            },
            None => Err(BlobURLStoreError::InvalidFileID),
        }
    }

    fn add_sliced_url_entry(
        &self,
        parent_id: Uuid,
        rel_pos: RelativePos,
        sender: IpcSender<Result<Uuid, BlobURLStoreError>>,
        origin_in: FileOrigin,
    ) {
        match self.inc_ref(&parent_id, &origin_in) {
            Ok(_) => {
                let new_id = Uuid::new_v4();
                self.insert(
                    new_id,
                    FileStoreEntry {
                        origin: origin_in,
                        file_impl: FileImpl::Sliced(parent_id, rel_pos),
                        refs: AtomicUsize::new(1),
                        is_valid_url: AtomicBool::new(true),
                        outstanding_tokens: Default::default(),
                    },
                );

                let _ = sender.send(Ok(new_id));
            },
            Err(e) => {
                let _ = sender.send(Err(e));
            },
        }
    }

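    /// Ask the embedder to open a file picker for `file_picker_request`, create
    /// a store entry for each selected path, and send the resulting
    /// [`SelectedFile`]s back through `response_sender`.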
    fn select_files(
        &self,
        control_id: EmbedderControlId,
        file_picker_request: FilePickerRequest,
        response_sender: IpcSender<EmbedderControlResponse>,
        embedder_proxy: EmbedderProxy,
    ) {
        let (ipc_sender, ipc_receiver) =
            generic_channel::channel().expect("Failed to create IPC channel!");

        let origin = file_picker_request.origin.clone();
        embedder_proxy.send(EmbedderMsg::SelectFiles(
            control_id,
            file_picker_request,
            ipc_sender,
        ));

        let paths = match ipc_receiver.recv() {
            Ok(Some(result)) => result,
            Ok(None) => {
                let _ = response_sender.send(EmbedderControlResponse::FilePicker(None));
                return;
            },
            Err(error) => {
                warn!("Failed to receive files from embedder ({:?}).", error);
                let _ = response_sender.send(EmbedderControlResponse::FilePicker(None));
                return;
            },
        };

        let mut failed = false;
        let files: Vec<_> = paths
            .into_iter()
            .filter_map(|path| match self.create_entry(&path, &origin) {
                Ok(entry) => Some(entry),
                Err(error) => {
                    failed = true;
                    warn!("Failed to create entry for selected file: {error:?}");
                    None
                },
            })
            .collect();

        // If any selected file could not be turned into an entry, discard the
        // ones that succeeded and report an empty selection.
        if failed {
            for file in files.iter() {
                self.remove(&file.id);
            }
            let _ = response_sender.send(EmbedderControlResponse::FilePicker(Some(Vec::new())));
            return;
        }

        let _ = response_sender.send(EmbedderControlResponse::FilePicker(Some(files)));
    }

    fn create_entry(
        &self,
        file_path: &Path,
        origin: &str,
    ) -> Result<SelectedFile, FileManagerThreadError> {
        use net_traits::filemanager_thread::FileManagerThreadError::FileSystemError;

        let file = File::open(file_path).map_err(|e| FileSystemError(e.to_string()))?;
        let metadata = file
            .metadata()
            .map_err(|e| FileSystemError(e.to_string()))?;
        let modified = metadata
            .modified()
            .map_err(|e| FileSystemError(e.to_string()))?;
        let file_size = metadata.len();
        let file_name = file_path
            .file_name()
            .ok_or(FileSystemError("Invalid filepath".to_string()))?;

        let file_impl = FileImpl::MetaDataOnly(FileMetaData {
            path: file_path.to_path_buf(),
            size: file_size,
        });

        let id = Uuid::new_v4();

        self.insert(
            id,
            FileStoreEntry {
                origin: origin.to_string(),
                file_impl,
                refs: AtomicUsize::new(1),
                is_valid_url: AtomicBool::new(false),
                outstanding_tokens: Default::default(),
            },
        );

        let filename_path = Path::new(file_name);
        let type_string = match mime_guess::from_path(filename_path).first() {
            Some(x) => format!("{}", x),
            None => "".to_string(),
        };

        Ok(SelectedFile {
            id,
            filename: filename_path.to_path_buf(),
            modified,
            size: file_size,
            type_string,
        })
    }

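    /// Read the slice of the file identified by `id` described by `rel_pos`
    /// and stream it to `sender` as [`ReadFileProgress`] messages.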
    fn get_blob_buf(
        &self,
        sender: &IpcSender<FileManagerResult<ReadFileProgress>>,
        id: &Uuid,
        file_token: &FileTokenCheck,
        origin_in: &FileOrigin,
        rel_pos: RelativePos,
    ) -> Result<(), BlobURLStoreError> {
        let file_impl = self.get_impl(id, file_token, origin_in)?;
        match file_impl {
            FileImpl::Memory(buf) => {
                let range = rel_pos.to_abs_range(buf.size as usize);
                let buf = BlobBuf {
                    filename: None,
                    type_string: buf.type_string,
                    size: range.len() as u64,
                    bytes: buf.bytes.index(range).to_vec(),
                };

                let _ = sender.send(Ok(ReadFileProgress::Meta(buf)));
                let _ = sender.send(Ok(ReadFileProgress::EOF));

                Ok(())
            },
            FileImpl::MetaDataOnly(metadata) => {
                let opt_filename = metadata
                    .path
                    .file_name()
                    .and_then(|osstr| osstr.to_str())
                    .map(|s| s.to_string());

                let mime = mime_guess::from_path(metadata.path.clone()).first();
                let range = rel_pos.to_abs_range(metadata.size as usize);

                let mut file = File::open(&metadata.path)
                    .map_err(|e| BlobURLStoreError::External(e.to_string()))?;
                let seeked_start = file
                    .seek(SeekFrom::Start(range.start as u64))
                    .map_err(|e| BlobURLStoreError::External(e.to_string()))?;

                if seeked_start == (range.start as u64) {
                    let type_string = match mime {
                        Some(x) => format!("{}", x),
                        None => "".to_string(),
                    };

                    read_file_in_chunks(sender, &mut file, range.len(), opt_filename, type_string);
                    Ok(())
                } else {
                    Err(BlobURLStoreError::InvalidEntry)
                }
            },
            FileImpl::Sliced(parent_id, inner_rel_pos) => {
                // Resolve the slice against its parent and read from there.
                self.get_blob_buf(
                    sender,
                    &parent_id,
                    file_token,
                    origin_in,
                    rel_pos.slice_inner(&inner_rel_pos),
                )
            },
        }
    }

    fn try_read_file(
        &self,
        sender: &IpcSender<FileManagerResult<ReadFileProgress>>,
        id: Uuid,
        origin_in: FileOrigin,
    ) -> Result<(), BlobURLStoreError> {
        self.get_blob_buf(
            sender,
            &id,
            &FileTokenCheck::NotRequired,
            &origin_in,
            RelativePos::full_range(),
        )
    }

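    /// Decrement the reference count of `id`, removing the entry once its last
    /// reference is dropped, provided it has no outstanding tokens and its blob
    /// URL is not valid; removing a sliced entry also releases its parent.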
    fn dec_ref(&self, id: &Uuid, origin_in: &FileOrigin) -> Result<(), BlobURLStoreError> {
        let (do_remove, opt_parent_id) = match self.entries.read().unwrap().get(id) {
            Some(entry) => {
                if *entry.origin == *origin_in {
                    let old_refs = entry.refs.fetch_sub(1, Ordering::Release);

                    if old_refs > 1 {
                        (false, None)
                    } else {
                        let is_valid = entry.is_valid_url.load(Ordering::Acquire);
                        let no_outstanding_tokens = entry.outstanding_tokens.is_empty();
                        let do_remove = !is_valid && no_outstanding_tokens;

                        if let FileImpl::Sliced(ref parent_id, _) = entry.file_impl {
                            (do_remove, Some(*parent_id))
                        } else {
                            (do_remove, None)
                        }
                    }
                } else {
                    return Err(BlobURLStoreError::InvalidOrigin);
                }
            },
            None => return Err(BlobURLStoreError::InvalidFileID),
        };

        if do_remove {
            // Pairs with the `Release` decrement above, so the removal sees all
            // prior uses of the entry.
            atomic::fence(Ordering::Acquire);
            self.remove(id);

            if let Some(parent_id) = opt_parent_id {
                return self.dec_ref(&parent_id, origin_in);
            }
        }

        Ok(())
    }

    fn promote_memory(&self, id: Uuid, blob_buf: BlobBuf, set_valid: bool, origin: FileOrigin) {
        // Ignore entries whose origin does not parse as an absolute URL.
        if Url::parse(&origin).is_err() {
            return;
        }
        self.insert(
            id,
            FileStoreEntry {
                origin,
                file_impl: FileImpl::Memory(blob_buf),
                refs: AtomicUsize::new(1),
                is_valid_url: AtomicBool::new(set_valid),
                outstanding_tokens: Default::default(),
            },
        );
    }

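    /// Mark the blob URL for `id` as valid or revoked. Revoking an entry that
    /// has no references and no outstanding tokens removes it from the store
    /// (and releases its parent, for sliced entries).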
    fn set_blob_url_validity(
        &self,
        validity: bool,
        id: &Uuid,
        origin_in: &FileOrigin,
    ) -> Result<(), BlobURLStoreError> {
        let (do_remove, opt_parent_id, res) = match self.entries.read().unwrap().get(id) {
            Some(entry) => {
                if *entry.origin == *origin_in {
                    entry.is_valid_url.store(validity, Ordering::Release);

                    if !validity {
                        let zero_refs = entry.refs.load(Ordering::Acquire) == 0;
                        let no_outstanding_tokens = entry.outstanding_tokens.is_empty();
                        let do_remove = zero_refs && no_outstanding_tokens;

                        if let FileImpl::Sliced(ref parent_id, _) = entry.file_impl {
                            (do_remove, Some(*parent_id), Ok(()))
                        } else {
                            (do_remove, None, Ok(()))
                        }
                    } else {
                        (false, None, Ok(()))
                    }
                } else {
                    (false, None, Err(BlobURLStoreError::InvalidOrigin))
                }
            },
            None => (false, None, Err(BlobURLStoreError::InvalidFileID)),
        };

        if do_remove {
            atomic::fence(Ordering::Acquire);
            self.remove(id);

            if let Some(parent_id) = opt_parent_id {
                return self.dec_ref(&parent_id, origin_in);
            }
        }
        res
    }
}

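/// Read `file` from its current position and send its contents to `sender` in
/// [`FILE_CHUNK_SIZE`]-sized pieces: a [`ReadFileProgress::Meta`] message
/// carrying the first chunk, then `Partial` chunks, and finally `EOF`.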
fn read_file_in_chunks(
    sender: &IpcSender<FileManagerResult<ReadFileProgress>>,
    file: &mut File,
    size: usize,
    opt_filename: Option<String>,
    type_string: String,
) {
    let mut buf = vec![0; FILE_CHUNK_SIZE];
    match file.read(&mut buf) {
        Ok(n) => {
            buf.truncate(n);
            let blob_buf = BlobBuf {
                filename: opt_filename,
                type_string,
                size: size as u64,
                bytes: buf,
            };
            let _ = sender.send(Ok(ReadFileProgress::Meta(blob_buf)));
        },
        Err(e) => {
            let _ = sender.send(Err(FileManagerThreadError::FileSystemError(e.to_string())));
            return;
        },
    }

    loop {
        let mut buf = vec![0; FILE_CHUNK_SIZE];
        match file.read(&mut buf) {
            Ok(0) => {
                let _ = sender.send(Ok(ReadFileProgress::EOF));
                return;
            },
            Ok(n) => {
                buf.truncate(n);
                let _ = sender.send(Ok(ReadFileProgress::Partial(buf)));
            },
            Err(e) => {
                let _ = sender.send(Err(FileManagerThreadError::FileSystemError(e.to_string())));
                return;
            },
        }
    }
}

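/// Populate the response headers for a blob/file response: `Content-Length`,
/// `Content-Type`, an optional `Content-Range`, and, when a filename is known,
/// a `Content-Disposition` header carrying it.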
fn set_headers(
    headers: &mut HeaderMap,
    content_length: u64,
    mime: Mime,
    filename: Option<String>,
    content_range: Option<ContentRange>,
) {
    headers.typed_insert(ContentLength(content_length));
    if let Some(content_range) = content_range {
        headers.typed_insert(content_range);
    }
    headers.typed_insert(ContentType::from(mime.clone()));
    let name = match filename {
        Some(name) => name,
        None => return,
    };
    let charset = mime.get_param(mime::CHARSET);
    let charset = charset
        .map(|c| c.as_ref().into())
        .unwrap_or("us-ascii".to_owned());
    headers.insert(
        header::CONTENT_DISPOSITION,
        HeaderValue::from_bytes(
            format!(
                "inline; {}",
                if charset.to_lowercase() == "utf-8" {
                    format!(
                        "filename=\"{}\"",
                        String::from_utf8(name.as_bytes().into()).unwrap()
                    )
                } else {
                    format!(
                        "filename*=\"{}\"''{}",
                        charset,
                        http_percent_encode(name.as_bytes())
                    )
                }
            )
            .as_bytes(),
        )
        .unwrap(),
    );
}