net/filemanager_thread.rs

/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/. */

use std::fs::File;
use std::io::{BufRead, BufReader, Read, Seek, SeekFrom};
use std::ops::Index;
use std::path::{Path, PathBuf};
use std::sync::atomic::{self, AtomicBool, AtomicUsize, Ordering};
use std::sync::{Arc, Mutex, RwLock, Weak};

use base::generic_channel;
use base::id::WebViewId;
use base::threadpool::ThreadPool;
use embedder_traits::{EmbedderMsg, EmbedderProxy, FilterPattern};
use headers::{ContentLength, ContentRange, ContentType, HeaderMap, HeaderMapExt, Range};
use http::header::{self, HeaderValue};
use ipc_channel::ipc::IpcSender;
use log::warn;
use mime::{self, Mime};
use net_traits::blob_url_store::{BlobBuf, BlobURLStoreError};
use net_traits::filemanager_thread::{
    FileManagerResult, FileManagerThreadError, FileManagerThreadMsg, FileOrigin, FileTokenCheck,
    ReadFileProgress, RelativePos, SelectedFile,
};
use net_traits::http_percent_encode;
use net_traits::response::{Response, ResponseBody};
use rustc_hash::{FxHashMap, FxHashSet};
use servo_arc::Arc as ServoArc;
use servo_config::pref;
use tokio::sync::mpsc::UnboundedSender as TokioSender;
use url::Url;
use uuid::Uuid;

use crate::fetch::methods::{CancellationListener, Data, RangeRequestBounds};
use crate::protocols::get_range_request_bounds;

pub const FILE_CHUNK_SIZE: usize = 32768; // 32 KB

/// FileManagerStore's entry
struct FileStoreEntry {
    /// Origin of the entry's "creator"
    origin: FileOrigin,
    /// Backend implementation
    file_impl: FileImpl,
    /// Number of `FileID` holders whose ID is used to
    /// index this entry in `FileManagerStore`.
    /// Reference holders include a `FileStoreEntry` or
    /// a script-side File-based Blob.
    refs: AtomicUsize,
    /// UUIDs only become valid blob URIs when explicitly requested
    /// by the user with createObjectURL. Validity can be revoked as well.
    /// (The UUID is the one that maps to this entry in `FileManagerStore`)
    is_valid_url: AtomicBool,
    /// UUIDs of fetch instances that acquired an interest in this file,
    /// when the url was still valid.
    outstanding_tokens: FxHashSet<Uuid>,
}

#[derive(Clone)]
struct FileMetaData {
    path: PathBuf,
    size: u64,
}

/// File backend implementation
#[derive(Clone)]
enum FileImpl {
    /// Metadata of on-disk file
    MetaDataOnly(FileMetaData),
    /// In-memory Blob buffer object
    Memory(BlobBuf),
    /// A reference to parent entry in `FileManagerStore`,
    /// representing a sliced version of the parent entry data
    Sliced(Uuid, RelativePos),
}

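/// Handle used to access the file manager. Clones share the same underlying
/// `FileManagerStore`; blocking file work is dispatched to the resource thread pool.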
#[derive(Clone)]
pub struct FileManager {
    embedder_proxy: EmbedderProxy,
    store: Arc<FileManagerStore>,
    thread_pool: Weak<ThreadPool>,
}

impl FileManager {
    pub fn new(embedder_proxy: EmbedderProxy, pool_handle: Weak<ThreadPool>) -> FileManager {
        FileManager {
            embedder_proxy,
            store: Arc::new(FileManagerStore::new()),
            thread_pool: pool_handle,
        }
    }

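    /// Read the file identified by `id` on the thread pool, reporting progress
    /// (or an error) back to `sender` as a sequence of `ReadFileProgress` messages.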
    pub fn read_file(
        &self,
        sender: IpcSender<FileManagerResult<ReadFileProgress>>,
        id: Uuid,
        origin: FileOrigin,
    ) {
        let store = self.store.clone();
        self.thread_pool
            .upgrade()
            .map(|pool| {
                pool.spawn(move || {
                    if let Err(e) = store.try_read_file(&sender, id, origin) {
                        let _ = sender.send(Err(FileManagerThreadError::BlobURLStoreError(e)));
                    }
                });
            })
            .unwrap_or_else(|| {
                warn!("FileManager tried to read a file after CoreResourceManager has exited.");
            });
    }

    pub fn get_token_for_file(&self, file_id: &Uuid) -> FileTokenCheck {
        self.store.get_token_for_file(file_id)
    }

    pub fn invalidate_token(&self, token: &FileTokenCheck, file_id: &Uuid) {
        self.store.invalidate_token(token, file_id);
    }

    /// Read a file for the Fetch implementation.
    /// It gets the required headers synchronously and reads the actual content
    /// in a separate thread.
    #[allow(clippy::too_many_arguments)]
    pub fn fetch_file(
        &self,
        done_sender: &mut TokioSender<Data>,
        cancellation_listener: Arc<CancellationListener>,
        id: Uuid,
        file_token: &FileTokenCheck,
        origin: FileOrigin,
        response: &mut Response,
        range: Option<Range>,
    ) -> Result<(), BlobURLStoreError> {
        self.fetch_blob_buf(
            done_sender,
            cancellation_listener,
            &id,
            file_token,
            &origin,
            BlobBounds::Unresolved(range),
            response,
        )
    }

    pub fn promote_memory(&self, id: Uuid, blob_buf: BlobBuf, set_valid: bool, origin: FileOrigin) {
        self.store.promote_memory(id, blob_buf, set_valid, origin);
    }

    /// Message handler
    pub fn handle(&self, msg: FileManagerThreadMsg) {
        match msg {
            FileManagerThreadMsg::SelectFile(webview_id, filter, sender, origin, opt_test_path) => {
                let store = self.store.clone();
                let embedder = self.embedder_proxy.clone();
                self.thread_pool
                    .upgrade()
                    .map(|pool| {
                        pool.spawn(move || {
                            store.select_file(webview_id, filter, sender, origin, opt_test_path, embedder);
                        });
                    })
                    .unwrap_or_else(|| {
                        warn!(
                            "FileManager tried to select a file after CoreResourceManager has exited."
                        );
                    });
            },
            FileManagerThreadMsg::SelectFiles(
                webview_id,
                filter,
                sender,
                origin,
                opt_test_paths,
            ) => {
                let store = self.store.clone();
                let embedder = self.embedder_proxy.clone();
                self.thread_pool
                    .upgrade()
                    .map(|pool| {
                        pool.spawn(move || {
                            store.select_files(webview_id, filter, sender, origin, opt_test_paths, embedder);
                        });
                    })
                    .unwrap_or_else(|| {
                        warn!(
                            "FileManager tried to select multiple files after CoreResourceManager has exited."
                        );
                    });
            },
            FileManagerThreadMsg::ReadFile(sender, id, origin) => {
                self.read_file(sender, id, origin);
            },
            FileManagerThreadMsg::PromoteMemory(id, blob_buf, set_valid, origin) => {
                self.promote_memory(id, blob_buf, set_valid, origin);
            },
            FileManagerThreadMsg::AddSlicedURLEntry(id, rel_pos, sender, origin) => {
                self.store.add_sliced_url_entry(id, rel_pos, sender, origin);
            },
            FileManagerThreadMsg::DecRef(id, origin, sender) => {
                let _ = sender.send(self.store.dec_ref(&id, &origin));
            },
            FileManagerThreadMsg::RevokeBlobURL(id, origin, sender) => {
                let _ = sender.send(self.store.set_blob_url_validity(false, &id, &origin));
            },
            FileManagerThreadMsg::ActivateBlobURL(id, sender, origin) => {
                let _ = sender.send(self.store.set_blob_url_validity(true, &id, &origin));
            },
        }
    }

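    /// Stream the contents of `reader` to `done_sender` in `FILE_CHUNK_SIZE` chunks on the
    /// thread pool, appending each chunk to `res_body`, until the requested `range` is
    /// exhausted, EOF is reached, or the fetch is cancelled.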
    pub fn fetch_file_in_chunks(
        &self,
        done_sender: &mut TokioSender<Data>,
        mut reader: BufReader<File>,
        res_body: ServoArc<Mutex<ResponseBody>>,
        cancellation_listener: Arc<CancellationListener>,
        range: RelativePos,
    ) {
        let done_sender = done_sender.clone();
        self.thread_pool
            .upgrade()
            .map(|pool| {
                pool.spawn(move || {
                    loop {
                        if cancellation_listener.cancelled() {
                            *res_body.lock().unwrap() = ResponseBody::Done(vec![]);
                            let _ = done_sender.send(Data::Cancelled);
                            return;
                        }
                        let length = {
                            let buffer = reader.fill_buf().unwrap().to_vec();
                            let mut buffer_len = buffer.len();
                            if let ResponseBody::Receiving(ref mut body) = *res_body.lock().unwrap()
                            {
                                let offset = usize::min(
                                    {
                                        if let Some(end) = range.end {
                                            // HTTP Range requests are specified with closed ranges,
                                            // while Rust uses half-open ranges. We add +1 here so
                                            // we don't skip the last requested byte.
                                            let remaining_bytes =
                                                end as usize - range.start as usize - body.len() +
                                                    1;
                                            if remaining_bytes <= FILE_CHUNK_SIZE {
                                                // This is the last chunk so we set buffer
                                                // len to 0 to break the reading loop.
                                                buffer_len = 0;
                                                remaining_bytes
                                            } else {
                                                FILE_CHUNK_SIZE
                                            }
                                        } else {
                                            FILE_CHUNK_SIZE
                                        }
                                    },
                                    buffer.len(),
                                );
                                let chunk = &buffer[0..offset];
                                body.extend_from_slice(chunk);
                                let _ = done_sender.send(Data::Payload(chunk.to_vec()));
                            }
                            buffer_len
                        };
                        if length == 0 {
                            let mut body = res_body.lock().unwrap();
                            let completed_body = match *body {
                                ResponseBody::Receiving(ref mut body) => std::mem::take(body),
                                _ => vec![],
                            };
                            *body = ResponseBody::Done(completed_body);
                            let _ = done_sender.send(Data::Done);
                            break;
                        }
                        reader.consume(length);
                    }
                });
            })
            .unwrap_or_else(|| {
                warn!("FileManager tried to fetch a file in chunks after CoreResourceManager has exited.");
            });
    }

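    /// Resolve the backing `FileImpl` for `id` and serve it to the fetch:
    /// in-memory buffers are sent as a single payload, on-disk files are
    /// streamed in chunks, and sliced entries recurse into their parent
    /// with the slice's bounds applied.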
    #[allow(clippy::too_many_arguments)]
    fn fetch_blob_buf(
        &self,
        done_sender: &mut TokioSender<Data>,
        cancellation_listener: Arc<CancellationListener>,
        id: &Uuid,
        file_token: &FileTokenCheck,
        origin_in: &FileOrigin,
        bounds: BlobBounds,
        response: &mut Response,
    ) -> Result<(), BlobURLStoreError> {
        let file_impl = self.store.get_impl(id, file_token, origin_in)?;
        /*
           Only a Fetch Blob Range Request has an unresolved range, and only in that
           case do we care about the Range header.
        */
        let mut is_range_requested = false;
        match file_impl {
            FileImpl::Memory(buf) => {
                let bounds = match bounds {
                    BlobBounds::Unresolved(range) => {
                        if range.is_some() {
                            is_range_requested = true;
                        }
                        get_range_request_bounds(range, buf.size)
                    },
                    BlobBounds::Resolved(bounds) => bounds,
                };
                let range = bounds
                    .get_final(Some(buf.size))
                    .map_err(|_| BlobURLStoreError::InvalidRange)?;

                let range = range.to_abs_blob_range(buf.size as usize);
                let len = range.len() as u64;
                let content_range = if is_range_requested {
                    ContentRange::bytes(range.start as u64..range.end as u64, buf.size).ok()
                } else {
                    None
                };

                set_headers(
                    &mut response.headers,
                    len,
                    buf.type_string.parse().unwrap_or(mime::TEXT_PLAIN),
                    /* filename */ None,
                    content_range,
                );

                let mut bytes = vec![];
                bytes.extend_from_slice(buf.bytes.index(range));

                let _ = done_sender.send(Data::Payload(bytes));
                let _ = done_sender.send(Data::Done);

                Ok(())
            },
            FileImpl::MetaDataOnly(metadata) => {
                /* XXX: Snapshot state check (optional) https://w3c.github.io/FileAPI/#snapshot-state.
                        Concretely, we open the file again here, and it might no longer
                        have the same underlying state (meta-info plus content) as it had
                        when create_entry was called.
                */

                let file = File::open(&metadata.path)
                    .map_err(|e| BlobURLStoreError::External(e.to_string()))?;
                let mut is_range_requested = false;
                let bounds = match bounds {
                    BlobBounds::Unresolved(range) => {
                        if range.is_some() {
                            is_range_requested = true;
                        }
                        get_range_request_bounds(range, metadata.size)
                    },
                    BlobBounds::Resolved(bounds) => bounds,
                };
                let range = bounds
                    .get_final(Some(metadata.size))
                    .map_err(|_| BlobURLStoreError::InvalidRange)?;

                let mut reader = BufReader::with_capacity(FILE_CHUNK_SIZE, file);
                if reader.seek(SeekFrom::Start(range.start as u64)).is_err() {
                    return Err(BlobURLStoreError::External(
                        "Unexpected method for blob".into(),
                    ));
                }

                let filename = metadata
                    .path
                    .file_name()
                    .and_then(|osstr| osstr.to_str())
                    .map(|s| s.to_string());

                let content_range = if is_range_requested {
                    let abs_range = range.to_abs_blob_range(metadata.size as usize);
                    ContentRange::bytes(abs_range.start as u64..abs_range.end as u64, metadata.size)
                        .ok()
                } else {
                    None
                };
                set_headers(
                    &mut response.headers,
                    metadata.size,
                    mime_guess::from_path(metadata.path)
                        .first()
                        .unwrap_or(mime::TEXT_PLAIN),
                    filename,
                    content_range,
                );

                self.fetch_file_in_chunks(
                    &mut done_sender.clone(),
                    reader,
                    response.body.clone(),
                    cancellation_listener,
                    range,
                );

                Ok(())
            },
            FileImpl::Sliced(parent_id, inner_rel_pos) => {
                // No need to check validity again on the recursive call:
                // it was already checked for the requesting URL if necessary.
                let bounds = RangeRequestBounds::Final(
                    RelativePos::full_range().slice_inner(&inner_rel_pos),
                );
                self.fetch_blob_buf(
                    done_sender,
                    cancellation_listener,
                    &parent_id,
                    file_token,
                    origin_in,
                    BlobBounds::Resolved(bounds),
                    response,
                )
            },
        }
    }
}

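/// Bounds for reading a blob: either a raw `Range` header that still has to be
/// resolved against the blob's size, or bounds that the caller has already resolved.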
enum BlobBounds {
    Unresolved(Option<Range>),
    Resolved(RangeRequestBounds),
}

/// File manager's data store. It maintains a thread-safe mapping
/// from FileID to FileStoreEntry, which might have different backend implementations.
/// Access to the content is encapsulated as methods of this struct.
struct FileManagerStore {
    entries: RwLock<FxHashMap<Uuid, FileStoreEntry>>,
}

impl FileManagerStore {
    fn new() -> Self {
        FileManagerStore {
            entries: RwLock::new(FxHashMap::default()),
        }
    }

    /// Copy out the file backend implementation content
    pub fn get_impl(
        &self,
        id: &Uuid,
        file_token: &FileTokenCheck,
        origin_in: &FileOrigin,
    ) -> Result<FileImpl, BlobURLStoreError> {
        match self.entries.read().unwrap().get(id) {
            Some(entry) => {
                if *origin_in != *entry.origin {
                    Err(BlobURLStoreError::InvalidOrigin)
                } else {
                    match file_token {
                        FileTokenCheck::NotRequired => Ok(entry.file_impl.clone()),
                        FileTokenCheck::Required(token) => {
                            if entry.outstanding_tokens.contains(token) {
                                return Ok(entry.file_impl.clone());
                            }
                            Err(BlobURLStoreError::InvalidFileID)
                        },
                        FileTokenCheck::ShouldFail => Err(BlobURLStoreError::InvalidFileID),
                    }
                }
            },
            None => Err(BlobURLStoreError::InvalidFileID),
        }
    }

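    /// Remove `token` from the entry's outstanding tokens. If the entry has no
    /// remaining references, no other outstanding tokens, and no valid blob URL,
    /// it is removed from the store.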
    pub fn invalidate_token(&self, token: &FileTokenCheck, file_id: &Uuid) {
        if let FileTokenCheck::Required(token) = token {
            let mut entries = self.entries.write().unwrap();
            if let Some(entry) = entries.get_mut(file_id) {
                entry.outstanding_tokens.remove(token);

                // Check if there are references left.
                let zero_refs = entry.refs.load(Ordering::Acquire) == 0;

                // Check if no other fetch has acquired a token for this file.
                let no_outstanding_tokens = entry.outstanding_tokens.is_empty();

                // Check if there is still a blob URL outstanding.
                let valid = entry.is_valid_url.load(Ordering::Acquire);

                // Can we remove this file?
                let do_remove = zero_refs && no_outstanding_tokens && !valid;

                if do_remove {
                    entries.remove(file_id);
                }
            }
        }
    }

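    /// Issue a fetch token for `file_id`. Sliced entries are resolved to their
    /// parent first; a token is only granted while the entry's blob URL is valid,
    /// otherwise the check is set to fail.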
    pub fn get_token_for_file(&self, file_id: &Uuid) -> FileTokenCheck {
        let mut entries = self.entries.write().unwrap();
        let parent_id = match entries.get(file_id) {
            Some(entry) => {
                if let FileImpl::Sliced(ref parent_id, _) = entry.file_impl {
                    Some(*parent_id)
                } else {
                    None
                }
            },
            None => return FileTokenCheck::ShouldFail,
        };
        let file_id = match parent_id.as_ref() {
            Some(id) => id,
            None => file_id,
        };
        if let Some(entry) = entries.get_mut(file_id) {
            if !entry.is_valid_url.load(Ordering::Acquire) {
                return FileTokenCheck::ShouldFail;
            }
            let token = Uuid::new_v4();
            entry.outstanding_tokens.insert(token);
            return FileTokenCheck::Required(token);
        }
        FileTokenCheck::ShouldFail
    }

    fn insert(&self, id: Uuid, entry: FileStoreEntry) {
        self.entries.write().unwrap().insert(id, entry);
    }

    fn remove(&self, id: &Uuid) {
        self.entries.write().unwrap().remove(id);
    }

    fn inc_ref(&self, id: &Uuid, origin_in: &FileOrigin) -> Result<(), BlobURLStoreError> {
        match self.entries.read().unwrap().get(id) {
            Some(entry) => {
                if entry.origin == *origin_in {
                    entry.refs.fetch_add(1, Ordering::Relaxed);
                    Ok(())
                } else {
                    Err(BlobURLStoreError::InvalidOrigin)
                }
            },
            None => Err(BlobURLStoreError::InvalidFileID),
        }
    }

    fn add_sliced_url_entry(
        &self,
        parent_id: Uuid,
        rel_pos: RelativePos,
        sender: IpcSender<Result<Uuid, BlobURLStoreError>>,
        origin_in: FileOrigin,
    ) {
        match self.inc_ref(&parent_id, &origin_in) {
            Ok(_) => {
                let new_id = Uuid::new_v4();
                self.insert(
                    new_id,
                    FileStoreEntry {
                        origin: origin_in,
                        file_impl: FileImpl::Sliced(parent_id, rel_pos),
                        refs: AtomicUsize::new(1),
                        // Valid here since AddSlicedURLEntry implies URL creation
                        // from a BlobImpl::Sliced
                        is_valid_url: AtomicBool::new(true),
                        outstanding_tokens: Default::default(),
                    },
                );

                // We assume that the returned id will be held by BlobImpl::File
                let _ = sender.send(Ok(new_id));
            },
            Err(e) => {
                let _ = sender.send(Err(e));
            },
        }
    }

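    /// Ask the embedder to show a file picker matching `patterns`, blocking
    /// until the embedder replies with the selected paths (or `None` on
    /// cancellation or failure).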
    fn query_files_from_embedder(
        &self,
        webview_id: WebViewId,
        patterns: Vec<FilterPattern>,
        multiple_files: bool,
        embedder_proxy: EmbedderProxy,
    ) -> Option<Vec<PathBuf>> {
        let (ipc_sender, ipc_receiver) =
            generic_channel::channel().expect("Failed to create IPC channel!");
        embedder_proxy.send(EmbedderMsg::SelectFiles(
            webview_id,
            patterns,
            multiple_files,
            ipc_sender,
        ));
        match ipc_receiver.recv() {
            Ok(result) => result,
            Err(e) => {
                warn!("Failed to receive files from embedder ({:?}).", e);
                None
            },
        }
    }

    fn select_file(
        &self,
        webview_id: WebViewId,
        patterns: Vec<FilterPattern>,
        sender: IpcSender<FileManagerResult<SelectedFile>>,
        origin: FileOrigin,
        opt_test_path: Option<PathBuf>,
        embedder_proxy: EmbedderProxy,
    ) {
        // Check that the select_files testing preference is enabled, which guards
        // against compromised script at the process level; only then is opt_test_path
        // applied directly, for testing convenience.
        let opt_s = if pref!(dom_testing_html_input_element_select_files_enabled) {
            opt_test_path
        } else {
            self.query_files_from_embedder(webview_id, patterns, false, embedder_proxy)
                .and_then(|mut x| x.pop())
        };

        match opt_s {
            Some(s) => {
                let selected_path = Path::new(&s);
                let result = self.create_entry(selected_path, &origin);
                let _ = sender.send(result);
            },
            None => {
                let _ = sender.send(Err(FileManagerThreadError::UserCancelled));
            },
        }
    }

    fn select_files(
        &self,
        webview_id: WebViewId,
        patterns: Vec<FilterPattern>,
        sender: IpcSender<FileManagerResult<Vec<SelectedFile>>>,
        origin: FileOrigin,
        opt_test_paths: Option<Vec<PathBuf>>,
        embedder_proxy: EmbedderProxy,
    ) {
        // Check that the select_files testing preference is enabled, which guards
        // against compromised script at the process level; only then are opt_test_paths
        // applied directly, for testing convenience.
        let opt_v = if pref!(dom_testing_html_input_element_select_files_enabled) {
            opt_test_paths
        } else {
            self.query_files_from_embedder(webview_id, patterns, true, embedder_proxy)
        };

        match opt_v {
            Some(v) => {
                let mut selected_paths = vec![];

                for s in &v {
                    selected_paths.push(Path::new(s));
                }

                let mut replies = vec![];

                for path in selected_paths {
                    match self.create_entry(path, &origin) {
                        Ok(triple) => replies.push(triple),
                        Err(e) => {
                            let _ = sender.send(Err(e));
                            return;
                        },
                    };
                }

                let _ = sender.send(Ok(replies));
            },
            None => {
                let _ = sender.send(Err(FileManagerThreadError::UserCancelled));
            },
        }
    }

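    /// Open `file_path`, record its metadata, and register a `MetaDataOnly` entry
    /// for it (with one reference and no valid blob URL yet), returning the
    /// `SelectedFile` description that is handed back to script.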
    fn create_entry(
        &self,
        file_path: &Path,
        origin: &str,
    ) -> Result<SelectedFile, FileManagerThreadError> {
        use net_traits::filemanager_thread::FileManagerThreadError::FileSystemError;

        let file = File::open(file_path).map_err(|e| FileSystemError(e.to_string()))?;
        let metadata = file
            .metadata()
            .map_err(|e| FileSystemError(e.to_string()))?;
        let modified = metadata
            .modified()
            .map_err(|e| FileSystemError(e.to_string()))?;
        let file_size = metadata.len();
        let file_name = file_path
            .file_name()
            .ok_or(FileSystemError("Invalid filepath".to_string()))?;

        let file_impl = FileImpl::MetaDataOnly(FileMetaData {
            path: file_path.to_path_buf(),
            size: file_size,
        });

        let id = Uuid::new_v4();

        self.insert(
            id,
            FileStoreEntry {
                origin: origin.to_string(),
                file_impl,
                refs: AtomicUsize::new(1),
                // Invalid here since create_entry is called by file selection
                is_valid_url: AtomicBool::new(false),
                outstanding_tokens: Default::default(),
            },
        );

        let filename_path = Path::new(file_name);
        let type_string = match mime_guess::from_path(filename_path).first() {
            Some(x) => format!("{}", x),
            None => "".to_string(),
        };

        Ok(SelectedFile {
            id,
            filename: filename_path.to_path_buf(),
            modified,
            size: file_size,
            type_string,
        })
    }

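    /// Resolve the `FileImpl` for `id` and stream the requested slice back over
    /// `sender`: memory-backed blobs are sent in one `Meta` message, file-backed
    /// blobs are read in chunks, and sliced entries recurse into their parent.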
    fn get_blob_buf(
        &self,
        sender: &IpcSender<FileManagerResult<ReadFileProgress>>,
        id: &Uuid,
        file_token: &FileTokenCheck,
        origin_in: &FileOrigin,
        rel_pos: RelativePos,
    ) -> Result<(), BlobURLStoreError> {
        let file_impl = self.get_impl(id, file_token, origin_in)?;
        match file_impl {
            FileImpl::Memory(buf) => {
                let range = rel_pos.to_abs_range(buf.size as usize);
                let buf = BlobBuf {
                    filename: None,
                    type_string: buf.type_string,
                    size: range.len() as u64,
                    bytes: buf.bytes.index(range).to_vec(),
                };

                let _ = sender.send(Ok(ReadFileProgress::Meta(buf)));
                let _ = sender.send(Ok(ReadFileProgress::EOF));

                Ok(())
            },
            FileImpl::MetaDataOnly(metadata) => {
                /* XXX: Snapshot state check (optional) https://w3c.github.io/FileAPI/#snapshot-state.
                        Concretely, we open the file again here, and it might no longer
                        have the same underlying state (meta-info plus content) as it had
                        when create_entry was called.
                */

                let opt_filename = metadata
                    .path
                    .file_name()
                    .and_then(|osstr| osstr.to_str())
                    .map(|s| s.to_string());

                let mime = mime_guess::from_path(metadata.path.clone()).first();
                let range = rel_pos.to_abs_range(metadata.size as usize);

                let mut file = File::open(&metadata.path)
                    .map_err(|e| BlobURLStoreError::External(e.to_string()))?;
                let seeked_start = file
                    .seek(SeekFrom::Start(range.start as u64))
                    .map_err(|e| BlobURLStoreError::External(e.to_string()))?;

                if seeked_start == (range.start as u64) {
                    let type_string = match mime {
                        Some(x) => format!("{}", x),
                        None => "".to_string(),
                    };

                    read_file_in_chunks(sender, &mut file, range.len(), opt_filename, type_string);
                    Ok(())
                } else {
                    Err(BlobURLStoreError::InvalidEntry)
                }
            },
            FileImpl::Sliced(parent_id, inner_rel_pos) => {
                // No need to check validity again on the recursive call:
                // it was already checked for the requesting URL if necessary.
                self.get_blob_buf(
                    sender,
                    &parent_id,
                    file_token,
                    origin_in,
                    rel_pos.slice_inner(&inner_rel_pos),
                )
            },
        }
    }

    // Convenience wrapper around get_blob_buf
    fn try_read_file(
        &self,
        sender: &IpcSender<FileManagerResult<ReadFileProgress>>,
        id: Uuid,
        origin_in: FileOrigin,
    ) -> Result<(), BlobURLStoreError> {
        self.get_blob_buf(
            sender,
            &id,
            &FileTokenCheck::NotRequired,
            &origin_in,
            RelativePos::full_range(),
        )
    }

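    /// Drop one reference to the entry. When the last reference goes away and the
    /// entry has no valid blob URL and no outstanding tokens, the entry is removed;
    /// for sliced entries this also releases the reference held on the parent.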
    fn dec_ref(&self, id: &Uuid, origin_in: &FileOrigin) -> Result<(), BlobURLStoreError> {
        let (do_remove, opt_parent_id) = match self.entries.read().unwrap().get(id) {
            Some(entry) => {
                if *entry.origin == *origin_in {
                    let old_refs = entry.refs.fetch_sub(1, Ordering::Release);

                    if old_refs > 1 {
                        // not the last reference, no need to touch parent
                        (false, None)
                    } else {
                        // This was the last reference; if the entry refers to a
                        // parent id, dec_ref the parent later if necessary.
                        let is_valid = entry.is_valid_url.load(Ordering::Acquire);

                        // Check if no fetch has acquired a token for this file.
                        let no_outstanding_tokens = entry.outstanding_tokens.is_empty();

                        // Can we remove this file?
                        let do_remove = !is_valid && no_outstanding_tokens;

                        if let FileImpl::Sliced(ref parent_id, _) = entry.file_impl {
                            (do_remove, Some(*parent_id))
                        } else {
                            (do_remove, None)
                        }
                    }
                } else {
                    return Err(BlobURLStoreError::InvalidOrigin);
                }
            },
            None => return Err(BlobURLStoreError::InvalidFileID),
        };

        // Remove the entry if its last reference is gone and it is
        // not part of a valid Blob URL.
        if do_remove {
            atomic::fence(Ordering::Acquire);
            self.remove(id);

            if let Some(parent_id) = opt_parent_id {
                return self.dec_ref(&parent_id, origin_in);
            }
        }

        Ok(())
    }

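    /// Register an in-memory blob under `id` for `origin` (the origin must parse
    /// as a URL), optionally marking it as a valid blob URL right away.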
    fn promote_memory(&self, id: Uuid, blob_buf: BlobBuf, set_valid: bool, origin: FileOrigin) {
        // parse to check sanity
        if Url::parse(&origin).is_err() {
            return;
        }
        self.insert(
            id,
            FileStoreEntry {
                origin,
                file_impl: FileImpl::Memory(blob_buf),
                refs: AtomicUsize::new(1),
                is_valid_url: AtomicBool::new(set_valid),
                outstanding_tokens: Default::default(),
            },
        );
    }

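    /// Activate or revoke the blob URL for `id`. Revoking may remove the entry
    /// outright when nothing else references it (no refs and no outstanding
    /// tokens), releasing the parent of a sliced entry in the process.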
    fn set_blob_url_validity(
        &self,
        validity: bool,
        id: &Uuid,
        origin_in: &FileOrigin,
    ) -> Result<(), BlobURLStoreError> {
        let (do_remove, opt_parent_id, res) = match self.entries.read().unwrap().get(id) {
            Some(entry) => {
                if *entry.origin == *origin_in {
                    entry.is_valid_url.store(validity, Ordering::Release);

                    if !validity {
                        // Check if it is the last possible reference
                        // since refs only accounts for blob id holders
                        // and store entry id holders
                        let zero_refs = entry.refs.load(Ordering::Acquire) == 0;

                        // Check if no fetch has acquired a token for this file.
                        let no_outstanding_tokens = entry.outstanding_tokens.is_empty();

                        // Can we remove this file?
                        let do_remove = zero_refs && no_outstanding_tokens;

                        if let FileImpl::Sliced(ref parent_id, _) = entry.file_impl {
                            (do_remove, Some(*parent_id), Ok(()))
                        } else {
                            (do_remove, None, Ok(()))
                        }
                    } else {
                        (false, None, Ok(()))
                    }
                } else {
                    (false, None, Err(BlobURLStoreError::InvalidOrigin))
                }
            },
            None => (false, None, Err(BlobURLStoreError::InvalidFileID)),
        };

        if do_remove {
            atomic::fence(Ordering::Acquire);
            self.remove(id);

            if let Some(parent_id) = opt_parent_id {
                return self.dec_ref(&parent_id, origin_in);
            }
        }
        res
    }
}

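/// Stream a file back over IPC: the first chunk is sent as `ReadFileProgress::Meta`
/// together with the blob's metadata, subsequent chunks as `Partial`, and `EOF`
/// once the file has been read completely.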
fn read_file_in_chunks(
    sender: &IpcSender<FileManagerResult<ReadFileProgress>>,
    file: &mut File,
    size: usize,
    opt_filename: Option<String>,
    type_string: String,
) {
    // First chunk
    let mut buf = vec![0; FILE_CHUNK_SIZE];
    match file.read(&mut buf) {
        Ok(n) => {
            buf.truncate(n);
            let blob_buf = BlobBuf {
                filename: opt_filename,
                type_string,
                size: size as u64,
                bytes: buf,
            };
            let _ = sender.send(Ok(ReadFileProgress::Meta(blob_buf)));
        },
        Err(e) => {
            let _ = sender.send(Err(FileManagerThreadError::FileSystemError(e.to_string())));
            return;
        },
    }

    // Send the remaining chunks
    loop {
        let mut buf = vec![0; FILE_CHUNK_SIZE];
        match file.read(&mut buf) {
            Ok(0) => {
                let _ = sender.send(Ok(ReadFileProgress::EOF));
                return;
            },
            Ok(n) => {
                buf.truncate(n);
                let _ = sender.send(Ok(ReadFileProgress::Partial(buf)));
            },
            Err(e) => {
                let _ = sender.send(Err(FileManagerThreadError::FileSystemError(e.to_string())));
                return;
            },
        }
    }
}

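/// Populate the response headers for a blob/file response: Content-Length, an
/// optional Content-Range, Content-Type and, when a filename is known, a
/// Content-Disposition header. For a UTF-8 charset this produces a value such as
/// `inline; filename="photo.png"`; otherwise the filename is percent-encoded
/// into a `filename*=` parameter.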
fn set_headers(
    headers: &mut HeaderMap,
    content_length: u64,
    mime: Mime,
    filename: Option<String>,
    content_range: Option<ContentRange>,
) {
    headers.typed_insert(ContentLength(content_length));
    if let Some(content_range) = content_range {
        headers.typed_insert(content_range);
    }
    headers.typed_insert(ContentType::from(mime.clone()));
    let name = match filename {
        Some(name) => name,
        None => return,
    };
    let charset = mime.get_param(mime::CHARSET);
    let charset = charset
        .map(|c| c.as_ref().into())
        .unwrap_or("us-ascii".to_owned());
    // TODO(eijebong): Replace this once the typed header is there
    //                 https://github.com/hyperium/headers/issues/8
    headers.insert(
        header::CONTENT_DISPOSITION,
        HeaderValue::from_bytes(
            format!(
                "inline; {}",
                if charset.to_lowercase() == "utf-8" {
                    format!(
                        "filename=\"{}\"",
                        String::from_utf8(name.as_bytes().into()).unwrap()
                    )
                } else {
                    format!(
                        "filename*=\"{}\"''{}",
                        charset,
                        http_percent_encode(name.as_bytes())
                    )
                }
            )
            .as_bytes(),
        )
        .unwrap(),
    );
}