component_id_index/
lib.rs

// Copyright 2020 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This library must remain platform-agnostic because it is used by a host tool and within Fuchsia.

use anyhow::Context;
use camino::{Utf8Path, Utf8PathBuf};
use clonable_error::ClonableError;
use fidl_fuchsia_component_internal as fcomponent_internal;
use moniker::Moniker;
use std::collections::{HashMap, HashSet};
use thiserror::Error;

#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};

pub mod fidl_convert;
mod instance_id;

pub use instance_id::{InstanceId, InstanceIdError};

/// Component ID index entry, only used for persistence to JSON5 and FIDL.
#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))]
#[derive(Debug, PartialEq, Eq, Clone)]
pub(crate) struct PersistedIndexEntry {
    pub instance_id: InstanceId,
    pub moniker: Moniker,
}

/// Component ID index, only used for persistence to JSON5 and FIDL.
///
/// Unlike [Index], this type is not validated, so it may contain duplicate monikers
/// and instance IDs.
#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))]
#[derive(Debug, PartialEq, Eq, Clone)]
pub(crate) struct PersistedIndex {
    instances: Vec<PersistedIndexEntry>,
}

/// An index that maps component monikers to instance IDs.
///
/// Unlike [PersistedIndex], this type is validated to contain only unique monikers and instance IDs.
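///
/// # Example
///
/// A minimal usage sketch; marked `ignore` because it is illustrative only and, like the
/// tests below, assumes the `rand` crate is available for generating an instance ID:
///
/// ```ignore
/// let mut index = Index::default();
/// let moniker = "/core/my_component".parse::<Moniker>().unwrap();
/// let id = InstanceId::new_random(&mut rand::thread_rng());
/// index.insert(moniker.clone(), id.clone()).unwrap();
/// assert_eq!(index.id_for_moniker(&moniker), Some(&id));
/// assert!(index.contains_id(&id));
/// ```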
#[cfg_attr(
    feature = "serde",
    derive(Deserialize, Serialize),
    serde(try_from = "PersistedIndex", into = "PersistedIndex")
)]
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Index {
    /// Maps each moniker in the index to its instance ID.
    moniker_to_instance_id: HashMap<Moniker, InstanceId>,

    /// All instance IDs, equivalent to the values of `moniker_to_instance_id`.
    instance_ids: HashSet<InstanceId>,
}

#[derive(Error, Clone, Debug)]
pub enum IndexError {
    #[error("failed to read index file '{path}'")]
    ReadFile {
        #[source]
        err: ClonableError,
        path: Utf8PathBuf,
    },
    #[error("invalid index")]
    ValidationError(#[from] ValidationError),
    #[error("could not merge indices")]
    MergeError(#[from] MergeError),
    #[error("could not convert FIDL index")]
    FidlConversionError(#[from] fidl_convert::FidlConversionError),
}

impl Index {
    /// Return an Index parsed from the FIDL file at `path`.
    pub fn from_fidl_file(path: &Utf8Path) -> Result<Self, IndexError> {
        fn fidl_index_from_file(
            path: &Utf8Path,
        ) -> Result<fcomponent_internal::ComponentIdIndex, anyhow::Error> {
            let raw_content = std::fs::read(path).context("failed to read file")?;
            let fidl_index = fidl::unpersist::<fcomponent_internal::ComponentIdIndex>(&raw_content)
                .context("failed to unpersist FIDL")?;
            Ok(fidl_index)
        }
        let fidl_index = fidl_index_from_file(path)
            .map_err(|err| IndexError::ReadFile { err: err.into(), path: path.to_owned() })?;
        let index = Index::try_from(fidl_index)?;
        Ok(index)
    }

    /// Construct an Index by merging JSON5 source files.
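    ///
    /// Each source file is a JSON5 document in the format exercised by the tests below,
    /// where `instance_id` is a 64-character hex string:
    ///
    /// ```json5
    /// {
    ///     instances: [
    ///         { instance_id: "<64 hex chars>", moniker: "/a/b" },
    ///     ],
    /// }
    /// ```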
    #[cfg(feature = "serde")]
    pub fn merged_from_json5_files(paths: &[Utf8PathBuf]) -> Result<Self, IndexError> {
        fn index_from_json5_file(path: &Utf8Path) -> Result<Index, anyhow::Error> {
            let mut file = std::fs::File::open(&path).context("failed to open")?;
            let index: Index = serde_json5::from_reader(&mut file).context("failed to parse")?;
            Ok(index)
        }
        let mut ctx = MergeContext::default();
        for path in paths {
            let index = index_from_json5_file(path)
                .map_err(|err| IndexError::ReadFile { err: err.into(), path: path.to_owned() })?;
            ctx.merge(path, &index)?;
        }
        Ok(ctx.output())
    }

    /// Insert an entry into the index.
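    ///
    /// Returns an error if the index already contains `instance_id` or `moniker`.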
    pub fn insert(
        &mut self,
        moniker: Moniker,
        instance_id: InstanceId,
    ) -> Result<(), ValidationError> {
        if !self.instance_ids.insert(instance_id.clone()) {
            return Err(ValidationError::DuplicateId(instance_id));
        }
        if self.moniker_to_instance_id.insert(moniker.clone(), instance_id).is_some() {
            return Err(ValidationError::DuplicateMoniker(moniker));
        }
        Ok(())
    }

    /// Returns the instance ID for the moniker, if the index contains the moniker.
    pub fn id_for_moniker(&self, moniker: &Moniker) -> Option<&InstanceId> {
        self.moniker_to_instance_id.get(moniker)
    }

    /// Returns true if the index contains the instance ID.
    pub fn contains_id(&self, id: &InstanceId) -> bool {
        self.instance_ids.contains(id)
    }

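    /// Returns an iterator over the moniker/instance ID pairs in the index.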
    pub fn iter(&self) -> impl Iterator<Item = (&Moniker, &InstanceId)> {
        self.moniker_to_instance_id.iter()
    }
}

impl Default for Index {
    fn default() -> Self {
        Index { moniker_to_instance_id: HashMap::new(), instance_ids: HashSet::new() }
    }
}

impl TryFrom<PersistedIndex> for Index {
    type Error = ValidationError;

    fn try_from(value: PersistedIndex) -> Result<Self, Self::Error> {
        let mut index = Index::default();
        for entry in value.instances.into_iter() {
            index.insert(entry.moniker, entry.instance_id)?;
        }
        Ok(index)
    }
}

impl From<Index> for PersistedIndex {
    fn from(value: Index) -> Self {
        let mut instances = value
            .moniker_to_instance_id
            .into_iter()
            .map(|(moniker, instance_id)| PersistedIndexEntry { instance_id, moniker })
            .collect::<Vec<_>>();
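        // Sort by moniker so the persisted form does not depend on HashMap iteration order.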
        instances.sort_by(|a, b| a.moniker.cmp(&b.moniker));
        Self { instances }
    }
}

#[derive(Error, Debug, Clone, PartialEq)]
pub enum ValidationError {
    #[error("duplicate moniker: {}", .0)]
    DuplicateMoniker(Moniker),
    #[error("duplicate instance ID: {}", .0)]
    DuplicateId(InstanceId),
}

#[derive(Error, Debug, Clone, PartialEq)]
pub enum MergeError {
    #[error("Moniker '{}' must be unique but exists in the following index files:\n {}\n {}", .moniker, .source1, .source2)]
    DuplicateMoniker { moniker: Moniker, source1: Utf8PathBuf, source2: Utf8PathBuf },
    #[error("Instance ID '{}' must be unique but exists in the following index files:\n {}\n {}", .instance_id, .source1, .source2)]
    DuplicateId { instance_id: InstanceId, source1: Utf8PathBuf, source2: Utf8PathBuf },
}

/// A builder that merges indices into a single accumulated index.
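///
/// # Example
///
/// A minimal sketch of merging two indices; marked `ignore` because it is illustrative
/// only, and `index1`/`index2` stand in for previously built [Index] values:
///
/// ```ignore
/// let mut ctx = MergeContext::default();
/// ctx.merge(Utf8Path::new("/path/to/first.json5"), &index1).unwrap();
/// ctx.merge(Utf8Path::new("/path/to/second.json5"), &index2).unwrap();
/// let merged: Index = ctx.output();
/// ```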
pub struct MergeContext {
    /// Index that contains entries accumulated from calls to [`merge()`].
    output_index: Index,
    // Maps each moniker to the path of the source index file that contains it.
    moniker_to_source_path: HashMap<Moniker, Utf8PathBuf>,
    // Maps each instance ID to the path of the source index file that contains it.
    instance_id_to_source_path: HashMap<InstanceId, Utf8PathBuf>,
}

impl MergeContext {
    /// Merge `index` into the MergeContext.
    ///
    /// This method can be called multiple times to merge multiple indices.
    /// The resulting index can be accessed with `output()`.
    pub fn merge(&mut self, source_index_path: &Utf8Path, index: &Index) -> Result<(), MergeError> {
        for (moniker, instance_id) in &index.moniker_to_instance_id {
            self.output_index.insert(moniker.clone(), instance_id.clone()).map_err(
                |err| match err {
                    ValidationError::DuplicateMoniker(moniker) => {
                        let previous_source_path =
                            self.moniker_to_source_path.get(&moniker).cloned().unwrap_or_default();
                        MergeError::DuplicateMoniker {
                            moniker,
                            source1: previous_source_path,
                            source2: source_index_path.to_owned(),
                        }
                    }
                    ValidationError::DuplicateId(instance_id) => {
                        let previous_source_path = self
                            .instance_id_to_source_path
                            .get(&instance_id)
                            .cloned()
                            .unwrap_or_default();
                        MergeError::DuplicateId {
                            instance_id,
                            source1: previous_source_path,
                            source2: source_index_path.to_owned(),
                        }
                    }
                },
            )?;
            self.instance_id_to_source_path
                .insert(instance_id.clone(), source_index_path.to_owned());
            self.moniker_to_source_path.insert(moniker.clone(), source_index_path.to_owned());
        }
        Ok(())
    }

    /// Return the accumulated index from calls to `merge()`.
    pub fn output(self) -> Index {
        self.output_index
    }
}

impl Default for MergeContext {
    fn default() -> Self {
        MergeContext {
            output_index: Index::default(),
            instance_id_to_source_path: HashMap::new(),
            moniker_to_source_path: HashMap::new(),
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use anyhow::Result;

    #[test]
    fn merge_empty_index() {
        let ctx = MergeContext::default();
        assert_eq!(ctx.output(), Index::default());
    }

    #[test]
    fn merge_single_index() -> Result<()> {
        let mut ctx = MergeContext::default();

        let mut index = Index::default();
        let moniker = ["foo"].try_into().unwrap();
        let instance_id = InstanceId::new_random(&mut rand::thread_rng());
        index.insert(moniker, instance_id).unwrap();

        ctx.merge(Utf8Path::new("/random/file/path"), &index)?;
        assert_eq!(ctx.output(), index.clone());
        Ok(())
    }

    #[test]
    fn merge_duplicate_id() -> Result<()> {
        let source1 = Utf8Path::new("/a/b/c");
        let source2 = Utf8Path::new("/d/e/f");

        let id = InstanceId::new_random(&mut rand::thread_rng());
        let index1 = {
            let mut index = Index::default();
            let moniker = ["foo"].try_into().unwrap();
            index.insert(moniker, id.clone()).unwrap();
            index
        };
        let index2 = {
            let mut index = Index::default();
            let moniker = ["bar"].try_into().unwrap();
            index.insert(moniker, id.clone()).unwrap();
            index
        };

        let mut ctx = MergeContext::default();
        ctx.merge(source1, &index1)?;

        let err = ctx.merge(source2, &index2).unwrap_err();
        assert_eq!(
            err,
            MergeError::DuplicateId {
                instance_id: id,
                source1: source1.to_owned(),
                source2: source2.to_owned()
            }
        );

        Ok(())
    }

    #[test]
    fn merge_duplicate_moniker() -> Result<()> {
        let source1 = Utf8Path::new("/a/b/c");
        let source2 = Utf8Path::new("/d/e/f");

        let moniker: Moniker = ["foo"].try_into().unwrap();
        let index1 = {
            let mut index = Index::default();
            let id = InstanceId::new_random(&mut rand::thread_rng());
            index.insert(moniker.clone(), id).unwrap();
            index
        };
        let index2 = {
            let mut index = Index::default();
            let id = InstanceId::new_random(&mut rand::thread_rng());
            index.insert(moniker.clone(), id).unwrap();
            index
        };

        let mut ctx = MergeContext::default();
        ctx.merge(source1, &index1)?;

        let err = ctx.merge(source2, &index2).unwrap_err();
        assert_eq!(
            err,
            MergeError::DuplicateMoniker {
                moniker,
                source1: source1.to_owned(),
                source2: source2.to_owned()
            }
        );

        Ok(())
    }

    #[cfg(feature = "serde")]
    #[test]
    fn merged_from_json5_files() {
        use std::io::Write;

        let mut index_file_1 = tempfile::NamedTempFile::new().unwrap();
        index_file_1
            .write_all(
                r#"{
            // Here is a comment.
            instances: [
                {
                    instance_id: "fb94044d62278b37c221c7fdeebdcf1304262f3e11416f68befa5ef88b7a2163",
                    moniker: "/a/b"
                }
            ]
        }"#
                .as_bytes(),
            )
            .unwrap();

        let mut index_file_2 = tempfile::NamedTempFile::new().unwrap();
        index_file_2
            .write_all(
                r#"{
            // Here is a comment.
            instances: [
                {
                    instance_id: "4f915af6c4b682867ab7ad2dc9cbca18342ddd9eec61724f19d231cf6d07f122",
                    moniker: "/c/d"
                }
            ]
        }"#
                .as_bytes(),
            )
            .unwrap();

        let expected_index = {
            let mut index = Index::default();
            index
                .insert(
                    "/a/b".parse::<Moniker>().unwrap(),
                    "fb94044d62278b37c221c7fdeebdcf1304262f3e11416f68befa5ef88b7a2163"
                        .parse::<InstanceId>()
                        .unwrap(),
                )
                .unwrap();
            index
                .insert(
                    "/c/d".parse::<Moniker>().unwrap(),
                    "4f915af6c4b682867ab7ad2dc9cbca18342ddd9eec61724f19d231cf6d07f122"
                        .parse::<InstanceId>()
                        .unwrap(),
                )
                .unwrap();
            index
        };

        // Check that the two files parse and merge into the expected index.
        let files = [
            Utf8PathBuf::from_path_buf(index_file_1.path().to_path_buf()).unwrap(),
            Utf8PathBuf::from_path_buf(index_file_2.path().to_path_buf()).unwrap(),
        ];
        assert_eq!(expected_index, Index::merged_from_json5_files(&files).unwrap());
    }

    #[cfg(feature = "serde")]
    #[test]
    fn serialize_deserialize() -> Result<()> {
        let expected_index = {
            let mut index = Index::default();
            for i in 0..5 {
                let moniker: Moniker = [i.to_string().as_str()].try_into().unwrap();
                let instance_id = InstanceId::new_random(&mut rand::thread_rng());
                index.insert(moniker, instance_id).unwrap();
            }
            index
        };

        let json_index = serde_json5::to_string(&expected_index)?;
        let actual_index = serde_json5::from_str(&json_index)?;
        assert_eq!(expected_index, actual_index);

        Ok(())
    }
}