Mirror of https://github.com/martinvonz/jj.git, synced 2025-01-05 20:55:05 +00:00
Replace uses of content_hash! with #[derive(ContentHash)]
This is a pure refactor with no behavior changes. #3054
parent 65d45e0888
commit e8f324ffde
5 changed files with 127 additions and 260 deletions
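For orientation before the diff: the old `content_hash!` declarative macro wrapped a struct definition and emitted a field-by-field `ContentHash` impl, and the new `#[derive(ContentHash)]` generates the same kind of impl from a plain struct. The standalone sketch below illustrates that equivalence only; it uses a simplified stand-in trait (a plain byte buffer instead of jj's `DigestUpdate`/`digest::Update` state), a hand-expanded "derive", and made-up names (`OldStyle`, `NewStyle`) that are not part of the repository.

// Simplified stand-in for jj_lib's ContentHash trait: feed a stable byte
// representation of `self` into a running digest (here just a Vec<u8>).
trait ContentHash {
    fn hash(&self, state: &mut Vec<u8>);
}

impl ContentHash for i64 {
    fn hash(&self, state: &mut Vec<u8>) {
        state.extend_from_slice(&self.to_le_bytes());
    }
}

impl ContentHash for String {
    fn hash(&self, state: &mut Vec<u8>) {
        // Length prefix keeps the encoding unambiguous.
        state.extend_from_slice(&(self.len() as u64).to_le_bytes());
        state.extend_from_slice(self.as_bytes());
    }
}

// Before: the declarative macro both declared the struct and wrote the impl.
macro_rules! content_hash {
    ($vis:vis struct $name:ident { $($field_vis:vis $field:ident : $ty:ty),* $(,)? }) => {
        $vis struct $name { $($field_vis $field: $ty),* }

        impl ContentHash for $name {
            fn hash(&self, state: &mut Vec<u8>) {
                $(<$ty as ContentHash>::hash(&self.$field, state);)*
            }
        }
    };
}

content_hash! {
    pub struct OldStyle { pub name: String, pub tz_offset: i64 }
}

// After: the struct is a plain item, and the derive macro (hand-expanded here,
// since this sketch has no proc-macro crate) produces the same field-by-field impl.
pub struct NewStyle {
    pub name: String,
    pub tz_offset: i64,
}

impl ContentHash for NewStyle {
    fn hash(&self, state: &mut Vec<u8>) {
        ContentHash::hash(&self.name, state);
        ContentHash::hash(&self.tz_offset, state);
    }
}

fn main() {
    let (mut a, mut b) = (Vec::new(), Vec::new());
    OldStyle { name: "jj".into(), tz_offset: 60 }.hash(&mut a);
    NewStyle { name: "jj".into(), tz_offset: 60 }.hash(&mut b);
    assert_eq!(a, b); // same bytes, same content hash: a pure refactor
}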
@@ -25,7 +25,7 @@ use std::vec::Vec;
 use async_trait::async_trait;
 use thiserror::Error;
 
-use crate::content_hash::{ContentHash, DigestUpdate};
+use crate::content_hash::ContentHash;
 use crate::index::Index;
 use crate::merge::Merge;
 use crate::object_id::{id_type, ObjectId};
@@ -39,18 +39,14 @@ id_type!(pub FileId);
 id_type!(pub SymlinkId);
 id_type!(pub ConflictId);
 
-content_hash! {
-    #[derive(Debug, PartialEq, Eq, Clone, PartialOrd, Ord)]
-    pub struct MillisSinceEpoch(pub i64);
-}
+#[derive(ContentHash, Debug, PartialEq, Eq, Clone, PartialOrd, Ord)]
+pub struct MillisSinceEpoch(pub i64);
 
-content_hash! {
-    #[derive(Debug, PartialEq, Eq, Clone, PartialOrd, Ord)]
-    pub struct Timestamp {
-        pub timestamp: MillisSinceEpoch,
-        // time zone offset in minutes
-        pub tz_offset: i32,
-    }
+#[derive(ContentHash, Debug, PartialEq, Eq, Clone, PartialOrd, Ord)]
+pub struct Timestamp {
+    pub timestamp: MillisSinceEpoch,
+    // time zone offset in minutes
+    pub tz_offset: i32,
 }
 
 impl Timestamp {
@@ -68,21 +64,17 @@ impl Timestamp {
     }
 }
 
-content_hash! {
-    #[derive(Debug, PartialEq, Eq, Clone)]
-    pub struct Signature {
-        pub name: String,
-        pub email: String,
-        pub timestamp: Timestamp,
-    }
+#[derive(ContentHash, Debug, PartialEq, Eq, Clone)]
+pub struct Signature {
+    pub name: String,
+    pub email: String,
+    pub timestamp: Timestamp,
 }
 
-content_hash! {
-    #[derive(Debug, PartialEq, Eq, Clone)]
-    pub struct SecureSig {
-        pub data: Vec<u8>,
-        pub sig: Vec<u8>,
-    }
+#[derive(ContentHash, Debug, PartialEq, Eq, Clone)]
+pub struct SecureSig {
+    pub data: Vec<u8>,
+    pub sig: Vec<u8>,
 }
 
 pub type SigningFn<'a> = dyn FnMut(&[u8]) -> SignResult<Vec<u8>> + 'a;
@@ -92,7 +84,7 @@ pub type SigningFn<'a> = dyn FnMut(&[u8]) -> SignResult<Vec<u8>> + 'a;
 // TODO(#1624): Delete this type at some point in the future, when we decide to drop
 // support for conflicts in older repos, or maybe after we have provided an upgrade
 // mechanism.
-#[derive(Debug, Clone)]
+#[derive(ContentHash, Debug, Clone)]
 pub enum MergedTreeId {
     /// The tree id of a legacy tree
     Legacy(TreeId),
@@ -110,21 +102,6 @@ impl PartialEq for MergedTreeId {
 
 impl Eq for MergedTreeId {}
 
-impl ContentHash for MergedTreeId {
-    fn hash(&self, state: &mut impl DigestUpdate) {
-        match self {
-            MergedTreeId::Legacy(tree_id) => {
-                state.update(&0u32.to_le_bytes());
-                ContentHash::hash(tree_id, state);
-            }
-            MergedTreeId::Merge(tree_ids) => {
-                state.update(&1u32.to_le_bytes());
-                ContentHash::hash(tree_ids, state);
-            }
-        }
-    }
-}
-
 impl MergedTreeId {
     /// Create a resolved `MergedTreeId` from a single regular tree.
     pub fn resolved(tree_id: TreeId) -> Self {
@@ -140,37 +117,31 @@ impl MergedTreeId {
     }
 }
 
-content_hash! {
-    #[derive(Debug, PartialEq, Eq, Clone)]
-    pub struct Commit {
-        pub parents: Vec<CommitId>,
-        pub predecessors: Vec<CommitId>,
-        pub root_tree: MergedTreeId,
-        pub change_id: ChangeId,
-        pub description: String,
-        pub author: Signature,
-        pub committer: Signature,
-        pub secure_sig: Option<SecureSig>,
-    }
+#[derive(ContentHash, Debug, PartialEq, Eq, Clone)]
+pub struct Commit {
+    pub parents: Vec<CommitId>,
+    pub predecessors: Vec<CommitId>,
+    pub root_tree: MergedTreeId,
+    pub change_id: ChangeId,
+    pub description: String,
+    pub author: Signature,
+    pub committer: Signature,
+    pub secure_sig: Option<SecureSig>,
 }
 
-content_hash! {
-    #[derive(Debug, PartialEq, Eq, Clone)]
-    pub struct ConflictTerm {
-        pub value: TreeValue,
-    }
+#[derive(ContentHash, Debug, PartialEq, Eq, Clone)]
+pub struct ConflictTerm {
+    pub value: TreeValue,
 }
 
-content_hash! {
-    #[derive(Default, Debug, PartialEq, Eq, Clone)]
-    pub struct Conflict {
-        // A conflict is represented by a list of positive and negative states that need to be applied.
-        // In a simple 3-way merge of B and C with merge base A, the conflict will be { add: [B, C],
-        // remove: [A] }. Also note that a conflict of the form { add: [A], remove: [] } is the
-        // same as non-conflict A.
-        pub removes: Vec<ConflictTerm>,
-        pub adds: Vec<ConflictTerm>,
-    }
+#[derive(ContentHash, Default, Debug, PartialEq, Eq, Clone)]
+pub struct Conflict {
+    // A conflict is represented by a list of positive and negative states that need to be applied.
+    // In a simple 3-way merge of B and C with merge base A, the conflict will be { add: [B, C],
+    // remove: [A] }. Also note that a conflict of the form { add: [A], remove: [] } is the
+    // same as non-conflict A.
+    pub removes: Vec<ConflictTerm>,
+    pub adds: Vec<ConflictTerm>,
 }
 
 /// Error that may occur during backend initialization.
@@ -225,7 +196,7 @@ pub enum BackendError {
 
 pub type BackendResult<T> = Result<T, BackendError>;
 
-#[derive(Debug, PartialEq, Eq, Clone, Hash)]
+#[derive(ContentHash, Debug, PartialEq, Eq, Clone, Hash)]
 pub enum TreeValue {
     File { id: FileId, executable: bool },
     Symlink(SymlinkId),
@@ -246,35 +217,6 @@ impl TreeValue {
     }
 }
 
-impl ContentHash for TreeValue {
-    fn hash(&self, state: &mut impl DigestUpdate) {
-        use TreeValue::*;
-        match self {
-            File { id, executable } => {
-                state.update(&0u32.to_le_bytes());
-                id.hash(state);
-                executable.hash(state);
-            }
-            Symlink(id) => {
-                state.update(&1u32.to_le_bytes());
-                id.hash(state);
-            }
-            Tree(id) => {
-                state.update(&2u32.to_le_bytes());
-                id.hash(state);
-            }
-            GitSubmodule(id) => {
-                state.update(&3u32.to_le_bytes());
-                id.hash(state);
-            }
-            Conflict(id) => {
-                state.update(&4u32.to_le_bytes());
-                id.hash(state);
-            }
-        }
-    }
-}
-
 #[derive(Debug, PartialEq, Eq, Clone)]
 pub struct TreeEntry<'a> {
     name: &'a RepoPathComponent,
@@ -309,11 +251,9 @@ impl<'a> Iterator for TreeEntriesNonRecursiveIterator<'a> {
     }
 }
 
-content_hash! {
-    #[derive(Default, PartialEq, Eq, Debug, Clone)]
-    pub struct Tree {
-        entries: BTreeMap<RepoPathComponentBuf, TreeValue>,
-    }
+#[derive(ContentHash, Default, PartialEq, Eq, Debug, Clone)]
+pub struct Tree {
+    entries: BTreeMap<RepoPathComponentBuf, TreeValue>,
 }
 
 impl Tree {

@@ -145,33 +145,6 @@ where
     }
 }
 
-macro_rules! content_hash {
-    ($(#[$meta:meta])* $vis:vis struct $name:ident {
-        $($(#[$field_meta:meta])* $field_vis:vis $field:ident : $ty:ty),* $(,)?
-    }) => {
-        $(#[$meta])*
-        $vis struct $name {
-            $($(#[$field_meta])* $field_vis $field : $ty),*
-        }
-
-        impl crate::content_hash::ContentHash for $name {
-            fn hash(&self, state: &mut impl digest::Update) {
-                $(<$ty as crate::content_hash::ContentHash>::hash(&self.$field, state);)*
-            }
-        }
-    };
-    ($(#[$meta:meta])* $vis:vis struct $name:ident($field_vis:vis $ty:ty);) => {
-        $(#[$meta])*
-        $vis struct $name($field_vis $ty);
-
-        impl crate::content_hash::ContentHash for $name {
-            fn hash(&self, state: &mut impl digest::Update) {
-                <$ty as crate::content_hash::ContentHash>::hash(&self.0, state);
-            }
-        }
-    };
-}
-
 #[cfg(test)]
 mod tests {
     use std::collections::{BTreeMap, HashMap};
@@ -215,8 +188,9 @@ mod tests {
 
     #[test]
     fn test_struct_sanity() {
-        content_hash! {
-            struct Foo { x: i32 }
+        #[derive(ContentHash)]
+        struct Foo {
+            x: i32,
         }
         assert_ne!(hash(&Foo { x: 42 }), hash(&Foo { x: 12 }));
     }
@@ -237,8 +211,10 @@ mod tests {
 
     #[test]
     fn test_consistent_hashing() {
-        content_hash! {
-            struct Foo { x: Vec<Option<i32>>, y: i64 }
+        #[derive(ContentHash)]
+        struct Foo {
+            x: Vec<Option<i32>>,
+            y: i64,
         }
         let foo_hash = hex::encode(hash(&Foo {
             x: vec![None, Some(42)],
@@ -276,30 +252,6 @@ mod tests {
         assert_eq!(hash(&Option::<i32>::None), hash(&MyOption::<i32>::None));
         assert_eq!(hash(&Some(1)), hash(&MyOption::Some(1)));
     }
-    // This will be removed once all uses of content_hash! are replaced by the
-    // derive version.
-    #[test]
-    fn derive_is_equivalent_to_macro() {
-        content_hash! {
-            struct FooMacro { x: Vec<Option<i32>>, y: i64}
-        }
-
-        #[derive(ContentHash)]
-        struct FooDerive {
-            x: Vec<Option<i32>>,
-            y: i64,
-        }
-
-        let foo_macro = FooMacro {
-            x: vec![None, Some(42)],
-            y: 17,
-        };
-        let foo_derive = FooDerive {
-            x: vec![None, Some(42)],
-            y: 17,
-        };
-        assert_eq!(hash(&foo_macro), hash(&foo_derive));
-    }
 
     fn hash(x: &(impl ContentHash + ?Sized)) -> digest::Output<Blake2b512> {
         blake2b_hash(x)

@@ -23,10 +23,8 @@ pub trait ObjectId {
 
 macro_rules! id_type {
     ($vis:vis $name:ident) => {
-        content_hash! {
-            #[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Hash)]
-            $vis struct $name(Vec<u8>);
-        }
+        #[derive(ContentHash, PartialEq, Eq, PartialOrd, Ord, Clone, Hash)]
+        $vis struct $name(Vec<u8>);
         $crate::object_id::impl_id_type!($name);
     };
 }

@@ -25,14 +25,12 @@ use once_cell::sync::Lazy;
 use thiserror::Error;
 
 use crate::backend::{CommitId, MillisSinceEpoch, Timestamp};
-use crate::content_hash::{ContentHash, DigestUpdate};
+use crate::content_hash::ContentHash;
 use crate::merge::Merge;
 use crate::object_id::{id_type, HexPrefix, ObjectId, PrefixResolution};
 
-content_hash! {
-    #[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Hash)]
-    pub struct WorkspaceId(String);
-}
+#[derive(ContentHash, PartialEq, Eq, PartialOrd, Ord, Clone, Hash)]
+pub struct WorkspaceId(String);
 
 impl Debug for WorkspaceId {
     fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
@@ -59,11 +57,9 @@ impl WorkspaceId {
 id_type!(pub ViewId);
 id_type!(pub OperationId);
 
-content_hash! {
-    #[derive(PartialEq, Eq, Hash, Clone, Debug)]
-    pub struct RefTarget {
-        merge: Merge<Option<CommitId>>,
-    }
+#[derive(ContentHash, PartialEq, Eq, Hash, Clone, Debug)]
+pub struct RefTarget {
+    merge: Merge<Option<CommitId>>,
 }
 
 impl Default for RefTarget {
@@ -147,13 +143,11 @@ impl RefTarget {
     }
 }
 
-content_hash! {
-    /// Remote branch or tag.
-    #[derive(Clone, Debug, Eq, Hash, PartialEq)]
-    pub struct RemoteRef {
-        pub target: RefTarget,
-        pub state: RemoteRefState,
-    }
+/// Remote branch or tag.
+#[derive(ContentHash, Clone, Debug, Eq, Hash, PartialEq)]
+pub struct RemoteRef {
+    pub target: RefTarget,
+    pub state: RemoteRefState,
 }
 
 impl RemoteRef {
@@ -202,7 +196,7 @@ impl RemoteRef {
 }
 
 /// Whether the ref is tracked or not.
-#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
+#[derive(ContentHash, Clone, Copy, Debug, Eq, Hash, PartialEq)]
 pub enum RemoteRefState {
     /// Remote ref is not merged in to the local ref.
     New,
@@ -211,15 +205,6 @@ pub enum RemoteRefState {
     Tracking,
 }
 
-impl ContentHash for RemoteRefState {
-    fn hash(&self, state: &mut impl DigestUpdate) {
-        match self {
-            RemoteRefState::New => state.update(&0u32.to_le_bytes()),
-            RemoteRefState::Tracking => state.update(&1u32.to_le_bytes()),
-        }
-    }
-}
-
 /// Helper to strip redundant `Option<T>` from `RefTarget` lookup result.
 pub trait RefTargetOptionExt {
     type Value;
@@ -268,39 +253,35 @@ pub struct BranchTarget<'a> {
     pub remote_refs: Vec<(&'a str, &'a RemoteRef)>,
 }
 
-content_hash! {
-    /// Represents the way the repo looks at a given time, just like how a Tree
-    /// object represents how the file system looks at a given time.
-    #[derive(PartialEq, Eq, Clone, Debug, Default)]
-    pub struct View {
-        /// All head commits
-        pub head_ids: HashSet<CommitId>,
-        pub local_branches: BTreeMap<String, RefTarget>,
-        pub tags: BTreeMap<String, RefTarget>,
-        pub remote_views: BTreeMap<String, RemoteView>,
-        pub git_refs: BTreeMap<String, RefTarget>,
-        /// The commit the Git HEAD points to.
-        // TODO: Support multiple Git worktrees?
-        // TODO: Do we want to store the current branch name too?
-        pub git_head: RefTarget,
-        // The commit that *should be* checked out in the workspace. Note that the working copy
-        // (.jj/working_copy/) has the source of truth about which commit *is* checked out (to be
-        // precise: the commit to which we most recently completed an update to).
-        pub wc_commit_ids: HashMap<WorkspaceId, CommitId>,
-    }
+/// Represents the way the repo looks at a given time, just like how a Tree
+/// object represents how the file system looks at a given time.
+#[derive(ContentHash, PartialEq, Eq, Clone, Debug, Default)]
+pub struct View {
+    /// All head commits
+    pub head_ids: HashSet<CommitId>,
+    pub local_branches: BTreeMap<String, RefTarget>,
+    pub tags: BTreeMap<String, RefTarget>,
+    pub remote_views: BTreeMap<String, RemoteView>,
+    pub git_refs: BTreeMap<String, RefTarget>,
+    /// The commit the Git HEAD points to.
+    // TODO: Support multiple Git worktrees?
+    // TODO: Do we want to store the current branch name too?
+    pub git_head: RefTarget,
+    // The commit that *should be* checked out in the workspace. Note that the working copy
+    // (.jj/working_copy/) has the source of truth about which commit *is* checked out (to be
+    // precise: the commit to which we most recently completed an update to).
+    pub wc_commit_ids: HashMap<WorkspaceId, CommitId>,
 }
 
-content_hash! {
-    /// Represents the state of the remote repo.
-    #[derive(Clone, Debug, Default, Eq, PartialEq)]
-    pub struct RemoteView {
-        // TODO: Do we need to support tombstones for remote branches? For example, if the branch
-        // has been deleted locally and you pull from a remote, maybe it should make a difference
-        // whether the branch is known to have existed on the remote. We may not want to resurrect
-        // the branch if the branch's state on the remote was just not known.
-        pub branches: BTreeMap<String, RemoteRef>,
-        // TODO: pub tags: BTreeMap<String, RemoteRef>,
-    }
+/// Represents the state of the remote repo.
+#[derive(ContentHash, Clone, Debug, Default, Eq, PartialEq)]
+pub struct RemoteView {
+    // TODO: Do we need to support tombstones for remote branches? For example, if the branch
+    // has been deleted locally and you pull from a remote, maybe it should make a difference
+    // whether the branch is known to have existed on the remote. We may not want to resurrect
+    // the branch if the branch's state on the remote was just not known.
+    pub branches: BTreeMap<String, RemoteRef>,
+    // TODO: pub tags: BTreeMap<String, RemoteRef>,
 }
 
 /// Iterates pair of local and remote branches by branch name.
@@ -355,25 +336,23 @@ pub(crate) fn flatten_remote_branches(
         .kmerge_by(|(full_name1, _), (full_name2, _)| full_name1 < full_name2)
 }
 
-content_hash! {
-    /// Represents an operation (transaction) on the repo view, just like how a
-    /// Commit object represents an operation on the tree.
-    ///
-    /// Operations and views are not meant to be exchanged between repos or users;
-    /// they represent local state and history.
-    ///
-    /// The operation history will almost always be linear. It will only have
-    /// forks when parallel operations occurred. The parent is determined when
-    /// the transaction starts. When the transaction commits, a lock will be
-    /// taken and it will be checked that the current head of the operation
-    /// graph is unchanged. If the current head has changed, there has been
-    /// concurrent operation.
-    #[derive(PartialEq, Eq, Clone, Debug)]
-    pub struct Operation {
-        pub view_id: ViewId,
-        pub parents: Vec<OperationId>,
-        pub metadata: OperationMetadata,
-    }
+/// Represents an operation (transaction) on the repo view, just like how a
+/// Commit object represents an operation on the tree.
+///
+/// Operations and views are not meant to be exchanged between repos or users;
+/// they represent local state and history.
+///
+/// The operation history will almost always be linear. It will only have
+/// forks when parallel operations occurred. The parent is determined when
+/// the transaction starts. When the transaction commits, a lock will be
+/// taken and it will be checked that the current head of the operation
+/// graph is unchanged. If the current head has changed, there has been
+/// concurrent operation.
+#[derive(ContentHash, PartialEq, Eq, Clone, Debug)]
+pub struct Operation {
+    pub view_id: ViewId,
+    pub parents: Vec<OperationId>,
+    pub metadata: OperationMetadata,
 }
 
 impl Operation {
@@ -399,19 +378,18 @@ impl Operation {
     }
 }
 
-content_hash! {
-    #[derive(PartialEq, Eq, Clone, Debug)]
-    pub struct OperationMetadata {
-        pub start_time: Timestamp,
-        pub end_time: Timestamp,
-        // Whatever is useful to the user, such as exact command line call
-        pub description: String,
-        pub hostname: String,
-        pub username: String,
-        /// Whether this operation represents a pure snapshotting of the working copy.
-        pub is_snapshot: bool,
-        pub tags: HashMap<String, String>,
-    }
+#[derive(ContentHash, PartialEq, Eq, Clone, Debug)]
+pub struct OperationMetadata {
+    pub start_time: Timestamp,
+    pub end_time: Timestamp,
+    // Whatever is useful to the user, such as exact command line call
+    pub description: String,
+    pub hostname: String,
+    pub username: String,
+    /// Whether this operation represents a pure snapshotting of the working
+    /// copy.
+    pub is_snapshot: bool,
+    pub tags: HashMap<String, String>,
 }
 
 #[derive(Debug, Error)]

@@ -24,16 +24,15 @@ use std::path::{Component, Path, PathBuf};
 use ref_cast::{ref_cast_custom, RefCastCustom};
 use thiserror::Error;
 
+use crate::content_hash::ContentHash;
 use crate::file_util;
 
-content_hash! {
-    /// Owned `RepoPath` component.
-    #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
-    pub struct RepoPathComponentBuf {
-        // Don't add more fields. Eq, Hash, and Ord must be compatible with the
-        // borrowed RepoPathComponent type.
-        value: String,
-    }
+/// Owned `RepoPath` component.
+#[derive(ContentHash, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
+pub struct RepoPathComponentBuf {
+    // Don't add more fields. Eq, Hash, and Ord must be compatible with the
+    // borrowed RepoPathComponent type.
+    value: String,
 }
 
 /// Borrowed `RepoPath` component.
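One detail worth noting about the deleted manual impls above (for MergedTreeId, TreeValue, and RemoteRefState): each one hashes a little-endian u32 variant index before any variant payload. For the change to stay a pure refactor, the derive has to produce an equivalent expansion for enums. The following hand-written sketch shows that expected expansion for the fieldless RemoteRefState, again using a simplified stand-in trait rather than jj's real DigestUpdate; it mirrors, but is not copied from, the generated code.

// Simplified stand-in for jj_lib's ContentHash trait (the real one takes
// `&mut impl DigestUpdate`); used only to show the expected enum expansion.
trait ContentHash {
    fn hash(&self, state: &mut Vec<u8>);
}

// Mirror of the fieldless enum whose hand-written impl this commit deletes.
enum RemoteRefState {
    New,
    Tracking,
}

// What #[derive(ContentHash)] is expected to generate: hash a little-endian
// u32 variant index, exactly like the removed manual impl, so existing
// operation and view hashes stay the same.
impl ContentHash for RemoteRefState {
    fn hash(&self, state: &mut Vec<u8>) {
        match self {
            RemoteRefState::New => state.extend_from_slice(&0u32.to_le_bytes()),
            RemoteRefState::Tracking => state.extend_from_slice(&1u32.to_le_bytes()),
        }
    }
}

fn main() {
    let mut new_bytes = Vec::new();
    let mut tracking_bytes = Vec::new();
    RemoteRefState::New.hash(&mut new_bytes);
    RemoteRefState::Tracking.hash(&mut tracking_bytes);
    assert_eq!(new_bytes, [0, 0, 0, 0]);
    assert_eq!(tracking_bytes, [1, 0, 0, 0]);
}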