op_store: drop support for upgrading from Thrift implementation

Authored by Martin von Zweigbergk on 2023-03-29 11:13:28 -07:00; committed by Martin von Zweigbergk
parent b707a29f41
commit 68fb46b2af
9 changed files with 4 additions and 1385 deletions

Cargo.lock (generated, 27 lines changed)

@@ -730,12 +730,6 @@ dependencies = [
"cfg-if",
]
-[[package]]
-name = "integer-encoding"
-version = "3.0.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8bb03732005da905c88227371639bf1ad885cc712789c011c31c5fb3ab3ccf02"
[[package]]
name = "io-lifetimes"
version = "1.0.1"
@@ -864,7 +858,6 @@ dependencies = [
"test-case",
"testutils",
"thiserror",
"thrift",
"tracing",
"version_check",
"whoami",
@@ -1110,15 +1103,6 @@ dependencies = [
"vcpkg",
]
-[[package]]
-name = "ordered-float"
-version = "2.10.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7940cf2ca942593318d07fcf2596cdca60a85c9e7fab408a5e21a4f9dcd40d87"
-dependencies = [
-"num-traits",
-]
[[package]]
name = "os_str_bytes"
version = "6.3.0"
@@ -1856,17 +1840,6 @@ dependencies = [
"once_cell",
]
-[[package]]
-name = "thrift"
-version = "0.17.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7e54bc85fc7faa8bc175c4bab5b92ba8d9a3ce893d0e9f42cc455c8ab16a9e09"
-dependencies = [
-"byteorder",
-"integer-encoding",
-"ordered-float",
-]
[[package]]
name = "timeago"
version = "0.4.1"

Cargo.toml

@@ -73,6 +73,6 @@ regex = "1.7.3"
testutils = { path = "lib/testutils" }
[features]
default = ["jujutsu-lib/legacy-thrift"]
default = []
bench = ["criterion"]
vendored-openssl = ["git2/vendored-openssl", "jujutsu-lib/vendored-openssl"]

flake.nix

@@ -40,7 +40,7 @@
pname = "jujutsu";
version = "unstable-${self.shortRev or "dirty"}";
buildNoDefaultFeatures = true;
buildFeatures = [ "jujutsu-lib/legacy-thrift" ];
buildFeatures = [];
src = filterSrc ./. [
".*\\.nix$"
"^.jj/"

lib/Cargo.toml

@@ -42,7 +42,6 @@ regex = "1.7.3"
serde_json = "1.0.95"
tempfile = "3.5.0"
thiserror = "1.0.40"
-thrift = { version = "0.17.0", default-features = false, optional = true }
tracing = "0.1.37"
whoami = "1.4.0"
zstd = "0.12.3"
@@ -56,9 +55,5 @@ test-case = "3.0.0"
testutils = { path = "testutils" }
[features]
default = ["legacy-thrift"]
default = []
vendored-openssl = ["git2/vendored-openssl"]
-# Enable upgrade of repositories created with storage backends based on
-# Thrift format. Only repos accessed by an unreleased jj version in the
-# (0.5.1,0.6.0) range used Thrift.
-legacy-thrift = ["thrift"]

lib/src/legacy_thrift_op_store.rs (deleted)

@@ -1,210 +0,0 @@
// Copyright 2022 The Jujutsu Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::BTreeMap;
use std::fmt::Debug;
use std::fs::File;
use std::io::{ErrorKind, Read};
use std::path::PathBuf;
use itertools::Itertools;
use thrift::protocol::{TCompactInputProtocol, TSerializable};
use crate::backend::{CommitId, MillisSinceEpoch, ObjectId, Timestamp};
use crate::op_store::{
BranchTarget, OpStoreError, OpStoreResult, Operation, OperationId, OperationMetadata,
RefTarget, View, ViewId, WorkspaceId,
};
use crate::simple_op_store_model;
impl From<thrift::Error> for OpStoreError {
fn from(err: thrift::Error) -> Self {
OpStoreError::Other(err.to_string())
}
}
fn not_found_to_store_error(err: std::io::Error) -> OpStoreError {
if err.kind() == ErrorKind::NotFound {
OpStoreError::NotFound
} else {
OpStoreError::from(err)
}
}
#[derive(Debug)]
pub struct ThriftOpStore {
path: PathBuf,
}
impl ThriftOpStore {
pub fn load(store_path: PathBuf) -> Self {
ThriftOpStore { path: store_path }
}
fn view_path(&self, id: &ViewId) -> PathBuf {
self.path.join("views").join(id.hex())
}
fn operation_path(&self, id: &OperationId) -> PathBuf {
self.path.join("operations").join(id.hex())
}
pub fn read_view(&self, id: &ViewId) -> OpStoreResult<View> {
let path = self.view_path(id);
let mut file = File::open(path).map_err(not_found_to_store_error)?;
let thrift_view = read_thrift(&mut file)?;
Ok(View::from(&thrift_view))
}
pub fn read_operation(&self, id: &OperationId) -> OpStoreResult<Operation> {
let path = self.operation_path(id);
let mut file = File::open(path).map_err(not_found_to_store_error)?;
let thrift_operation = read_thrift(&mut file)?;
Ok(Operation::from(&thrift_operation))
}
}
pub fn read_thrift<T: TSerializable>(input: &mut impl Read) -> OpStoreResult<T> {
let mut protocol = TCompactInputProtocol::new(input);
Ok(TSerializable::read_from_in_protocol(&mut protocol).unwrap())
}
impl From<&simple_op_store_model::Timestamp> for Timestamp {
fn from(timestamp: &simple_op_store_model::Timestamp) -> Self {
Timestamp {
timestamp: MillisSinceEpoch(timestamp.millis_since_epoch),
tz_offset: timestamp.tz_offset,
}
}
}
impl From<&simple_op_store_model::OperationMetadata> for OperationMetadata {
fn from(metadata: &simple_op_store_model::OperationMetadata) -> Self {
let start_time = Timestamp::from(&metadata.start_time);
let end_time = Timestamp::from(&metadata.end_time);
let description = metadata.description.to_owned();
let hostname = metadata.hostname.to_owned();
let username = metadata.username.to_owned();
let tags = metadata
.tags
.iter()
.map(|(key, value)| (key.clone(), value.clone()))
.collect();
OperationMetadata {
start_time,
end_time,
description,
hostname,
username,
tags,
}
}
}
impl From<&simple_op_store_model::Operation> for Operation {
fn from(operation: &simple_op_store_model::Operation) -> Self {
let operation_id_from_thrift = |parent: &Vec<u8>| OperationId::new(parent.clone());
let parents = operation
.parents
.iter()
.map(operation_id_from_thrift)
.collect();
let view_id = ViewId::new(operation.view_id.clone());
let metadata = OperationMetadata::from(operation.metadata.as_ref());
Operation {
view_id,
parents,
metadata,
}
}
}
impl From<&simple_op_store_model::View> for View {
fn from(thrift_view: &simple_op_store_model::View) -> Self {
let mut view = View::default();
for (workspace_id, commit_id) in &thrift_view.wc_commit_ids {
view.wc_commit_ids.insert(
WorkspaceId::new(workspace_id.clone()),
CommitId::new(commit_id.clone()),
);
}
for head_id_bytes in &thrift_view.head_ids {
view.head_ids.insert(CommitId::from_bytes(head_id_bytes));
}
for head_id_bytes in &thrift_view.public_head_ids {
view.public_head_ids
.insert(CommitId::from_bytes(head_id_bytes));
}
for thrift_branch in &thrift_view.branches {
let local_target = thrift_branch.local_target.as_ref().map(RefTarget::from);
let mut remote_targets = BTreeMap::new();
for remote_branch in &thrift_branch.remote_branches {
remote_targets.insert(
remote_branch.remote_name.clone(),
RefTarget::from(&remote_branch.target),
);
}
view.branches.insert(
thrift_branch.name.clone(),
BranchTarget {
local_target,
remote_targets,
},
);
}
for thrift_tag in &thrift_view.tags {
view.tags
.insert(thrift_tag.name.clone(), RefTarget::from(&thrift_tag.target));
}
for git_ref in &thrift_view.git_refs {
view.git_refs
.insert(git_ref.name.clone(), RefTarget::from(&git_ref.target));
}
view.git_head = thrift_view
.git_head
.as_ref()
.map(|head| RefTarget::Normal(CommitId::new(head.clone())));
view
}
}
impl From<&simple_op_store_model::RefTarget> for RefTarget {
fn from(thrift_ref_target: &simple_op_store_model::RefTarget) -> Self {
match thrift_ref_target {
simple_op_store_model::RefTarget::CommitId(commit_id) => {
RefTarget::Normal(CommitId::from_bytes(commit_id))
}
simple_op_store_model::RefTarget::Conflict(conflict) => {
let removes = conflict
.removes
.iter()
.map(|id_bytes| CommitId::from_bytes(id_bytes))
.collect_vec();
let adds = conflict
.adds
.iter()
.map(|id_bytes| CommitId::from_bytes(id_bytes))
.collect_vec();
RefTarget::Conflict { removes, adds }
}
}
}
}

lib/src/lib.rs

@@ -33,8 +33,6 @@ pub mod git_backend;
pub mod gitignore;
pub mod hex_util;
pub mod index;
-#[cfg(feature = "legacy-thrift")]
-mod legacy_thrift_op_store;
pub mod local_backend;
pub mod lock;
pub mod matchers;
@@ -52,8 +50,6 @@ pub mod rewrite;
pub mod settings;
pub mod simple_op_heads_store;
pub mod simple_op_store;
-#[cfg(feature = "legacy-thrift")]
-mod simple_op_store_model;
pub mod stacked_table;
pub mod store;
pub mod transaction;

lib/src/simple_op_store.rs

@@ -32,89 +32,12 @@ impl From<PersistError> for OpStoreError {
}
}
-// TODO: In version 0.7.0 or so, inline ProtoOpStore into this type and drop
-// support for upgrading from the thrift format
+// TODO: In version 0.7.0 or so, inline ProtoOpStore into this type
#[derive(Debug)]
pub struct SimpleOpStore {
delegate: ProtoOpStore,
}
-#[cfg(feature = "legacy-thrift")]
-fn upgrade_from_thrift(store_path: &Path) -> std::io::Result<()> {
-use std::collections::{HashMap, HashSet};
-use std::fs;
-use itertools::Itertools;
-use crate::legacy_thrift_op_store::ThriftOpStore;
-println!("Upgrading operation log to Protobuf format...");
-let repo_path = store_path.parent().unwrap();
-let old_store = ThriftOpStore::load(store_path.to_path_buf());
-let tmp_store_dir = tempfile::Builder::new()
-.prefix("jj-op-store-upgrade-")
-.tempdir_in(repo_path)
-.unwrap();
-let tmp_store_path = tmp_store_dir.path().to_path_buf();
-// Find the current operation head(s) of the operation log
-let op_heads_store_path = repo_path.join("op_heads");
-let mut old_op_heads = HashSet::new();
-for entry in fs::read_dir(op_heads_store_path)? {
-let basename = entry?.file_name();
-let op_id_str = basename.to_str().unwrap();
-if let Ok(op_id_bytes) = hex::decode(op_id_str) {
-old_op_heads.insert(OperationId::new(op_id_bytes));
-}
-}
-// Do a DFS to rewrite the operations
-let new_store = ProtoOpStore::init(tmp_store_path.clone());
-let mut converted: HashMap<OperationId, OperationId> = HashMap::new();
-// The DFS stack
-let mut to_convert = old_op_heads
-.iter()
-.map(|op_id| (op_id.clone(), old_store.read_operation(op_id).unwrap()))
-.collect_vec();
-while !to_convert.is_empty() {
-let (_, op) = to_convert.last().unwrap();
-let mut new_parent_ids: Vec<OperationId> = vec![];
-let mut new_to_convert = vec![];
-// Check which parents are already converted and which ones we need to rewrite
-// first
-for parent_id in &op.parents {
-if let Some(new_parent_id) = converted.get(parent_id) {
-new_parent_ids.push(new_parent_id.clone());
-} else {
-let parent_op = old_store.read_operation(parent_id).unwrap();
-new_to_convert.push((parent_id.clone(), parent_op));
-}
-}
-if new_to_convert.is_empty() {
-// If all parents have already been converted, remove this operation from the
-// stack and convert it
-let (old_op_id, mut old_op) = to_convert.pop().unwrap();
-old_op.parents = new_parent_ids;
-let old_view = old_store.read_view(&old_op.view_id).unwrap();
-let new_view_id = new_store.write_view(&old_view).unwrap();
-old_op.view_id = new_view_id;
-let new_op_id = new_store.write_operation(&old_op).unwrap();
-converted.insert(old_op_id, new_op_id);
-} else {
-to_convert.extend(new_to_convert);
-}
-}
-let backup_store_path = repo_path.join("op_store_old");
-// Delete existing backup (probably from an earlier upgrade to Thrift)
-fs::remove_dir_all(&backup_store_path).ok();
-fs::rename(store_path, backup_store_path)?;
-fs::rename(&tmp_store_path, store_path)?;
-println!("Upgrade complete");
-Ok(())
-}
impl SimpleOpStore {
pub fn init(store_path: &Path) -> Self {
let delegate = ProtoOpStore::init(store_path.to_path_buf());
@@ -122,11 +45,6 @@ impl SimpleOpStore {
}
pub fn load(store_path: &Path) -> Self {
-#[cfg(feature = "legacy-thrift")]
-if store_path.join("thrift_store").exists() {
-upgrade_from_thrift(store_path)
-.expect("Failed to upgrade operation log to Protobuf format");
-}
let delegate = ProtoOpStore::load(store_path.to_path_buf());
SimpleOpStore { delegate }
}

lib/src/simple_op_store_model.rs (generated, deleted)

@@ -1,972 +0,0 @@
// Autogenerated by Thrift Compiler (0.17.0)
// DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#![allow(unused_imports)]
#![allow(unused_extern_crates)]
#![allow(clippy::too_many_arguments, clippy::type_complexity, clippy::vec_box)]
#![cfg_attr(rustfmt, rustfmt_skip)]
use std::cell::RefCell;
use std::collections::{BTreeMap, BTreeSet};
use std::convert::{From, TryFrom};
use std::default::Default;
use std::error::Error;
use std::fmt;
use std::fmt::{Display, Formatter};
use std::rc::Rc;
use thrift::OrderedFloat;
use thrift::{ApplicationError, ApplicationErrorKind, ProtocolError, ProtocolErrorKind, TThriftClient};
use thrift::protocol::{TFieldIdentifier, TListIdentifier, TMapIdentifier, TMessageIdentifier, TMessageType, TInputProtocol, TOutputProtocol, TSerializable, TSetIdentifier, TStructIdentifier, TType};
use thrift::protocol::field_id;
use thrift::protocol::verify_expected_message_type;
use thrift::protocol::verify_expected_sequence_number;
use thrift::protocol::verify_expected_service_call;
use thrift::protocol::verify_required_field_exists;
//
// RefConflict
//
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct RefConflict {
pub removes: Vec<Vec<u8>>,
pub adds: Vec<Vec<u8>>,
}
impl RefConflict {
pub fn new(removes: Vec<Vec<u8>>, adds: Vec<Vec<u8>>) -> RefConflict {
RefConflict {
removes,
adds,
}
}
}
impl TSerializable for RefConflict {
fn read_from_in_protocol(i_prot: &mut dyn TInputProtocol) -> thrift::Result<RefConflict> {
i_prot.read_struct_begin()?;
let mut f_1: Option<Vec<Vec<u8>>> = None;
let mut f_2: Option<Vec<Vec<u8>>> = None;
loop {
let field_ident = i_prot.read_field_begin()?;
if field_ident.field_type == TType::Stop {
break;
}
let field_id = field_id(&field_ident)?;
match field_id {
1 => {
let list_ident = i_prot.read_list_begin()?;
let mut val: Vec<Vec<u8>> = Vec::with_capacity(list_ident.size as usize);
for _ in 0..list_ident.size {
let list_elem_0 = i_prot.read_bytes()?;
val.push(list_elem_0);
}
i_prot.read_list_end()?;
f_1 = Some(val);
},
2 => {
let list_ident = i_prot.read_list_begin()?;
let mut val: Vec<Vec<u8>> = Vec::with_capacity(list_ident.size as usize);
for _ in 0..list_ident.size {
let list_elem_1 = i_prot.read_bytes()?;
val.push(list_elem_1);
}
i_prot.read_list_end()?;
f_2 = Some(val);
},
_ => {
i_prot.skip(field_ident.field_type)?;
},
};
i_prot.read_field_end()?;
}
i_prot.read_struct_end()?;
verify_required_field_exists("RefConflict.removes", &f_1)?;
verify_required_field_exists("RefConflict.adds", &f_2)?;
let ret = RefConflict {
removes: f_1.expect("auto-generated code should have checked for presence of required fields"),
adds: f_2.expect("auto-generated code should have checked for presence of required fields"),
};
Ok(ret)
}
fn write_to_out_protocol(&self, o_prot: &mut dyn TOutputProtocol) -> thrift::Result<()> {
let struct_ident = TStructIdentifier::new("RefConflict");
o_prot.write_struct_begin(&struct_ident)?;
o_prot.write_field_begin(&TFieldIdentifier::new("removes", TType::List, 1))?;
o_prot.write_list_begin(&TListIdentifier::new(TType::String, self.removes.len() as i32))?;
for e in &self.removes {
o_prot.write_bytes(e)?;
}
o_prot.write_list_end()?;
o_prot.write_field_end()?;
o_prot.write_field_begin(&TFieldIdentifier::new("adds", TType::List, 2))?;
o_prot.write_list_begin(&TListIdentifier::new(TType::String, self.adds.len() as i32))?;
for e in &self.adds {
o_prot.write_bytes(e)?;
}
o_prot.write_list_end()?;
o_prot.write_field_end()?;
o_prot.write_field_stop()?;
o_prot.write_struct_end()
}
}
//
// RefTarget
//
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum RefTarget {
CommitId(Vec<u8>),
Conflict(RefConflict),
}
impl TSerializable for RefTarget {
fn read_from_in_protocol(i_prot: &mut dyn TInputProtocol) -> thrift::Result<RefTarget> {
let mut ret: Option<RefTarget> = None;
let mut received_field_count = 0;
i_prot.read_struct_begin()?;
loop {
let field_ident = i_prot.read_field_begin()?;
if field_ident.field_type == TType::Stop {
break;
}
let field_id = field_id(&field_ident)?;
match field_id {
1 => {
let val = i_prot.read_bytes()?;
if ret.is_none() {
ret = Some(RefTarget::CommitId(val));
}
received_field_count += 1;
},
2 => {
let val = RefConflict::read_from_in_protocol(i_prot)?;
if ret.is_none() {
ret = Some(RefTarget::Conflict(val));
}
received_field_count += 1;
},
_ => {
i_prot.skip(field_ident.field_type)?;
received_field_count += 1;
},
};
i_prot.read_field_end()?;
}
i_prot.read_struct_end()?;
if received_field_count == 0 {
Err(
thrift::Error::Protocol(
ProtocolError::new(
ProtocolErrorKind::InvalidData,
"received empty union from remote RefTarget"
)
)
)
} else if received_field_count > 1 {
Err(
thrift::Error::Protocol(
ProtocolError::new(
ProtocolErrorKind::InvalidData,
"received multiple fields for union from remote RefTarget"
)
)
)
} else {
Ok(ret.expect("return value should have been constructed"))
}
}
fn write_to_out_protocol(&self, o_prot: &mut dyn TOutputProtocol) -> thrift::Result<()> {
let struct_ident = TStructIdentifier::new("RefTarget");
o_prot.write_struct_begin(&struct_ident)?;
match self {
RefTarget::CommitId(f) => {
o_prot.write_field_begin(&TFieldIdentifier::new("commit_id", TType::String, 1))?;
o_prot.write_bytes(f)?;
o_prot.write_field_end()?;
},
RefTarget::Conflict(f) => {
o_prot.write_field_begin(&TFieldIdentifier::new("conflict", TType::Struct, 2))?;
f.write_to_out_protocol(o_prot)?;
o_prot.write_field_end()?;
},
}
o_prot.write_field_stop()?;
o_prot.write_struct_end()
}
}
//
// RemoteBranch
//
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct RemoteBranch {
pub remote_name: String,
pub target: RefTarget,
}
impl RemoteBranch {
pub fn new(remote_name: String, target: RefTarget) -> RemoteBranch {
RemoteBranch {
remote_name,
target,
}
}
}
impl TSerializable for RemoteBranch {
fn read_from_in_protocol(i_prot: &mut dyn TInputProtocol) -> thrift::Result<RemoteBranch> {
i_prot.read_struct_begin()?;
let mut f_1: Option<String> = None;
let mut f_2: Option<RefTarget> = None;
loop {
let field_ident = i_prot.read_field_begin()?;
if field_ident.field_type == TType::Stop {
break;
}
let field_id = field_id(&field_ident)?;
match field_id {
1 => {
let val = i_prot.read_string()?;
f_1 = Some(val);
},
2 => {
let val = RefTarget::read_from_in_protocol(i_prot)?;
f_2 = Some(val);
},
_ => {
i_prot.skip(field_ident.field_type)?;
},
};
i_prot.read_field_end()?;
}
i_prot.read_struct_end()?;
verify_required_field_exists("RemoteBranch.remote_name", &f_1)?;
verify_required_field_exists("RemoteBranch.target", &f_2)?;
let ret = RemoteBranch {
remote_name: f_1.expect("auto-generated code should have checked for presence of required fields"),
target: f_2.expect("auto-generated code should have checked for presence of required fields"),
};
Ok(ret)
}
fn write_to_out_protocol(&self, o_prot: &mut dyn TOutputProtocol) -> thrift::Result<()> {
let struct_ident = TStructIdentifier::new("RemoteBranch");
o_prot.write_struct_begin(&struct_ident)?;
o_prot.write_field_begin(&TFieldIdentifier::new("remote_name", TType::String, 1))?;
o_prot.write_string(&self.remote_name)?;
o_prot.write_field_end()?;
o_prot.write_field_begin(&TFieldIdentifier::new("target", TType::Struct, 2))?;
self.target.write_to_out_protocol(o_prot)?;
o_prot.write_field_end()?;
o_prot.write_field_stop()?;
o_prot.write_struct_end()
}
}
//
// Branch
//
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct Branch {
pub name: String,
pub local_target: Option<RefTarget>,
pub remote_branches: Vec<RemoteBranch>,
}
impl Branch {
pub fn new<F2>(name: String, local_target: F2, remote_branches: Vec<RemoteBranch>) -> Branch where F2: Into<Option<RefTarget>> {
Branch {
name,
local_target: local_target.into(),
remote_branches,
}
}
}
impl TSerializable for Branch {
fn read_from_in_protocol(i_prot: &mut dyn TInputProtocol) -> thrift::Result<Branch> {
i_prot.read_struct_begin()?;
let mut f_1: Option<String> = None;
let mut f_2: Option<RefTarget> = None;
let mut f_3: Option<Vec<RemoteBranch>> = None;
loop {
let field_ident = i_prot.read_field_begin()?;
if field_ident.field_type == TType::Stop {
break;
}
let field_id = field_id(&field_ident)?;
match field_id {
1 => {
let val = i_prot.read_string()?;
f_1 = Some(val);
},
2 => {
let val = RefTarget::read_from_in_protocol(i_prot)?;
f_2 = Some(val);
},
3 => {
let list_ident = i_prot.read_list_begin()?;
let mut val: Vec<RemoteBranch> = Vec::with_capacity(list_ident.size as usize);
for _ in 0..list_ident.size {
let list_elem_2 = RemoteBranch::read_from_in_protocol(i_prot)?;
val.push(list_elem_2);
}
i_prot.read_list_end()?;
f_3 = Some(val);
},
_ => {
i_prot.skip(field_ident.field_type)?;
},
};
i_prot.read_field_end()?;
}
i_prot.read_struct_end()?;
verify_required_field_exists("Branch.name", &f_1)?;
verify_required_field_exists("Branch.remote_branches", &f_3)?;
let ret = Branch {
name: f_1.expect("auto-generated code should have checked for presence of required fields"),
local_target: f_2,
remote_branches: f_3.expect("auto-generated code should have checked for presence of required fields"),
};
Ok(ret)
}
fn write_to_out_protocol(&self, o_prot: &mut dyn TOutputProtocol) -> thrift::Result<()> {
let struct_ident = TStructIdentifier::new("Branch");
o_prot.write_struct_begin(&struct_ident)?;
o_prot.write_field_begin(&TFieldIdentifier::new("name", TType::String, 1))?;
o_prot.write_string(&self.name)?;
o_prot.write_field_end()?;
if let Some(ref fld_var) = self.local_target {
o_prot.write_field_begin(&TFieldIdentifier::new("local_target", TType::Struct, 2))?;
fld_var.write_to_out_protocol(o_prot)?;
o_prot.write_field_end()?
}
o_prot.write_field_begin(&TFieldIdentifier::new("remote_branches", TType::List, 3))?;
o_prot.write_list_begin(&TListIdentifier::new(TType::Struct, self.remote_branches.len() as i32))?;
for e in &self.remote_branches {
e.write_to_out_protocol(o_prot)?;
}
o_prot.write_list_end()?;
o_prot.write_field_end()?;
o_prot.write_field_stop()?;
o_prot.write_struct_end()
}
}
//
// GitRef
//
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct GitRef {
pub name: String,
pub target: RefTarget,
}
impl GitRef {
pub fn new(name: String, target: RefTarget) -> GitRef {
GitRef {
name,
target,
}
}
}
impl TSerializable for GitRef {
fn read_from_in_protocol(i_prot: &mut dyn TInputProtocol) -> thrift::Result<GitRef> {
i_prot.read_struct_begin()?;
let mut f_1: Option<String> = None;
let mut f_2: Option<RefTarget> = None;
loop {
let field_ident = i_prot.read_field_begin()?;
if field_ident.field_type == TType::Stop {
break;
}
let field_id = field_id(&field_ident)?;
match field_id {
1 => {
let val = i_prot.read_string()?;
f_1 = Some(val);
},
2 => {
let val = RefTarget::read_from_in_protocol(i_prot)?;
f_2 = Some(val);
},
_ => {
i_prot.skip(field_ident.field_type)?;
},
};
i_prot.read_field_end()?;
}
i_prot.read_struct_end()?;
verify_required_field_exists("GitRef.name", &f_1)?;
verify_required_field_exists("GitRef.target", &f_2)?;
let ret = GitRef {
name: f_1.expect("auto-generated code should have checked for presence of required fields"),
target: f_2.expect("auto-generated code should have checked for presence of required fields"),
};
Ok(ret)
}
fn write_to_out_protocol(&self, o_prot: &mut dyn TOutputProtocol) -> thrift::Result<()> {
let struct_ident = TStructIdentifier::new("GitRef");
o_prot.write_struct_begin(&struct_ident)?;
o_prot.write_field_begin(&TFieldIdentifier::new("name", TType::String, 1))?;
o_prot.write_string(&self.name)?;
o_prot.write_field_end()?;
o_prot.write_field_begin(&TFieldIdentifier::new("target", TType::Struct, 2))?;
self.target.write_to_out_protocol(o_prot)?;
o_prot.write_field_end()?;
o_prot.write_field_stop()?;
o_prot.write_struct_end()
}
}
//
// Tag
//
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct Tag {
pub name: String,
pub target: RefTarget,
}
impl Tag {
pub fn new(name: String, target: RefTarget) -> Tag {
Tag {
name,
target,
}
}
}
impl TSerializable for Tag {
fn read_from_in_protocol(i_prot: &mut dyn TInputProtocol) -> thrift::Result<Tag> {
i_prot.read_struct_begin()?;
let mut f_1: Option<String> = None;
let mut f_2: Option<RefTarget> = None;
loop {
let field_ident = i_prot.read_field_begin()?;
if field_ident.field_type == TType::Stop {
break;
}
let field_id = field_id(&field_ident)?;
match field_id {
1 => {
let val = i_prot.read_string()?;
f_1 = Some(val);
},
2 => {
let val = RefTarget::read_from_in_protocol(i_prot)?;
f_2 = Some(val);
},
_ => {
i_prot.skip(field_ident.field_type)?;
},
};
i_prot.read_field_end()?;
}
i_prot.read_struct_end()?;
verify_required_field_exists("Tag.name", &f_1)?;
verify_required_field_exists("Tag.target", &f_2)?;
let ret = Tag {
name: f_1.expect("auto-generated code should have checked for presence of required fields"),
target: f_2.expect("auto-generated code should have checked for presence of required fields"),
};
Ok(ret)
}
fn write_to_out_protocol(&self, o_prot: &mut dyn TOutputProtocol) -> thrift::Result<()> {
let struct_ident = TStructIdentifier::new("Tag");
o_prot.write_struct_begin(&struct_ident)?;
o_prot.write_field_begin(&TFieldIdentifier::new("name", TType::String, 1))?;
o_prot.write_string(&self.name)?;
o_prot.write_field_end()?;
o_prot.write_field_begin(&TFieldIdentifier::new("target", TType::Struct, 2))?;
self.target.write_to_out_protocol(o_prot)?;
o_prot.write_field_end()?;
o_prot.write_field_stop()?;
o_prot.write_struct_end()
}
}
//
// View
//
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct View {
pub head_ids: Vec<Vec<u8>>,
pub public_head_ids: Vec<Vec<u8>>,
pub wc_commit_ids: BTreeMap<String, Vec<u8>>,
pub branches: Vec<Branch>,
pub tags: Vec<Tag>,
pub git_refs: Vec<GitRef>,
pub git_head: Option<Vec<u8>>,
}
impl View {
pub fn new<F7>(head_ids: Vec<Vec<u8>>, public_head_ids: Vec<Vec<u8>>, wc_commit_ids: BTreeMap<String, Vec<u8>>, branches: Vec<Branch>, tags: Vec<Tag>, git_refs: Vec<GitRef>, git_head: F7) -> View where F7: Into<Option<Vec<u8>>> {
View {
head_ids,
public_head_ids,
wc_commit_ids,
branches,
tags,
git_refs,
git_head: git_head.into(),
}
}
}
impl TSerializable for View {
fn read_from_in_protocol(i_prot: &mut dyn TInputProtocol) -> thrift::Result<View> {
i_prot.read_struct_begin()?;
let mut f_1: Option<Vec<Vec<u8>>> = None;
let mut f_2: Option<Vec<Vec<u8>>> = None;
let mut f_3: Option<BTreeMap<String, Vec<u8>>> = None;
let mut f_4: Option<Vec<Branch>> = None;
let mut f_5: Option<Vec<Tag>> = None;
let mut f_6: Option<Vec<GitRef>> = None;
let mut f_7: Option<Vec<u8>> = None;
loop {
let field_ident = i_prot.read_field_begin()?;
if field_ident.field_type == TType::Stop {
break;
}
let field_id = field_id(&field_ident)?;
match field_id {
1 => {
let list_ident = i_prot.read_list_begin()?;
let mut val: Vec<Vec<u8>> = Vec::with_capacity(list_ident.size as usize);
for _ in 0..list_ident.size {
let list_elem_3 = i_prot.read_bytes()?;
val.push(list_elem_3);
}
i_prot.read_list_end()?;
f_1 = Some(val);
},
2 => {
let list_ident = i_prot.read_list_begin()?;
let mut val: Vec<Vec<u8>> = Vec::with_capacity(list_ident.size as usize);
for _ in 0..list_ident.size {
let list_elem_4 = i_prot.read_bytes()?;
val.push(list_elem_4);
}
i_prot.read_list_end()?;
f_2 = Some(val);
},
3 => {
let map_ident = i_prot.read_map_begin()?;
let mut val: BTreeMap<String, Vec<u8>> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_5 = i_prot.read_string()?;
let map_val_6 = i_prot.read_bytes()?;
val.insert(map_key_5, map_val_6);
}
i_prot.read_map_end()?;
f_3 = Some(val);
},
4 => {
let list_ident = i_prot.read_list_begin()?;
let mut val: Vec<Branch> = Vec::with_capacity(list_ident.size as usize);
for _ in 0..list_ident.size {
let list_elem_7 = Branch::read_from_in_protocol(i_prot)?;
val.push(list_elem_7);
}
i_prot.read_list_end()?;
f_4 = Some(val);
},
5 => {
let list_ident = i_prot.read_list_begin()?;
let mut val: Vec<Tag> = Vec::with_capacity(list_ident.size as usize);
for _ in 0..list_ident.size {
let list_elem_8 = Tag::read_from_in_protocol(i_prot)?;
val.push(list_elem_8);
}
i_prot.read_list_end()?;
f_5 = Some(val);
},
6 => {
let list_ident = i_prot.read_list_begin()?;
let mut val: Vec<GitRef> = Vec::with_capacity(list_ident.size as usize);
for _ in 0..list_ident.size {
let list_elem_9 = GitRef::read_from_in_protocol(i_prot)?;
val.push(list_elem_9);
}
i_prot.read_list_end()?;
f_6 = Some(val);
},
7 => {
let val = i_prot.read_bytes()?;
f_7 = Some(val);
},
_ => {
i_prot.skip(field_ident.field_type)?;
},
};
i_prot.read_field_end()?;
}
i_prot.read_struct_end()?;
verify_required_field_exists("View.head_ids", &f_1)?;
verify_required_field_exists("View.public_head_ids", &f_2)?;
verify_required_field_exists("View.wc_commit_ids", &f_3)?;
verify_required_field_exists("View.branches", &f_4)?;
verify_required_field_exists("View.tags", &f_5)?;
verify_required_field_exists("View.git_refs", &f_6)?;
let ret = View {
head_ids: f_1.expect("auto-generated code should have checked for presence of required fields"),
public_head_ids: f_2.expect("auto-generated code should have checked for presence of required fields"),
wc_commit_ids: f_3.expect("auto-generated code should have checked for presence of required fields"),
branches: f_4.expect("auto-generated code should have checked for presence of required fields"),
tags: f_5.expect("auto-generated code should have checked for presence of required fields"),
git_refs: f_6.expect("auto-generated code should have checked for presence of required fields"),
git_head: f_7,
};
Ok(ret)
}
fn write_to_out_protocol(&self, o_prot: &mut dyn TOutputProtocol) -> thrift::Result<()> {
let struct_ident = TStructIdentifier::new("View");
o_prot.write_struct_begin(&struct_ident)?;
o_prot.write_field_begin(&TFieldIdentifier::new("head_ids", TType::List, 1))?;
o_prot.write_list_begin(&TListIdentifier::new(TType::String, self.head_ids.len() as i32))?;
for e in &self.head_ids {
o_prot.write_bytes(e)?;
}
o_prot.write_list_end()?;
o_prot.write_field_end()?;
o_prot.write_field_begin(&TFieldIdentifier::new("public_head_ids", TType::List, 2))?;
o_prot.write_list_begin(&TListIdentifier::new(TType::String, self.public_head_ids.len() as i32))?;
for e in &self.public_head_ids {
o_prot.write_bytes(e)?;
}
o_prot.write_list_end()?;
o_prot.write_field_end()?;
o_prot.write_field_begin(&TFieldIdentifier::new("wc_commit_ids", TType::Map, 3))?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::String, TType::String, self.wc_commit_ids.len() as i32))?;
for (k, v) in &self.wc_commit_ids {
o_prot.write_string(k)?;
o_prot.write_bytes(v)?;
}
o_prot.write_map_end()?;
o_prot.write_field_end()?;
o_prot.write_field_begin(&TFieldIdentifier::new("branches", TType::List, 4))?;
o_prot.write_list_begin(&TListIdentifier::new(TType::Struct, self.branches.len() as i32))?;
for e in &self.branches {
e.write_to_out_protocol(o_prot)?;
}
o_prot.write_list_end()?;
o_prot.write_field_end()?;
o_prot.write_field_begin(&TFieldIdentifier::new("tags", TType::List, 5))?;
o_prot.write_list_begin(&TListIdentifier::new(TType::Struct, self.tags.len() as i32))?;
for e in &self.tags {
e.write_to_out_protocol(o_prot)?;
}
o_prot.write_list_end()?;
o_prot.write_field_end()?;
o_prot.write_field_begin(&TFieldIdentifier::new("git_refs", TType::List, 6))?;
o_prot.write_list_begin(&TListIdentifier::new(TType::Struct, self.git_refs.len() as i32))?;
for e in &self.git_refs {
e.write_to_out_protocol(o_prot)?;
}
o_prot.write_list_end()?;
o_prot.write_field_end()?;
if let Some(ref fld_var) = self.git_head {
o_prot.write_field_begin(&TFieldIdentifier::new("git_head", TType::String, 7))?;
o_prot.write_bytes(fld_var)?;
o_prot.write_field_end()?
}
o_prot.write_field_stop()?;
o_prot.write_struct_end()
}
}
//
// Operation
//
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct Operation {
pub view_id: Vec<u8>,
pub parents: Vec<Vec<u8>>,
pub metadata: Box<OperationMetadata>,
}
impl Operation {
pub fn new(view_id: Vec<u8>, parents: Vec<Vec<u8>>, metadata: Box<OperationMetadata>) -> Operation {
Operation {
view_id,
parents,
metadata,
}
}
}
impl TSerializable for Operation {
fn read_from_in_protocol(i_prot: &mut dyn TInputProtocol) -> thrift::Result<Operation> {
i_prot.read_struct_begin()?;
let mut f_1: Option<Vec<u8>> = None;
let mut f_2: Option<Vec<Vec<u8>>> = None;
let mut f_3: Option<Box<OperationMetadata>> = None;
loop {
let field_ident = i_prot.read_field_begin()?;
if field_ident.field_type == TType::Stop {
break;
}
let field_id = field_id(&field_ident)?;
match field_id {
1 => {
let val = i_prot.read_bytes()?;
f_1 = Some(val);
},
2 => {
let list_ident = i_prot.read_list_begin()?;
let mut val: Vec<Vec<u8>> = Vec::with_capacity(list_ident.size as usize);
for _ in 0..list_ident.size {
let list_elem_10 = i_prot.read_bytes()?;
val.push(list_elem_10);
}
i_prot.read_list_end()?;
f_2 = Some(val);
},
3 => {
let val = Box::new(OperationMetadata::read_from_in_protocol(i_prot)?);
f_3 = Some(val);
},
_ => {
i_prot.skip(field_ident.field_type)?;
},
};
i_prot.read_field_end()?;
}
i_prot.read_struct_end()?;
verify_required_field_exists("Operation.view_id", &f_1)?;
verify_required_field_exists("Operation.parents", &f_2)?;
verify_required_field_exists("Operation.metadata", &f_3)?;
let ret = Operation {
view_id: f_1.expect("auto-generated code should have checked for presence of required fields"),
parents: f_2.expect("auto-generated code should have checked for presence of required fields"),
metadata: f_3.expect("auto-generated code should have checked for presence of required fields"),
};
Ok(ret)
}
fn write_to_out_protocol(&self, o_prot: &mut dyn TOutputProtocol) -> thrift::Result<()> {
let struct_ident = TStructIdentifier::new("Operation");
o_prot.write_struct_begin(&struct_ident)?;
o_prot.write_field_begin(&TFieldIdentifier::new("view_id", TType::String, 1))?;
o_prot.write_bytes(&self.view_id)?;
o_prot.write_field_end()?;
o_prot.write_field_begin(&TFieldIdentifier::new("parents", TType::List, 2))?;
o_prot.write_list_begin(&TListIdentifier::new(TType::String, self.parents.len() as i32))?;
for e in &self.parents {
o_prot.write_bytes(e)?;
}
o_prot.write_list_end()?;
o_prot.write_field_end()?;
o_prot.write_field_begin(&TFieldIdentifier::new("metadata", TType::Struct, 3))?;
self.metadata.write_to_out_protocol(o_prot)?;
o_prot.write_field_end()?;
o_prot.write_field_stop()?;
o_prot.write_struct_end()
}
}
//
// Timestamp
//
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct Timestamp {
pub millis_since_epoch: i64,
pub tz_offset: i32,
}
impl Timestamp {
pub fn new(millis_since_epoch: i64, tz_offset: i32) -> Timestamp {
Timestamp {
millis_since_epoch,
tz_offset,
}
}
}
impl TSerializable for Timestamp {
fn read_from_in_protocol(i_prot: &mut dyn TInputProtocol) -> thrift::Result<Timestamp> {
i_prot.read_struct_begin()?;
let mut f_1: Option<i64> = None;
let mut f_2: Option<i32> = None;
loop {
let field_ident = i_prot.read_field_begin()?;
if field_ident.field_type == TType::Stop {
break;
}
let field_id = field_id(&field_ident)?;
match field_id {
1 => {
let val = i_prot.read_i64()?;
f_1 = Some(val);
},
2 => {
let val = i_prot.read_i32()?;
f_2 = Some(val);
},
_ => {
i_prot.skip(field_ident.field_type)?;
},
};
i_prot.read_field_end()?;
}
i_prot.read_struct_end()?;
verify_required_field_exists("Timestamp.millis_since_epoch", &f_1)?;
verify_required_field_exists("Timestamp.tz_offset", &f_2)?;
let ret = Timestamp {
millis_since_epoch: f_1.expect("auto-generated code should have checked for presence of required fields"),
tz_offset: f_2.expect("auto-generated code should have checked for presence of required fields"),
};
Ok(ret)
}
fn write_to_out_protocol(&self, o_prot: &mut dyn TOutputProtocol) -> thrift::Result<()> {
let struct_ident = TStructIdentifier::new("Timestamp");
o_prot.write_struct_begin(&struct_ident)?;
o_prot.write_field_begin(&TFieldIdentifier::new("millis_since_epoch", TType::I64, 1))?;
o_prot.write_i64(self.millis_since_epoch)?;
o_prot.write_field_end()?;
o_prot.write_field_begin(&TFieldIdentifier::new("tz_offset", TType::I32, 2))?;
o_prot.write_i32(self.tz_offset)?;
o_prot.write_field_end()?;
o_prot.write_field_stop()?;
o_prot.write_struct_end()
}
}
//
// OperationMetadata
//
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct OperationMetadata {
pub start_time: Timestamp,
pub end_time: Timestamp,
pub description: String,
pub hostname: String,
pub username: String,
pub tags: BTreeMap<String, String>,
}
impl OperationMetadata {
pub fn new(start_time: Timestamp, end_time: Timestamp, description: String, hostname: String, username: String, tags: BTreeMap<String, String>) -> OperationMetadata {
OperationMetadata {
start_time,
end_time,
description,
hostname,
username,
tags,
}
}
}
impl TSerializable for OperationMetadata {
fn read_from_in_protocol(i_prot: &mut dyn TInputProtocol) -> thrift::Result<OperationMetadata> {
i_prot.read_struct_begin()?;
let mut f_1: Option<Timestamp> = None;
let mut f_2: Option<Timestamp> = None;
let mut f_3: Option<String> = None;
let mut f_4: Option<String> = None;
let mut f_5: Option<String> = None;
let mut f_6: Option<BTreeMap<String, String>> = None;
loop {
let field_ident = i_prot.read_field_begin()?;
if field_ident.field_type == TType::Stop {
break;
}
let field_id = field_id(&field_ident)?;
match field_id {
1 => {
let val = Timestamp::read_from_in_protocol(i_prot)?;
f_1 = Some(val);
},
2 => {
let val = Timestamp::read_from_in_protocol(i_prot)?;
f_2 = Some(val);
},
3 => {
let val = i_prot.read_string()?;
f_3 = Some(val);
},
4 => {
let val = i_prot.read_string()?;
f_4 = Some(val);
},
5 => {
let val = i_prot.read_string()?;
f_5 = Some(val);
},
6 => {
let map_ident = i_prot.read_map_begin()?;
let mut val: BTreeMap<String, String> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_11 = i_prot.read_string()?;
let map_val_12 = i_prot.read_string()?;
val.insert(map_key_11, map_val_12);
}
i_prot.read_map_end()?;
f_6 = Some(val);
},
_ => {
i_prot.skip(field_ident.field_type)?;
},
};
i_prot.read_field_end()?;
}
i_prot.read_struct_end()?;
verify_required_field_exists("OperationMetadata.start_time", &f_1)?;
verify_required_field_exists("OperationMetadata.end_time", &f_2)?;
verify_required_field_exists("OperationMetadata.description", &f_3)?;
verify_required_field_exists("OperationMetadata.hostname", &f_4)?;
verify_required_field_exists("OperationMetadata.username", &f_5)?;
verify_required_field_exists("OperationMetadata.tags", &f_6)?;
let ret = OperationMetadata {
start_time: f_1.expect("auto-generated code should have checked for presence of required fields"),
end_time: f_2.expect("auto-generated code should have checked for presence of required fields"),
description: f_3.expect("auto-generated code should have checked for presence of required fields"),
hostname: f_4.expect("auto-generated code should have checked for presence of required fields"),
username: f_5.expect("auto-generated code should have checked for presence of required fields"),
tags: f_6.expect("auto-generated code should have checked for presence of required fields"),
};
Ok(ret)
}
fn write_to_out_protocol(&self, o_prot: &mut dyn TOutputProtocol) -> thrift::Result<()> {
let struct_ident = TStructIdentifier::new("OperationMetadata");
o_prot.write_struct_begin(&struct_ident)?;
o_prot.write_field_begin(&TFieldIdentifier::new("start_time", TType::Struct, 1))?;
self.start_time.write_to_out_protocol(o_prot)?;
o_prot.write_field_end()?;
o_prot.write_field_begin(&TFieldIdentifier::new("end_time", TType::Struct, 2))?;
self.end_time.write_to_out_protocol(o_prot)?;
o_prot.write_field_end()?;
o_prot.write_field_begin(&TFieldIdentifier::new("description", TType::String, 3))?;
o_prot.write_string(&self.description)?;
o_prot.write_field_end()?;
o_prot.write_field_begin(&TFieldIdentifier::new("hostname", TType::String, 4))?;
o_prot.write_string(&self.hostname)?;
o_prot.write_field_end()?;
o_prot.write_field_begin(&TFieldIdentifier::new("username", TType::String, 5))?;
o_prot.write_string(&self.username)?;
o_prot.write_field_end()?;
o_prot.write_field_begin(&TFieldIdentifier::new("tags", TType::Map, 6))?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::String, TType::String, self.tags.len() as i32))?;
for (k, v) in &self.tags {
o_prot.write_string(k)?;
o_prot.write_string(v)?;
}
o_prot.write_map_end()?;
o_prot.write_field_end()?;
o_prot.write_field_stop()?;
o_prot.write_struct_end()
}
}

Thrift schema definition (.thrift file, deleted)

@@ -1,81 +0,0 @@
// Copyright 2022 The Jujutsu Authors
//
// Licensed under the Apache License, Version 2.0 (the "License"),
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
struct RefConflict {
1: required list<binary> removes,
2: required list<binary> adds,
}
union RefTarget {
1: binary commit_id,
2: RefConflict conflict,
}
struct RemoteBranch {
1: required string remote_name,
2: required RefTarget target,
}
struct Branch {
1: required string name,
// Unset if the branch has been deleted locally.
2: optional RefTarget local_target,
// TODO: How would we support renaming remotes while having undo work? If
// the remote name is stored in config, it's going to become a mess if the
// remote is renamed but the configs are left unchanged. Should each remote
// be identified (here and in configs) by a UUID?
3: required list<RemoteBranch> remote_branches,
}
struct GitRef {
1: required string name,
2: required RefTarget target,
}
struct Tag {
1: required string name,
2: required RefTarget target,
}
struct View {
1: required list<binary> head_ids,
2: required list<binary> public_head_ids,
3: required map<string, binary> wc_commit_ids,
4: required list<Branch> branches,
5: required list<Tag> tags,
// Only a subset of the refs. For example, does not include refs/notes/.
6: required list<GitRef> git_refs,
7: optional binary git_head,
}
struct Operation {
1: required binary view_id,
2: required list<binary> parents,
3: required OperationMetadata metadata,
}
// TODO: Share with store.proto? Do we even need the timezone here?
struct Timestamp {
1: required i64 millis_since_epoch,
2: required i32 tz_offset,
}
struct OperationMetadata {
1: required Timestamp start_time,
2: required Timestamp end_time,
3: required string description,
4: required string hostname,
5: required string username,
6: required map<string, string> tags,
}