mirror of https://github.com/salsa-rs/salsa.git (synced 2024-12-25 05:29:43 +00:00)

CI runs Clippy

This commit is contained in:
parent a691096063
commit fc6806a07c

23 changed files with 260 additions and 283 deletions
.github/workflows/test.yml (vendored): 5 lines changed

@@ -31,7 +31,7 @@ jobs:
       - uses: actions-rs/toolchain@v1
         with:
           toolchain: ${{ matrix.rust }}
-          components: rustfmt
+          components: rustfmt, clippy
           default: true
       - uses: actions-rs/cargo@v1
         with:
@@ -49,3 +49,6 @@ jobs:
         with:
           command: fmt
           args: -- --check
+      - uses: actions-rs/cargo@v1
+        with:
+          command: clippy
@@ -36,185 +36,179 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream
    // Decompose the trait into the corresponding queries.
    let mut queries = vec![];
    for item in input.items {
-        match item {
-            TraitItem::Method(method) => {
+        if let TraitItem::Method(method) = item {
            let mut storage = QueryStorage::Memoized;
            let mut cycle = None;
            let mut invoke = None;
            let query_name = method.sig.ident.to_string();
            let mut query_type = Ident::new(
                &format!("{}Query", method.sig.ident.to_string().to_camel_case()),
                Span::call_site(),
            );
            let mut num_storages = 0;

            // Extract attributes.
            let (attrs, salsa_attrs) = filter_attrs(method.attrs);
            for SalsaAttr { name, tts, span } in salsa_attrs {
                match name.as_str() {
                    "memoized" => {
                        storage = QueryStorage::Memoized;
                        num_storages += 1;
                    }
                    "dependencies" => {
                        storage = QueryStorage::Dependencies;
                        num_storages += 1;
                    }
                    "input" => {
                        storage = QueryStorage::Input;
                        num_storages += 1;
                    }
                    "interned" => {
                        storage = QueryStorage::Interned;
                        num_storages += 1;
                    }
                    "cycle" => {
                        cycle = Some(parse_macro_input!(tts as Parenthesized<syn::Path>).0);
                    }
                    "invoke" => {
                        invoke = Some(parse_macro_input!(tts as Parenthesized<syn::Path>).0);
                    }
                    "query_type" => {
                        query_type = parse_macro_input!(tts as Parenthesized<Ident>).0;
                    }
                    "transparent" => {
                        storage = QueryStorage::Transparent;
                        num_storages += 1;
                    }
                    _ => {
                        return Error::new(span, format!("unknown salsa attribute `{}`", name))
                            .to_compile_error()
                            .into();
                    }
                }
            }

            // Check attribute combinations.
            if num_storages > 1 {
                return Error::new(method.sig.span(), "multiple storage attributes specified")
                    .to_compile_error()
                    .into();
            }
            match &invoke {
                Some(invoke) if storage == QueryStorage::Input => {
                    return Error::new(
                        invoke.span(),
                        "#[salsa::invoke] cannot be set on #[salsa::input] queries",
                    )
                    .to_compile_error()
                    .into();
                }
                _ => {}
            }

            // Extract keys.
            let mut iter = method.sig.inputs.iter();
            match iter.next() {
                Some(FnArg::Receiver(sr)) if sr.mutability.is_none() => (),
                _ => {
                    return Error::new(
                        method.sig.span(),
                        format!(
                            "first argument of query `{}` must be `&self`",
                            method.sig.ident,
                        ),
                    )
                    .to_compile_error()
                    .into();
                }
            }
            let mut keys: Vec<Type> = vec![];
            for arg in iter {
                match *arg {
                    FnArg::Typed(ref arg) => {
                        keys.push((*arg.ty).clone());
                    }
                    ref arg => {
                        return Error::new(
                            arg.span(),
                            format!("unsupported argument `{:?}` of `{}`", arg, method.sig.ident,),
                        )
                        .to_compile_error()
                        .into();
                    }
                }
            }

            // Extract value.
            let value = match method.sig.output {
                ReturnType::Type(_, ref ty) => ty.as_ref().clone(),
                ref ret => {
                    return Error::new(
                        ret.span(),
                        format!(
                            "unsupported return type `{:?}` of `{}`",
                            ret, method.sig.ident
                        ),
                    )
                    .to_compile_error()
                    .into();
                }
            };

            // For `#[salsa::interned]` keys, we create a "lookup key" automatically.
            //
            // For a query like:
            //
            //     fn foo(&self, x: Key1, y: Key2) -> u32
            //
            // we would create
            //
            //     fn lookup_foo(&self, x: u32) -> (Key1, Key2)
            let lookup_query = if let QueryStorage::Interned = storage {
                let lookup_query_type = Ident::new(
                    &format!(
                        "{}LookupQuery",
                        method.sig.ident.to_string().to_camel_case()
                    ),
                    Span::call_site(),
                );
                let lookup_fn_name = Ident::new(
                    &format!("lookup_{}", method.sig.ident.to_string()),
                    method.sig.ident.span(),
                );
                let keys = &keys;
                let lookup_value: Type = parse_quote!((#(#keys),*));
                let lookup_keys = vec![value.clone()];
                Some(Query {
                    query_type: lookup_query_type,
                    query_name: format!("lookup_{}", query_name),
                    fn_name: lookup_fn_name,
                    attrs: vec![], // FIXME -- some automatically generated docs on this method?
                    storage: QueryStorage::InternedLookup {
                        intern_query_type: query_type.clone(),
                    },
                    keys: lookup_keys,
                    value: lookup_value,
                    invoke: None,
                    cycle: cycle.clone(),
                })
            } else {
                None
            };

            queries.push(Query {
                query_type,
                query_name,
                fn_name: method.sig.ident,
                attrs,
                storage,
                keys,
                value,
                invoke,
                cycle,
            });

            queries.extend(lookup_query);
        }
-            _ => (),
-        }
    }
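The hunk above replaces a `match` that has one meaningful arm plus a `_ => ()` catch-all with `if let`, which is the rewrite Clippy's `single_match` lint suggests. A minimal, self-contained sketch of the same pattern; the `Item` enum and names here are illustrative, not salsa's:

    enum Item {
        Method(String),
        Other,
    }

    fn collect_methods(items: Vec<Item>) -> Vec<String> {
        let mut methods = vec![];
        for item in items {
            // Before: match item { Item::Method(name) => methods.push(name), _ => () }
            // After: `if let` expresses the single interesting case with less nesting.
            if let Item::Method(name) = item {
                methods.push(name);
            }
        }
        methods
    }

    fn main() {
        let items = vec![Item::Method("len".into()), Item::Other];
        assert_eq!(collect_methods(items), vec!["len".to_string()]);
    }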
@@ -366,7 +360,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream

    // Emit an impl of the trait
    output.extend({
-        let bounds = input.supertraits.clone();
+        let bounds = input.supertraits;
        quote! {
            impl<DB> #trait_name for DB
            where
@@ -380,10 +374,9 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream
    });

    let non_transparent_queries = || {
-        queries.iter().filter(|q| match q.storage {
-            QueryStorage::Transparent => false,
-            _ => true,
-        })
+        queries
+            .iter()
+            .filter(|q| !matches!(q.storage, QueryStorage::Transparent))
    };

    // Emit the query types.
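Turning a two-arm `match` that only produces a `bool` into `matches!` is what Clippy's `match_like_matches_macro` lint asks for. A small illustrative sketch, independent of salsa's `QueryStorage` type:

    enum Storage {
        Memoized,
        Transparent,
    }

    fn main() {
        let storages = vec![Storage::Memoized, Storage::Transparent, Storage::Memoized];

        // Before: a match that just maps variants to true/false.
        let old_count = storages
            .iter()
            .filter(|s| match s {
                Storage::Transparent => false,
                _ => true,
            })
            .count();

        // After: `matches!` expands to the same match but reads as a predicate.
        let new_count = storages
            .iter()
            .filter(|s| !matches!(s, Storage::Transparent))
            .count();

        assert_eq!(old_count, new_count);
        assert_eq!(new_count, 2);
    }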
@@ -393,7 +386,9 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream

        let storage = match &query.storage {
            QueryStorage::Memoized => quote!(salsa::plumbing::MemoizedStorage<Self>),
-            QueryStorage::Dependencies => quote!(salsa::plumbing::DependencyStorage<Self>),
+            QueryStorage::Dependencies => {
+                quote!(salsa::plumbing::DependencyStorage<Self>)
+            }
            QueryStorage::Input => quote!(salsa::plumbing::InputStorage<Self>),
            QueryStorage::Interned => quote!(salsa::plumbing::InternedStorage<Self>),
            QueryStorage::InternedLookup { intern_query_type } => {
@@ -55,8 +55,7 @@ fn all_classes(db: &dyn Compiler) -> Arc<Vec<Class>> {

fn fields(db: &dyn Compiler, class: Class) -> Arc<Vec<Field>> {
    let class = db.lookup_intern_class(class);
-    let fields = class.fields.clone();
-    Arc::new(fields)
+    Arc::new(class.fields)
}

fn all_fields(db: &dyn Compiler) -> Arc<Vec<Field>> {
@@ -9,7 +9,7 @@ use self::compiler::Compiler;
use self::implementation::DatabaseImpl;
use self::interner::Interner;

-static INPUT_STR: &'static str = r#"
+static INPUT_STR: &str = r#"
lorem,ipsum
dolor,sit,amet,
consectetur,adipiscing,elit
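Dropping the explicit `'static` here is Clippy's `redundant_static_lifetimes` lint: references in a `static` or `const` item already have the `'static` lifetime, so spelling it out adds nothing. A minimal sketch with an illustrative constant:

    // Before (flagged by `redundant_static_lifetimes`):
    // static INPUT_OLD: &'static str = "lorem,ipsum";

    // After: the lifetime is implied by the `static` item itself.
    static INPUT: &str = "lorem,ipsum";

    fn main() {
        assert_eq!(INPUT.split(',').count(), 2);
    }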
@@ -98,7 +98,7 @@ fn main() {
    // interface by maintaining a HashSet of inserted keys.
    // println!("Initially, the length is {}.", db.length(()));

-    db.set_input_string((), Arc::new(format!("Hello, world")));
+    db.set_input_string((), Arc::new("Hello, world".to_string()));

    println!("Now, the length is {}.", db.length(()));
}
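`format!` with a bare literal and no arguments only allocates a `String` the long way round; Clippy's `useless_format` lint suggests `.to_string()` (or `String::from`) instead, which is the change made here and in the interned-key tests further down. A tiny standalone sketch:

    fn main() {
        // Before: formatting machinery for a plain literal.
        let greeting_old = format!("Hello, world");

        // After: a direct conversion, as `useless_format` suggests.
        let greeting_new = "Hello, world".to_string();

        assert_eq!(greeting_old, greeting_new);
    }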
@@ -8,7 +8,7 @@ trait RequestUtil: RequestParser {
}

fn header(db: &dyn RequestUtil) -> Vec<ParsedHeader> {
-    db.parse().header.clone()
+    db.parse().header
}

fn content_type(db: &dyn RequestUtil) -> Option<String> {
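This change, like the `fields` query above, drops a `.clone()` on a value whose owner is never used again, the situation Clippy's `redundant_clone` lint points at. A simplified stand-in (the `Class` type and field names here only mirror the shape of the example code):

    use std::sync::Arc;

    struct Class {
        fields: Vec<String>,
    }

    // Before (extra allocation):
    // fn fields_old(class: Class) -> Arc<Vec<String>> {
    //     let fields = class.fields.clone();
    //     Arc::new(fields)
    // }

    // After: `class` is owned and not used afterwards, so its fields move directly.
    fn fields(class: Class) -> Arc<Vec<String>> {
        Arc::new(class.fields)
    }

    fn main() {
        let class = Class { fields: vec!["x".to_string(), "y".to_string()] };
        assert_eq!(fields(class).len(), 2);
    }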
@@ -31,21 +31,17 @@ pub trait DebugQueryTable {

/// An entry from a query table, for debugging and inspecting the table state.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
+#[non_exhaustive]
pub struct TableEntry<K, V> {
    /// key of the query
    pub key: K,
    /// value of the query, if it is stored
    pub value: Option<V>,
-    _for_future_use: (),
}

impl<K, V> TableEntry<K, V> {
    pub(crate) fn new(key: K, value: Option<V>) -> TableEntry<K, V> {
-        TableEntry {
-            key,
-            value,
-            _for_future_use: (),
-        }
+        TableEntry { key, value }
    }
}
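The private `_for_future_use: ()` field existed only to stop downstream code from constructing or exhaustively matching `TableEntry`; the `#[non_exhaustive]` attribute is the built-in way to get the same guarantee, so new fields can be added later without a breaking change. A minimal sketch of the resulting shape:

    /// Outside the defining crate, a `#[non_exhaustive]` struct cannot be built
    /// with a struct literal or matched exhaustively; inside the crate it behaves
    /// like any other struct.
    #[non_exhaustive]
    #[derive(Debug)]
    pub struct TableEntry<K, V> {
        pub key: K,
        pub value: Option<V>,
    }

    impl<K, V> TableEntry<K, V> {
        pub fn new(key: K, value: Option<V>) -> Self {
            TableEntry { key, value }
        }
    }

    fn main() {
        let entry = TableEntry::new("len", Some(3));
        println!("{:?}", entry);
    }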
@@ -104,7 +104,7 @@ where
        let database_key_index = DatabaseKeyIndex {
            group_index: self.group_index,
            query_index: Q::QUERY_INDEX,
-            key_index: key_index,
+            key_index,
        };
        entry
            .or_insert_with(|| Arc::new(Slot::new(key.clone(), database_key_index)))
@@ -372,7 +372,7 @@ where
                durability: err.durability,
                changed_at: err.changed_at,
            })
-            .ok_or_else(|| err)
+            .ok_or(err)
        })
    }

@@ -389,7 +389,7 @@ where
                    changed_at: err.changed_at,
                    durability: err.durability,
                })
-                .ok_or_else(|| err),
+                .ok_or(err),
            )
        }
    };
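In both hunks the closure passed to `ok_or_else` only returns a value that already exists, so the laziness buys nothing; this is the case Clippy's `unnecessary_lazy_evaluations` lint flags, and the eager `ok_or` is simpler. The lazy form still earns its keep when constructing the error allocates or does work. A small sketch, with an illustrative helper rather than salsa's code:

    fn first_even(values: &[i32], err: String) -> Result<i32, String> {
        // `err` is already built, so pass it directly.
        values.iter().copied().find(|v| v % 2 == 0).ok_or(err)
        // Equivalent, but flagged when the closure is this trivial:
        // values.iter().copied().find(|v| v % 2 == 0).ok_or_else(|| err)
    }

    fn main() {
        assert_eq!(first_even(&[1, 4, 5], "none".to_string()), Ok(4));
        assert_eq!(first_even(&[1, 3], "none".to_string()), Err("none".to_string()));
    }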
@@ -575,11 +575,11 @@ where
        return match self.read_upgrade(db, revision_now) {
            Ok(v) => {
                debug!(
                    "maybe_changed_since({:?}: {:?} since (recomputed) value changed at {:?}",
                    self,
                    v.changed_at > revision,
                    v.changed_at,
                );
                v.changed_at > revision
            }
            Err(_) => true,
@@ -672,7 +672,7 @@ where
    ) -> Result<BlockingFuture<WaitResult<Q::Value, DatabaseKeyIndex>>, CycleDetected> {
        let id = runtime.id();
        if other_id == id {
-            return Err(CycleDetected { from: id, to: id });
+            Err(CycleDetected { from: id, to: id })
        } else {
            if !runtime.try_block_on(self.database_key_index, other_id) {
                return Err(CycleDetected {
@@ -884,9 +884,7 @@ impl MemoRevisions {
            MemoInputs::Tracked { inputs } => {
                let changed_input = inputs
                    .iter()
-                    .filter(|&&input| db.maybe_changed_since(input, verified_at))
-                    .next();
-
+                    .find(|&&input| db.maybe_changed_since(input, verified_at));
                if let Some(input) = changed_input {
                    debug!("validate_memoized_value: `{:?}` may have changed", input);
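`.filter(..).next()` and `.find(..)` produce the same first matching element; Clippy's `filter_next` lint prefers the single call. An illustrative sketch with plain integers:

    fn main() {
        let inputs = [3, 8, 15, 22];

        // Before: filter plus next goes through an extra iterator adapter.
        let first_big_old = inputs.iter().filter(|&&x| x > 10).next();

        // After: `find` stops at the first match directly.
        let first_big_new = inputs.iter().find(|&&x| x > 10);

        assert_eq!(first_big_old, first_big_new);
        assert_eq!(first_big_new, Some(&15));
    }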
@@ -916,10 +914,7 @@ impl MemoRevisions {
    }

    fn has_untracked_input(&self) -> bool {
-        match self.inputs {
-            MemoInputs::Untracked => true,
-            _ => false,
-        }
+        matches!(self.inputs, MemoInputs::Untracked)
    }
}

@@ -1,3 +1,4 @@
+#![allow(clippy::type_complexity)]
#![warn(rust_2018_idioms)]
#![warn(missing_docs)]

@@ -257,7 +257,7 @@ impl Runtime {
    /// - `database_key`: the query whose result was read
    /// - `changed_revision`: the last revision in which the result of that
    ///   query had changed
-    pub(crate) fn report_query_read<'hack>(
+    pub(crate) fn report_query_read(
        &self,
        input: DatabaseKeyIndex,
        durability: Durability,
@@ -607,10 +607,7 @@ where
                path: path.into_iter().chain(Some(database_key.clone())).collect(),
            },
        );
-        self.labels
-            .entry(database_key.clone())
-            .or_default()
-            .push(from_id);
+        self.labels.entry(database_key).or_default().push(from_id);
        true
    }

@@ -623,8 +620,8 @@ where
        }
    }

-    fn push_cycle_path<'a>(
-        &'a self,
+    fn push_cycle_path(
+        &self,
        database_key: K,
        to: RuntimeId,
        local_path: impl IntoIterator<Item = K>,
@@ -636,28 +633,22 @@ where
        let mut last = None;
        let mut local_path = Some(local_path);

-        loop {
-            match current.take() {
-                Some((id, path)) => {
+        while let Some((id, path)) = current.take() {
            let link_key = path.last().unwrap();
            output.extend(path.iter().cloned());

            current = self.edges.get(&id).map(|edge| {
                let i = edge.path.iter().rposition(|p| p == link_key).unwrap();
                (edge.id, &edge.path[i + 1..])
            });

            if current.is_none() {
                last = local_path.take().map(|local_path| {
                    local_path
                        .into_iter()
                        .skip_while(move |p| *p != *link_key)
                        .skip(1)
                });
            }
-                }
-                None => break,
-            }
        }
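A `loop` whose body is a single `match` with a `break` in the fallback arm is exactly what Clippy's `while_let_loop` lint rewrites as `while let`. A small self-contained sketch of the same transformation:

    fn main() {
        let mut stack = vec![1, 2, 3];
        let mut drained = vec![];

        // Before:
        // loop {
        //     match stack.pop() {
        //         Some(top) => drained.push(top),
        //         None => break,
        //     }
        // }

        // After: `while let` keeps looping as long as the pattern matches.
        while let Some(top) = stack.pop() {
            drained.push(top);
        }

        assert_eq!(drained, vec![3, 2, 1]);
    }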
@@ -51,25 +51,25 @@ fn recover_b(_db: &dyn Database, cycle: &[String]) -> Result<(), Error> {
    })
}

-fn memoized_a(db: &dyn Database) -> () {
+fn memoized_a(db: &dyn Database) {
    db.memoized_b()
}

-fn memoized_b(db: &dyn Database) -> () {
+fn memoized_b(db: &dyn Database) {
    db.memoized_a()
}

-fn volatile_a(db: &dyn Database) -> () {
+fn volatile_a(db: &dyn Database) {
    db.salsa_runtime().report_untracked_read();
    db.volatile_b()
}

-fn volatile_b(db: &dyn Database) -> () {
+fn volatile_b(db: &dyn Database) {
    db.salsa_runtime().report_untracked_read();
    db.volatile_a()
}

-fn cycle_leaf(_db: &dyn Database) -> () {}
+fn cycle_leaf(_db: &dyn Database) {}

fn cycle_a(db: &dyn Database) -> Result<(), Error> {
    let _ = db.cycle_b();
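Writing `-> ()` on a function that returns nothing is redundant, since unit is already the implicit return type of a function with no `->` annotation; Clippy's `unused_unit` lint asks for it to be dropped, which is all these test changes do. A tiny sketch with an illustrative function:

    // Before (flagged by `unused_unit`):
    // fn log_len_old(values: &[i32]) -> () {
    //     println!("{} values", values.len());
    // }

    // After: same signature, without the explicit unit return type.
    fn log_len(values: &[i32]) {
        println!("{} values", values.len());
    }

    fn main() {
        log_len(&[1, 2, 3]);
    }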
@@ -46,47 +46,53 @@ impl salsa::InternKey for InternKey {
#[test]
fn test_intern1() {
    let db = Database::default();
-    let foo0 = db.intern1(format!("foo"));
-    let bar0 = db.intern1(format!("bar"));
-    let foo1 = db.intern1(format!("foo"));
-    let bar1 = db.intern1(format!("bar"));
+    let foo0 = db.intern1("foo".to_string());
+    let bar0 = db.intern1("bar".to_string());
+    let foo1 = db.intern1("foo".to_string());
+    let bar1 = db.intern1("bar".to_string());

    assert_eq!(foo0, foo1);
    assert_eq!(bar0, bar1);
    assert_ne!(foo0, bar0);

-    assert_eq!(format!("foo"), db.lookup_intern1(foo0));
-    assert_eq!(format!("bar"), db.lookup_intern1(bar0));
+    assert_eq!("foo".to_string(), db.lookup_intern1(foo0));
+    assert_eq!("bar".to_string(), db.lookup_intern1(bar0));
}

#[test]
fn test_intern2() {
    let db = Database::default();
-    let foo0 = db.intern2(format!("x"), format!("foo"));
-    let bar0 = db.intern2(format!("x"), format!("bar"));
-    let foo1 = db.intern2(format!("x"), format!("foo"));
-    let bar1 = db.intern2(format!("x"), format!("bar"));
+    let foo0 = db.intern2("x".to_string(), "foo".to_string());
+    let bar0 = db.intern2("x".to_string(), "bar".to_string());
+    let foo1 = db.intern2("x".to_string(), "foo".to_string());
+    let bar1 = db.intern2("x".to_string(), "bar".to_string());

    assert_eq!(foo0, foo1);
    assert_eq!(bar0, bar1);
    assert_ne!(foo0, bar0);

-    assert_eq!((format!("x"), format!("foo")), db.lookup_intern2(foo0));
-    assert_eq!((format!("x"), format!("bar")), db.lookup_intern2(bar0));
+    assert_eq!(
+        ("x".to_string(), "foo".to_string()),
+        db.lookup_intern2(foo0)
+    );
+    assert_eq!(
+        ("x".to_string(), "bar".to_string()),
+        db.lookup_intern2(bar0)
+    );
}

#[test]
fn test_intern_key() {
    let db = Database::default();
-    let foo0 = db.intern_key(format!("foo"));
-    let bar0 = db.intern_key(format!("bar"));
-    let foo1 = db.intern_key(format!("foo"));
-    let bar1 = db.intern_key(format!("bar"));
+    let foo0 = db.intern_key("foo".to_string());
+    let bar0 = db.intern_key("bar".to_string());
+    let foo1 = db.intern_key("foo".to_string());
+    let bar1 = db.intern_key("bar".to_string());

    assert_eq!(foo0, foo1);
    assert_eq!(bar0, bar1);
    assert_ne!(foo0, bar0);

-    assert_eq!(format!("foo"), db.lookup_intern_key(foo0));
-    assert_eq!(format!("bar"), db.lookup_intern_key(bar0));
+    assert_eq!("foo".to_string(), db.lookup_intern_key(foo0));
+    assert_eq!("bar".to_string(), db.lookup_intern_key(bar0));
}
@@ -5,7 +5,7 @@ trait MyDatabase: salsa::Database {
}

mod another_module {
-    pub(crate) fn another_name(_: &dyn crate::MyDatabase, (): ()) -> () {}
+    pub(crate) fn another_name(_: &dyn crate::MyDatabase, (): ()) {}
}

fn main() {}
@@ -86,9 +86,10 @@ fn on_demand_input_durability() {
    let validated = Rc::new(Cell::new(0));
    db.on_event = Some(Box::new({
        let validated = Rc::clone(&validated);
-        move |event| match event.kind {
-            salsa::EventKind::DidValidateMemoizedValue { .. } => validated.set(validated.get() + 1),
-            _ => (),
+        move |event| {
+            if let salsa::EventKind::DidValidateMemoizedValue { .. } = event.kind {
+                validated.set(validated.get() + 1)
+            }
        }
    }));

@@ -12,13 +12,13 @@ trait PanicSafelyDatabase: salsa::Database {
    fn outer(&self) -> ();
}

-fn panic_safely(db: &dyn PanicSafelyDatabase) -> () {
+fn panic_safely(db: &dyn PanicSafelyDatabase) {
    assert_eq!(db.one(), 1);
}

static OUTER_CALLS: AtomicU32 = AtomicU32::new(0);

-fn outer(db: &dyn PanicSafelyDatabase) -> () {
+fn outer(db: &dyn PanicSafelyDatabase) {
    OUTER_CALLS.fetch_add(1, SeqCst);
    db.panic_safely();
}
@@ -21,8 +21,8 @@ fn in_par_get_set_cancellation_immediate() {
    let mut db = ParDatabaseImpl::default();

    db.set_input('a', 100);
-    db.set_input('b', 010);
-    db.set_input('c', 001);
+    db.set_input('b', 10);
+    db.set_input('c', 1);
    db.set_input('d', 0);

    let thread1 = std::thread::spawn({
@@ -62,8 +62,8 @@ fn in_par_get_set_cancellation_transitive() {
    let mut db = ParDatabaseImpl::default();

    db.set_input('a', 100);
-    db.set_input('b', 010);
-    db.set_input('c', 001);
+    db.set_input('b', 10);
+    db.set_input('c', 1);
    db.set_input('d', 0);

    let thread1 = std::thread::spawn({
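These literal changes are Clippy's `zero_prefixed_literal` lint: in Rust a leading zero does not make a number octal (that needs the `0o` prefix), so `010` is just ten written confusingly, and the tests were always using 10 and 1. A short sketch illustrating the point:

    fn main() {
        // A leading zero is ignored in a Rust decimal literal: `010` is ten.
        assert_eq!(010, 10);
        // Octal requires an explicit prefix.
        assert_eq!(0o10, 8);
        // Clippy flags `010` because readers used to C may expect eight;
        // writing `10`, as this commit does, removes the ambiguity.
    }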
@@ -38,7 +38,6 @@ fn in_par_get_set_cancellation() {
    });

-
    let thread2 = std::thread::spawn({
        let signal = signal.clone();
        move || {
            // Wait until thread 1 has asserted that they are not cancelled
            // before we invoke `set.`
@@ -8,11 +8,11 @@ fn in_par_two_independent_queries() {
    let mut db = ParDatabaseImpl::default();

    db.set_input('a', 100);
-    db.set_input('b', 010);
-    db.set_input('c', 001);
+    db.set_input('b', 10);
+    db.set_input('c', 1);
    db.set_input('d', 200);
-    db.set_input('e', 020);
-    db.set_input('f', 002);
+    db.set_input('e', 20);
+    db.set_input('f', 2);

    let thread1 = std::thread::spawn({
        let db = db.snapshot();
@@ -10,17 +10,12 @@ fn in_par_get_set_race() {
    let mut db = ParDatabaseImpl::default();

    db.set_input('a', 100);
-    db.set_input('b', 010);
-    db.set_input('c', 001);
+    db.set_input('b', 10);
+    db.set_input('c', 1);

    let thread1 = std::thread::spawn({
        let db = db.snapshot();
-        move || {
-            Cancelled::catch(AssertUnwindSafe(|| {
-                let v = db.sum("abc");
-                v
-            }))
-        }
+        move || Cancelled::catch(AssertUnwindSafe(|| db.sum("abc")))
    });

    let thread2 = std::thread::spawn(move || {
@@ -166,12 +166,8 @@ pub(crate) struct ParDatabaseImpl {

impl Database for ParDatabaseImpl {
    fn salsa_event(&self, event: salsa::Event) {
-        match event.kind {
-            salsa::EventKind::WillBlockOn { .. } => {
-                self.signal(self.knobs().signal_on_will_block.get());
-            }
-
-            _ => {}
+        if let salsa::EventKind::WillBlockOn { .. } = event.kind {
+            self.signal(self.knobs().signal_on_will_block.get());
        }
    }
}
@@ -92,7 +92,7 @@ impl rand::distributions::Distribution<WriteOp> for rand::distributions::Standard {
    fn sample<R: rand::Rng + ?Sized>(&self, rng: &mut R) -> WriteOp {
        let key = rng.gen::<usize>() % 10;
        let value = rng.gen::<usize>() % 10;
-        return WriteOp::SetA(key, value);
+        WriteOp::SetA(key, value)
    }
}

@@ -100,7 +100,7 @@ impl rand::distributions::Distribution<ReadOp> for rand::distributions::Standard {
    fn sample<R: rand::Rng + ?Sized>(&self, rng: &mut R) -> ReadOp {
        let query = rng.gen::<Query>();
        let key = rng.gen::<usize>() % 10;
-        return ReadOp::Get(query, key);
+        ReadOp::Get(query, key)
    }
}
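A trailing `return expr;` at the end of a function body is flagged by Clippy's `needless_return`: the final expression, without `return` or a semicolon, is already the function's value. A minimal sketch with a hypothetical stand-in type rather than the stress test's `WriteOp`/`ReadOp`:

    struct Op(usize, usize);

    // Before (flagged): `return Op(key % 10, value % 10);`
    // After: the last expression is returned implicitly.
    fn sample_op(key: usize, value: usize) -> Op {
        Op(key % 10, value % 10)
    }

    fn main() {
        let op = sample_op(42, 17);
        assert_eq!((op.0, op.1), (2, 7));
    }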
@@ -11,8 +11,8 @@ fn true_parallel_different_keys() {
    let mut db = ParDatabaseImpl::default();

    db.set_input('a', 100);
-    db.set_input('b', 010);
-    db.set_input('c', 001);
+    db.set_input('b', 10);
+    db.set_input('c', 1);

    // Thread 1 will signal stage 1 when it enters and wait for stage 2.
    let thread1 = std::thread::spawn({
@@ -40,7 +40,7 @@ fn true_parallel_different_keys() {
    });

    assert_eq!(thread1.join().unwrap(), 100);
-    assert_eq!(thread2.join().unwrap(), 010);
+    assert_eq!(thread2.join().unwrap(), 10);
}

/// Add a test that tries to trigger a conflict, where we fetch
@@ -51,8 +51,8 @@ fn true_parallel_same_keys() {
    let mut db = ParDatabaseImpl::default();

    db.set_input('a', 100);
-    db.set_input('b', 010);
-    db.set_input('c', 001);
+    db.set_input('b', 10);
+    db.set_input('c', 1);

    // Thread 1 will wait_for a barrier in the start of `sum`
    let thread1 = std::thread::spawn({