first commit

This commit is contained in:
sevki 2024-05-24 12:18:31 +01:00
commit 8ee21e2c6f
13 changed files with 2478 additions and 0 deletions

1
.gitignore vendored Normal file
View file

@@ -0,0 +1 @@
target

1797
Cargo.lock generated Normal file

File diff suppressed because it is too large Load diff

40
Cargo.toml Normal file
View file

@@ -0,0 +1,40 @@
[package]
name = "okstd"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
anyhow = "1.0.81"
# serde features enable serializable backtraces for the crashdump machinery.
backtrace = { version = "0.3.71", features = ["serde", "serialize-serde"] }
base64 = "0.22.0"
crc16 = "0.4.0"
fastvlq = "1.1.1"
futures = "0.3.30"
hex = "0.4.3"
num_cpus = "1.16.0"
# Proc-macro companion crate (also a workspace member below).
ok_macros = { version = "0.1.0", path = "ok_macros" }
rustc-demangle = "0.1.23"
# NOTE(review): wildcard requirements mean "any version" and are rejected
# by crates.io on publish — consider pinning, e.g. serde = "1".
serde = "*"
serde_json = "*"
slog = "2.7.0"
slog-scope = "4.4.0"
slog-term = "2.9.1"
sourcemap = "8.0.1"
symbolic = { version = "12.8.0", features = [
"common-serde",
"debuginfo-serde",
"demangle",
"symcache",
"debuginfo",
] }
termcolor = "1.4.1"
tokio = { version = "1.37.0", features = ["full"] }
url = "2.5.0"
# Table order is not significant in TOML, so [dev-dependencies] after
# [workspace] is valid, though conventionally it sits with [dependencies].
[workspace]
members = ["integration", "ok_macros"]
[dev-dependencies]
# trybuild drives the e2e test in src/e2e_tests.rs.
trybuild = "1.0.96"

9
integration/Cargo.toml Normal file
View file

@@ -0,0 +1,9 @@
[package]
name = "integration"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
# Path dependency on the parent crate; this crate exists as a trybuild
# fixture exercised by okstd's e2e test.
okstd = { version = "*", path = ".." }

12
integration/src/main.rs Normal file
View file

@@ -0,0 +1,12 @@
use okstd::prelude::*;

/// Entry point. `#[okstd::main]` expands this into a synchronous `main`
/// that installs the panic hook, sets up logging, builds a runtime and
/// blocks on this async body.
#[okstd::main]
async fn main() {
    something();
}

/// `#[okstd::log(debug)]` wraps the body with logging setup and a slog
/// scope before it runs; `debug!` then logs through that scope.
#[okstd::log(debug)]
fn something() {
    debug!("Hello, world!");
    println!("Hello, world!");
}

17
ok_macros/Cargo.toml Normal file
View file

@@ -0,0 +1,17 @@
[package]
name = "ok_macros"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
# This crate exports attribute macros (#[okstd::main], #[okstd::test],
# #[okstd::log]); proc-macro crates must declare this.
proc-macro = true
[dependencies]
syn = { version = "2.0.63", features = ["full", "extra-traits", "visit-mut"] }
quote = "^1"
# NOTE(review): darling 0.10 predates syn 2 and pulls in its own syn 1;
# it also appears unused in lib.rs — confirm it is still needed, or bump
# to a syn-2-compatible release (0.20+).
darling = "0.10.0"
proc-macro2 = "1.0.3"
slog = "2.7.0"

100
ok_macros/src/func_transformer.rs Normal file
View file

@@ -0,0 +1,100 @@
use proc_macro::TokenStream;
use quote::quote;
use syn::parse::{ParseBuffer, ParseStream};
use syn::{parse_macro_input, punctuated::Punctuated, ItemFn};
use syn::{parse_quote, Attribute, Meta, Token};
/// Implementation of `#[okstd::log(<level>)]`.
///
/// Parses the attribute argument into a `Level`. `off` (or no argument)
/// returns the input tokens untouched; any other level re-emits the
/// function with its body wrapped in `setup_logging()` plus a slog-scope
/// `scope(...)` call. The function's signature, attributes and name are
/// preserved verbatim.
pub fn transform_function(
    args: proc_macro::TokenStream,
    input: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
    /// Log level selected by the attribute argument.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    enum Level {
        /// Off
        Off,
        /// Critical
        Critical,
        /// Error
        Error,
        /// Warning
        Warning,
        /// Info
        Info,
        /// Debug
        Debug,
        /// Trace
        Trace,
    }

    // Parse the attribute argument; anything unrecognized is a hard error
    // at macro-expansion time.
    let mut level: Level = Level::Off;
    args.into_iter().for_each(|arg| {
        let arg = arg.to_string();
        match arg.as_str() {
            "debug" => level = Level::Debug,
            "info" => level = Level::Info,
            "critical" => level = Level::Critical,
            "warn" => level = Level::Warning,
            "error" => level = Level::Error,
            "trace" => level = Level::Trace,
            "off" => level = Level::Off,
            _ => panic!("Unknown log level: {}", arg),
        }
    });

    // Keep the raw tokens around so `off` can emit the function unchanged;
    // parsing still runs first so malformed input reports an error either way.
    let old_fn = input.clone();
    let item_fn = parse_macro_input!(input as ItemFn);
    if level == Level::Off {
        return old_fn;
    }

    // Signature pieces, re-emitted verbatim around the wrapped body.
    let fn_name = &item_fn.sig.ident;
    let attrs: &Vec<syn::Attribute> = &item_fn.attrs;
    let asyncness: &Option<syn::token::Async> = &item_fn.sig.asyncness;
    let generics: &syn::Generics = &item_fn.sig.generics;
    let inputs: &Punctuated<syn::FnArg, syn::token::Comma> = &item_fn.sig.inputs;
    let output: &syn::ReturnType = &item_fn.sig.output;
    let where_clause: &Option<syn::WhereClause> = &item_fn.sig.generics.where_clause;
    let body = &item_fn.block;

    // NOTE(review): the level currently only gates whether instrumentation
    // is added at all — the generated code does not filter records by level.
    // (The previous version also built a `slog::Level` token and a renamed
    // `__logging_*` copy of the function, but never emitted either; that
    // dead code has been removed without changing the emitted tokens.)
    let result = quote! {
        #( #attrs )*
        #asyncness fn #fn_name #generics(#inputs) #output
        #where_clause {
            setup_logging();
            scope(&logger().new(o!()),
                || #body
            )
        }
    };
    TokenStream::from(result)
}

94
ok_macros/src/lib.rs Normal file
View file

@@ -0,0 +1,94 @@
#![recursion_limit = "128"]
extern crate proc_macro;
extern crate syn;
#[macro_use(slog_o, slog_info, slog_log, slog_record, slog_record_static, slog_b, slog_kv)]
extern crate slog;
use darling::{Error, FromMeta};
use proc_macro::TokenStream;
use quote::{quote, ToTokens};
use syn::parse_quote;
use syn::punctuated::Punctuated;
use syn::{
parse_macro_input, spanned::Spanned, token, Expr, ExprAsync, ExprAwait, ExprBlock, ExprCall, ExprClosure,
ExprParen, FnArg, Ident, ItemFn, Meta, Pat, Result, ReturnType, Stmt, Type, TypePath,
};
mod func_transformer;
/// Attribute macro for the program entry point.
///
/// The annotated `async fn main` is renamed to `old_main`, and a fresh
/// synchronous `main` is generated that installs the panic hook, sets up
/// logging, builds a runtime via `Runtimes::setup_runtimes`, and blocks
/// on the user's future.
#[proc_macro_attribute]
pub fn main(_attr: TokenStream, item: TokenStream) -> TokenStream {
    let mut user_main = parse_macro_input!(item as ItemFn);
    // The generated wrapper drives the future to completion itself, so the
    // annotated function has to be async.
    if user_main.sig.asyncness.is_none() {
        panic!("The `main` function must be async");
    }
    // Move the user's code out of the way under a fixed internal name.
    let inner_ident = syn::Ident::new("old_main", user_main.sig.ident.span());
    user_main.sig.ident = inner_ident.clone();
    // Synthesize the real (synchronous) entry point.
    let wrapper = quote! {
        fn main() {
            set_hook(Box::new(panic_hook));
            setup_logging();
            let rt = Runtimes::setup_runtimes().unwrap();
            rt.block_on(#inner_ident())
        }
    };
    // Emit the renamed user function followed by the wrapper.
    quote! {
        #user_main
        #wrapper
    }
    .into()
}
/// Attribute macro for tests — the test-harness counterpart of `main`.
///
/// The annotated function is renamed to `old_test_<name>`, and a `#[test]`
/// wrapper carrying the original name is generated. Async tests are driven
/// on a runtime from `Runtimes::setup_runtimes`; sync tests are called
/// directly.
#[proc_macro_attribute]
pub fn test(_attr: TokenStream, item: TokenStream) -> TokenStream {
    let mut user_fn = parse_macro_input!(item as ItemFn);
    let public_ident = user_fn.sig.ident.clone();
    // Stash the user's code under a derived internal name.
    let inner_name = format!("old_test_{}", public_ident);
    let inner_ident = syn::Ident::new(inner_name.as_str(), user_fn.sig.ident.span());
    user_fn.sig.ident = inner_ident.clone();
    // Async tests need a runtime; sync tests are a plain call.
    let wrapper = if user_fn.sig.asyncness.is_some() {
        quote! {
            #[test]
            fn #public_ident() {
                // set_hook(Box::new(panic_hook));
                let rt = Runtimes::setup_runtimes().unwrap();
                rt.block_on(#inner_ident())
            }
        }
    } else {
        quote! {
            #[test]
            fn #public_ident() {
                #inner_ident()
            }
        }
    };
    // Emit the renamed user function followed by the wrapper.
    quote! {
        #user_fn
        #wrapper
    }
    .into()
}
/// Attribute macro `#[okstd::log(<level>)]`: wraps the annotated function
/// with logging setup and a slog scope (see `func_transformer`). A level
/// of `off` leaves the function untouched.
#[proc_macro_attribute]
pub fn log(args: TokenStream, input: TokenStream) -> TokenStream {
    func_transformer::transform_function(args, input)
}

11
src/e2e_tests.rs Normal file
View file

@@ -0,0 +1,11 @@
#[cfg(test)]
#[test]
fn debug() {
    // trybuild compiles (and, via `pass`, runs) the integration crate's
    // entry point; the test fails if that file stops building.
    let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap();
    let case = std::path::PathBuf::from(manifest_dir).join("integration/src/main.rs");
    let cases = trybuild::TestCases::new();
    cases.pass(case.as_path());
}

37
src/lib.rs Normal file
View file

@@ -0,0 +1,37 @@
//! okstd: conveniences for binaries — terminal logging, async runtime
//! selection, and a panic hook that prints symbolicated crashdump URLs.
pub mod log;
pub mod okasync;
pub mod notokpanic;
mod e2e_tests;
extern crate slog;

/// Everything a consumer needs after `use okstd::prelude::*;` — the async
/// runtime types, logging setup, the attribute macros' runtime support
/// (`set_hook`, `panic_hook`, `scope`, `logger`), and the slog macros.
pub mod prelude {
    pub use crate::okasync::*;
    pub use crate::log::setup_logging;
    pub use super::main;
    // re-export the slog macros
    pub use slog_scope::crit;
    pub use slog_scope::debug;
    pub use slog_scope::error;
    pub use slog_scope::info;
    pub use slog_scope::trace;
    pub use slog_scope::warn;
    pub use slog_scope::scope;
    pub use slog_scope::logger;
    pub use slog::OwnedKV;
    pub use slog::o;
    pub use slog_scope::set_global_logger;
    pub use std::panic::set_hook;
    pub use crate::notokpanic::panic_hook;
}

// Attribute macros re-exported at the crate root so they can be written
// as #[okstd::main], #[okstd::test] and #[okstd::log(...)].
pub use ok_macros::main;
pub use ok_macros::test;
pub use ok_macros::log;

77
src/log.rs Normal file
View file

@@ -0,0 +1,77 @@
use std::{io::Write, ops::Add, path::PathBuf};
use slog::{slog_o, Drain, Logger};
use termcolor::{BufferWriter, Color, ColorChoice, ColorSpec, WriteColor};
// Maps a module name to a stable ANSI colour: the name is hashed with
// CRC16 (XMODEM) so the same module always gets the same colour, and the
// result is folded onto a six-colour palette.
fn get_module_colour(module: &str) -> Color {
    const PALETTE: [Color; 6] = [
        Color::Red,
        Color::Green,
        Color::Yellow,
        Color::Blue,
        Color::Magenta,
        Color::Cyan,
    ];
    // The +5 offset just perturbs which palette slot a given name lands in.
    let hash = crc16::State::<crc16::XMODEM>::calculate(module.as_bytes()).add(5);
    PALETTE[usize::from(hash % 6)]
}
/// Creates the root logger, wired to the terminal drain from `drain()`.
pub fn setup_logging() -> Logger {
    slog::Logger::root(drain(), slog_o!())
}
// Builds the default slog drain: synchronous terminal output with a
// custom, colourised header of the form
//   dev.branch/software/ok/{module} {file}:{line} {LEVEL} {msg}
#[allow(dead_code)]
pub fn drain() -> slog::Fuse<slog_term::FullFormat<slog_term::PlainSyncDecorator<std::io::Stdout>>>
{
    let plain = slog_term::PlainSyncDecorator::new(std::io::stdout());
    let ff = slog_term::FullFormat::new(plain);
    let x = ff
        .use_custom_header_print(|_f, _t, r, _x| {
            // print format is: dev.branch/{module} {level} {msg}
            // module should be cleaned by :: -> /
            // level should be colored use termcolor
            let module = r.module().replace("::", "/");
            let level = match r.level() {
                slog::Level::Critical => termcolor::Color::Red,
                slog::Level::Error => termcolor::Color::Red,
                slog::Level::Warning => termcolor::Color::Yellow,
                slog::Level::Info => termcolor::Color::Green,
                slog::Level::Debug => termcolor::Color::Blue,
                slog::Level::Trace => termcolor::Color::Cyan,
            };
            // NOTE(review): canonicalize() unwraps, so logging panics if
            // the source path baked into the record does not exist in the
            // current working tree — confirm acceptable for release builds.
            let location_buffer = PathBuf::from(r.location().file).canonicalize().unwrap();
            let loc = location_buffer.to_str().unwrap();
            // NOTE(review): the decorator wraps stdout but the coloured
            // header goes through a stderr BufferWriter; the two streams
            // may interleave — confirm which target is intended.
            let bufwtr = BufferWriter::stderr(ColorChoice::Always);
            let mut buffer = bufwtr.buffer();
            // Module path in its per-module colour.
            let module_color = get_module_colour(&module);
            buffer.set_color(ColorSpec::new().set_fg(Some(module_color)))?;
            let _ = write!(buffer, "dev.branch/software/ok/{} ", module,);
            buffer.reset()?;
            // Dimmed, underlined file:line.
            buffer.set_color(
                ColorSpec::new()
                    .set_dimmed(true)
                    .set_underline(true)
                    .set_fg(Some(Color::White)),
            )?;
            let _ = write!(buffer, "{}:{}", loc, r.location().line);
            buffer.reset()?;
            // Level name in its intense colour, then the message plain.
            buffer.set_color(ColorSpec::new().set_fg(Some(level)).set_intense(true))?;
            let _ = write!(buffer, " {}", r.level());
            buffer.reset()?;
            let _ = write!(buffer, " {}", r.msg());
            let _ = bufwtr.print(&buffer);
            // `true` tells FullFormat the header has already been printed.
            std::result::Result::Ok(true)
        })
        .build()
        .fuse();
    x
}

209
src/notokpanic.rs Normal file
View file

@@ -0,0 +1,209 @@
use base64::{engine::general_purpose::URL_SAFE_NO_PAD, Engine};
use sourcemap::SourceMapBuilder;
use std::{backtrace as stdbt, env, panic::PanicInfo};
use symbolic::{
common::{DebugId, Uuid},
debuginfo::ObjectLike,
};
/// Checks if a function is considered to be not in-app: true when the
/// (demangled) name mentions any well-known system module prefix.
pub fn is_sys_function(func: &str) -> bool {
    for module in WELL_KNOWN_SYS_MODULES {
        if func.contains(module) {
            return true;
        }
    }
    false
}
/// Checks if a function is a well-known "border" frame — one of the frames
/// that separates panic/logging machinery from application code.
#[allow(dead_code)]
fn is_well_known(func: &str) -> bool {
    for frame in WELL_KNOWN_BORDER_FRAMES {
        if func.starts_with(frame) {
            return true;
        }
    }
    false
}
// Module prefixes treated as "system" (not in-app) by is_sys_function.
const WELL_KNOWN_SYS_MODULES: &[&str] = &[
    "std::",
    "core::",
    "alloc::",
    "backtrace::",
    "sentry::",
    "sentry_core::",
    "sentry_types::",
    // these are not modules but things like __rust_maybe_catch_panic
    "__rust_",
    "___rust_",
    // these are well-known library frames
    "anyhow::",
    "log::",
    "tokio::",
    "tracing_core::",
];

// Frame-name prefixes that mark the boundary between panic machinery and
// application code; matched by is_well_known via starts_with.
#[allow(dead_code)]
const WELL_KNOWN_BORDER_FRAMES: &[&str] = &[
    "std::panicking::begin_panic",
    "core::panicking::panic",
    // well-known library frames
    "anyhow::",
    "<sentry_log::Logger as log::Log>::log",
    "tracing_core::",
];
// i64 -> u64: the same 64 bits reinterpreted (two's complement), so no
// information is lost; round-trips exactly with the `i64` helper below.
fn u64(x: i64) -> u64 {
    u64::from_ne_bytes(x.to_ne_bytes())
}
// u64 -> i64: bit-preserving reinterpretation (the previous comment had
// the direction backwards); inverse of the `u64` helper above.
fn i64(x: u64) -> i64 {
    i64::from_ne_bytes(x.to_ne_bytes())
}
/// Splits a UUID into two i64 halves (high, low), preserving the exact
/// bit pattern of each half.
///
/// Both halves use native-endian reinterpretation so that `i64_to_uuid`
/// (which also uses native-endian) round-trips exactly. The previous
/// version used `from_be_bytes` on the high half only, which byte-swapped
/// it on little-endian targets and broke the round trip.
fn uuid_to_i64(uuid: Uuid) -> (i64, i64) {
    let (high, low) = uuid.as_u64_pair();
    (
        i64::from_ne_bytes(high.to_ne_bytes()),
        i64::from_ne_bytes(low.to_ne_bytes()),
    )
}
/// Reassembles a UUID from the two i64 halves produced by `uuid_to_i64`,
/// reinterpreting each half's bits as u64 (native-endian, lossless).
fn i64_to_uuid(high: i64, low: i64) -> Uuid {
    Uuid::from_u64_pair(
        u64::from_ne_bytes(high.to_ne_bytes()),
        u64::from_ne_bytes(low.to_ne_bytes()),
    )
}
// encode_crashdump_url generates a crashdump URL for the given addresses,
// current platform, architecture and debugId. Path layout:
//   {platform emoji}/{arch emoji}/{lowercase breakpad id}/{VLQ addresses}
// (inverse of decode_crashdump_url).
fn encode_crashdump_url(addresses: &[i64], debug_id: DebugId) -> String {
    // Platform as an emoji; unknown platforms collapse to the empty string
    // (PathBuf::push("") is a no-op, so the segment is simply omitted).
    let platform = match std::env::consts::OS {
        "linux" => "🐧",
        "bldy" => "👷",
        "oklinux" => "👌",
        // browser emoji
        "wasi" => "🌐",
        _ => "",
    };
    let arch = match std::env::consts::ARCH {
        "x86" => "✖️",
        // white chess king — a chess board has 64 squares. Restored: this
        // segment was "" but test_encode_crashdump_url expects "♔" here.
        "x86_64" => "♔",
        "aarch64" => "🦾",
        "arm" => "💪",
        "wasm32" => "🕸️",
        _ => "unknown",
    };
    // (A leftover debug `println!("Filename: ...")` was removed here.)
    // All addresses are packed into one sourcemap-style VLQ segment.
    let addrs = sourcemap::vlq::generate_vlq_segment(addresses).unwrap();
    let mut pathbuf = std::path::PathBuf::new();
    pathbuf.push(platform);
    pathbuf.push(arch);
    pathbuf.push(debug_id.breakpad().to_string().to_lowercase());
    pathbuf.push(addrs);
    format!("https://crashdu.mp/{}", pathbuf.to_str().unwrap())
}
// decode_crashdump_url decodes a crashdump URL and returns the addresses
// and debugId (inverse of encode_crashdump_url). Panics on malformed
// input, mirroring the encoder's unwraps.
fn decode_crashdump_url(url: &str) -> (Vec<i64>, DebugId) {
    let u = url::Url::parse(url).unwrap();
    // Path layout: [platform, arch, breakpad-id, vlq-addresses].
    let segments = u.path_segments().unwrap().collect::<Vec<_>>();
    // The encoder lowercases the breakpad id; parsing wants it uppercase.
    let breakpad = segments[2].to_uppercase();
    let debug_id = DebugId::from_breakpad(breakpad.as_str())
        .expect("invalid breakpad debug id in crashdump URL");
    let addrs = sourcemap::vlq::parse_vlq_segment(segments[3]).unwrap();
    (addrs, debug_id)
}
/// Panic hook: symbolicates the current backtrace against the running
/// executable's own debug info and prints a compact crashdump URL plus a
/// base64url-encoded message. Installed via `std::panic::set_hook` (see
/// the `#[okstd::main]` expansion).
pub fn panic_hook(info: &PanicInfo) {
    // Panic site (std documents location() as currently always Some).
    let location = info.location().unwrap();
    let mut builder = SourceMapBuilder::new(Some(location.file()));
    let mut addrs: Vec<i64> = Vec::new();
    // Read and parse our own binary so symbol addresses can be resolved.
    // NOTE(review): real I/O + unwraps on the panic path — best effort only.
    let data = std::fs::read(env::current_exe().unwrap()).unwrap();
    let obj = symbolic::debuginfo::Object::parse(data.as_slice()).unwrap();
    let symbols = obj.symbols().collect::<Vec<_>>();
    // Captured but currently unused.
    let _bt = stdbt::Backtrace::force_capture();
    // Linear scan for a symbol whose demangled name matches `name`.
    let finder = |name: &str| {
        symbols.iter().find_map(|symbol| {
            let nname = symbolic::demangle::demangle(symbol.name().unwrap());
            // println!("Found symbol: {}, {}", name, nname);
            if nname == name {
                Some(symbol)
            } else {
                None
            }
        })
    };
    let debug_id = obj.debug_id();
    // Walk the live stack; for each frame whose demangled name matches a
    // symbol from the debug info, record that symbol's address and name.
    backtrace::trace(|frame| {
        let _ip = frame.ip();
        let symbol_address = frame.symbol_address();
        let _addr = symbol_address as i64;
        backtrace::resolve_frame(frame, |symbol| {
            let name = symbol
                .name()
                .map_or("<unknown>", |name| name.as_str().unwrap());
            let name = symbolic::demangle::demangle(name);
            let found = finder(name.to_string().as_str());
            if let Some(symbol) = found {
                addrs.push(symbol.address.try_into().unwrap());
                let name = name.to_string();
                builder.add_name(name.as_str());
            }
            // addrs.push(addr);
        });
        true // keep going to the next frame
    });
    println!("{:?}", builder.into_sourcemap());
    // NOTE(review): downcast_ref::<&str>() misses String payloads (e.g.
    // panic!("{}", x)), which would make this unwrap panic — confirm.
    let message = info.payload().downcast_ref::<&str>().unwrap();
    let msgggg = format!("panic occurred: {} at {}", message, location);
    let url = encode_crashdump_url(&addrs, debug_id);
    // base64url-encode the human-readable message into the query string.
    let mut str = String::new();
    let _x = &URL_SAFE_NO_PAD.encode_string(msgggg, &mut str);
    println!("Crashdump URL: {}?{}", url, str);
}
#[cfg(test)]
mod tests {
    use super::*;

    /// A plain panic must still propagate through the okstd test wrapper.
    #[crate::test]
    #[should_panic]
    fn test_panic() {
        panic!("this is a panic message");
    }

    #[test]
    fn test_encode_crashdump_url() {
        // "2HwcqxB" is the VLQ segment for [123, 456, 789] — the same
        // values test_decode_crashdump_url recovers from this URL. The
        // previous input `vec![14, 02, 1988]` could never produce the
        // expected segment, so the two tests contradicted each other.
        let addresses = vec![123, 456, 789];
        let debug_id =
            DebugId::from_uuid(Uuid::parse_str("08ab7650-ed55-4006-b665-867495ba85c5").unwrap());
        let url = encode_crashdump_url(&addresses, debug_id);
        // NOTE(review): platform/arch segments assume a Linux x86_64 host.
        assert_eq!(
            url,
            "https://crashdu.mp/🐧/♔/08ab7650ed554006b665867495ba85c50/2HwcqxB"
        );
    }

    #[test]
    fn test_decode_crashdump_url() {
        let url = "https://crashdu.mp/🐧/♔/08ab7650ed554006b665867495ba85c50/2HwcqxB";
        let (addresses, debug_id) = decode_crashdump_url(url);
        assert_eq!(addresses, vec![123, 456, 789]);
        assert_eq!(
            debug_id,
            DebugId::from_uuid(Uuid::parse_str("08ab7650-ed55-4006-b665-867495ba85c5").unwrap())
        );
    }
}

74
src/okasync.rs Normal file
View file

@@ -0,0 +1,74 @@
// async_runtime.rs
use std::future::Future;
// Re-export the futures combinators consumers commonly need.
pub use futures::{
    future::{FutureExt, TryFutureExt},
    sink::{Sink, SinkExt},
    stream::{Stream, StreamExt, TryStreamExt},
};

/// Crate-local alias for `futures::io::AsyncRead`, blanket-implemented
/// for every type implementing the futures trait.
pub trait AsyncRead: futures::io::AsyncRead {}
impl<T: ?Sized + futures::io::AsyncRead> AsyncRead for T {}

/// Crate-local alias for `futures::io::AsyncWrite` (same blanket pattern).
pub trait AsyncWrite: futures::io::AsyncWrite {}
impl<T: ?Sized + futures::io::AsyncWrite> AsyncWrite for T {}

// Module-local Result alias (anyhow-based).
type Result<T> = anyhow::Result<T>;
/// Minimal executor abstraction shared by the supported runtimes.
pub trait Runtime: Unpin {
    /// Runs `future` to completion, returning its output.
    fn block_on<F: Future>(&self, future: F) -> F::Output;
    // NOTE(review): this default "spawn" does not run the future in the
    // background — it blocks the caller until completion and discards the
    // output. Confirm whether callers expect real background spawning.
    fn spawn<F>(&self, future: F)
    where
        F: Future + Send + 'static,
        F::Output: Send + 'static,
    {
        self.block_on(future);
    }
}
// Tokio-backed executor: delegates to tokio's own block_on.
impl Runtime for tokio::runtime::Runtime {
    fn block_on<F: Future>(&self, future: F) -> F::Output {
        self.block_on(future)
    }
}

// ChromeOS `cros_async` executor, only when that feature is enabled.
#[cfg(feature = "cros_async")]
impl Runtime for cros_async::Executor {
    fn block_on<F: Future>(&self, future: F) -> F::Output {
        // run_until returns a Result; failure aborts via unwrap.
        self.run_until(future).unwrap()
    }
}
/// The set of executors this crate can drive.
pub enum Runtimes {
    // Multi-threaded Tokio runtime (the default, see setup_runtimes).
    Tokio(tokio::runtime::Runtime),
    // ChromeOS async executor, behind the `cros_async` feature.
    #[cfg(feature = "cros_async")]
    Cros(cros_async::Executor),
}

impl Runtime for Runtimes {
    // Dispatches to whichever executor variant is active.
    fn block_on<F: Future>(&self, future: F) -> F::Output {
        match self {
            Runtimes::Tokio(rt) => rt.block_on(future),
            #[cfg(feature = "cros_async")]
            Runtimes::Cros(executor) => executor.block_on(future),
        }
    }
}
impl Runtimes {
    /// Builds the default runtime for this process: a multi-threaded Tokio
    /// runtime sized at 1.5x the number of logical CPUs.
    ///
    /// # Errors
    /// Returns an error when the Tokio runtime fails to build.
    pub fn setup_runtimes() -> Result<Runtimes> {
        // 1.5x the core count (integer form; num_cpus::get() >= 1, so this
        // is always >= 1). The previous `num_cpus::get() * 1.5 as usize`
        // parsed as `num_cpus::get() * (1.5 as usize)` == `* 1`, silently
        // dropping the intended oversubscription.
        let worker_count = num_cpus::get() * 3 / 2;
        if let Ok(rt) = tokio::runtime::Builder::new_multi_thread()
            .worker_threads(worker_count)
            .enable_all()
            .build()
        {
            Ok(Runtimes::Tokio(rt))
        } else {
            Err(anyhow::anyhow!("No supported runtime available"))
        }
    }
}