Mirror of https://github.com/a2x/cs2-dumper.git (synced 2025-04-05 03:35:34 +08:00)

Commit 8b1ecb7afb (parent 541f4acf1d)

Refactor code writer and fix minor things

Also added the ability to specify which files should be generated, based on their file type, e.g. `cs2-dumper.exe -f hpp,json`.
@@ -1,6 +1,6 @@
 # cs2-dumper

-An external offset/interface dumper for Counter-Strike 2, with support for both Windows & Linux.
+An external offset/interface dumper for Counter-Strike 2, with support for both Windows & Linux (soon).

 Powered by [memflow](https://github.com/memflow/memflow).

@@ -21,11 +21,12 @@ E.g. `./cs2-dumper -c pcileech -a device=fpga -vvv`

 ### Available Arguments

-- `-v...`: Increase logging verbosity. Can be specified multiple times.
 - `-c, --connector <connector>`: The name of the memflow connector to use.
-- `-a, --connector-args <connector-args>`: Additional arguments to supply to the connector.
+- `-a, --connector-args <connector-args>`: Additional arguments to pass to the connector.
+- `-f, --file-types <file-types>`: The types of files to generate. Default: `cs`, `hpp`, `json`, `rs`.
 - `-o, --output <output>`: The output directory to write the generated files to. Default: `output`.
 - `-i, --indent-size <indent-size>`: The number of spaces to use per indentation level. Default: `4`.
+- `-v...`: Increase logging verbosity. Can be specified multiple times.
 - `-h, --help`: Print help.
 - `-V, --version`: Print version.

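For example, combining the arguments documented above (an illustrative invocation, not taken from the diff): `./cs2-dumper -f hpp,json -o output -vv` would generate only the `.hpp` and `.json` files into the `output` directory with increased logging verbosity.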
@@ -10,7 +10,6 @@ use serde::{Deserialize, Serialize};
 use crate::error::{Error, Result};
 use crate::source2::KeyButton;

-/// Represents a keyboard button.
 #[derive(Debug, Deserialize, Serialize)]
 pub struct Button {
     pub name: String,
@@ -14,7 +14,6 @@ use crate::source2::InterfaceReg;

 pub type InterfaceMap = BTreeMap<String, Vec<Interface>>;

-/// Represents an exposed interface.
 #[derive(Debug, Deserialize, Serialize)]
 pub struct Interface {
     pub name: String,
@@ -3,7 +3,33 @@ pub use interfaces::*;
 pub use offsets::*;
 pub use schemas::*;

-pub mod buttons;
-pub mod interfaces;
-pub mod offsets;
-pub mod schemas;
+use memflow::prelude::v1::*;
+
+use crate::error::Result;
+
+mod buttons;
+mod interfaces;
+mod offsets;
+mod schemas;
+
+#[derive(Debug)]
+pub struct AnalysisResult {
+    pub buttons: Vec<Button>,
+    pub interfaces: InterfaceMap,
+    pub offsets: OffsetMap,
+    pub schemas: SchemaMap,
+}
+
+pub fn analyze_all(process: &mut IntoProcessInstanceArcBox<'_>) -> Result<AnalysisResult> {
+    let buttons = buttons(process)?;
+    let interfaces = interfaces(process)?;
+    let offsets = offsets(process)?;
+    let schemas = schemas(process)?;
+
+    Ok(AnalysisResult {
+        buttons,
+        interfaces,
+        offsets,
+        schemas,
+    })
+}
@@ -98,9 +98,9 @@ pattern_map! {
     "dwNetworkGameClient" => pattern!("48893d${'} 488d15") => None,
     "dwNetworkGameClient_deltaTick" => pattern!("8983u4 40b7") => None,
     "dwNetworkGameClient_getLocalPlayer" => pattern!("4883c0u1 488d0440 458b04c7") => Some(|_view, map, rva| {
-        // .text  48 83 C0 0A    add rax, 0Ah
-        // .text  48 8D 04 40    lea rax, [rax+rax*2]
-        // .text  45 8B 04 C7    mov r8d, [r15+rax*8]
+        // .text  48 83 C0 0A | add rax, 0Ah
+        // .text  48 8D 04 40 | lea rax, [rax + rax * 2]
+        // .text  45 8B 04 C7 | mov r8d, [r15 + rax * 8]
         map.insert("dwNetworkGameClient_getLocalPlayer".to_string(), (rva + (rva * 2)) * 8);
     }),
     "dwNetworkGameClient_getMaxClients" => pattern!("8b81u2?? c3cccccccccccccccccc 8b81${} ffc0") => None,
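A quick sanity check of the arithmetic in the callback above (a standalone sketch, not part of the dumper; `0x0A` is taken from the `add rax, 0Ah` disassembly comment, whereas the real callback receives `rva` from the pattern matcher):

```rust
fn main() {
    // Immediate captured by the `u1` marker in the pattern, per the comment above.
    let rva: u64 = 0x0A;
    // Mirrors `lea rax, [rax + rax * 2]` followed by the `* 8` scale in `[r15 + rax * 8]`.
    let offset = (rva + (rva * 2)) * 8;
    assert_eq!(offset, 0xF0);
    println!("computed offset: {:#X}", offset);
}
```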
@@ -15,14 +15,14 @@ use crate::source2::*;

 pub type SchemaMap = BTreeMap<String, (Vec<Class>, Vec<Enum>)>;

-#[derive(Clone, Deserialize, Serialize)]
+#[derive(Clone, Debug, Deserialize, Serialize)]
 pub enum ClassMetadata {
     Unknown { name: String },
     NetworkChangeCallback { name: String },
     NetworkVarNames { name: String, type_name: String },
 }

-#[derive(Clone, Deserialize, Serialize)]
+#[derive(Clone, Debug, Deserialize, Serialize)]
 pub struct Class {
     pub name: String,
     pub module_name: String,
@@ -31,14 +31,14 @@ pub struct Class {
     pub fields: Vec<ClassField>,
 }

-#[derive(Clone, Deserialize, Serialize)]
+#[derive(Clone, Debug, Deserialize, Serialize)]
 pub struct ClassField {
     pub name: String,
     pub type_name: String,
     pub offset: i32,
 }

-#[derive(Clone, Deserialize, Serialize)]
+#[derive(Clone, Debug, Deserialize, Serialize)]
 pub struct Enum {
     pub name: String,
     pub alignment: u8,
@@ -46,13 +46,13 @@ pub struct Enum {
     pub members: Vec<EnumMember>,
 }

-#[derive(Clone, Deserialize, Serialize)]
+#[derive(Clone, Debug, Deserialize, Serialize)]
 pub struct EnumMember {
     pub name: String,
     pub value: i64,
 }

-#[derive(Clone, Deserialize, Serialize)]
+#[derive(Clone, Debug, Deserialize, Serialize)]
 pub struct TypeScope {
     pub module_name: String,
     pub classes: Vec<Class>,
@@ -63,7 +63,7 @@ pub fn schemas(process: &mut IntoProcessInstanceArcBox<'_>) -> Result<SchemaMap>
     let schema_system = read_schema_system(process)?;
     let type_scopes = read_type_scopes(process, &schema_system)?;

-    let map: BTreeMap<_, _> = type_scopes
+    let map = type_scopes
         .into_iter()
         .map(|type_scope| {
             (
@@ -134,10 +134,10 @@ fn read_class_binding_fields(
         }

         let name = field.name.read_string(process)?.to_string();
-        let type_ = field.schema_type.read(process)?;
+        let schema_type = field.schema_type.read(process)?;

         // TODO: Parse this properly.
-        let type_name = type_.name.read_string(process)?.replace(" ", "");
+        let type_name = schema_type.name.read_string(process)?.replace(" ", "");

         acc.push(ClassField {
             name,
src/main.rs
@@ -1,4 +1,3 @@
-use std::env;
 use std::path::PathBuf;
 use std::time::Instant;

@@ -11,7 +10,7 @@ use memflow::prelude::v1::*;
 use simplelog::{ColorChoice, TermLogger};

 use error::Result;
-use output::Results;
+use output::Output;

 mod analysis;
 mod error;
@@ -22,10 +21,10 @@ mod source2;
 const PROCESS_NAME: &str = "cs2.exe";

 fn main() -> Result<()> {
-    let start_time = Instant::now();
+    let now = Instant::now();

     let matches = parse_args();
-    let (conn_name, conn_args, indent_size, out_dir) = extract_args(&matches)?;
+    let (conn_name, conn_args, file_types, indent_size, out_dir) = extract_args(&matches)?;

     let os = if let Some(conn_name) = conn_name {
         let inventory = Inventory::scan();
@@ -37,22 +36,18 @@ fn main() -> Result<()> {
             .os("win32")
             .build()?
     } else {
-        // Fallback to the native OS layer if no connector name was provided.
+        // Fallback to the native OS layer if no connector name was specified.
         memflow_native::create_os(&Default::default(), Default::default())?
     };

     let mut process = os.into_process_by_name(PROCESS_NAME)?;

-    let buttons = analysis::buttons(&mut process)?;
-    let interfaces = analysis::interfaces(&mut process)?;
-    let offsets = analysis::offsets(&mut process)?;
-    let schemas = analysis::schemas(&mut process)?;
-
-    let results = Results::new(buttons, interfaces, offsets, schemas);
-
-    results.dump_all(&mut process, &out_dir, indent_size)?;
-
-    info!("finished in {:?}", start_time.elapsed());
+    let result = analysis::analyze_all(&mut process)?;
+    let output = Output::new(&file_types, indent_size, &out_dir, &result)?;
+
+    output.dump_all(&mut process)?;
+
+    info!("finished in {:?}", now.elapsed());

     Ok(())
 }
@@ -61,12 +56,6 @@ fn parse_args() -> ArgMatches {
     Command::new("cs2-dumper")
         .version(crate_version!())
         .author(crate_authors!())
-        .arg(
-            Arg::new("verbose")
-                .help("Increase logging verbosity. Can be specified multiple times.")
-                .short('v')
-                .action(ArgAction::Count),
-        )
         .arg(
             Arg::new("connector")
                 .help("The name of the memflow connector to use.")
@@ -76,11 +65,31 @@ fn parse_args() -> ArgMatches {
         )
         .arg(
             Arg::new("connector-args")
-                .help("Additional arguments to supply to the connector.")
+                .help("Additional arguments to pass to the connector.")
                 .long("connector-args")
                 .short('a')
                 .required(false),
         )
+        .arg(
+            Arg::new("file-types")
+                .help("The types of files to generate.")
+                .long("file-types")
+                .short('f')
+                .action(ArgAction::Append)
+                .default_values(["cs", "hpp", "json", "rs"])
+                .value_parser(["cs", "hpp", "json", "rs"])
+                .value_delimiter(',')
+                .required(false),
+        )
+        .arg(
+            Arg::new("indent-size")
+                .help("The number of spaces to use per indentation level.")
+                .long("indent-size")
+                .short('i')
+                .default_value("4")
+                .value_parser(value_parser!(usize))
+                .required(false),
+        )
         .arg(
             Arg::new("output")
                 .help("The output directory to write the generated files to.")
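Note (an inference about the clap builder API used here, not stated in the diff): because `file-types` combines `ArgAction::Append` with `value_delimiter(',')`, both `-f hpp,json` and `-f hpp -f json` produce the same list, and `value_parser([...])` rejects anything other than the four supported extensions.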
@@ -91,18 +100,17 @@ fn parse_args() -> ArgMatches {
                 .required(false),
         )
         .arg(
-            Arg::new("indent-size")
-                .help("The number of spaces to use per indentation level.")
-                .long("indent-size")
-                .short('i')
-                .default_value("4")
-                .value_parser(value_parser!(usize))
-                .required(false),
+            Arg::new("verbose")
+                .help("Increase logging verbosity. Can be specified multiple times.")
+                .short('v')
+                .action(ArgAction::Count),
         )
         .get_matches()
 }

-fn extract_args(matches: &ArgMatches) -> Result<(Option<String>, ConnectorArgs, usize, &PathBuf)> {
+fn extract_args(
+    matches: &ArgMatches,
+) -> Result<(Option<String>, ConnectorArgs, Vec<String>, usize, &PathBuf)> {
     use std::str::FromStr;

     let log_level = match matches.get_count("verbose") {
@@ -131,8 +139,14 @@ fn extract_args(matches: &ArgMatches) -> Result<(Option<String>, ConnectorArgs,
         .map(|s| ConnectorArgs::from_str(&s).expect("unable to parse connector arguments"))
         .unwrap_or_default();

+    let file_types = matches
+        .get_many::<String>("file-types")
+        .unwrap()
+        .map(|s| s.to_string())
+        .collect();
+
     let indent_size = *matches.get_one::<usize>("indent-size").unwrap();
     let out_dir = matches.get_one::<PathBuf>("output").unwrap();

-    Ok((conn_name, conn_args, indent_size, out_dir))
+    Ok((conn_name, conn_args, file_types, indent_size, out_dir))
 }
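Presumably the `unwrap()` on `get_many::<String>("file-types")` cannot panic here, since the argument is declared with `default_values`, so clap always supplies at least the default list.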
@@ -1,13 +1,10 @@
 use std::collections::BTreeMap;
-use std::fmt::Write;
+use std::fmt::{self, Write};

-use super::{Button, CodeGen, Results};
+use super::{Button, CodeWriter, Formatter};

-use crate::error::Result;
-
-impl CodeGen for Vec<Button> {
-    fn to_cs(&self, results: &Results, indent_size: usize) -> Result<String> {
-        self.write_content(results, indent_size, |fmt| {
+impl CodeWriter for Vec<Button> {
+    fn write_cs(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
         fmt.block("namespace CS2Dumper", false, |fmt| {
             writeln!(fmt, "// Module: client.dll")?;

@@ -22,14 +19,10 @@ impl CodeGen for Vec<Button> {

                 Ok(())
             })
-        })?;
-
-        Ok(())
         })
     }

-    fn to_hpp(&self, results: &Results, indent_size: usize) -> Result<String> {
-        self.write_content(results, indent_size, |fmt| {
+    fn write_hpp(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
         writeln!(fmt, "#pragma once\n")?;
         writeln!(fmt, "#include <cstddef>\n")?;

@@ -47,13 +40,10 @@ impl CodeGen for Vec<Button> {

                 Ok(())
             })
-        })?;
-
-        Ok(())
         })
     }

-    fn to_json(&self, _results: &Results, _indent_size: usize) -> Result<String> {
+    fn write_json(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
         let content = {
             let buttons: BTreeMap<_, _> = self
                 .iter()
@@ -63,15 +53,11 @@ impl CodeGen for Vec<Button> {
             BTreeMap::from_iter([("client.dll", buttons)])
         };

-        serde_json::to_string_pretty(&content).map_err(Into::into)
+        fmt.write_str(&serde_json::to_string_pretty(&content).expect("failed to serialize"))
     }

-    fn to_rs(&self, results: &Results, indent_size: usize) -> Result<String> {
-        self.write_content(results, indent_size, |fmt| {
-            writeln!(
-                fmt,
-                "#![allow(non_upper_case_globals, non_camel_case_types, unused)]\n"
-            )?;
+    fn write_rs(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
+        writeln!(fmt, "#![allow(non_upper_case_globals, unused)]\n")?;

         fmt.block("pub mod cs2_dumper", false, |fmt| {
             writeln!(fmt, "// Module: client.dll")?;
@@ -89,9 +75,6 @@ impl CodeGen for Vec<Button> {

                 Ok(())
             })
-        })?;
-
-        Ok(())
         })
     }
 }
@@ -1,15 +1,9 @@
 use std::fmt::{self, Write};

-#[derive(Debug)]
 pub struct Formatter<'a> {
-    /// Write destination.
-    pub out: &'a mut String,
-
-    /// Number of spaces per indentation level.
-    pub indent_size: usize,
-
-    /// Current indentation level.
-    pub indent_level: usize,
+    out: &'a mut String,
+    indent_size: usize,
+    indent_level: usize,
 }

 impl<'a> Formatter<'a> {
@@ -34,17 +28,17 @@ impl<'a> Formatter<'a> {
         Ok(())
     }

-    pub fn indent<F, R>(&mut self, f: F) -> R
+    pub fn indent<F>(&mut self, f: F) -> fmt::Result
     where
-        F: FnOnce(&mut Self) -> R,
+        F: FnOnce(&mut Self) -> fmt::Result,
     {
         self.indent_level += 1;

-        let ret = f(self);
+        f(self)?;

         self.indent_level -= 1;

-        ret
+        Ok(())
     }

     #[inline]
@@ -1,24 +1,18 @@
 use std::collections::BTreeMap;
-use std::fmt::Write;
+use std::fmt::{self, Write};

 use heck::{AsPascalCase, AsSnakeCase};

-use super::{CodeGen, InterfaceMap, Results};
+use super::{slugify, CodeWriter, Formatter, InterfaceMap};

-use crate::error::Result;
-
-impl CodeGen for InterfaceMap {
-    fn to_cs(&self, results: &Results, indent_size: usize) -> Result<String> {
-        self.write_content(results, indent_size, |fmt| {
+impl CodeWriter for InterfaceMap {
+    fn write_cs(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
         fmt.block("namespace CS2Dumper.Interfaces", false, |fmt| {
             for (module_name, ifaces) in self {
                 writeln!(fmt, "// Module: {}", module_name)?;

                 fmt.block(
-                    &format!(
-                        "public static class {}",
-                        AsPascalCase(Self::slugify(module_name))
-                    ),
+                    &format!("public static class {}", AsPascalCase(slugify(module_name))),
                     false,
                     |fmt| {
                         for iface in ifaces {
@@ -34,15 +28,11 @@ impl CodeGen for InterfaceMap {
                             )?;
                         }

-                        Ok(())
-                    })?;
-
                         Ok(())
                     })
             }

-    fn to_hpp(&self, results: &Results, indent_size: usize) -> Result<String> {
-        self.write_content(results, indent_size, |fmt| {
+    fn write_hpp(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
         writeln!(fmt, "#pragma once\n")?;
         writeln!(fmt, "#include <cstddef>\n")?;

@@ -52,7 +42,7 @@ impl CodeGen for InterfaceMap {
             writeln!(fmt, "// Module: {}", module_name)?;

             fmt.block(
-                &format!("namespace {}", AsSnakeCase(Self::slugify(module_name))),
+                &format!("namespace {}", AsSnakeCase(slugify(module_name))),
                 false,
                 |fmt| {
                     for iface in ifaces {
@@ -70,13 +60,10 @@ impl CodeGen for InterfaceMap {

                     Ok(())
                 })
-            })?;
-
-            Ok(())
             })
         }

-    fn to_json(&self, _results: &Results, _indent_size: usize) -> Result<String> {
+    fn write_json(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
         let content: BTreeMap<_, _> = self
             .iter()
             .map(|(module_name, ifaces)| {
@@ -89,15 +76,11 @@ impl CodeGen for InterfaceMap {
             })
             .collect();

-        serde_json::to_string_pretty(&content).map_err(Into::into)
+        fmt.write_str(&serde_json::to_string_pretty(&content).expect("failed to serialize"))
     }

-    fn to_rs(&self, results: &Results, indent_size: usize) -> Result<String> {
-        self.write_content(results, indent_size, |fmt| {
-            writeln!(
-                fmt,
-                "#![allow(non_upper_case_globals, non_camel_case_types, unused)]\n"
-            )?;
+    fn write_rs(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
+        writeln!(fmt, "#![allow(non_upper_case_globals, unused)]\n")?;

         fmt.block("pub mod cs2_dumper", false, |fmt| {
             fmt.block("pub mod interfaces", false, |fmt| {
@@ -105,7 +88,7 @@ impl CodeGen for InterfaceMap {
                 writeln!(fmt, "// Module: {}", module_name)?;

                 fmt.block(
-                    &format!("pub mod {}", AsSnakeCase(Self::slugify(module_name))),
+                    &format!("pub mod {}", AsSnakeCase(slugify(module_name))),
                     false,
                     |fmt| {
                         for iface in ifaces {
@@ -123,9 +106,6 @@ impl CodeGen for InterfaceMap {

                     Ok(())
                 })
-            })?;
-
-            Ok(())
             })
         }
 }
@@ -1,4 +1,4 @@
-use std::fmt::Write;
+use std::fmt::{self, Write};
 use std::fs;
 use std::path::Path;

@@ -6,7 +6,7 @@ use chrono::{DateTime, Utc};

 use memflow::prelude::v1::*;

-use serde::{Deserialize, Serialize};
+use serde::Serialize;
 use serde_json::json;

 use formatter::Formatter;
@@ -30,216 +30,160 @@ enum Item<'a> {
 }

 impl<'a> Item<'a> {
-    fn generate(&self, results: &Results, indent_size: usize, file_ext: &str) -> Result<String> {
+    fn write(&self, fmt: &mut Formatter<'a>, file_ext: &str) -> fmt::Result {
         match file_ext {
-            "cs" => self.to_cs(results, indent_size),
-            "hpp" => self.to_hpp(results, indent_size),
-            "json" => self.to_json(results, indent_size),
-            "rs" => self.to_rs(results, indent_size),
-            _ => unreachable!(),
+            "cs" => self.write_cs(fmt),
+            "hpp" => self.write_hpp(fmt),
+            "json" => self.write_json(fmt),
+            "rs" => self.write_rs(fmt),
+            _ => unimplemented!(),
         }
     }
 }

-trait CodeGen {
-    fn to_cs(&self, results: &Results, indent_size: usize) -> Result<String>;
+trait CodeWriter {
+    fn write_cs(&self, fmt: &mut Formatter<'_>) -> fmt::Result;

-    fn to_hpp(&self, results: &Results, indent_size: usize) -> Result<String>;
+    fn write_hpp(&self, fmt: &mut Formatter<'_>) -> fmt::Result;

-    fn to_json(&self, results: &Results, indent_size: usize) -> Result<String>;
+    fn write_json(&self, fmt: &mut Formatter<'_>) -> fmt::Result;

-    fn to_rs(&self, results: &Results, indent_size: usize) -> Result<String>;
-
-    /// Replaces non-alphanumeric characters in a string with underscores.
-    #[inline]
-    fn slugify(input: &str) -> String {
-        input.replace(|c: char| !c.is_alphanumeric(), "_")
-    }
-
-    fn write_content<F>(&self, results: &Results, indent_size: usize, f: F) -> Result<String>
-    where
-        F: FnOnce(&mut Formatter<'_>) -> Result<()>,
-    {
-        let mut buf = String::new();
-        let mut fmt = Formatter::new(&mut buf, indent_size);
-
-        results.write_banner(&mut fmt)?;
-
-        f(&mut fmt)?;
-
-        Ok(buf)
-    }
+    fn write_rs(&self, fmt: &mut Formatter<'_>) -> fmt::Result;
 }

-impl<'a> CodeGen for Item<'a> {
-    fn to_cs(&self, results: &Results, indent_size: usize) -> Result<String> {
+impl<'a> CodeWriter for Item<'a> {
+    fn write_cs(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
         match self {
-            Item::Buttons(buttons) => buttons.to_cs(results, indent_size),
-            Item::Interfaces(ifaces) => ifaces.to_cs(results, indent_size),
-            Item::Offsets(offsets) => offsets.to_cs(results, indent_size),
-            Item::Schemas(schemas) => schemas.to_cs(results, indent_size),
+            Item::Buttons(buttons) => buttons.write_cs(fmt),
+            Item::Interfaces(ifaces) => ifaces.write_cs(fmt),
+            Item::Offsets(offsets) => offsets.write_cs(fmt),
+            Item::Schemas(schemas) => schemas.write_cs(fmt),
         }
     }

-    fn to_hpp(&self, results: &Results, indent_size: usize) -> Result<String> {
+    fn write_hpp(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
         match self {
-            Item::Buttons(buttons) => buttons.to_hpp(results, indent_size),
-            Item::Interfaces(ifaces) => ifaces.to_hpp(results, indent_size),
-            Item::Offsets(offsets) => offsets.to_hpp(results, indent_size),
-            Item::Schemas(schemas) => schemas.to_hpp(results, indent_size),
+            Item::Buttons(buttons) => buttons.write_hpp(fmt),
+            Item::Interfaces(ifaces) => ifaces.write_hpp(fmt),
+            Item::Offsets(offsets) => offsets.write_hpp(fmt),
+            Item::Schemas(schemas) => schemas.write_hpp(fmt),
         }
     }

-    fn to_json(&self, results: &Results, indent_size: usize) -> Result<String> {
+    fn write_json(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
         match self {
-            Item::Buttons(buttons) => buttons.to_json(results, indent_size),
-            Item::Interfaces(ifaces) => ifaces.to_json(results, indent_size),
-            Item::Offsets(offsets) => offsets.to_json(results, indent_size),
-            Item::Schemas(schemas) => schemas.to_json(results, indent_size),
+            Item::Buttons(buttons) => buttons.write_json(fmt),
+            Item::Interfaces(ifaces) => ifaces.write_json(fmt),
+            Item::Offsets(offsets) => offsets.write_json(fmt),
+            Item::Schemas(schemas) => schemas.write_json(fmt),
         }
     }

-    fn to_rs(&self, results: &Results, indent_size: usize) -> Result<String> {
+    fn write_rs(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
         match self {
-            Item::Buttons(buttons) => buttons.to_rs(results, indent_size),
-            Item::Interfaces(ifaces) => ifaces.to_rs(results, indent_size),
-            Item::Offsets(offsets) => offsets.to_rs(results, indent_size),
-            Item::Schemas(schemas) => schemas.to_rs(results, indent_size),
+            Item::Buttons(buttons) => buttons.write_rs(fmt),
+            Item::Interfaces(ifaces) => ifaces.write_rs(fmt),
+            Item::Offsets(offsets) => offsets.write_rs(fmt),
+            Item::Schemas(schemas) => schemas.write_rs(fmt),
         }
     }
 }

-#[derive(Deserialize, Serialize)]
-pub struct Results {
-    /// Timestamp of the dump.
-    pub timestamp: DateTime<Utc>,
-
-    /// List of buttons to dump.
-    pub buttons: Vec<Button>,
-
-    /// Map of interfaces to dump.
-    pub interfaces: InterfaceMap,
-
-    /// Map of offsets to dump.
-    pub offsets: OffsetMap,
-
-    /// Map of schema classes/enums to dump.
-    pub schemas: SchemaMap,
-}
-
-impl Results {
-    pub fn new(
-        buttons: Vec<Button>,
-        interfaces: InterfaceMap,
-        offsets: OffsetMap,
-        schemas: SchemaMap,
-    ) -> Self {
-        Self {
-            timestamp: Utc::now(),
-            buttons,
-            interfaces,
-            offsets,
-            schemas,
-        }
-    }
-
-    pub fn dump_all<P: AsRef<Path>>(
-        &self,
-        process: &mut IntoProcessInstanceArcBox<'_>,
-        out_dir: P,
-        indent_size: usize,
-    ) -> Result<()> {
-        // TODO: Make this user-configurable.
-        const FILE_EXTS: &[&str] = &["cs", "hpp", "json", "rs"];
-
+pub struct Output<'a> {
+    file_types: &'a Vec<String>,
+    indent_size: usize,
+    out_dir: &'a Path,
+    result: &'a AnalysisResult,
+    timestamp: DateTime<Utc>,
+}
+
+impl<'a> Output<'a> {
+    pub fn new(
+        file_types: &'a Vec<String>,
+        indent_size: usize,
+        out_dir: &'a Path,
+        result: &'a AnalysisResult,
+    ) -> Result<Self> {
         fs::create_dir_all(&out_dir)?;

+        Ok(Self {
+            file_types,
+            indent_size,
+            out_dir,
+            result,
+            timestamp: Utc::now(),
+        })
+    }
+
+    pub fn dump_all(&self, process: &mut IntoProcessInstanceArcBox<'_>) -> Result<()> {
         let items = [
-            ("buttons", Item::Buttons(&self.buttons)),
-            ("interfaces", Item::Interfaces(&self.interfaces)),
-            ("offsets", Item::Offsets(&self.offsets)),
+            ("buttons", Item::Buttons(&self.result.buttons)),
+            ("interfaces", Item::Interfaces(&self.result.interfaces)),
+            ("offsets", Item::Offsets(&self.result.offsets)),
         ];

         for (file_name, item) in &items {
-            self.dump_item(item, &out_dir, indent_size, FILE_EXTS, file_name)?;
+            self.dump_item(file_name, item)?;
         }

-        self.dump_info(process, &out_dir)?;
-        self.dump_schemas(&out_dir, indent_size, FILE_EXTS)?;
+        self.dump_schemas()?;
+        self.dump_info(process)?;

         Ok(())
     }

-    fn dump_file<P: AsRef<Path>>(
-        &self,
-        out_dir: P,
-        file_name: &str,
-        file_ext: &str,
-        content: &str,
-    ) -> Result<()> {
-        let file_path = out_dir.as_ref().join(format!("{}.{}", file_name, file_ext));
-
-        fs::write(&file_path, content)?;
-
-        Ok(())
-    }
-
-    fn dump_item<P: AsRef<Path>>(
-        &self,
-        item: &Item,
-        out_dir: P,
-        indent_size: usize,
-        file_exts: &[&str],
-        file_name: &str,
-    ) -> Result<()> {
-        for ext in file_exts {
-            let content = item.generate(self, indent_size, ext)?;
-
-            self.dump_file(&out_dir, file_name, ext, &content)?;
-        }
-
-        Ok(())
-    }
-
-    fn dump_info<P: AsRef<Path>>(
-        &self,
-        process: &mut IntoProcessInstanceArcBox<'_>,
-        out_dir: P,
-    ) -> Result<()> {
-        let content = &serde_json::to_string_pretty(&json!({
-            "timestamp": self.timestamp.to_rfc3339(),
-            "build_number": self.read_build_number(process).unwrap_or(0),
-        }))?;
-
-        self.dump_file(&out_dir, "info", "json", &content)
-    }
-
-    fn dump_schemas<P: AsRef<Path>>(
-        &self,
-        out_dir: P,
-        indent_size: usize,
-        file_exts: &[&str],
-    ) -> Result<()> {
-        for (module_name, (classes, enums)) in &self.schemas {
-            let map = SchemaMap::from([(module_name.clone(), (classes.clone(), enums.clone()))]);
-            let item = Item::Schemas(&map);
-
-            self.dump_item(&item, &out_dir, indent_size, file_exts, &module_name)?;
-        }
-
-        Ok(())
-    }
-
-    fn read_build_number(&self, process: &mut IntoProcessInstanceArcBox<'_>) -> Result<u32> {
-        self.offsets
+    fn dump_info(&self, process: &mut IntoProcessInstanceArcBox<'_>) -> Result<()> {
+        let file_path = self.out_dir.join("info.json");
+
+        let build_number = self
+            .result
+            .offsets
             .iter()
             .find_map(|(module_name, offsets)| {
                 let module = process.module_by_name(module_name).ok()?;
-                let offset = offsets.iter().find(|(name, _)| *name == "dwBuildNumber")?;
+                let offset = offsets.iter().find(|(name, _)| *name == "dwBuildNumber")?.1;

-                process.read(module.base + offset.1).ok()
+                process.read::<u32>(module.base + offset).ok()
             })
-            .ok_or(Error::Other("unable to read build number"))
+            .ok_or(Error::Other("unable to read build number"))?;
+
+        let content = serde_json::to_string_pretty(&json!({
+            "timestamp": self.timestamp.to_rfc3339(),
+            "build_number": build_number,
+        }))?;
+
+        fs::write(&file_path, &content)?;
+
+        Ok(())
+    }
+
+    fn dump_item(&self, file_name: &str, item: &Item) -> Result<()> {
+        for file_type in self.file_types {
+            let mut out = String::new();
+            let mut fmt = Formatter::new(&mut out, self.indent_size);
+
+            if file_type != "json" {
+                self.write_banner(&mut fmt)?;
+            }
+
+            item.write(&mut fmt, file_type)?;
+
+            let file_path = self.out_dir.join(format!("{}.{}", file_name, file_type));
+
+            fs::write(&file_path, out)?;
+        }
+
+        Ok(())
+    }
+
+    fn dump_schemas(&self) -> Result<()> {
+        for (module_name, (classes, enums)) in &self.result.schemas {
+            let map = SchemaMap::from([(module_name.clone(), (classes.clone(), enums.clone()))]);
+
+            self.dump_item(module_name, &Item::Schemas(&map))?;
+        }
+
+        Ok(())
     }

     fn write_banner(&self, fmt: &mut Formatter<'_>) -> Result<()> {
@@ -249,3 +193,7 @@ impl Results {
         Ok(())
     }
 }
+
+fn slugify(input: &str) -> String {
+    input.replace(|c: char| !c.is_alphanumeric(), "_")
+}
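The `slugify` helper moved out of the old `CodeGen` trait into the free function added above; a standalone sketch of its behavior (the function body is copied from the diff, the sample inputs are illustrative):

```rust
fn slugify(input: &str) -> String {
    input.replace(|c: char| !c.is_alphanumeric(), "_")
}

fn main() {
    // Module names become valid identifiers in the generated C#/C++/Rust output.
    assert_eq!(slugify("client.dll"), "client_dll");
    assert_eq!(slugify("libclient.so"), "libclient_so");
}
```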
@@ -1,23 +1,17 @@
-use std::fmt::Write;
+use std::fmt::{self, Write};

 use heck::{AsPascalCase, AsSnakeCase};

-use super::{CodeGen, OffsetMap, Results};
+use super::{slugify, CodeWriter, Formatter, OffsetMap};

-use crate::error::Result;
-
-impl CodeGen for OffsetMap {
-    fn to_cs(&self, results: &Results, indent_size: usize) -> Result<String> {
-        self.write_content(results, indent_size, |fmt| {
+impl CodeWriter for OffsetMap {
+    fn write_cs(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
         fmt.block("namespace CS2Dumper.Offsets", false, |fmt| {
             for (module_name, offsets) in self {
                 writeln!(fmt, "// Module: {}", module_name)?;

                 fmt.block(
-                    &format!(
-                        "public static class {}",
-                        AsPascalCase(Self::slugify(module_name))
-                    ),
+                    &format!("public static class {}", AsPascalCase(slugify(module_name))),
                     false,
                     |fmt| {
                         for (name, value) in offsets {
@@ -29,15 +23,11 @@ impl CodeGen for OffsetMap {
                             )?;
                         }

-                        Ok(())
-                    })?;
-
                         Ok(())
                     })
             }

-    fn to_hpp(&self, results: &Results, indent_size: usize) -> Result<String> {
-        self.write_content(results, indent_size, |fmt| {
+    fn write_hpp(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
         writeln!(fmt, "#pragma once\n")?;
         writeln!(fmt, "#include <cstddef>\n")?;

@@ -47,15 +37,11 @@ impl CodeGen for OffsetMap {
             writeln!(fmt, "// Module: {}", module_name)?;

             fmt.block(
-                &format!("namespace {}", AsSnakeCase(Self::slugify(module_name))),
+                &format!("namespace {}", AsSnakeCase(slugify(module_name))),
                 false,
                 |fmt| {
                     for (name, value) in offsets {
-                        writeln!(
-                            fmt,
-                            "constexpr std::ptrdiff_t {} = {:#X};",
-                            name, value
-                        )?;
+                        writeln!(fmt, "constexpr std::ptrdiff_t {} = {:#X};", name, value)?;
                     }

                     Ok(())
@@ -65,22 +51,15 @@ impl CodeGen for OffsetMap {

                     Ok(())
                 })
-            })?;
-
-            Ok(())
             })
         }

-    fn to_json(&self, _results: &Results, _indent_size: usize) -> Result<String> {
-        serde_json::to_string_pretty(self).map_err(Into::into)
+    fn write_json(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
+        fmt.write_str(&serde_json::to_string_pretty(self).expect("failed to serialize"))
     }

-    fn to_rs(&self, results: &Results, indent_size: usize) -> Result<String> {
-        self.write_content(results, indent_size, |fmt| {
-            writeln!(
-                fmt,
-                "#![allow(non_upper_case_globals, non_camel_case_types, unused)]\n"
-            )?;
+    fn write_rs(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
+        writeln!(fmt, "#![allow(non_upper_case_globals, unused)]\n")?;

         fmt.block("pub mod cs2_dumper", false, |fmt| {
             fmt.block("pub mod offsets", false, |fmt| {
@@ -88,7 +67,7 @@ impl CodeGen for OffsetMap {
                 writeln!(fmt, "// Module: {}", module_name)?;

                 fmt.block(
-                    &format!("pub mod {}", AsSnakeCase(Self::slugify(module_name))),
+                    &format!("pub mod {}", AsSnakeCase(slugify(module_name))),
                     false,
                     |fmt| {
                         for (name, value) in offsets {
@@ -102,9 +81,6 @@ impl CodeGen for OffsetMap {

                     Ok(())
                 })
-            })?;
-
-            Ok(())
             })
         }
 }
@@ -5,14 +5,12 @@ use heck::{AsPascalCase, AsSnakeCase};

 use serde_json::json;

-use super::{CodeGen, Formatter, Results, SchemaMap};
+use super::{slugify, CodeWriter, Formatter, SchemaMap};

 use crate::analysis::ClassMetadata;
-use crate::error::Result;

-impl CodeGen for SchemaMap {
-    fn to_cs(&self, results: &Results, indent_size: usize) -> Result<String> {
-        self.write_content(results, indent_size, |fmt| {
+impl CodeWriter for SchemaMap {
+    fn write_cs(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
         fmt.block("namespace CS2Dumper.Schemas", false, |fmt| {
             for (module_name, (classes, enums)) in self {
                 writeln!(fmt, "// Module: {}", module_name)?;
@@ -20,10 +18,7 @@ impl CodeGen for SchemaMap {
                 writeln!(fmt, "// Enums count: {}", enums.len())?;

                 fmt.block(
-                    &format!(
-                        "public static class {}",
-                        AsPascalCase(Self::slugify(module_name))
-                    ),
+                    &format!("public static class {}", AsPascalCase(slugify(module_name))),
                     false,
                     |fmt| {
                         for enum_ in enums {
@@ -39,11 +34,7 @@ impl CodeGen for SchemaMap {
                             writeln!(fmt, "// Members count: {}", enum_.size)?;

                             fmt.block(
-                                &format!(
-                                    "public enum {} : {}",
-                                    Self::slugify(&enum_.name),
-                                    type_name
-                                ),
+                                &format!("public enum {} : {}", slugify(&enum_.name), type_name),
                                 false,
                                 |fmt| {
                                     // TODO: Handle the case where multiple members share
@@ -66,7 +57,7 @@ impl CodeGen for SchemaMap {
                             let parent_name = class
                                 .parent
                                 .as_ref()
-                                .map(|parent| Self::slugify(&parent.name))
+                                .map(|parent| slugify(&parent.name))
                                 .unwrap_or_else(|| "None".to_string());

                             writeln!(fmt, "// Parent: {}", parent_name)?;
@@ -75,7 +66,7 @@ impl CodeGen for SchemaMap {
                             write_metadata(fmt, &class.metadata)?;

                             fmt.block(
-                                &format!("public static class {}", Self::slugify(&class.name)),
+                                &format!("public static class {}", slugify(&class.name)),
                                 false,
                                 |fmt| {
                                     for field in &class.fields {
@@ -96,15 +87,11 @@ impl CodeGen for SchemaMap {
                                     )?;
                                 }

-                                Ok(())
-                            })?;
-
                                 Ok(())
                             })
                         }

-    fn to_hpp(&self, results: &Results, indent_size: usize) -> Result<String> {
-        self.write_content(results, indent_size, |fmt| {
+    fn write_hpp(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
         writeln!(fmt, "#pragma once\n")?;
         writeln!(fmt, "#include <cstddef>\n")?;

@@ -116,7 +103,7 @@ impl CodeGen for SchemaMap {
             writeln!(fmt, "// Enums count: {}", enums.len())?;

             fmt.block(
-                &format!("namespace {}", AsSnakeCase(Self::slugify(module_name))),
+                &format!("namespace {}", AsSnakeCase(slugify(module_name))),
                 false,
                 |fmt| {
                     for enum_ in enums {
@@ -132,11 +119,7 @@ impl CodeGen for SchemaMap {
                         writeln!(fmt, "// Members count: {}", enum_.size)?;

                         fmt.block(
-                            &format!(
-                                "enum class {} : {}",
-                                Self::slugify(&enum_.name),
-                                type_name
-                            ),
+                            &format!("enum class {} : {}", slugify(&enum_.name), type_name),
                             true,
                             |fmt| {
                                 // TODO: Handle the case where multiple members share
@@ -159,7 +142,7 @@ impl CodeGen for SchemaMap {
                         let parent_name = class
                             .parent
                             .as_ref()
-                            .map(|parent| Self::slugify(&parent.name))
+                            .map(|parent| slugify(&parent.name))
                             .unwrap_or_else(|| "None".to_string());

                         writeln!(fmt, "// Parent: {}", parent_name)?;
@@ -168,7 +151,7 @@ impl CodeGen for SchemaMap {
                         write_metadata(fmt, &class.metadata)?;

                         fmt.block(
-                            &format!("namespace {}", Self::slugify(&class.name)),
+                            &format!("namespace {}", slugify(&class.name)),
                             false,
                             |fmt| {
                                 for field in &class.fields {
@@ -191,13 +174,10 @@ impl CodeGen for SchemaMap {

                             Ok(())
                         })
-                    })?;
-
-                    Ok(())
                         })
                     }

-    fn to_json(&self, _results: &Results, _indent_size: usize) -> Result<String> {
+    fn write_json(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
         let content: BTreeMap<_, _> = self
             .iter()
             .map(|(module_name, (classes, enums))| {
@@ -231,7 +211,7 @@ impl CodeGen for SchemaMap {
                     .collect();

                 (
-                    Self::slugify(&class.name),
+                    slugify(&class.name),
                     json!({
                         "parent": class.parent.as_ref().map(|parent| &parent.name),
                         "fields": fields,
@@ -259,7 +239,7 @@ impl CodeGen for SchemaMap {
                 };

                 (
-                    Self::slugify(&enum_.name),
+                    slugify(&enum_.name),
                     json!({
                         "alignment": enum_.alignment,
                         "type": type_name,
@@ -279,11 +259,10 @@ impl CodeGen for SchemaMap {
             })
             .collect();

-        serde_json::to_string_pretty(&content).map_err(Into::into)
+        fmt.write_str(&serde_json::to_string_pretty(&content).expect("failed to serialize"))
     }

-    fn to_rs(&self, results: &Results, indent_size: usize) -> Result<String> {
-        self.write_content(results, indent_size, |fmt| {
+    fn write_rs(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
         writeln!(
             fmt,
             "#![allow(non_upper_case_globals, non_camel_case_types, non_snake_case, unused)]\n"
@@ -297,7 +276,7 @@ impl CodeGen for SchemaMap {
             writeln!(fmt, "// Enums count: {}", enums.len())?;

             fmt.block(
-                &format!("pub mod {}", AsSnakeCase(Self::slugify(module_name))),
+                &format!("pub mod {}", AsSnakeCase(slugify(module_name))),
                 false,
                 |fmt| {
                     for enum_ in enums {
@@ -316,7 +295,7 @@ impl CodeGen for SchemaMap {
                             &format!(
                                 "#[repr({})]\npub enum {}",
                                 type_name,
-                                Self::slugify(&enum_.name),
+                                slugify(&enum_.name),
                             ),
                             false,
                             |fmt| {
@@ -348,7 +327,7 @@ impl CodeGen for SchemaMap {
                         let parent_name = class
                             .parent
                             .as_ref()
-                            .map(|parent| Self::slugify(&parent.name))
+                            .map(|parent| slugify(&parent.name))
                             .unwrap_or_else(|| "None".to_string());

                         writeln!(fmt, "// Parent: {}", parent_name)?;
@@ -357,7 +336,7 @@ impl CodeGen for SchemaMap {
                         write_metadata(fmt, &class.metadata)?;

                         fmt.block(
-                            &format!("pub mod {}", Self::slugify(&class.name)),
+                            &format!("pub mod {}", slugify(&class.name)),
                             false,
                             |fmt| {
                                 for field in &class.fields {
@@ -380,9 +359,6 @@ impl CodeGen for SchemaMap {

                             Ok(())
                         })
-                    })?;
-
-                    Ok(())
                         })
                     }
 }
@@ -1,6 +1,5 @@
 use memflow::prelude::v1::*;

-/// Represents a keyboard button.
 #[derive(Pod)]
 #[repr(C)]
 pub struct KeyButton {
@@ -1,6 +1,5 @@
 use memflow::prelude::v1::*;

-/// Represents a node in the linked list of exposed interfaces.
 #[derive(Pod)]
 #[repr(C)]
 pub struct InterfaceReg {
@@ -2,7 +2,6 @@ use memflow::prelude::v1::*;

 use crate::error::{Error, Result};

-/// Represents a growable memory class that doubles in size by default.
 #[repr(C)]
 pub struct UtlMemory<T> {
     pub mem: Pointer64<[T]>, // 0x0000
@@ -23,7 +23,6 @@ pub struct FreeList {
     pub next: Pointer64<FreeList>, // 0x0000
 }

-/// Represents an optimized pool memory allocator.
 #[repr(C)]
 pub struct UtlMemoryPoolBase {
     pub block_size: i32, // 0x0000
@@ -31,7 +31,6 @@ pub struct HashFixedDataInternal<D, K> {

 unsafe impl<D: 'static, K: 'static> Pod for HashFixedDataInternal<D, K> {}

-/// Represents a thread-safe hash table.
 #[repr(C)]
 pub struct UtlTsHash<D, const C: usize = 256, K = u64> {
     pub entry_mem: UtlMemoryPoolBase, // 0x0000
@@ -63,7 +62,7 @@ where
         self.entry_mem.peak_alloc
     }

-    /// Returns all elements in the hash table.
+    // TODO:
     pub fn elements(&self, process: &mut IntoProcessInstanceArcBox<'_>) -> Result<Vec<D>> {
         let blocks_alloc = self.blocks_alloc() as usize;
         let peak_alloc = self.peak_count() as usize;
@@ -2,7 +2,6 @@ use memflow::prelude::v1::*;

 use crate::error::{Error, Result};

-/// Represents a growable array class that doubles in size by default.
 #[repr(C)]
 pub struct UtlVector<T> {
     pub size: i32, // 0x0000