Getting to the way it's supposed to be!

commit 8f2dad9cec (parent 84729f9d27), 2024-10-12 00:43:51 +02:00
2663 changed files with 540071 additions and 14 deletions


@@ -0,0 +1,40 @@
import os
import re
import argparse
self_path = os.path.dirname(os.path.abspath(__file__))
fuzz_path = os.path.join(self_path, "..", "data", "fuzz")
p = argparse.ArgumentParser(prog="add_fuzz_cases.py")
p.add_argument("path", nargs="?", default=fuzz_path, help="Path in which to rename files")
p.add_argument("--ext", default="fbx", help="File extension")
argv = p.parse_args()
fuzz_path = argv.path
fuzz_files = { }
file_queue = []
RE_FUZZ = re.compile(f"fuzz_(\\d+)\\.{re.escape(argv.ext)}")
for name in os.listdir(fuzz_path):
path = os.path.join(fuzz_path, name)
with open(path, 'rb') as f:
content = f.read()
m = RE_FUZZ.match(name)
if m:
fuzz_files[content] = name
else:
file_queue.append((name, content))
for name, content in file_queue:
existing = fuzz_files.get(content)
if existing:
print("{}: Exists as {}".format(name, existing))
else:
new_name = "fuzz_{:04}.{}".format(len(fuzz_files), argv.ext)
print("{}: Renaming to {}".format(name, new_name))
fuzz_files[content] = new_name
path = os.path.join(fuzz_path, name)
new_path = os.path.join(fuzz_path, new_name)
os.rename(path, new_path)


@@ -0,0 +1,414 @@
from typing import NamedTuple, List, Mapping, Optional, Tuple, Set, DefaultDict
from collections import defaultdict
import pickle
import argparse
import os
import io
import re
import pcpp
from pycparser import c_ast, CParser
g_failed = False
def verbose(msg):
print(msg, flush=True)
def error(msg):
global g_failed
print(f"\n{msg}", flush=True)
g_failed = True
fake_includes = { }
fake_includes["stdint.h"] = """
#pragma once
typedef unsigned char uint8_t;
typedef unsigned short uint16_t;
typedef unsigned int uint32_t;
typedef unsigned long uint64_t;
typedef signed char int8_t;
typedef signed short int16_t;
typedef signed int int32_t;
typedef signed long int64_t;
"""
fake_includes["stdbool.h"] = """
#pragma once
#define bool _Bool
#define true 1
#define false 0
"""
fake_includes["stddef.h"] = """
typedef unsigned long size_t;
typedef unsigned long uintptr_t;
typedef long ptrdiff_t;
"""
fake_includes["stdarg.h"] = """
typedef int va_list;
"""
fake_includes["stdio.h"] = """
typedef int FILE;
typedef unsigned long fpos_t;
"""
fake_includes["string.h"] = ""
fake_includes["stdlib.h"] = ""
fake_includes["locale.h"] = ""
fake_includes["math.h"] = ""
fake_includes["assert.h"] = ""
class Preprocessor(pcpp.Preprocessor):
def __init__(self):
super().__init__()
def on_file_open(self, is_system_include, includepath):
if is_system_include and os.path.basename(includepath) in fake_includes:
return io.StringIO(fake_includes[os.path.basename(includepath)])
return super().on_file_open(is_system_include, includepath)
class ValuePreprocessor(pcpp.Preprocessor):
def __init__(self, func: str, expr: str):
super().__init__()
self.func = func
self.expr = expr
def on_unknown_macro_in_expr(self,tok):
raise RuntimeError(f"Bad recursion bound on function {self.func}: '{self.expr}'")
class FuncInfo:
stack_usage: int
max_recursion: Optional[int]
calls: Set[str]
defined: bool
recursion_stack: List[str]
def __init__(self):
self.stack_usage = 0
self.max_recursion = None
self.calls = set()
self.defined = False
self.recursion_stack = []
self.largest_call = ""
class StackUsage(NamedTuple):
usage: int
largest_call: str
class File:
functions: DefaultDict[str, FuncInfo]
stack_usage: Mapping[str, StackUsage]
addresses: Set[str]
max_dynamic_usage: Tuple[int, str]
recursion_errors: Mapping[str, List[str]]
def __init__(self):
self.functions = defaultdict(FuncInfo)
self.stack_usage = {}
self.addresses = set()
self.max_dynamic_usage = (0, "")
self.recursion_errors = {}
class AstVisitor(c_ast.NodeVisitor):
file: File
current_func: str
def __init__(self, file: File):
self.file = file
self.current_func = None
def visit_FuncDef(self, node: c_ast.FuncDef):
if node.body:
func = node.decl.name
if not (func.startswith("ufbx_") or func.startswith("ufbxi_")):
error(f"Bad function definition: {func}")
self.file.functions[func].defined = True
self.current_func = node.decl.name
self.visit(node.body)
self.current_func = None
def visit_UnaryOp(self, node: c_ast.UnaryOp):
if node.op == "&" and isinstance(node.expr, c_ast.ID):
self.file.addresses.add(node.expr.name)
self.visit(node.expr)
def visit_FuncCall(self, node: c_ast.FuncCall):
src = self.current_func
if src:
dst = node.name.name
if (isinstance(dst, str) and (dst.startswith("ufbxi_") or dst.startswith("ufbx_"))):
self.file.functions[src].calls.add(dst)
if node.args:
self.visit(node.args)
def get_stack_usage_to(file: File, func: str, target: str, seen: Set[str] = set()) -> Optional[Tuple[int, List[str]]]:
if func in seen:
return (0, []) if func == target else None
info = file.functions.get(func)
if not info:
raise RuntimeError(f"Function not found: {func}")
seen = seen | { func }
max_path = None
for call in info.calls:
path = get_stack_usage_to(file, call, target, seen)
if path is not None:
if max_path:
max_path = max(max_path, path)
else:
max_path = path
if max_path:
max_usage, stack = max_path
usage = info.stack_usage + max_usage
return (usage, [func] + stack)
else:
return None
def add_ignore(ignores: str, ignore: str) -> str:
if not ignores: ignores = "/"
parts = ignores[1:].split(",") + [ignore]
return "/" + ",".join(sorted(p for p in parts if p))
def is_ignored(ignores: str, func: str) -> bool:
return ignores and func in ignores[1:].split(",")
def get_stack_usage(file: File, func: str, ignores: str = "", stack: List[str] = []) -> StackUsage:
if is_ignored(ignores, func):
return StackUsage(-1, "")
key = f"{func}{ignores}" if ignores else func
existing = file.stack_usage.get(key)
if existing is not None: return existing
info = file.functions.get(func)
if not info:
raise RuntimeError(f"Function not found: {func}")
if info.max_recursion:
rec_path = get_stack_usage_to(file, func, func)
if rec_path is None:
error(f"Unnecessary recursion tag in {func}()\nContains ufbxi_assert_max_recursion() but could not find recursive path to itself")
rec_path = (0, [])
rec_usage, rec_stack = rec_path
info.recursion_stack = rec_stack
self_usage = rec_usage * (info.max_recursion - 1) + info.stack_usage
child_ignores = add_ignore(ignores, func)
stack = []
else:
self_usage = info.stack_usage
child_ignores = ignores
if func in stack:
pos = stack.index(func)
error_stack = stack[pos:] + [func]
prev_error = file.recursion_errors.get(func)
if not prev_error or len(prev_error) > len(error_stack):
file.recursion_errors[func] = error_stack
return StackUsage(0, "")
stack = stack + [func]
max_usage = StackUsage(0, "")
for call in info.calls:
usage = get_stack_usage(file, call, child_ignores, stack).usage
max_usage = max(max_usage, StackUsage(usage, f"{call}{child_ignores}"))
usage = StackUsage(self_usage + max_usage.usage, max_usage.largest_call)
file.stack_usage[key] = usage
return usage
def parse_file(c_path: str, su_path: str, cache_path: Optional[str]) -> File:
pp = Preprocessor()
pp.define("UFBX_STANDARD_C")
pp.define("UFBXI_ANALYSIS_PARSER")
pp.define("UFBXI_ANALYSIS_RECURSIVE")
if cache_path and os.path.exists(cache_path) and os.path.getmtime(cache_path) > os.path.getmtime(c_path):
verbose(f"Loading AST cache: {cache_path}")
with open(cache_path, "rb") as f:
ast, max_recursions = pickle.load(f)
else:
max_recursions = { }
verbose(f"Preprocessing C file: {c_path}")
pp_stream = io.StringIO()
with open(c_path) as f:
pp.parse(f.read(), "ufbx.c")
pp.write(pp_stream)
pp_source = pp_stream.getvalue()
re_recursive_function = re.compile(r"UFBXI_RECURSIVE_FUNCTION\s*\(\s*(\w+),\s*(.+)\s*\);")
for line in pp_source.splitlines():
m = re_recursive_function.search(line)
if m:
name, rec_expr = m.groups()
lit_pp = ValuePreprocessor(name, rec_expr)
toks = lit_pp.tokenize(rec_expr)
rec_value, _ = lit_pp.evalexpr(toks)
if not rec_value:
raise RuntimeError(f"Bad recursion bound on function {name}: '{rec_expr}'")
max_recursions[name] = rec_value
pp_source = re_recursive_function.sub("", pp_source)
verbose("Parsing C file")
parser = CParser()
ast = parser.parse(pp_source)
if cache_path:
verbose(f"Writing AST cache: {cache_path}")
with open(cache_path, "wb") as f:
pickle.dump((ast, max_recursions), f)
verbose("Visiting AST")
file = File()
visitor = AstVisitor(file)
visitor.visit(ast)
# Gather maximum recursion from file
for func, rec in max_recursions.items():
file.functions[func].max_recursion = rec
if su_path:
verbose(f"Reading stack usage file: {su_path}")
with open(su_path) as f:
for line in f:
line = line.strip()
if not line: continue
m = re.match(r".*:\d+:(?:\d+:)?(\w+)(?:\.[a-z0-9\.]+)?\s+(\d+)\s+([a-z,]+)", line)
if not m:
raise RuntimeError(f"Bad .su line: {line}")
func, stack, usage = m.groups()
assert usage in ("static", "dynamic,bounded")
file.functions[func].stack_usage = int(stack)
return file
def get_max_dynamic_usage(file: File) -> Tuple[int, str]:
max_dynamic_usage = (0, "")
addr_funcs = file.addresses & set(file.functions.keys())
for func in addr_funcs:
usage = get_stack_usage(file, func).usage
max_dynamic_usage = max(max_dynamic_usage, (usage, func))
return max_dynamic_usage
def dump_largest_stack(file: File, func: str) -> List[str]:
usage = file.stack_usage[func]
print(f"{func}() {usage.usage} bytes")
index = 0
while True:
if "/" in func:
raw_func, _ = func.split("/")
else:
raw_func = func
info = file.functions.get(raw_func)
stack_usage = file.stack_usage.get(func)
if not info: break
if info.recursion_stack:
rec_usage = 0
for ix, frame in enumerate(info.recursion_stack):
rec_info = file.functions[frame]
ch = "|" if ix > 0 else ">"
fn = f"{frame}()"
usage = f"+{rec_info.stack_usage} bytes"
rec_usage += rec_info.stack_usage
print(f"{ch}{index:3} {fn:<40}{usage:>14}")
index += 1
rec_extra = info.max_recursion - 1
usage = f"(+{rec_usage * rec_extra} bytes)"
prefix = f"(recursion {info.max_recursion} times)"
index += rec_extra * len(info.recursion_stack)
dots = "." * len(str(index - 1))
print(f"|{dots:>3} {prefix:<39}{usage:>16}")
fn = f"{raw_func}()"
usage = f"+{info.stack_usage} bytes"
print(f"{index:4} {fn:<40}{usage:>14}")
func = stack_usage.largest_call
index += 1
if __name__ == "__main__":
parser = argparse.ArgumentParser("analyze_stack.py")
parser.add_argument("su", nargs="?", help="Stack usage .su file")
parser.add_argument("--source", help="Path to ufbx.c")
parser.add_argument("--cache", help="Cache file to use")
parser.add_argument("--limit", help="Maximum stack usage in bytes")
parser.add_argument("--no-su", action="store_true", help="Allow running with no .su file")
argv = parser.parse_args()
if argv.su:
su_path = argv.su
elif not argv.no_su:
raise RuntimeError("Expected an .su file as a positional argument")
else:
su_path = ""
if argv.source:
c_path = argv.source
else:
c_path = os.path.relpath(os.path.join(os.path.dirname(__file__), "..", "ufbx.c"))
file = parse_file(c_path, su_path, argv.cache)
if su_path:
file.max_dynamic_usage = get_max_dynamic_usage(file)
max_usage = (0, "")
for func in file.functions:
usage = get_stack_usage(file, func)
max_usage = max(max_usage, (usage.usage, func))
if argv.limit:
limit = int(argv.limit, base=0)
total = max_usage[0] + file.max_dynamic_usage[0]
if total >= limit:
error(f"Stack overflow in {max_usage[1]}: {max_usage[0]} bytes + {file.max_dynamic_usage[0]} dynamic\noverflows limit of {limit} bytes")
print("\nLargest stack:")
dump_largest_stack(file, max_usage[1])
print("\nLargest dynamic stack:")
dump_largest_stack(file, file.max_dynamic_usage[1])
for func, stack in file.recursion_errors.items():
stack_str = "\n".join(f"{ix:3}: {s}()" for ix, s in enumerate(stack))
error(f"Unbounded recursion in {func}()\nStack trace:\n{stack_str}")
if not g_failed:
interesting_functions = [
"ufbx_load_file",
"ufbx_evaluate_scene",
"ufbx_subdivide_mesh",
"ufbx_tessellate_nurbs_surface",
"ufbx_tessellate_nurbs_curve",
"ufbx_evaluate_transform",
"ufbx_generate_indices",
"ufbx_inflate",
"ufbx_triangulate_face",
]
for func in interesting_functions:
print()
dump_largest_stack(file, func)
print()
print("Largest potentially dynamically called stack:")
dump_largest_stack(file, file.max_dynamic_usage[1])
else:
print("Skipping further tests because no .su file was specified")
if g_failed:
exit(1)
else:
print()
print("Success!")
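
For reference, the recursion accounting above reduces to one formula: a single pass of the bounded recursive cycle (which includes the function's own frame) is charged (max_recursion - 1) extra times on top of the plain stack usage. A minimal sketch of that arithmetic, with made-up frame sizes and bound:

# Minimal sketch of the accounting get_stack_usage() performs for a function
# tagged with ufbxi_assert_max_recursion(); all numbers here are invented.
def bounded_recursion_usage(own_frame, cycle_frames, max_recursion):
    rec_usage = sum(cycle_frames)  # stack consumed by one trip around the cycle
    return rec_usage * (max_recursion - 1) + own_frame

print(bounded_recursion_usage(128, [128, 64], 32))  # 192 * 31 + 128 = 6080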


@@ -0,0 +1,226 @@
import os
import json
from typing import NamedTuple, Optional, List
import subprocess
import glob
import re
import urllib.parse
class TestModel(NamedTuple):
fbx_path: str
obj_path: Optional[str]
mtl_path: Optional[str]
mat_path: Optional[str]
frame: Optional[int]
class TestCase(NamedTuple):
root: str
json_path: str
title: str
author: str
license: str
url: str
extra_files: List[str]
models: List[TestModel]
def log(message=""):
print(message, flush=True)
def single_file(path):
if os.path.exists(path):
return [path]
else:
return []
def strip_ext(path):
if path.endswith(".gz"):
path = path[:-3]
base, _ = os.path.splitext(path)
return base
def get_fbx_files(json_path):
base_path = strip_ext(json_path)
yield from single_file(f"{base_path}.fbx")
yield from single_file(f"{base_path}.ufbx.obj")
yield from glob.glob(f"{glob.escape(base_path)}/*.fbx")
def get_obj_files(fbx_path):
base_path = strip_ext(fbx_path)
yield from single_file(f"{base_path}.obj.gz")
yield from single_file(f"{base_path}.obj")
yield from glob.glob(f"{glob.escape(base_path)}_*.obj.gz")
yield from glob.glob(f"{glob.escape(base_path)}_*.obj")
def get_mtl_files(obj_path):
base_path = strip_ext(obj_path)
yield from single_file(f"{base_path}.mtl")
def get_mat_files(obj_path):
base_path = strip_ext(obj_path)
yield from single_file(f"{base_path}.mat")
def remove_duplicate_files(paths):
seen = set()
for path in paths:
base = strip_ext(path)
if base in seen: continue
seen.add(base)
yield path
def gather_case_models(json_path):
for fbx_path in get_fbx_files(json_path):
for obj_path in remove_duplicate_files(get_obj_files(fbx_path)):
mtl_path = next(get_mtl_files(obj_path), None)
mat_path = next(get_mat_files(fbx_path), None)
fbx_base = strip_ext(fbx_path)
obj_base = strip_ext(obj_path)
flags = obj_base[len(fbx_base):].split("_")
# Parse flags
frame = None
for flag in flags:
m = re.match(r"frame(\d+)", flag)
if m:
frame = int(m.group(1))
yield TestModel(
fbx_path=fbx_path,
obj_path=obj_path,
mtl_path=mtl_path,
mat_path=mat_path,
frame=frame)
else:
# TODO: Handle objless fbx
pass
def gather_dataset_tasks(root_dir):
for root, _, files in os.walk(root_dir):
for filename in files:
if not filename.endswith(".json"):
continue
path = os.path.join(root, filename)
with open(path, "rt", encoding="utf-8") as f:
desc = json.load(f)
models = list(gather_case_models(path))
if not models:
raise RuntimeError(f"No models found for {path}")
extra_files = [os.path.join(root, ex) for ex in desc.get("extra-files", [])]
yield TestCase(
root=root_dir,
json_path=path,
title=desc["title"],
author=desc["author"],
license=desc["license"],
url=desc["url"],
extra_files=extra_files,
models=models,
)
if __name__ == "__main__":
from argparse import ArgumentParser
parser = ArgumentParser("check_dataset.py --root <root>")
parser.add_argument("--root", help="Root directory to search for .json files")
parser.add_argument("--host-url", help="URL where the files are hosted")
parser.add_argument("--exe", help="check_fbx.c executable")
parser.add_argument("--verbose", action="store_true", help="Print verbose information")
argv = parser.parse_args()
cases = list(gather_dataset_tasks(root_dir=argv.root))
def fmt_url(path, root=""):
if root:
path = os.path.relpath(path, root)
path = path.replace("\\", "/")
safe_path = urllib.parse.quote(path)
return f"{argv.host_url}/{safe_path}"
def fmt_rel(path, root=""):
if root:
path = os.path.relpath(path, root)
path = path.replace("\\", "/")
return f"{path}"
ok_count = 0
test_count = 0
case_ok_count = 0
for case in cases:
log(f"== '{case.title}' by '{case.author}' ({case.license}) ==")
log()
if case.url:
log(f" source url: {case.url}")
log(f" .json url: {fmt_url(case.json_path, case.root)}")
for extra in case.extra_files:
log(f" extra url: {fmt_url(extra, case.root)}")
log()
case_ok = True
for model in case.models:
test_count += 1
args = [argv.exe]
args.append(model.fbx_path)
extra = []
if model.obj_path:
args += ["--obj", model.obj_path]
if model.mat_path:
args += ["--mat", model.mat_path]
if model.frame is not None:
extra.append(f"frame {model.frame}")
args += ["--frame", str(model.frame)]
name = fmt_rel(model.fbx_path, case.root)
extra_str = ""
if extra:
extra_str = " [" + ", ".join(extra) + "]"
log(f"-- {name}{extra_str} --")
log()
if argv.host_url:
log(f" .fbx url: {fmt_url(model.fbx_path, case.root)}")
if model.obj_path:
log(f" .obj url: {fmt_url(model.obj_path, case.root)}")
if model.mtl_path:
log(f" .mtl url: {fmt_url(model.mtl_path, case.root)}")
if model.mat_path:
log(f" .mat url: {fmt_url(model.mat_path, case.root)}")
log()
log("$ " + " ".join(args))
log()
try:
subprocess.check_call(args)
log()
log("-- PASS --")
ok_count += 1
except subprocess.CalledProcessError:
log()
log("-- FAIL --")
case_ok = False
log()
if case_ok:
case_ok_count += 1
log(f"{ok_count}/{test_count} files passed ({case_ok_count}/{len(cases)} test cases)")
if ok_count < test_count:
exit(1)
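
For reference, the `_frameN` suffix parsed by gather_case_models() ties a reference .obj to a specific animation frame of the matching .fbx. A small sketch of that parsing with made-up file names:

# Hypothetical illustration of the naming convention gather_case_models()
# parses: "<base>_frame<N>.obj(.gz)" is compared against "<base>.fbx" at
# frame N. The paths below are invented.
import re

fbx_base = "data/example/model"           # from data/example/model.fbx
obj_base = "data/example/model_frame10"   # from data/example/model_frame10.obj.gz
flags = obj_base[len(fbx_base):].split("_")  # ["", "frame10"]
frame = None
for flag in flags:
    m = re.match(r"frame(\d+)", flag)
    if m:
        frame = int(m.group(1))
assert frame == 10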


@@ -0,0 +1,87 @@
import argparse
import re
import sys
def strip_comments(line):
if "//" in line:
return line[:line.index("//")]
else:
return line
def forbid(r, line, err):
m = re.search(r, line)
if m:
return (err, m.start(1), m.end(1))
else:
return None
def no_trailing_whitespace(line):
return forbid(r"(\s+)$", line, "trailing whitespace is forbidden")
def indent_tabs(line):
return forbid(r"^\s*?( +)\s*", line, "tabs should be used for indentation")
def no_trailing_tabs(line):
return forbid(r"\S.*(\t+)", line, "tabs should only appear in the beginning of a line")
def keyword_spacing(line):
line = strip_comments(line)
return forbid(r"\b(?:for|if|while)(\()", line, "expected space after keyword")
def pointer_alignment(line):
line = strip_comments(line)
return forbid(r"\w(\* )\w", line, "pointers should be aligned to the right")
checks = [
no_trailing_whitespace,
indent_tabs,
no_trailing_tabs,
keyword_spacing,
pointer_alignment,
]
def check_file(path, colors):
failed = False
if colors:
c_gray = "\033[1;30m"
c_green = "\033[1;32m"
c_red = "\033[1;31m"
c_white = "\033[1;97m"
c_def = "\033[0m"
else:
c_gray = ""
c_green = ""
c_red = ""
c_white = ""
c_def = ""
with open(path, "rt") as f:
for ix, line in enumerate(f):
line = line.rstrip("\r\n")
for check in checks:
err = check(line)
if err:
err_desc, err_begin, err_end = err
l = f"{c_white}{path}:{ix + 1}:{err_begin + 1}: {c_red}error:{c_white} {err_desc} [{check.__name__}]{c_def}"
s = line
s = s.replace("\t", f"{c_gray}\u2192{c_def}")
s = s.replace(" ", f"{c_gray}\u00B7{c_def}")
e = " " * err_begin + c_green + "^" * (err_end - err_begin) + c_def
print(f"{l}\n {s}\n {e}")
failed = True
return failed
if __name__ == "__main__":
p = argparse.ArgumentParser("check_formatting.py")
p.add_argument("files", nargs="*")
p.add_argument("--no-color", action="store_true")
argv = p.parse_args()
failed = False
colors = not argv.no_color and sys.stdout.isatty()
for f in argv.files:
if check_file(f, colors):
failed = True
if failed:
sys.exit(1)
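
Since each check above is a plain regex predicate returning (message, start, end) or None, they can be exercised directly; a hypothetical smoke test with invented sample lines, assuming the functions above are in scope:

# Hypothetical smoke test for the checks above; each returns
# (message, start_column, end_column) on a violation, or None when clean.
assert no_trailing_whitespace("int x;  ") == ("trailing whitespace is forbidden", 6, 8)
assert keyword_spacing("if(x) {") == ("expected space after keyword", 2, 3)
assert pointer_alignment("int* ptr;") == ("pointers should be aligned to the right", 3, 5)
assert indent_tabs("    int x;") is not None
assert no_trailing_tabs("int x;\t\t") is not None
assert no_trailing_whitespace("int x;") is None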


@@ -0,0 +1,12 @@
#include <stdio.h>
#include <math.h>
#include <stdlib.h>
int main(int argc, char **argv)
{
if (argc > 1) {
float val = (float)atof(argv[1]);
printf("sin(%.2f) = %.2f\n", val, sinf(val));
}
return 0;
}


@@ -0,0 +1,12 @@
#include <cstdio>
#include <cmath>
#include <cstdlib>
int main(int argc, char **argv)
{
if (argc > 1) {
float val = (float)atof(argv[1]);
printf("sin(%.2f) = %.2f\n", val, std::sin(val));
}
return 0;
}


@@ -0,0 +1,169 @@
#define _CRT_SECURE_NO_WARNINGS
#include <zlib.h>
#define CPUTIME_IMPLEMENTATION
#include "../../test/cputime.h"
#include "../../ufbx.h"
#include <vector>
#include <assert.h>
#include <algorithm>
#include <stdlib.h>
#define UFBX_RETAIN 1
int inflate_memory(const void *src, int srcLen, void *dst, int dstLen) {
z_stream strm = {0};
strm.total_in = strm.avail_in = srcLen;
strm.total_out = strm.avail_out = dstLen;
strm.next_in = (Bytef *) src;
strm.next_out = (Bytef *) dst;
strm.zalloc = Z_NULL;
strm.zfree = Z_NULL;
strm.opaque = Z_NULL;
int err = -1;
int ret = -1;
err = inflateInit2(&strm, 15);
if (err == Z_OK) {
err = inflate(&strm, Z_FINISH);
if (err == Z_STREAM_END) {
ret = strm.total_out;
}
else {
inflateEnd(&strm);
return err;
}
}
else {
inflateEnd(&strm);
return err;
}
inflateEnd(&strm);
return ret;
}
std::vector<char> read_file(const char *path)
{
FILE *f = fopen(path, "rb");
fseek(f, 0, SEEK_END);
std::vector<char> data;
data.resize(ftell(f));
fseek(f, 0, SEEK_SET);
fread(data.data(), 1, data.size(), f);
fclose(f);
return data;
}
struct deflate_stream
{
const void *data;
size_t compressed_size;
size_t decompressed_size;
uint64_t zlib_time = UINT64_MAX;
uint64_t ufbx_time = UINT64_MAX;
};
int main(int argc, char **argv)
{
std::vector<char> data = read_file(argv[1]);
std::vector<deflate_stream> streams;
std::vector<char> dst_buf;
cputime_begin_init();
size_t max_decompressed_size = 0;
for (size_t offset = 0; offset < data.size(); ) {
deflate_stream stream;
stream.compressed_size = *(uint32_t*)(data.data() + offset + 0);
stream.decompressed_size = *(uint32_t*)(data.data() + offset + 4);
stream.data = data.data() + offset + 8;
offset += 8 + stream.compressed_size;
if (stream.decompressed_size > max_decompressed_size) {
max_decompressed_size = stream.decompressed_size;
}
streams.push_back(stream);
}
if (argc > 2) {
deflate_stream single = streams[atoi(argv[2])];
streams.clear();
streams.push_back(single);
}
size_t runs = 3;
if (argc > 3) {
runs = (size_t)atoi(argv[3]);
}
dst_buf.resize(max_decompressed_size);
for (size_t i = 0; i < runs; i++) {
for (deflate_stream &stream : streams) {
uint64_t begin = cputime_cpu_tick();
int ret = inflate_memory(stream.data, (int)stream.compressed_size,
dst_buf.data(), (int)stream.decompressed_size);
assert(ret == stream.decompressed_size);
uint64_t end = cputime_cpu_tick();
if (argc > 20) {
fwrite(dst_buf.data(), 1, stream.decompressed_size, stdout);
}
stream.zlib_time = std::min(stream.zlib_time, end - begin);
}
#if UFBX_RETAIN
ufbx_inflate_retain retain;
retain.initialized = false;
#endif
for (deflate_stream &stream : streams) {
uint64_t begin = cputime_cpu_tick();
#if !UFBX_RETAIN
ufbx_inflate_retain retain;
retain.initialized = false;
#endif
ufbx_inflate_input input = { };
input.data_size = input.total_size = stream.compressed_size;
input.data = stream.data;
ptrdiff_t ret = ufbx_inflate(dst_buf.data(), stream.decompressed_size, &input, &retain);
assert(ret == stream.decompressed_size);
if (argc > 20) {
fwrite(dst_buf.data(), 1, stream.decompressed_size, stdout);
}
uint64_t end = cputime_cpu_tick();
stream.ufbx_time = std::min(stream.ufbx_time, end - begin);
}
}
cputime_end_init();
uint32_t index = 0;
for (deflate_stream &stream : streams) {
double ufbx_sec = cputime_cpu_delta_to_sec(NULL, stream.ufbx_time);
double zlib_sec = cputime_cpu_delta_to_sec(NULL, stream.zlib_time);
double ufbx_cbp = (double)stream.ufbx_time / (double)stream.decompressed_size;
double zlib_cbp = (double)stream.zlib_time / (double)stream.decompressed_size;
printf("[%6.2f] %3u: %10zu -> %10zu bytes: %8.4fms (%6.2fcy/b) vs %8.4fms (%6.2fcy/b)\n",
ufbx_sec / zlib_sec * 100.0, index,
stream.compressed_size, stream.decompressed_size,
ufbx_sec*1e3, ufbx_cbp, zlib_sec*1e3, zlib_cbp);
index++;
}
return 0;
}


@@ -0,0 +1,21 @@
import argparse
import os
import subprocess
parser = argparse.ArgumentParser(description="Gather DEFLATE compressed streams from .fbx files")
parser.add_argument("--exe", help="Executable path for per-file gathering, see `gather_deflate_main.cpp`")
parser.add_argument("-o", help="Output file")
parser.add_argument("--root", help="Root path to look for .fbx files")
argv = parser.parse_args()
data = bytearray()
for root,_,files in os.walk(argv.root):
for file in files:
if not file.endswith(".fbx"): continue
path = os.path.join(root, file)
print(path)
data += subprocess.check_output([argv.exe, path])
with open(argv.o, "wb") as f:
f.write(data)


@@ -0,0 +1,61 @@
#define _CRT_SECURE_NO_WARNINGS
#include "../../test/domfuzz/fbxdom.h"
#include <vector>
#include <string>
#include <stdio.h>
#ifdef _WIN32
#include <fcntl.h>
#include <io.h>
#endif
std::vector<char> read_file(const char *path)
{
FILE *f = fopen(path, "rb");
fseek(f, 0, SEEK_END);
std::vector<char> data;
data.resize(ftell(f));
fseek(f, 0, SEEK_SET);
fread(data.data(), 1, data.size(), f);
fclose(f);
return data;
}
void dump_deflate_arrays(fbxdom::node_ptr node)
{
for (fbxdom::value &value : node->values) {
if (value.data_array.encoding == 1) {
uint32_t decompressed_size = 0;
switch (value.type) {
case 'c': case 'b': decompressed_size = value.data_array.length * 1; break;
case 'i': case 'f': decompressed_size = value.data_array.length * 4; break;
case 'l': case 'd': decompressed_size = value.data_array.length * 8; break;
}
fwrite(&value.data_array.compressed_length, 4, 1, stdout);
fwrite(&decompressed_size, 4, 1, stdout);
fwrite(value.data.data(), 1, value.data.size(), stdout);
}
}
for (fbxdom::node_ptr &child : node->children) {
dump_deflate_arrays(child);
}
}
int main(int argc, char **argv)
{
std::vector<char> data = read_file(argv[1]);
fbxdom::node_ptr root = fbxdom::parse(data.data(), data.size());
if (!root) return 0;
#ifdef _WIN32
_setmode(_fileno(stdout), O_BINARY);
#endif
dump_deflate_arrays(root);
return 0;
}


@@ -0,0 +1,39 @@
# Generate lookup table for `base + read(bits)` Deflate operands
def deflate_lookup_table(pairs):
for (base, bits, flags) in pairs:
assert bits < (1 << 4)
assert (flags & 0xe0) == flags
assert base < (1 << 16)
yield bits | flags | base << 16
def format_table(data, cols):
data = list(data)
for base in range(0, len(data), cols):
yield ''.join('0x{:08x}, '.format(x) for x in data[base:base+cols])
# Deflate RFC 1951 3.2.5. tables
length_operands = [
(0,0,0x20),(3,0,0x40),(4,0,0x40),(5,0,0x40),(6,0,0x40),(7,0,0x40),(8,0,0x40),(9,0,0x40),(10,0,0x40),(11,1,0x40),
(13,1,0x40),(15,1,0x40),(17,1,0x40),(19,2,0x40),(23,2,0x40),(27,2,0x40),(31,2,0x40),(35,3,0x40),
(43,3,0x40),(51,3,0x40),(59,3,0x40),(67,4,0x40),(83,4,0x40),(99,4,0x40),(115,4,0x40),(131,5,0x40),
(163,5,0x40),(195,5,0x40),(227,5,0x40),(258,0,0x40),
(1,0,0x20),(1,0,0x20),
]
dist_operands = [
(1,0,0),(2,0,0),(3,0,0),(4,0,0),(5,1,0),(7,1,0),(9,2,0),(13,2,0),(17,3,0),
(25,3,0),(33,4,0),(49,4,0),(65,5,0),(97,5,0),(129,6,0),(193,6,0),(257,7,0),
(385,7,0),(513,8,0),(769,8,0),(1025,9,0),(1537,9,0),(2049,10,0),(3073,10,0),
(4097,11,0),(6145,11,0),(8193,12,0),(12289,12,0),(16385,13,0),(24577,13,0),
(1,0,0x20),(1,0,0x20),
]
print('static const uint32_t ufbxi_deflate_length_lut[] = {')
table = deflate_lookup_table(length_operands)
print('\n'.join('\t' + t for t in format_table(table, 8)))
print('};')
print('static const uint32_t ufbxi_deflate_dist_lut[] = {')
table = deflate_lookup_table(dist_operands)
print('\n'.join('\t' + t for t in format_table(table, 8)))
print('};')
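
For reference, each emitted entry packs the extra-bit count into the low 4 bits, the flag bits into the 0xe0 mask, and the base value into the upper 16 bits. A hypothetical round-trip decode, assuming it is appended to the generator above:

# Hypothetical decoder for one packed entry, mirroring deflate_lookup_table():
# low 4 bits = extra bit count, 0xe0 mask = flags, upper 16 bits = base value.
def decode_entry(entry):
    return entry >> 16, entry & 0x0f, entry & 0xe0  # (base, bits, flags)

entry = next(deflate_lookup_table([(3, 0, 0x40)]))  # first real length operand
assert decode_entry(entry) == (3, 0, 0x40)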


@@ -0,0 +1,18 @@
#!/usr/bin/env bash
set -Eeuo pipefail
TAG=$( set -x; python3 misc/get_header_tag.py )
echo "Header version tag: $TAG"
if [[ "$TAG" == v*.*.0 ]]; then
echo "No older header available"
elif [[ "$TAG" == v*.*.* ]]; then
OLD_TAG="${TAG%.*}".0
OLD_COMMIT=$( set -x; git rev-list -n 1 "$OLD_TAG" )
echo "Downgrading ufbx.h to tag $OLD_TAG, commit $OLD_COMMIT"
( set -x; git checkout "$OLD_TAG" -- ufbx.h )
else
echo "Error: Malformed tag \"$TAG\""
exit 1
fi


@@ -0,0 +1,127 @@
from collections import namedtuple
NameEnum = namedtuple("NameEnum", "name hash enum")
def str_hash(s):
h = 0x811c9dc5
for c in s:
h = ((h ^ ord(c)) * 0x01000193) & 0xffffffff
if h == 0: h = 1
return h
def nameEnum(name, enum):
return NameEnum(name, str_hash(name), enum)
prop_types = [
nameEnum("Boolean", "BOOLEAN"),
nameEnum("bool", "BOOLEAN"),
nameEnum("Integer", "INTEGER"),
nameEnum("int", "INTEGER"),
nameEnum("enum", "INTEGER"),
nameEnum("Number", "NUMBER"),
nameEnum("double", "NUMBER"),
nameEnum("Vector", "VECTOR"),
nameEnum("Vector3D", "VECTOR"),
nameEnum("Color", "COLOR"),
nameEnum("ColorRGB", "COLOR"),
nameEnum("String", "STRING"),
nameEnum("KString", "STRING"),
nameEnum("DateTime", "DATE_TIME"),
nameEnum("Lcl Translation", "TRANSLATION"),
nameEnum("Lcl Rotation", "ROTATION"),
nameEnum("Lcl Scaling", "SCALING"),
]
node_types = [
nameEnum("Model", "MODEL"),
nameEnum("Geometry", "MESH"),
nameEnum("Material", "MATERIAL"),
nameEnum("Texture", "TEXTURE"),
nameEnum("AnimationCurveNode", "ANIMATION"),
nameEnum("AnimationCurve", "ANIMATION_CURVE"),
nameEnum("AnimationLayer", "ANIMATION_LAYER"),
nameEnum("NodeAttribute", "ATTRIBUTE"),
]
element_mapping_types = [
nameEnum("ByVertex", "VERTEX"),
nameEnum("ByVertice", "VERTEX"),
nameEnum("ByPolygon", "POLYGON"),
nameEnum("ByPolygonVertex", "POLYGON_VERTEX"),
nameEnum("ByEdge", "EDGE"),
nameEnum("AllSame", "ALL_SAME"),
]
element_types = [
nameEnum("Normal", "NORMAL"),
nameEnum("Binormal", "BINORMAL"),
nameEnum("Tangent", "TANGENT"),
nameEnum("Color", "VERTEX_COLOR"),
nameEnum("UV", "UV"),
nameEnum("EdgeCrease", "EDGE_CREASE"),
nameEnum("Material", "FACE_MATERIAL"),
]
def find_params(names, map_size, max_k, max_s):
arr = [None] * map_size
for k in range(max_k):
for s in range(0, max_s):
for i in range(map_size):
arr[i] = None
for n in names:
ix = (n.hash * k >> s) % map_size
if arr[ix]:
break
else:
arr[ix] = n
else:
return k, s, arr
raise ValueError("Could not find params")
decl = []
test = []
def gen_table(names, type_name, enum_name, test_extra=""):
global decl
global test
map_size = 1
while map_size < len(names):
map_size *= 2
while True:
try:
k, s, arr = find_params(names, map_size, 10000, 24)
break
except:
map_size *= 2
decl.append("#define ufbxi_{0}_permute_hash(h) ((((h) * {1}) >> {2}) % {3})".format(type_name, k, s, map_size))
decl.append("static const ufbxi_{0}_map_entry ufbxi_{0}_map[{1}] = {{".format(type_name, map_size))
for n in arr:
if not n:
decl.append("\t{{ 0u, {{ 0,0 }}, {0}_UNKNOWN }},".format(enum_name))
else:
decl.append("\t{{ 0x{0:08x}u, {{ \"{1}\", {2} }}, {3}_{4} }},".format(n.hash, n.name, len(n.name), enum_name, n.enum))
decl.append("};")
test.append("")
test.append("UFBXT_TEST(table_{0}_map_values)".format(type_name))
test.append("#if UFBXT_IMPL")
test.append("{")
for n in names:
test.append("\tufbxt_assert(ufbxi_get_{0}(make_str(\"{1}\"){2}) == {3}_{4});".format(type_name, n.name, test_extra, enum_name, n.enum))
test.append("}")
test.append("#endif")
gen_table(prop_types, "prop_type", "UFBX_PROP", "")
gen_table(node_types, "node_type", "UFBX_NODE", ", ufbx_empty_string")
gen_table(element_mapping_types, "element_mapping", "UFBXI_ELEMENT_BY", "")
gen_table(element_types, "element_type", "UFBXI_ELEMENT", ", UFBXI_ELEMENT_BY_UNKNOWN")
print("\n".join(decl))
print()
print()
print("\n".join(test))
print()
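
The generated C tables are intended to be probed with the same FNV-1a hash and multiplicative permutation the script searches parameters for. A hypothetical Python-side mirror of that lookup, assuming str_hash above is in scope; k, s and map_size stand for whatever parameters the script actually printed:

# Hypothetical mirror of the generated lookup: hash with FNV-1a, permute with
# the (k, s) pair found by find_params(), index the table, then verify the
# stored name since unrelated strings can land in any slot.
def lookup(name, table, k, s, map_size):
    ix = (str_hash(name) * k >> s) % map_size
    entry = table[ix]
    if entry is not None and entry.name == name:
        return entry.enum
    return "UNKNOWN"

# e.g. lookup("Model", node_table, k, s, map_size) would yield "MODEL" once
# the table and parameters have been produced by find_params().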


@@ -0,0 +1,53 @@
#!/usr/bin/env python3
import argparse
import os
import time
import subprocess
parser = argparse.ArgumentParser(usage="execute_per_fbx.py --exe loader --root .")
parser.add_argument("--exe", help="Executable to run")
parser.add_argument("--root", default=".", help="Root path to search from")
parser.add_argument("--start", default="", help="Top-level file to start from")
parser.add_argument("--verbose", action="store_true", help="Verbose information")
parser.add_argument('remainder', nargs="...")
argv = parser.parse_args()
begin = time.time()
num_tested = 0
num_fail = 0
total_size = 0
for root, dirs, files in os.walk(argv.root):
for file in files:
if not file.lower().endswith(".fbx"): continue
if file.lower().endswith(".ufbx-fail.fbx"):
num_fail += 1
continue
path = os.path.join(root, file)
size = os.stat(path).st_size
display = os.path.relpath(path, argv.root)
if argv.start and display < argv.start:
continue
print(f"-- {display}", flush=True)
total_size += size
if argv.exe:
args = [argv.exe, path.encode("utf-8")] + argv.remainder[1:]
if argv.verbose:
cmdline = subprocess.list2cmdline(args)
print(f"$ {cmdline}")
subprocess.check_call(args)
num_tested += 1
end = time.time()
dur = end - begin
print()
print("Success!")
print(f"Loaded {num_tested} files in {int(dur//60)}min {int(dur%60)}s.")
print(f"Processed {total_size/1e9:.2f}GB at {total_size/1e6/dur:.2f}MB/s.")
print(f"Ignored {num_fail} invalid files.")

modules/ufbx/misc/fdlibm.c (new file, 1344 additions; diff suppressed because it is too large)

modules/ufbx/misc/fuzz.sh (new file, 38 additions)

@@ -0,0 +1,38 @@
#!/usr/bin/env bash
cmd="$1"
shift 1
if [ $cmd == "build-ufbx" ]; then
afl-clang-fast -static ../../misc/fuzz_ufbx_persist.c -lm -o fuzz_ufbx
elif [ $cmd == "build-ufbx-32" ]; then
afl-clang-fast -static ../../misc/fuzz_ufbx_persist.c -lm -o fuzz_ufbx_32
elif [ $cmd == "build-ufbx-asan" ]; then
AFL_USE_ASAN=1 afl-clang-fast -DDISCRETE_ALLOCATIONS ../../misc/fuzz_ufbx_persist.c -lm -o fuzz_ufbx_asan
elif [ $cmd == "build-ufbx-asan-32" ]; then
AFL_USE_ASAN=1 afl-clang-fast -DDISCRETE_ALLOCATIONS ../../misc/fuzz_ufbx_persist.c -lm -o fuzz_ufbx_asan_32
elif [ $cmd == "build-cache" ]; then
afl-clang-fast -static ../../misc/fuzz_cache_persist.c -lm -o fuzz_cache
elif [ $cmd == "build-cache-32" ]; then
afl-clang-fast -static ../../misc/fuzz_cache_persist.c -lm -o fuzz_cache_32
elif [ $cmd == "build-cache-asan" ]; then
AFL_USE_ASAN=1 afl-clang-fast -DDISCRETE_ALLOCATIONS ../../misc/fuzz_cache_persist.c -lm -o fuzz_cache_asan
elif [ $cmd == "build-cache-asan-32" ]; then
AFL_USE_ASAN=1 afl-clang-fast -DDISCRETE_ALLOCATIONS ../../misc/fuzz_cache_persist.c -lm -o fuzz_cache_asan_32
elif [ $cmd == "build-obj" ]; then
afl-clang-fast -DLOAD_OBJ -static ../../misc/fuzz_ufbx_persist.c -lm -o fuzz_obj
elif [ $cmd == "build-obj-asan" ]; then
AFL_USE_ASAN=1 afl-clang-fast -DLOAD_OBJ -DDISCRETE_ALLOCATIONS ../../misc/fuzz_ufbx_persist.c -lm -o fuzz_obj_asan
elif [ $cmd == "build-mtl" ]; then
afl-clang-fast -DLOAD_MTL -static ../../misc/fuzz_ufbx_persist.c -lm -o fuzz_mtl
elif [ $cmd == "build-mtl-asan" ]; then
AFL_USE_ASAN=1 afl-clang-fast -DLOAD_MTL -DDISCRETE_ALLOCATIONS ../../misc/fuzz_ufbx_persist.c -lm -o fuzz_mtl_asan
fi
name=$1
shift 1
if [ $cmd == "fuzz" ]; then
cp -r cases "cases_$name"
afl-fuzz "$@" -i "cases_$name" -o "findings_$name" -t 1000 -m 2000 "./$name"
fi


@@ -0,0 +1,68 @@
#define _CRT_SECURE_NO_WARNINGS
#if defined(_WIN32)
#define ufbx_assert(cond) do { \
if (!(cond)) __debugbreak(); \
} while (0)
#else
#define ufbx_assert(cond) do { \
if (!(cond)) __builtin_trap(); \
} while (0)
#endif
#include "../ufbx.c"
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <assert.h>
char g_buffer[1024*1024];
size_t g_pos = 0;
size_t g_size = 0;
static size_t read_file(void *user, void *data, size_t size)
{
size_t to_read = g_size - g_pos;
if (to_read > size) to_read = size;
memcpy(data, g_buffer + g_pos, to_read);
g_pos += to_read;
return to_read;
}
static bool open_file(void *user, ufbx_stream *stream, const char *path, size_t path_len)
{
if (!strcmp(path, "memory-cache")) {
assert(g_pos == 0);
stream->read_fn = &read_file;
return true;
} else {
return false;
}
}
int main(int argc, char **argv)
{
ufbx_geometry_cache_opts opts = { 0 };
opts.open_file_fn = &open_file;
opts.temp_allocator.memory_limit = 0x4000000; // 64MB
opts.result_allocator.memory_limit = 0x4000000; // 64MB
#if defined(DISCRETE_ALLOCATIONS)
opts.temp_allocator.huge_threshold = 1;
opts.result_allocator.huge_threshold = 1;
#endif
while (__AFL_LOOP(10000)) {
size_t size = (size_t)read(0, g_buffer, sizeof(g_buffer));
g_size = size;
g_pos = 0;
ufbx_geometry_cache *cache = ufbx_load_geometry_cache("memory-cache", &opts, NULL);
ufbx_free_geometry_cache(cache);
}
return 0;
}


@@ -0,0 +1,61 @@
#define _CRT_SECURE_NO_WARNINGS
#if defined(_WIN32)
#define ufbx_assert(cond) do { \
if (!(cond)) __debugbreak(); \
} while (0)
#else
#define ufbx_assert(cond) do { \
if (!(cond)) __builtin_trap(); \
} while (0)
#endif
#include "../ufbx.c"
#include <stdio.h>
#include <stdlib.h>
int main(int argc, char **argv)
{
if (argc < 2) {
fprintf(stderr, "Usage: fuzz_deflate <file>\n");
return 1;
}
FILE *f = fopen(argv[1], "rb");
if (!f) {
fprintf(stderr, "Failed to open file\n");
return 1;
}
fseek(f, 0, SEEK_END);
size_t src_size = ftell(f);
fseek(f, 0, SEEK_SET);
char *src = malloc(src_size);
size_t num_read = fread(src, 1, src_size, f);
if (num_read != src_size) {
fprintf(stderr, "Failed to read file\n");
return 1;
}
fclose(f);
size_t dst_size = 1024*src_size;
if (argc >= 3) {
dst_size = (size_t)atoi(argv[2]);
}
char *dst = malloc(dst_size);
ufbx_inflate_retain retain;
retain.initialized = false;
ufbx_inflate_input input = { 0 };
input.data = src;
input.data_size = src_size;
input.total_size = src_size;
ptrdiff_t result = ufbx_inflate(dst, dst_size, &input, &retain);
free(src);
free(dst);
printf("%td\n", result);
return 0;
}


@@ -0,0 +1,39 @@
#define _CRT_SECURE_NO_WARNINGS
#if defined(_WIN32)
#define ufbx_assert(cond) do { \
if (!(cond)) __debugbreak(); \
} while (0)
#else
#define ufbx_assert(cond) do { \
if (!(cond)) __builtin_trap(); \
} while (0)
#endif
#include "../ufbx.c"
#include <stdio.h>
#include <stdlib.h>
int main(int argc, char **argv)
{
if (argc < 2) {
fprintf(stderr, "Usage: fuzz_ufbx <file>\n");
return 1;
}
ufbx_load_opts opts = { 0 };
for (int i = 2; i < argc; i++) {
if (!strcmp(argv[i], "-d")) {
opts.temp_huge_size = 1;
opts.result_huge_size = 1;
}
}
ufbx_scene *scene = ufbx_load_file(argv[1], &opts, NULL);
ufbx_free_scene(scene);
return 0;
}


@@ -0,0 +1,60 @@
#define _CRT_SECURE_NO_WARNINGS
#if defined(_WIN32)
#define ufbx_assert(cond) do { \
if (!(cond)) __debugbreak(); \
} while (0)
#else
#define ufbx_assert(cond) do { \
if (!(cond)) __builtin_trap(); \
} while (0)
#endif
#define ufbxt_assert_fail(file, line, msg) ufbx_assert(false)
#define ufbxt_assert(m_cond) ufbx_assert(m_cond)
#include "../ufbx.c"
#include "../test/check_scene.h"
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
char g_buffer[1024*1024];
int main(int argc, char **argv)
{
ufbx_load_opts opts = { 0 };
#if defined(DISCRETE_ALLOCATIONS)
opts.temp_allocator.huge_threshold = 1;
opts.result_allocator.huge_threshold = 1;
#endif
#if defined(LOAD_OBJ)
opts.file_format = UFBX_FILE_FORMAT_OBJ;
#elif defined(LOAD_MTL)
opts.file_format = UFBX_FILE_FORMAT_MTL;
#elif defined(LOAD_GUESS)
#else
opts.file_format = UFBX_FILE_FORMAT_FBX;
#endif
#if defined(NO_AFL)
size_t size = (size_t)read(0, g_buffer, sizeof(g_buffer));
for (size_t i = 0; i < 10000; i++) {
#else
while (__AFL_LOOP(10000)) {
size_t size = (size_t)read(0, g_buffer, sizeof(g_buffer));
#endif
ufbx_scene *scene = ufbx_load_memory(g_buffer, size, &opts, NULL);
if (scene) {
ufbxt_check_scene(scene);
}
ufbx_free_scene(scene);
}
return 0;
}


@@ -0,0 +1,175 @@
props_str = """
P: "QuaternionInterpolate", "enum", "", "",0
P: "RotationOffset", "Vector3D", "Vector", "",0,0,0
P: "RotationPivot", "Vector3D", "Vector", "",0,0,0
P: "ScalingOffset", "Vector3D", "Vector", "",0,0,0
P: "ScalingPivot", "Vector3D", "Vector", "",0,0,0
P: "TranslationActive", "bool", "", "",0
P: "TranslationMin", "Vector3D", "Vector", "",0,0,0
P: "TranslationMax", "Vector3D", "Vector", "",0,0,0
P: "TranslationMinX", "bool", "", "",0
P: "TranslationMinY", "bool", "", "",0
P: "TranslationMinZ", "bool", "", "",0
P: "TranslationMaxX", "bool", "", "",0
P: "TranslationMaxY", "bool", "", "",0
P: "TranslationMaxZ", "bool", "", "",0
P: "RotationOrder", "enum", "", "",0
P: "RotationSpaceForLimitOnly", "bool", "", "",0
P: "RotationStiffnessX", "double", "Number", "",0
P: "RotationStiffnessY", "double", "Number", "",0
P: "RotationStiffnessZ", "double", "Number", "",0
P: "AxisLen", "double", "Number", "",10
P: "PreRotation", "Vector3D", "Vector", "",0,0,0
P: "PostRotation", "Vector3D", "Vector", "",0,0,0
P: "RotationActive", "bool", "", "",0
P: "RotationMin", "Vector3D", "Vector", "",0,0,0
P: "RotationMax", "Vector3D", "Vector", "",0,0,0
P: "RotationMinX", "bool", "", "",0
P: "RotationMinY", "bool", "", "",0
P: "RotationMinZ", "bool", "", "",0
P: "RotationMaxX", "bool", "", "",0
P: "RotationMaxY", "bool", "", "",0
P: "RotationMaxZ", "bool", "", "",0
P: "InheritType", "enum", "", "",0
P: "ScalingActive", "bool", "", "",0
P: "ScalingMin", "Vector3D", "Vector", "",0,0,0
P: "ScalingMax", "Vector3D", "Vector", "",1,1,1
P: "ScalingMinX", "bool", "", "",0
P: "ScalingMinY", "bool", "", "",0
P: "ScalingMinZ", "bool", "", "",0
P: "ScalingMaxX", "bool", "", "",0
P: "ScalingMaxY", "bool", "", "",0
P: "ScalingMaxZ", "bool", "", "",0
P: "GeometricTranslation", "Vector3D", "Vector", "",0,0,0
P: "GeometricRotation", "Vector3D", "Vector", "",0,0,0
P: "GeometricScaling", "Vector3D", "Vector", "",1,1,1
P: "MinDampRangeX", "double", "Number", "",0
P: "MinDampRangeY", "double", "Number", "",0
P: "MinDampRangeZ", "double", "Number", "",0
P: "MaxDampRangeX", "double", "Number", "",0
P: "MaxDampRangeY", "double", "Number", "",0
P: "MaxDampRangeZ", "double", "Number", "",0
P: "MinDampStrengthX", "double", "Number", "",0
P: "MinDampStrengthY", "double", "Number", "",0
P: "MinDampStrengthZ", "double", "Number", "",0
P: "MaxDampStrengthX", "double", "Number", "",0
P: "MaxDampStrengthY", "double", "Number", "",0
P: "MaxDampStrengthZ", "double", "Number", "",0
P: "PreferedAngleX", "double", "Number", "",0
P: "PreferedAngleY", "double", "Number", "",0
P: "PreferedAngleZ", "double", "Number", "",0
P: "LookAtProperty", "object", "", ""
P: "UpVectorProperty", "object", "", ""
P: "Show", "bool", "", "",1
P: "NegativePercentShapeSupport", "bool", "", "",1
P: "DefaultAttributeIndex", "int", "Integer", "",-1
P: "Freeze", "bool", "", "",0
P: "LODBox", "bool", "", "",0
P: "Lcl Translation", "Lcl Translation", "", "A",0,0,0
P: "Lcl Rotation", "Lcl Rotation", "", "A",0,0,0
P: "Lcl Scaling", "Lcl Scaling", "", "A",1,1,1
P: "Visibility", "Visibility", "", "A",1
P: "Visibility Inheritance", "Visibility Inheritance", "", "",1
P: "ShadingModel", "KString", "", "", "Lambert"
P: "MultiLayer", "bool", "", "",0
P: "EmissiveColor", "Color", "", "A",0,0,0
P: "EmissiveFactor", "Number", "", "A",1
P: "AmbientColor", "Color", "", "A",0.2,0.2,0.2
P: "AmbientFactor", "Number", "", "A",1
P: "DiffuseColor", "Color", "", "A",0.8,0.8,0.8
P: "DiffuseFactor", "Number", "", "A",1
P: "Bump", "Vector3D", "Vector", "",0,0,0
P: "NormalMap", "Vector3D", "Vector", "",0,0,0
P: "BumpFactor", "double", "Number", "",1
P: "TransparentColor", "Color", "", "A",0,0,0
P: "TransparencyFactor", "Number", "", "A",0
P: "DisplacementColor", "ColorRGB", "Color", "",0,0,0
P: "DisplacementFactor", "double", "Number", "",1
P: "VectorDisplacementColor", "ColorRGB", "Color", "",0,0,0
P: "VectorDisplacementFactor", "double", "Number", "",1
P: "Color", "ColorRGB", "Color", "",0.8,0.8,0.8
P: "BBoxMin", "Vector3D", "Vector", "",0,0,0
P: "BBoxMax", "Vector3D", "Vector", "",0,0,0
P: "Primary Visibility", "bool", "", "",1
P: "Casts Shadows", "bool", "", "",1
P: "Receive Shadows", "bool", "", "",1
P: "CastLightOnObject", "bool", "", "",1
P: "DrawVolumetricLight", "bool", "", "",1
P: "DrawGroundProjection", "bool", "", "",1
P: "DrawFrontFacingVolumetricLight", "bool", "", "",0
P: "Intensity", "Number", "", "A",100
P: "InnerAngle", "Number", "", "A",0
P: "OuterAngle", "Number", "", "A",45
P: "Fog", "Number", "", "A",50
P: "DecayType", "enum", "", "",0
P: "DecayStart", "Number", "", "A",0
P: "FileName", "KString", "", "", ""
P: "EnableNearAttenuation", "bool", "", "",0
P: "NearAttenuationStart", "Number", "", "A",0
P: "NearAttenuationEnd", "Number", "", "A",0
P: "EnableFarAttenuation", "bool", "", "",0
P: "FarAttenuationStart", "Number", "", "A",0
P: "FarAttenuationEnd", "Number", "", "A",0
P: "CastShadows", "bool", "", "",0
P: "ShadowColor", "Color", "", "A",0,0,0
P: "AreaLightShape", "enum", "", "",0
P: "LeftBarnDoor", "Float", "", "A",20
P: "RightBarnDoor", "Float", "", "A",20
P: "TopBarnDoor", "Float", "", "A",20
P: "BottomBarnDoor", "Float", "", "A",20
P: "EnableBarnDoor", "Bool", "", "A",0
"""
props = [tuple(p.strip("\" ") for p in l.lstrip("P: ").split(",")) for l in props_str.splitlines()]
props = sorted(props, key=lambda l: l[0])
type_map = {
"Boolean": "UFBX_PROP_BOOLEAN",
"bool": "UFBX_PROP_BOOLEAN",
"Bool": "UFBX_PROP_BOOLEAN",
"Integer": "UFBX_PROP_INTEGER",
"int": "UFBX_PROP_INTEGER",
"enum": "UFBX_PROP_INTEGER",
"Visibility": "UFBX_PROP_INTEGER",
"Visibility Inheritance": "UFBX_PROP_INTEGER",
"Number": "UFBX_PROP_NUMBER",
"double": "UFBX_PROP_NUMBER",
"Real": "UFBX_PROP_NUMBER",
"Float": "UFBX_PROP_NUMBER",
"Intensity": "UFBX_PROP_NUMBER",
"Vector": "UFBX_PROP_VECTOR",
"Vector3D": "UFBX_PROP_VECTOR",
"Color": "UFBX_PROP_COLOR",
"ColorRGB": "UFBX_PROP_COLOR",
"String": "UFBX_PROP_STRING",
"KString": "UFBX_PROP_STRING",
"object": "UFBX_PROP_STRING",
"DateTime": "UFBX_PROP_DATE_TIME",
"Lcl Translation": "UFBX_PROP_TRANSLATION",
"Lcl Rotation": "UFBX_PROP_ROTATION",
"Lcl Scaling": "UFBX_PROP_SCALING",
}
print("static const ufbx_prop ufbxi_default_props[] = {")
for p in props:
if len(p) < 2: continue
name = p[0]
prop_type = type_map[p[1]]
v = p[4:]
if prop_type == "UFBX_PROP_STRING" or prop_type == "UFBX_PROP_DATE_TIME":
if v:
value = "\"{}\"".format(v[0])
else:
value = "0"
elif prop_type == "UFBX_PROP_BOOLEAN" or prop_type == "UFBX_PROP_INTEGER" or prop_type == "UFBX_PROP_NUMBER":
value = "0, {}".format(float(v[0]))
else:
value = "0, {}, {}, {}".format(float(v[0]), float(v[1]), float(v[2]))
print("\t{{ \"{}\", {}, {}, {} }},".format(name, len(name), prop_type, value))
print("};")


@@ -0,0 +1,105 @@
from sympy import *
from sympy.matrices import *
from sympy.algebras.quaternion import Quaternion
qx, qy, qz = symbols("q.x q.y q.z", real=True)
vx, vy, vz = symbols("v.x v.y v.z", real=True)
qw = symbols("q.w", real=True)
qv = Matrix([qx, qy, qz])
v = Matrix([vx, vy, vz])
# https://fgiesen.wordpress.com/2019/02/09/rotating-a-single-vector-using-a-quaternion/
t = (2.0*qv).cross(v)
r = v + qw*t + qv.cross(t)
r = simplify(r)
for a in range(3):
for b in range(a+1, 3):
an = "xyz"[a]
bn = "xyz"[b]
e = qv[a]*v[b] - qv[b]*v[a]
s = symbols(an + bn)
print("ufbx_real {} = {};".format(s, e))
r = r.subs(e, s)
print("ufbx_vec3 r;")
def sgns(s):
return "+" if s >= 0 else "-"
for a in range(3):
an = "xyz"[a]
ex, ey, ez = qx, qy, qz
if a == 0: ex = qw
if a == 1: ey = qw
if a == 2: ez = qw
sx, x = simplify(ex*r[a].coeff(ex) / 2).as_coeff_Mul()
sy, y = simplify(ey*r[a].coeff(ey) / 2).as_coeff_Mul()
sz, z = simplify(ez*r[a].coeff(ez) / 2).as_coeff_Mul()
assert abs(sx) == 1 and abs(sy) == 1 and abs(sz) == 1
w = simplify(r[a] - 2*(sx*x+sy*y+sz*z))
print("r.{} = 2.0 * ({} {} {} {} {} {}) + {};".format(an, sgns(sx), x, sgns(sy), y, sgns(sz), z, w))
print()
ax, ay, az, aw = symbols("a.x a.y a.z a.w", real=True)
bx, by, bz, bw = symbols("b.x b.y b.z b.w", real=True)
qa = Quaternion(aw, ax, ay, az)
qb = Quaternion(bw, bx, by, bz)
qr = qa*qb
print("ufbx_vec4 r;")
print("r.x = {};".format(qr.b))
print("r.y = {};".format(qr.c))
print("r.z = {};".format(qr.d))
print("r.w = {};".format(qr.a))
print()
# Unit quaternion
qx, qy, qz, qw = symbols("q.x q.y q.z q.w", real=True)
qq = Quaternion(qw, qx, qy, qz)
ma = qq.to_rotation_matrix()
ma = ma.subs(qw**2 + qx**2 + qy**2 + qz**2, 1)
qc = (qx, qy, qz, qw)
print("ufbx_vec4 q = t->rotation;")
print("ufbx_real ", end="")
for a in range(3):
if a != 0: print(", ", end="")
an = "xyz"[a]
print("s{0} = 2.0 * t->scale.{0}".format(an), end="")
print(";")
for a in range(4):
print("ufbx_real ", end="")
for b in range(a, 4):
an = "xyzw"[a]
bn = "xyzw"[b]
e = qc[a]*qc[b]
s = an + bn
if b != a: print(", ", end="")
print("{} = {}*{}".format(s, qc[a], qc[b]), end="")
ma = ma.subs(e, s)
print(";")
print("ufbx_matrix m;")
for c in range(3):
for r in range(3):
e = ma[r,c]
t, t12 = e.as_coeff_Add()
s1, e1 = t12.args[0].as_coeff_Mul()
s2, e2 = t12.args[1].as_coeff_Mul()
assert abs(s1) == 2 and abs(s2) == 2
assert t == 0 or t == 1
ts = " + 0.5" if t else ""
sx = "s" + "xyz"[c]
print("m.m{}{} = {} * ({} {} {} {}{});".format(r, c, sx, sgns(s1), e1, sgns(s2), e2, ts))
for r in range(3):
rn = "xyz"[r]
print("m.m{}3 = t->translation.{};".format(r, rn))
print("return m;")
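
A quick numeric sanity check of the r = v + q.w*t + q.xyz × t identity the script expands, using plain floats and made-up values: rotating (1, 0, 0) by 90 degrees about +Z should yield (0, 1, 0).

# Hypothetical numeric check of the rotation identity used above; values are
# invented. q is stored as (x, y, z, w) and must be a unit quaternion.
import math

def rotate(q, v):
    qv, qw = q[:3], q[3]
    t = [2.0 * (qv[1]*v[2] - qv[2]*v[1]),
         2.0 * (qv[2]*v[0] - qv[0]*v[2]),
         2.0 * (qv[0]*v[1] - qv[1]*v[0])]
    cross_qt = [qv[1]*t[2] - qv[2]*t[1],
                qv[2]*t[0] - qv[0]*t[2],
                qv[0]*t[1] - qv[1]*t[0]]
    return [v[i] + qw*t[i] + cross_qt[i] for i in range(3)]

half = math.radians(45.0)
q = (0.0, 0.0, math.sin(half), math.cos(half))  # 90 degrees about +Z
r = rotate(q, (1.0, 0.0, 0.0))
assert all(abs(a - b) < 1e-9 for a, b in zip(r, (0.0, 1.0, 0.0)))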


@@ -0,0 +1,147 @@
import sympy
from sympy.matrices import Matrix
from sympy.algebras.quaternion import Quaternion
orders = "XYZ XZY YZX YXZ ZXY ZYX".split()
unit_vectors = [
(1,0,0),
(0,1,0),
(0,0,1),
]
def rotate_point(q, v):
qv = Matrix([q.b, q.c, q.d])
qw = q.a
vv = Matrix(v)
# https://fgiesen.wordpress.com/2019/02/09/rotating-a-single-vector-using-a-quaternion/
t = (2.0*qv).cross(vv)
r = vv + qw*t + qv.cross(t)
return (r[0], r[1], r[2])
# "Quaternion to Euler Angle Conversion for Arbitrary Rotation Sequence Using Geometric Methods"
# http://www.euclideanspace.com/maths/geometry/rotations/conversions/quaternionToEuler/nhughes/quat_2_euler_for_MB.pdf
def solve_euler_t01(quat, order):
i0,i1,i2 = order
i0n = (i0+1)%3
i0nn = (i0+2)%3
v2 = unit_vectors[i2]
l = quat.a*quat.a + quat.b*quat.b + quat.c*quat.c + quat.d*quat.d
v2r = tuple(c.subs(l, 1) for c in Quaternion.rotate_point(v2, quat))
if (i0+1) % 3 == i1:
t0 = sympy.atan2(-v2r[i0n].factor(), (v2r[i0nn] + l).factor().subs(l, 1) - 1)
t1 = v2r[i0].factor()
else:
t0 = sympy.atan2(v2r[i0nn].factor(), (v2r[i0n] + l).factor().subs(l, 1) - 1)
t1 = -v2r[i0].factor()
return (t0, t1)
def solve_euler_t2_fallback(quat, order, t):
i0,i1,i2 = order
i0n = (i0+1)%3
i0nn = (i0+2)%3
v0 = unit_vectors[i0]
l = quat.a*quat.a + quat.b*quat.b + quat.c*quat.c + quat.d*quat.d
v0r = tuple(c.subs(l, 1) for c in Quaternion.rotate_point(v0, quat))
if (i0+1) % 3 == i1:
t2 = sympy.atan2(t*v0r[i0n].factor(), -t*((v0r[i0nn] + l).factor().subs(l, 1) - 1))
else:
t2 = sympy.atan2(t*v0r[i0nn].factor(), t*((v0r[i0n] + l).factor().subs(l, 1) - 1))
return t2
qx,qy,qz,qw = sympy.symbols("qx qy qz qw")
quat = Quaternion(qw,qx,qy,qz)
def format_c(expr, prec=0):
if expr.is_Add:
terms = sorted((format_c(a, 1) for a in expr.args),
key=lambda s: s.startswith("-"))
args = []
for term in terms:
if len(args) > 0:
if term.startswith("-"):
args.append("-")
term = term[1:]
else:
args.append("+")
args.append(term)
args = " ".join(args)
return f"({args})" if prec > 1 else args
elif expr.is_Mul:
if expr.args[0] == -1:
args = "*".join(format_c(a, 2) for a in expr.args[1:])
return f"-({args})" if prec > 2 else f"-{args}"
else:
args = "*".join(format_c(a, 2) for a in expr.args)
return f"({args})" if prec > 2 else args
elif expr.is_Function:
args = ", ".join(format_c(a, 0) for a in expr.args)
name = expr.func.__name__
if name == "asin":
name = "ufbxi_asin"
return f"{name}({args})"
elif expr.is_Pow:
base, exp = expr.args
if base.is_Symbol and exp == 2:
b = format_c(base, 2)
return f"({b}*{b})" if prec > 2 else f"{b}*{b}"
else:
return f"pow({format_c(base, 0)}, {format_c(exp, 0)})"
elif expr.is_Integer:
return f"{int(expr.evalf())}.0f"
elif expr.is_Symbol:
return expr.name
else:
raise TypeError(f"Unhandled type {type(expr)}")
def format_code(expr):
return format_c(expr)
# return sympy.ccode(expr).replace("asin", "ufbxi_asin")
print("\tswitch (order) {")
for order_s in orders:
order = tuple(reversed(tuple("XYZ".index(o) for o in order_s)))
t0, t1 = solve_euler_t01(quat, order)
rev_order = tuple(reversed(order))
rev_quat = Quaternion.conjugate(quat)
t2n, _ = solve_euler_t01(rev_quat, rev_order)
t2f = solve_euler_t2_fallback(quat, order, sympy.Symbol("t"))
ts = (t0, t1, -t2n)
print("\tcase UFBX_ROTATION_{}:".format(order_s))
c0 = "xyz"[order[0]]
c1 = "xyz"[order[1]]
c2 = "xyz"[order[2]]
e0 = format_code(t0)
e1 = format_code(t1)
e2 = format_code(-t2n)
f2 = format_code(t2f)
print(f"\t\tt = {e1};")
print(f"\t\tif (fabs(t) < eps) {{")
print(f"\t\t\tv.{c1} = (ufbx_real)asin(t);")
print(f"\t\t\tv.{c0} = (ufbx_real){e0};")
print(f"\t\t\tv.{c2} = (ufbx_real){e2};")
print("\t\t} else {")
print(f"\t\t\tv.{c1} = (ufbx_real)copysign(UFBXI_DPI*0.5, t);")
print(f"\t\t\tv.{c0} = (ufbx_real)({f2});")
print(f"\t\t\tv.{c2} = 0.0f;")
print("\t\t}")
print("\t\tbreak;")
print("\tdefault:")
print("\t\tv.x = v.y = v.z = 0.0;")
print("\t\tbreak;")
print("\t}")


@@ -0,0 +1,25 @@
import sympy
from sympy.algebras.quaternion import Quaternion
cx,sx,cy,sy,cz,sz = sympy.symbols("cx sx cy sy cz sz")
qx = Quaternion(cx, sx, 0, 0)
qy = Quaternion(cy, 0, sy, 0)
qz = Quaternion(cz, 0, 0, sz)
qs = { "X": qx, "Y": qy, "Z": qz }
orders = "XYZ XZY YZX YXZ ZXY ZYX".split()
print("\tswitch (order) {")
for order in orders:
q = qs[order[2]] * qs[order[1]] * qs[order[0]]
print("\tcase UFBX_ROTATION_{}:".format(order))
print("\t\tq.x = {};".format(q.b))
print("\t\tq.y = {};".format(q.c))
print("\t\tq.z = {};".format(q.d))
print("\t\tq.w = {};".format(q.a))
print("\t\tbreak;")
print("\tdefault:")
print("\t\tq.x = q.y = q.z = 0.0; q.w = 1.0;")
print("\t\tbreak;")
print("\t}")


@@ -0,0 +1,29 @@
categories = [
" \r\n\t", # UFBXI_XML_CTYPE_WHITESPACE
"\'", # UFBXI_XML_CTYPE_SINGLE_QUOTE
"\"", # UFBXI_XML_CTYPE_DOUBLE_QUOTE
" \r\n\t?/>\'\"=", # UFBXI_XML_CTYPE_NAME_END
"<", # UFBXI_XML_CTYPE_TAG_START
"\0", # UFBXI_XML_CTYPE_END_OF_FILE
]
def generate_bits(categories):
for ix in range(256):
ch = chr(ix)
bits = 0
for bit, cat in enumerate(categories):
if ch in cat:
bits |= 1 << bit
yield bits
bits = list(generate_bits(categories))
# Truncate down to what's needed
num_bits = 64
assert all(b == 0 for b in bits[num_bits:])
bits = bits[:num_bits]
chunk = 32
for base in range(0, len(bits), chunk):
print("".join("{},".format(b) for b in bits[base:base+chunk]))
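
Each byte of the emitted table is a bitmask over the categories in order (bit 0 = whitespace, bit 1 = single quote, and so on, matching the UFBXI_XML_CTYPE_* comments). A hypothetical spot check appended to the generator; the constant values below assume that bit order and are not taken from ufbx.c:

# Hypothetical spot check of the emitted bitmask values; the constants assume
# bit positions follow the order of `categories` above.
CTYPE_WHITESPACE  = 1 << 0
CTYPE_NAME_END    = 1 << 3
CTYPE_TAG_START   = 1 << 4
CTYPE_END_OF_FILE = 1 << 5

assert bits[ord(" ")] & CTYPE_WHITESPACE
assert bits[ord(" ")] & CTYPE_NAME_END   # whitespace also terminates a name
assert bits[ord("<")] == CTYPE_TAG_START
assert bits[0] == CTYPE_END_OF_FILE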


@@ -0,0 +1,12 @@
#!/usr/bin/env bash
set -x
set -e
LLVM_COV="${LLVM_COV:-llvm-cov}"
LLVM_GCOV=$(realpath misc/llvm_gcov.sh)
chmod +x misc/llvm_gcov.sh
clang -lm -coverage -g -std=gnu99 -DNDEBUG -DUFBX_NO_ASSERT -DUFBX_DEV=1 -DUFBX_REGRESSION=1 ufbx.c test/runner.c -o build/cov-runner
build/cov-runner -d data
$LLVM_COV gcov ufbx runner -b
lcov --directory . --base-directory . --gcov-tool $LLVM_GCOV --rc lcov_branch_coverage=1 --capture -o coverage.lcov


@@ -0,0 +1,59 @@
import subprocess
import os
def is_ascii(s):
try:
_ = s.encode("ascii")
return True
except UnicodeEncodeError:
return False
if __name__ == "__main__":
from argparse import ArgumentParser
parser = ArgumentParser("generate_hashes.py --exe <exe> -o hashes.txt")
parser.add_argument("--verbose", action="store_true", help="Show output")
parser.add_argument("--exe", required=True, help="hash_scene.c executable")
parser.add_argument("-o", required=True, help="Output file path")
argv = parser.parse_args()
with open(argv.o, "wt") as f:
for root, dirs, files in os.walk("data"):
for file in files:
path = os.path.join(root, file).replace("\\", "/")
if not is_ascii(path): continue
if "_fail_" in path: continue
if "/fuzz/" in path: continue
if "/obj_fuzz/" in path: continue
if "/mtl_fuzz/" in path: continue
if " " in path: continue
if file.endswith(".fbx"):
prev_output = None
for frame in range(0, 10):
args = [argv.exe, path]
if frame >= 0:
frame = frame * frame
args += ["--frame", str(frame)]
output = subprocess.check_output(args)
output = output.decode("utf-8").strip()
if output == prev_output:
break
line = f"{output} {frame:3} {path}"
if argv.verbose:
print(line)
print(line, file=f)
prev_output = output
elif file.endswith(".obj") or file.endswith(".mtl"):
args = [argv.exe, path]
frame = 0
output = subprocess.check_output(args)
output = output.decode("utf-8").strip()
line = f"{output} {frame:3} {path}"
if argv.verbose:
print(line)
print(line, file=f)
prev_output = output


@@ -0,0 +1,20 @@
import os
import re
self_path = os.path.dirname(__file__)
ufbx_path = os.path.join(self_path, "..", "ufbx.h")
if __name__ == "__main__":
version = None
with open(ufbx_path, "rt") as f:
for line in f:
m = re.match(r"#define\s+UFBX_HEADER_VERSION\s+ufbx_pack_version\s*\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)\s*", line)
if m:
version = (int(m.group(1)), int(m.group(2)), int(m.group(3)))
break
if not version:
raise RuntimeError("Could not find version from header")
major, minor, patch = version
print(f"v{major}.{minor}.{patch}")
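
For illustration, a header line in the form the regex above expects (version numbers made up here) would be reported as v1.2.3:

# Hypothetical example of the line format get_header_tag.py matches; the
# version numbers are invented.
import re
line = "#define UFBX_HEADER_VERSION ufbx_pack_version(1, 2, 3)"
m = re.match(r"#define\s+UFBX_HEADER_VERSION\s+ufbx_pack_version\s*\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)\s*", line)
assert m and m.groups() == ("1", "2", "3")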


@@ -0,0 +1,229 @@
#include "../ufbx.c"
#include <inttypes.h>
#include <stdio.h>
#include <stdlib.h>
#if _OPENMP
#include <omp.h> // for omp_set_num_threads()
#endif
typedef struct {
uint32_t begin;
uint32_t increment;
uint64_t attempts;
uint32_t *slots;
size_t num_slots;
size_t target_slot;
} hash_info;
typedef void hash_fn(hash_info info);
typedef struct {
char str[256];
uint32_t *slots;
size_t mask;
size_t max_length;
int64_t attempts_left;
size_t target_slot;
} str_state;
ufbxi_noinline void print_string(str_state *state, size_t length)
{
#if _OPENMP
#pragma omp critical
#endif
{
state->str[length] = '\0';
puts(state->str);
}
}
ufbxi_noinline void hash_string_imp(str_state *state, size_t length)
{
if (state->attempts_left < 0) return;
state->attempts_left -= ('Z' - 'A' + 1) * 2;
uint32_t *slots = state->slots;
size_t mask = state->mask;
size_t target_slot = state->target_slot;
char *p = state->str + length - 1;
for (uint32_t c = 'A'; c <= 'Z'; c++) {
*p = c;
uint32_t hash = ufbxi_hash_string(state->str, length);
if ((hash & mask) == target_slot) print_string(state, length);
slots[hash & mask]++;
*p = c | 0x20;
hash = ufbxi_hash_string(state->str, length);
if ((hash & mask) == target_slot) print_string(state, length);
slots[hash & mask]++;
}
if (length < state->max_length) {
for (uint32_t c = 'A'; c <= 'Z'; c++) {
*p = c;
hash_string_imp(state, length + 1);
*p = c | 0x20;
hash_string_imp(state, length + 1);
}
}
}
ufbxi_noinline void hash_string(hash_info info)
{
size_t mask = info.num_slots - 1;
str_state state;
state.attempts_left = (int64_t)info.attempts;
state.mask = mask;
state.slots = info.slots;
state.target_slot = info.target_slot;
size_t max_len = 0;
uint64_t len_attempts = 1;
while (len_attempts < info.attempts) {
len_attempts *= ('Z' - 'A' + 1) * 2;
max_len += 1;
}
state.max_length = max_len;
for (uint32_t c = 'A' + info.begin; c <= 'Z'; c += info.increment) {
state.str[0] = c;
uint32_t hash = ufbxi_hash_string(state.str, 1);
if ((hash & mask) == info.target_slot) print_string(&state, 1);
info.slots[hash & mask]++;
if (max_len > 1) {
hash_string_imp(&state, 2);
}
state.str[0] = c | 0x20;
hash = ufbxi_hash_string(state.str, 1);
if ((hash & mask) == info.target_slot) print_string(&state, 1);
info.slots[hash & mask]++;
if (max_len > 1) {
hash_string_imp(&state, 2);
}
}
}
ufbxi_noinline void print_uint64(uint64_t v)
{
#if _OPENMP
#pragma omp critical
#endif
{
printf("%" PRIu64 "\n", v);
}
}
ufbxi_noinline void hash_uint64(hash_info info)
{
size_t mask = info.num_slots - 1;
uint64_t increment = info.increment;
uint64_t end = info.attempts;
for (uint64_t i = info.begin; i < end; i += increment) {
uint32_t hash = ufbxi_hash64(i);
if ((hash & mask) == info.target_slot) print_uint64(i);
info.slots[hash & mask]++;
}
}
ufbxi_noinline void run_test(hash_info info, hash_fn *fn, uint32_t *accumulator)
{
info.slots = malloc(sizeof(uint32_t) * info.num_slots);
memset(info.slots, 0, sizeof(uint32_t) * info.num_slots);
fn(info);
#if _OPENMP
#pragma omp critical
#endif
{
for (size_t i = 0; i < info.num_slots; i++) {
accumulator[i] += info.slots[i];
}
}
}
typedef struct {
const char *name;
hash_fn *fn;
} hash_test;
hash_test tests[] = {
"string", &hash_string,
"uint64", &hash_uint64,
};
int main(int argc, char **argv)
{
const char *func = "";
size_t num_slots = 0;
size_t target_slot = SIZE_MAX;
uint64_t attempts = 0;
for (int i = 1; i < argc; i++) {
if (!strcmp(argv[i], "-f")) {
if (++i < argc) func = argv[i];
} else if (!strcmp(argv[i], "--slots")) {
if (++i < argc) num_slots = (size_t)strtoull(argv[i], NULL, 10);
} else if (!strcmp(argv[i], "--attempts")) {
if (++i < argc) attempts = (uint64_t)strtoull(argv[i], NULL, 10);
} else if (!strcmp(argv[i], "--target")) {
if (++i < argc) target_slot = (size_t)strtoull(argv[i], NULL, 10);
} else if (!strcmp(argv[i], "--threads")) {
#if _OPENMP
if (++i < argc) omp_set_num_threads(atoi(argv[i]));
#endif
}
}
hash_fn *hash_fn = NULL;
for (size_t i = 0; i < ufbxi_arraycount(tests); i++) {
if (!strcmp(tests[i].name, func)) {
hash_fn = tests[i].fn;
break;
}
}
if (!hash_fn) {
fprintf(stderr, "Unkonwn hash function '%s'\n", func);
return 1;
}
if ((num_slots & (num_slots - 1)) != 0) {
fprintf(stderr, "Slot amount must be a power of two, got %zu\n", num_slots);
return 1;
}
uint32_t *slots = malloc(num_slots * sizeof(uint32_t));
memset(slots, 0, sizeof(uint32_t) * num_slots);
hash_info info;
info.attempts = (int64_t)attempts;
info.increment = 1;
info.num_slots = num_slots;
info.target_slot = target_slot;
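// Each thread covers a strided subset of the input space: thread i starts at
// offset i and advances by the thread count, merging its per-thread slot counts
// into the shared accumulator inside run_test().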
#if _OPENMP
#pragma omp parallel firstprivate(info)
{
info.begin = (uint64_t)omp_get_thread_num();
info.increment = (uint64_t)omp_get_num_threads();
run_test(info, hash_fn, slots);
}
#else
{
info.begin = 0;
info.increment = 1;
run_test(info, hash_fn, slots);
}
#endif
uint32_t max_collisions = 0;
size_t worst_slot = 0;
for (size_t i = 0; i < num_slots; i++) {
if (slots[i] > max_collisions) {
max_collisions = slots[i];
worst_slot = i;
}
}
if (target_slot == SIZE_MAX) {
printf("Worst slot: %zu (%u collisions)\n", worst_slot, max_collisions);
}
return 0;
}

View File

@@ -0,0 +1,150 @@
import gzip
import os
import re
import sys
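# Dump files are plain text: a line starting with "==" opens a new section
# (one runner's dump), a line starting with "--" names a sub-dump inside it,
# and every other line is content of the most recent "--" entry.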
def fetch_dumps(file):
files = { }
cur_file = None
cur_dump = None
for line in file:
line = line.rstrip()
if line.startswith("=="):
name = line[2:].strip()
cur_file = { }
files[name] = cur_file
elif line.startswith("--"):
name = line[2:].strip()
cur_dump = []
cur_file[name] = cur_dump
elif cur_dump is not None:
cur_dump.append(line)
return files
def diff_dump(dump, ref, file, runner, ref_runner):
num_lines = max(len(dump), len(ref))
stack = []
has_diff = False
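# Walk both dumps line by line, tracking the enclosing "{ ... }" scopes for
# context, and print a few surrounding lines from both runners at the first
# mismatch.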
for ix in range(num_lines):
dline = dump[ix] if ix < len(dump) else ""
rline = ref[ix] if ix < len(ref) else ""
if dline == rline:
if "{" in dline:
stack.append(dline)
elif "}" in dline:
stack.pop()
else:
span = 2
start = max(0, ix - span)
stop = ix + span + 1
has_diff = True
print(f"== {file}")
print()
print(" ".join(s.strip().rstrip("{ ") for s in stack))
print()
print(f"-- {runner}")
print()
for lix in range(start, stop):
dl = dump[lix] if lix < len(dump) else ""
rl = ref[lix] if lix < len(ref) else ""
prefix = "> " if dl != rl else " "
print(prefix + dl)
print()
print(f"-- {ref_runner}")
print()
for lix in range(start, stop):
dl = dump[lix] if lix < len(dump) else ""
rl = ref[lix] if lix < len(ref) else ""
prefix = "> " if dl != rl else " "
print(prefix + rl)
print()
break
return has_diff
def do_compress(argv):
with gzip.open(argv.o, "wt", compresslevel=8) as outf:
for file in os.listdir(argv.directory):
name = file[:-4] if file.endswith(".txt") else file
path = os.path.join(argv.directory, file)
print(f"== {name}", file=outf)
with open(path, "rt") as inf:
outf.writelines(inf)
def do_list(argv):
entries = set()
for file in os.listdir(argv.directory):
path = os.path.join(argv.directory, file)
with gzip.open(path, "rt") as inf:
for line in inf:
line = line.strip()
if not line:
continue
m = re.match(r"--\s*(\d+)\s+(.+)", line)
if m:
frame = m.group(1)
path = m.group(2)
entries.add((frame, path))
with open(argv.o, "wt") as outf:
for frame, path in entries:
print(f"0000000000000000 {frame} {path}", file=outf)
def do_diff(argv):
with gzip.open(argv.ref, "rt") as inf:
ref_dumps = fetch_dumps(inf)
ref_tuple = next(iter(ref_dumps.items()), None)
if not ref_tuple:
return
has_diff = False
ref_runner, ref_file = ref_tuple
for file in os.listdir(argv.directory):
path = os.path.join(argv.directory, file)
with gzip.open(path, "rt") as inf:
dumps = fetch_dumps(inf)
for runner, files in dumps.items():
for file, dump in files.items():
ref = ref_file[file]
if diff_dump(dump, ref, file, runner, ref_runner):
has_diff = True
if has_diff:
sys.exit(1)
if __name__ == "__main__":
from argparse import ArgumentParser
parser = ArgumentParser(prog="hash_diff.py")
parser.add_argument("--verbose", action="store_true", help="Show extra information")
subparsers = parser.add_subparsers(metavar="cmd")
parser_compress = subparsers.add_parser("compress", help="Compress files")
parser_compress.add_argument("directory", help="Directory of hash files to compress")
parser_compress.add_argument("-o", metavar="output.gz", required=True, help="Output .gz filename")
parser_compress.set_defaults(func=do_compress)
parser_dump = subparsers.add_parser("list", help="List all files from other dumps in hash check compatible format")
parser_dump.add_argument("directory", help="Directory of .gz dump files generated by 'compress'")
parser_dump.add_argument("-o", metavar="output.txt", required=True, help="Output .txt filename")
parser_dump.set_defaults(func=do_list)
parser_diff = subparsers.add_parser("diff", help="Compare dumps")
parser_diff.add_argument("directory", help="Directory of .gz dump files generated by 'compress'")
parser_diff.add_argument("--ref", metavar="ref.gz", required=True, help="Reference hash dump .gz")
parser_diff.set_defaults(func=do_diff)
argv = parser.parse_args()
if "func" not in argv:
parser.print_help()
else:
argv.func(argv)

View File

@@ -0,0 +1,2 @@
#!/usr/bin/env bash
exec $LLVM_COV gcov "$@"

View File

@@ -0,0 +1,75 @@
import argparse
import re
parser = argparse.ArgumentParser(usage="macro_expander.py macro files")
parser.add_argument("macro")
parser.add_argument("files", nargs="*")
argv = parser.parse_args()
macro_lines = []
macro_params = []
RE_NEWLINE = re.compile(r"\n")
RE_DEF = re.compile(rf"\n\s*#define\s+{re.escape(argv.macro)}\s*\(([^)]*)\)\s*(.*)\\\s*\r?\n", re.M)
RE_LINE = re.compile(r"\t?(.*?)\s*(\\?)\r?\n", re.M)
RE_USE = re.compile(rf"{re.escape(argv.macro)}\s*\(", re.M)
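# For each file: first locate the #define of the requested macro (capturing its
# parameters and backslash-continued body lines), then expand every later use
# by printing #define lines for the arguments, the body, and matching #undefs.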
for path in argv.files:
with open(path, "rt") as f:
text = f.read()
lineno = 1
linestart = 0
pos = 0
while True:
if not macro_lines:
m = RE_DEF.search(text, pos)
if m:
macro_params = [a.strip() for a in m.group(1).split(",")]
first_line = m.group(2).strip()
if first_line:
macro_lines.append(first_line)
pos = m.end()
while True:
m = RE_LINE.match(text, pos)
if not m: break
pos = m.end()
macro_lines.append(m.group(1))
if not m.group(2): break
else:
m = RE_USE.search(text, pos)
if not m: break
lineno += sum(1 for _ in RE_NEWLINE.finditer(text, linestart, m.start()))
linestart = m.start()
tlen = len(text)
args = [[]]
level = 0
pos = m.end()
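# Collect the argument list character by character, splitting only at commas
# that occur at parenthesis nesting level zero.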
while pos < tlen:
c = text[pos]
if c == "(":
level += 1
args[-1].append(c)
elif c == ")":
level -= 1
if level < 0: break
args[-1].append(c)
elif c == "," and level == 0:
args.append([])
else:
args[-1].append(c)
pos += 1
args = ["".join(a).strip() for a in args]
print()
print(f"-- {path}:{lineno}")
for param, arg in zip(macro_params, args):
print(f"#define {param} {arg}")
for line in macro_lines:
print(line)
for param in macro_params:
print(f"#undef {param}")

File diff suppressed because it is too large

View File

@@ -0,0 +1,37 @@
import transmute_fbx as tfbx
from dataclasses import replace
Node = tfbx.Node
Value = tfbx.Value
divisor = 1
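# Split each "Content" blob into progressively more "R" chunks: the first node
# is kept whole, the next is split into roughly 4 pieces, then 16, and so on.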
def replace_content(node: Node) -> Node:
global divisor
if node.name == b"Content":
value = node.values[0]
values = []
data = value.value
size = len(data)
step = max(1, size // divisor)
for base in range(0, size, step):
values.append(Value(b"R", data[base:base+step]))
divisor *= 4
return replace(node, values=values)
return replace(node, children=[replace_content(n) for n in node.children])
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(usage="transmute_fbx.py src -o dst -v 7400 -f binary-be")
parser.add_argument("src", help="Source file to read")
parser.add_argument("dst", help="Output filename")
argv = parser.parse_args()
with open(argv.src, "rb") as f:
fbx = tfbx.parse_fbx(f)
fbx = replace(fbx, root=replace_content(fbx.root))
with open(argv.dst, "wb") as f:
bf = tfbx.BinaryFormat(fbx.version, False, 1)
tfbx.binary_dump_root(f, fbx.root, bf, fbx.footer)

View File

@@ -0,0 +1,6 @@
#include "../ufbx.c"
int main()
{
return 0;
}

View File

@@ -0,0 +1,58 @@
#define ufbx_assert(cond) do { \
if (!(cond)) exit(1); \
} while (0)
#include "../ufbx.c"
#include <stdio.h>
#include <stdlib.h>
#ifdef _WIN32
#include <fcntl.h>
#include <io.h>
#endif
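// Read <src-size> bytes of raw DEFLATE data from stdin, inflate them into a
// <dst-size> byte buffer with ufbx_inflate(), and write the result to stdout.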
int main(int argc, char **argv)
{
if (argc < 3) {
fprintf(stderr, "Usage: fuzz_test <src-size> <dst-size>\n");
return 1;
}
#ifdef _WIN32
_setmode(_fileno(stdin), _O_BINARY);
_setmode(_fileno(stdout), _O_BINARY);
#endif
size_t src_size = atoi(argv[1]);
size_t dst_size = atoi(argv[2]);
char *src = malloc(src_size);
char *dst = malloc(dst_size);
size_t num_read = fread(src, 1, src_size, stdin);
if (num_read != src_size) {
fprintf(stderr, "Failed to read input\n");
return 1;
}
ufbx_inflate_retain retain;
retain.initialized = false;
ufbx_inflate_input input = { 0 };
input.data = src;
input.data_size = src_size;
input.total_size = src_size;
ptrdiff_t result = ufbx_inflate(dst, dst_size, &input, &retain);
if (result != dst_size) {
fprintf(stderr, "Failed to decompress: %d\n", (int)result);
return 1;
}
fwrite(dst, 1, dst_size, stdout);
free(src);
free(dst);
return 0;
}

View File

@@ -0,0 +1,58 @@
import os
import sys
import zlib
import subprocess
if len(sys.argv) < 2:
filepath = os.path.dirname(sys.argv[0])
if os.name == 'nt':
exe = os.path.join(filepath, "../build/test_deflate.exe")
else:
exe = os.path.join(filepath, "../build/test_deflate")
exe = os.path.relpath(exe)
else:
exe = sys.argv[1]
path = "."
if len(sys.argv) >= 3:
path = sys.argv[2]
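# Compress every file under the given directory with zlib at levels 1..9,
# feed each compressed stream through the test_deflate executable, and
# require a byte-exact round trip.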
def test(data):
for level in range(1, 10):
if level < 9:
print("{},".format(level), end="")
else:
print("{}".format(level), end="")
compressed = zlib.compress(data, level)
args = [exe, str(len(compressed)), str(len(data))]
result = subprocess.check_output(args, input=compressed)
if result != data:
raise ValueError("Decompression mismatch")
for r,dirs,files in os.walk(path):
# HACK: Ignore .git directories
if ".git" in r:
continue
for file in files:
sys.stdout.flush()
path = os.path.join(r, file)
try:
print(path, end=': ')
except:
# Print fails sometimes with weird filenames (?)
continue
try:
f = open(path, "rb")
except:
print("SKIP")
continue
b = f.read()
try:
test(b)
print()
except Exception as e:
print()
print("FAIL ({})".format(e))
sys.exit(1)
f.close()

View File

@@ -0,0 +1,433 @@
import zlib_debug_compressor as zz
import zlib
import sys
import itertools
import random
def test_dynamic():
"""Simple dynamic Huffman tree compressed block"""
opts = zz.Options(force_block_types=[2])
data = b"Hello Hello!"
return data, zz.deflate(data, opts)
def test_dynamic_no_match():
"""Simple dynamic Huffman tree without matches"""
opts = zz.Options(force_block_types=[2])
data = b"Hello World!"
return data, zz.deflate(data, opts)
def test_dynamic_empty():
"""Dynamic Huffman block with a single symbol (end)"""
opts = zz.Options(force_block_types=[2])
data = b""
return data, zz.deflate(data, opts)
def test_dynamic_rle():
"""Simple dynamic Huffman with a single repeating match"""
opts = zz.Options(force_block_types=[2])
data = b"AAAAAAAAAAAAAAAAA"
message = [zz.Literal(b"A"), zz.Match(16, 1)]
return data, zz.compress_message(message, opts)
def test_dynamic_rle_boundary():
"""Simple dynamic Huffman with a single repeating match, adjusted to cross a 16 byte boundary"""
opts = zz.Options(force_block_types=[2])
data = b"AAAAAAAAAAAAAAAAAAAAAAAAA"
message = [zz.Literal(b"A"), zz.Match(24, 1)]
return data, zz.compress_message(message, opts)
def test_repeat_length():
"""Dynamic Huffman compressed block with repeat lengths"""
data = b"ABCDEFGHIJKLMNOPQRSTUVWXYZZYXWVUTSRQPONMLKJIHGFEDCBA"
return data, zz.deflate(data)
def test_huff_lengths():
"""Test all possible lit/len code lengths"""
data = b"0123456789ABCDE"
freq = 1
probs = { }
for c in data:
probs[c] = freq
freq *= 2
opts = zz.Options(force_block_types=[2], override_litlen_counts=probs)
return data, zz.deflate(data, opts)
def test_multi_part_matches():
"""Matches that refer to earlier compression blocks"""
data = b"Test Part Data Data Test Data Part New Test Data"
opts = zz.Options(block_size=4, force_block_types=[0,1,2,0,1,2])
return data, zz.deflate(data, opts)
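# Build a message whose matches hit every DEFLATE length and distance bucket
# boundary (lengths 3..258, distances from 1 up to and past 32768).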
def create_match_distances_and_lengths_message():
lens = [3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,
23,24,25,26,27,28,29,30,31,32,33,34,35,39,42,43,48,50,51,
55,58,59,63,66,67,70,82,83,90,98,99,105,114,115,120,130,
131,140,150,162,163,170,180,194,195,200,210,226,227,230,
240,250,257,258]
dists = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,20,24,25,28,
32,33,40,48,49,50,64,65,75,96,97,110,128,129,160,192,
193,230,256,257,330,384,385,400,512,513,600,768,769,
900,1024,1025,1250,1536,1537,1800,2048,2049,2500,3072,
3073,3500,4096,4097,5000,6144,6145,7000,8192,8193,10000,
12288,12289,14000,16384,16385,20000,24576,24577,25000,
26000, 27000, 28000, 29000, 30000, 31000, 32768, 32768+300]
message = []
l_iter = itertools.chain(lens, itertools.repeat(lens[-1]))
lit_iter = itertools.cycle(range(0,256))
pos = 0
prev_d = 1
for d in dists:
while pos < d:
l = next(l_iter)
pos += l
message.append(zz.Literal(bytes([next(lit_iter), next(lit_iter)])))
message.append(zz.Match(l, prev_d))
prev_d = d
return message
def test_static_distances_and_lengths():
"""Test all possible match length and distance buckets (Static)"""
message = create_match_distances_and_lengths_message()
opts = zz.Options(block_size=4294967296, force_block_types=[1])
data = zz.decode(message)
return data, zz.compress_message(message, opts)
def test_dynamic_distances_and_lengths():
"""Test all possible match length and distance buckets (Dynamic)"""
message = create_match_distances_and_lengths_message()
opts = zz.Options(block_size=4294967296, force_block_types=[2])
data = zz.decode(message)
return data, zz.compress_message(message, opts)
def test_long_codes():
"""Test longest possible bit-codes for symbols"""
message = [zz.Literal(b"test")]
pos = 0
matches = [(140,10000),(180,14000),(210,20000),(230,30000)]
while pos < 30000:
message.append(zz.Match(258, 4))
next_pos = pos + 258
for l,o in matches:
if pos < o and next_pos >= o:
for n in range(5):
for m in range(n - 1):
message.append(zz.Literal(bytes([ord("A") + m])))
message.append(zz.Match(l, o))
next_pos += l
l += 1
pos = next_pos
ll_override = { }
count = 1000000000
for ll in itertools.chain([285], b"Test", range(260,284)):
ll_override[ll] = count
count /= 2
dist_override = { }
count = 1000000000
for dist in itertools.chain([3], range(10,28)):
dist_override[dist] = count
count /= 2
opts = zz.Options(block_size=4294967296, force_block_types=[2],
override_litlen_counts=ll_override,
override_dist_counts=dist_override)
data = zz.decode(message)
return data, zz.compress_message(message, opts)
def test_long_code_sequences():
"""Test sequences of long codes with N bit symbols"""
messages = []
# Generate random prefix
random.seed(1)
total_message = []
message = []
data = bytes(random.choices(range(ord("0"), ord("4")), k=300))
message.append(zz.Literal(data))
message_len = 300
while message_len <= 24000:
dist = min(random.randrange(256, 1024), message_len - 200)
message.append(zz.Match(200, dist))
data = bytes(random.choices(range(ord("0"), ord("4")), k=10))
message.append(zz.Literal(data))
message_len += 210
opts = zz.Options(force_block_types=[2])
messages += [message, opts]
total_message += message
# Generate matches with increasing bit counts
for ll_bits in range(2, 15+1):
for dist_bits in [ll_bits, 15]:
message = []
ll_override = { }
dist_override = { }
for n in range(ll_bits - 3):
ll_override[n] = 2**(32-n)
for n in range(dist_bits - 1):
dist_override[n] = 2**(32-n)
for ll in [256, 284, ord("A"), ord("B"), ord("C"), ord("D"), ord("E"), ord("F")]:
ll_override[ll] = 2**8
dist_override[29] = 2**8
match_len = random.randrange(230, 250)
match_dist = random.randrange(17000, 24000)
message.append(zz.Match(match_len, match_dist))
for lits in range(0, 8):
if lits:
message.append(zz.Literal(bytes(random.choices(b"ABCDEF", k=lits))))
match_len = random.randrange(230, 250)
match_dist = random.randrange(17000, 24000)
message.append(zz.Match(match_len, match_dist))
opts = zz.Options(force_block_types=[2],
override_litlen_counts=ll_override,
override_dist_counts=dist_override)
messages += [message, opts]
total_message += message
data = zz.decode(total_message)
return data, zz.compress_message(*messages)
def test_two_symbol_bits():
"""Test some combinations of bit lengths for two symbols"""
messages = []
data = b""
for lo in range(2, 16):
for hi in range(lo, min(lo + 6, 16)):
delta = hi - lo
ll_override = { }
ll_override[256] = 64**16
for n in range(lo):
ll_override[96 + n] = 8**(16-n)
ll_override[ord("A")] = 8**(16-lo)
for n in range(2**delta):
assert n < 64
ll_override[n] = 8**(16-hi)
ll_override[ord("B")] = 8**(16-hi)
message = [zz.Literal(b"AB")]
data += b"AB"
opts = zz.Options(force_block_types=[2],
override_litlen_counts=ll_override)
messages += [message, opts]
return data, zz.compress_message(*messages)
def test_fail_codelen_16_overflow():
"""Test oveflow of codelen symbol 16"""
data = b"\xfd\xfe\xff"
opts = zz.Options(force_block_types=[2])
buf = zz.deflate(data, opts)
# Patch Litlen 254-256 repeat extra N to 4
buf.patch(0x66, 1, 2)
return data, buf
def test_fail_codelen_17_overflow():
"""Test oveflow of codelen symbol 17"""
data = b"\xfc"
opts = zz.Options(force_block_types=[2])
buf = zz.deflate(data, opts)
# Patch Litlen 254-256 zero extra N to 5
buf.patch(0x6c, 2, 3)
return data, buf
def test_fail_codelen_18_overflow():
"""Test oveflow of codelen symbol 18"""
data = b"\xf4"
opts = zz.Options(force_block_types=[2])
buf = zz.deflate(data, opts)
# Patch Litlen 254-256 extra N to 13
buf.patch(0x6a, 2, 7)
return data, buf
def test_fail_codelen_overfull():
"""Test bad codelen Huffman tree with too many symbols"""
data = b"Codelen"
opts = zz.Options(force_block_types=[2])
buf = zz.deflate(data, opts)
# Over-filled Huffman tree
buf.patch(0x30, 1, 3)
return data, buf
def test_fail_codelen_underfull():
"""Test bad codelen Huffman tree too few symbols"""
data = b"Codelen"
opts = zz.Options(force_block_types=[2])
buf = zz.deflate(data, opts)
# Under-filled Huffman tree
buf.patch(0x4e, 5, 3)
return data, buf
def test_fail_litlen_bad_huffman():
"""Test bad lit/len Huffman tree"""
data = b"Literal/Length codes"
opts = zz.Options(force_block_types=[2])
buf = zz.deflate(data, opts)
# Under-filled Huffman tree
buf.patch(0x6d, 1, 2)
return data, buf
def test_fail_distance_bad_huffman():
"""Test bad distance Huffman tree"""
data = b"Dist Dist .. Dist"
opts = zz.Options(force_block_types=[2])
buf = zz.deflate(data, opts)
# Under-filled Huffman tree
buf.patch(0xb1, 0b1111, 4)
return data, buf
def test_fail_bad_distance():
"""Test bad distance symbol (30..31)"""
data = b"Dist Dist"
opts = zz.Options(force_block_types=[1])
buf = zz.deflate(data, opts)
# Distance symbol 30
buf.patch(0x42, 0b01111, 5)
return data, buf
def test_fail_bad_static_litlen():
"""Test bad static lit/length (286..287)"""
data = b"A"
opts = zz.Options(force_block_types=[1])
buf = zz.deflate(data, opts)
buf.patch(19, 0b01100011, 8, "Invalid symbol 285")
return data, buf
def test_fail_distance_too_far():
"""Test with distance too far to the output"""
opts = zz.Options(force_block_types=[1], no_decode=True)
message = [zz.Literal(b"A"), zz.Match(4, 2)]
buf = zz.compress_message(message, opts)
return b"", buf
def test_fail_bad_distance_bit():
"""Test bad distance symbol in one symbol alphabet"""
data = b"asd asd"
opts = zz.Options(force_block_types=[2])
buf = zz.deflate(data, opts)
# Distance code 1
buf.patch(0xaa, 0b1, 1)
return data, buf
def test_fail_bad_distance_empty():
"""Test using distance code from an empty tree"""
data = b"asd asd"
opts = zz.Options(force_block_types=[2])
buf = zz.deflate(data, opts)
# Add another distance code and replace distance 3 code for 1 (0111)
# with the code for 0 (00) for distances 3 and 4
buf.patch(0x18, 4, 5)
buf.patch(0x98, 0b0000, 4)
return data, buf
def test_fail_bad_lit_length():
"""Test bad lit/length symbol"""
data = b""
opts = zz.Options(force_block_types=[2])
buf = zz.deflate(data, opts)
# Patch end-of-block 0 to 1
buf.patch(0x6b, 0b1, 1)
return data, buf
def test_fail_no_litlen_codes():
"""Test lit/len table with no codes"""
data = b""
probs = { n: 0 for n in range(286) }
opts = zz.Options(force_block_types=[2], override_litlen_counts=probs, invalid_sym=zz.Code(0, 1))
buf = zz.deflate(data, opts)
return data, buf
def test_fail_no_dist_codes():
"""Test distance table with no codes"""
probs = { n: 0 for n in range(30) }
opts = zz.Options(force_block_types=[2], override_dist_counts=probs, invalid_sym=zz.Code(0, 1))
message = [zz.Literal(b"A"), zz.Match(4, 1)]
buf = zz.compress_message(message, opts)
data = zz.decode(message)
return data, buf
def fmt_bytes(data, cols=20):
lines = []
for begin in range(0, len(data), cols):
chunk = data[begin:begin+cols]
lines.append("\"" + "".join("\\x%02x" % c for c in chunk) + "\"")
return "\n".join(lines)
def fnv1a(data):
h = 0x811c9dc5
for d in data:
h = ((h ^ (d&0xff)) * 0x01000193) & 0xffffffff
return h
test_cases = [
test_dynamic,
test_dynamic_no_match,
test_dynamic_empty,
test_dynamic_rle,
test_dynamic_rle_boundary,
test_repeat_length,
test_huff_lengths,
test_multi_part_matches,
test_static_distances_and_lengths,
test_dynamic_distances_and_lengths,
test_long_codes,
test_long_code_sequences,
test_two_symbol_bits,
]
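# Round-trip every generated stream through zlib.decompress() to verify that
# each hand-constructed DEFLATE stream decodes back to its source data.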
good = True
for case in test_cases:
try:
data, buf = case()
result = zlib.decompress(buf.to_bytes())
if data != result:
raise ValueError("Round trip failed")
print("{}: OK".format(case.__name__))
except Exception as e:
print("{}: FAIL ({})".format(case.__name__, e))
good = False
sys.exit(0 if good else 1)

View File

@@ -0,0 +1,268 @@
#!/usr/bin/env python3
from dataclasses import dataclass
import struct
from typing import Any, Tuple
import zlib
import sys
import io
import argparse
@dataclass
class BinaryFormat:
version: int
big_endian: bool
array_encoding: int = 1
array_original: bool = False
@dataclass
class Value:
type: str
value: Any
original_data: Tuple[int, bytes] = (0, b"")
@dataclass
class Node:
name: bytes
values: list[Value]
children: list["Node"]
@dataclass
class FbxFile:
root: Node
version: int
format: str
footer: bytes = b""
def pack(stream, fmt, *args):
stream.write(struct.pack(fmt, *args))
def unpack(stream, fmt):
size = struct.calcsize(fmt)
data = stream.read(size)
return struct.unpack(fmt, data)
primitive_fmt = {
b"C": "b", b"B": "b",
b"Y": "h",
b"I": "l", b"F": "f",
b"L": "q", b"D": "d",
}
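# A value record starts with a one-byte type code: uppercase codes (C/B/Y/I/F/L/D)
# are single primitives, lowercase codes (c/b/i/l/f/d) are arrays prefixed by
# count/encoding/encoded_size (encoding 1 = zlib), and S/R are length-prefixed data.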
def binary_parse_value(stream, bf):
endian = "<>"[bf.big_endian]
type = stream.read(1)
fmt = primitive_fmt.get(type)
if fmt:
value, = unpack(stream, endian + fmt)
return Value(type, value)
if type in b"cbilfd":
arr_fmt = endian + "L" * 3
fmt = primitive_fmt[type.upper()]
count, encoding, encoded_size = unpack(stream, arr_fmt)
original_data = arr_data = stream.read(encoded_size)
if encoding == 0: pass # Nop
elif encoding == 1:
arr_data = zlib.decompress(arr_data)
else:
raise ValueError(f"Unknown encoding: {encoding}")
values = list(v[0] for v in struct.iter_unpack(endian + fmt, arr_data))
assert len(values) == count
return Value(type, values, original_data=(encoding, original_data))
elif type in b"SR":
length, = unpack(stream, endian + "L")
return Value(type, stream.read(length))
else:
raise ValueError(f"Bad type: '{type}'")
def binary_parse_node(stream, bf):
pos = stream.tell()
endian = "<>"[bf.big_endian]
head_fmt = endian + "LQ"[bf.version >= 7500] * 3 + "B"
end_offset, num_values, values_len, name_len = unpack(stream, head_fmt)
if end_offset == 0 and name_len == 0: return None
name = stream.read(name_len)
values_end = stream.tell() + values_len
values = [binary_parse_value(stream, bf) for _ in range(num_values)]
children = []
if stream.tell() != values_end:
assert stream.tell() < values_end
stream.seek(values_end)
while stream.tell() < end_offset:
node = binary_parse_node(stream, bf)
if not node: break
children.append(node)
return Node(name, values, children)
def parse_fbx(stream):
magic = stream.read(22)
if magic == b"Kaydara FBX Binary \x00\x1a":
big_endian = stream.read(1) != b"\x00"
endian = "<>"[big_endian]
version, = unpack(stream, endian + "L")
bf = BinaryFormat(version, big_endian)
children = []
while True:
node = binary_parse_node(stream, bf)
if not node: break
children.append(node)
footer = stream.read(16)
root = Node("", [], children)
format = "binary-be" if big_endian else "binary"
return FbxFile(root, version, format, footer)
else:
# TODO
raise NotImplementedError()
def binary_dump_value(stream, value: Value, bf: BinaryFormat):
endian = "<>"[bf.big_endian]
fmt = primitive_fmt.get(value.type)
stream.write(value.type)
if fmt:
pack(stream, endian + fmt, value.value)
elif value.type in b"cbilfd":
fmt = endian + primitive_fmt[value.type.upper()]
arr_fmt = endian + "L" * 3
if bf.array_original:
encoding, arr_data = value.original_data
pack(stream, arr_fmt, len(value.value), encoding, len(arr_data))
stream.write(arr_data)
else:
with io.BytesIO() as ds:
for v in value.value:
pack(ds, fmt, v)
arr_data = ds.getvalue()
count = len(value.value)
encoding = bf.array_encoding
if encoding == 1:
arr_data = zlib.compress(arr_data)
encoded_size = len(arr_data)
pack(stream, arr_fmt, count, encoding, encoded_size)
stream.write(arr_data)
elif value.type in b"SR":
pack(stream, endian + "L", len(value.value))
stream.write(value.value)
else:
raise ValueError(f"Bad type: '{value.type}'")
def binary_dump_node(stream, node: Node, bf: BinaryFormat):
endian = "<>"[bf.big_endian]
head_size = 25 if bf.version >= 7500 else 13
head_null = b"\x00" * head_size
off_start = stream.tell()
stream.write(head_null)
stream.write(node.name)
off_value_start = stream.tell()
for value in node.values:
binary_dump_value(stream, value, bf)
values_size = stream.tell() - off_value_start
for child in node.children:
binary_dump_node(stream, child, bf)
if node.children or node.name in { b"References", b"AnimationStack", b"AnimationLayer" }:
stream.write(head_null)
off_end = stream.tell()
head_fmt = endian + "LQ"[bf.version >= 7500] * 3 + "B"
stream.seek(off_start)
pack(stream, head_fmt, off_end, len(node.values), values_size, len(node.name))
stream.seek(off_end)
def binary_dump_root(stream, root: Node, bf: BinaryFormat, footer: bytes):
head_size = 25 if bf.version >= 7500 else 13
head_null = b"\x00" * head_size
endian = "<>"[bf.big_endian]
stream.write(b"Kaydara FBX Binary \x00\x1a")
pack(stream, "B", bf.big_endian)
pack(stream, endian + "L", bf.version)
for node in root.children:
binary_dump_node(stream, node, bf)
stream.write(head_null)
stream.write(footer)
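# Trailer: four zero bytes, padding up to a 16-byte boundary, the version
# repeated, 120 zero bytes, and a fixed 16-byte magic.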
stream.write(b"\x00" * 4)
ofs = stream.tell()
pad = ((ofs + 15) & ~15) - ofs
if pad == 0:
pad = 16
stream.write(b"\0" * pad)
pack(stream, endian + "I", bf.version)
stream.write(b"\0" * 120)
stream.write(b"\xf8\x5a\x8c\x6a\xde\xf5\xd9\x7e\xec\xe9\x0c\xe3\x75\x8f\x29\x0b")
def ascii_dump_value(stream, value: Value, indent: str):
if value.type in b"CBYILFD":
stream.write(str(value.value))
elif value.type in b"SR":
s = str(value.value)[2:-1]
stream.write(f"\"{s}\"")
elif value.type in b"cbilfd":
stream.write(f"* {len(value.value)} {{")
first = True
for v in value.value:
stream.write(" " if first else ", ")
stream.write(str(v))
first = False
stream.write(" }")
else:
raise ValueError(f"Bad value type: '{value.type}'")
def ascii_dump_node(stream, node: Node, indent: str):
name = node.name.decode("utf-8")
stream.write(f"{indent}{name}:")
first = True
for value in node.values:
stream.write(" " if first else ", ")
first = False
ascii_dump_value(stream, value, indent + " ")
if node.children:
stream.write(" {\n")
for node in node.children:
ascii_dump_node(stream, node, indent + " ")
stream.write(indent + "}\n")
else:
stream.write("\n")
def ascii_dump_root(stream, root: Node, version: int):
v0 = version // 1000 % 10
v1 = version // 100 % 10
v2 = version // 10 % 10
stream.write(f"; FBX {v0}.{v1}.{v2} project file\n")
stream.write("----------------------------------------------------\n")
for child in root.children:
ascii_dump_node(stream, child, "")
if __name__ == "__main__":
parser = argparse.ArgumentParser(usage="transmute_fbx.py src -o dst -v 7400 -f binary-be")
parser.add_argument("src", help="Source file to read")
parser.add_argument("--output", "-o", required=True, help="Output filename")
parser.add_argument("--version", "-v", help="File version")
parser.add_argument("--format", "-f", help="File format")
argv = parser.parse_args()
with open(argv.src, "rb") as f:
fbx = parse_fbx(f)
format = argv.format
if not format:
format = fbx.format
version = argv.version
if not version:
version = fbx.version
with open(argv.output, "wt" if format == "ascii" else "wb") as f:
if format == "ascii":
ascii_dump_root(f, fbx.root, version)
else:
if format == "binary-be":
bf = BinaryFormat(version, True, 0)
elif format == "binary":
bf = BinaryFormat(version, False, 1)
else:
raise ValueError(f"Unknown format: {format}")
binary_dump_root(f, fbx.root, bf, fbx.footer)

View File

@@ -0,0 +1,14 @@
[default.extend-identifiers]
# Offset Translation
OT = "OT"
# Typos in the FBX file format
PreferedAngleX = "PreferedAngleX"
PreferedAngleY = "PreferedAngleY"
PreferedAngleZ = "PreferedAngleZ"
[default.extend-words]
# Level of Detail
lod = "lod"

View File

@@ -0,0 +1,96 @@
<?xml version="1.0" encoding="utf-8"?>
<AutoVisualizer xmlns="http://schemas.microsoft.com/vstudio/debugger/natvis/2010">
<Type Name="ufbx_string">
<DisplayString>{data,[length]s}</DisplayString>
</Type>
<Type Name="ufbx_vec2"><DisplayString>{{ x={x} y={y} }}</DisplayString></Type>
<Type Name="ufbx_vec3"><DisplayString>{{ x={x} y={y} z={z} }}</DisplayString></Type>
<Type Name="ufbx_vec4"><DisplayString>{{ x={x} y={y} z={z} w={w} }}</DisplayString></Type>
<Type Name="ufbx_quat"><DisplayString>{{ x={x} y={y} z={z} w={w} }}</DisplayString></Type>
<Type Name="ufbx_bool_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_uint32_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_real_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_vec2_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_vec3_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_vec4_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_string_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_element_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_unknown_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_node_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_edge_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_mesh_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_light_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_camera_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_bone_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_empty_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_line_curve_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_nurbs_curve_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_patch_surface_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_nurbs_surface_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_nurbs_trim_surface_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_nurbs_trim_boundary_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_procedural_geometry_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_stereo_camera_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_camera_switcher_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_marker_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_lod_group_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_skin_deformer_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_skin_cluster_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_blend_deformer_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_blend_channel_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_blend_shape_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_cache_deformer_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_material_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_texture_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_video_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_anim_stack_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_anim_layer_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_anim_value_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_anim_curve_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_bind_pose_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_connection_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_uv_set_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_color_set_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_anim_prop_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_keyframe_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_anim_layer_desc_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_bone_pose_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_name_element_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_shader_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_shader_binding_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_display_layer_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_selection_set_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_selection_node_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_character_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_constraint_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_pose_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_metadata_object_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_shader_prop_binding_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_material_texture_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_texture_layer_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_shader_texture_input_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_constraint_target_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_line_segment_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_const_prop_override_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_cache_frame_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_cache_channel_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_cache_file_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_face_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_mesh_segment_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_lod_level_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_skin_vertex_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_skin_weight_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_prop_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_subdivision_weight_range_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_subdivision_weight_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_dom_node_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_dom_value_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_mesh_material_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_face_group_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_texture_file_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
<Type Name="ufbx_warning_list"><DisplayString>{{ count={count} }}</DisplayString><Expand><ArrayItems><Size>count</Size><ValuePointer>data</ValuePointer></ArrayItems></Expand></Type>
</AutoVisualizer>

View File

@@ -0,0 +1,363 @@
; FBX 6.1.0 project file
; ----------------------------------------------------
FBXHeaderExtension: {
FBXHeaderVersion: 1003
FBXVersion: 6100
CurrentCameraResolution: {
CameraName: "Producer Perspective"
CameraResolutionMode: "Fixed Resolution"
CameraResolutionW: 960
CameraResolutionH: 540
}
CreationTimeStamp: {
Version: 1000
Year: 2020
Month: 3
Day: 22
Hour: 13
Minute: 20
Second: 20
Millisecond: 453
}
Creator: "FBX SDK/FBX Plugins version 2019.2"
}
; Document Description
;------------------------------------------------------------------
Document: {
Name: ""
}
; Document References
;------------------------------------------------------------------
References: {
}
; Object definitions
;------------------------------------------------------------------
Definitions: {
Version: 100
Count: 3
ObjectType: "Model" {
Count: 1
}
ObjectType: "SceneInfo" {
Count: 1
}
ObjectType: "GlobalSettings" {
Count: 1
}
}
; Object properties
;------------------------------------------------------------------
Objects: {
Model: "Model::pointLight1", "Light" {
Version: 232
Properties60: {
Property: "QuaternionInterpolate", "enum", "",0
Property: "RotationOffset", "Vector3D", "",0,0,0
Property: "RotationPivot", "Vector3D", "",0,0,0
Property: "ScalingOffset", "Vector3D", "",0,0,0
Property: "ScalingPivot", "Vector3D", "",0,0,0
Property: "TranslationActive", "bool", "",0
Property: "TranslationMin", "Vector3D", "",0,0,0
Property: "TranslationMax", "Vector3D", "",0,0,0
Property: "TranslationMinX", "bool", "",0
Property: "TranslationMinY", "bool", "",0
Property: "TranslationMinZ", "bool", "",0
Property: "TranslationMaxX", "bool", "",0
Property: "TranslationMaxY", "bool", "",0
Property: "TranslationMaxZ", "bool", "",0
Property: "RotationOrder", "enum", "",0
Property: "RotationSpaceForLimitOnly", "bool", "",0
Property: "RotationStiffnessX", "double", "",0
Property: "RotationStiffnessY", "double", "",0
Property: "RotationStiffnessZ", "double", "",0
Property: "AxisLen", "double", "",10
Property: "PreRotation", "Vector3D", "",0,0,0
Property: "PostRotation", "Vector3D", "",0,0,0
Property: "RotationActive", "bool", "",1
Property: "RotationMin", "Vector3D", "",0,0,0
Property: "RotationMax", "Vector3D", "",0,0,0
Property: "RotationMinX", "bool", "",0
Property: "RotationMinY", "bool", "",0
Property: "RotationMinZ", "bool", "",0
Property: "RotationMaxX", "bool", "",0
Property: "RotationMaxY", "bool", "",0
Property: "RotationMaxZ", "bool", "",0
Property: "InheritType", "enum", "",1
Property: "ScalingActive", "bool", "",0
Property: "ScalingMin", "Vector3D", "",0,0,0
Property: "ScalingMax", "Vector3D", "",0,0,0
Property: "ScalingMinX", "bool", "",0
Property: "ScalingMinY", "bool", "",0
Property: "ScalingMinZ", "bool", "",0
Property: "ScalingMaxX", "bool", "",0
Property: "ScalingMaxY", "bool", "",0
Property: "ScalingMaxZ", "bool", "",0
Property: "GeometricTranslation", "Vector3D", "",0,0,0
Property: "GeometricRotation", "Vector3D", "",0,0,0
Property: "GeometricScaling", "Vector3D", "",1,1,1
Property: "MinDampRangeX", "double", "",0
Property: "MinDampRangeY", "double", "",0
Property: "MinDampRangeZ", "double", "",0
Property: "MaxDampRangeX", "double", "",0
Property: "MaxDampRangeY", "double", "",0
Property: "MaxDampRangeZ", "double", "",0
Property: "MinDampStrengthX", "double", "",0
Property: "MinDampStrengthY", "double", "",0
Property: "MinDampStrengthZ", "double", "",0
Property: "MaxDampStrengthX", "double", "",0
Property: "MaxDampStrengthY", "double", "",0
Property: "MaxDampStrengthZ", "double", "",0
Property: "PreferedAngleX", "double", "",0
Property: "PreferedAngleY", "double", "",0
Property: "PreferedAngleZ", "double", "",0
Property: "LookAtProperty", "object", ""
Property: "UpVectorProperty", "object", ""
Property: "Show", "bool", "",1
Property: "NegativePercentShapeSupport", "bool", "",1
Property: "DefaultAttributeIndex", "int", "",0
Property: "Freeze", "bool", "",0
Property: "LODBox", "bool", "",0
Property: "Lcl Translation", "Lcl Translation", "A+",0,0,0
Property: "Lcl Rotation", "Lcl Rotation", "A+",0,0,0
Property: "Lcl Scaling", "Lcl Scaling", "A+",1,1,1
Property: "Visibility", "Visibility", "A",1
Property: "Color", "Color", "A+N",0.148112133145332,0.0950400084257126,0.439999997615814
Property: "LightType", "enum", "N",0
Property: "CastLightOnObject", "bool", "N",1
Property: "DrawVolumetricLight", "bool", "N",1
Property: "DrawGroundProjection", "bool", "N",1
Property: "DrawFrontFacingVolumetricLight", "bool", "N",0
Property: "Intensity", "Number", "A+N",307.228912353516
Property: "Fog", "Number", "AN",50
Property: "DecayType", "enum", "N",0
Property: "DecayStart", "Number", "AN",0
Property: "FileName", "KString", "N", ""
Property: "EnableNearAttenuation", "bool", "N",0
Property: "NearAttenuationStart", "Number", "AN",0
Property: "NearAttenuationEnd", "Number", "AN",0
Property: "EnableFarAttenuation", "bool", "N",0
Property: "FarAttenuationStart", "Number", "AN",0
Property: "FarAttenuationEnd", "Number", "AN",0
Property: "CastShadows", "bool", "N",1
Property: "ShadowColor", "Color", "AN",0,0,0
Property: "HotSpot", "Number", "AN",0
Property: "Cone angle", "Number", "AN",45
}
MultiLayer: 0
MultiTake: 0
Shading: Y
Culling: "CullingOff"
TypeFlags: "Light"
GeometryVersion: 124
NodeAttributeName: "NodeAttribute::pointLight1_ncl1_1"
}
SceneInfo: "SceneInfo::GlobalInfo", "UserData" {
Type: "UserData"
Version: 100
MetaData: {
Version: 100
Title: ""
Subject: ""
Author: ""
Keywords: ""
Revision: ""
Comment: ""
}
Properties60: {
Property: "DocumentUrl", "KString", "", "D:\Dev\ufbx\data\maya_anim_light_6100_ascii.fbx"
Property: "SrcDocumentUrl", "KString", "", "D:\Dev\ufbx\data\maya_anim_light_6100_ascii.fbx"
Property: "Original", "Compound", ""
Property: "Original|ApplicationVendor", "KString", "", "Autodesk"
Property: "Original|ApplicationName", "KString", "", "Maya"
Property: "Original|ApplicationVersion", "KString", "", "201900"
Property: "Original|DateTime_GMT", "DateTime", "", "22/03/2020 11:20:20.452"
Property: "Original|FileName", "KString", "", "D:\Dev\ufbx\data\maya_anim_light_6100_ascii.fbx"
Property: "LastSaved", "Compound", ""
Property: "LastSaved|ApplicationVendor", "KString", "", "Autodesk"
Property: "LastSaved|ApplicationName", "KString", "", "Maya"
Property: "LastSaved|ApplicationVersion", "KString", "", "201900"
Property: "LastSaved|DateTime_GMT", "DateTime", "", "22/03/2020 11:20:20.452"
Property: "Original|ApplicationActiveProject", "KString", "", "D:\Dev\ufbx\data"
Property: "Original|ApplicationNativeFile", "KString", "", "W:\Temp\ufbx_test_source\maya_anim_light.mb"
}
}
GlobalSettings: {
Version: 1000
Properties60: {
Property: "UpAxis", "int", "",1
Property: "UpAxisSign", "int", "",1
Property: "FrontAxis", "int", "",2
Property: "FrontAxisSign", "int", "",1
Property: "CoordAxis", "int", "",0
Property: "CoordAxisSign", "int", "",1
Property: "OriginalUpAxis", "int", "",1
Property: "OriginalUpAxisSign", "int", "",1
Property: "UnitScaleFactor", "double", "",1
Property: "OriginalUnitScaleFactor", "double", "",1
Property: "AmbientColor", "ColorRGB", "",0,0,0
Property: "DefaultCamera", "KString", "", "Producer Perspective"
Property: "TimeMode", "enum", "",11
Property: "TimeProtocol", "enum", "",2
Property: "SnapOnFrameMode", "enum", "",0
Property: "TimeSpanStart", "KTime", "",1924423250
Property: "TimeSpanStop", "KTime", "",115465395000
Property: "CustomFrameRate", "double", "",-1
}
}
}
; Object connections
;------------------------------------------------------------------
Connections: {
Connect: "OO", "Model::pointLight1", "Model::Scene"
}
;Takes and animation section
;----------------------------------------------------
Takes: {
Current: "Take 001"
Take: "Take 001" {
FileName: "Take_001.tak"
LocalTime: 1924423250,115465395000
ReferenceTime: 1924423250,115465395000
;Models animation
;----------------------------------------------------
Model: "Model::pointLight1" {
Version: 1.1
Channel: "Transform" {
Channel: "T" {
Channel: "X" {
Default: 0
Color: 1,1,1
}
Channel: "Y" {
Default: 0
Color: 1,1,1
}
Channel: "Z" {
Default: 0
Color: 1,1,1
}
LayerType: 1
}
Channel: "R" {
Channel: "X" {
Default: 0
Color: 1,1,1
}
Channel: "Y" {
Default: 0
Color: 1,1,1
}
Channel: "Z" {
Default: 0
Color: 1,1,1
}
LayerType: 2
}
Channel: "S" {
Channel: "X" {
Default: 1
Color: 1,1,1
}
Channel: "Y" {
Default: 1
Color: 1,1,1
}
Channel: "Z" {
Default: 1
Color: 1,1,1
}
LayerType: 3
}
}
Channel: "Color" {
Channel: "X" {
Default: 0.148112133145332
KeyVer: 4005
KeyCount: 3
Key: 1924423250,0.148112133145332,U,s,0,0,n,57732697500,0.00430000014603138,U,s,0,0,n,115465395000
,0.44200000166893,U,s,0,0,n
Color: 1,1,1
}
Channel: "Y" {
Default: 0.0950400084257126
KeyVer: 4005
KeyCount: 3
Key: 1924423250,0.0950400084257126,U,s,0,0,n,57732697500,0.221799999475479,U,s,0,0,n,115465395000
,0.118897996842861,U,s,0,0,n
Color: 1,1,1
}
Channel: "Z" {
Default: 0.439999997615814
KeyVer: 4005
KeyCount: 3
Key: 1924423250,0.439999997615814,U,s,0,0,n,57732697500,0.112999998033047,U,s,0,0,n,115465395000,0.118897996842861
,U,s,0,0,n
Color: 1,1,1
}
}
Channel: "Intensity" {
Default: 307.228912353516
KeyVer: 4005
KeyCount: 4
Key: 1924423250,307.228912353516,U,s,0,0,n,32715195250,120.48193359375,U,s,0,0,n,78901353250,481.927734375
,U,s,0,0,n,115465395000,114.457832336426,U,s,0,0,n
Color: 1,1,1
}
}
;Generic nodes animation
;----------------------------------------------------
;Textures animation
;----------------------------------------------------
;Materials animation
;----------------------------------------------------
;Constraints animation
;----------------------------------------------------
}
}
;Version 5 settings
;------------------------------------------------------------------
Version5: {
AmbientRenderSettings: {
Version: 101
AmbientLightColor: 0,0,0,1
}
FogOptions: {
FlogEnable: 0
FogMode: 0
FogDensity: 0.002
FogStart: 0.3
FogEnd: 1000
FogColor: 1,1,1,1
}
Settings: {
FrameRate: "24"
TimeFormat: 1
SnapOnFrames: 0
ReferenceTimeIndex: -1
TimeLineStartTime: 1924423250
TimeLineStopTime: 115465395000
}
RendererSetting: {
DefaultCamera: "Producer Perspective"
DefaultViewingMode: 0
}
}

View File

@@ -0,0 +1,393 @@
; FBX 6.1.0 project file
; ----------------------------------------------------
FBXHeaderExtension: {
FBXHeaderVersion: 1003
FBXVersion: 6100
CurrentCameraResolution: {
CameraName: "Producer Perspective"
CameraResolutionMode: "Fixed Resolution"
CameraResolutionW: 960
CameraResolutionH: 540
}
CreationTimeStamp: {
Version: 1000
Year: 2020
Month: 3
Day: 22
Hour: 0
Minute: 0
Second: 25
Millisecond: 548
}
Creator: "FBX SDK/FBX Plugins version 2020.0"
}
; Document Description
;------------------------------------------------------------------
Document: {
Name: ""
}
; Document References
;------------------------------------------------------------------
References: {
}
; Object definitions
;------------------------------------------------------------------
Definitions: {
Version: 100
Count: 4
ObjectType: "Model" {
Count: 1
}
ObjectType: "Material" {
Count: 1
}
ObjectType: "SceneInfo" {
Count: 1
}
ObjectType: "GlobalSettings" {
Count: 1
}
}
; Object properties
;------------------------------------------------------------------
Objects: {
Model: "Model::pCube1", "Mesh" {
Version: 232
Properties60: {
Property: "QuaternionInterpolate", "enum", "",0
Property: "RotationOffset", "Vector3D", "",0,0,0
Property: "RotationPivot", "Vector3D", "",0,0,0
Property: "ScalingOffset", "Vector3D", "",0,0,0
Property: "ScalingPivot", "Vector3D", "",0,0,0
Property: "TranslationActive", "bool", "",0
Property: "TranslationMin", "Vector3D", "",0,0,0
Property: "TranslationMax", "Vector3D", "",0,0,0
Property: "TranslationMinX", "bool", "",0
Property: "TranslationMinY", "bool", "",0
Property: "TranslationMinZ", "bool", "",0
Property: "TranslationMaxX", "bool", "",0
Property: "TranslationMaxY", "bool", "",0
Property: "TranslationMaxZ", "bool", "",0
Property: "RotationOrder", "enum", "",0
Property: "RotationSpaceForLimitOnly", "bool", "",0
Property: "RotationStiffnessX", "double", "",0
Property: "RotationStiffnessY", "double", "",0
Property: "RotationStiffnessZ", "double", "",0
Property: "AxisLen", "double", "",10
Property: "PreRotation", "Vector3D", "",0,0,0
Property: "PostRotation", "Vector3D", "",0,0,0
Property: "RotationActive", "bool", "",1
Property: "RotationMin", "Vector3D", "",0,0,0
Property: "RotationMax", "Vector3D", "",0,0,0
Property: "RotationMinX", "bool", "",0
Property: "RotationMinY", "bool", "",0
Property: "RotationMinZ", "bool", "",0
Property: "RotationMaxX", "bool", "",0
Property: "RotationMaxY", "bool", "",0
Property: "RotationMaxZ", "bool", "",0
Property: "InheritType", "enum", "",1
Property: "ScalingActive", "bool", "",0
Property: "ScalingMin", "Vector3D", "",0,0,0
Property: "ScalingMax", "Vector3D", "",0,0,0
Property: "ScalingMinX", "bool", "",0
Property: "ScalingMinY", "bool", "",0
Property: "ScalingMinZ", "bool", "",0
Property: "ScalingMaxX", "bool", "",0
Property: "ScalingMaxY", "bool", "",0
Property: "ScalingMaxZ", "bool", "",0
Property: "GeometricTranslation", "Vector3D", "",0,0,0
Property: "GeometricRotation", "Vector3D", "",0,0,0
Property: "GeometricScaling", "Vector3D", "",1,1,1
Property: "MinDampRangeX", "double", "",0
Property: "MinDampRangeY", "double", "",0
Property: "MinDampRangeZ", "double", "",0
Property: "MaxDampRangeX", "double", "",0
Property: "MaxDampRangeY", "double", "",0
Property: "MaxDampRangeZ", "double", "",0
Property: "MinDampStrengthX", "double", "",0
Property: "MinDampStrengthY", "double", "",0
Property: "MinDampStrengthZ", "double", "",0
Property: "MaxDampStrengthX", "double", "",0
Property: "MaxDampStrengthY", "double", "",0
Property: "MaxDampStrengthZ", "double", "",0
Property: "PreferedAngleX", "double", "",0
Property: "PreferedAngleY", "double", "",0
Property: "PreferedAngleZ", "double", "",0
Property: "LookAtProperty", "object", ""
Property: "UpVectorProperty", "object", ""
Property: "Show", "bool", "",1
Property: "NegativePercentShapeSupport", "bool", "",1
Property: "DefaultAttributeIndex", "int", "",0
Property: "Freeze", "bool", "",0
Property: "LODBox", "bool", "",0
Property: "Lcl Translation", "Lcl Translation", "A+",0,0,0
Property: "Lcl Rotation", "Lcl Rotation", "A+",0,0,0
Property: "Lcl Scaling", "Lcl Scaling", "A+",1,1,1
Property: "Visibility", "Visibility", "A",1
Property: "currentUVSet", "KString", "U", "map1"
Property: "Color", "ColorRGB", "N",0.8,0.8,0.8
Property: "BBoxMin", "Vector3D", "N",0,0,0
Property: "BBoxMax", "Vector3D", "N",0,0,0
Property: "Primary Visibility", "bool", "N",1
Property: "Casts Shadows", "bool", "N",1
Property: "Receive Shadows", "bool", "N",1
}
MultiLayer: 0
MultiTake: 0
Shading: T
Culling: "CullingOff"
Vertices: -0.5,-0.5,0.5,0.5,-0.5,0.5,-0.5,0.5,0.5,0.5,0.5,0.5,-0.5,0.5,-0.5,0.5,0.5,-0.5,-0.5,-0.5,-0.5,0.5,-0.5,-0.5
PolygonVertexIndex: 0,1,3,-3,2,3,5,-5,4,5,7,-7,6,7,1,-1,1,7,5,-4,6,0,2,-5
Edges: 0,2,6,10,3,1,7,5,11,9,15,13
GeometryVersion: 124
LayerElementNormal: 0 {
Version: 101
Name: ""
MappingInformationType: "ByPolygonVertex"
ReferenceInformationType: "Direct"
Normals: 0,0,1,0,0,1,0,0,1,0,0,1,0,1,0,0,1,0,0,1,0,0,1,0,0,0,-1,0,0,-1,0,0,-1,0,0,-1,0,-1,0,0,-1,0,0,-1,0,0,-1,0,1,0,0
,1,0,0,1,0,0,1,0,0,-1,0,0,-1,0,0,-1,0,0,-1,0,0
}
LayerElementUV: 0 {
Version: 101
Name: "map1"
MappingInformationType: "ByPolygonVertex"
ReferenceInformationType: "IndexToDirect"
UV: 0.375,0,0.625,0,0.375,0.25,0.625,0.25,0.375,0.5,0.625,0.5,0.375,0.75,0.625,0.75,0.375,1,0.625,1,0.875,0,0.875
,0.25,0.125,0,0.125,0.25
UVIndex: 0,1,3,2,2,3,5,4,4,5,7,6,6,7,9,8,1,10,11,3,12,0,2,13
}
LayerElementMaterial: 0 {
Version: 101
Name: ""
MappingInformationType: "AllSame"
ReferenceInformationType: "IndexToDirect"
Materials: 0
}
Layer: 0 {
Version: 100
LayerElement: {
Type: "LayerElementNormal"
TypedIndex: 0
}
LayerElement: {
Type: "LayerElementMaterial"
TypedIndex: 0
}
LayerElement: {
Type: "LayerElementUV"
TypedIndex: 0
}
}
NodeAttributeName: "Geometry::pCube1_ncl1_1"
}
SceneInfo: "SceneInfo::GlobalInfo", "UserData" {
Type: "UserData"
Version: 100
MetaData: {
Version: 100
Title: ""
Subject: ""
Author: ""
Keywords: ""
Revision: ""
Comment: ""
}
Properties60: {
Property: "DocumentUrl", "KString", "", "D:\Dev\ufbx\data\maya_auto_clamp_6100_ascii.fbx"
Property: "SrcDocumentUrl", "KString", "", "D:\Dev\ufbx\data\maya_auto_clamp_6100_ascii.fbx"
Property: "Original", "Compound", ""
Property: "Original|ApplicationVendor", "KString", "", "Autodesk"
Property: "Original|ApplicationName", "KString", "", "Maya"
Property: "Original|ApplicationVersion", "KString", "", "202000"
Property: "Original|DateTime_GMT", "DateTime", "", "21/03/2020 22:00:25.546"
Property: "Original|FileName", "KString", "", "D:\Dev\ufbx\data\maya_auto_clamp_6100_ascii.fbx"
Property: "LastSaved", "Compound", ""
Property: "LastSaved|ApplicationVendor", "KString", "", "Autodesk"
Property: "LastSaved|ApplicationName", "KString", "", "Maya"
Property: "LastSaved|ApplicationVersion", "KString", "", "202000"
Property: "LastSaved|DateTime_GMT", "DateTime", "", "21/03/2020 22:00:25.546"
Property: "Original|ApplicationActiveProject", "KString", "", "D:\Dev\ufbx\data"
}
}
Material: "Material::lambert1", "" {
Version: 102
ShadingModel: "lambert"
MultiLayer: 0
Properties60: {
Property: "ShadingModel", "KString", "", "Lambert"
Property: "MultiLayer", "bool", "",0
Property: "EmissiveColor", "Color", "A",0,0,0
Property: "EmissiveFactor", "Number", "A",1
Property: "AmbientColor", "Color", "A",0,0,0
Property: "AmbientFactor", "Number", "A",1
Property: "DiffuseColor", "Color", "A",0.5,0.5,0.5
Property: "DiffuseFactor", "Number", "A",0.800000011920929
Property: "Bump", "Vector3D", "",0,0,0
Property: "NormalMap", "Vector3D", "",0,0,0
Property: "BumpFactor", "double", "",1
Property: "TransparentColor", "Color", "A",0,0,0
Property: "TransparencyFactor", "Number", "A",1
Property: "DisplacementColor", "ColorRGB", "",0,0,0
Property: "DisplacementFactor", "double", "",1
Property: "VectorDisplacementColor", "ColorRGB", "",0,0,0
Property: "VectorDisplacementFactor", "double", "",1
Property: "Emissive", "Vector3D", "",0,0,0
Property: "Ambient", "Vector3D", "",0,0,0
Property: "Diffuse", "Vector3D", "",0.400000005960464,0.400000005960464,0.400000005960464
Property: "Opacity", "double", "",1
}
}
GlobalSettings: {
Version: 1000
Properties60: {
Property: "UpAxis", "int", "",1
Property: "UpAxisSign", "int", "",1
Property: "FrontAxis", "int", "",2
Property: "FrontAxisSign", "int", "",1
Property: "CoordAxis", "int", "",0
Property: "CoordAxisSign", "int", "",1
Property: "OriginalUpAxis", "int", "",1
Property: "OriginalUpAxisSign", "int", "",1
Property: "UnitScaleFactor", "double", "",1
Property: "OriginalUnitScaleFactor", "double", "",1
Property: "AmbientColor", "ColorRGB", "",0,0,0
Property: "DefaultCamera", "KString", "", "Producer Perspective"
Property: "TimeMode", "enum", "",11
Property: "TimeProtocol", "enum", "",2
Property: "SnapOnFrameMode", "enum", "",0
Property: "TimeSpanStart", "KTime", "",1924423250
Property: "TimeSpanStop", "KTime", "",57732697500
Property: "CustomFrameRate", "double", "",-1
}
}
}
; Object connections
;------------------------------------------------------------------
Connections: {
Connect: "OO", "Model::pCube1", "Model::Scene"
Connect: "OO", "Material::lambert1", "Model::pCube1"
}
;Takes and animation section
;----------------------------------------------------
Takes: {
Current: "Take 001"
Take: "Take 001" {
FileName: "Take_001.tak"
LocalTime: 1924423250,57732697500
ReferenceTime: 1924423250,57732697500
;Models animation
;----------------------------------------------------
Model: "Model::pCube1" {
Version: 1.1
Channel: "Transform" {
Channel: "T" {
Channel: "X" {
Default: 0
KeyVer: 4005
KeyCount: 4
Key: 1924423250,0,U,s,6.86190509796143,5.37557697296143,a,0.722972273826599,0.333333343267441
,21168655750,0.746607899665833,U,s,5.37557697296143,3.9690523147583,a,0.333333343267441
,0.333333343267441,38488465000,9.44874286651611,U,s,3.9690523147583,-6.61502313613892
,a,0.333333343267441,0.773677349090576,57732697500,10,U,s,-6.61502504348755,0,n
Color: 1,1,1
}
Channel: "Y" {
Default: 0
Color: 1,1,1
}
Channel: "Z" {
Default: 0
Color: 1,1,1
}
LayerType: 1
}
Channel: "R" {
Channel: "X" {
Default: 0
Color: 1,1,1
}
Channel: "Y" {
Default: 0
Color: 1,1,1
}
Channel: "Z" {
Default: 0
Color: 1,1,1
}
LayerType: 2
}
Channel: "S" {
Channel: "X" {
Default: 1
Color: 1,1,1
}
Channel: "Y" {
Default: 1
Color: 1,1,1
}
Channel: "Z" {
Default: 1
Color: 1,1,1
}
LayerType: 3
}
}
}
;Generic nodes animation
;----------------------------------------------------
;Textures animation
;----------------------------------------------------
;Materials animation
;----------------------------------------------------
;Constraints animation
;----------------------------------------------------
}
}
;Version 5 settings
;------------------------------------------------------------------
Version5: {
AmbientRenderSettings: {
Version: 101
AmbientLightColor: 0,0,0,1
}
FogOptions: {
FlogEnable: 0
FogMode: 0
FogDensity: 0.002
FogStart: 0.3
FogEnd: 1000
FogColor: 1,1,1,1
}
Settings: {
FrameRate: "24"
TimeFormat: 1
SnapOnFrames: 0
ReferenceTimeIndex: -1
TimeLineStartTime: 5880000
TimeLineStopTime: 176400000
}
RendererSetting: {
DefaultCamera: "Producer Perspective"
DefaultViewingMode: 0
}
}

View File

@@ -0,0 +1,429 @@
; FBX 7.5.0 project file
; ----------------------------------------------------
FBXHeaderExtension: {
FBXHeaderVersion: 1003
FBXVersion: 7500
CreationTimeStamp: {
Version: 1000
Year: 2020
Month: 3
Day: 26
Hour: 13
Minute: 36
Second: 45
Millisecond: 788
}
Creator: "FBX SDK/FBX Plugins version 2019.2"
SceneInfo: "SceneInfo::GlobalInfo", "UserData" {
Type: "UserData"
Version: 100
MetaData: {
Version: 100
Title: ""
Subject: ""
Author: ""
Keywords: ""
Revision: ""
Comment: ""
}
Properties70: {
P: "DocumentUrl", "KString", "Url", "", "D:\Dev\ufbx\data\maya_pivots_7500_ascii.fbx"
P: "SrcDocumentUrl", "KString", "Url", "", "D:\Dev\ufbx\data\maya_pivots_7500_ascii.fbx"
P: "Original", "Compound", "", ""
P: "Original|ApplicationVendor", "KString", "", "", "Autodesk"
P: "Original|ApplicationName", "KString", "", "", "Maya"
P: "Original|ApplicationVersion", "KString", "", "", "201900"
P: "Original|DateTime_GMT", "DateTime", "", "", "26/03/2020 11:36:45.786"
P: "Original|FileName", "KString", "", "", "D:\Dev\ufbx\data\maya_pivots_7500_ascii.fbx"
P: "LastSaved", "Compound", "", ""
P: "LastSaved|ApplicationVendor", "KString", "", "", "Autodesk"
P: "LastSaved|ApplicationName", "KString", "", "", "Maya"
P: "LastSaved|ApplicationVersion", "KString", "", "", "201900"
P: "LastSaved|DateTime_GMT", "DateTime", "", "", "26/03/2020 11:36:45.786"
P: "Original|ApplicationActiveProject", "KString", "", "", "D:\Dev\ufbx\data"
}
}
}
GlobalSettings: {
Version: 1000
Properties70: {
P: "UpAxis", "int", "Integer", "",1
P: "UpAxisSign", "int", "Integer", "",1
P: "FrontAxis", "int", "Integer", "",2
P: "FrontAxisSign", "int", "Integer", "",1
P: "CoordAxis", "int", "Integer", "",0
P: "CoordAxisSign", "int", "Integer", "",1
P: "OriginalUpAxis", "int", "Integer", "",1
P: "OriginalUpAxisSign", "int", "Integer", "",1
P: "UnitScaleFactor", "double", "Number", "",1
P: "OriginalUnitScaleFactor", "double", "Number", "",1
P: "AmbientColor", "ColorRGB", "Color", "",0,0,0
P: "DefaultCamera", "KString", "", "", "Producer Perspective"
P: "TimeMode", "enum", "", "",11
P: "TimeProtocol", "enum", "", "",2
P: "SnapOnFrameMode", "enum", "", "",0
P: "TimeSpanStart", "KTime", "Time", "",1924423250
P: "TimeSpanStop", "KTime", "Time", "",384884650000
P: "CustomFrameRate", "double", "Number", "",-1
P: "TimeMarker", "Compound", "", ""
P: "CurrentTimeMarker", "int", "Integer", "",-1
}
}
; Documents Description
;------------------------------------------------------------------
Documents: {
Count: 1
Document: 2459250420992, "", "Scene" {
Properties70: {
P: "SourceObject", "object", "", ""
P: "ActiveAnimStackName", "KString", "", "", "Take 001"
}
RootNode: 0
}
}
; Document References
;------------------------------------------------------------------
References: {
}
; Object definitions
;------------------------------------------------------------------
Definitions: {
Version: 100
Count: 6
ObjectType: "GlobalSettings" {
Count: 1
}
ObjectType: "AnimationStack" {
Count: 1
PropertyTemplate: "FbxAnimStack" {
Properties70: {
P: "Description", "KString", "", "", ""
P: "LocalStart", "KTime", "Time", "",0
P: "LocalStop", "KTime", "Time", "",0
P: "ReferenceStart", "KTime", "Time", "",0
P: "ReferenceStop", "KTime", "Time", "",0
}
}
}
ObjectType: "AnimationLayer" {
Count: 1
PropertyTemplate: "FbxAnimLayer" {
Properties70: {
P: "Weight", "Number", "", "A",100
P: "Mute", "bool", "", "",0
P: "Solo", "bool", "", "",0
P: "Lock", "bool", "", "",0
P: "Color", "ColorRGB", "Color", "",0.8,0.8,0.8
P: "BlendMode", "enum", "", "",0
P: "RotationAccumulationMode", "enum", "", "",0
P: "ScaleAccumulationMode", "enum", "", "",0
P: "BlendModeBypass", "ULongLong", "", "",0
}
}
}
ObjectType: "Geometry" {
Count: 1
PropertyTemplate: "FbxMesh" {
Properties70: {
P: "Color", "ColorRGB", "Color", "",0.8,0.8,0.8
P: "BBoxMin", "Vector3D", "Vector", "",0,0,0
P: "BBoxMax", "Vector3D", "Vector", "",0,0,0
P: "Primary Visibility", "bool", "", "",1
P: "Casts Shadows", "bool", "", "",1
P: "Receive Shadows", "bool", "", "",1
}
}
}
ObjectType: "Material" {
Count: 1
PropertyTemplate: "FbxSurfaceLambert" {
Properties70: {
P: "ShadingModel", "KString", "", "", "Lambert"
P: "MultiLayer", "bool", "", "",0
P: "EmissiveColor", "Color", "", "A",0,0,0
P: "EmissiveFactor", "Number", "", "A",1
P: "AmbientColor", "Color", "", "A",0.2,0.2,0.2
P: "AmbientFactor", "Number", "", "A",1
P: "DiffuseColor", "Color", "", "A",0.8,0.8,0.8
P: "DiffuseFactor", "Number", "", "A",1
P: "Bump", "Vector3D", "Vector", "",0,0,0
P: "NormalMap", "Vector3D", "Vector", "",0,0,0
P: "BumpFactor", "double", "Number", "",1
P: "TransparentColor", "Color", "", "A",0,0,0
P: "TransparencyFactor", "Number", "", "A",0
P: "DisplacementColor", "ColorRGB", "Color", "",0,0,0
P: "DisplacementFactor", "double", "Number", "",1
P: "VectorDisplacementColor", "ColorRGB", "Color", "",0,0,0
P: "VectorDisplacementFactor", "double", "Number", "",1
}
}
}
ObjectType: "Model" {
Count: 1
PropertyTemplate: "FbxNode" {
Properties70: {
P: "QuaternionInterpolate", "enum", "", "",0
P: "RotationOffset", "Vector3D", "Vector", "",0,0,0
P: "RotationPivot", "Vector3D", "Vector", "",0,0,0
P: "ScalingOffset", "Vector3D", "Vector", "",0,0,0
P: "ScalingPivot", "Vector3D", "Vector", "",0,0,0
P: "TranslationActive", "bool", "", "",0
P: "TranslationMin", "Vector3D", "Vector", "",0,0,0
P: "TranslationMax", "Vector3D", "Vector", "",0,0,0
P: "TranslationMinX", "bool", "", "",0
P: "TranslationMinY", "bool", "", "",0
P: "TranslationMinZ", "bool", "", "",0
P: "TranslationMaxX", "bool", "", "",0
P: "TranslationMaxY", "bool", "", "",0
P: "TranslationMaxZ", "bool", "", "",0
P: "RotationOrder", "enum", "", "",0
P: "RotationSpaceForLimitOnly", "bool", "", "",0
P: "RotationStiffnessX", "double", "Number", "",0
P: "RotationStiffnessY", "double", "Number", "",0
P: "RotationStiffnessZ", "double", "Number", "",0
P: "AxisLen", "double", "Number", "",10
P: "PreRotation", "Vector3D", "Vector", "",0,0,0
P: "PostRotation", "Vector3D", "Vector", "",0,0,0
P: "RotationActive", "bool", "", "",0
P: "RotationMin", "Vector3D", "Vector", "",0,0,0
P: "RotationMax", "Vector3D", "Vector", "",0,0,0
P: "RotationMinX", "bool", "", "",0
P: "RotationMinY", "bool", "", "",0
P: "RotationMinZ", "bool", "", "",0
P: "RotationMaxX", "bool", "", "",0
P: "RotationMaxY", "bool", "", "",0
P: "RotationMaxZ", "bool", "", "",0
P: "InheritType", "enum", "", "",0
P: "ScalingActive", "bool", "", "",0
P: "ScalingMin", "Vector3D", "Vector", "",0,0,0
P: "ScalingMax", "Vector3D", "Vector", "",1,1,1
P: "ScalingMinX", "bool", "", "",0
P: "ScalingMinY", "bool", "", "",0
P: "ScalingMinZ", "bool", "", "",0
P: "ScalingMaxX", "bool", "", "",0
P: "ScalingMaxY", "bool", "", "",0
P: "ScalingMaxZ", "bool", "", "",0
P: "GeometricTranslation", "Vector3D", "Vector", "",0,0,0
P: "GeometricRotation", "Vector3D", "Vector", "",0,0,0
P: "GeometricScaling", "Vector3D", "Vector", "",1,1,1
P: "MinDampRangeX", "double", "Number", "",0
P: "MinDampRangeY", "double", "Number", "",0
P: "MinDampRangeZ", "double", "Number", "",0
P: "MaxDampRangeX", "double", "Number", "",0
P: "MaxDampRangeY", "double", "Number", "",0
P: "MaxDampRangeZ", "double", "Number", "",0
P: "MinDampStrengthX", "double", "Number", "",0
P: "MinDampStrengthY", "double", "Number", "",0
P: "MinDampStrengthZ", "double", "Number", "",0
P: "MaxDampStrengthX", "double", "Number", "",0
P: "MaxDampStrengthY", "double", "Number", "",0
P: "MaxDampStrengthZ", "double", "Number", "",0
P: "PreferedAngleX", "double", "Number", "",0
P: "PreferedAngleY", "double", "Number", "",0
P: "PreferedAngleZ", "double", "Number", "",0
P: "LookAtProperty", "object", "", ""
P: "UpVectorProperty", "object", "", ""
P: "Show", "bool", "", "",1
P: "NegativePercentShapeSupport", "bool", "", "",1
P: "DefaultAttributeIndex", "int", "Integer", "",-1
P: "Freeze", "bool", "", "",0
P: "LODBox", "bool", "", "",0
P: "Lcl Translation", "Lcl Translation", "", "A",0,0,0
P: "Lcl Rotation", "Lcl Rotation", "", "A",0,0,0
P: "Lcl Scaling", "Lcl Scaling", "", "A",1,1,1
P: "Visibility", "Visibility", "", "A",1
P: "Visibility Inheritance", "Visibility Inheritance", "", "",1
}
}
}
}
; Object properties
;------------------------------------------------------------------
Objects: {
Geometry: 2459249891472, "Geometry::", "Mesh" {
Vertices: *24 {
a: -0.5,-0.5,0.5,0.5,-0.5,0.5,-0.5,0.5,0.5,0.5,0.5,0.5,-0.5,0.5,-0.5,0.5,0.5,-0.5,-0.5,-0.5,-0.5,0.5,-0.5,-0.5
}
PolygonVertexIndex: *24 {
a: 0,1,3,-3,2,3,5,-5,4,5,7,-7,6,7,1,-1,1,7,5,-4,6,0,2,-5
}
Edges: *12 {
a: 0,2,6,10,3,1,7,5,11,9,15,13
}
GeometryVersion: 124
LayerElementNormal: 0 {
Version: 102
Name: ""
MappingInformationType: "ByPolygonVertex"
ReferenceInformationType: "Direct"
Normals: *72 {
a: 0,0,1,0,0,1,0,0,1,0,0,1,0,1,0,0,1,0,0,1,0,0,1,0,0,0,-1,0,0,-1,0,0,-1,0,0,-1,0,-1,0,0,-1,0,0,-1,0,0,-1,0,1,0,0,1,0,0,1,0,0,1,0,0,-1,0,0,-1,0,0,-1,0,0,-1,0,0
}
NormalsW: *24 {
a: 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1
}
}
LayerElementBinormal: 0 {
Version: 102
Name: "map1"
MappingInformationType: "ByPolygonVertex"
ReferenceInformationType: "Direct"
Binormals: *72 {
a: 0,1,-0,0,1,-0,0,1,-0,0,1,-0,0,0,-1,0,0,-1,0,0,-1,0,0,-1,0,-1,0,0,-1,0,0,-1,0,0,-1,0,0,0,1,0,0,1,0,0,1,0,0,1,-0,1,0,-0,1,0,0,1,-0,-0,1,0,0,1,0,0,1,0,0,1,0,0,1,0
}
BinormalsW: *24 {
a: 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1
}
}
LayerElementTangent: 0 {
Version: 102
Name: "map1"
MappingInformationType: "ByPolygonVertex"
ReferenceInformationType: "Direct"
Tangents: *72 {
a: 1,-0,-0,1,-0,0,1,-0,0,1,-0,0,1,-0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,-0,1,0,-0,1,0,-0,1,0,-0,0,0,-1,0,0,-1,0,-0,-1,0,0,-1,0,-0,1,0,-0,1,0,-0,1,0,-0,1
}
TangentsW: *24 {
a: 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1
}
}
LayerElementUV: 0 {
Version: 101
Name: "map1"
MappingInformationType: "ByPolygonVertex"
ReferenceInformationType: "IndexToDirect"
UV: *28 {
a: 0.375,0,0.625,0,0.375,0.25,0.625,0.25,0.375,0.5,0.625,0.5,0.375,0.75,0.625,0.75,0.375,1,0.625,1,0.875,0,0.875,0.25,0.125,0,0.125,0.25
}
UVIndex: *24 {
a: 0,1,3,2,2,3,5,4,4,5,7,6,6,7,9,8,1,10,11,3,12,0,2,13
}
}
LayerElementSmoothing: 0 {
Version: 102
Name: ""
MappingInformationType: "ByEdge"
ReferenceInformationType: "Direct"
Smoothing: *12 {
a: 0,0,0,0,0,0,0,0,0,0,0,0
}
}
LayerElementMaterial: 0 {
Version: 101
Name: ""
MappingInformationType: "AllSame"
ReferenceInformationType: "IndexToDirect"
Materials: *1 {
a: 0
}
}
Layer: 0 {
Version: 100
LayerElement: {
Type: "LayerElementNormal"
TypedIndex: 0
}
LayerElement: {
Type: "LayerElementBinormal"
TypedIndex: 0
}
LayerElement: {
Type: "LayerElementTangent"
TypedIndex: 0
}
LayerElement: {
Type: "LayerElementMaterial"
TypedIndex: 0
}
LayerElement: {
Type: "LayerElementSmoothing"
TypedIndex: 0
}
LayerElement: {
Type: "LayerElementUV"
TypedIndex: 0
}
}
}
Model: 2461087530672, "Model::pCube1", "Mesh" {
Version: 232
Properties70: {
P: "RotationPivot", "Vector3D", "Vector", "",1,2,3
P: "ScalingPivot", "Vector3D", "Vector", "",4,5,6
P: "RotationActive", "bool", "", "",1
P: "InheritType", "enum", "", "",1
P: "ScalingMax", "Vector3D", "Vector", "",0,0,0
P: "DefaultAttributeIndex", "int", "Integer", "",0
P: "Lcl Translation", "Lcl Translation", "", "A",-1,-2,-3
P: "Lcl Rotation", "Lcl Rotation", "", "A",20,30,40
P: "Lcl Scaling", "Lcl Scaling", "", "A",0.3,0.4,0.5
P: "currentUVSet", "KString", "", "U", "map1"
}
Shading: T
Culling: "CullingOff"
}
Material: 2460372910976, "Material::lambert1", "" {
Version: 102
ShadingModel: "lambert"
MultiLayer: 0
Properties70: {
P: "AmbientColor", "Color", "", "A",0,0,0
P: "DiffuseColor", "Color", "", "A",0.5,0.5,0.5
P: "DiffuseFactor", "Number", "", "A",0.800000011920929
P: "TransparencyFactor", "Number", "", "A",1
P: "Emissive", "Vector3D", "Vector", "",0,0,0
P: "Ambient", "Vector3D", "Vector", "",0,0,0
P: "Diffuse", "Vector3D", "Vector", "",0.400000005960464,0.400000005960464,0.400000005960464
P: "Opacity", "double", "Number", "",1
}
}
AnimationStack: 2461089285728, "AnimStack::Take 001", "" {
Properties70: {
P: "LocalStart", "KTime", "Time", "",1924423250
P: "LocalStop", "KTime", "Time", "",230930790000
P: "ReferenceStart", "KTime", "Time", "",1924423250
P: "ReferenceStop", "KTime", "Time", "",230930790000
}
}
AnimationLayer: 2460354443744, "AnimLayer::BaseLayer", "" {
}
}
; Object connections
;------------------------------------------------------------------
Connections: {
;Model::pCube1, Model::RootNode
C: "OO",2461087530672,0
;AnimLayer::BaseLayer, AnimStack::Take 001
C: "OO",2460354443744,2461089285728
;Geometry::, Model::pCube1
C: "OO",2459249891472,2461087530672
;Material::lambert1, Model::pCube1
C: "OO",2460372910976,2461087530672
}
;Takes section
;----------------------------------------------------
Takes: {
Current: "Take 001"
Take: "Take 001" {
FileName: "Take_001.tak"
LocalTime: 1924423250,230930790000
ReferenceTime: 1924423250,230930790000
}
}

View File

@@ -0,0 +1,118 @@
<?xml version="1.0" encoding="utf-8"?>
<AutoVisualizer xmlns="http://schemas.microsoft.com/vstudio/debugger/natvis/2010">
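  <!-- Debugger visualizers for ufbx's internal FBX node, value array, and XML tag types. -->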
<Type Name="ufbxi_node">
<DisplayString>{{ name={name,s} }}</DisplayString>
<Expand>
<Item Name="name">name,s</Item>
<Synthetic Name="values" Condition="value_type_mask!=3">
<DisplayString Condition="(value_type_mask&amp;0xffff) == 0">{{ num_values = 0 }}</DisplayString>
<DisplayString Condition="(value_type_mask&amp;0xfffc) == 0">{{ num_values = 1 }}</DisplayString>
<DisplayString Condition="(value_type_mask&amp;0xfff0) == 0">{{ num_values = 2 }}</DisplayString>
<DisplayString Condition="(value_type_mask&amp;0xffc0) == 0">{{ num_values = 3 }}</DisplayString>
<DisplayString Condition="(value_type_mask&amp;0xff00) == 0">{{ num_values = 4 }}</DisplayString>
<DisplayString Condition="(value_type_mask&amp;0xfc00) == 0">{{ num_values = 5 }}</DisplayString>
<DisplayString Condition="(value_type_mask&amp;0xf000) == 0">{{ num_values = 6 }}</DisplayString>
<DisplayString Condition="(value_type_mask&amp;0xc000) == 0">{{ num_values = 7 }}</DisplayString>
<Expand>
<CustomListItems MaxItemsPerView="7">
<Variable Name="mask" InitialValue="value_type_mask"/>
<Variable Name="ix" InitialValue="0"/>
<Loop>
<Break Condition="mask == 0" />
<Item Condition="(mask &amp; 0x3) == 1 &amp;&amp; (double)(int64_t)vals[ix].f == vals[ix].f">(int64_t)vals[ix].f</Item>
<Item Condition="(mask &amp; 0x3) == 1 &amp;&amp; (double)(int64_t)vals[ix].f != vals[ix].f">vals[ix].f</Item>
<Item Condition="(mask &amp; 0x3) == 2">vals[ix].s</Item>
<Exec>mask = mask >> 2</Exec>
<Exec>ix = ix + 1</Exec>
</Loop>
</CustomListItems>
</Expand>
</Synthetic>
<Item Name="array" Condition="value_type_mask==3">array</Item>
<Synthetic Name="children">
<DisplayString>{{ num_children={num_children} }}</DisplayString>
<Expand>
<ArrayItems>
<Size>num_children</Size>
<ValuePointer>children</ValuePointer>
</ArrayItems>
</Expand>
</Synthetic>
</Expand>
</Type>
<Type Name="ufbxi_value_array">
<DisplayString>{{ size={size} }}</DisplayString>
<Expand>
<ArrayItems Condition="type=='b'">
<Size>size</Size>
<ValuePointer>(bool*)data</ValuePointer>
</ArrayItems>
<ArrayItems Condition="type=='i'">
<Size>size</Size>
<ValuePointer>(int32_t*)data</ValuePointer>
</ArrayItems>
<ArrayItems Condition="type=='l'">
<Size>size</Size>
<ValuePointer>(int64_t*)data</ValuePointer>
</ArrayItems>
<ArrayItems Condition="type=='f'">
<Size>size</Size>
<ValuePointer>(float*)data</ValuePointer>
</ArrayItems>
<ArrayItems Condition="type=='d'">
<Size>size</Size>
<ValuePointer>(double*)data</ValuePointer>
</ArrayItems>
</Expand>
</Type>
<Type Name="ufbxi_xml_tag">
<DisplayString Condition="name.length > 0">&lt;{name.data,sb}&gt;</DisplayString>
<DisplayString Condition="text.length > 0">{text.data,s}</DisplayString>
<DisplayString Condition="name.length == 0 &amp;&amp; text.length == 0">(XML root)</DisplayString>
<Expand>
<Item Name="name" Condition="name.length > 0">name,s</Item>
<Item Name="text" Condition="text.length > 0">text,s</Item>
<Synthetic Name="attribs" Condition="num_attribs">
<DisplayString>{{ count={num_attribs} }}</DisplayString>
<Expand>
<ArrayItems>
<Size>num_attribs</Size>
<ValuePointer>attribs</ValuePointer>
</ArrayItems>
</Expand>
</Synthetic>
<Synthetic Name="children" Condition="num_children">
<DisplayString>{{ count={num_children} }}</DisplayString>
<Expand>
<ArrayItems>
<Size>num_children</Size>
<ValuePointer>children</ValuePointer>
</ArrayItems>
</Expand>
</Synthetic>
</Expand>
</Type>
</AutoVisualizer>

View File

@@ -0,0 +1,169 @@
import math
import random
import argparse
import transmute_fbx as tfbx
Node = tfbx.Node
Value = tfbx.Value
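# Largest Unicode codepoint that fits in a UTF-8 sequence of `width` bytes.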
def max_codepoint(width):
if width == 0:
return -1
elif width == 1:
return 0x7f
elif width == 2:
return 0x7ff
elif width == 3:
return 0xffff
elif width == 4:
return 0x10_ffff
else:
raise ValueError(f"Unsupported width: {width}")
def codepoint_to_utf8(codepoint, width, *, allow_overflow=False):
"""Unrestricted codepoint to UTF-8"""
if not allow_overflow:
assert codepoint <= max_codepoint(width)
c = codepoint
if width == 1:
return bytes([c])
elif width == 2:
return bytes([
0b1100_0000 | ((c >> 6) & 0b0001_1111),
0b1000_0000 | ((c >> 0) & 0b0011_1111),
])
elif width == 3:
return bytes([
0b1110_0000 | ((c >> 12) & 0b0000_1111),
0b1000_0000 | ((c >> 6) & 0b0011_1111),
0b1000_0000 | ((c >> 0) & 0b0011_1111),
])
elif width == 4:
return bytes([
0b1111_0000 | ((c >> 18) & 0b0000_0111),
0b1000_0000 | ((c >> 12) & 0b0011_1111),
0b1000_0000 | ((c >> 6) & 0b0011_1111),
0b1000_0000 | ((c >> 0) & 0b0011_1111),
])
else:
raise ValueError(f"Unsupported width: {width}")
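# Minimal big-endian byte representation of a non-negative integer (b"" for 0).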
def int_to_bytes(value):
num_bytes = int(math.ceil(math.log2(value + 1) / 8))
return value.to_bytes(num_bytes, "big", signed=False)
def valid_utf8(utf8):
try:
utf8.decode("utf-8")
return True
except UnicodeDecodeError:
return False
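# Hand-picked byte strings that exercise obvious UTF-8 validation edge cases.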
fuzz_encodings = {
b"",
b"\x00",
b"\xff",
b"\xff\xff",
b"\xff\xff\xff",
b"\xff\xff\xff\xff",
b"Hello world",
b"Hello\xffworld",
}
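# Add both encodings on either side of every validity boundary (e.g. around the surrogate range).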
for width in range(1, 4+1):
for codepoint in range(max_codepoint(width) - 1):
prev = codepoint_to_utf8(codepoint, width)
next = codepoint_to_utf8(codepoint + 1, width)
if valid_utf8(prev) != valid_utf8(next):
fuzz_encodings.add(prev)
fuzz_encodings.add(next)
for width in range(1, 4+1):
fuzz_encodings.add(codepoint_to_utf8(max_codepoint(width - 1) + 1, width))
fuzz_encodings.add(codepoint_to_utf8(max_codepoint(width), width))
for width in range(1, 4+1):
for n in range(0x10ffff):
codepoint = (n*n)//7 + n
if codepoint > max_codepoint(width):
break
fuzz_encodings.add(codepoint_to_utf8(codepoint, width))
for n in range(0x400):
fuzz_encodings.add(int_to_bytes(n))
for n in range(0, 0x1_00_00, 64):
fuzz_encodings.add(int_to_bytes(n))
fuzz_encodings.add(codepoint_to_utf8(max_codepoint(4) + 1, 4, allow_overflow=True))
for n in range(32):
codepoint = 0x10FFFF + n**4
assert codepoint <= 0x1FFFFF
fuzz_encodings.add(codepoint_to_utf8(codepoint, 4, allow_overflow=True))
fuzz_encodings.add(codepoint_to_utf8(0x1FFFFF, 4, allow_overflow=True))
random.seed(1)
for n in range(200):
for k in range(1, 4+1):
fuzz_encodings.add(bytes(random.choices(range(256), k=k)))
good = []
bad = []
for enc in sorted(fuzz_encodings, key=lambda e: (len(e), e)):
if valid_utf8(enc):
good.append(enc)
else:
bad.append(enc)
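# Emit one FBX property per encoding: the name is the hex spelling of the bytes, the last value the raw bytes.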
def fmt_fbx_props(encodings, ascii):
for enc in encodings:
hex = b"".join(f"{x:02x}".encode("ascii") for x in enc)
if ascii:
string = enc.replace(b"\"", b"&quot;")
else:
string = enc
yield Node(b"P", [Value(b"S", hex), Value(b"S", b""), Value(b"S", b""), Value(b"S", b""), Value(b"S", string)], [])
def fmt_fbx_model_name(name, ascii):
if ascii:
return Value(b"S", f"Model::{name}".encode("utf-8"))
else:
return Value(b"S", f"{name}\x00\x01Model".encode("utf-8"))
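# Build a root with three models: "Good" (valid UTF-8 values), "Bad" (invalid values),
# and "Ok" (valid values prefixed with an invalid 0xff byte).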
def fmt_fbx_root(ascii):
fbx_root = Node(b"", [], [])
fbx_objects = Node(b"Objects", [], [])
fbx_root.children.append(fbx_objects)
fbx_good = Node(b"Model", [Value(b"L", 1), fmt_fbx_model_name("Good", ascii), Value(b"S", b"Mesh")], [])
fbx_objects.children.append(fbx_good)
fbx_good_props = Node(b"Properties70", [], list(fmt_fbx_props(good, ascii)))
fbx_good.children.append(fbx_good_props)
fbx_bad = Node(b"Model", [Value(b"L", 2), fmt_fbx_model_name("Bad", ascii), Value(b"S", b"Mesh")], [])
fbx_objects.children.append(fbx_bad)
fbx_bad_props = Node(b"Properties70", [], list(fmt_fbx_props(bad, ascii)))
fbx_bad.children.append(fbx_bad_props)
ok = [b"\xff" + enc for enc in good]
fbx_ok = Node(b"Model", [Value(b"L", 3), fmt_fbx_model_name("Ok", ascii), Value(b"S", b"Mesh")], [])
fbx_objects.children.append(fbx_ok)
fbx_ok_props = Node(b"Properties70", [], list(fmt_fbx_props(ok, ascii)))
fbx_ok.children.append(fbx_ok_props)
return fbx_root
parser = argparse.ArgumentParser("unicode_test_gen.py")
parser.add_argument("outfile", help="Output filename")
argv = parser.parse_args()
root = fmt_fbx_root(ascii=False)
with open(argv.outfile, "wb") as f:
tfbx.binary_dump_root(f, root, tfbx.BinaryFormat(7500, False), b"")

View File

@@ -0,0 +1,553 @@
from collections import namedtuple, defaultdict
import itertools
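# Tunable parameters for generating DEFLATE/zlib test streams.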
class Options:
def __init__(self, **kwargs):
self.override_litlen_counts = kwargs.get("override_litlen_counts", { })
self.override_dist_counts = kwargs.get("override_dist_counts", { })
self.max_uncompressed_length = kwargs.get("max_uncompressed_length", 0xffff)
self.prune_interval = kwargs.get("prune_interval", 65536)
self.max_match_distance = kwargs.get("max_match_distance", 32768)
self.search_budget = kwargs.get("search_budget", 4096)
self.force_block_types = kwargs.get("force_block_types", [])
self.block_size = kwargs.get("block_size", 32768)
self.invalid_sym = kwargs.get("invalid_sym", None)
self.no_decode = kwargs.get("no_decode", False)
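# Code: a Huffman code word; IntCoding: base value and extra bits for a length/distance symbol;
# BinDesc: an annotated span of output bits; SymExtra: a code-length symbol with its extra bits.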
Code = namedtuple("Code", "code bits")
IntCoding = namedtuple("IntCoding", "symbol base bits")
BinDesc = namedtuple("BinDesc", "offset value bits desc")
SymExtra = namedtuple("SymExtra", "symbol extra bits")
null_code = Code(0,0)
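# Build the (symbol, base value, extra bits) tables for DEFLATE length and distance codes (RFC 1951 3.2.5).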
def make_int_coding(first_symbol, first_value, bit_sizes):
symbol = first_symbol
value = first_value
codings = []
for bits in bit_sizes:
codings.append(IntCoding(symbol, value, bits))
value += 1 << bits
symbol += 1
return codings
length_coding = make_int_coding(257, 3, [
0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,
])
distance_coding = make_int_coding(0, 1, [
0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,
])
def find_int_coding(codes, value):
for coding in codes:
if value < coding.base + (1 << coding.bits):
return coding
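# LSB-first bit writer; Huffman code words are written bit-reversed via push_rev() as DEFLATE requires.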
class BitBuf:
def __init__(self):
self.pos = 0
self.data = 0
self.desc = []
def push(self, val, bits, desc=""):
if bits == 0: return
assert val < 1 << bits
val = int(val)
self.desc.append(BinDesc(self.pos, val, bits, desc))
self.data |= val << self.pos
self.pos += bits
def push_rev(self, val, bits, desc=""):
if bits == 0: return
assert val < 1 << bits
rev = 0
for n in range(bits):
rev |= ((val >> n) & 1) << bits-n-1
self.push(rev, bits, desc)
def push_code(self, code, desc=""):
self.push(code.code, code.bits, desc)
def push_rev_code(self, code, desc=""):
if code is None:
raise RuntimeError("Empty code")
self.push_rev(code.code, code.bits, desc)
def append(self, buf):
for desc in buf.desc:
self.desc.append(desc._replace(offset = desc.offset + self.pos))
self.data |= buf.data << self.pos
self.pos += buf.pos
def patch(self, offset, value, bits, desc=""):
self.data = self.data & ~(((1 << bits) - 1) << offset) | (value << offset)
def to_bytes(self):
return bytes((self.data>>p&0xff) for p in range(0, self.pos, 8))
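# A run of literal bytes in the LZ77 message.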
class Literal:
def __init__(self, data):
self.data = data
self.length = len(data)
def count_codes(self, litlen_count, dist_count):
for c in self.data:
litlen_count[c] += 1
def encode(self, buf, litlen_syms, dist_syms, opts):
for c in self.data:
sym = litlen_syms.get(c, opts.invalid_sym)
if c >= 32 and c <= 128:
buf.push_rev_code(sym, "Literal '{}' (0x{:02x})".format(chr(c), c))
else:
buf.push_rev_code(sym, "Literal {:3d} (0x{:02x})".format(c, c))
def decode(self, result):
result += self.data
def split(self, pos):
assert pos >= 0
return Literal(self.data[:pos]), Literal(self.data[pos:])
def __repr__(self):
return "Literal({!r})".format(self.data)
class Match:
def __init__(self, length, distance):
self.length = length
self.distance = distance
if length < 258:
self.lcode = find_int_coding(length_coding, length)
else:
assert length == 258
self.lcode = IntCoding(285, 0, 0)
self.dcode = find_int_coding(distance_coding, distance)
def count_codes(self, litlen_count, dist_count):
litlen_count[self.lcode.symbol] += 1
dist_count[self.dcode.symbol] += 1
def encode(self, buf, litlen_syms, dist_syms, opts):
lsym = litlen_syms.get(self.lcode.symbol, opts.invalid_sym)
dsym = dist_syms.get(self.dcode.symbol, opts.invalid_sym)
buf.push_rev_code(lsym, "Length: {}".format(self.length))
if self.lcode.bits > 0:
buf.push(self.length - self.lcode.base, self.lcode.bits, "Length extra")
buf.push_rev_code(dsym, "Distance: {}".format(self.distance))
if self.dcode.bits > 0:
buf.push(self.distance - self.dcode.base, self.dcode.bits, "Distance extra")
def decode(self, result):
begin = len(result) - self.distance
assert begin >= 0
for n in range(begin, begin + self.length):
result.append(result[n])
def split(self, pos):
return self, Literal(b"")
def __repr__(self):
return "Match({}, {})".format(self.length, self.distance)
def make_huffman_bits(syms, max_code_length):
if len(syms) == 0:
return { }
if len(syms) == 1:
return { next(iter(syms)): 1 }
sym_groups = ((prob, (sym,)) for sym,prob in syms.items())
initial_groups = list(sorted(sym_groups))
groups = initial_groups
for n in range(max_code_length-1):
packaged = [(a[0]+b[0], a[1]+b[1]) for a,b in zip(groups[0::2], groups[1::2])]
groups = list(sorted(packaged + initial_groups))
sym_bits = { }
for g in groups[:(len(syms) - 1) * 2]:
for sym in g[1]:
sym_bits[sym] = sym_bits.get(sym, 0) + 1
return sym_bits
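# Assign canonical Huffman codes from per-symbol code lengths (RFC 1951 3.2.2).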
def make_huffman_codes(sym_bits, max_code_length):
if len(sym_bits) == 0:
return { }
bl_count = [0] * (max_code_length + 1)
next_code = [0] * (max_code_length + 1)
for bits in sym_bits.values():
bl_count[bits] += 1
code = 0
for n in range(1, max_code_length + 1):
code = (code + bl_count[n - 1]) << 1
next_code[n] = code
codes = { }
for sym,bits in sorted(sym_bits.items()):
codes[sym] = Code(next_code[bits], bits)
next_code[bits] += 1
return codes
def make_huffman(syms, max_code_length):
sym_bits = make_huffman_bits(syms, max_code_length)
return make_huffman_codes(sym_bits, max_code_length)
def decode(message):
result = []
for m in message:
m.decode(result)
return bytes(result)
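# Run-length encode a list of code lengths using symbols 16 (repeat previous 3-6 times),
# 17 (3-10 zeros) and 18 (11-138 zeros), as used by dynamic block headers.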
def encode_huff_bits(bits):
encoded = []
for value,copies in itertools.groupby(bits):
num = len(list(copies))
assert value < 16
if value == 0:
while num >= 11:
amount = min(num, 138)
encoded.append(SymExtra(18, amount-11, 7))
num -= amount
while num >= 3:
amount = min(num, 10)
encoded.append(SymExtra(17, amount-3, 3))
num -= amount
while num >= 1:
encoded.append(SymExtra(0, 0, 0))
num -= 1
else:
encoded.append(SymExtra(value, 0, 0))
num -= 1
while num >= 3:
amount = min(num, 6)
encoded.append(SymExtra(16, amount-3, 2))
num -= amount
while num >= 1:
encoded.append(SymExtra(value, 0, 0))
num -= 1
return encoded
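# Write the run-length encoded code lengths using the code-length Huffman tree.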
def write_encoded_huff_bits(buf, codes, syms, desc):
value = 0
prev = 0
for code in codes:
sym = code.symbol
num = 1
if sym <= 15:
buf.push_rev_code(syms[sym], "{} {} bits: {}".format(desc, value, sym))
prev = sym
elif sym == 16:
num = code.extra + 3
buf.push_rev_code(syms[sym], "{} {}-{} bits: {}".format(desc, value, value+num-1, prev))
elif sym == 17:
num = code.extra + 3
buf.push_rev_code(syms[sym], "{} {}-{} bits: {}".format(desc, value, value+num-1, 0))
elif sym == 18:
num = code.extra + 11
buf.push_rev_code(syms[sym], "{} {}-{} bits: {}".format(desc, value, value+num-1, 0))
value += num
if code.bits > 0:
buf.push(code.extra, code.bits, "{} N={}".format(desc, num))
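# Drop match-chain entries that have fallen outside the allowed match distance.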
def prune_matches(matches, offset, opts):
new_matches = defaultdict(list)
begin = offset - opts.max_match_distance
for trigraph,chain in matches.items():
new_chain = [o for o in chain if o >= begin]
if new_chain:
new_matches[trigraph] = new_chain
return new_matches
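# Greedy LZ77 matching: hash chains keyed by 3-byte substrings, match length capped at 258,
# search bounded by opts.search_budget.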
def match_block(data, opts=Options()):
message = []
matches = defaultdict(list)
literal = []
offset = 0
size = len(data)
prune_interval = 0
while offset + 3 <= size:
trigraph = data[offset:offset+3]
advance = 1
match_begin, match_length = 0, 0
search_steps = 0
for m in reversed(matches[trigraph]):
length = 3
while offset + length < size and length < 258:
if data[offset + length] != data[m + length]: break
length += 1
search_steps += 1
            if length > match_length and offset - m <= 32768:  # keep back-references within the 32 kB DEFLATE window
match_begin, match_length = m, length
if search_steps >= opts.search_budget:
break
if match_length > 0:
if literal:
message.append(Literal(bytes(literal)))
literal.clear()
message.append(Match(match_length, offset - match_begin))
advance = match_length
else:
literal.append(data[offset])
for n in range(advance):
if offset >= 3:
trigraph = data[offset - 3:offset]
matches[trigraph].append(offset - 3)
offset += 1
prune_interval += advance
if prune_interval >= opts.prune_interval:
matches = prune_matches(matches, offset, opts)
prune_interval = 0
while offset < size:
literal.append(data[offset])
offset += 1
if literal:
message.append(Literal(bytes(literal)))
return message
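# Stored block(s) (BTYPE=00): byte-aligned LEN/NLEN header followed by the raw bytes,
# split into chunks of at most opts.max_uncompressed_length.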
def compress_block_uncompressed(buf, data, align, final, opts):
size = len(data)
begin = 0
while begin < size:
amount = min(size - begin, opts.max_uncompressed_length)
end = begin + amount
real_final = final and end == size
buf.push(real_final, 1, "BFINAL Final chunk: {}".format(real_final))
buf.push(0b00, 2, "BTYPE Chunk type: Uncompressed")
buf.push(0, -(buf.pos + align) & 7, "Pad to byte")
buf.push(amount, 16, "LEN: {}".format(amount))
buf.push(~amount&0xffff, 16, "NLEN: ~{}".format(amount))
for byte in data[begin:end]:
buf.push(byte, 8, "Byte '{}' ({:02x})".format(chr(byte), byte))
begin = end
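# Fixed-Huffman block (BTYPE=01) using the static code lengths from RFC 1951 3.2.6.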
def compress_block_static(buf, message, final, opts):
litlen_bits = [8]*(144-0) + [9]*(256-144) + [7]*(280-256) + [8]*(288-280)
distance_bits = [5] * 32
litlen_syms = make_huffman_codes(dict(enumerate(litlen_bits)), 16)
distance_syms = make_huffman_codes(dict(enumerate(distance_bits)), 16)
buf.push(final, 1, "BFINAL Final chunk: {}".format(final))
buf.push(0b01, 2, "BTYPE Chunk type: Static Huffman")
for m in message:
m.encode(buf, litlen_syms, distance_syms, opts)
# End-of-block
buf.push_rev_code(litlen_syms.get(256, opts.invalid_sym), "End-of-block")
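# Dynamic-Huffman block (BTYPE=10): build litlen/distance/code-length trees and write the
# HLIT/HDIST/HCLEN header before the compressed data.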
def compress_block_dynamic(buf, message, final, opts):
litlen_count = [0] * 286
distance_count = [0] * 30
# There's always one end-of-block
litlen_count[256] = 1
for m in message:
m.count_codes(litlen_count, distance_count)
for sym,count in opts.override_litlen_counts.items():
litlen_count[sym] = count
for sym,count in opts.override_dist_counts.items():
distance_count[sym] = count
litlen_map = { sym: count for sym,count in enumerate(litlen_count) if count > 0 }
distance_map = { sym: count for sym,count in enumerate(distance_count) if count > 0 }
litlen_syms = make_huffman(litlen_map, 15)
distance_syms = make_huffman(distance_map, 15)
num_litlens = max(itertools.chain((k for k in litlen_map.keys()), (256,))) + 1
num_distances = max(itertools.chain((k for k in distance_map.keys()), (0,))) + 1
litlen_bits = [litlen_syms.get(s, null_code).bits for s in range(num_litlens)]
distance_bits = [distance_syms.get(s, null_code).bits for s in range(num_distances)]
litlen_bit_codes = encode_huff_bits(litlen_bits)
distance_bit_codes = encode_huff_bits(distance_bits)
codelen_count = [0] * 20
for code in itertools.chain(litlen_bit_codes, distance_bit_codes):
codelen_count[code.symbol] += 1
codelen_map = { sym: count for sym,count in enumerate(codelen_count) if count > 0 }
codelen_syms = make_huffman(codelen_map, 8)
codelen_permutation = [16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15]
num_codelens = 0
for i, p in enumerate(codelen_permutation):
if codelen_count[p] > 0:
num_codelens = i + 1
num_codelens = max(num_codelens, 4)
buf.push(final, 1, "BFINAL Final chunk: {}".format(final))
buf.push(0b10, 2, "BTYPE Chunk type: Dynamic Huffman")
buf.push(num_litlens - 257, 5, "HLIT Number of Litlen codes: {} (257 + {})".format(num_litlens, num_litlens - 257))
buf.push(num_distances - 1, 5, "HDIST Number of Distance codes: {} (1 + {})".format(num_distances, num_distances - 1))
buf.push(num_codelens - 4, 4, "HCLEN Number of Codelen codes: {} (4 + {})".format(num_codelens, num_codelens - 4))
for p in codelen_permutation[:num_codelens]:
bits = 0
if p in codelen_syms:
bits = codelen_syms[p].bits
buf.push(bits, 3, "Codelen {} bits: {}".format(p, bits))
write_encoded_huff_bits(buf, litlen_bit_codes, codelen_syms, "Litlen")
write_encoded_huff_bits(buf, distance_bit_codes, codelen_syms, "Distance")
for m in message:
m.encode(buf, litlen_syms, distance_syms, opts)
# End-of-block
buf.push_rev_code(litlen_syms.get(256, opts.invalid_sym), "End-of-block")
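# Adler-32 checksum used as the zlib stream trailer.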
def adler32(data):
a, b = 1, 0
for d in data:
a = (a + d) % 65521
b = (b + a) % 65521
return b << 16 | a
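# Wrap the message in a zlib stream: CMF/FLG header, one or more DEFLATE blocks (choosing the
# smallest of stored/static/dynamic per block), then the Adler-32 of the uncompressed data.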
def compress_message(message, opts=Options(), *args):
buf = BitBuf()
# ZLIB CFM byte
buf.push(8, 4, "CM=8 Compression method: DEFLATE")
buf.push(7, 4, "CINFO=7 Compression info: 32kB window size")
# ZLIB FLG byte
buf.push(28, 5, "FCHECK (CMF*256+FLG) % 31 == 0")
buf.push(0, 1, "FDICT=0 Preset dictionary: No")
buf.push(2, 2, "FLEVEL=2 Compression level: Default")
multi_part = False
multi_messages = []
multi_opts = []
if args:
multi_part = True
multi_messages = [message]
multi_opts = [opts]
args_it = iter(args)
message = message[:]
for msg, opt in zip(args_it, args_it):
message += msg
multi_messages.append(msg)
multi_opts.append(opt)
byte_offset = 0
part_pos = 0
num_parts = len(message)
overflow_part = Literal(b"")
block_message = []
block_opts = opts
message_bytes = b"" if opts.no_decode else decode(message)
last_part = False
multi_index = 0
while not last_part:
if multi_part:
block_message = multi_messages[multi_index]
block_opts = multi_opts[multi_index]
size = sum(m.length for m in block_message)
block_index = 0
multi_index += 1
last_part = multi_index == len(multi_messages)
else:
block_message.clear()
part, overflow_part = overflow_part.split(opts.block_size)
if part.length > 0:
block_message.append(part)
size = part.length
# Append parts until desired block size is reached
if size < opts.block_size:
while part_pos < num_parts:
part = message[part_pos]
part_pos += 1
if size + part.length >= opts.block_size:
last_part, overflow_part = part.split(opts.block_size - size)
if last_part.length > 0:
block_message.append(last_part)
size += last_part.length
break
else:
block_message.append(part)
size += part.length
last_part = part_pos >= num_parts and overflow_part.length == 0
# Compress the block
best_buf = None
block_index = 0
for block_type in range(3):
if block_index < len(block_opts.force_block_types):
if block_type != block_opts.force_block_types[block_index]:
continue
block_buf = BitBuf()
if block_type == 0:
compress_block_uncompressed(block_buf, message_bytes[byte_offset:byte_offset + size], buf.pos, last_part, block_opts)
elif block_type == 1:
compress_block_static(block_buf, block_message, last_part, block_opts)
elif block_type == 2:
compress_block_dynamic(block_buf, block_message, last_part, block_opts)
if not best_buf or block_buf.pos < best_buf.pos:
best_buf = block_buf
buf.append(best_buf)
byte_offset += size
block_index += 1
buf.push(0, -buf.pos & 7, "Pad to byte")
adler_hash = adler32(message_bytes)
buf.push((adler_hash >> 24) & 0xff, 8, "Adler[24:32]")
buf.push((adler_hash >> 16) & 0xff, 8, "Adler[16:24]")
buf.push((adler_hash >> 8) & 0xff, 8, "Adler[8:16]")
buf.push((adler_hash >> 0) & 0xff, 8, "Adler[0:8]")
return buf
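# Convenience wrapper: LZ77-match the input and encode it as a complete zlib stream.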
def deflate(data, opts=Options()):
message = match_block(data, opts)
encoded = compress_message(message, opts)
return encoded
def print_huffman(tree):
    width = max(len(str(s)) for s in tree.keys())
    for sym,code in tree.items():
        # Print each symbol with its code word zero-padded to the code length
        print("{0:>{1}}: {2:0{3}b}".format(sym, width, code.code, code.bits))
def print_buf(buf):
for d in buf.desc:
val = " {0:0{1}b}".format(d.value, d.bits)
if len(val) > 10:
val = "0x{0:x}".format(d.value)
desc = d.desc
patched_value = (buf.data >> d.offset) & ((1 << d.bits) - 1)
spacer = "|"
if patched_value != d.value:
desc += " >>> Patched to: {0:0{1}b} ({0})".format(patched_value, d.bits)
spacer = ">"
print("{0:>4} {0:>4x} {5}{1:>2} {5} {2:>10} {5} {3:>4} {5} {4}".format(d.offset, d.bits, val, d.value, desc, spacer))
def print_bytes(data):
print(''.join('\\x%02x' % b for b in data))
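# Minimal usage sketch: compress_message() writes a zlib header and an Adler-32 trailer around
# the DEFLATE blocks, so the output of deflate() should round-trip through Python's zlib module.
if __name__ == "__main__":
    import zlib
    payload = b"Hello world, hello world, hello world"
    stream = deflate(payload).to_bytes()
    assert zlib.decompress(stream) == payload
    print_bytes(stream)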