Some checks failed
Zig Project Action / Lint, Spell-check and test zig project (push) Failing after 55s
It still has a minor memory leak and has at least two hacks implemented that I would like to improve on.
84 lines
2.8 KiB
Zig
//! Model of the `zterm` application.
|
|
|
|
// TODO planned features:
|
|
// - create change sets (i.e. parse diff headers and provide contents) — is the multiarray what I need?
|
|
// - create `elements.Change` structs which each point to every change associated to a given file (for each file)
|
|
|
|
// FIX known issues:
|
|
|
|
/// The raw diff text this model was parsed from. Borrowed from the caller of
/// `init` — the model does not copy it, and `deinit` does not free it.
content: []const u8,

/// Maps each file-name span (an `Index` into `content`) to the list of hunk
/// spans parsed for that file. Populated by `init`, released by `deinit`.
changes: std.AutoArrayHashMapUnmanaged(Index, std.ArrayList(Index)) = .empty,

/// How the diff should be laid out when rendered.
/// NOTE(review): nothing in this file mutates or reads this after
/// construction — presumably the view layer does; confirm against callers.
render_mode: enum {
    side_by_side,
    stacked,
} = .stacked,
|
|
|
|
// TODO alloc-free parsing? (similar to how I've implemented the parser for `smd`?)
|
|
// - parsing would tokenize and create a structure for the following patterns
|
|
// ```
|
|
// .content -> // initial content is ignored!
|
|
// (.file -> (.header -> .content)*)* // these are relevant
|
|
// -> .eof
|
|
// ```
|
|
// For each .file, collect each following pair of .header and .content into a single hunk; if a new .file follows, there are no more changes for the previous file, and the new .file's changes will be collected until an .eof is found
|
|
/// Parses `content` (a diff) into a `Model`.
///
/// Returns `error.EmptyDiff` when `content` is empty, and
/// `error.UnexpectedFormat` when no file/hunk pairs were found.
/// The returned model borrows `content` (caller keeps ownership of the text);
/// caller must release the model with `deinit`.
pub fn init(gpa: Allocator, content: [:0]const u8) !Model {
    if (content.len == 0) return error.EmptyDiff;

    var this: Model = .{ .content = content };
    // Fix (leak): change lists already inserted into `changes` used to leak
    // whenever a later `getOrPut`/`append` failed, or when we bailed out with
    // `error.UnexpectedFormat` after partial inserts.
    errdefer this.deinit(gpa);

    var tokenizer = lexer.Tokenizer.init(content);
    var token = tokenizer.next();
    // Fix (UB): `file` was previously `undefined` and was read whenever a
    // `.header` -> `.content` pair arrived before the first `.file` token.
    // Such leading hunks are now skipped, matching the "initial content is
    // ignored" intent noted above.
    var file: ?Index = null;
    var diff: Index = undefined;
    var last_tag: lexer.Token.Tag = .invalid;

    while (token.tag != .eof) : (token = tokenizer.next()) {
        // Runs on every path out of the loop body, including `continue`.
        defer last_tag = token.tag;
        switch (token.tag) {
            .file => file = .{
                .idx = token.loc.idx,
                .len = token.loc.len,
            },
            // A header is only meaningful directly after a file name or the
            // content of a previous hunk.
            .header => if (last_tag == .file or last_tag == .content) {
                diff = .{
                    .idx = token.loc.idx,
                    .len = token.loc.len,
                };
            },
            .content => if (last_tag == .header) {
                // Grow the span started by the header to cover the hunk body.
                diff.len += token.loc.len;
                // Skip hunks that appear before any `.file` token.
                const current_file = file orelse continue;
                const entry = try this.changes.getOrPut(gpa, current_file);
                if (!entry.found_existing) entry.value_ptr.* = .empty;
                try entry.value_ptr.append(gpa, diff);
            },
            // NOTE(review): assumes the tokenizer emits only the tags above
            // before `.eof` — confirm against `lexer.zig`.
            else => unreachable,
        }
    }

    if (this.changes.entries.len == 0) return error.UnexpectedFormat;
    return this;
}
|
|
|
|
/// Releases everything `init` allocated: every per-file change list, then
/// the map itself. The diff text in `content` is caller-owned and untouched.
pub fn deinit(this: *Model, gpa: Allocator) void {
    var change_lists = this.changes.iterator();
    while (change_lists.next()) |entry| entry.value_ptr.deinit(gpa);
    this.changes.deinit(gpa);
}
|
|
|
|
/// A byte span into `Model.content`: `idx` is the starting offset, `len` the
/// span length. Used both as the file-name key of `changes` and as a hunk
/// entry in each change list.
pub const Index = struct {
    idx: usize,
    len: usize,
};
|
|
|
|
const Model = @This();
|
|
|
|
const std = @import("std");
|
|
const assert = std.debug.assert;
|
|
const Allocator = std.mem.Allocator;
|
|
const lexer = @import("lexer.zig");
|
|
|
|
test {
    // Reference every declaration so nested tests and comptime checks in
    // this module are actually compiled and run by the test step.
    std.testing.refAllDeclsRecursive(@This());
    // Also pull in the lexer's tests.
    _ = @import("lexer.zig");
}
|