
Commit

Merge pull request #1032 from ziglang/pointer-reform
 use * for pointer type instead of &
andrewrk committed Jun 1, 2018
2 parents 717ac85 + e29d12d commit 3918e76
Showing 150 changed files with 2,438 additions and 2,350 deletions.
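
For context, a minimal sketch of what the reform looks like in user code (illustrative only, not taken from this commit): the pointer type is now spelled *T rather than &T, while the address-of operator (&x) and dereference (ptr.*) stay the same. The bump function below is a hypothetical example.

const std = @import("std");

// After this commit; before it, the parameter was declared `counter: &usize`.
fn bump(counter: *usize) void {
    counter.* += 1;
}

test "pointer parameter" {
    var x: usize = 0;
    bump(&x);
    std.debug.assert(x == 1);
}
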
14 changes: 7 additions & 7 deletions build.zig
@@ -10,7 +10,7 @@ const ArrayList = std.ArrayList;
const Buffer = std.Buffer;
const io = std.io;

-pub fn build(b: &Builder) !void {
+pub fn build(b: *Builder) !void {
const mode = b.standardReleaseOptions();

var docgen_exe = b.addExecutable("docgen", "doc/docgen.zig");
@@ -132,7 +132,7 @@ pub fn build(b: &Builder) !void {
test_step.dependOn(tests.addGenHTests(b, test_filter));
}

-fn dependOnLib(lib_exe_obj: &std.build.LibExeObjStep, dep: &const LibraryDep) void {
+fn dependOnLib(lib_exe_obj: *std.build.LibExeObjStep, dep: *const LibraryDep) void {
for (dep.libdirs.toSliceConst()) |lib_dir| {
lib_exe_obj.addLibPath(lib_dir);
}
@@ -147,7 +147,7 @@ fn dependOnLib(lib_exe_obj: &std.build.LibExeObjStep, dep: &const LibraryDep) vo
}
}

-fn addCppLib(b: &Builder, lib_exe_obj: &std.build.LibExeObjStep, cmake_binary_dir: []const u8, lib_name: []const u8) void {
+fn addCppLib(b: *Builder, lib_exe_obj: *std.build.LibExeObjStep, cmake_binary_dir: []const u8, lib_name: []const u8) void {
const lib_prefix = if (lib_exe_obj.target.isWindows()) "" else "lib";
lib_exe_obj.addObjectFile(os.path.join(b.allocator, cmake_binary_dir, "zig_cpp", b.fmt("{}{}{}", lib_prefix, lib_name, lib_exe_obj.target.libFileExt())) catch unreachable);
}
@@ -159,7 +159,7 @@ const LibraryDep = struct {
includes: ArrayList([]const u8),
};

-fn findLLVM(b: &Builder, llvm_config_exe: []const u8) !LibraryDep {
+fn findLLVM(b: *Builder, llvm_config_exe: []const u8) !LibraryDep {
const libs_output = try b.exec([][]const u8{
llvm_config_exe,
"--libs",
@@ -217,7 +217,7 @@ fn findLLVM(b: &Builder, llvm_config_exe: []const u8) !LibraryDep {
return result;
}

-pub fn installStdLib(b: &Builder, stdlib_files: []const u8) void {
+pub fn installStdLib(b: *Builder, stdlib_files: []const u8) void {
var it = mem.split(stdlib_files, ";");
while (it.next()) |stdlib_file| {
const src_path = os.path.join(b.allocator, "std", stdlib_file) catch unreachable;
@@ -226,7 +226,7 @@ pub fn installStdLib(b: &Builder, stdlib_files: []const u8) void {
}
}

-pub fn installCHeaders(b: &Builder, c_header_files: []const u8) void {
+pub fn installCHeaders(b: *Builder, c_header_files: []const u8) void {
var it = mem.split(c_header_files, ";");
while (it.next()) |c_header_file| {
const src_path = os.path.join(b.allocator, "c_headers", c_header_file) catch unreachable;
@@ -235,7 +235,7 @@ pub fn installCHeaders(b: &Builder, c_header_files: []const u8) void {
}
}

-fn nextValue(index: &usize, build_info: []const u8) []const u8 {
+fn nextValue(index: *usize, build_info: []const u8) []const u8 {
const start = index.*;
while (true) : (index.* += 1) {
switch (build_info[index.*]) {
22 changes: 11 additions & 11 deletions doc/docgen.zig
@@ -104,7 +104,7 @@ const Tokenizer = struct {
};
}

-fn next(self: &Tokenizer) Token {
+fn next(self: *Tokenizer) Token {
var result = Token{
.id = Token.Id.Eof,
.start = self.index,
@@ -196,7 +196,7 @@ const Tokenizer = struct {
line_end: usize,
};

-fn getTokenLocation(self: &Tokenizer, token: &const Token) Location {
+fn getTokenLocation(self: *Tokenizer, token: *const Token) Location {
var loc = Location{
.line = 0,
.column = 0,
@@ -221,7 +221,7 @@ const Tokenizer = struct {
}
};

-fn parseError(tokenizer: &Tokenizer, token: &const Token, comptime fmt: []const u8, args: ...) error {
+fn parseError(tokenizer: *Tokenizer, token: *const Token, comptime fmt: []const u8, args: ...) error {
const loc = tokenizer.getTokenLocation(token);
warn("{}:{}:{}: error: " ++ fmt ++ "\n", tokenizer.source_file_name, loc.line + 1, loc.column + 1, args);
if (loc.line_start <= loc.line_end) {
@@ -244,13 +244,13 @@ fn parseError(tokenizer: &Tokenizer, token: &const Token, comptime fmt: []const
return error.ParseError;
}

-fn assertToken(tokenizer: &Tokenizer, token: &const Token, id: Token.Id) !void {
+fn assertToken(tokenizer: *Tokenizer, token: *const Token, id: Token.Id) !void {
if (token.id != id) {
return parseError(tokenizer, token, "expected {}, found {}", @tagName(id), @tagName(token.id));
}
}

-fn eatToken(tokenizer: &Tokenizer, id: Token.Id) !Token {
+fn eatToken(tokenizer: *Tokenizer, id: Token.Id) !Token {
const token = tokenizer.next();
try assertToken(tokenizer, token, id);
return token;
@@ -317,7 +317,7 @@ const Action = enum {
Close,
};

-fn genToc(allocator: &mem.Allocator, tokenizer: &Tokenizer) !Toc {
+fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
var urls = std.HashMap([]const u8, Token, mem.hash_slice_u8, mem.eql_slice_u8).init(allocator);
errdefer urls.deinit();

@@ -546,7 +546,7 @@ fn genToc(allocator: &mem.Allocator, tokenizer: &Tokenizer) !Toc {
};
}

-fn urlize(allocator: &mem.Allocator, input: []const u8) ![]u8 {
+fn urlize(allocator: *mem.Allocator, input: []const u8) ![]u8 {
var buf = try std.Buffer.initSize(allocator, 0);
defer buf.deinit();

@@ -566,7 +566,7 @@ fn urlize(allocator: &mem.Allocator, input: []const u8) ![]u8 {
return buf.toOwnedSlice();
}

-fn escapeHtml(allocator: &mem.Allocator, input: []const u8) ![]u8 {
+fn escapeHtml(allocator: *mem.Allocator, input: []const u8) ![]u8 {
var buf = try std.Buffer.initSize(allocator, 0);
defer buf.deinit();

@@ -608,7 +608,7 @@ test "term color" {
assert(mem.eql(u8, result, "A<span class=\"t32\">green</span>B"));
}

-fn termColor(allocator: &mem.Allocator, input: []const u8) ![]u8 {
+fn termColor(allocator: *mem.Allocator, input: []const u8) ![]u8 {
var buf = try std.Buffer.initSize(allocator, 0);
defer buf.deinit();

@@ -688,7 +688,7 @@ fn termColor(allocator: &mem.Allocator, input: []const u8) ![]u8 {
return buf.toOwnedSlice();
}

-fn genHtml(allocator: &mem.Allocator, tokenizer: &Tokenizer, toc: &Toc, out: var, zig_exe: []const u8) !void {
+fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: var, zig_exe: []const u8) !void {
var code_progress_index: usize = 0;
for (toc.nodes) |node| {
switch (node) {
@@ -1036,7 +1036,7 @@ fn genHtml(allocator: &mem.Allocator, tokenizer: &Tokenizer, toc: &Toc, out: var
}
}

-fn exec(allocator: &mem.Allocator, args: []const []const u8) !os.ChildProcess.ExecResult {
+fn exec(allocator: *mem.Allocator, args: []const []const u8) !os.ChildProcess.ExecResult {
const result = try os.ChildProcess.exec(allocator, args, null, null, max_doc_file_size);
switch (result.term) {
os.ChildProcess.Term.Exited => |exit_code| {
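
The hunks above also convert pointer-to-const parameters, &const T becoming *const T (for example token: *const Token). A minimal sketch of such a signature after the change, using a simplified Token type assumed for illustration rather than the one in docgen.zig:

const std = @import("std");

// Simplified stand-in for illustration; not the Token struct from docgen.zig.
const Token = struct {
    start: usize,
    end: usize,
};

// Before this commit the parameter would have been `token: &const Token`.
fn tokenLen(token: *const Token) usize {
    return token.end - token.start;
}

test "pointer-to-const parameter" {
    const token = Token{ .start = 2, .end = 5 };
    std.debug.assert(tokenLen(&token) == 3);
}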
