[FILE FORMAT CHANGED] Reverse dimensions in ggml file (makes it more similar to llama.cpp format)

saharNooby 2023-04-01 14:41:30 +04:00
parent ac03019fcf
commit 7130a89d1f
2 changed files with 7 additions and 9 deletions
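In the old format, a 2D tensor's dimensions were written in PyTorch order and the loader swapped them; in the new format they are written pre-reversed, in ggml/llama.cpp order. A minimal sketch of the difference (the tensor below is hypothetical, not from this commit):

import torch

# A PyTorch tensor with 3 rows and 2 columns.
tensor = torch.tensor([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]])

# Old format: dimensions stored in PyTorch order.
old_dims = tuple(tensor.shape)            # (3, 2)

# New format: dimensions stored reversed, in ggml order, where the
# first dimension counts elements in a row.
new_dims = tuple(reversed(tensor.shape))  # (2, 3)

assert old_dims == (3, 2)
assert new_dims == (2, 3)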

View File

@@ -192,7 +192,6 @@ struct rwkv_context * rwkv_init_from_file(const char * file_path, int n_threads)
     int32_t x = -1;
     int32_t y = -1;
-    int32_t z = -1;
 
     int32_t element_count;
 
     if (dim_count == 1) {
@@ -203,11 +202,7 @@ struct rwkv_context * rwkv_init_from_file(const char * file_path, int n_threads)
         read_int32(file, &x);
         read_int32(file, &y);
         element_count = x * y;
-        // Dimension order is reversed here:
-        // * PyTorch shape is (x rows, y columns)
-        // * ggml shape is (y elements in a row, x elements in a column)
-        // Both shapes represent the same tensor.
-        tensor = ggml_new_tensor_2d(ctx, ggml_data_type, y, x);
+        tensor = ggml_new_tensor_2d(ctx, ggml_data_type, x, y);
     } else {
         abort();
     }
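Because the file now stores dimensions in ggml order, the loader passes x and y straight to ggml_new_tensor_2d. A Python sketch of that read path (read_int32 is modeled on the C helper of the same name; '=i' matches the converter's struct.pack('=i', dim)):

import struct
from typing import BinaryIO

def read_int32(f: BinaryIO) -> int:
    # One int32 in native byte order, like the C helper read_int32(file, &x).
    return struct.unpack('=i', f.read(4))[0]

def read_2d_dims(f: BinaryIO) -> tuple:
    # Mirrors the 2D branch above: x and y arrive already in ggml order,
    # so no swap is needed before creating the tensor.
    x = read_int32(f)
    y = read_int32(f)
    return x, y, x * y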

View File

@@ -103,8 +103,11 @@ def write_state_dict(state_dict: Dict[str, torch.Tensor], dest_path: str, data_t
             1 if tensor.dtype == torch.float16 else 0
         ))
 
-        # Note that shape is not reversed here like in llama.cpp!
-        for dim in tensor.shape:
+        # Dimension order is reversed here:
+        # * PyTorch shape is (x rows, y columns)
+        # * ggml shape is (y elements in a row, x elements in a column)
+        # Both shapes represent the same tensor.
+        for dim in reversed(tensor.shape):
             out_file.write(struct.pack('=i', dim))
 
         out_file.write(k_encoded)
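A quick round-trip check of the new write loop (the 3x2 tensor is hypothetical; only the dimension serialization is exercised):

import io
import struct

import torch

tensor = torch.zeros(3, 2)

buf = io.BytesIO()
for dim in reversed(tensor.shape):
    buf.write(struct.pack('=i', dim))

# The file now holds (2, 3): ggml order, so the C side needs no swap.
assert struct.unpack('=ii', buf.getvalue()) == (2, 3)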
@@ -150,7 +153,7 @@ def test() -> None:
         2,
         10,
         0,
-        3, 2,
+        2, 3,
         'emb.weight'.encode('utf-8'),
         1.0, 2.0, 3.0,
         4.0, 5.0, 6.0,
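The expected test bytes describe a 3x2 'emb.weight' tensor, so its dimensions flip from (3, 2) to (2, 3) under the new format. A hedged reconstruction of how those values pack together (the exact format string is an assumption; the values are from the diff):

import struct

payload = struct.pack(
    '=iii' '2i' '10s' '6f',
    2,     # dim_count
    10,    # length of the key 'emb.weight'
    0,     # data type (0 = FP32, per the float16 check in the converter)
    2, 3,  # dimensions, now in reversed (ggml) order
    'emb.weight'.encode('utf-8'),
    1.0, 2.0, 3.0,
    4.0, 5.0, 6.0,
)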