Mirror of https://github.com/ggml-org/llama.cpp.git (synced 2025-10-30 08:42:00 +00:00)
	convert : fix no-lazy dtypes from direct safetensors
This commit is contained in:
```diff
@@ -209,7 +209,7 @@ class ModelBase:
                             data_gen = lambda data=data: LazyTorchTensor.from_local_tensor(data)  # noqa: E731
                         else:
                             dtype = LazyTorchTensor._dtype_str_map[data.dtype]
-                            data_gen = lambda data=data: torch.from_numpy(data.mmap_bytes()).view(dtype).reshape(data.shape)  # noqa: E731
+                            data_gen = lambda data=data, dtype=dtype: torch.from_numpy(data.mmap_bytes()).view(dtype).reshape(data.shape)  # noqa: E731
                     else:
                         data_torch: Tensor = model_part[name]
                         if self.lazy:
```
		Reference in New Issue
	
	Block a user
	 Francis Couture-Harpin
					Francis Couture-Harpin