
Commit

refactor blender addon
PavelBlend committed Feb 9, 2023
1 parent 8f24f40 commit 2451c98
Showing 8 changed files with 120 additions and 200 deletions.
24 changes: 6 additions & 18 deletions blender/__init__.py
@@ -1,10 +1,9 @@
# necessary for Blender to detect this addon
bl_info = {
    'name': 'Taichi Elements',
    'description': 'High-Performance Multi-Material Continuum Physics Engine',
    'author': 'Taichi Elements Developers',
    'version': (0, 0, 0),
    'blender': (2, 82, 0),
    'blender': (3, 4, 1),
    'location': 'Taichi Elements Window',
    'warning': 'Work in progress',
    'support': 'COMMUNITY',
@@ -13,23 +12,12 @@
    'category': 'Physics'
}

use_blender = False

try:
    # If inside blender, act as an addon
    import bpy

    use_blender = True
except:
    pass

if use_blender:
def register():
    from . import addon
    addon.register()

    def register():
        addon.register()

    def unregister():
        addon.unregister()

# Otherwise act as a PyPI package
def unregister():
    from . import addon
    addon.unregister()
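
With this refactor, register() and unregister() import the addon module lazily, so importing the package outside Blender no longer needs bpy at module load. A minimal sketch of when Blender calls these hooks; the module name 'taichi_elements' is an assumption for illustration:

import bpy

# Enabling the addon imports the package and calls its register();
# disabling it calls unregister().
bpy.ops.preferences.addon_enable(module='taichi_elements')
bpy.ops.preferences.addon_disable(module='taichi_elements')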
1 change: 0 additions & 1 deletion blender/handlers.py
@@ -83,7 +83,6 @@ def create_pobj(name):
    return par_obj


# get outputs nodes
def get_output_nodes():
    mesh_nodes = []

1 change: 1 addition & 0 deletions blender/nodes/__init__.py
@@ -3,6 +3,7 @@
import bpy

from .base import *

# categories
from .inputs import *
from .output import *
2 changes: 1 addition & 1 deletion blender/nodes/component/cache.py
@@ -38,7 +38,7 @@ def get_cache(socket):
    path = bpy.path.abspath(os.path.join(folder, name))

    if os.path.exists(path):
        particles_io.read_pars(path, caches, folder, socket.name)
        particles_io.read_pars(path, caches, folder)

    else:
        scn.elements_sockets[pos_key] = ()
2 changes: 1 addition & 1 deletion blender/operators.py
@@ -241,7 +241,7 @@ def save_particles(self, frame, np_x, np_v, np_color, np_material, np_emitters):
            particles_io.MAT: np_material,
            particles_io.EMT: np_emitters,
        }
        data = particles_io.write_pars_v1(par_data, pars_fpath, fname)
        data = particles_io.write_pars(par_data, pars_fpath, fname)

        with open(pars_fpath + '.bin', 'wb') as file:
            file.write(data)
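
write_pars() returns only the header bytes written above; judging from the collapsed part of the particles_io.py hunk below, the per-attribute arrays appear to go into separate '<fname>_<attr>.bin' files referenced by that header. A hedged round-trip sketch, reusing the surrounding names (particles_io, pars_fpath, folder) purely for illustration:

# Illustrative only: read the header back and rebuild the per-attribute caches.
caches = {}
particles_io.read_pars(pars_fpath + '.bin', caches, folder)
np_x = caches[folder][particles_io.POS]  # one numpy array per attribute id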
133 changes: 33 additions & 100 deletions blender/particles_io.py
@@ -1,28 +1,21 @@
import struct, os, time
import struct
import os
import time

import numpy

import bpy


# particles format version 0
PARS_FMT_VER_0 = 0
PARS_FMT_VER_1 = 1
# particles format support versions
PARS_FMT_SUPP = (PARS_FMT_VER_0, PARS_FMT_VER_1)
# particles format version
PARS_FMT_VER = 1

# particle attributes

# position
POS = 0
# velocity
VEL = 1
# color
COL = 2
# material id
MAT = 3
# emitter id
EMT = 4
POS = 0 # position
VEL = 1 # velocity
COL = 2 # color
MAT = 3 # material id
EMT = 4 # emitter id

# numpy attributes type
attr_types = {
@@ -44,33 +37,15 @@
attr_count = len(attr_names)


def write_pars_v0(par_data):
def write_pars(par_data, fpath, fname):
    data = bytearray()
    # particles format version
    data.extend(struct.pack('I', PARS_FMT_VER_0))
    # particles count
    pars_cnt = len(par_data[POS])
    data.extend(struct.pack('I', pars_cnt))
    # print('Particles count:', pars_cnt)

    # par_i - particles index
    for par_i in range(pars_cnt):
        data.extend(struct.pack('3f', *par_data[POS][par_i]))
        data.extend(struct.pack('3f', *par_data[VEL][par_i]))
        data.extend(struct.pack('I', par_data[COL][par_i]))
        data.extend(struct.pack('I', par_data[MAT][par_i]))

    return data


def write_pars_v1(par_data, fpath, fname):
    data = bytearray()
    # particles format version
    data.extend(struct.pack('I', PARS_FMT_VER_1))
    data.extend(struct.pack('I', PARS_FMT_VER))

    # particles count
    pars_cnt = par_data[POS].shape[0]
    data.extend(struct.pack('I', pars_cnt))
    # print('Particles count:', pars_cnt)

    for attr_id in range(attr_count):
        fname_str = '{}_{}.bin'.format(fname, attr_names[attr_id])
@@ -86,82 +61,40 @@ def write_pars_v1(par_data, fpath, fname):
    return data


def read_pars_v0(data, caches, offs, folder):
    # particles positions
    pos = []
    # particles velocities
    vel = []
    # particles colors
    col = []
    # particles materials
    mat = []
    # particles count
    count = struct.unpack('I', data[offs : offs + 4])[0]
    offs += 4

    for index in range(count):
        # particle position
        p_pos = struct.unpack('3f', data[offs : offs + 12])
        offs += 12
        pos.append(p_pos)

        # particle velocity
        p_vel = struct.unpack('3f', data[offs : offs + 12])
        offs += 12
        vel.append(p_vel)

        # particle color
        p_col = struct.unpack('I', data[offs : offs + 4])[0]
        offs += 4
        col.append(p_col)
def read_pars(file_path, caches, folder):
    with open(file_path, 'rb') as file:
        data = file.read()

        # particle material
        p_mat = struct.unpack('I', data[offs : offs + 4])[0]
        offs += 4
        mat.append(p_mat)
    # read offset in file
    offs = 0

    caches[folder] = {POS: pos, VEL: vel, COL: col, MAT: mat}
    # particles format version
    ver = struct.unpack('I', data[offs : offs + 4])[0]
    offs += 4

    if ver != PARS_FMT_VER:
        msg = 'Unsupported particles format version: {0}'.format(ver)
        raise BaseException(msg)

def read_pars_v1(data, caches, offs, folder):
    # particles count
    count = struct.unpack('I', data[offs : offs + 4])[0]
    offs += 4

    caches[folder] = {}

    for attr_id in range(attr_count):
        file_name_len = struct.unpack('I', data[offs : offs + 4])[0]
        offs += 4

        file_name_bytes = struct.unpack('{}s'.format(file_name_len), data[offs : offs + file_name_len])[0]
        file_name_bytes = struct.unpack(
            '{}s'.format(file_name_len),
            data[offs : offs + file_name_len]
        )[0]
        file_name = str(file_name_bytes, 'utf-8')
        offs += file_name_len

        file_path = bpy.path.abspath(os.path.join(folder, file_name))
        caches[folder][attr_id] = numpy.fromfile(file_path, dtype=attr_types[attr_id])


# read particles
def read_pars(file_path, caches, folder, attr_name):
    start_time = time.time()

    with open(file_path, 'rb') as file:
        data = file.read()
    # read offset in file
    offs = 0
    # particles format version
    ver = struct.unpack('I', data[offs : offs + 4])[0]
    offs += 4

    if not ver in PARS_FMT_SUPP:
        msg = 'Unsupported particles format version: {0}'.format(ver)
        raise BaseException(msg)

    if ver == PARS_FMT_VER_0:
        read_pars_v0(data, caches, offs, folder)
    elif ver == PARS_FMT_VER_1:
        read_pars_v1(data, caches, offs, folder)

    end_time = time.time()
    total_time = end_time - start_time
    # print('read particles {}: {:.4f} seconds'.format(attr_name.lower(), total_time))
        caches[folder][attr_id] = numpy.fromfile(
            file_path,
            dtype=attr_types[attr_id]
        )
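
Taken together, the visible hunks suggest this layout for the header file: a uint32 format version, a uint32 particle count, then for each attribute a uint32 name length followed by the UTF-8 name of the per-attribute array file that numpy.fromfile loads. A standalone parsing sketch assuming that layout; parse_pars_header is not part of the commit:

import struct

def parse_pars_header(data, attr_count):
    # data: raw bytes of the header file produced by write_pars()
    offs = 0
    ver, count = struct.unpack_from('2I', data, offs)
    offs += 8
    names = []
    for _ in range(attr_count):
        (name_len,) = struct.unpack_from('I', data, offs)
        offs += 4
        names.append(data[offs:offs + name_len].decode('utf-8'))
        offs += name_len
    return ver, count, names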
