Python import grinds to a halt

So my ultimate goal is to animate a full 3D Voronoi tessellation. I have a system of 8000 particles and their positions across 81 frames. I’ve used Voro++ to generate Voronoi cells about each particle. Voro++ gives me a file containing “Particle/Cell ID, # of Vertices, # of Faces, List of global vertex positions, List of sets of vertices that define each face”. I import these polyhedra into Blender and use their geometry to define shape keys for each cell mesh.

That works fine when I test it on a few particles. The problem I encounter after ~50 cells is that the script slows down immensely — to the point that it’d take several days to import the entire simulation. Attempts to isolate the issue show that I have the same problem when I comment out the script such that the only thing it does is import polyhedron data for one cell and then delete it (and loops over each cell).

Any ideas or suggestions for an alternative way to do this? The trick to this one was to find a way to animate polyhedra that change the # of vertices — something I understand Blender cannot do easily. Thanks for any help.


import math
import bpy
import mathutils

# ---------------------------------------------------------------------------
# Import per-frame Voronoi polyhedra and animate them as shape keys on
# shrinkwrapped spheres.
#
# Input file format (one line per cell per frame):
#   N V F  (x,y,z) ... (x,y,z)  (v0,v1,...) ... (v0,v1,...)
# where N is the particle/cell id, V the vertex count, F the face count,
# followed by V vertex positions and F faces given as vertex-index tuples.
#
# NOTE(review): the progressive slowdown reported with this script is
# dominated by the bpy.ops.* calls inside the loops (origin_set,
# modifier_add, modifier_apply, delete) -- each operator call triggers a
# full scene update.  Prefer the low-level bpy.data API where possible.
# ---------------------------------------------------------------------------

# Animation information: 81 stored frames, spaced FR timeline frames apart.
FR = 10
bpy.context.scene.frame_start = 1
bpy.context.scene.frame_end = FR * 80

# Original sphere from which all subsequent spheres are copied.
bpy.ops.mesh.primitive_uv_sphere_add(location=(0, 0, 0), size=10,
                                     segments=16, ring_count=16)
sphere_template = bpy.data.objects["Sphere"]
sphere_template.name = "Sphere_0"
sphere_template.select = False

with open("...", 'r') as file:

    # Loop over polyhedra data, one cell/frame per line.
    for n, line in enumerate(file):

        Current = line.split(" ")
        N = int(Current[0])      # Particle/cell number
        V = int(Current[1])      # Number of vertices
        F = int(Current[2])      # Number of faces

        # Parse the V vertex positions, each formatted as "(x,y,z)".
        Vertices = []
        for i in range(V):
            tmp = Current[i + 3].strip("()").split(",")
            Points = [float(point) for point in tmp]
            Vertices.append(mathutils.Vector((Points[0], Points[1], Points[2])))

        # Parse the F faces, each a "(v0,v1,...)" tuple of vertex indices.
        # The last token on the line carries the trailing newline, hence the
        # extra strip.
        Faces = []
        for i in range(F):
            tmp = Current[i + 3 + V].strip("\n").strip("()").split(",")
            Faces.append([int(point) for point in tmp])

        # Create the polyhedron from vertex and face data.  Set its origin.
        frame_idx = n % 81
        NewMesh = bpy.data.meshes.new("Polygon_Mesh_%d_%d" % (frame_idx, N))
        NewMesh.from_pydata(Vertices, [], Faces)
        NewObj = bpy.data.objects.new("Polygon_%d_%d" % (frame_idx, N), NewMesh)
        bpy.context.scene.objects.link(NewObj)
        NewObj.select = True
        bpy.ops.object.origin_set(type="ORIGIN_GEOMETRY")
        NewObj.select = False
        del Vertices, Faces

        # Once all 81 frames of polyhedra for cell N are in, build its sphere
        # and turn each polyhedron into one shape key.  (Assumes the input is
        # grouped per particle, so the last line of cell N is line 81*N - 1.)
        if n == 81 * N - 1:
            # Copy the template sphere and rename the copy directly, instead
            # of looking it up via Blender's ".001" auto-suffix -- that
            # lookup breaks as soon as the suffixed name is already taken.
            cell_sphere = sphere_template.copy()
            cell_sphere.data = cell_sphere.data.copy()
            cell_sphere.name = "Sphere_" + str(N)
            bpy.context.scene.objects.link(cell_sphere)

            # Loop over the 81 stored polyhedra for this cell.
            for i in range(81):
                poly = bpy.data.objects["Polygon_%d_%d" % (i, N)]

                # Key the sphere's location to follow the cell centre.
                cell_sphere.location = poly.location
                cell_sphere.keyframe_insert(data_path='location',
                                            frame=1 if i == 0 else i * FR)

                # Add a shrinkwrap modifier and apply it as a shape key.
                bpy.context.scene.objects.active = cell_sphere
                bpy.ops.object.modifier_add(type="SHRINKWRAP")
                cell_sphere.modifiers[0].target = poly
                cell_sphere.modifiers[0].offset = 0.0001
                bpy.ops.object.modifier_apply(apply_as='SHAPE',
                                              modifier="Shrinkwrap")

                # Delete the polyhedron; its geometry now lives in the key.
                poly.select = True
                bpy.ops.object.delete()
                bpy.data.meshes.remove(
                    bpy.data.meshes["Polygon_Mesh_%d_%d" % (i, N)])

            # Keyframe the shape keys so exactly one is active per stored
            # frame.  NOTE(review): indexing bpy.data.shape_keys[N-1] assumes
            # shape-key datablocks were created in particle order -- fragile;
            # confirm this holds for your import order.
            keys = bpy.data.shape_keys[N - 1].key_blocks
            for i in range(1, 82):
                if i == 1:
                    keys[1].value = 1.0
                    keys[2].value = 0.0
                    keys[1].keyframe_insert("value", frame=1)
                    keys[2].keyframe_insert("value", frame=1)
                elif i < 81:
                    keys[i - 1].value = 0.0
                    keys[i].value = 1.0
                    keys[i + 1].value = 0.0
                    keys[i - 1].keyframe_insert("value", frame=FR * (i - 1))
                    keys[i].keyframe_insert("value", frame=FR * (i - 1))
                    keys[i + 1].keyframe_insert("value", frame=FR * (i - 1))
                else:  # i == 81
                    keys[80].value = 0.0
                    keys[81].value = 1.0
                    keys[80].keyframe_insert("value", frame=FR * 80)
                    keys[81].keyframe_insert("value", frame=FR * 80)

            print(N)

Have a look here:

You should avoid any operator calls in loops. It might also be advisable to toggle operators off during import, and only enable them as a last step.

Thanks for the reply. I did come across that posting and believe I incorporated some of your code from the references there. The low-level approach is certainly more appealing and if it’s faster then all the better. I did swap out all of the bpy.ops calls and see an improvement up until ~100 cells. I’m not entirely convinced that implicit updates are causing the problem (at least updates that I’m aware of). I encounter similar slow-down when running the following code:

import math
import bpy
import mathutils

# ---------------------------------------------------------------------------
# Minimal reproduction of the slowdown: read each set of 81 polyhedra using
# only the low-level bpy.data API (no operator calls in loops), then delete
# them again.  Input line format: N V F  (x,y,z)*V  (v0,v1,...)*F.
# ---------------------------------------------------------------------------

# Animation information: 81 stored frames, spaced FR timeline frames apart.
FR = 10
bpy.context.scene.frame_start = 1
bpy.context.scene.frame_end = FR * 80

# Original sphere from which all subsequent spheres are copied.
bpy.ops.mesh.primitive_uv_sphere_add(location=(0, 0, 0), size=10,
                                     segments=16, ring_count=16)
bpy.data.objects["Sphere"].name = "Sphere_0"
bpy.data.objects["Sphere_0"].select = False

with open("...", 'r') as file:

    # Loop over polyhedra data, one cell/frame per line.
    for n, line in enumerate(file):

        Current = line.split(" ")
        N = int(Current[0])      # Particle/cell number
        V = int(Current[1])      # Number of vertices
        F = int(Current[2])      # Number of faces

        # Parse the V vertex positions, each formatted as "(x,y,z)".
        Vertices = []
        for i in range(V):
            tmp = Current[i + 3].strip("()").split(",")
            Points = [float(point) for point in tmp]
            Vertices.append(mathutils.Vector((Points[0], Points[1], Points[2])))

        # Cell origin = centroid of its vertices.
        origin = mathutils.Vector((0.0, 0.0, 0.0))
        for vert in Vertices:
            origin += vert
        origin = origin / len(Vertices)

        # Parse the F faces, each a "(v0,v1,...)" tuple of vertex indices.
        # The last token on the line carries the trailing newline, hence the
        # extra strip.
        Faces = []
        for i in range(F):
            tmp = Current[i + 3 + V].strip("\n").strip("()").split(",")
            Faces.append([int(point) for point in tmp])

        # Create the polyhedron from vertex and face data (low-level API
        # only; the object is deliberately never linked into the scene).
        frame_idx = n % 81
        NewMesh = bpy.data.meshes.new("Polygon_Mesh_%d_%d" % (frame_idx, N))
        NewMesh.from_pydata(Vertices, [], Faces)
        NewObj = bpy.data.objects.new("Polygon_%d_%d" % (frame_idx, N), NewMesh)
        NewObj.location = origin
        del Vertices, Faces

        # Once all 81 frames of cell N have been read, delete them again.
        # (Assumes the input is grouped per particle, so the last line of
        # cell N is line 81*N - 1.)
        if n == 81 * N - 1:
            for i in range(81):
                bpy.data.objects.remove(
                    bpy.data.objects["Polygon_%d_%d" % (i, N)])
                bpy.data.meshes.remove(
                    bpy.data.meshes["Polygon_Mesh_%d_%d" % (i, N)])
                print(N)

Which isn’t doing much more than reading in every set of 81 polyhedra and then deleting them. As far as I’ve been able to read, there shouldn’t be any of these implicit updates.