Commit

add planning
YoruCathy committed Jul 8, 2023
1 parent 873a1d3 commit 5b62a77
Showing 8 changed files with 444 additions and 0 deletions.
33 changes: 33 additions & 0 deletions planning/URDF/python_script/combine2fbx.py
@@ -0,0 +1,33 @@
import bpy
import sys
import os

def combine2fbx(from_path, to_path, hier_file):
    # Import every OBJ part in from_path, parent the parts according to
    # hier_file, then export the assembled hierarchy as a single FBX.
    # Each hier_file line reads: <this_object> <parent> <level>.

    for item in os.listdir(from_path):
        bpy.ops.import_scene.obj(filepath=from_path+"/"+item)

    objs = bpy.data.objects
    objs.remove(objs["Cube"], do_unlink=True)    # drop Blender's default scene objects
    objs.remove(objs["Camera"], do_unlink=True)
    objs.remove(objs["Light"], do_unlink=True)

    with open(hier_file) as f:
        for line in f:
            this_object, parent, _ = line.strip().split(" ")
            if parent != "None":
                try:
                    objs[this_object].parent = objs[parent]
                    objs[this_object].matrix_parent_inverse = objs[parent].matrix_world.inverted()
                except KeyError:
                    print("Warning: Virtual link exists!")

    bpy.ops.export_scene.fbx(filepath=to_path)

argv = sys.argv
argv = argv[argv.index("--") + 1:]  # Blender forwards everything after "--" to the script
print(argv)
combine2fbx(argv[0], argv[1], argv[2])
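
Since combine2fbx.py imports bpy and takes its arguments from the portion of sys.argv after "--" (Blender's convention for forwarding CLI arguments to a script), it is presumably meant to run under Blender's bundled Python rather than a standalone interpreter. A possible invocation, with all paths being placeholders:

    blender --background --python combine2fbx.py -- <obj_parts_dir> <output.fbx> <hier.txt>

Here <obj_parts_dir> would hold the per-part OBJ files, <output.fbx> is the combined export, and <hier.txt> appears to be the "<child> <parent> <level>" file written by extract_vertex_group.py further down in this commit.
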
103 changes: 103 additions & 0 deletions planning/URDF/python_script/extract_parts.py
@@ -0,0 +1,103 @@
import numpy as np
import argparse
import os

def parse_obj_with_group(obj_name):
    vlines = []
    vnlines = []
    glines = {}
    with open(obj_name) as f:
        for line in f:
            if line.startswith("v "):  # vertex position line
                vlines.append(line)
            if line.startswith("vn "):  # vertex normal line
                vnlines.append(line)
            if line.startswith("g"):  # group line: following faces belong to this part
                g_name = line.strip().split(" ")[-1]
                try:
                    print(g_name, len(glines[g_name]))  # group seen before: report its current face count
                except KeyError:
                    glines[g_name] = []  # first occurrence of this group
                g_label = g_name
            if line.startswith("f"):  # face line, assigned to the current group
                glines[g_name].append(line)
    return vlines, vnlines, glines


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("object_name", type=str)
    parser.add_argument("-o", "--output_dir", type=str, default="smpl_parts")
    parser.add_argument("--hier", type=str)
    args = parser.parse_args()
    #obj_name = "SMPL_f_unityDoubleBlends_lbs_10_scale5_207_v1.0.0.obj"
    #obj_name = "SMPL_f_unityDoubleBlends_lbs_10_scale5_207_v1.0.0_groups.obj"
    obj_name = args.object_name

    output_dir = args.output_dir

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    hier_path = args.hier
    hier_dict = {}
    co_vertex_dict = {}
    with open(hier_path) as f:
        for line in f:
            child, parent, _ = line.strip().split()
            if parent != "None" and parent != "f_avg_root":
                hier_dict[child] = parent

    vlines, vnlines, glines = parse_obj_with_group(obj_name)
    all_counter = 0
    face_id_group_mapping = {}
    part_v_seq_new_index_mapping = {}
    for part_name in glines.keys():  # for each part
        v_seq = []
        vn_seq = []
        for line in glines[part_name]:
            head, face1, face2, face3 = line.strip().split()
            v1i, vt1i, vn1i = face1.split("/")
            v2i, vt2i, vn2i = face2.split("/")
            v3i, vt3i, vn3i = face3.split("/")
            v_seq.append([int(v1i), int(v2i), int(v3i)])
            vn_seq.append([int(vn1i), int(vn2i), int(vn3i)])

        v_seq_unique = np.unique(v_seq)
        vn_seq_unique = np.unique(vn_seq)

        face_id_group_mapping[part_name] = set(v_seq_unique)

        v_seq_new_index_mapping = {}
        vn_seq_new_index_mapping = {}
        for i in range(len(v_seq_unique)):
            v_seq_new_index_mapping[v_seq_unique[i]] = i+1
        for i in range(len(vn_seq_unique)):
            vn_seq_new_index_mapping[vn_seq_unique[i]] = i + 1
        part_v_seq_new_index_mapping[part_name] = v_seq_new_index_mapping

        with open(output_dir+"/"+part_name+".obj", "w") as fout:
            for vi in v_seq_unique:
                fout.write(vlines[vi-1])
            for vni in vn_seq_unique:
                fout.write(vnlines[vni-1])
            for line in glines[part_name]:
                head, face1, face2, face3 = line.strip().split()
                v1i, vt1i, vn1i = face1.split("/")
                v2i, vt2i, vn2i = face2.split("/")
                v3i, vt3i, vn3i = face3.split("/")
                new_v1i = v_seq_new_index_mapping[int(v1i)]
                new_v2i = v_seq_new_index_mapping[int(v2i)]
                new_v3i = v_seq_new_index_mapping[int(v3i)]
                new_vn1i = vn_seq_new_index_mapping[int(vn1i)]
                new_vn2i = vn_seq_new_index_mapping[int(vn2i)]
                new_vn3i = vn_seq_new_index_mapping[int(vn3i)]
                fout.write(head+" "+str(new_v1i)+"//"+str(new_vn1i)+" "+str(new_v2i)+"//"+str(new_vn2i)+" "+str(new_v3i)+"//"+str(new_vn3i)+"\n")

    with open(hier_path.replace("hier", "covertex"), "w") as f:
        for key in hier_dict.keys():
            intersect_ids = face_id_group_mapping[key].intersection(face_id_group_mapping[hier_dict[key]])
            selected_id = int(list(intersect_ids)[0])
            current_child = int(part_v_seq_new_index_mapping[key][selected_id])
            current_parent = int(part_v_seq_new_index_mapping[hier_dict[key]][selected_id])
            f.write(key+" "+hier_dict[key]+" "+str(selected_id)+" "+str(current_child)+" "+str(current_parent)+"\n")
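
extract_parts.py is a plain argparse script, so a sketch of an invocation (paths are placeholders) would be:

    python extract_parts.py <grouped_model.obj> -o smpl_parts --hier <hier.txt>

It splits the grouped OBJ into one OBJ per vertex group, re-indexing vertex and normal references within each part, and writes a companion "covertex" file (the --hier path with "hier" replaced by "covertex") recording, for every child part, one vertex it shares with its parent, together with that vertex's new index in both parts.
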
47 changes: 47 additions & 0 deletions planning/URDF/python_script/extract_vertex_group.py
@@ -0,0 +1,47 @@
import bpy
import sys
import os

def find_max_group(weights):
    max_weight = 0
    max_index = 0
    for i in range(len(weights)):
        item = weights[i]
        if item > max_weight:
            max_weight = item
            max_index = i
    return max_index

def extractVertexGroup(from_path, to_path, hier_path):
    to_dir = "/".join(to_path.split("/")[:-1])
    res_dir = "/".join(hier_path.split("/")[:-1])
    if not os.path.exists(to_dir):
        os.makedirs(to_dir)
    if not os.path.exists(res_dir):
        os.makedirs(res_dir)

    objs = bpy.data.objects
    objs.remove(objs["Cube"], do_unlink=True)    # drop Blender's default scene objects
    objs.remove(objs["Camera"], do_unlink=True)
    objs.remove(objs["Light"], do_unlink=True)

    bpy.ops.import_scene.fbx(filepath=from_path)
    print(bpy.data.armatures.keys())
    real_armature = bpy.data.armatures['SMPLX-female']
    print(real_armature.bones.keys())

    with open(hier_path, "w") as f:  # one "<bone> <parent> <level>" line per bone
        for key in real_armature.bones.keys():
            if real_armature.bones[key].parent is None:
                f.write(key+" None 0\n")
            elif real_armature.bones[key].parent.name.find("root") != -1:
                f.write(key+" "+real_armature.bones[key].parent.name+" 0\n")
            else:
                f.write(key+" "+real_armature.bones[key].parent.name+" 1\n")

    bpy.ops.export_scene.obj(filepath=to_path, keep_vertex_order=True, use_vertex_groups=True)

argv = sys.argv
argv = argv[argv.index("--") + 1:]

extractVertexGroup(argv[0], argv[1], argv[2])
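
Like combine2fbx.py, this script needs bpy and reads its arguments after "--", so it is presumably launched through Blender; a possible command, with placeholder paths:

    blender --background --python extract_vertex_group.py -- <smplx_model.fbx> <grouped.obj> <hier.txt>

The armature name 'SMPLX-female' is hard-coded, so the input FBX is assumed to be an SMPL-X female model. The script records the bone hierarchy as "<bone> <parent> <level>" lines and exports the mesh as an OBJ with vertex groups and the original vertex order preserved.
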
119 changes: 119 additions & 0 deletions planning/URDF/python_script/generate_rough_urdf.py
@@ -0,0 +1,119 @@
import argparse
import numpy as np

def process_a_base(f, link_name, xyz, rpy, inertial=None):
    f.write(' <link name="{}">\n'.format(link_name))
    f.write(' <inertial>\n')
    f.write(' <origin xyz="{} {} {}" rpy="{} {} {}" />\n'.format(xyz[0], xyz[1], xyz[2], rpy[0], rpy[1], rpy[2]))
    f.write(' <mass value="0" />\n')
    if inertial is None:
        f.write(' <inertia ixx="0" ixy="0" ixz="0" iyy="0" iyz="0" izz="0" />\n')
    else:
        f.write(' <inertia ixx="{}" ixy="{}" ixz="{}" iyy="{}" iyz="{}" izz="{}" />\n'.format(inertial[0], inertial[1], inertial[2], inertial[3], inertial[4], inertial[5]))
    f.write(' </inertial></link>\n')

def process_a_link(f, link_name, link_path, xyz, rpy, inertial=None, color=None):
    f.write(' <link name="{}">\n'.format(link_name))
    f.write(' <inertial>\n')
    f.write(' <origin xyz="{} {} {}" rpy="{} {} {}" />\n'.format(xyz[0], xyz[1], xyz[2], rpy[0], rpy[1], rpy[2]))
    f.write(' <mass value="1" />\n')
    if inertial is None:
        f.write(' <inertia ixx="0" ixy="0" ixz="0" iyy="0" iyz="0" izz="0" />\n')
    else:
        f.write(' <inertia ixx="{}" ixy="{}" ixz="{}" iyy="{}" iyz="{}" izz="{}" />\n'.format(inertial[0], inertial[1], inertial[2], inertial[3], inertial[4], inertial[5]))
    f.write(' </inertial>\n')
    f.write(' <visual>\n')
    f.write(' <origin xyz="{} {} {}" rpy="{} {} {}" />\n'.format(xyz[0], xyz[1], xyz[2], rpy[0], rpy[1], rpy[2]))
    f.write(' <geometry><mesh filename="{}" /></geometry>\n'.format(link_path))
    if color is None:
        f.write(' <material name=""><color rgba="0.827450980392157 0.83921568627451 0.827450980392157 1" /></material>\n')
    else:
        f.write(' <material name=""><color rgba="{} {} {} 1" /></material>\n'.format(color[0], color[1], color[2]))
    f.write(' </visual>\n')
    f.write(' <collision>\n')
    f.write(' <origin xyz="{} {} {}" rpy="{} {} {}" />\n'.format(xyz[0], xyz[1], xyz[2], rpy[0], rpy[1], rpy[2]))
    f.write(' <geometry><mesh filename="{}" /></geometry>\n'.format(link_path))
    f.write(' </collision></link>\n')

def process_a_1d_joint(f, joint_name, parent, child, xyz, rpy, joint_type="revolute", limits=[0,0,0,0], axis=[0,1,0]):
    f.write(' <joint name="{}" type="{}">\n'.format(joint_name, joint_type))
    f.write(' <origin xyz="{} {} {}" rpy="{} {} {}" />\n'.format(xyz[0], xyz[1], xyz[2], rpy[0], rpy[1], rpy[2]))
    f.write(' <parent link="{}" />\n'.format(parent))
    f.write(' <child link="{}" />\n'.format(child))
    f.write(' <axis xyz="{} {} {}" />\n'.format(axis[0], axis[1], axis[2]))
    f.write(' <limit lower="{}" upper="{}" effort="{}" velocity="{}" />\n'.format(limits[0], limits[1], limits[2], limits[3]))
    f.write(' </joint>\n')

def process_a_3d_joint(f, parent, child, xyz, rpy, limits=None):  # three stacked revolute joints (x, y, z) via two dummy links
    fake_link_name_x = parent+"_"+child+"_rot_link_x"
    fake_link_name_y = parent+"_"+child+"_rot_link_y"
    f.write(' <link name="{}"></link>\n'.format(fake_link_name_x))
    f.write(' <link name="{}"></link>\n'.format(fake_link_name_y))
    process_a_1d_joint(f, parent+"_"+child+"_joint_x", parent, fake_link_name_x, [0,0,0], rpy, limits=limits, axis=[1,0,0])
    process_a_1d_joint(f, parent+"_"+child+"_joint_y", fake_link_name_x, fake_link_name_y, [0,0,0], rpy, limits=limits, axis=[0,1,0])
    process_a_1d_joint(f, parent+"_"+child+"_joint_z", fake_link_name_y, child, xyz, rpy, limits=limits, axis=[0,0,1])

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("-i", "--part_dir", type=str, default="smpl_stl_normal")
    parser.add_argument("-o", "--urdf_name", type=str, default="test.urdf")
    parser.add_argument("-n", "--package_name", type=str, default="smpl_description")
    parser.add_argument("--offset_path", type=str)
    parser.add_argument("--hier_path", type=str)
    args = parser.parse_args()


    urdf_name = args.urdf_name
    part_dir_path = args.part_dir

    part_pos_dict = {}
    with open(args.offset_path) as f:
        for line in f:
            name, x, y, z = line.strip().split(" ")
            part_pos_dict[name.split(".")[0]] = np.array([float(x), float(y), float(z)])

    # for Unity
    unity_urdf_name = urdf_name.replace(".urdf", "_for_unity.urdf")
    with open(unity_urdf_name, "w") as f:
        f.write('<robot name="{}">\n'.format(unity_urdf_name.split(".")[0].split("/")[-1]))

        with open(args.hier_path) as f1:
            for line in f1:
                child, parent, _ = line.strip().split(" ")
                if parent == "None":
                    process_a_base(f, child, xyz=[0,0,0], rpy=[0,0,0])
                else:
                    link_path = "package://"+part_dir_path+"/"+child+".stl"
                    process_a_link(f, child, link_path, xyz=[0,0,0], rpy=[0,0,0])
                    if parent.find("root") != -1:
                        process_a_1d_joint(f, child+"_joint", parent, child, xyz=[0,0,0], rpy=[0,0,0], joint_type="fixed")
                    else:
                        xyz = part_pos_dict[child] - part_pos_dict[parent]
                        process_a_1d_joint(f, child+"_joint", parent, child, xyz=xyz, rpy=[0,0,0], limits=[0,np.pi,0,0])

        f.write("</robot>\n")

    # for MoveIt
    moveit_urdf_name = urdf_name.replace(".urdf", "_for_moveit.urdf")
    with open(moveit_urdf_name, "w") as f:
        f.write('<robot name="{}">\n'.format(moveit_urdf_name.split(".")[0].split("/")[-1]))

        with open(args.hier_path) as f1:
            for line in f1:
                child, parent, level = line.strip().split(" ")
                if parent == "None":
                    process_a_base(f, child, xyz=[0,0,0], rpy=[0,0,0])
                else:
                    link_path = "package://"+args.package_name+"/"+part_dir_path+"/"+child+".stl"
                    process_a_link(f, child, link_path, xyz=[0,0,0], rpy=[0,0,0])
                    if level == "0":
                        if parent.find("root") != -1:
                            process_a_1d_joint(f, child+"_joint", parent, child, xyz=[0,0,0], rpy=[0,0,0], joint_type="fixed")
                        else:
                            xyz = part_pos_dict[child] - part_pos_dict[parent]
                            process_a_1d_joint(f, child+"_joint", parent, child, xyz=xyz, rpy=[0,0,0], limits=[0,np.pi,0,0])
                    else:
                        xyz = part_pos_dict[child] - part_pos_dict[parent]
                        process_a_3d_joint(f, parent, child, xyz=xyz, rpy=[0,0,0], limits=[0, np.pi, 0, 0])

        f.write("</robot>\n")
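
A sketch of an invocation (values beyond the argparse defaults are placeholders):

    python generate_rough_urdf.py -i smpl_stl_normal -o smpl.urdf -n smpl_description --offset_path <offsets.txt> --hier_path <hier.txt>

This produces two URDFs from the same hierarchy file: smpl_for_unity.urdf, which references meshes as package://<part_dir>/<part>.stl and uses a single revolute (or fixed) joint per link, and smpl_for_moveit.urdf, which additionally prefixes the package name and expands every level-1 connection into an x/y/z chain of revolute joints via process_a_3d_joint.
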
48 changes: 48 additions & 0 deletions planning/URDF/python_script/normalize_part.py
@@ -0,0 +1,48 @@
import os
import argparse
import numpy as np

def parse_obj(obj_name):
    vertex_data = []
    flines = []
    with open(obj_name) as f:
        for line in f:
            if line.startswith("v "):  # it is vertex line
                head, x, y, z = line.strip().split(" ")
                vertex_data.append([float(x), float(y), float(z)])
            if line.startswith("f"):
                flines.append(line)
    return np.array(vertex_data), flines

def get_center(vertex):
    return np.mean(vertex, axis=0)

def write_obj(path, vertex, faces):
    with open(path, "w") as f:
        for item in vertex:
            f.write("v "+str(item[0])+" "+str(item[1])+" "+str(item[2])+"\n")
        for line in faces:
            f.write(line)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("-i", "--input_dir", type=str, default="smpl_stl")
    parser.add_argument("-o", "--output_dir", type=str, default="smpl_parts")
    parser.add_argument("--offset_path", type=str)
    args = parser.parse_args()

    input_dir = args.input_dir
    part_list = os.listdir(input_dir)
    write_dir = args.output_dir

    if not os.path.exists(write_dir):
        os.makedirs(write_dir)

    with open(args.offset_path, "w") as f:
        for part in part_list:
            vertex, faces = parse_obj(input_dir+"/"+part)
            center = get_center(vertex)
            f.write(part+" "+str(center[0])+" "+str(center[1])+" "+str(center[2])+"\n")
            vertex -= center
            write_obj(write_dir+"/"+part, vertex, faces)
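
A possible invocation, with placeholder paths:

    python normalize_part.py -i smpl_parts -o <normalized_parts_dir> --offset_path <offsets.txt>

Each part is re-centred at its vertex centroid, and the removed offsets are written one per line as "<part>.obj x y z". This looks like the same "name x y z" format that generate_rough_urdf.py reads through its own --offset_path flag, though the commit itself does not spell out the intended pipeline order.
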
21 changes: 21 additions & 0 deletions planning/URDF/python_script/obj2stl.py
@@ -0,0 +1,21 @@
import pymeshlab
import os
import argparse

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument("-i", "--input_dir", type=str, default="smpl_stl")
    parser.add_argument("-o", "--output_dir", type=str, default="smpl_parts")
    args = parser.parse_args()
    input_dir = args.input_dir
    output_dir = args.output_dir

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    for item in os.listdir(input_dir):
        if item.split(".")[-1] != "obj":
            continue
        ms = pymeshlab.MeshSet()
        ms.load_new_mesh(input_dir+"/"+item)
        ms.save_current_mesh(output_dir+"/"+item.replace(".obj", ".stl"))
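
obj2stl.py relies on pymeshlab (installable with pip install pymeshlab) rather than Blender, so it runs under a regular Python interpreter. A possible invocation, with placeholder directory names:

    python obj2stl.py -i <normalized_parts_dir> -o smpl_stl_normal

Each .obj in the input directory is loaded into a fresh MeshSet and re-saved with an .stl extension, matching the <part>.stl mesh filenames that generate_rough_urdf.py references.
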