Forum Moderators: Staff
Poser Python Scripting F.A.Q (Last Updated: 2024 Sep 18 2:50 am)
from __future__ import print_function
import numpy as NP
import os
import sys
import time
try:
import poser
except ImportError:
raise RuntimeError("Script must run in Poser.")
SCENE = poser.Scene()

# --- user-editable constants -------------------------------------------------
SCALEFACTOR = 100     # Change this to the needs of your modeller.
USE_GROUPS = False    # export group names (False for full-body-morphs)
EXPORT_UV = True      # not needed for morphs, but nicer to model with textures ;)
USE_MATERIAL = True   # export material names (should be True if UV is exported)

# Directory to store OBJ files.
# Default: Directory where this script lives + "MORPHS".
_scriptdir = os.path.dirname(sys.argv[0])
_dirname = "MORPHS"
BASE_DIR = os.path.abspath(os.path.join(_scriptdir, _dirname))
if not os.path.isdir(BASE_DIR):
    try:
        os.makedirs(BASE_DIR)
    except OSError:
        # os.makedirs raises OSError, not IOError, on failure.
        raise RuntimeError("Can't create directory '{}'.\n"
                           "Please change variable BASE_DIR.".format(BASE_DIR))

# Figure to work with.
FIGURE = SCENE.CurrentFigure()

# Default filename is "Fullbody-<figurename>.obj" (Poser output)
# and "Fullbody-<figurename>_mod.obj" (Poser input).
# Example: Fullbody-LaFemme_1R1.obj / Fullbody-LaFemme_1R1_mod.obj
OUT_FILE = os.path.join(BASE_DIR, "Fullbody-{}.obj".format(FIGURE.Name()))
IN_FILE = os.path.join(BASE_DIR, "Fullbody-{}_mod.obj".format(FIGURE.Name()))

# Forced floating-point precision. Mainly to help avoid floating-point
# errors while reading files from external modelers.
PRECISION = 8
NP_PRECISION = NP.float32
# ---------------- end of editable constants

PATH = os.path.dirname(__file__)  # default path for save (path of this script)

# Helpers: convert one Poser vertex / texture-vertex into a numpy array.
np_vertex = lambda v: NP.array((v.X(), v.Y(), v.Z()), NP_PRECISION)
np_tvertex = lambda v: NP.array((v.U(), v.V()), NP_PRECISION)
def collect_geometry(figure=FIGURE, scalefactor=SCALEFACTOR):
    """
    Gather the unimesh geometry of *figure* as numpy arrays.

    Returns a dict with the (scaled) world-space vertices, polygon and
    set data, texture data, and the actor bookkeeping needed to map
    unimesh vertex indices back to per-actor vertices on import.
    """
    geom, actors, actor_indices = figure.UnimeshInfo()

    # Allocate the unimesh vertex array; filled actor by actor below.
    verts = NP.zeros((geom.NumVertices(), 3), NP_PRECISION)
    tverts = NP.array([np_tvertex(tv) for tv in geom.TexVertices()], NP_PRECISION)
    sets = NP.array(geom.Sets(), NP.int32)
    tsets = NP.array(geom.TexSets(), NP.int32)

    # Replace unimesh vertices with each actor's posed world vertices.
    for a_idx, act in enumerate(actors):
        wverts = act.Geometry().WorldVertices()
        for local_i, global_i in enumerate(actor_indices[a_idx]):
            verts[global_i] = np_vertex(wverts[local_i])

    return {
        "vertices": verts * scalefactor,
        "sets": sets,
        "polygons": geom.Polygons,
        "tex_vertices": tverts,
        "tsets": tsets,
        "tex_polygons": geom.TexPolygons,
        "actorlist": actors,
        "actor_indices": actor_indices,
        "materials": geom.Materials,
    }
def read_verts_from_file(filename):
    """
    Read vertex lines ("v x y z") from a Wavefront obj-file.

    Typically a figure exported from Poser and modified with an external
    modeller (Blender etc).

    :param filename: path to the OBJ file to read.
    :returns: numpy array of shape (n, 3), dtype NP_PRECISION.
    :raises RuntimeError: if the file is not accessible or a vertex line
        does not contain exactly three coordinates.
    """
    try:
        fh = open(filename, "r")
    except IOError:
        raise RuntimeError("File '{}' does not exist or is not accessible.".
                           format(filename))
    vertices = list()
    idx = 0
    with fh:
        for line in fh:
            c, _, v = line.strip().partition(" ")
            if c == "v":
                # list() is required: in Python 3 map() returns an
                # iterator which has no len().
                vert = list(map(float, v.split()))
                if len(vert) == 3:
                    vertices.append(vert)
                else:
                    raise RuntimeError("Problem at vertex # {}: {}".
                                       format(idx, line))
                idx += 1
            # Remove following clause if vertices are not in one block,
            # so the whole file is processed.
            elif c in ("vt", "vn", "f"):
                break
    return NP.array(vertices, NP_PRECISION)
def write_matfile(filename, materials):
    """
    Write out a simple Wavefront material (.mtl) file.

    :param filename: path of the .mtl file to (over)write.
    :param materials: iterable of Poser material objects providing
        Name(), Ns(), DiffuseColor(), TextureMapFileName() and
        BumpMapFileName().
    :raises RuntimeError: if the file can't be created or written.
    """
    try:
        mfh = open(filename, "w")
    except IOError:
        raise RuntimeError("Can't create or write to file '{}'.\n"
                           "Make sure directory '{}' exist and is writable.".
                           format(filename, os.path.dirname(filename)))
    with mfh:
        for mat in materials:
            print("newmtl", mat.Name(), file=mfh)
            print("Ns", mat.Ns(), file=mfh)
            print("Ka", "0 0 0", file=mfh)
            print("Kd", " ".join(map(str, mat.DiffuseColor())), file=mfh)
            print("Ks", "0 0 0", file=mfh)
            # Texture and bump maps are optional; write only if set.
            if mat.TextureMapFileName():
                print("map_Kd", mat.TextureMapFileName(), file=mfh)
            if mat.BumpMapFileName():
                print("map_Bump", mat.BumpMapFileName(), file=mfh)
def export(filename,
           vertices=None, sets=None, polygons=None,
           tex_vertices=None, tsets=None, tex_polygons=None,
           materials=None, **kwargs):
    """
    Export the collected figure geometry as a Wavefront obj-file.

    Parameters mirror the dict returned by collect_geometry(); extra
    entries (actorlist, actor_indices) are swallowed by **kwargs.
    Note: polygons, tex_polygons and materials are the *callables* from
    the Poser geometry object, not evaluated lists.

    :raises RuntimeError: if the file can't be created or written.
    :returns: True on success.
    """
    try:
        fh = open(filename, "w")
    except IOError:
        raise RuntimeError("Can't create or write to file '{}'.\n"
                           "Make sure directory '{}' exist and is writable.".
                           format(filename, os.path.dirname(filename)))
    with fh:
        print("### Date : %s" % time.asctime(), file=fh)
        print("### Figure : %s" % FIGURE.Name(), file=fh)
        print("### Vertices: %s" % len(vertices), file=fh)

        if USE_MATERIAL and materials:
            # Write a companion .mtl file next to the OBJ.
            matfile = filename.rsplit(".", 1)[0] + ".mtl"
            write_matfile(matfile, materials())
            print("mtllib ./" + os.path.basename(matfile), file=fh)

        for vertex in vertices:
            print("v {} {} {}".format(*vertex), file=fh)

        if EXPORT_UV or USE_MATERIAL:
            for uv in tex_vertices:
                print("vt {} {}".format(*uv), file=fh)

        current_groups = list()
        current_mat = list()
        if not USE_GROUPS:
            # Single group for the whole figure (full-body-morph mode).
            print("g", FIGURE.Name(), file=fh)

        polys = polygons()
        tpolys = tex_polygons()
        for index, poly in enumerate(polys):
            if USE_GROUPS:
                # Emit a new group statement whenever the group list changes.
                if poly.Groups() != current_groups:
                    current_groups = poly.Groups()
                    print("g", ", ".join(current_groups), file=fh)
            if USE_MATERIAL:
                if poly.MaterialName() != current_mat:
                    current_mat = poly.MaterialName()
                    print("usemtl", current_mat, file=fh)
            # OBJ indices are 1-based.
            line = [str(sets[idx + poly.Start()] + 1)
                    for idx in range(poly.NumVertices())]
            if EXPORT_UV:
                tpoly = tpolys[index]
                for tidx, v in enumerate(
                        (tsets[idx + tpoly.Start()] + 1)
                        for idx in range(tpoly.NumTexVertices())):
                    line[tidx] += "/%d" % v
            print("f", " ".join(line), file=fh)
        del polys
        del tpolys
    return True
def fullbodymorph_import(figure, morphname, old_filename, new_filename,
                         actorlist, actor_indices,
                         **kwargs):
    """
    Create a full-body morph from the difference of two OBJ files.

    Reads the originally exported file and the modified file, computes
    per-vertex deltas, writes them as morph targets into each affected
    actor and links all dials to one master dial on the body actor.

    :param figure: Poser figure to receive the morph.
    :param morphname: name of the morph/master dial to create or reuse.
    :param old_filename: OBJ file as originally exported from Poser.
    :param new_filename: OBJ file modified by the external modeller.
    :param actorlist, actor_indices: bookkeeping from collect_geometry().
    :raises RuntimeError: on vertex-count mismatch or if the master dial
        can't be created.
    """
    verts_new = read_verts_from_file(new_filename)
    verts_old = read_verts_from_file(old_filename)
    if len(verts_old) != len(verts_new):
        raise RuntimeError("!!!Failed!!!\n"
                           "Old number of vertices: {}\n"
                           "New number of vertices: {}".
                           format(len(verts_old), len(verts_new)))

    # Morph deltas, scaled back from modeller units to Poser units.
    vertices = (verts_new - verts_old) / SCALEFACTOR
    del verts_new
    del verts_old

    body = figure.ParentActor()
    masterdial = body.Parameter(morphname)
    if masterdial is None:
        body.CreateValueParameter(morphname)
        masterdial = body.Parameter(morphname)
    if masterdial is None:
        raise RuntimeError("Can't find or create morph in body actor.")

    for actor_idx, actor in enumerate(actorlist):
        # Collect only non-zero deltas for this actor (rounded to
        # PRECISION decimals to suppress floating-point noise).
        morph = list()
        for i, v_idx in enumerate(actor_indices[actor_idx]):
            x, y, z = (round(a, PRECISION) for a in vertices[v_idx])
            if x != 0 or y != 0 or z != 0:
                morph.append((i, x, y, z))
        if len(morph) == 0:
            continue

        morphparm = actor.Parameter(morphname)
        if morphparm is None:
            actor.SpawnTarget(morphname)
            morphparm = actor.Parameter(morphname)
        assert morphparm is not None, "Could not create Morphtarget"
        # NOTE: the original multi-line assert was a syntax error
        # (unparenthesized string concatenation across lines).
        assert morphparm.IsMorphTarget(), (
            "Parametername ('%s') already exist but is not a morph"
            % morphname)

        for i, x, y, z in morph:
            morphparm.SetMorphTargetDelta(i, x, y, z)

        # Re-link this dial to the master dial (key 0 -> 0, 1 -> 1).
        while morphparm.NumValueOperations():
            morphparm.DeleteValueOperation(0)
        morphparm.AddValueOperation(poser.kValueOpTypeCodeKEY, masterdial)
        vop = morphparm.ValueOperations()[0]
        vop.InsertKey(0, 0)
        vop.InsertKey(1, 1)

    # NOTE(review): indentation was lost in the forum paste; the dial
    # limits are assumed to be set once, after all actors — confirm.
    masterdial.SetMinValue(-.5)
    masterdial.SetMaxValue(1.0)
File "Fullbody_Export.py"
from __future__ import print_function
import time
from Handle_Unimesh_Figure import
FIGURE,
OUT_FILE,
collect_geometry,
export
t = time.time()
# Get dictionary with Poser data.
# Yes, there is actually more data then needed.
geom = collect_geometry(FIGURE)
print("Time to collect data from Poser:", round(time.time() - t, 2), "Secs")
tt = time.time()
export(filename=OUT_FILE, **geom)
print("Time to export:", round(time.time() - tt, 2), "Secs")
print("Exported to '{}'".format(OUT_FILE))
print("Done in", round(time.time() - t, 2), "Secs")
# Free all used variables
del geom
File "Fullbody_Import.py"
from __future__ import print_function
import time
from Handle_Unimesh_Figure import
FIGURE,
OUT_FILE,
IN_FILE,
collect_geometry,
fullbodymorph_import
t = time.time()
# Get dictionary with Poser data.
# Yes, there is actually more data then needed.
geom = collect_geometry(FIGURE)
print("Time to collect data from Poser:", round(time.time() - t, 2), "Secs")
tt = time.time()
fullbodymorph_import(FIGURE, "MORPH", OUT_FILE, IN_FILE, **geom)
print("Time to read file and create morph(s):", round(time.time() - tt, 2), "Secs")
print("Imported from '{}".format(IN_FILE))
print("Done in ", round(time.time() - t, 2), "Secs")
# Free all used variables
del geom
How to use:
Pose a figure. Select the figure.
Run script "Fullbody_Export.py"
Open you modeler. Import OBJ file created from script (/PATH/Fullbody-figurename.obj) ("figurename" is the name of your figure; "LaFemme" for example). Your model should look exactly as it looks in Poser (pose and currently attached morphs).
If the model appears too small or too big, change SCALEFACTOR in the script "Handle_Unimesh_Figure.py" (line 16).
Modify model. Save model to the same path with filename: /PATH/Fullbody-figurename_mod.obj (append "_mod", watch the underscore).
Run script "Fullbody_Import.py" Script creates a morph named "MORPH" in each actor your model was modified. And a "Masterdial" in the bodyactor.
After a few experiments internally and externally I could find out that almost all important modellers are able to read and write out the data generated by the script correctly. You just have to adjust them properly.
I am now working on optimizing the script and building a user interface with wxPython in the next days. Christmas is overrated anyway.
In the end, something like the Poser-ZBrush Bridge for all modellers should be released, but as a light-light version. Because for most morph stuff ZBrush is pure overkill that costs a lot of money.
If someone feels inspired to help out: go for it!
Translated with www.DeepL.com/Translator (free version)
parkdalegardener posted at 1:06PM Mon, 23 December 2019 - #4374163
Thanks for this. I'll check it out after the holiday when I have more time.
Thanks for feedback!
adp, I'm still having some issues with exporting and importing some figures but I think I've found out why. some of the figures I've tried to use it with have UNWELDED groups, and at some point in the export, modeler, import process it appears that the groups have been welded. At least the difference in numbers of verts match up with the number that welding the verts in UVMapper pro. It may be the modelers I have doing this since my options are limited ( and this included loading the obj into Blender and re exporting it to load into Poser using your script. I haven't learned enough about Blender to prevent this YET. But I just wanted to let you know about this incase it happens to someone else.
BTW on some figure with welded obj this seems to work very good. At least when exporting and importing the same OBJ, which is as far as I've had time to try so far
I described the process several times now. When Poser welds the geometry into a Unimesh figure, some thousand vertices become superfluous. Those vertices can't simply be deleted, because their "slots" are needed so as not to destroy the "vertex count" (indices pointing to certain places in the mesh). If this order is destroyed, existing meshes won't work anymore. So, Poser sets these "orphaned" vertices simply to 0.
Some modelers don't make a fuss about it and export everything as-is if vertex-count preservation is ticked (Blender); others need to know what they should do with orphans when exporting (C4D, for example).
And, much more important: there is no uncomplicated way to circumvent these orphaned vertices. A database with each possible figure would be needed, which makes the process more error-prone.
One has to find the right setup for the modeler used, because each of the modelers available seems to handle the situation differently.
Under no circumstances one should delete the orphaned vertices, because their position (index) in the row of vertices is important.
So the solution would be to keep a record of original vertex numbers when cleaning up the Unimesh and translate the vertices back to the number where Poser expects them before loading the morph. This last step needs not change the facets or the uv coordinates so it can be pretty quick.
Yes one would need to keep a record of the translation tables. Poser standard would be to use a sub folder of the preferences folder to store them.
The number of vertices could be used as a built-in identifier for the dataset. This is not flawless but in combination with the number of redundant vertices it may form a combination unique enough for small scale use. So say if the translation dataset has 4501 vertices, that number can be used as a first identifier of the dataset. Then if the dataset contains info of the length of the vertex list it is supposed to translate, the loader process, knowing the vertex count of the dataset to be filled, can use that as a key to find the correct dataset if multiple combinations exist. If all else fails there is trial and error, or better just load them all as a morph and let the user delete the wrong ones.
AFAIK Poser is happy to load just a bunch of vertices for a morph. If not maybe the trick I use for MD may work: I add a line: 'f 1 2 3 4' to keep it happy.
FVerbaas posted at 2:53PM Sat, 18 January 2020 - #4377023
So the solution would be to keep a record of original vertex numbers when cleaning up the Unimesh and translate the vertices back to the number where Poser expects them before loading the morph. This last step needs not change the facets or the uv coordinates so it can be pretty quick.
If you remove an entry from the vertex-list, you have to recompute all polygons using a vertex after the removed indexnumber โ because they are now one position deeper.
Solution could be:
Create a polygonlist not refering to vertex-indexnumbers but real vertex data. Create new vertexlist based on polygonlist. Create a cross-reference-list to original vertex-list. Save obj-file. Save cross-reference-list with the same name.
On load: Load obj-file. Load cross-reference-list. Use cross-reference-list to translate loaded index-positions to real vertex positions.
As I said: error-prone, and almost all modern modelers are able to handle the situation if properly configured.
FVerbaas posted at 5:19PM Sat, 18 January 2020 - #4377035
The xyz data mapping could be skipped. If vertex ID appears in the set array, it is used and must stay, if not it is redundant. One problem less.
Then you have to export those null vertices (the ones Poser set to zero while welding). And exactly those orphaned vertices are the problem. If you don't export them, the vertex indices don't match what the polygons need, and your modeler will show mikado :)
Or do I missunderstand you?
Not bad - I got the output.
Maybe you want to try this:
Replace function do_export() with this:
def do_export(figure, onFinish=None):
    """
    Export *figure* to a Wavefront obj-file, skipping orphaned
    (all-zero) vertices created by Poser's unimesh welding.

    Builds cross-reference lists so polygon indices stay valid after
    the orphaned vertices/tex-vertices are removed from the output.

    :param figure: Poser figure to export.
    :param onFinish: optional callback invoked after a successful export.
    :returns: the written filename, or None if cancelled/failed.
    """
    assert isinstance(figure, poser.FigureType)
    figurename = figure.Name()
    parms = collect_geometry(figure)
    vertices = parms["vertices"]
    geom = parms["geom"]

    use_material = CONFIG.get("CTRL_ExportTexture", True)
    use_groups = CONFIG.get("CTRL_ExportGroups", False)
    morphname = CONFIG["CTRL_MorphName"].strip()
    vertices *= int(CONFIG.get("Scale", 100))

    if CONFIG.get("CTRL_SingleSelectFile", True):
        with wx.FileDialog(None, "Export Wavefront file",
                           style=wx.FD_SAVE | wx.FD_OVERWRITE_PROMPT | wx.FD_CHANGE_DIR,
                           defaultDir=CONFIG.get("ExportPath", BASEPATH),
                           defaultFile="{}-{}.obj".format(figurename, morphname)
                           ) as dlg:
            if dlg.ShowModal() == wx.ID_CANCEL:
                return
            CONFIG["ExportPath"] = os.path.dirname(dlg.GetPath())
            filename = dlg.GetFilename()
    else:
        filename = os.path.join(CONFIG["ExportPath"],
                                "{}-{}.obj".format(figurename, morphname))

    # Probe writability before doing any real work.
    try:
        tmp = open(filename, "w")
        tmp.close()
    except IOError:
        ErrDialog("Can't create or write to file '{}'.",
                  "Maybe you have to select another directory first.")
        return

    with open(filename, "w") as fh:
        print("### Date : %s" % time.asctime(), file=fh)
        print("### Figure : %s" % figurename, file=fh)
        print("### Vertices: %s" % len(vertices), file=fh)

        if use_material and geom.Materials():
            matfile = filename.rsplit(".", 1)[0] + ".mtl"
            write_matfile(matfile, geom.Materials())
            print("mtllib ./" + os.path.basename(matfile), file=fh)

        # Map original vertex index -> index in the cleaned output.
        vertex_crosslist = NP.zeros(len(vertices), NP.int32)
        not_empty_idx = 0
        for idx, vertex in enumerate(vertices):
            # BUGFIX: must be "or", not "and". A vertex is orphaned only
            # if ALL coordinates are zero; with "and" every vertex having
            # a single zero coordinate was dropped (the stray-vertex
            # errors reported with La Femme).
            if vertex[0] != 0.0 or vertex[1] != 0.0 or vertex[2] != 0.0:
                print("v {} {} {}".format(*vertex), file=fh)
                vertex_crosslist[idx] = not_empty_idx
                not_empty_idx += 1

        if use_material:
            tex_crosslist = NP.zeros(geom.NumTexVertices(), NP.int32)
            not_empty_idx = 0
            for idx, tvert in enumerate(geom.TexVertices()):
                # BUGFIX: "or" for the same reason as above.
                if tvert.U() != 0.0 or tvert.V() != 0.0:
                    print("vt {} {}".format(tvert.U(), tvert.V()), file=fh)
                    tex_crosslist[idx] = not_empty_idx
                    not_empty_idx += 1

        current_groups = list()
        current_mat = list()
        if not use_groups:
            print("g", figurename, file=fh)
            # experimental --- define smoothgroup
            print("s", figurename, file=fh)

        polys = geom.Polygons()
        tpolys = geom.TexPolygons()

        # Translate set indices through the cross-reference lists so
        # faces point at the cleaned (re-numbered) vertex lists.
        sets = geom.Sets()
        for idx in range(len(sets)):
            sets[idx] = vertex_crosslist[sets[idx]]
        tsets = geom.TexSets()
        for idx in range(len(tsets)):
            tsets[idx] = tex_crosslist[tsets[idx]]

        for index, poly in enumerate(polys):
            if use_groups:
                if poly.Groups() != current_groups:
                    current_groups = poly.Groups()
                    print("g", ", ".join(current_groups), file=fh)
                    # experimental --- define smoothgroup
                    print("s", ", ".join(current_groups), file=fh)
            if use_material:
                if poly.MaterialName() != current_mat:
                    current_mat = poly.MaterialName()
                    print("usemtl", current_mat, file=fh)
            # OBJ indices are 1-based.
            line = [str(sets[idx + poly.Start()] + 1)
                    for idx in range(poly.NumVertices())]
            if use_material:
                tpoly = tpolys[index]
                for tidx, v in enumerate(
                        (tsets[idx + tpoly.Start()] + 1)
                        for idx in range(tpoly.NumTexVertices())):
                    line[tidx] += "/%d" % v
            print("f", " ".join(line), file=fh)

    CONFIG["LastExported"] = filename
    if onFinish is not None:
        onFinish()
    return filename
BTW: Same issue further down, at the end of do_export(), where the pickling is done.
La Femme gives a weird error, but that seems a single vertex.Se green prop in image below. It is gone (yellow prop) if I change the selection criterion to look in the sets list. That does slow down the process though.
Searching in an unsorted array is not fast indeed. I tried with good success a faster route: An array, named used, with boolean Falses, one for each vertex, Then a loop over sets and vertex indeces found are raised to True.
Vertices are written when
I assume the array is deleted after the method ends and re-created when the method is called again.
FVerbaas posted at 1:58PM Tue, 21 January 2020 - #4377086
La Femme gives a weird error, but that seems a single vertex.Se green prop in image below.
So La Femme isn't only badly constructed, but has also errors in the mesh? I'm happe to have Bella. A real good mesh and a pretty nice figure. I'm on the move replacing my "standard figure" Roxy with Bella now.
LOL! In fact Bella had problems much much worse, with many of the belly vertices thrown into the genital area. The platform we use being Rendo and the example image a flagrant trespassing of their TOS I did choose LF for the example image.
The no unused vertices version works fine with Marvelous Designer. It allows me to export, drape, simulate, and re- import as a morph into the mesh object in Poser.
FVerbaas posted at 11:27AM Wed, 22 January 2020 - #4377335
The no unused vertices version works fine with Marvelous Designer. It allows me to export, drape, simulate, and re- import as a morph into the mesh object in Poser.
The advantage of open software is that anybody can use and/or modifiy it the way it works for him :)
FVerbaas posted at 4:58PM Fri, 24 January 2020 - #4377568
Works nicely with the reduced vertices and the reload of the FBM.
To get full abilities of the FBM, like create joint dependency on the morph, the figure must be saved and reloaded. .
Why not publishing the changes you made to work with your software?
I noticed the problem that Poser not always sees that changes are made. For me it was the case as I tried to modify a freshly loaded morph.
Maybe I'm going to advance the script so that morphs are directly written into Posers PMD-file. But at the moment I wont have the time for this.
adp001 posted at 5:31PM Fri, 24 January 2020 - #4377626
Why not publishing the changes you made to work with your software?
That is the plan. I just did not have the time yet. The main changes are shown in my post of the 19th. The rest is still your code. One thin I want to do is make the cross lists (clean vertex list) an option like the export of textures is so versions need not deviate.
I want to maybe take out the texture vertex change table. It is currently not read or used so effort in making it and writing it is lost. I first want to see however how this algorithm works when you delete polygons in the group editor. I have a feeling the thing may work still because Poser most likely will not reorder the vertices when you delete polygons. This would make it possible to export parts of the geometry only, do whatever mean thing to the vertex positions other than changing topology of that part, and re-import that as a morph into the original mesh, or something like that. If tha works, it makes sense to keep the texture cross list.
My last published version has no crosslist for tex vertices anymore. Even if some groups are not exported, texturemap should still work (modeller will ignore it, and the script don't need polys or texture information on import).
This morning I thought: What about not exporting hidden actors? :)
And just because I'm working on some dynamic cloth (no groups at all, but also no figure) I'll output props too. Just to have it all in one script.
We were thinking along the same lines then, and did so at about the same moment. Must have been sent from that UFO passing over. ;-)
One other thing: I see the present accuracy for xyz is 8 digits in the Poser units system, so 0.000026 mm. I doubt if a morph delta of less than 0.0001 Poser unit (0.26 mm) is worth registrating for Poser garment figures. Higher accuracy may lead to loads of 'noise' deltas just clogging memory.
I took the liberty to change:
Been thinking of other applications also to select facets and define groups.
As you said: We where thinking along the same line. I had a similar line you added in a test, but did not notice a difference.
There must be a more elegant way to avoid those orphaned vertices. But I have no time to wrap my brain around this at the moment (some family problems). Kind of a hole in my head.
FVerbaas posted at 3:40AM Sat, 25 January 2020 - #4377688
The amount of verrex noise the object file comes back with will depend on what it goes through in the cruel world outside Poser.
The "noise" - low floating point precision at Posers end - is (should be) removed by reducing numpy precision to 32 bit numbers and rounding incoming values to 8 decimal places. I moved an object several 10 times between Blender and Poser, manipulated here and there, before I published the code. No noise errors so far (like common ones with ZBrush).
Yes probably reducing the 8 digit accuracy to 4 digits would give similar effect for 90% of the cases. Did you change the scale on import-export?
I referred to 'noise' from various sources like it comes from say making the morph via simulation. If I make say a belly morph in a sweater that way I will find there are insignificant but non-zero deltas in the sleeves.
I am considering to add a sort of filter function where morph deltas of vertices less than distance x from the area of interest are written with weight factor 1.0, and outside that area with a factor decreasing gradually with distance, to be filtered by the lower limit where appropriate. Something like the falloff zones of joints. Default zones would need to be defined only once for a body morph normally.
From there, route would be open to very interesting tools for content development, but that is outside the scope of this thread.
This site uses cookies to deliver the best experience. Our own cookies make user accounts and other features possible. Third-party cookies are used to display relevant ads and to analyze how Renderosity is used. By using our site, you acknowledge that you have read and understood our Terms of Service, including our Cookie Policy and our Privacy Policy.
Goal: Export a posed figure to Wavefront OBJ file. In one piece, not broken by actors. Manipulate the figure (morphing). From the modeler: export the morphed figure back to another file. Import the morph into Poser as one full body morph.
Workflow Export: Get vertices, polygons and actorlist from unimesh-info. Use the actorlist to get worldvertices (actual position for each actor vertex). Replace unimesh-vertices with world vertices. Output to file.
Workflow Import: Read vertices from previously written file (the one loaded into the modeller). Read vertices from new created file (modified geometry). subtract one from another to get the difference, the morphdata. Extract morphdata for each actor.
This works so far. I get the morphs I want. Tested with LaFemme and Roxy.
One problem so far: Unimesh-vertices will keep the obsolete vertices (actor-boundries are glued together). These vertices are all set to coordinate 0, 0, 0. These references to 0, 0, 0 are kept in their original vertex-index position with no reference from any polygon. Example: After exporting Roxy and importing the file into my modeler, I have 2217 single and not referrenced vertices at coordinate 0, 0, 0. Under normal circumstances, these vertices are ignored from modelers. And are also not exported. What leads to wrong vertex-count while importing the model.
Luckily my modeler (C4D) can export "duplicate vertices". I looked into Blender, but I didn't see such an option. So my test with Blender failed.
Nevertheless I will publish the script. Maybe someone has an idea how to solve the problem elegantly.
First script has all functions to collect data from Poser, export and import. The other 2 are helpers for Export/Import.
Filenames are fixed for the moment (see script comment). It's "Fullbody-.obj". Make sure the exported file is in the form "Fullbody-_mod.obj".