diff --git a/.gitignore b/.gitignore
index 73d42f0..7bbc71c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -99,21 +99,3 @@ ENV/
# mypy
.mypy_cache/
-
-# blender backup files
-*.blend1
-install_plugin_cad.sh
-.vscode
-.DS_Store
-
-# emacs backup files
-~*
-*~
-*#
-.#*
-\#*\#
-out/
-
-#freecad_workbench
-freecad_workbench/freecad/update_workbench.sh
-*.FCBak
\ No newline at end of file
diff --git a/.gitmodules b/.gitmodules
deleted file mode 100644
index 39904c4..0000000
--- a/.gitmodules
+++ /dev/null
@@ -1,9 +0,0 @@
-[submodule "rcg_pipeline"]
- path = rcg_pipeline
- url = https://gitlab.com/robossembler/rcg-pipeline.git
-[submodule "freecad_workbench"]
- path = freecad_workbench
- url = https://gitlab.com/robossembler/robossembler-freecad-workbench.git
-[submodule "simulation/insertion_vector_predicate/assembly"]
- path = simulation/insertion_vector_predicate/assembly
- url = https://github.com/yunshengtian/Assemble-Them-All
diff --git a/ARFrames.py b/ARFrames.py
new file mode 100644
index 0000000..d4faea0
--- /dev/null
+++ b/ARFrames.py
@@ -0,0 +1,752 @@
+import FreeCAD
+import ARTools
+if FreeCAD.GuiUp:
+ import FreeCADGui
+ from pivy import coin
+ from PySide import QtCore, QtGui, QtSvg
+ import Part
+ import os
+
+__title__ = "ARFrames"
+__author__ = "Mathias Hauan Arbo"
+__workbenchname__ = "ARBench"
+__version__ = "0.1"
+__url__ = "https://github.com/mahaarbo/ARBench"
+__doc__ = """"""
+
+
+############################################################
+# Frame Objects
+############################################################
+class Frame(object):
+ """Basic freestanding frame"""
+ def __init__(self, obj):
+ obj.addProperty("App::PropertyPlacement",
+ "Placement", "Base",
+ "Placement of the frame")
+ obj.Placement = FreeCAD.Placement()
+ obj.Proxy = self
+ self.obj = obj
+ self.additional_data = {}
+
+ def onChanged(self, fp, prop):
+ pass
+
+ def execute(self, obj):
+ pass
+
+ def __getstate__(self):
+ return None
+
+ def __setstate__(self, state):
+ return None
+
+ def getDict(self):
+ d = {}
+ d["label"] = str(self.obj.Label)
+ d["placement"] = ARTools.placement2axisvec(self.obj.Placement)
+ d.update(self.additional_data)
+ return d
+
+
+class PartFrame(Frame):
+ """Frame rigidly attached to a part frame.
+ Inherits the base placement from the part's frame, and placement is
+ relative to the part frame.
+ """
+ def __init__(self, obj, partobj):
+ Frame.__init__(self, obj)
+ obj.addProperty("App::PropertyPlacementLink",
+ "Part", "Parent",
+ "The part to attach to.")
+ obj.Part = partobj
+ obj.setEditorMode("Part", 1)
+
+ def execute(self, obj):
+ if FreeCAD.GuiUp:
+ obj.ViewObject.Proxy.updateData(obj, "Placement")
+
+ def getDict(self):
+ d = Frame.getDict(self)
+ d["part"] = str(self.obj.Part.Label)
+ return d
+
+
+class FeatureFrame(PartFrame):
+ """Frame rigidly attached to a feature.
+ The feature frame is attached to a feature on a part. It stores both the
+ placement of the feature w.r.t. the part and an additional placement
+ relative to the feature."""
+ def __init__(self, obj, partobj, featurePlacement):
+ PartFrame.__init__(self, obj, partobj)
+ obj.addProperty("App::PropertyPlacement",
+ "FeaturePlacement", "Feature",
+ "The frame attached to the feature.")
+ obj.addProperty("App::PropertyString",
+ "PrimitiveType", "Feature",
+ "The primitive type of the feature.")
+ obj.addProperty("App::PropertyString",
+ "ShapeType", "Feature",
+ "The shape type of the feature.")
+ obj.addProperty("App::PropertyString",
+ "Positioning", "Feature",
+ "The type of positioning used during creation.")
+ obj.FeaturePlacement = featurePlacement
+
+ def getDict(self):
+ d = PartFrame.getDict(self)
+ d["featureplacement"] = ARTools.placement2axisvec(self.obj.FeaturePlacement)
+ d["shapetype"] = str(self.obj.ShapeType)
+ d["positioning"] = str(self.obj.Positioning)
+ return d
+
+
+############################################################
+# ViewProvider to the frames
+############################################################
+class ViewProviderFrame(object):
+ """ViewProvider for the basic frame.
+ Uses the SoAxisCrossKit to create axes with constant length regardless
+ of zoom, and updates the position when the placement changes.
+ """
+ def __init__(self, vobj):
+ vobj.addProperty("App::PropertyFloat", "Scale")
+ vobj.Scale = 0.12
+ vobj.addProperty("App::PropertyFloat", "HeadSize")
+ vobj.HeadSize = 3.0
+ vobj.addProperty("App::PropertyFloat", "LineWidth")
+ vobj.LineWidth = 2.0
+ vobj.Proxy = self
+
+ def attach(self, vobj):
+ # We only have a shaded visual group
+ self.shaded = coin.SoGroup()
+
+ # Takes heavily from SoAxisCrosskit.h,
+ # and Toggle_DH_Frames by galou_breizh on the forums
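+ # SoShapeScale keeps the axis cross at an (approximately) constant on-screen size regardless of camera zoom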
+ self.vframe = coin.SoType.fromName("SoShapeScale").createInstance()
+ self.vframe.setPart("shape", coin.SoType.fromName("SoAxisCrossKit").createInstance())
+ self.vframe.scaleFactor.setValue(0.1)
+ ax = self.vframe.getPart("shape", 0)
+ cone = ax.getPart("xHead.shape", 0)
+ cone.bottomRadius.setValue(vobj.HeadSize)
+ cone = ax.getPart("yHead.shape", 0)
+ cone.bottomRadius.setValue(vobj.HeadSize)
+ cone = ax.getPart("zHead.shape", 0)
+ cone.bottomRadius.setValue(vobj.HeadSize)
+ lwstring = "lineWidth {0}".format(vobj.LineWidth)
+ ax.set("xAxis.appearance.drawStyle", lwstring)
+ ax.set("yAxis.appearance.drawStyle", lwstring)
+ ax.set("zAxis.appearance.drawStyle", lwstring)
+ ax.set("xAxis.pickStyle", "style SHAPE")
+ ax.set("yAxis.pickStyle", "style SHAPE")
+ ax.set("zAxis.pickStyle", "style SHAPE")
+
+ # Then remember to make it selectable in the viewer
+ selectionNode = coin.SoType.fromName("SoFCSelection").createInstance()
+ selectionNode.documentName.setValue(FreeCAD.ActiveDocument.Name)
+ selectionNode.objectName.setValue(vobj.Object.Name)
+ selectionNode.subElementName.setValue("Frame")
+ selectionNode.addChild(self.vframe)
+
+ # We would like to place it where we want
+ self.transform = coin.SoTransform()
+ self.shaded.addChild(self.transform)
+ self.shaded.addChild(self.vframe)
+ self.shaded.addChild(selectionNode)
+ vobj.addDisplayMode(self.shaded, "Shaded")
+
+ def updateData(self, fp, prop):
+ if prop == "Placement":
+ pl = fp.getPropertyByName("Placement")
+ self.transform.translation = (pl.Base.x,
+ pl.Base.y,
+ pl.Base.z)
+ self.transform.rotation = pl.Rotation.Q
+
+ def getDisplayModes(self, vobj):
+ modes = ["Shaded"]
+ return modes
+
+ def getDefaultDisplayMode(self):
+ return "Shaded"
+
+ def getIcon(self):
+ icondir = os.path.join(FreeCAD.getUserAppDataDir(), "Mod", __workbenchname__, "UI", "icons")
+ return str(os.path.join(icondir, "frame.svg"))
+
+ def onChanged(self, vp, prop):
+ if prop == "Scale":
+ s = vp.getPropertyByName("Scale")
+ self.vframe.scaleFactor.setValue(float(s))
+ elif prop == "HeadSize":
+ hs = vp.getPropertyByName("HeadSize")
+ xcone = self.vframe.getPart("shape", 0).getPart("xHead.shape", 0)
+ xcone.bottomRadius.setValue(float(hs))
+ ycone = self.vframe.getPart("shape", 0).getPart("yHead.shape", 0)
+ ycone.bottomRadius.setValue(float(hs))
+ zcone = self.vframe.getPart("shape", 0).getPart("zHead.shape", 0)
+ zcone.bottomRadius.setValue(float(hs))
+ elif prop == "LineWidth":
+ lw = vp.getPropertyByName("LineWidth")
+ lwstring = "lineWidth {0}".format(lw)
+ ax = self.vframe.getPart("shape", 0)
+ ax.set("xAxis.appearance.drawStyle", lwstring)
+ ax.set("yAxis.appearance.drawStyle", lwstring)
+ ax.set("zAxis.appearance.drawStyle", lwstring)
+
+ def __getstate__(self):
+ return None
+
+ def __setstate__(self, state):
+ pass
+
+
+class ViewProviderPartFrame(ViewProviderFrame):
+ """View provider to the part frame."""
+ def updateData(self, fp, prop):
+ if prop == "Placement":
+ parentpl = fp.getPropertyByName("Part").Placement
+ localpl = fp.Placement
+ pl = parentpl.multiply(localpl)
+ self.transform.translation = (pl.Base.x,
+ pl.Base.y,
+ pl.Base.z)
+ self.transform.rotation = pl.Rotation.Q
+
+
+class ViewProviderFeatureFrame(ViewProviderFrame):
+ """View provider to the feature frames."""
+ def updateData(self, fp, prop):
+ if prop == "Placement":
+ parentpl = fp.getPropertyByName("Part").Placement
+ featurepl = fp.getPropertyByName("FeaturePlacement")
+ localpl = fp.Placement
+ pl = parentpl.multiply(featurepl.multiply(localpl))
+ self.transform.translation = (pl.Base.x,
+ pl.Base.y,
+ pl.Base.z)
+ self.transform.rotation = pl.Rotation.Q
+
+
+###################################################################
+# Base functions
+###################################################################
+
+def makeFrame(placement=FreeCAD.Placement()):
+ obj = FreeCAD.ActiveDocument.addObject("App::FeaturePython", "Frame")
+ Frame(obj)
+ if FreeCAD.GuiUp:
+ ViewProviderFrame(obj.ViewObject)
+ return obj
+
+
+def makePartFrame(part):
+ obj = FreeCAD.ActiveDocument.addObject("App::FeaturePython", "PartFrame")
+ PartFrame(obj, part)
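+ # On FreeCAD > 0.16, keep the frame in the same GeoFeatureGroup (e.g. assembly) as the part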
+ if int(FreeCAD.Version()[1]) > 16:
+ geo_feature_group = part.getParentGeoFeatureGroup()
+ geo_feature_group.addObject(obj)
+ if FreeCAD.GuiUp:
+ ViewProviderPartFrame(obj.ViewObject)
+ return obj
+
+
+def makeFeatureFrame(part, featurepl):
+ obj = FreeCAD.ActiveDocument.addObject("App::FeaturePython",
+ "FeatureFrame")
+ FeatureFrame(obj, part, featurepl)
+ # If we're >0.16, add the feature frame to the assembly
+ if int(FreeCAD.Version()[1]) > 16:
+ geo_feature_group = part.getParentGeoFeatureGroup()
+ geo_feature_group.addObject(obj)
+ if FreeCAD.GuiUp:
+ ViewProviderFeatureFrame(obj.ViewObject)
+ return obj
+
+
+def makeAllPartFrames():
+ dc = FreeCAD.activeDocument()
+ for part in dc.Objects:
+ if isinstance(part, Part.Feature):
+ pf = makePartFrame(part)
+ pf.Label = "Frame"+str(part.Label)
+
+
+def spawnFeatureFrameCreator():
+ ffpanel = FeatureFramePanel()
+ FreeCADGui.Control.showDialog(ffpanel)
+
+
+###################################################################
+# GUI Related
+###################################################################
+uidir = os.path.join(FreeCAD.getUserAppDataDir(),
+ "Mod", __workbenchname__, "UI")
+icondir = os.path.join(uidir, "icons")
+
+ARTools.spawnClassCommand("FrameCommand",
+ makeFrame,
+ {"Pixmap": str(os.path.join(icondir, "frame.svg")),
+ "MenuText": "Make a free frame",
+ "ToolTip": "Make a freestanding reference frame."})
+
+ARTools.spawnClassCommand("AllPartFramesCommand",
+ makeAllPartFrames,
+ {"Pixmap": str(os.path.join(icondir, "allpartframes.svg")),
+ "MenuText": "All part frames",
+ "ToolTip": "Make all part frames."})
+ARTools.spawnClassCommand("FeatureFrameCommand",
+ spawnFeatureFrameCreator,
+ {"Pixmap": str(os.path.join(icondir, "featureframecreator.svg")),
+ "MenuText": "Feature frame creator",
+ "ToolTip": "Create a feature frame on selected primitive."})
+
+
+###################################################################
+# GUI buttons
+###################################################################
+class FeatureFramePanel:
+ """Spawn panel choices for a feature."""
+ def __init__(self):
+ selected = FreeCADGui.Selection.getSelectionEx()
+ # Check selection
+ if len(selected) == 1:
+ selected = selected[0]
+ self.selected = selected
+ else:
+ FreeCAD.Console.PrintError("Multipart selection not available.")
+ self.reject()
+
+ if not selected.HasSubObjects:
+ FreeCAD.Console.PrintError("Part selected not feature.")
+ self.reject()
+ elif not len(selected.SubObjects) == 1:
+ FreeCAD.Console.PrintError("Multifeature selection not available")
+ self.reject()
+
+ # Choices related to selection
+ so_desc = ARTools.describeSubObject(selected.SubObjects[0])
+ self.so_desc = so_desc
+ shape_choices = {
+ "Vertex": [],
+ "Edge": ["PointOnEdge"],
+ "Face": ["PointOnSurface"]
+ }
+ prim_choices = {
+ "ArcOfCircle": ["Center"],
+ "ArcOfEllipse": ["Center"],
+ "ArcOfHyperBola": ["Center"],
+ "ArcOfParabola": ["Center"],
+ "BSplineCurve": ["Center"],
+ "BezierCurve": ["Center"],
+ "Circle": ["Center"],
+ "Ellipse": ["Center"],
+ "Hyperbola": ["Center"],
+ "Parabola": ["Center"],
+ "Line": [],
+ "BSplineSurface": ["Center"],
+ "BezierSurface": ["Center"],
+ "Cylinder": ["PointOnCenterline"],
+ "Plane": ["Center"],
+ "Sphere": ["Center"],
+ "Toroid": ["Center"],
+ "Cone": ["PointOnCenterline"]
+ }
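+ # "PickedPoint" is always offered; extra positioning choices depend on the selected shape and primitive type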
+ self.choices = ["PickedPoint"]
+ self.choices = self.choices + shape_choices[so_desc[1]]
+ self.choices = self.choices + prim_choices[so_desc[0]]
+ # Setting up QT form
+ uiform_path = os.path.join(uidir, "FeatureFrameCreator.ui")
+ self.form = FreeCADGui.PySideUic.loadUi(uiform_path)
+ self.form.ChoicesBox.addItems(self.choices)
+ self.form.PickedTypeLabel.setText(so_desc[0])
+ QtCore.QObject.connect(self.form.ChoicesBox,
+ QtCore.SIGNAL("currentIndexChanged(QString)"),
+ self.choiceChanged)
+ self.scenes = {}
+ for choice in self.choices:
+ sc = QtGui.QGraphicsScene()
+ icon = str(os.path.join(icondir, choice+".svg"))
+ sc.addItem(QtSvg.QGraphicsSvgItem(icon))
+ self.scenes[choice] = sc
+ self.choiceChanged(self.form.ChoicesBox.currentText())
+
+ def choiceChanged(self, choice):
+ if choice in self.scenes.keys():
+ self.form.Preview.setScene(self.scenes[choice])
+
+ def accept(self):
+ sel_choice = self.form.ChoicesBox.currentText()
+ paneldict = {"PickedPoint": PickedPointPanel,
+ "PointOnEdge": PointOnEdgePanel,
+ "PointOnSurface": PointOnSurfacePanel,
+ "Center": CenterPanel,
+ "PointOnCenterline": PointOnCenterlinePanel}
+ pan = paneldict[sel_choice](self.selected, self.so_desc)
+ FreeCADGui.Control.closeDialog()
+ # The dialog is actually closed after the accept function has
+ # completed. So we need to use a delayed task to open the new dialog:
+ QtCore.QTimer.singleShot(0,
+ lambda: FreeCADGui.Control.showDialog(pan))
+
+ def reject(self):
+ FreeCADGui.Control.closeDialog()
+
+
+class BaseFeaturePanel(object):
+ """Base feature panel to be inherited from."""
+ def __init__(self, selected, so_desc):
+ # Handle selected and FF placement
+ self.selected = selected
+ self.so_desc = so_desc
+ # Connect offset to spinboxes
+ QtCore.QObject.connect(self.form.XBox,
+ QtCore.SIGNAL("valueChanged(double)"),
+ self.offsetChanged)
+ QtCore.QObject.connect(self.form.YBox,
+ QtCore.SIGNAL("valueChanged(double)"),
+ self.offsetChanged)
+ QtCore.QObject.connect(self.form.ZBox,
+ QtCore.SIGNAL("valueChanged(double)"),
+ self.offsetChanged)
+ QtCore.QObject.connect(self.form.RollBox,
+ QtCore.SIGNAL("valueChanged(double)"),
+ self.offsetChanged)
+ QtCore.QObject.connect(self.form.PitchBox,
+ QtCore.SIGNAL("valueChanged(double)"),
+ self.offsetChanged)
+ QtCore.QObject.connect(self.form.YawBox,
+ QtCore.SIGNAL("valueChanged(double)"),
+ self.offsetChanged)
+ QtCore.QObject.connect(self.form.ScaleBox,
+ QtCore.SIGNAL("valueChanged(double)"),
+ self.scaleChanged)
+
+ def createFrame(self):
+ self.fframe = makeFeatureFrame(self.selected.Object, self.local_ffpl)
+ self.fframe.PrimitiveType = self.so_desc[0]
+ self.fframe.ShapeType = self.so_desc[1]
+ ad = ARTools.getPrimitiveInfo(self.so_desc[0],
+ self.selected.SubObjects[0])
+ self.fframe.Proxy.additional_data.update(ad)
+
+ def scaleChanged(self):
+ scale = self.form.ScaleBox.value()
+ self.fframe.ViewObject.Scale = scale
+
+ def offsetChanged(self):
+ disp = FreeCAD.Vector(self.form.XBox.value(),
+ self.form.YBox.value(),
+ self.form.ZBox.value())
+ rot = FreeCAD.Rotation(self.form.YawBox.value(),
+ self.form.PitchBox.value(),
+ self.form.RollBox.value())
+ offset = FreeCAD.Placement(disp, rot)
+ self.fframe.Placement = offset
+
+ def accept(self):
+ framelabel = self.form.FrameLabelField.toPlainText()
+ if not len(framelabel) == 0:
+ self.fframe.Label = framelabel
+ FreeCADGui.Control.closeDialog()
+
+ def reject(self):
+ FreeCAD.activeDocument().removeObject(self.fframe.Name)
+ FreeCADGui.Control.closeDialog()
+
+
+class PickedPointPanel(BaseFeaturePanel):
+ """Create a feature frame at the picked point."""
+ # Not very clever. It just places the frame with default rotation.
+ def __init__(self, selected, so_desc):
+ uiform_path = os.path.join(uidir, "FramePlacer.ui")
+ self.form = FreeCADGui.PySideUic.loadUi(uiform_path)
+ BaseFeaturePanel.__init__(self, selected, so_desc)
+ parent_pl = selected.Object.Placement
+ abs_pl = FreeCAD.Placement(selected.PickedPoints[0],
+ FreeCAD.Rotation())
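+ # Express the picked point in the part's local frame: local = parent^-1 * absolute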
+ self.local_ffpl = parent_pl.inverse().multiply(abs_pl)
+ self.createFrame()
+ self.fframe.Positioning = "PickedPoint"
+
+
+class PointOnEdgePanel(BaseFeaturePanel):
+ """Create a feature frame on an edge."""
+ def __init__(self, selected, so_desc):
+ uiform_path = os.path.join(uidir, "FramePlacer.ui")
+ self.form = FreeCADGui.PySideUic.loadUi(uiform_path)
+ # Enable the first parameter
+ self.form.VLabel.setEnabled(True)
+ self.form.VLabel.setVisible(True)
+ self.form.VLabel.setText("u")
+ self.form.VBox.setEnabled(True)
+ self.form.VBox.setVisible(True)
+ QtCore.QObject.connect(self.form.VBox,
+ QtCore.SIGNAL("valueChanged(double)"),
+ self.parameterChanged)
+
+ # Enable percentage or param selection
+ self.form.OptionsLabel.setEnabled(True)
+ self.form.OptionsLabel.setVisible(True)
+ self.form.OptionsLabel.setText("Arc param.")
+ self.form.OptionsBox.setEnabled(True)
+ self.form.OptionsBox.setVisible(True)
+ self.form.OptionsBox.addItems(["mm", "%"])
+ QtCore.QObject.connect(self.form.OptionsBox,
+ QtCore.SIGNAL("currentIndexChanged(QString)"),
+ self.choiceChanged)
+ BaseFeaturePanel.__init__(self, selected, so_desc)
+
+ # Place the frame wherever the values are atm
+ self.local_ffpl = FreeCAD.Placement()
+ self.createFrame()
+ self.fframe.Positioning = "PointOnEdge"
+ self.choiceChanged(self.form.OptionsBox.currentText())
+ self.parameterChanged()
+
+ def parameterChanged(self):
+ value = self.form.VBox.value()
+ if self.form.OptionsBox.currentText() == "%":
+ value = self.p2mm(value)
+ edge = self.selected.SubObjects[0]
+ point = edge.valueAt(value)
+ tangentdir = edge.tangentAt(value)
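+ # Align the frame's x-axis with the edge tangent at the chosen parameter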
+ rot = FreeCAD.Rotation(FreeCAD.Vector(1, 0, 0),
+ tangentdir)
+ abs_ffpl = FreeCAD.Placement(point, rot)
+ parent_pl = self.selected.Object.Placement
+ self.local_ffpl = parent_pl.inverse().multiply(abs_ffpl)
+ self.fframe.FeaturePlacement = self.local_ffpl
+ # force recompute of placement?
+ self.fframe.Placement = self.fframe.Placement
+
+ def choiceChanged(self, choice):
+ value = self.form.VBox.value()
+ if choice == "mm":
+ value = self.p2mm(value)
+ self.form.VBox.setSuffix("mm")
+ parameter_range = self.selected.SubObjects[0].ParameterRange
+ self.form.VBox.setRange(*parameter_range)
+ self.form.VBox.setSingleStep(0.1)
+ elif choice == "%":
+ value = self.mm2p(value)
+ self.form.VBox.setSuffix("%")
+ self.form.VBox.setRange(0, 100.0)
+ self.form.VBox.setSingleStep(1.0)
+ self.form.VBox.setValue(value)
+
+ def p2mm(self, value):
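+ # Despite the name, this maps a percentage (0-100) onto the edge's parameter range, which equals millimetres only for length-parameterised edges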
+ parameter_range = self.selected.SubObjects[0].ParameterRange
+ delta = parameter_range[1] - parameter_range[0]
+ return 0.01*value*delta + parameter_range[0]
+
+ def mm2p(self, value):
+ parameter_range = self.selected.SubObjects[0].ParameterRange
+ delta = parameter_range[1] - parameter_range[0]
+ return 100.0*(value - parameter_range[0])/delta
+
+
+class PointOnSurfacePanel(BaseFeaturePanel):
+ """Create a feature on a surface."""
+ def __init__(self, selected, so_desc):
+ uiform_path = os.path.join(uidir, "FramePlacer.ui")
+ self.form = FreeCADGui.PySideUic.loadUi(uiform_path)
+ # Enable both parameters
+ self.form.ULabel.setVisible(True)
+ self.form.VLabel.setVisible(True)
+ self.form.UBox.setVisible(True)
+ self.form.VBox.setVisible(True)
+ QtCore.QObject.connect(self.form.VBox,
+ QtCore.SIGNAL("valueChanged(double)"),
+ self.parameterChanged)
+ QtCore.QObject.connect(self.form.UBox,
+ QtCore.SIGNAL("valueChanged(double)"),
+ self.parameterChanged)
+ # Enable percentage or param selection
+ self.form.OptionsLabel.setEnabled(True)
+ self.form.OptionsLabel.setVisible(True)
+ self.form.OptionsLabel.setText("Surf. param.")
+ self.form.OptionsBox.setEnabled(True)
+ self.form.OptionsBox.setVisible(True)
+ self.form.OptionsBox.addItems(["mm", "%"])
+ QtCore.QObject.connect(self.form.OptionsBox,
+ QtCore.SIGNAL("currentIndexChanged(QString)"),
+ self.choiceChanged)
+ BaseFeaturePanel.__init__(self, selected, so_desc)
+
+ # Place the frame wherever the values are atm
+ self.local_ffpl = FreeCAD.Placement()
+ self.createFrame()
+ self.fframe.Positioning = "PointOnSurface"
+ self.choiceChanged(self.form.OptionsBox.currentText())
+ self.parameterChanged()
+
+ def parameterChanged(self):
+ value = (self.form.UBox.value(), self.form.VBox.value())
+ if self.form.OptionsBox.currentText() == "%":
+ value = self.p2mm(value)
+ face = self.selected.SubObjects[0]
+ point = face.valueAt(*value)
+ normaldir = face.normalAt(*value)
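+ # Align the frame's z-axis with the surface normal at (u, v)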
+ rotation = FreeCAD.Rotation(FreeCAD.Vector(0, 0, 1),
+ normaldir)
+ abs_ffpl = FreeCAD.Placement(point, rotation)
+ parent_pl = self.selected.Object.Placement
+ self.local_ffpl = parent_pl.inverse().multiply(abs_ffpl)
+ self.fframe.FeaturePlacement = self.local_ffpl
+ # Force recompute of placement
+ self.fframe.Placement = self.fframe.Placement
+
+ def choiceChanged(self, choice):
+ value = (self.form.UBox.value(), self.form.VBox.value())
+ if choice == "mm":
+ value = self.p2mm(value)
+ parameter_range = self.selected.SubObjects[0].ParameterRange
+ self.form.UBox.setRange(parameter_range[0], parameter_range[1])
+ self.form.UBox.setSuffix("mm")
+ self.form.UBox.setSingleStep(0.1)
+ self.form.VBox.setRange(parameter_range[2], parameter_range[3])
+ self.form.VBox.setSuffix("mm")
+ self.form.VBox.setSingleStep(0.1)
+ elif choice == "%":
+ value = self.mm2p(value)
+ self.form.UBox.setRange(0.0, 100.0)
+ self.form.UBox.setSuffix("%")
+ self.form.VBox.setRange(0.0, 100.0)
+ self.form.UBox.setSingleStep(1.0)
+ self.form.VBox.setSuffix("%")
+ self.form.VBox.setSingleStep(1.0)
+ self.form.UBox.setValue(value[0])
+ self.form.VBox.setValue(value[1])
+
+
+ def p2mm(self, value):
+ parameter_range = self.selected.SubObjects[0].ParameterRange
+ delta = [parameter_range[1] - parameter_range[0],
+ parameter_range[3] - parameter_range[2]]
+ u = 0.01*value[0]*delta[0] + parameter_range[0]
+ v = 0.01*value[1]*delta[1] + parameter_range[2]
+ return (u, v)
+
+ def mm2p(self, value):
+ parameter_range = self.selected.SubObjects[0].ParameterRange
+ delta = [parameter_range[1] - parameter_range[0],
+ parameter_range[3] - parameter_range[2]]
+ u = 100.0*(value[0] - parameter_range[0])/delta[0]
+ v = 100.0*(value[1] - parameter_range[2])/delta[1]
+ return (u, v)
+
+
+class CenterPanel(BaseFeaturePanel):
+ """Create a feature frame on center."""
+ def __init__(self, selected, so_desc):
+ uiform_path = os.path.join(uidir, "FramePlacer.ui")
+ self.form = FreeCADGui.PySideUic.loadUi(uiform_path)
+ BaseFeaturePanel.__init__(self, selected, so_desc)
+ edge_curve_list = ["ArcOfCircle",
+ "ArcOfEllipse",
+ "ArcOfHyperbola",
+ "ArcOfParabola",
+ "Circle",
+ "Ellipse",
+ "Hyperbola",
+ "Parabola"]
+ face_surf_list = ["Sphere",
+ "Toroid"]
+ if so_desc[0] in edge_curve_list:
+ edge = selected.SubObjects[0]
+ axis = edge.Curve.Axis
+ rotation = FreeCAD.Rotation(FreeCAD.Vector(0, 0, 1),
+ axis)
+ center_point = edge.Curve.Center
+ elif so_desc[0] in face_surf_list:
+ face = selected.SubObjects[0]
+ axis = face.Surface.Axis
+ rotation = FreeCAD.Rotation(FreeCAD.Vector(0, 0, 1),
+ axis)
+ center_point = face.Surface.Center
+ else:
+ rotation = FreeCAD.Rotation()
+ center_point = selected.SubObjects[0].CenterOfMass
+ parent_pl = selected.Object.Placement
+ abs_pl = FreeCAD.Placement(center_point,
+ rotation)
+ self.local_ffpl = parent_pl.inverse().multiply(abs_pl)
+ self.createFrame()
+ self.fframe.Positioning = "Center"
+
+
+class PointOnCenterlinePanel(BaseFeaturePanel):
+ """Create a point on centerline of primitive."""
+ def __init__(self, selected, so_desc):
+ uiform_path = os.path.join(uidir, "FramePlacer.ui")
+ self.form = FreeCADGui.PySideUic.loadUi(uiform_path)
+ BaseFeaturePanel.__init__(self, selected, so_desc)
+ # Enable the along line parameter
+ self.form.VLabel.setVisible(True)
+ self.form.VLabel.setText("u")
+ self.form.VBox.setVisible(True)
+ QtCore.QObject.connect(self.form.VBox,
+ QtCore.SIGNAL("valueChanged(double)"),
+ self.parameterChanged)
+ # Enable percentage of param selection
+ self.form.OptionsLabel.setVisible(True)
+ self.form.OptionsLabel.setText("Line param.")
+ self.form.OptionsBox.setVisible(True)
+ self.form.OptionsBox.addItems(["mm", "%"])
+ QtCore.QObject.connect(self.form.OptionsBox,
+ QtCore.SIGNAL("currentIndexChanged(QString)"),
+ self.choiceChanged)
+ # Place the frame wherever the values are atm
+ self.local_ffpl = FreeCAD.Placement()
+ self.createFrame()
+ self.fframe.Positioning = "PointOnCenterline"
+ self.parameterChanged()
+
+ def parameterChanged(self):
+ value = self.form.VBox.value()
+ if self.form.OptionsBox.currentText() == "%":
+ value = self.p2mm(value)
+ displacement_pl = FreeCAD.Placement(FreeCAD.Vector(0, 0, value),
+ FreeCAD.Rotation())
+ # Find the center
+ axis = self.selected.SubObjects[0].Surface.Axis
+ rotation = FreeCAD.Rotation(FreeCAD.Vector(0, 0, 1),
+ axis)
+ center_point = self.selected.SubObjects[0].Surface.Center
+ center_pl = FreeCAD.Placement(center_point, rotation)
+ abs_ffpl = center_pl.multiply(displacement_pl)
+ parent_pl = self.selected.Object.Placement
+ self.local_ffpl = parent_pl.inverse().multiply(abs_ffpl)
+ self.fframe.FeaturePlacement = self.local_ffpl
+ # force recompute of placement
+ self.fframe.Placement = self.fframe.Placement
+
+ def choiceChanged(self, choice):
+ FreeCAD.Console.PrintMessage("choiceChanged\n")
+ value = self.form.VBox.value()
+ FreeCAD.Console.PrintMessage("preval:"+str(value)+"\n")
+ if choice == "mm":
+ value = self.p2mm(value)
+ self.form.VBox.setSuffix("mm")
+ parameter_range = self.selected.SubObjects[0].ParameterRange[2:]
+ self.form.VBox.setRange(*parameter_range)
+ self.form.VBox.setSingleStep(0.1)
+ elif choice == "%":
+ value = self.mm2p(value)
+ self.form.VBox.setSuffix("%")
+ self.form.VBox.setRange(0, 100)
+ self.form.VBox.setSingleStep(1.0)
+ self.form.VBox.setValue(value)
+ FreeCAD.Console.PrintMessage("postval:"+str(value)+"\n")
+
+ def p2mm(self, value):
+ parameter_range = self.selected.SubObjects[0].ParameterRange[2:]
+ delta = parameter_range[1] - parameter_range[0]
+ return 0.01*value*delta + parameter_range[0]
+
+ def mm2p(self, value):
+ parameter_range = self.selected.SubObjects[0].ParameterRange[2:]
+ delta = parameter_range[1] - parameter_range[0]
+ return 100.0*(value - parameter_range[0])/delta
diff --git a/ARTasks.py b/ARTasks.py
new file mode 100644
index 0000000..4b572f7
--- /dev/null
+++ b/ARTasks.py
@@ -0,0 +1,501 @@
+import FreeCAD
+import ARTools
+import ARFrames
+if FreeCAD.GuiUp:
+ import os
+ import FreeCADGui
+ from PySide import QtCore, QtGui, QtSvg
+__title__ = "ARTasks"
+__author__ = "Mathias Hauan Arbo"
+__workbenchname__ = "ARBench"
+__version__ = "0.1"
+__url__ = "https://github.com/mahaarbo/ARBench"
+__doc__ = """"""
+
+# Ideally tasks inherit from base task, and are constructed based on what is
+# available in the expert system
+
+
+###################################################################
+## Module objects
+###################################################################
+class BaseTask(object):
+ """
+ Base task object that new tasks inherit from.
+ """
+ def __init__(self, obj):
+ obj.Proxy = self
+
+ def onChanged(self, fp, prop):
+ pass
+
+ def execute(self, obj):
+ pass
+
+ def __getstate__(self):
+ return None
+
+ def __setstate__(self, state):
+ return None
+
+
+class InsertTask(BaseTask):
+ def __init__(self, obj, holeface, pegface):
+ BaseTask.__init__(self, obj)
+
+ # Hole info
+ obj.addProperty("App::PropertyLinkSub", "HoleFace",
+ "Feature", "The inner face of the hole.").HoleFace = holeface
+ obj.addProperty("App::PropertyString", "HolePart",
+ "Feature", "The part that the holeface is on.").HolePart = holeface[0].Label
+ obj.setEditorMode("HolePart", 1)
+ obj.addProperty("App::PropertyString", "HoleFaceID",
+ "Feature", "The FaceID associated with the hole.").HoleFaceID = holeface[1]
+ obj.setEditorMode("HoleFaceID", 1)
+ obj.setEditorMode("HoleFaceID", 1)
+ holeface_subelem = holeface[0].Shape.getElement(holeface[1])
+ holefeature, temp = ARTools.describeSubObject(holeface_subelem)
+ obj.addProperty("App::PropertyString", "HoleFeature",
+ "Feature", "The type of feature of the hole.").HoleFeature = holefeature
+ obj.setEditorMode("HoleFeature", 1)
+
+ # Peg info
+ obj.addProperty("App::PropertyLinkSub", "PegFace",
+ "Feature", "The outer face of the peg.").PegFace = pegface
+ obj.addProperty("App::PropertyString", "PegPart",
+ "Feature", "The part that the pegface is on.").PegPart = pegface[0].Label
+ obj.setEditorMode("PegPart", 1)
+ pegface_subelem = pegface[0].Shape.getElement(pegface[1])
+ pegfeature, temp = ARTools.describeSubObject(pegface_subelem)
+ obj.addProperty("App::PropertyString", "PegFeature",
+ "Feature", "The type of feature of the peg.").PegFeature = pegfeature
+ obj.setEditorMode("PegFeature", 1)
+
+ # Create Task Frame
+ # For now it is set to be pointing down the hole axis.
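+ # Pick which end of the peg face to use depending on whether the hole and peg axes point in opposite or the same direction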
+ if holeface_subelem.Surface.Axis.dot(pegface_subelem.Surface.Axis) < 0:
+ p_dist = pegface_subelem.ParameterRange[3]
+ v = FreeCAD.Vector(0, 0, 1)
+ else:
+ p_dist = pegface_subelem.ParameterRange[2]
+ v = FreeCAD.Vector(0, 0, -1)
+ axis_rot = FreeCAD.Rotation(FreeCAD.Vector(0, 0, 1), pegface_subelem.Surface.Axis)
+ task_frame_rot = FreeCAD.Rotation(v, pegface_subelem.Surface.Axis)
+
+ # The peg face can be anywhere on the part, so build the task frame in absolute coordinates first, then convert it to the peg's local frame
+ center_point = pegface_subelem.Surface.Center
+ center_placement = FreeCAD.Placement(center_point, axis_rot)
+ abs_taskframe_point = center_placement.multVec(FreeCAD.Vector(0, 0, p_dist))
+ abs_taskframe_placement = FreeCAD.Placement(abs_taskframe_point, task_frame_rot)
+ inv_pegobject_placement = pegface[0].Placement.inverse()
+
+ peglocal_taskframe_placement = inv_pegobject_placement.multiply(abs_taskframe_placement)
+ taskframe = ARFrames.makeFeatureFrame(pegface[0], peglocal_taskframe_placement)
+ taskframe.Label = obj.Label+"_task_frame"
+ obj.addProperty("App::PropertyLink", "TaskFrame", "Process", "The task frame for the insertion task.").TaskFrame = taskframe
+
+
+class ScrewTask(InsertTask):
+ def __init__(self, obj, holeface, screwface):
+ BaseTask.__init__(self, obj)
+ # Hole info
+ obj.addProperty("App::PropertyLinkSub", "HoleFace", "Feature", "The inner face of the screw.").HoleFace = holeface
+ obj.addProperty("App::PropertyString", "HolePart", "Feature", "The part that the holeface is on.").HolePart = holeface[0].Label
+ obj.setEditorMode("HolePart", 1)
+ obj.addProperty("App::PropertyString", "HoleFaceID", "Feature", "The FaceID associated with the hole.").HoleFaceID = holeface[1]
+ obj.setEditorMode("HoleFaceID", 1)
+ holeface_subelem = holeface[0].Shape.getElement(holeface[1])
+ holefeature, temp = ARTools.describeSubObject(holeface_subelem)
+ obj.addProperty("App::PropertyString", "HoleFeature", "Feature", "The type of feature of the hole.").HoleFeature = holefeature
+ obj.setEditorMode("HoleFaceID", 1)
+
+ # Screw info
+ obj.addProperty("App::PropertyLinkSub", "ScrewFace", "Feature", "The outer face of the screw.").ScrewFace = screwface
+ obj.addProperty("App::PropertyString", "ScrewPart", "Feature", "The part that the screwface is on").ScrewPart = screwface[0].Label
+ obj.setEditorMode("ScrewPart", 1)
+ obj.addProperty("App::PropertyString", "ScrewFaceID", "Feature", "The FaceID associated with the screw.").ScrewFaceID = screwface[1]
+ obj.setEditorMode("ScrewFaceID", 1)
+ screwface_subelem = screwface[0].Shape.getElement(screwface[1])
+ screwfeature, temp = ARTools.describeSubObject(screwface_subelem)
+
+ # Create Task Frame
+ # For now we set the task frame to be pointing down the hole axis.
+ if holeface_subelem.Surface.Axis.dot(screwface_subelem.Surface.Axis) < 0:
+ p_dist = screwface_subelem.ParameterRange[3]
+ v = FreeCAD.Vector(0, 0, 1)
+ else:
+ p_dist = screwface_subelem.ParameterRange[2]
+ v = FreeCAD.Vector(0, 0, -1)
+ axis_rot = FreeCAD.Rotation(FreeCAD.Vector(0, 0, 1), screwface_subelem.Surface.Axis)
+ task_frame_rot = FreeCAD.Rotation(v, screwface_subelem.Surface.Axis)
+
+ # The screw face can be anywhere on the screw, so build the task frame in absolute coordinates first, then convert it to the screw's local frame
+ center_point = screwface_subelem.Surface.Center
+ center_placement = FreeCAD.Placement(center_point, axis_rot)
+ abs_taskframe_point = center_placement.multVec(FreeCAD.Vector(0, 0, p_dist))
+ abs_taskframe_placement = FreeCAD.Placement(abs_taskframe_point, task_frame_rot)
+ inv_screwobject_placement = screwface[0].Placement.inverse()
+
+ screwlocal_taskframe_placement = inv_screwobject_placement.multiply(abs_taskframe_placement)
+ taskframe = ARFrames.makeFeatureFrame(screwface[0], screwlocal_taskframe_placement)
+ taskframe.Label = obj.Label+"_task_frame"
+ obj.addProperty("App::PropertyLink", "TaskFrame", "Process", "The task frame for the insertion task.").TaskFrame = taskframe
+
+
+class PlaceTask(BaseTask):
+ def __init__(self, obj, partA, partB):
+ BaseTask.__init__(self, obj)
+ obj.addProperty("App::PropertyLink", "PartA",
+ "Feature", "The first face.").PartA = partA
+ obj.addProperty("App::PropertyLink", "PartB",
+ "Feature", "The second face.").PartB = partB
+ obj.setEditorMode("PartA", 1)
+ obj.setEditorMode("PartB", 1)
+ obj.addProperty("App::PropertyVector", "AssemblyAxis",
+ "Process", "The linear axis to move A onto B.").AssemblyAxis = FreeCAD.Vector()
+
+
+class CustomTask(BaseTask):
+ def __init__(self, obj, partA, partB):
+ BaseTask.__init__(self, obj)
+ obj.addProperty("App::PropertyLink", "PartA",
+ "Parts", "The first part.").PartA = partA
+ obj.addProperty("App::PropertyLink", "PartB",
+ "Parts", "The second part.").PartB = partB
+ obj.addProperty("App::PropertyString", "TaskDescription",
+ "Process", "The description of the custom task.").TaskDescription = ""
+
+
+###################################################################
+## ViewProviders to the objects
+###################################################################
+class ViewProviderBaseTask(object):
+ """
+ View provider for the base task.
+ """
+ def __init__(self, vobj):
+ vobj.Proxy = self
+ col = (0.64, 0., 0., 0.)
+ vobj.addProperty("App::PropertyColor", "Color").Color = col
+
+ def attach(self, vobj):
+ pass
+
+ def updateData(self, fp, prop):
+ pass
+
+ def getDisplayModes(self, vobj):
+ modes = []
+ return modes
+
+ def getDefaultDisplayMode(self):
+ return None
+
+ def setDisplayMode(self, mode):
+ return mode
+
+ def onChanged(self, vp, prop):
+ pass
+
+ def getIcon(self):
+ return """"""
+
+ def __getstate__(self):
+ return None
+
+ def __setstate__(self, state):
+ return None
+
+
+###################################################################
+## Module functions
+###################################################################
+def createInsertTask(holeface, pegface):
+ a = FreeCAD.ActiveDocument.addObject("App::FeaturePython", "Task")
+ InsertTask(a, holeface, pegface)
+ # if FreeCAD.GuiUp:
+ # ViewProviderBaseTask(a.ViewObject)
+ return a
+
+
+def createScrewTask(holeface, screwface):
+ a = FreeCAD.ActiveDocument.addObject("App::FeaturePython", "Task")
+ ScrewTask(a, holeface, screwface)
+ # if FreeCAD.GuiUp:
+ # ViewProviderBaseTask(a.ViewObject)
+ return a
+
+
+def createCustomTask(partA, partB):
+ a = FreeCAD.ActiveDocument.addObject("App::FeaturePython", "Task")
+ CustomTask(a, partA, partB)
+ if FreeCAD.GuiUp:
+ ViewProviderBaseTask(a.ViewObject)
+ return a
+
+
+def spawnTaskCreator():
+ taskpanel = TaskSelectionPanel()
+ FreeCADGui.Control.showDialog(taskpanel)
+
+
+###################################################################
+## GUI Commands
+###################################################################
+if FreeCAD.GuiUp:
+ icondir = os.path.join(FreeCAD.getUserAppDataDir(),
+ "Mod", __workbenchname__, "UI", "icons")
+
+ def spawnClassCommand(command_classname,
+ command_function,
+ command_resources):
+ """
+ Commands, or buttons, are tedious to write, so this function spawns one
+ when the command_function to be executed takes no arguments.
+ Example:
+ spawnClassCommand("testCommand", testfunc,
+ {"Pixmap": "", "MenuText": "test", "ToolTip": "Test tooltip"})
+ then add "testCommand" to the command list in InitGui.py
+ """
+ def Activated(s):
+ command_function()
+ def GetResources(s):
+ return command_resources
+ commandClass = type("command_classname", (object,), {"Activated":Activated,"GetResources":GetResources})
+ FreeCADGui.addCommand(command_classname,commandClass())
+
+ spawnClassCommand("spawnTaskCreatorCommand",spawnTaskCreator, {"Pixmap":str(os.path.join(icondir, "taskcreator.svg")),
+ "MenuText": "Spawn task creator",
+ "ToolTip": "Spawn task creator."})
+
+###################################################################
+## GUI widgets
+###################################################################
+if FreeCAD.GuiUp:
+ uidir = os.path.join(FreeCAD.getUserAppDataDir(),"Mod",__workbenchname__, "UI")
+ icondir = os.path.join(uidir,"icons")
+
+ class TaskSelectionPanel:
+ """Choose appropriate skill"""
+ def __init__(self):
+ self.form = FreeCADGui.PySideUic.loadUi(os.path.join(uidir,"FeatureFrameCreator.ui"))
+ self.choices = ("Insert","Place","Screw")
+ self.picked_type = "Insert"
+ self.form.ChoicesBox.addItems(self.choices)
+ QtCore.QObject.connect(self.form.ChoicesBox,
+ QtCore.SIGNAL("currentIndexChanged(QString)"),
+ self.ChoiceChanged)
+ self.form.PickedTypeLabel.setText(self.picked_type)
+ self.scenes = {}
+ iscene = QtGui.QGraphicsScene()
+ iscene.addItem(QtSvg.QGraphicsSvgItem(str(os.path.join(icondir, "inserttask.svg"))))
+ self.scenes["Insert"] = iscene
+ pscene = QtGui.QGraphicsScene()
+ pscene.addItem(QtSvg.QGraphicsSvgItem(str(os.path.join(icondir, "placetask.svg"))))
+ self.scenes["Place"] = pscene
+ sscene = QtGui.QGraphicsScene()
+ sscene.addItem(QtSvg.QGraphicsSvgItem(str(os.path.join(icondir, "screwtask.svg"))))
+ self.scenes["Screw"] = sscene
+ self.ChoiceChanged(self.picked_type)
+
+ def ChoiceChanged(self, choice):
+ if choice in self.scenes.keys():
+ self.picked_type = choice
+ self.form.Preview.setScene(self.scenes[choice])
+ self.form.PickedTypeLabel.setText(self.picked_type)
+ def accept(self):
+ paneldict = {"Insert":InsertPanel,
+ "Place":PlacePanel,
+ "Screw":ScrewPanel}
+ picked_type = self.form.ChoicesBox.currentText()
+ new_panel = paneldict[picked_type]()
+ FreeCADGui.Control.closeDialog()
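+ # The dialog only closes after accept() returns, so open the next panel with a delayed single-shot timer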
+ QtCore.QTimer.singleShot(0,
+ lambda: FreeCADGui.Control.showDialog(new_panel))
+
+ def reject(self):
+ FreeCADGui.Control.closeDialog()
+
+
+ class InsertPanel(object):
+ """Selection panel for the insertion task."""
+ def __init__(self):
+ self.form = FreeCADGui.PySideUic.loadUi(os.path.join(uidir, "InsertTaskCreator.ui"))
+ self.pegsel = None
+ self.holesel = None
+ QtCore.QObject.connect(self.form.SelectButton,
+ QtCore.SIGNAL("released()"),
+ self.FromSelection)
+ QtCore.QObject.connect(self.form.AnimateButton,
+ QtCore.SIGNAL("released()"),
+ self.Animate)
+
+ def FromSelection(self):
+ s = FreeCADGui.Selection.getSelectionEx()
+ #Nothing selected, do nothing
+ if len(s) == 0:
+ FreeCAD.Console.PrintWarning("No part selected.\n")
+ return
+ #One selected
+ elif len(s) == 1:
+ if not s[0].HasSubObjects:
+ FreeCAD.Console.PrintWarning("Selection has no subobject.\n")
+ return
+ if s[0].SubObjects[0].Orientation == "Forward":
+ FreeCAD.Console.PrintMessage("Pegface selected.\n")
+ self.SetPeg(s[0])
+ elif s[0].SubObjects[0].Orientation == "Reversed":
+ FreeCAD.Console.PrintMessage("Holeface selected.\n")
+ self.SetHole(s[0])
+ else:
+ FreeCAD.Console.PrintWarning("Only handles Forward and Reversed oriented faces.\n")
+ else:
+ FreeCAD.Console.PrintError("Too many parts involved, undefined behavior.\n")
+ return
+
+ def ClearPeg(self):
+ self.pegsel = None
+ self.form.PegPartField.setText("PartLabel")
+ self.form.PegFaceIDField.setText("FaceID")
+ self.form.PegFeatureSummary.setText("Feature Description")
+ return
+
+ def ClearHole(self):
+ self.holesel = None
+ self.form.HolePartField.setText("PartLabel")
+ self.form.HoleFaceIDField.setText("FaceID")
+ self.form.HoleFeatureSummary.setText("Feature Description")
+ return
+
+ def SetPeg(self,sel):
+ self.pegsel = sel
+ self.form.PegPartField.setText(sel.Object.Label)
+ self.form.PegFaceIDField.setText(sel.SubElementNames[0])
+ class_name, prim_class_name = ARTools.describeSubObject(sel.SubObjects[0])
+ self.form.PegFeatureSummary.setText(class_name)
+ return
+
+ def SetHole(self,sel):
+ self.holesel = sel
+ self.form.HolePartField.setText(sel.Object.Label)
+ self.form.HoleFaceIDField.setText(sel.SubElementNames[0])
+ class_name, prim_class_name = ARTools.describeSubObject(sel.SubObjects[0])
+ self.form.HoleFeatureSummary.setText(class_name)
+ return
+
+ def Animate(self):
+ FreeCAD.Console.PrintWarning("Not available yet.\n")
+ pass
+
+ def accept(self):
+ if self.pegsel is None:
+ FreeCAD.Console.PrintError("No pegface selected.\n")
+ return
+ if self.holesel is None:
+ FreeCAD.Console.PrintError("No holeface selected.\n")
+ return
+ holeface = self.holesel.Object, self.holesel.SubElementNames[0]
+ pegface = self.pegsel.Object, self.pegsel.SubElementNames[0]
+ ins_task = createInsertTask(holeface, pegface)
+ txtlabel = self.form.TaskLabelField.text()
+ if txtlabel == "":
+ txtlabel = "Task"+self.holesel.Object.Label+self.pegsel.Object.Label
+ ins_task.Label = txtlabel
+ ins_task.TaskFrame.Label = txtlabel+"_task_frame"
+ FreeCADGui.Control.closeDialog()
+
+ def reject(self):
+ FreeCADGui.Control.closeDialog()
+
+ class PlacePanel(object):
+ def __init__(self):
+ pass
+ def SetFaceA(self,sel):
+ pass
+ def SetFaceB(self,sel):
+ pass
+
+ def accept(self):
+ pass
+ def reject(self):
+ FreeCADGui.Control.closeDialog()
+
+ class ScrewPanel(object):
+ def __init__(self):
+ self.form = FreeCADGui.PySideUic.loadUi(os.path.join(uidir, "ScrewTaskCreator.ui"))
+ self.screwsel = None
+ self.holesel = None
+ QtCore.QObject.connect(self.form.SelectButton,
+ QtCore.SIGNAL("released()"),
+ self.FromSelection)
+ QtCore.QObject.connect(self.form.AnimateButton,
+ QtCore.SIGNAL("released()"),
+ self.Animate)
+ def FromSelection(self):
+ s = FreeCADGui.Selection.getSelectionEx()
+ #Nothing selected, do nothing
+ if len(s) == 0:
+ FreeCAD.Console.PrintWarning("No face selected.\n")
+ #One selected
+ elif len(s) == 1:
+ if not s[0].HasSubObjects:
+ FreeCAD.Console.PrintError("Selection has no subobject.\n")
+ elif s[0].SubObjects[0].Orientation == "Forward":
+ FreeCAD.Console.PrintMessage("Screwface selected.\n")
+ self.SetScrew(s[0])
+ elif s[0].SubObjects[0].Orientation == "Reversed":
+ FreeCAD.Console.PrintMessage("Holeface selected.\n")
+ self.SetHole(s[0])
+ else:
+ FreeCAD.Console.PrintError("Only handles Forward and Reversed oriented faces.\n")
+ else:
+ FreeCAD.Console.PrintError("Too many parts involved, undefined behavior.\n")
+ return
+
+ def ClearScrew(self):
+ self.screwsel = None
+ self.form.PegPartField.setText("PartLabel")
+ self.form.PegFaceIDField.setText("FaceID")
+ self.form.ScrewTypeField.setText("ScrewType")
+
+ def ClearHole(self):
+ self.holesel = None
+ self.form.HolePartField.setText("PartLabel")
+ self.form.HoleFaceIDField.setText("FaceID")
+ return
+
+ def SetScrew(self,sel):
+ self.screwsel = sel
+ self.form.PegPartField.setText(sel.Object.Label)
+ self.form.PegFaceIDField.setText(sel.SubElementNames[0])
+ class_name, prim_class_name = ARTools.describeSubObject(sel.SubObjects[0])
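+ # Rough metric designation from the face diameter: M = 2 * radius, rounded down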
+ self.form.ScrewTypeField.setText("M"+str(int(2*sel.SubObjects[0].Surface.Radius)))
+
+ return
+
+ def SetHole(self,sel):
+ self.holesel = sel
+ self.form.HolePartField.setText(sel.Object.Label)
+ self.form.HoleFaceIDField.setText(sel.SubElementNames[0])
+ return
+
+ def Animate(self):
+ FreeCAD.Console.PrintWarning("Not available yet.\n")
+ pass
+
+ def accept(self):
+ if self.screwsel is None:
+ FreeCAD.Console.PrintError("No screwface selected.\n")
+ return
+ if self.holesel is None:
+ FreeCAD.Console.PrintError("No holeface selected.\n")
+ return
+ holeface = self.holesel.Object, self.holesel.SubElementNames[0]
+ screwface = self.screwsel.Object, self.screwsel.SubElementNames[0]
+ ins_task = createScrewTask(holeface, screwface)
+ txtlabel = self.form.TaskLabelField.text()
+ if txtlabel == "":
+ txtlabel = "task_"+self.holesel.Object.Label+self.screwsel.Object.Label
+ ins_task.Label = txtlabel
+ ins_task.TaskFrame.Label = txtlabel+"_task_frame"
+ FreeCADGui.Control.closeDialog()
+
+ def reject(self):
+ FreeCADGui.Control.closeDialog()
diff --git a/ARTools.py b/ARTools.py
new file mode 100644
index 0000000..8d9de03
--- /dev/null
+++ b/ARTools.py
@@ -0,0 +1,520 @@
+import FreeCAD
+import Part
+import json # For exporting part infos
+import os # for safer path handling
+if FreeCAD.GuiUp:
+ import FreeCADGui
+ from PySide import QtGui
+
+__title__ = "ARTools"
+__author__ = "Mathias Hauan Arbo"
+__workbenchname__ = "ARBench"
+__version__ = "0.1"
+__url__ = "https://github.com/mahaarbo/ARBench"
+__doc__ = """
+Useful tools for the Annotations for Robotics workbench."""
+
+
+###################################################################
+# Module functions
+###################################################################
+def vector2list(vec, scale=1e-3):
+ """Gives the vector as a list, set scale for scaling factor.
+ default scale = 1e-3 for units in m."""
+ return [vec.x*scale, vec.y*scale, vec.z*scale]
+
+
+def matrix2list(mat, scale=1e-3):
+ """Gives the transformation matrix as a list, set scale 1 to get in mm."""
+ return [[mat.A11, mat.A12, mat.A13, mat.A14*scale],
+ [mat.A21, mat.A22, mat.A23, mat.A24*scale],
+ [mat.A31, mat.A32, mat.A33, mat.A34*scale],
+ [mat.A41, mat.A42, mat.A43, mat.A44]]
+
+
+def placement2axisvec(pl, scale=1e-3):
+ """Gives the placement as an dictionary of origin and rotation.
+ origin: [x,y,z], rotation:{axis:[ax,ay,az], angle:ang}"""
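+ # Example (scale=1e-3): {"origin": [0.1, 0.0, 0.0], "rotation": {"axis": [0.0, 0.0, 1.0], "angle": 1.5708}}, angle in radians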
+ return {"origin": vector2list(pl.Base, scale),
+ "rotation": {"axis": vector2list(pl.Rotation.Axis, scale=1),
+ "angle": pl.Rotation.Angle}}
+
+
+def boundingBox2list(bb, scale=1e-3):
+ """Gives the bounding box as a list in m instead of mm"""
+ return [bb.XMin*scale, bb.XMax*scale,
+ bb.YMin*scale, bb.YMax*scale,
+ bb.ZMin*scale, bb.ZMax*scale]
+
+
+def principalProperties2dict(pp, scale=1e-3):
+ npp = {}
+ for key, value in pp.iteritems():
+ if type(value) is FreeCAD.Vector:
+ npp[key.lower()] = vector2list(value, scale=scale)
+ else:
+ npp[key.lower()] = value
+ return npp
+
+
+def describeSubObject(subobj):
+ """Returns PrimitiveType, ShapeType."""
+ if isinstance(subobj, Part.Vertex):
+ return "Vertex", "Vertex"
+ elif isinstance(subobj, Part.Edge):
+ if isinstance(subobj.Curve, Part.Arc):
+ return "Arc", "Edge"
+ elif isinstance(subobj.Curve, Part.ArcOfCircle):
+ return "ArcOfCircle", "Edge"
+ elif isinstance(subobj.Curve, Part.ArcOfEllipse):
+ return "ArcOfEllipse", "Edge"
+ elif isinstance(subobj.Curve, Part.ArcOfHyperbola):
+ return "ArcOfHyperbola", "Edge"
+ elif isinstance(subobj.Curve, Part.ArcOfParabola):
+ return "ArcOfParabola", "Edge"
+ elif isinstance(subobj.Curve, Part.BSplineCurve):
+ return "BSplineCurve", "Edge"
+ elif isinstance(subobj.Curve, Part.BezierCurve):
+ return "BezierCurve", "Edge"
+ elif isinstance(subobj.Curve, Part.Circle):
+ return "Circle", "Edge"
+ elif isinstance(subobj.Curve, Part.Ellipse):
+ return "Ellipse", "Edge"
+ elif isinstance(subobj.Curve, Part.Hyperbola):
+ return "Hyperbola", "Edge"
+ elif isinstance(subobj.Curve, Part.Line):
+ return "Line", "Edge"
+ elif isinstance(subobj.Curve, Part.Parabola):
+ return "Parabola", "Edge"
+ else:
+ FreeCAD.Console.PrintError("Unknown edge type")
+ elif isinstance(subobj, Part.Face):
+ if isinstance(subobj.Surface, Part.BSplineSurface):
+ return "BSplineSurface", "Face"
+ elif isinstance(subobj.Surface, Part.BezierSurface):
+ return "BezierSurface", "Face"
+ elif isinstance(subobj.Surface, Part.Cylinder):
+ return "Cylinder", "Face"
+ elif isinstance(subobj.Surface, Part.Plane):
+ return "Plane", "Face"
+ elif isinstance(subobj.Surface, Part.Sphere):
+ return "Sphere", "Face"
+ elif isinstance(subobj.Surface, Part.Toroid):
+ return "Toroid", "Face"
+ elif isinstance(subobj.Surface, Part.Cone):
+ return "Cone", "Face"
+ else:
+ FreeCAD.Console.PrintError("Unknown surface type")
+ # Better strategy desirable for the following:
+ elif isinstance(subobj, Part.Wire):
+ return "Wire", "Wire"
+ elif isinstance(subobj, Part.Shell):
+ return "Shell", "Shell"
+ elif isinstance(subobj, Part.Solid):
+ return "Solid", "Solid"
+ elif isinstance(subobj, Part.CompSolid):
+ return "CompSolid", "CompSolid"
+ elif isinstance(subobj, Part.Compound):
+ return "Compound", "Compound"
+ else:
+ FreeCAD.Console.PrintError("Unable to identify subobject.")
+
+
+def closeToZero(a, tol=1e-10):
+ return abs(a) < tol
+
+
+def spawnClassCommand(classname, function, resources):
+ """
+ Commands, or buttons, are tedious to write. So this function spawns
+ one if the function to be executed takes no arguments.
+ Example usage:
+ spawnClassCommand("testcommand", testfunc,
+ {"Pixmap":"", "MenuText":"menutext","ToolTip":"tooltiptext"})
+ then add "testcommand" to commandlist in InitGui.py
+ """
+ def Activated(s):
+ function()
+
+ def GetResources(s):
+ return resources
+ CommandClass = type("classname", (object,), {"Activated": Activated,
+ "GetResources": GetResources})
+ FreeCADGui.addCommand(classname, CommandClass())
+
+
+def getLocalPartProps(obj):
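+ # Temporarily zero the placement so bounding box, center of mass, etc. are reported in the part's local frame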
+ old_placement = obj.Placement
+ obj.Placement = FreeCAD.Placement()
+ # Part properties
+ partprops = {
+ "label": obj.Label,
+ "placement": placement2axisvec(old_placement),
+ "boundingbox": boundingBox2list(obj.Shape.BoundBox),
+ "volume": obj.Shape.Volume*1e-9,
+ "centerofmass": vector2list(obj.Shape.CenterOfMass),
+ "principalproperties": principalProperties2dict(obj.Shape.PrincipalProperties)
+ }
+ obj.Placement = old_placement
+ return partprops
+
+
+###################################################################
+# Export functions
+###################################################################
+def exportPartInfo(obj, ofile):
+ """
+ Exports part info to a new json file.
+ The part info includes:
+ Placement relative to world frame, bounding box, volume, center of mass,
+ principal properties.
+ For more information on principal properties, see TopoShape in OCCT
+ documentation.
+ """
+ # File path stuff
+ odir, of = os.path.split(ofile)
+ if not os.path.exists(odir):
+ os.makedirs(odir)
+ if not of.lower().endswith(".json"):
+ ofile = ofile + ".json"
+
+ partprops = getLocalPartProps(obj)
+ with open(ofile, "wb") as propfile:
+ json.dump(partprops, propfile, indent=1, separators=(',', ': '))
+ return True
+
+
+def appendPartInfo(obj, ofile):
+ """Rewrites/appends part info to an existing json file.
+ The part info includes:
+ Placement relative to world frame, bounding box, volume, center of mass,
+ principal properties.
+ For more information on principal properties, see TopoShape in OCCT
+ documentation.
+ """
+ with open(ofile, "rb") as propfile:
+ partprops = json.load(propfile)
+ new_props = getLocalPartProps(obj)
+ partprops.update(new_props)
+ with open(ofile, "wb") as propfile:
+ json.dump(partprops, propfile, indent=1, separators=(',', ': '))
+ return True
+
+
+def exportFeatureFrames(obj, ofile):
+ """Exports feature frames attached to a part."""
+ # Get the feature frames
+ import ARFrames
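+ # Frames that link to this part show up in obj.InList; keep only the FeatureFrame proxies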
+ ff_check = lambda x: isinstance(x.Proxy, ARFrames.FeatureFrame)
+ ff_list = filter(ff_check, obj.InList)
+ ff_named = {ff.Label: ff.Proxy.getDict() for ff in ff_list}
+ feature_dict = {"features": ff_named}
+
+ # File stuff
+ odir, of = os.path.split(ofile)
+ if not os.path.exists(odir):
+ os.makedirs(odir)
+ if not of.lower().endswith(".json"):
+ ofile = ofile + ".json"
+ with open(ofile, "wb") as propfile:
+ json.dump(feature_dict, propfile, indent=1, separators=(',', ': '))
+ return True
+
+
+def appendFeatureFrames(obj, ofile):
+ """Rewrites/appends featureframes attached to a part to an existing json
+ file."""
+ # Get the feature frames
+ import ARFrames
+ with open(ofile, "rb") as propfile:
+ partprops = json.load(propfile)
+ ff_check = lambda x: isinstance(x.Proxy, ARFrames.FeatureFrame)
+ ff_list = filter(ff_check, obj.InList)
+ ff_named = {ff.Label: ff.Proxy.getDict() for ff in ff_list}
+ feature_dict = {"features": ff_named}
+ if "features" not in partprops.keys():
+ partprops.update(feature_dict)
+ else:
+ partprops["features"].update(feature_dict["features"])
+ with open(ofile, "wb") as propfile:
+ json.dump(partprops, propfile, indent=1, separators=(',', ': '))
+ return True
+
+
+def exportPartInfoDialogue():
+ """Spawns a dialogue window for part info exporting"""
+ # Select only true parts
+ s = FreeCADGui.Selection.getSelection()
+ FreeCADGui.Selection.clearSelection()
+ if len(s) == 0:
+ FreeCAD.Console.PrintError("No part selected.")
+ return False
+ unique_selected = []
+ for item in s:
+ if item not in unique_selected and isinstance(item, Part.Feature):
+ # Ensuring that we are parts
+ unique_selected.append(item)
+ FreeCADGui.Selection.addSelection(item)
+ # Fix wording
+ textprompt = "Save the properties of the part"
+ if len(unique_selected) > 1:
+ textprompt = textprompt + "s"
+ opts = QtGui.QFileDialog.DontConfirmOverwrite
+ # Create file dialog
+ ofile, filt = QtGui.QFileDialog.getSaveFileName(None, textprompt,
+ os.getenv("HOME"),
+ "*.json", options=opts)
+ if ofile == "":
+ # User cancelled
+ return False
+ if os.path.exists(ofile):
+ msgbox = QtGui.QMessageBox()
+ msgbox.setText("File already exists. We can overwrite the file, or add the information/rewrite only relevant sections.")
+ append_button = msgbox.addButton(unicode("Append"),
+ QtGui.QMessageBox.YesRole)
+ overwrite_button = msgbox.addButton(unicode("Overwrite"),
+ QtGui.QMessageBox.NoRole)
+ msgbox.exec_()
+ if msgbox.clickedButton() == append_button:
+ NEWFILE = False
+ elif msgbox.clickedButton() == overwrite_button:
+ NEWFILE = True
+ else:
+ return False
+ else:
+ NEWFILE = True
+ if NEWFILE:
+ exportPartInfo(unique_selected[0], ofile)
+ else:
+ appendPartInfo(unique_selected[0], ofile)
+
+ if len(unique_selected) > 1:
+ FreeCAD.Console.PrintWarning("Multi-part export not yet supported\n")
+ FreeCAD.Console.PrintMessage("Properties exported to "+str(ofile)+"\n")
+
+
+def exportFeatureFramesDialogue():
+ """Spawns a dialogue window for a part's feature frames to be exported."""
+ # Select only true parts
+ s = FreeCADGui.Selection.getSelection()
+ FreeCADGui.Selection.clearSelection()
+ if len(s) == 0:
+ FreeCAD.Console.PrintError("No part selected.")
+ return False
+ unique_selected = []
+ for item in s:
+ if item not in unique_selected and isinstance(item, Part.Feature):
+ # Ensuring that we are parts
+ unique_selected.append(item)
+ FreeCADGui.Selection.addSelection(item)
+ # Fix wording
+ textprompt = "Save the feature frames attached to the part"
+ if len(unique_selected) > 1:
+ textprompt = textprompt + "s"
+ opts = QtGui.QFileDialog.DontConfirmOverwrite
+ # Create file dialog
+ ofile, filt = QtGui.QFileDialog.getSaveFileName(None, textprompt,
+ os.getenv("HOME"),
+ "*.json", options=opts)
+ if ofile == "":
+ # User cancelled
+ return False
+ if os.path.exists(ofile):
+ msgbox = QtGui.QMessageBox()
+ msgbox.setText("File already exists. We can overwrite the file, or add the information/rewrite only relevant sections.")
+ append_button = msgbox.addButton(unicode("Append"),
+ QtGui.QMessageBox.YesRole)
+ overwrite_button = msgbox.addButton(unicode("Overwrite"),
+ QtGui.QMessageBox.NoRole)
+ msgbox.exec_()
+ if msgbox.clickedButton() == append_button:
+ NEWFILE = False
+ elif msgbox.clickedButton() == overwrite_button:
+ NEWFILE = True
+ else:
+ return False
+ else:
+ NEWFILE = True
+ if NEWFILE:
+ exportFeatureFrames(unique_selected[0], ofile)
+ else:
+ appendFeatureFrames(unique_selected[0], ofile)
+ if len(unique_selected) > 1:
+ FreeCAD.Console.PrintWarning("Multi-part export not yet supported\n")
+ FreeCAD.Console.PrintMessage("Feature frames of " + str(unique_selected[0].Label) + " exported to " + str(ofile) + "\n")
+
+
+def exportPartInfoAndFeaturesDialogue():
+ """Spawns a dialogue window for exporting both."""
+ s = FreeCADGui.Selection.getSelection()
+ FreeCADGui.Selection.clearSelection()
+ if len(s) == 0:
+ FreeCAD.Console.PrintError("No part selected.")
+ return False
+ unique_selected = []
+ for item in s:
+ if item not in unique_selected and isinstance(item, Part.Feature):
+ # Ensuring that we are parts
+ unique_selected.append(item)
+ FreeCADGui.Selection.addSelection(item)
+ # Fix wording
+ textprompt = "Save the part info and feature frames attached to the part"
+ if len(unique_selected) > 1:
+ textprompt = textprompt + "s"
+ opts = QtGui.QFileDialog.DontConfirmOverwrite
+ # Create file dialog
+ ofile, filt = QtGui.QFileDialog.getSaveFileName(None, textprompt,
+ os.getenv("HOME"),
+ "*.json", options=opts)
+ if ofile == "":
+ # User cancelled
+ return False
+ if os.path.exists(ofile):
+ msgbox = QtGui.QMessageBox()
+ msgbox.setText("File already exists. We can overwrite the file, or add the information/rewrite only relevant sections.")
+ append_button = msgbox.addButton(unicode("Append"),
+ QtGui.QMessageBox.YesRole)
+ overwrite_button = msgbox.addButton(unicode("Overwrite"),
+ QtGui.QMessageBox.NoRole)
+ msgbox.exec_()
+ if msgbox.clickedButton() == append_button:
+ NEWFILE = False
+ elif msgbox.clickedButton() == overwrite_button:
+ NEWFILE = True
+ else:
+ return False
+ else:
+ NEWFILE = True
+ if NEWFILE:
+ exportPartInfo(unique_selected[0], ofile)
+ appendFeatureFrames(unique_selected[0], ofile)
+ else:
+ appendPartInfo(unique_selected[0], ofile)
+ appendFeatureFrames(unique_selected[0], ofile)
+ if len(unique_selected) > 1:
+ FreeCAD.Console.PrintWarning("Multi-part export not yet supported.\n")
+ FreeCAD.Console.PrintMessage("Feature frames of "
+ + str(unique_selected[0].Label)
+ + " exported to " + str(ofile) + "\n")
+
+
+###################################################################
+# GUI Commands
+###################################################################
+uidir = os.path.join(FreeCAD.getUserAppDataDir(),
+ "Mod", __workbenchname__, "UI")
+icondir = os.path.join(uidir, "icons")
+spawnClassCommand("ExportPartInfoAndFeaturesDialogueCommand",
+ exportPartInfoAndFeaturesDialogue,
+ {"Pixmap": str(os.path.join(icondir, "parttojson.svg")),
+ "MenuText": "Export info and featureframes",
+ "ToolTip": "Export part properties (placement, C.O.M) and feature frames"})
+
+
+###################################################################
+# Information from primitive type
+###################################################################
+def getPrimitiveInfo(prim_type, subobj, scale=1e-3):
+ """returns a dictionary of the primitive's specific information."""
+ d = {}
+ if prim_type == "ArcOfCircle":
+ d["radius"] = scale*subobj.Curve.Radius
+ d["center"] = vector2list(subobj.Curve.Center, scale)
+ d["axis"] = vector2list(subobj.Curve.Axis, scale=1)
+ d["parameterrange"] = subobj.ParameterRange
+ elif prim_type == "ArcOfEllipse":
+ d["center"] = vector2list(subobj.Curve.Center, scale)
+ d["axis"] = vector2list(subobj.Curve.Axis, scale=1)
+ d["majorradius"] = scale*subobj.Curve.MajorRadius
+ d["minorradius"] = scale*subobj.Curve.MinorRadius
+ d["parameterrange"] = subobj.ParameterRange
+ elif prim_type == "ArcOfHyperBola":
+ d["anglexu"] = subobj.Curve.AngleXU
+ d["axis"] = vector2list(subobj.Curve.Axis, scale=1)
+ d["center"] = vector2list(subobj.Curve.Center, scale)
+ d["majorradius"] = scale*subobj.Curve.MajorRadius
+ d["minorradius"] = scale*subobj.Curve.MinorRadius
+ d["parameterrange"] = subobj.ParameterRange
+ elif prim_type == "ArcOfParabola":
+ d["anglexu"] = subobj.Curve.AngleXU
+ d["axis"] = vector2list(subobj.Curve.Axis, scale=1)
+ d["center"] = vector2list(subobj.Curve.Center, scale)
+ d["focal"] = scale*subobj.Curve.Focal
+ elif prim_type == "BSplineCurve":
+ FreeCAD.Console.PrintWarning("getPrimitiveInfo of BSpline incomplete.")
+ elif prim_type == "BezierCurve":
+ FreeCAD.Console.PrintWarning("getPrimitiveInfo of Bezier incomplete.")
+ elif prim_type == "Circle":
+ d["radius"] = scale*subobj.Curve.Radius
+ d["center"] = vector2list(subobj.Curve.Center, scale)
+ d["axis"] = vector2list(subobj.Curve.Axis, scale=1)
+ d["parameterrange"] = subobj.ParameterRange
+ elif prim_type == "Ellipse":
+ d["center"] = vector2list(subobj.Curve.Center, scale)
+ d["axis"] = vector2list(subobj.Curve.Axis, scale=1)
+ d["majorradius"] = scale*subobj.Curve.MajorRadius
+ d["minorradius"] = scale*subobj.Curve.MinorRadius
+ d["parameterrange"] = subobj.ParameterRange
+ elif prim_type == "Hyperbola":
+ d["anglexu"] = subobj.Curve.AngleXU
+ d["axis"] = vector2list(subobj.Curve.Axis, scale=1)
+ d["center"] = vector2list(subobj.Curve.Center, scale)
+ d["majorradius"] = scale*subobj.Curve.MajorRadius
+ d["minorradius"] = scale*subobj.Curve.MinorRadius
+ d["parameterrange"] = subobj.ParameterRange
+ elif prim_type == "Parabola":
+ d["anglexu"] = subobj.Curve.AngleXU
+ d["axis"] = vector2list(subobj.Curve.Axis, scale=1)
+ d["center"] = vector2list(subobj.Curve.Center, scale)
+ d["focal"] = scale*subobj.Curve.Focal
+ elif prim_type == "Line":
+ if int(FreeCAD.Version()[1]) > 16:
+ sp = subobj.valueAt(subobj.FirstParameter)
+ ep = subobj.valueAt(subobj.LastParameter)
+ d["startpoint"] = vector2list(sp)
+ d["endpoint"] = vector2list
+ else:
+ if not hasattr(subobj.Curve, "Infinite"):
+ d["startpoint"] = vector2list(subobj.Curve.StartPoint)
+ d["endpoint"] = vector2list(subobj.Curve.EndPoint)
+ if hasattr(subobj.Curve, "Infinite"):
+ if subobj.Curve.Infinite:
+ d["infinite"] = subobj.Curve.Infinite
+ else:
+ d["startpoint"] = vector2list(subobj.Curve.StartPoint)
+ d["endpoint"] = vector2list(subobj.Curve.EndPoint)
+ elif prim_type == "BSplineSurface":
+ FreeCAD.Console.PrintWarning("getPrimitiveInfo of BSpline incomplete.")
+ elif prim_type == "BezierSurface":
+ FreeCAD.Console.PrintWarning("getPrimitiveInfo of Bezier incomplete.")
+ elif prim_type == "Cylinder":
+ d["axis"] = vector2list(subobj.Surface.Axis, scale=1)
+ d["radius"] = scale*subobj.Surface.Radius
+ d["center"] = vector2list(subobj.Surface.Center)
+ PR = list(subobj.ParameterRange)
+ PR[2] = PR[2]*scale
+ PR[3] = PR[3]*scale
+ d["parameterrange"] = PR
+ elif prim_type == "Plane":
+ d["axis"] = vector2list(subobj.Surface.Axis, scale=1)
+ d["position"] = vector2list(subobj.Surface.Position, scale)
+ d["parameterrange"] = [scale*i for i in subobj.ParameterRange]
+ elif prim_type == "Sphere":
+ d["axis"] = vector2list(subobj.Surface.Axis, scale=1)
+ d["center"] = vector2list(subobj.Surface.Center, scale)
+ d["radius"] = scale*subobj.Surface.Radius
+ d["parameterrange"] = subobj.ParameterRange
+ elif prim_type == "Toroid":
+ d["axis"] = vector2list(subobj.Surface.Axis, scale=1)
+ d["center"] = vector2list(subobj.Surface.Center, scale)
+ d["majorradius"] = scale*subobj.Surface.MajorRadius
+ d["minorradius"] = scale*subobj.Surface.MinorRadius
+ d["parameterrange"] = subobj.Surface.ParameterRange
+ elif prim_type == "Cone":
+ d["axis"] = vector2list(subobj.Surface.Axis, scale=1)
+ d["center"] = vector2list(subobj.Surface.Center, scale)
+ d["radius"] = scale*subobj.Surface.Radius
+ d["semiangle"] = subobj.Surface.SemiAngle
+ d["parameterrange"] = subobj.ParameterRange
+ FreeCAD.Console.PrintWarning("getPrimitiveInfo of Cone may have wrong ParameterRange.")
+ return d
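+
+
+def getSelectedPrimitiveInfo(scale=1e-3):
+ """Sketch of a possible helper (not part of the original module): return the
+ getPrimitiveInfo dictionary for the first selected edge or face. Assumes the
+ class name of subobj.Curve / subobj.Surface matches the prim_type strings
+ handled above."""
+ sel = FreeCADGui.Selection.getSelectionEx()[0]
+ subobj = sel.SubObjects[0]
+ if subobj.ShapeType == "Edge":
+ prim_type = type(subobj.Curve).__name__
+ else:
+ prim_type = type(subobj.Surface).__name__
+ return getPrimitiveInfo(prim_type, subobj, scale=scale)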
diff --git a/Init.py b/Init.py
new file mode 100644
index 0000000..2b677df
--- /dev/null
+++ b/Init.py
@@ -0,0 +1 @@
+#I made this?
diff --git a/InitGui.py b/InitGui.py
new file mode 100644
index 0000000..a8cf245
--- /dev/null
+++ b/InitGui.py
@@ -0,0 +1,39 @@
+class ARBench(Workbench):
+ MenuText = "ARBench"
+ ToolTip = "Annotation for Robotics workbench"
+ Icon = """"""
+
+ def __init__(self):
+ import os
+ self.Icon = os.path.join(FreeCAD.getUserAppDataDir(), "Mod",
+ "ARBench", "UI", "icons", "frame.svg")
+
+ def Initialize(self):
+ """This function is executed when FreeCAD starts"""
+ import ARFrames
+ self.framecommands = ["FrameCommand",
+ "AllPartFramesCommand",
+ "FeatureFrameCommand"]
+ self.toolcommands = ["ExportPartInfoAndFeaturesDialogueCommand"]
+ self.appendToolbar("AR Frames", self.framecommands)
+ self.appendToolbar("AR Tools", self.toolcommands)
+
+ def Activated(self):
+ """This function is executed when the workbench is activated."""
+ #
+ return
+
+ def Deactivated(self):
+ """This function is executed when the workbench is deactivated."""
+ #
+ return
+
+ def ContextMenu(self, recipient):
+ """This is execcuted whenever the user right-clicks on screen."""
+ pass
+
+ def GetClassName(self):
+ # This function is mandatory if this is a full python workbench
+ return "Gui::PythonWorkbench"
+
+Gui.addWorkbench(ARBench())
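+
+# Note (assumption about the extension point): a command registered in ARFrames
+# via spawnClassCommand only appears in the GUI if its name is also added to
+# self.framecommands or self.toolcommands inside Initialize(), for example:
+# self.toolcommands.append("ExportFeatureFramesDialogueCommand")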
diff --git a/LICENSE b/LICENSE
index 324170b..8000a6f 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,674 +1,504 @@
- GNU GENERAL PUBLIC LICENSE
- Version 3, 29 June 2007
+ GNU LESSER GENERAL PUBLIC LICENSE
+ Version 2.1, February 1999
- Copyright (C) 2007 Free Software Foundation, Inc.
+ Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
+[This is the first released version of the Lesser GPL. It also counts
+ as the successor of the GNU Library Public License, version 2, hence
+ the version number 2.1.]
+
Preamble
- The GNU General Public License is a free, copyleft license for
-software and other kinds of works.
+ The licenses for most software are designed to take away your
+freedom to share and change it. By contrast, the GNU General Public
+Licenses are intended to guarantee your freedom to share and change
+free software--to make sure the software is free for all its users.
- The licenses for most software and other practical works are designed
-to take away your freedom to share and change the works. By contrast,
-the GNU General Public License is intended to guarantee your freedom to
-share and change all versions of a program--to make sure it remains free
-software for all its users. We, the Free Software Foundation, use the
-GNU General Public License for most of our software; it applies also to
-any other work released this way by its authors. You can apply it to
-your programs, too.
+ This license, the Lesser General Public License, applies to some
+specially designated software packages--typically libraries--of the
+Free Software Foundation and other authors who decide to use it. You
+can use it too, but we suggest you first think carefully about whether
+this license or the ordinary General Public License is the better
+strategy to use in any particular case, based on the explanations below.
- When we speak of free software, we are referring to freedom, not
-price. Our General Public Licenses are designed to make sure that you
-have the freedom to distribute copies of free software (and charge for
-them if you wish), that you receive source code or can get it if you
-want it, that you can change the software or use pieces of it in new
-free programs, and that you know you can do these things.
+ When we speak of free software, we are referring to freedom of use,
+not price. Our General Public Licenses are designed to make sure that
+you have the freedom to distribute copies of free software (and charge
+for this service if you wish); that you receive source code or can get
+it if you want it; that you can change the software and use pieces of
+it in new free programs; and that you are informed that you can do
+these things.
- To protect your rights, we need to prevent others from denying you
-these rights or asking you to surrender the rights. Therefore, you have
-certain responsibilities if you distribute copies of the software, or if
-you modify it: responsibilities to respect the freedom of others.
+ To protect your rights, we need to make restrictions that forbid
+distributors to deny you these rights or to ask you to surrender these
+rights. These restrictions translate to certain responsibilities for
+you if you distribute copies of the library or if you modify it.
- For example, if you distribute copies of such a program, whether
-gratis or for a fee, you must pass on to the recipients the same
-freedoms that you received. You must make sure that they, too, receive
-or can get the source code. And you must show them these terms so they
-know their rights.
+ For example, if you distribute copies of the library, whether gratis
+or for a fee, you must give the recipients all the rights that we gave
+you. You must make sure that they, too, receive or can get the source
+code. If you link other code with the library, you must provide
+complete object files to the recipients, so that they can relink them
+with the library after making changes to the library and recompiling
+it. And you must show them these terms so they know their rights.
- Developers that use the GNU GPL protect your rights with two steps:
-(1) assert copyright on the software, and (2) offer you this License
-giving you legal permission to copy, distribute and/or modify it.
+ We protect your rights with a two-step method: (1) we copyright the
+library, and (2) we offer you this license, which gives you legal
+permission to copy, distribute and/or modify the library.
- For the developers' and authors' protection, the GPL clearly explains
-that there is no warranty for this free software. For both users' and
-authors' sake, the GPL requires that modified versions be marked as
-changed, so that their problems will not be attributed erroneously to
-authors of previous versions.
+ To protect each distributor, we want to make it very clear that
+there is no warranty for the free library. Also, if the library is
+modified by someone else and passed on, the recipients should know
+that what they have is not the original version, so that the original
+author's reputation will not be affected by problems that might be
+introduced by others.
- Some devices are designed to deny users access to install or run
-modified versions of the software inside them, although the manufacturer
-can do so. This is fundamentally incompatible with the aim of
-protecting users' freedom to change the software. The systematic
-pattern of such abuse occurs in the area of products for individuals to
-use, which is precisely where it is most unacceptable. Therefore, we
-have designed this version of the GPL to prohibit the practice for those
-products. If such problems arise substantially in other domains, we
-stand ready to extend this provision to those domains in future versions
-of the GPL, as needed to protect the freedom of users.
+ Finally, software patents pose a constant threat to the existence of
+any free program. We wish to make sure that a company cannot
+effectively restrict the users of a free program by obtaining a
+restrictive license from a patent holder. Therefore, we insist that
+any patent license obtained for a version of the library must be
+consistent with the full freedom of use specified in this license.
- Finally, every program is threatened constantly by software patents.
-States should not allow patents to restrict development and use of
-software on general-purpose computers, but in those that do, we wish to
-avoid the special danger that patents applied to a free program could
-make it effectively proprietary. To prevent this, the GPL assures that
-patents cannot be used to render the program non-free.
+ Most GNU software, including some libraries, is covered by the
+ordinary GNU General Public License. This license, the GNU Lesser
+General Public License, applies to certain designated libraries, and
+is quite different from the ordinary General Public License. We use
+this license for certain libraries in order to permit linking those
+libraries into non-free programs.
+
+ When a program is linked with a library, whether statically or using
+a shared library, the combination of the two is legally speaking a
+combined work, a derivative of the original library. The ordinary
+General Public License therefore permits such linking only if the
+entire combination fits its criteria of freedom. The Lesser General
+Public License permits more lax criteria for linking other code with
+the library.
+
+ We call this license the "Lesser" General Public License because it
+does Less to protect the user's freedom than the ordinary General
+Public License. It also provides other free software developers Less
+of an advantage over competing non-free programs. These disadvantages
+are the reason we use the ordinary General Public License for many
+libraries. However, the Lesser license provides advantages in certain
+special circumstances.
+
+ For example, on rare occasions, there may be a special need to
+encourage the widest possible use of a certain library, so that it becomes
+a de-facto standard. To achieve this, non-free programs must be
+allowed to use the library. A more frequent case is that a free
+library does the same job as widely used non-free libraries. In this
+case, there is little to gain by limiting the free library to free
+software only, so we use the Lesser General Public License.
+
+ In other cases, permission to use a particular library in non-free
+programs enables a greater number of people to use a large body of
+free software. For example, permission to use the GNU C Library in
+non-free programs enables many more people to use the whole GNU
+operating system, as well as its variant, the GNU/Linux operating
+system.
+
+ Although the Lesser General Public License is Less protective of the
+users' freedom, it does ensure that the user of a program that is
+linked with the Library has the freedom and the wherewithal to run
+that program using a modified version of the Library.
The precise terms and conditions for copying, distribution and
-modification follow.
-
- TERMS AND CONDITIONS
-
- 0. Definitions.
-
- "This License" refers to version 3 of the GNU General Public License.
-
- "Copyright" also means copyright-like laws that apply to other kinds of
-works, such as semiconductor masks.
-
- "The Program" refers to any copyrightable work licensed under this
-License. Each licensee is addressed as "you". "Licensees" and
-"recipients" may be individuals or organizations.
-
- To "modify" a work means to copy from or adapt all or part of the work
-in a fashion requiring copyright permission, other than the making of an
-exact copy. The resulting work is called a "modified version" of the
-earlier work or a work "based on" the earlier work.
-
- A "covered work" means either the unmodified Program or a work based
-on the Program.
-
- To "propagate" a work means to do anything with it that, without
-permission, would make you directly or secondarily liable for
-infringement under applicable copyright law, except executing it on a
-computer or modifying a private copy. Propagation includes copying,
-distribution (with or without modification), making available to the
-public, and in some countries other activities as well.
-
- To "convey" a work means any kind of propagation that enables other
-parties to make or receive copies. Mere interaction with a user through
-a computer network, with no transfer of a copy, is not conveying.
-
- An interactive user interface displays "Appropriate Legal Notices"
-to the extent that it includes a convenient and prominently visible
-feature that (1) displays an appropriate copyright notice, and (2)
-tells the user that there is no warranty for the work (except to the
-extent that warranties are provided), that licensees may convey the
-work under this License, and how to view a copy of this License. If
-the interface presents a list of user commands or options, such as a
-menu, a prominent item in the list meets this criterion.
-
- 1. Source Code.
-
- The "source code" for a work means the preferred form of the work
-for making modifications to it. "Object code" means any non-source
-form of a work.
-
- A "Standard Interface" means an interface that either is an official
-standard defined by a recognized standards body, or, in the case of
-interfaces specified for a particular programming language, one that
-is widely used among developers working in that language.
-
- The "System Libraries" of an executable work include anything, other
-than the work as a whole, that (a) is included in the normal form of
-packaging a Major Component, but which is not part of that Major
-Component, and (b) serves only to enable use of the work with that
-Major Component, or to implement a Standard Interface for which an
-implementation is available to the public in source code form. A
-"Major Component", in this context, means a major essential component
-(kernel, window system, and so on) of the specific operating system
-(if any) on which the executable work runs, or a compiler used to
-produce the work, or an object code interpreter used to run it.
-
- The "Corresponding Source" for a work in object code form means all
-the source code needed to generate, install, and (for an executable
-work) run the object code and to modify the work, including scripts to
-control those activities. However, it does not include the work's
-System Libraries, or general-purpose tools or generally available free
-programs which are used unmodified in performing those activities but
-which are not part of the work. For example, Corresponding Source
-includes interface definition files associated with source files for
-the work, and the source code for shared libraries and dynamically
-linked subprograms that the work is specifically designed to require,
-such as by intimate data communication or control flow between those
-subprograms and other parts of the work.
-
- The Corresponding Source need not include anything that users
-can regenerate automatically from other parts of the Corresponding
-Source.
-
- The Corresponding Source for a work in source code form is that
-same work.
-
- 2. Basic Permissions.
-
- All rights granted under this License are granted for the term of
-copyright on the Program, and are irrevocable provided the stated
-conditions are met. This License explicitly affirms your unlimited
-permission to run the unmodified Program. The output from running a
-covered work is covered by this License only if the output, given its
-content, constitutes a covered work. This License acknowledges your
-rights of fair use or other equivalent, as provided by copyright law.
-
- You may make, run and propagate covered works that you do not
-convey, without conditions so long as your license otherwise remains
-in force. You may convey covered works to others for the sole purpose
-of having them make modifications exclusively for you, or provide you
-with facilities for running those works, provided that you comply with
-the terms of this License in conveying all material for which you do
-not control copyright. Those thus making or running the covered works
-for you must do so exclusively on your behalf, under your direction
-and control, on terms that prohibit them from making any copies of
-your copyrighted material outside their relationship with you.
-
- Conveying under any other circumstances is permitted solely under
-the conditions stated below. Sublicensing is not allowed; section 10
-makes it unnecessary.
-
- 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
-
- No covered work shall be deemed part of an effective technological
-measure under any applicable law fulfilling obligations under article
-11 of the WIPO copyright treaty adopted on 20 December 1996, or
-similar laws prohibiting or restricting circumvention of such
-measures.
-
- When you convey a covered work, you waive any legal power to forbid
-circumvention of technological measures to the extent such circumvention
-is effected by exercising rights under this License with respect to
-the covered work, and you disclaim any intention to limit operation or
-modification of the work as a means of enforcing, against the work's
-users, your or third parties' legal rights to forbid circumvention of
-technological measures.
-
- 4. Conveying Verbatim Copies.
-
- You may convey verbatim copies of the Program's source code as you
-receive it, in any medium, provided that you conspicuously and
-appropriately publish on each copy an appropriate copyright notice;
-keep intact all notices stating that this License and any
-non-permissive terms added in accord with section 7 apply to the code;
-keep intact all notices of the absence of any warranty; and give all
-recipients a copy of this License along with the Program.
-
- You may charge any price or no price for each copy that you convey,
-and you may offer support or warranty protection for a fee.
-
- 5. Conveying Modified Source Versions.
-
- You may convey a work based on the Program, or the modifications to
-produce it from the Program, in the form of source code under the
-terms of section 4, provided that you also meet all of these conditions:
-
- a) The work must carry prominent notices stating that you modified
- it, and giving a relevant date.
-
- b) The work must carry prominent notices stating that it is
- released under this License and any conditions added under section
- 7. This requirement modifies the requirement in section 4 to
- "keep intact all notices".
-
- c) You must license the entire work, as a whole, under this
- License to anyone who comes into possession of a copy. This
- License will therefore apply, along with any applicable section 7
- additional terms, to the whole of the work, and all its parts,
- regardless of how they are packaged. This License gives no
- permission to license the work in any other way, but it does not
- invalidate such permission if you have separately received it.
-
- d) If the work has interactive user interfaces, each must display
- Appropriate Legal Notices; however, if the Program has interactive
- interfaces that do not display Appropriate Legal Notices, your
- work need not make them do so.
-
- A compilation of a covered work with other separate and independent
-works, which are not by their nature extensions of the covered work,
-and which are not combined with it such as to form a larger program,
-in or on a volume of a storage or distribution medium, is called an
-"aggregate" if the compilation and its resulting copyright are not
-used to limit the access or legal rights of the compilation's users
-beyond what the individual works permit. Inclusion of a covered work
-in an aggregate does not cause this License to apply to the other
-parts of the aggregate.
-
- 6. Conveying Non-Source Forms.
-
- You may convey a covered work in object code form under the terms
-of sections 4 and 5, provided that you also convey the
-machine-readable Corresponding Source under the terms of this License,
-in one of these ways:
-
- a) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by the
- Corresponding Source fixed on a durable physical medium
- customarily used for software interchange.
-
- b) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by a
- written offer, valid for at least three years and valid for as
- long as you offer spare parts or customer support for that product
- model, to give anyone who possesses the object code either (1) a
- copy of the Corresponding Source for all the software in the
- product that is covered by this License, on a durable physical
- medium customarily used for software interchange, for a price no
- more than your reasonable cost of physically performing this
- conveying of source, or (2) access to copy the
- Corresponding Source from a network server at no charge.
-
- c) Convey individual copies of the object code with a copy of the
- written offer to provide the Corresponding Source. This
- alternative is allowed only occasionally and noncommercially, and
- only if you received the object code with such an offer, in accord
- with subsection 6b.
-
- d) Convey the object code by offering access from a designated
- place (gratis or for a charge), and offer equivalent access to the
- Corresponding Source in the same way through the same place at no
- further charge. You need not require recipients to copy the
- Corresponding Source along with the object code. If the place to
- copy the object code is a network server, the Corresponding Source
- may be on a different server (operated by you or a third party)
- that supports equivalent copying facilities, provided you maintain
- clear directions next to the object code saying where to find the
- Corresponding Source. Regardless of what server hosts the
- Corresponding Source, you remain obligated to ensure that it is
- available for as long as needed to satisfy these requirements.
-
- e) Convey the object code using peer-to-peer transmission, provided
- you inform other peers where the object code and Corresponding
- Source of the work are being offered to the general public at no
- charge under subsection 6d.
-
- A separable portion of the object code, whose source code is excluded
-from the Corresponding Source as a System Library, need not be
-included in conveying the object code work.
-
- A "User Product" is either (1) a "consumer product", which means any
-tangible personal property which is normally used for personal, family,
-or household purposes, or (2) anything designed or sold for incorporation
-into a dwelling. In determining whether a product is a consumer product,
-doubtful cases shall be resolved in favor of coverage. For a particular
-product received by a particular user, "normally used" refers to a
-typical or common use of that class of product, regardless of the status
-of the particular user or of the way in which the particular user
-actually uses, or expects or is expected to use, the product. A product
-is a consumer product regardless of whether the product has substantial
-commercial, industrial or non-consumer uses, unless such uses represent
-the only significant mode of use of the product.
-
- "Installation Information" for a User Product means any methods,
-procedures, authorization keys, or other information required to install
-and execute modified versions of a covered work in that User Product from
-a modified version of its Corresponding Source. The information must
-suffice to ensure that the continued functioning of the modified object
-code is in no case prevented or interfered with solely because
-modification has been made.
-
- If you convey an object code work under this section in, or with, or
-specifically for use in, a User Product, and the conveying occurs as
-part of a transaction in which the right of possession and use of the
-User Product is transferred to the recipient in perpetuity or for a
-fixed term (regardless of how the transaction is characterized), the
-Corresponding Source conveyed under this section must be accompanied
-by the Installation Information. But this requirement does not apply
-if neither you nor any third party retains the ability to install
-modified object code on the User Product (for example, the work has
-been installed in ROM).
-
- The requirement to provide Installation Information does not include a
-requirement to continue to provide support service, warranty, or updates
-for a work that has been modified or installed by the recipient, or for
-the User Product in which it has been modified or installed. Access to a
-network may be denied when the modification itself materially and
-adversely affects the operation of the network or violates the rules and
-protocols for communication across the network.
-
- Corresponding Source conveyed, and Installation Information provided,
-in accord with this section must be in a format that is publicly
-documented (and with an implementation available to the public in
-source code form), and must require no special password or key for
-unpacking, reading or copying.
-
- 7. Additional Terms.
-
- "Additional permissions" are terms that supplement the terms of this
-License by making exceptions from one or more of its conditions.
-Additional permissions that are applicable to the entire Program shall
-be treated as though they were included in this License, to the extent
-that they are valid under applicable law. If additional permissions
-apply only to part of the Program, that part may be used separately
-under those permissions, but the entire Program remains governed by
-this License without regard to the additional permissions.
-
- When you convey a copy of a covered work, you may at your option
-remove any additional permissions from that copy, or from any part of
-it. (Additional permissions may be written to require their own
-removal in certain cases when you modify the work.) You may place
-additional permissions on material, added by you to a covered work,
-for which you have or can give appropriate copyright permission.
-
- Notwithstanding any other provision of this License, for material you
-add to a covered work, you may (if authorized by the copyright holders of
-that material) supplement the terms of this License with terms:
-
- a) Disclaiming warranty or limiting liability differently from the
- terms of sections 15 and 16 of this License; or
-
- b) Requiring preservation of specified reasonable legal notices or
- author attributions in that material or in the Appropriate Legal
- Notices displayed by works containing it; or
-
- c) Prohibiting misrepresentation of the origin of that material, or
- requiring that modified versions of such material be marked in
- reasonable ways as different from the original version; or
-
- d) Limiting the use for publicity purposes of names of licensors or
- authors of the material; or
-
- e) Declining to grant rights under trademark law for use of some
- trade names, trademarks, or service marks; or
-
- f) Requiring indemnification of licensors and authors of that
- material by anyone who conveys the material (or modified versions of
- it) with contractual assumptions of liability to the recipient, for
- any liability that these contractual assumptions directly impose on
- those licensors and authors.
-
- All other non-permissive additional terms are considered "further
-restrictions" within the meaning of section 10. If the Program as you
-received it, or any part of it, contains a notice stating that it is
-governed by this License along with a term that is a further
-restriction, you may remove that term. If a license document contains
-a further restriction but permits relicensing or conveying under this
-License, you may add to a covered work material governed by the terms
-of that license document, provided that the further restriction does
-not survive such relicensing or conveying.
-
- If you add terms to a covered work in accord with this section, you
-must place, in the relevant source files, a statement of the
-additional terms that apply to those files, or a notice indicating
-where to find the applicable terms.
-
- Additional terms, permissive or non-permissive, may be stated in the
-form of a separately written license, or stated as exceptions;
-the above requirements apply either way.
-
- 8. Termination.
-
- You may not propagate or modify a covered work except as expressly
-provided under this License. Any attempt otherwise to propagate or
-modify it is void, and will automatically terminate your rights under
-this License (including any patent licenses granted under the third
-paragraph of section 11).
-
- However, if you cease all violation of this License, then your
-license from a particular copyright holder is reinstated (a)
-provisionally, unless and until the copyright holder explicitly and
-finally terminates your license, and (b) permanently, if the copyright
-holder fails to notify you of the violation by some reasonable means
-prior to 60 days after the cessation.
-
- Moreover, your license from a particular copyright holder is
-reinstated permanently if the copyright holder notifies you of the
-violation by some reasonable means, this is the first time you have
-received notice of violation of this License (for any work) from that
-copyright holder, and you cure the violation prior to 30 days after
-your receipt of the notice.
-
- Termination of your rights under this section does not terminate the
-licenses of parties who have received copies or rights from you under
-this License. If your rights have been terminated and not permanently
-reinstated, you do not qualify to receive new licenses for the same
-material under section 10.
-
- 9. Acceptance Not Required for Having Copies.
-
- You are not required to accept this License in order to receive or
-run a copy of the Program. Ancillary propagation of a covered work
-occurring solely as a consequence of using peer-to-peer transmission
-to receive a copy likewise does not require acceptance. However,
-nothing other than this License grants you permission to propagate or
-modify any covered work. These actions infringe copyright if you do
-not accept this License. Therefore, by modifying or propagating a
-covered work, you indicate your acceptance of this License to do so.
-
- 10. Automatic Licensing of Downstream Recipients.
-
- Each time you convey a covered work, the recipient automatically
-receives a license from the original licensors, to run, modify and
-propagate that work, subject to this License. You are not responsible
-for enforcing compliance by third parties with this License.
-
- An "entity transaction" is a transaction transferring control of an
-organization, or substantially all assets of one, or subdividing an
-organization, or merging organizations. If propagation of a covered
-work results from an entity transaction, each party to that
-transaction who receives a copy of the work also receives whatever
-licenses to the work the party's predecessor in interest had or could
-give under the previous paragraph, plus a right to possession of the
-Corresponding Source of the work from the predecessor in interest, if
-the predecessor has it or can get it with reasonable efforts.
-
- You may not impose any further restrictions on the exercise of the
-rights granted or affirmed under this License. For example, you may
-not impose a license fee, royalty, or other charge for exercise of
-rights granted under this License, and you may not initiate litigation
-(including a cross-claim or counterclaim in a lawsuit) alleging that
-any patent claim is infringed by making, using, selling, offering for
-sale, or importing the Program or any portion of it.
-
- 11. Patents.
-
- A "contributor" is a copyright holder who authorizes use under this
-License of the Program or a work on which the Program is based. The
-work thus licensed is called the contributor's "contributor version".
-
- A contributor's "essential patent claims" are all patent claims
-owned or controlled by the contributor, whether already acquired or
-hereafter acquired, that would be infringed by some manner, permitted
-by this License, of making, using, or selling its contributor version,
-but do not include claims that would be infringed only as a
-consequence of further modification of the contributor version. For
-purposes of this definition, "control" includes the right to grant
-patent sublicenses in a manner consistent with the requirements of
+modification follow. Pay close attention to the difference between a
+"work based on the library" and a "work that uses the library". The
+former contains code derived from the library, whereas the latter must
+be combined with the library in order to run.
+
+ GNU LESSER GENERAL PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+ 0. This License Agreement applies to any software library or other
+program which contains a notice placed by the copyright holder or
+other authorized party saying it may be distributed under the terms of
+this Lesser General Public License (also called "this License").
+Each licensee is addressed as "you".
+
+ A "library" means a collection of software functions and/or data
+prepared so as to be conveniently linked with application programs
+(which use some of those functions and data) to form executables.
+
+ The "Library", below, refers to any such software library or work
+which has been distributed under these terms. A "work based on the
+Library" means either the Library or any derivative work under
+copyright law: that is to say, a work containing the Library or a
+portion of it, either verbatim or with modifications and/or translated
+straightforwardly into another language. (Hereinafter, translation is
+included without limitation in the term "modification".)
+
+ "Source code" for a work means the preferred form of the work for
+making modifications to it. For a library, complete source code means
+all the source code for all modules it contains, plus any associated
+interface definition files, plus the scripts used to control compilation
+and installation of the library.
+
+ Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope. The act of
+running a program using the Library is not restricted, and output from
+such a program is covered only if its contents constitute a work based
+on the Library (independent of the use of the Library in a tool for
+writing it). Whether that is true depends on what the Library does
+and what the program that uses the Library does.
+
+ 1. You may copy and distribute verbatim copies of the Library's
+complete source code as you receive it, in any medium, provided that
+you conspicuously and appropriately publish on each copy an
+appropriate copyright notice and disclaimer of warranty; keep intact
+all the notices that refer to this License and to the absence of any
+warranty; and distribute a copy of this License along with the
+Library.
+
+ You may charge a fee for the physical act of transferring a copy,
+and you may at your option offer warranty protection in exchange for a
+fee.
+
+ 2. You may modify your copy or copies of the Library or any portion
+of it, thus forming a work based on the Library, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+ a) The modified work must itself be a software library.
+
+ b) You must cause the files modified to carry prominent notices
+ stating that you changed the files and the date of any change.
+
+ c) You must cause the whole of the work to be licensed at no
+ charge to all third parties under the terms of this License.
+
+ d) If a facility in the modified Library refers to a function or a
+ table of data to be supplied by an application program that uses
+ the facility, other than as an argument passed when the facility
+ is invoked, then you must make a good faith effort to ensure that,
+ in the event an application does not supply such function or
+ table, the facility still operates, and performs whatever part of
+ its purpose remains meaningful.
+
+ (For example, a function in a library to compute square roots has
+ a purpose that is entirely well-defined independent of the
+ application. Therefore, Subsection 2d requires that any
+ application-supplied function or table used by this function must
+ be optional: if the application does not supply it, the square
+ root function must still compute square roots.)
+
+These requirements apply to the modified work as a whole. If
+identifiable sections of that work are not derived from the Library,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works. But when you
+distribute the same sections as part of a whole which is a work based
+on the Library, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote
+it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the Library
+with the Library (or with a work based on the Library) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+ 3. You may opt to apply the terms of the ordinary GNU General Public
+License instead of this License to a given copy of the Library. To do
+this, you must alter all the notices that refer to this License, so
+that they refer to the ordinary GNU General Public License, version 2,
+instead of to this License. (If a newer version than version 2 of the
+ordinary GNU General Public License has appeared, then you can specify
+that version instead if you wish.) Do not make any other change in
+these notices.
+
+ Once this change is made in a given copy, it is irreversible for
+that copy, so the ordinary GNU General Public License applies to all
+subsequent copies and derivative works made from that copy.
+
+ This option is useful when you wish to copy part of the code of
+the Library into a program that is not a library.
+
+ 4. You may copy and distribute the Library (or a portion or
+derivative of it, under Section 2) in object code or executable form
+under the terms of Sections 1 and 2 above provided that you accompany
+it with the complete corresponding machine-readable source code, which
+must be distributed under the terms of Sections 1 and 2 above on a
+medium customarily used for software interchange.
+
+ If distribution of object code is made by offering access to copy
+from a designated place, then offering equivalent access to copy the
+source code from the same place satisfies the requirement to
+distribute the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+ 5. A program that contains no derivative of any portion of the
+Library, but is designed to work with the Library by being compiled or
+linked with it, is called a "work that uses the Library". Such a
+work, in isolation, is not a derivative work of the Library, and
+therefore falls outside the scope of this License.
+
+ However, linking a "work that uses the Library" with the Library
+creates an executable that is a derivative of the Library (because it
+contains portions of the Library), rather than a "work that uses the
+library". The executable is therefore covered by this License.
+Section 6 states terms for distribution of such executables.
+
+ When a "work that uses the Library" uses material from a header file
+that is part of the Library, the object code for the work may be a
+derivative work of the Library even though the source code is not.
+Whether this is true is especially significant if the work can be
+linked without the Library, or if the work is itself a library. The
+threshold for this to be true is not precisely defined by law.
+
+ If such an object file uses only numerical parameters, data
+structure layouts and accessors, and small macros and small inline
+functions (ten lines or less in length), then the use of the object
+file is unrestricted, regardless of whether it is legally a derivative
+work. (Executables containing this object code plus portions of the
+Library will still fall under Section 6.)
+
+ Otherwise, if the work is a derivative of the Library, you may
+distribute the object code for the work under the terms of Section 6.
+Any executables containing that work also fall under Section 6,
+whether or not they are linked directly with the Library itself.
+
+ 6. As an exception to the Sections above, you may also combine or
+link a "work that uses the Library" with the Library to produce a
+work containing portions of the Library, and distribute that work
+under terms of your choice, provided that the terms permit
+modification of the work for the customer's own use and reverse
+engineering for debugging such modifications.
+
+ You must give prominent notice with each copy of the work that the
+Library is used in it and that the Library and its use are covered by
+this License. You must supply a copy of this License. If the work
+during execution displays copyright notices, you must include the
+copyright notice for the Library among them, as well as a reference
+directing the user to the copy of this License. Also, you must do one
+of these things:
+
+ a) Accompany the work with the complete corresponding
+ machine-readable source code for the Library including whatever
+ changes were used in the work (which must be distributed under
+ Sections 1 and 2 above); and, if the work is an executable linked
+ with the Library, with the complete machine-readable "work that
+ uses the Library", as object code and/or source code, so that the
+ user can modify the Library and then relink to produce a modified
+ executable containing the modified Library. (It is understood
+ that the user who changes the contents of definitions files in the
+ Library will not necessarily be able to recompile the application
+ to use the modified definitions.)
+
+ b) Use a suitable shared library mechanism for linking with the
+ Library. A suitable mechanism is one that (1) uses at run time a
+ copy of the library already present on the user's computer system,
+ rather than copying library functions into the executable, and (2)
+ will operate properly with a modified version of the library, if
+ the user installs one, as long as the modified version is
+ interface-compatible with the version that the work was made with.
+
+ c) Accompany the work with a written offer, valid for at
+ least three years, to give the same user the materials
+ specified in Subsection 6a, above, for a charge no more
+ than the cost of performing this distribution.
+
+ d) If distribution of the work is made by offering access to copy
+ from a designated place, offer equivalent access to copy the above
+ specified materials from the same place.
+
+ e) Verify that the user has already received a copy of these
+ materials or that you have already sent this user a copy.
+
+ For an executable, the required form of the "work that uses the
+Library" must include any data and utility programs needed for
+reproducing the executable from it. However, as a special exception,
+the materials to be distributed need not include anything that is
+normally distributed (in either source or binary form) with the major
+components (compiler, kernel, and so on) of the operating system on
+which the executable runs, unless that component itself accompanies
+the executable.
+
+ It may happen that this requirement contradicts the license
+restrictions of other proprietary libraries that do not normally
+accompany the operating system. Such a contradiction means you cannot
+use both them and the Library together in an executable that you
+distribute.
+
+ 7. You may place library facilities that are a work based on the
+Library side-by-side in a single library together with other library
+facilities not covered by this License, and distribute such a combined
+library, provided that the separate distribution of the work based on
+the Library and of the other library facilities is otherwise
+permitted, and provided that you do these two things:
+
+ a) Accompany the combined library with a copy of the same work
+ based on the Library, uncombined with any other library
+ facilities. This must be distributed under the terms of the
+ Sections above.
+
+ b) Give prominent notice with the combined library of the fact
+ that part of it is a work based on the Library, and explaining
+ where to find the accompanying uncombined form of the same work.
+
+ 8. You may not copy, modify, sublicense, link with, or distribute
+the Library except as expressly provided under this License. Any
+attempt otherwise to copy, modify, sublicense, link with, or
+distribute the Library is void, and will automatically terminate your
+rights under this License. However, parties who have received copies,
+or rights, from you under this License will not have their licenses
+terminated so long as such parties remain in full compliance.
+
+ 9. You are not required to accept this License, since you have not
+signed it. However, nothing else grants you permission to modify or
+distribute the Library or its derivative works. These actions are
+prohibited by law if you do not accept this License. Therefore, by
+modifying or distributing the Library (or any work based on the
+Library), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Library or works based on it.
+
+ 10. Each time you redistribute the Library (or any work based on the
+Library), the recipient automatically receives a license from the
+original licensor to copy, distribute, link with or modify the Library
+subject to these terms and conditions. You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties with
this License.
- Each contributor grants you a non-exclusive, worldwide, royalty-free
-patent license under the contributor's essential patent claims, to
-make, use, sell, offer for sale, import and otherwise run, modify and
-propagate the contents of its contributor version.
-
- In the following three paragraphs, a "patent license" is any express
-agreement or commitment, however denominated, not to enforce a patent
-(such as an express permission to practice a patent or covenant not to
-sue for patent infringement). To "grant" such a patent license to a
-party means to make such an agreement or commitment not to enforce a
-patent against the party.
-
- If you convey a covered work, knowingly relying on a patent license,
-and the Corresponding Source of the work is not available for anyone
-to copy, free of charge and under the terms of this License, through a
-publicly available network server or other readily accessible means,
-then you must either (1) cause the Corresponding Source to be so
-available, or (2) arrange to deprive yourself of the benefit of the
-patent license for this particular work, or (3) arrange, in a manner
-consistent with the requirements of this License, to extend the patent
-license to downstream recipients. "Knowingly relying" means you have
-actual knowledge that, but for the patent license, your conveying the
-covered work in a country, or your recipient's use of the covered work
-in a country, would infringe one or more identifiable patents in that
-country that you have reason to believe are valid.
-
- If, pursuant to or in connection with a single transaction or
-arrangement, you convey, or propagate by procuring conveyance of, a
-covered work, and grant a patent license to some of the parties
-receiving the covered work authorizing them to use, propagate, modify
-or convey a specific copy of the covered work, then the patent license
-you grant is automatically extended to all recipients of the covered
-work and works based on it.
-
- A patent license is "discriminatory" if it does not include within
-the scope of its coverage, prohibits the exercise of, or is
-conditioned on the non-exercise of one or more of the rights that are
-specifically granted under this License. You may not convey a covered
-work if you are a party to an arrangement with a third party that is
-in the business of distributing software, under which you make payment
-to the third party based on the extent of your activity of conveying
-the work, and under which the third party grants, to any of the
-parties who would receive the covered work from you, a discriminatory
-patent license (a) in connection with copies of the covered work
-conveyed by you (or copies made from those copies), or (b) primarily
-for and in connection with specific products or compilations that
-contain the covered work, unless you entered into that arrangement,
-or that patent license was granted, prior to 28 March 2007.
-
- Nothing in this License shall be construed as excluding or limiting
-any implied license or other defenses to infringement that may
-otherwise be available to you under applicable patent law.
-
- 12. No Surrender of Others' Freedom.
-
- If conditions are imposed on you (whether by court order, agreement or
+ 11. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
-excuse you from the conditions of this License. If you cannot convey a
-covered work so as to satisfy simultaneously your obligations under this
-License and any other pertinent obligations, then as a consequence you may
-not convey it at all. For example, if you agree to terms that obligate you
-to collect a royalty for further conveying from those to whom you convey
-the Program, the only way you could satisfy both those terms and this
-License would be to refrain entirely from conveying the Program.
+excuse you from the conditions of this License. If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Library at all. For example, if a patent
+license would not permit royalty-free redistribution of the Library by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Library.
- 13. Use with the GNU Affero General Public License.
+If any portion of this section is held invalid or unenforceable under any
+particular circumstance, the balance of the section is intended to apply,
+and the section as a whole is intended to apply in other circumstances.
- Notwithstanding any other provision of this License, you have
-permission to link or combine any covered work with a work licensed
-under version 3 of the GNU Affero General Public License into a single
-combined work, and to convey the resulting work. The terms of this
-License will continue to apply to the part which is the covered work,
-but the special requirements of the GNU Affero General Public License,
-section 13, concerning interaction through a network will apply to the
-combination as such.
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system which is
+implemented by public license practices. Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
- 14. Revised Versions of this License.
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
- The Free Software Foundation may publish revised and/or new versions of
-the GNU General Public License from time to time. Such new versions will
-be similar in spirit to the present version, but may differ in detail to
-address new problems or concerns.
+ 12. If the distribution and/or use of the Library is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Library under this License may add
+an explicit geographical distribution limitation excluding those countries,
+so that distribution is permitted only in or among countries not thus
+excluded. In such case, this License incorporates the limitation as if
+written in the body of this License.
- Each version is given a distinguishing version number. If the
-Program specifies that a certain numbered version of the GNU General
-Public License "or any later version" applies to it, you have the
-option of following the terms and conditions either of that numbered
-version or of any later version published by the Free Software
-Foundation. If the Program does not specify a version number of the
-GNU General Public License, you may choose any version ever published
-by the Free Software Foundation.
+ 13. The Free Software Foundation may publish revised and/or new
+versions of the Lesser General Public License from time to time.
+Such new versions will be similar in spirit to the present version,
+but may differ in detail to address new problems or concerns.
- If the Program specifies that a proxy can decide which future
-versions of the GNU General Public License can be used, that proxy's
-public statement of acceptance of a version permanently authorizes you
-to choose that version for the Program.
+Each version is given a distinguishing version number. If the Library
+specifies a version number of this License which applies to it and
+"any later version", you have the option of following the terms and
+conditions either of that version or of any later version published by
+the Free Software Foundation. If the Library does not specify a
+license version number, you may choose any version ever published by
+the Free Software Foundation.
- Later license versions may give you additional or different
-permissions. However, no additional obligations are imposed on any
-author or copyright holder as a result of your choosing to follow a
-later version.
+ 14. If you wish to incorporate parts of the Library into other free
+programs whose distribution conditions are incompatible with these,
+write to the author to ask for permission. For software which is
+copyrighted by the Free Software Foundation, write to the Free
+Software Foundation; we sometimes make exceptions for this. Our
+decision will be guided by the two goals of preserving the free status
+of all derivatives of our free software and of promoting the sharing
+and reuse of software generally.
- 15. Disclaimer of Warranty.
+ NO WARRANTY
- THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
-APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
-HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
-OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
-IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
-ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+ 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
+WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
+EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
+OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
+KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
+LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
+THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
- 16. Limitation of Liability.
-
- IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
-WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
-THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
-GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
-USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
-DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
-PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
-EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
-SUCH DAMAGES.
-
- 17. Interpretation of Sections 15 and 16.
-
- If the disclaimer of warranty and limitation of liability provided
-above cannot be given local legal effect according to their terms,
-reviewing courts shall apply local law that most closely approximates
-an absolute waiver of all civil liability in connection with the
-Program, unless a warranty or assumption of liability accompanies a
-copy of the Program in return for a fee.
+ 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
+WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
+AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
+FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
+CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
+LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
+RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
+FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
+SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGES.
END OF TERMS AND CONDITIONS
- How to Apply These Terms to Your New Programs
+ How to Apply These Terms to Your New Libraries
- If you develop a new program, and you want it to be of the greatest
-possible use to the public, the best way to achieve this is to make it
-free software which everyone can redistribute and change under these terms.
+ If you develop a new library, and you want it to be of the greatest
+possible use to the public, we recommend making it free software that
+everyone can redistribute and change. You can do so by permitting
+redistribution under these terms (or, alternatively, under the terms of the
+ordinary General Public License).
- To do so, attach the following notices to the program. It is safest
-to attach them to the start of each source file to most effectively
-state the exclusion of warranty; and each file should have at least
-the "copyright" line and a pointer to where the full notice is found.
+ To apply these terms, attach the following notices to the library. It is
+safest to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least the
+"copyright" line and a pointer to where the full notice is found.
-
- Copyright (C) 2024 Igor Brylev
+
+    Copyright (C) <year>  <name of author>
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
- This program is distributed in the hope that it will be useful,
+ This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>.
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
+ USA
Also add information on how to contact you by electronic and paper mail.
- If the program does terminal interaction, make it output a short
-notice like this when it starts in an interactive mode:
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the library, if
+necessary. Here is a sample; alter the names:
- Copyright (C) 2024 Igor Brylev
- This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
- This is free software, and you are welcome to redistribute it
- under certain conditions; type `show c' for details.
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the
+ library `Frob' (a library for tweaking knobs) written by James Random
+ Hacker.
-The hypothetical commands `show w' and `show c' should show the appropriate
-parts of the General Public License. Of course, your program's commands
-might be different; for a GUI interface, you would use an "about box".
+ <signature of Ty Coon>, 1 April 1990
+ Ty Coon, President of Vice
- You should also get your employer (if you work as a programmer) or school,
-if any, to sign a "copyright disclaimer" for the program, if necessary.
-For more information on this, and how to apply and follow the GNU GPL, see
-<https://www.gnu.org/licenses/>.
-
- The GNU General Public License does not permit incorporating your program
-into proprietary programs. If your program is a subroutine library, you
-may consider it more useful to permit linking proprietary applications with
-the library. If this is what you want to do, use the GNU Lesser General
-Public License instead of this License. But first, please read
-<https://www.gnu.org/philosophy/why-not-lgpl.html>.
+That's all there is to it!
diff --git a/README.md b/README.md
index f8a0bdc..c2a8c49 100644
--- a/README.md
+++ b/README.md
@@ -1,31 +1,32 @@
-# Robossembler Framework
+
+# ARBench
+Annotation for robotics bench. A FreeCAD workbench for annotating frames of interest, exporting these w.r.t. the part frame, and exporting part information.
+# Installation instructions
+This workbench supports FreeCAD versions newer than 0.16.
+1. [Install FreeCAD](https://www.freecadweb.org/wiki/Installing)
+2. If you are not on Ubuntu, follow the [workbench installation instructions](https://www.freecadweb.org/wiki/How_to_install_additional_workbenches); on Ubuntu you can do the following instead.
+3. Custom workbenches are located in `.FreeCAD/Mod/` under your home directory:
+   `cd ~/.FreeCAD/Mod/`
+4. Either
+   - clone the repository there, or
+   - symlink the cloned repo in there (`ln -s ./ARBench ~/.FreeCAD/ARBench`); see the sketch after this list.
+5. Start the workbench by
+   1. running FreeCAD
+   2. opening a STEP file
+   3. opening the `ARBench` workbench
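+
+A minimal shell sketch of the symlink route, placing the link inside `Mod/` as described in step 3 (the clone location `~/ARBench` is only an assumed example; adjust it to wherever you cloned the repository):
+
+```bash
+# Make sure FreeCAD's user workbench directory exists
+mkdir -p ~/.FreeCAD/Mod
+# Link the cloned workbench into it (assumes the repo was cloned to ~/ARBench)
+ln -s ~/ARBench ~/.FreeCAD/Mod/ARBench
+```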
-The Robossembler Framework is intended to automate the development of control programs for robotic manipulators, their debugging in virtual environments, and performance evaluation.
+# Usage
-The framework consists of the following functional modules:
-1. __Assembly sequence generation module__ (`as_generator`) generates an assembly graph from the static model of a product, taking additional options and constraints into account.
-2. __Process planning module__ (`pddl_configurator`) uses models of the production equipment (robotic manipulator, 3D printer) to produce task specifications in a PDDL-compatible format, which are passed to the planning system together with the assembly sequence.
-3. __Model export module for virtual environments__ (`env_exporter`) (game engines, physics engines, rendering systems, simulators) produces so-called assets, i.e. models adapted for use in virtual environments (tessellated or retopologized).
-4. __Dataset generation module__ (`dataset_generator`) aggregates simulation data for further processing and machine learning aimed at developing robot skills.
-5. __Plan execution module__ controls the robot's motions and tasks in real time. Compatible with the ROS2 standard. See the source code in the [robossembler-ros2](https://gitlab.com/robossembler/robossembler-ros2) repository.
-6. __Benchmarking module__ (`benchmark`) aggregates information on the efficiency of skill acquisition methods (computational cost, disk space used) and on the performance of the skills themselves (speed, accuracy).
-7. __Virtual environment management module__ manages launching the virtual environments suited to practicing specific skills. See the source code in the [robossembler-ros2](https://gitlab.com/robossembler/robossembler-ros2) repository.
-
-# Assembly sequence generator (ASP)
-
-This software module (cad_parts_adjacency_matrix.py) is intended to solve the assembly planning problem. It builds an AND/OR assembly graph and evaluates the resulting solutions against a set of four formal rules for checking the generated graph, providing feedback to design engineers.
-
-The formal rules are called predicates. The ASP module provides the following kinds of predicates:
-- __Geometric feasibility predicate__. Holds for assembly sequences that satisfy the geometric feasibility criterion: all parts of the assembly graph can be brought into contact in a certain order without any collisions.
-- __Mechanical feasibility predicate__. Holds for an assembly sequence when the assembly tools can perform the specified operation without any collisions with the product.
-- __Stability predicate__. Holds for an assembly sequence when the assembly reaches a stable state at every stage.
-- __Degrees-of-freedom predicate__. Derived from the already generated assembly graph(s): in which degrees of freedom a part can be moved.
-
-# Scene generation
-
-TODO: write a description
-
-[example scene description file](docs/scene_generator)
+1. Click a small feature, e.g. a circle.
+2. Press the feature frame creator button (the icon of a cone with a magnifying glass on it).
+3. Choose the type of feature to create.
+4. Choose the feature parameters, if relevant, and the offset of the frame from the feature.
+5. Repeat step 4 for each feature you want on each part.
+6. Click a part and press the export-to-JSON button (the block-to-text-file icon).
+7. Save the JSON file.
+8. Use the JSON with whatever you want, e.g. [`arbench_part_publisher`](https://github.com/mahaarbo/arbench_part_publisher); a quick way to inspect the file is shown below.
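+
+A quick way to inspect the exported file from the command line (the file name `part_export.json` is only a placeholder for whatever you saved in step 7):
+
+```bash
+# Pretty-print the exported JSON for a quick sanity check
+python3 -m json.tool part_export.json
+```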
+# Todo
+ - [ ] Add export all parts to meshes
diff --git a/UI/FeatureFrameCreator.ui b/UI/FeatureFrameCreator.ui
new file mode 100644
index 0000000..1eb41b6
--- /dev/null
+++ b/UI/FeatureFrameCreator.ui
@@ -0,0 +1,73 @@
+    [Qt Designer .ui definition of the "Feature Frame" dialog (235×238): a centered "TextLabel" caption and two unlabeled input widgets]
diff --git a/UI/FramePlacer.ui b/UI/FramePlacer.ui
new file mode 100644
index 0000000..335f3a7
--- /dev/null
+++ b/UI/FramePlacer.ui
@@ -0,0 +1,520 @@
+    [Qt Designer .ui definition of the "Part Frame Creator" dialog (299×292): a "Frame label" field ("Label of the new frame, must be unique"), an "Offset from part frame" group with x/y/z offsets in mm and roll/pitch/yaw offsets in degrees, an "Axis scale" spin box, and initially hidden "Options" / "Coords:" (u, v) controls]
diff --git a/UI/InsertTaskCreator.ui b/UI/InsertTaskCreator.ui
new file mode 100644
index 0000000..f2830c3
--- /dev/null
+++ b/UI/InsertTaskCreator.ui
@@ -0,0 +1,262 @@
+    [Qt Designer .ui definition of the insert-task dialog (323×325): a "Task Label" field, "Hole" and "Peg" sections with "Part:", "Face:" and "Feature:" fields, and "Animate" / "Get from selection" buttons]
diff --git a/UI/InsertTaskCreator.ui~ b/UI/InsertTaskCreator.ui~
new file mode 100644
index 0000000..3338ac2
--- /dev/null
+++ b/UI/InsertTaskCreator.ui~
@@ -0,0 +1,401 @@
+    [Qt Designer .ui definition (editor backup of InsertTaskCreator.ui): the same insert-task dialog plus an "Assembly Axis" group with x/y/z spin boxes]
diff --git a/UI/PartFrameCreator.ui b/UI/PartFrameCreator.ui
new file mode 100644
index 0000000..335f3a7
--- /dev/null
+++ b/UI/PartFrameCreator.ui
@@ -0,0 +1,520 @@
+    [Qt Designer .ui definition identical to UI/FramePlacer.ui: the "Part Frame Creator" dialog with frame-label field, x/y/z and roll/pitch/yaw offsets, axis-scale spin box, and hidden Options/Coords controls]
diff --git a/UI/ScrewTaskCreator.ui b/UI/ScrewTaskCreator.ui
new file mode 100644
index 0000000..fc20be0
--- /dev/null
+++ b/UI/ScrewTaskCreator.ui
@@ -0,0 +1,239 @@
+    [Qt Designer .ui definition of the screw-task dialog (337×322): a "Task Label" field, "Hole" and "Screw" sections with "Part:" and "Face:" fields, a "Type:" / "ThreadType" row, and "Animate" / "Get from selection" buttons]
diff --git a/UI/icons/Center.svg b/UI/icons/Center.svg
new file mode 100644
index 0000000..aeddb82
--- /dev/null
+++ b/UI/icons/Center.svg
@@ -0,0 +1,237 @@
+
+
+
+
diff --git a/UI/icons/PickedPoint.svg b/UI/icons/PickedPoint.svg
new file mode 100644
index 0000000..6e0c4b3
--- /dev/null
+++ b/UI/icons/PickedPoint.svg
@@ -0,0 +1,152 @@
+
+
+
+
diff --git a/UI/icons/PointOnCenterline.svg b/UI/icons/PointOnCenterline.svg
new file mode 100644
index 0000000..501db60
--- /dev/null
+++ b/UI/icons/PointOnCenterline.svg
@@ -0,0 +1,275 @@
+
+
+
+
diff --git a/UI/icons/PointOnEdge.svg b/UI/icons/PointOnEdge.svg
new file mode 100644
index 0000000..0edfb5e
--- /dev/null
+++ b/UI/icons/PointOnEdge.svg
@@ -0,0 +1,181 @@
+
+
+
+
diff --git a/UI/icons/PointOnSurface.svg b/UI/icons/PointOnSurface.svg
new file mode 100644
index 0000000..62c8edc
--- /dev/null
+++ b/UI/icons/PointOnSurface.svg
@@ -0,0 +1,44 @@
+
+
+
+
diff --git a/UI/icons/allpartframes.svg b/UI/icons/allpartframes.svg
new file mode 100644
index 0000000..11d63fa
--- /dev/null
+++ b/UI/icons/allpartframes.svg
@@ -0,0 +1,60 @@
+
+
+
+
diff --git a/UI/icons/allpartgroups.svg b/UI/icons/allpartgroups.svg
new file mode 100644
index 0000000..c12ed92
--- /dev/null
+++ b/UI/icons/allpartgroups.svg
@@ -0,0 +1,141 @@
+
+
+
+
diff --git a/UI/icons/featureframecreator.svg b/UI/icons/featureframecreator.svg
new file mode 100644
index 0000000..a5d9ebe
--- /dev/null
+++ b/UI/icons/featureframecreator.svg
@@ -0,0 +1,254 @@
+
+
+
+
diff --git a/UI/icons/frame.svg b/UI/icons/frame.svg
new file mode 100644
index 0000000..6e0c4b3
--- /dev/null
+++ b/UI/icons/frame.svg
@@ -0,0 +1,152 @@
+
+
+
+
diff --git a/UI/icons/github_preview.png b/UI/icons/github_preview.png
new file mode 100644
index 0000000..3da4e14
Binary files /dev/null and b/UI/icons/github_preview.png differ
diff --git a/UI/icons/inserttask.svg b/UI/icons/inserttask.svg
new file mode 100644
index 0000000..bc562cf
--- /dev/null
+++ b/UI/icons/inserttask.svg
@@ -0,0 +1,49 @@
+
+
+
+
diff --git a/UI/icons/partframe.svg b/UI/icons/partframe.svg
new file mode 100644
index 0000000..4a7acc5
--- /dev/null
+++ b/UI/icons/partframe.svg
@@ -0,0 +1,384 @@
+
+
+
+
diff --git a/UI/icons/parttojson.svg b/UI/icons/parttojson.svg
new file mode 100644
index 0000000..2531c40
--- /dev/null
+++ b/UI/icons/parttojson.svg
@@ -0,0 +1,262 @@
+
+
+
+
diff --git a/UI/icons/placetask.svg b/UI/icons/placetask.svg
new file mode 100644
index 0000000..b99e61b
--- /dev/null
+++ b/UI/icons/placetask.svg
@@ -0,0 +1,66 @@
+
+
diff --git a/UI/icons/plasetask.svg b/UI/icons/plasetask.svg
new file mode 100644
index 0000000..c0ac862
--- /dev/null
+++ b/UI/icons/plasetask.svg
@@ -0,0 +1,66 @@
+
+
diff --git a/UI/icons/screwtask.svg b/UI/icons/screwtask.svg
new file mode 100644
index 0000000..f307cc8
--- /dev/null
+++ b/UI/icons/screwtask.svg
@@ -0,0 +1,48 @@
+
+
diff --git a/UI/icons/taskcreator.svg b/UI/icons/taskcreator.svg
new file mode 100644
index 0000000..49c923c
--- /dev/null
+++ b/UI/icons/taskcreator.svg
@@ -0,0 +1,49 @@
+
+
diff --git a/dataset_generation/cli/README.md b/dataset_generation/cli/README.md
deleted file mode 100644
index 1d00a6f..0000000
--- a/dataset_generation/cli/README.md
+++ /dev/null
@@ -1,15 +0,0 @@
-## Dataset generation script
-
-The script is used as part of a web service to generate datasets from a user-defined configuration.
-The [BlenderProc](https://github.com/DLR-RM/BlenderProc) package must be installed.
-
-Invocation:
-```bash
-blenderproc run renderBOPdataset.py --cfg CFG
-
-options:
- --cfg CFG  JSON string with the dataset configuration parameters / path to a JSON configuration file
-```
-
-[Example dataset configuration file.](dataset_cfg.json)
-
diff --git a/dataset_generation/cli/dataset_cfg.json b/dataset_generation/cli/dataset_cfg.json
deleted file mode 100644
index 8456392..0000000
--- a/dataset_generation/cli/dataset_cfg.json
+++ /dev/null
@@ -1,41 +0,0 @@
-{
- "dataSetObjects": ["fork"],
- "datasetType": "Object Detection - YOLOv8",
- "name": "123123e",
- "formBuilder": {
- "output": {
- "typedataset": "ObjectDetection",
- "dataset_path": "eqwfeadszxz",
- "models": [{"id": 1, "name": "fork"}],
- "models_randomization": { "loc_range_low": [-1, -1, 0.0], "loc_range_high": [1, 1, 2] },
- "scene": {
- "objects": [
- {"name": "floor", "collision_shape": "BOX", "loc_xyz":[0,0,0], "rot_euler":[0, 0, 0],
- "material_randomization": {"specular":[0,1], "roughness":[0,1], "metallic":[0,1], "base_color":[[0,0,0,1],[1,1,1,1]]}
- }
- ],
- "lights": [
- {"id": 1, "type": "POINT", "loc_xyz":[5,5,5], "rot_euler":[-0.06, 0.61, -0.19],
- "color_range_low":[0.5, 0.5, 0.5], "color_range_high":[1, 1, 1],
- "energy_range":[400,900]
- },
- {"id": 2, "type": "SUN", "loc_xyz":[0,0,0], "rot_euler":[-0.01, 0.01, -0.01],
- "color_range_low":[1, 1, 1], "color_range_high":[1, 1, 1],
- "energy_range":[2,9]
- }
- ]
- },
- "camera_position": { "center_shell": [0, 0, 0], "radius_range": [0.4, 1.4], "elevation_range": [10, 90] },
- "generation": {
- "n_cam_pose": 3,
- "n_sample_on_pose": 1,
- "n_series": 3,
- "image_format": "JPEG",
- "image_size_wh": [640, 480]
- }
- }
- },
- "processStatus": "exec",
- "local_path": "/home/user/5f4e161b-82d1-41fa-a11c-15d485b01600",
- "projectId": "660aaddbf98957a186f9c546"
-}
diff --git a/dataset_generation/cli/renderBOPdataset.py b/dataset_generation/cli/renderBOPdataset.py
deleted file mode 100755
index db616fa..0000000
--- a/dataset_generation/cli/renderBOPdataset.py
+++ /dev/null
@@ -1,361 +0,0 @@
-import blenderproc as bproc
-"""
- renderBOPdataset
-    Overall task: common pipeline
-    Implemented function: generation of a dataset in BOP format with the given randomization parameters
-    Uses the blenderproc module
-
- 26.04.2024 @shalenikol release 0.1
-"""
-import numpy as np
-import argparse
-import random
-import os
-import shutil
-import json
-
-VHACD_PATH = "blenderproc_resources/vhacd"
-DIR_MODELS = "models"
-FILE_LOG_SCENE = "res.txt"
-FILE_RBS_INFO = "rbs_info.json"
-FILE_GT_COCO = "scene_gt_coco.json"
-
-Not_Categories_Name = True # the category name is absent from the COCO annotation
-
-def _get_path_model(name_model: str) -> str:
- # TODO on name_model find path for mesh (model.fbx)
- # local_path/assets/mesh/
- return os.path.join(rnd_par.output_dir, "assets/mesh/"+name_model+".fbx")
-
-def _get_path_object(name_obj: str) -> str:
- # TODO on name_obj find path for scene object (object.fbx)
- return os.path.join(rnd_par.output_dir, "assets/mesh/"+name_obj+".fbx")
-
-def convert2relative(height, width, bbox):
- """
- YOLO format use relative coordinates for annotation
- """
- x, y, w, h = bbox
- x += w/2
- y += h/2
- return x/width, y/height, w/width, h/height
-
-def render() -> int:
- for obj in all_meshs:
- # Make the object actively participate in the physics simulation
- obj.enable_rigidbody(active=True, collision_shape="COMPOUND")
- # Also use convex decomposition as collision shapes
- obj.build_convex_decomposition_collision_shape(VHACD_PATH)
-
- objs = all_meshs + rnd_par.scene.objs
-
- log_txt = os.path.join(rnd_par.output_dir, FILE_LOG_SCENE)
- with open(log_txt, "w") as fh:
- for i,o in enumerate(objs):
- loc = o.get_location()
- euler = o.get_rotation_euler()
- fh.write(f"{i} : {o.get_name()} {loc} {euler} category_id = {o.get_cp('category_id')}\n")
-
- # define a light and set its location and energy level
- ls = []
- for l in rnd_par.scene.light_data:
- light = bproc.types.Light(name=f"l{l['id']}")
- light.set_type(l["type"])
- light.set_location(l["loc_xyz"]) #[5, -5, 5])
- light.set_rotation_euler(l["rot_euler"]) #[-0.063, 0.6177, -0.1985])
- ls += [light]
-
- # define the camera intrinsics
- bproc.camera.set_intrinsics_from_blender_params(1,
- rnd_par.image_size_wh[0],
- rnd_par.image_size_wh[1],
- lens_unit="FOV")
-
- # add segmentation masks (per class and per instance)
- bproc.renderer.enable_segmentation_output(map_by=["category_id", "instance", "name"])
-
- # activate depth rendering
- bproc.renderer.enable_depth_output(activate_antialiasing=False)
-
- res_dir = os.path.join(rnd_par.output_dir, rnd_par.ds_name)
- if os.path.isdir(res_dir):
- shutil.rmtree(res_dir)
-    # Rendering loop
- # Do multiple times: Position the shapenet objects using the physics simulator and render X images with random camera poses
- for r in range(rnd_par.n_series):
-        # one random object in the frame / all the specified objects
- random_obj = random.choice(range(rnd_par.scene.n_obj))
- meshs = []
- for i,o in enumerate(all_meshs): #objs
- if rnd_par.single_object and i != random_obj:
- continue
- meshs += [o]
- rnd_mat = rnd_par.scene.obj_data[i]["material_randomization"]
- mats = o.get_materials() #[0]
- for mat in mats:
- val = rnd_mat["specular"]
- mat.set_principled_shader_value("Specular", random.uniform(val[0], val[1]))
- val = rnd_mat["roughness"]
- mat.set_principled_shader_value("Roughness", random.uniform(val[0], val[1]))
- val = rnd_mat["base_color"]
- mat.set_principled_shader_value("Base Color", np.random.uniform(val[0], val[1]))
- val = rnd_mat["metallic"]
- mat.set_principled_shader_value("Metallic", random.uniform(val[0], val[1]))
-
- # Randomly set the color and energy
- for i,l in enumerate(ls):
- current = rnd_par.scene.light_data[i]
- l.set_color(np.random.uniform(current["color_range_low"], current["color_range_high"]))
- energy = current["energy_range"]
- l.set_energy(random.uniform(energy[0], energy[1]))
-
- # Clear all key frames from the previous run
- bproc.utility.reset_keyframes()
-
- # Define a function that samples 6-DoF poses
- def sample_pose(obj: bproc.types.MeshObject):
- obj.set_location(np.random.uniform(rnd_par.loc_range_low, rnd_par.loc_range_high)) #[-1, -1, 0], [1, 1, 2]))
- obj.set_rotation_euler(bproc.sampler.uniformSO3())
-
- # Sample the poses of all shapenet objects above the ground without any collisions in-between
- bproc.object.sample_poses(meshs,
- objects_to_check_collisions = meshs + rnd_par.scene.collision_objects,
- sample_pose_func = sample_pose)
-
- # Run the simulation and fix the poses of the shapenet objects at the end
- bproc.object.simulate_physics_and_fix_final_poses(min_simulation_time=4, max_simulation_time=20, check_object_interval=1)
-
- # Find point of interest, all cam poses should look towards it
- poi = bproc.object.compute_poi(meshs)
-
- coord_max = [0.1, 0.1, 0.1]
- coord_min = [0., 0., 0.]
-
- with open(log_txt, "a") as fh:
- fh.write("*****************\n")
- fh.write(f"{r}) poi = {poi}\n")
- i = 0
- for o in meshs:
- i += 1
- loc = o.get_location()
- euler = o.get_rotation_euler()
- fh.write(f" {i} : {o.get_name()} {loc} {euler}\n")
- for j in range(3):
- if loc[j] < coord_min[j]:
- coord_min[j] = loc[j]
- if loc[j] > coord_max[j]:
- coord_max[j] = loc[j]
-
- # Sample up to X camera poses
- #an = np.random.uniform(0.78, 1.2) #1. #0.35
- for i in range(rnd_par.n_cam_pose):
- # Sample location
- location = bproc.sampler.shell(center=rnd_par.center_shell,
- radius_min=rnd_par.radius_range[0],
- radius_max=rnd_par.radius_range[1],
- elevation_min=rnd_par.elevation_range[0],
- elevation_max=rnd_par.elevation_range[1])
-            # coordinate along which the camera position will be sampled
-            j = random.randint(0, 2)
-            # one-time shift along the randomly chosen coordinate
- d = (coord_max[j] - coord_min[j]) / rnd_par.n_sample_on_pose
- if location[j] < 0:
- d = -d
- for _ in range(rnd_par.n_sample_on_pose):
- # Compute rotation based on vector going from location towards poi
- rotation_matrix = bproc.camera.rotation_from_forward_vec(poi - location, inplane_rot=np.random.uniform(-0.7854, 0.7854))
- # Add homog cam pose based on location an rotation
- cam2world_matrix = bproc.math.build_transformation_mat(location, rotation_matrix)
- bproc.camera.add_camera_pose(cam2world_matrix)
- location[j] -= d
- # render the whole pipeline
- data = bproc.renderer.render()
- # Write data to bop format
- bproc.writer.write_bop(res_dir,
- target_objects = all_meshs, # Optional[List[MeshObject]] = None
- depths = data["depth"],
- depth_scale = 1.0,
- colors = data["colors"],
- color_file_format=rnd_par.image_format,
- append_to_existing_output = (r>0),
- save_world2cam = False) # world coords are arbitrary in most real BOP datasets
- # dataset="robo_ds",
-
- models_dir = os.path.join(res_dir, DIR_MODELS)
- os.mkdir(models_dir)
-
- data = []
- for i,objn in enumerate(rnd_par.models.names):
- rec = {}
- rec["id"] = i+1
- rec["name"] = objn
- rec["model"] = os.path.join(DIR_MODELS, os.path.split(rnd_par.models.filenames[i])[1]) # путь относительный
- t = [obj.get_bound_box(local_coords=True).tolist() for obj in all_meshs if obj.get_name() == objn]
- rec["cuboid"] = t[0]
- data.append(rec)
- shutil.copy2(rnd_par.models.filenames[i], models_dir)
-        f = (os.path.splitext(rnd_par.models.filenames[i]))[0] + ".mtl" # material file
- if os.path.isfile(f):
- shutil.copy2(f, models_dir)
-
- with open(os.path.join(res_dir, FILE_RBS_INFO), "w") as fh:
- json.dump(data, fh, indent=2)
-
- """
-    !!! categories -> name is taken from category_id !!!
-    see below:
- blenderproc.python.writer : BopWriterUtility.py
- class _BopWriterUtility
- def calc_gt_coco
- ...
- CATEGORIES = [{'id': obj.get_cp('category_id'), 'name': str(obj.get_cp('category_id')), 'supercategory':
- dataset_name} for obj in dataset_objects]
-
-    therefore we replace the category name in the annotation
- """
- def change_categories_name(dir: str):
- coco_file = os.path.join(dir,FILE_GT_COCO)
- with open(coco_file, "r") as fh:
- data = json.load(fh)
- cats = data["categories"]
-
- for i,cat in enumerate(cats):
- cat["name"] = rnd_par.models.names[i] #obj_names[i]
-
- with open(coco_file, "w") as fh:
- json.dump(data, fh, indent=0)
-
- def explore(path: str):
- if not os.path.isdir(path):
- return
- folders = [
- os.path.join(path, o)
- for o in os.listdir(path)
- if os.path.isdir(os.path.join(path, o))
- ]
- for path_entry in folders:
- print(path_entry)
- if os.path.isfile(os.path.join(path_entry,FILE_GT_COCO)):
- change_categories_name(path_entry)
- else:
- explore(path_entry)
-
- if Not_Categories_Name:
- explore(res_dir)
- return 0 # success
-
-def _get_models(par, data) -> int:
- global all_meshs
-
- par.models = lambda: None
- par.models.n_item = len(data)
- if par.models.n_item == 0:
- return 0 # no models
-
-    # load the objects
- par.models.names = [] # obj_names
- par.models.filenames = [] # obj_filenames
- i = 1
- for f in data:
- nam = f
- par.models.names.append(nam)
- ff = _get_path_model(nam)
- par.models.filenames.append(ff)
- if not os.path.isfile(ff):
- print(f"Error: no such file '{ff}'")
- return -1
- obj = bproc.loader.load_obj(ff)
- all_meshs += obj
-        obj[0].set_cp("category_id", i) # starting from 1
- i += 1
- return par.models.n_item
-
-def _get_scene(par, data) -> int:
- # load scene
- par.scene = lambda: None
- objs = data["objects"]
- par.scene.n_obj = len(objs)
- if par.scene.n_obj == 0:
- return 0 # empty scene
- lights = data["lights"]
- par.scene.n_light = len(lights)
- if par.scene.n_light == 0:
- return 0 # no lighting
-
- par.scene.objs = []
- par.scene.collision_objects = []
- for f in objs:
- ff = _get_path_object(f["name"])
- if not os.path.isfile(ff):
- print(f"Error: no such file '{ff}'")
- return -1
- obj = bproc.loader.load_obj(ff)
- obj[0].set_cp("category_id", 999)
- coll = f["collision_shape"]
- if len(coll) > 0:
- obj[0].enable_rigidbody(False, collision_shape=coll)
- par.scene.collision_objects += obj
- par.scene.objs += obj
-
- if not par.scene.collision_objects:
- print("Collision objects not found in the scene")
- return 0
- par.scene.obj_data = objs
- par.scene.light_data = lights
- return par.scene.n_obj
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser()
- parser.add_argument("--cfg", required=True, help="Json-string with dataset parameters")
- args = parser.parse_args()
-
- if args.cfg[-5:] == ".json":
- if not os.path.isfile(args.cfg):
- print(f"Error: no such file '{args.cfg}'")
- exit(-1)
- with open(args.cfg, "r") as f:
- j_data = f.read()
- else:
- j_data = args.cfg
- try:
- cfg = json.loads(j_data)
- except json.JSONDecodeError as e:
- print(f"JSon error: {e}")
- exit(-2)
-
- ds_cfg = cfg["formBuilder"]["output"] # dataset config
- generation = ds_cfg["generation"]
- cam_pos = ds_cfg["camera_position"]
- models_randomization = ds_cfg["models_randomization"]
-
- rnd_par = lambda: None
- rnd_par.single_object = True
- rnd_par.ds_name = cfg["name"]
- rnd_par.output_dir = cfg["local_path"]
- rnd_par.dataset_objs = cfg["dataSetObjects"]
- rnd_par.n_cam_pose = generation["n_cam_pose"]
- rnd_par.n_sample_on_pose = generation["n_sample_on_pose"]
- rnd_par.n_series = generation["n_series"]
- rnd_par.image_format = generation["image_format"]
- rnd_par.image_size_wh = generation["image_size_wh"]
- rnd_par.center_shell = cam_pos["center_shell"]
- rnd_par.radius_range = cam_pos["radius_range"]
- rnd_par.elevation_range = cam_pos["elevation_range"]
- rnd_par.loc_range_low = models_randomization["loc_range_low"]
- rnd_par.loc_range_high = models_randomization["loc_range_high"]
-
- if not os.path.isdir(rnd_par.output_dir):
- print(f"Error: invalid path '{rnd_par.output_dir}'")
- exit(-3)
-
- bproc.init()
-
- all_meshs = []
- ret = _get_models(rnd_par, rnd_par.dataset_objs)
- if ret <= 0:
- print("Error: no models in config")
- exit(-4)
- if _get_scene(rnd_par, ds_cfg["scene"]) == 0:
- print("Error: empty scene in config")
- exit(-5)
- exit(render())
\ No newline at end of file
diff --git a/dataset_generation/renderBOPdataset2.py b/dataset_generation/renderBOPdataset2.py
deleted file mode 100755
index 61cdf90..0000000
--- a/dataset_generation/renderBOPdataset2.py
+++ /dev/null
@@ -1,412 +0,0 @@
-import blenderproc as bproc
-"""
- renderBOPdataset2
-    Overall task: common pipeline
-    Implemented function: generation of a dataset in BOP format with the given randomization parameters
-    Uses the blenderproc module
-
- 02.05.2024 @shalenikol release 0.1
- 02.07.2024 @shalenikol release 0.2
- 28.10.2024 @shalenikol release 0.3
-"""
-import numpy as np
-import argparse
-import random
-import os
-import shutil
-import json
-from pathlib import Path
-
-import bpy
-
-VHACD_PATH = "blenderproc_resources/vhacd"
-DIR_MODELS = "models"
-DIR_MESH = "assets/libs/objects/"
-FILE_LOG_SCENE = "res.txt"
-FILE_RBS_INFO = "rbs_info.json"
-FILE_GT_COCO = "scene_gt_coco.json"
-EXT_MODELS = ".fbx"
-TEXTURE_TMPL = "*.jpg"
-
-Not_Categories_Name = True # the category name is absent from the COCO annotation
-
-def _get_list_texture(rel_path: str) -> list:
- # local_path/texture/
- loc = os.path.dirname(os.path.dirname(rnd_par.output_dir))
- path = os.path.join(loc, rel_path)
- return list(Path(path).absolute().rglob(TEXTURE_TMPL))
-
-# def _get_path_model(name_model: str) -> str:
-# # TODO on name_model find path for mesh (model.fbx)
-# # local_path/assets/libs/objects # assets/mesh/
-# loc = os.path.dirname(os.path.dirname(rnd_par.output_dir))
-# return os.path.join(loc, DIR_MESH + name_model + EXT_MODELS)
-
-def _get_path_object(name_obj: str) -> str:
- # TODO on name_obj find path for scene object (object.fbx)
- # loc = os.path.dirname(os.path.dirname(rnd_par.output_dir))
- # return os.path.join(loc, DIR_MESH + name_obj + EXT_MODELS)
- return os.path.join(rnd_par.details_dir, name_obj + EXT_MODELS)
-
-def convert2relative(height, width, bbox):
- """
- YOLO format use relative coordinates for annotation
- """
- x, y, w, h = bbox
- x += w/2
- y += h/2
- return x/width, y/height, w/width, h/height
-
-def render() -> int:
- i = 0
- for obj in all_meshs:
- # Make the object actively participate in the physics simulation
- obj.enable_rigidbody(active=True, collision_shape="COMPOUND")
- # Also use convex decomposition as collision shapes
- obj.build_convex_decomposition_collision_shape(VHACD_PATH)
- i += 1
- # print(f"{i} : {obj.get_name()}")
-
- objs = all_meshs + rnd_par.scene.objs
-
- log_txt = os.path.join(os.path.dirname(rnd_par.output_dir), FILE_LOG_SCENE)
- with open(log_txt, "w") as fh:
- for i,o in enumerate(objs):
- loc = o.get_location()
- euler = o.get_rotation_euler()
- fh.write(f"{i} : {o.get_name()} {loc} {euler} category_id = {o.get_cp('category_id')}\n")
-
- # define a light and set its location and energy level
- ls = []
- for l in rnd_par.scene.light_data:
- light = bproc.types.Light(name=f"l{l['id']}")
- light.set_type(l["type"])
- light.set_location(l["loc_xyz"]) #[5, -5, 5])
- light.set_rotation_euler(l["rot_euler"]) #[-0.063, 0.6177, -0.1985])
- ls += [light]
-
- # define the camera intrinsics
- bproc.camera.set_intrinsics_from_blender_params(1,
- rnd_par.image_size_wh[0],
- rnd_par.image_size_wh[1],
- lens_unit="FOV")
-
- # add segmentation masks (per class and per instance)
- bproc.renderer.enable_segmentation_output(map_by=["category_id", "instance", "name"])
-
- # activate depth rendering
- bproc.renderer.enable_depth_output(activate_antialiasing=False)
-
- # res_dir = os.path.join(rnd_par.output_dir, rnd_par.ds_name)
- res_dir = rnd_par.output_dir
- if os.path.isdir(res_dir):
- shutil.rmtree(res_dir)
-    # Rendering loop
- # Do multiple times: Position the shapenet objects using the physics simulator and render X images with random camera poses
- for r in range(rnd_par.n_series):
- print(f"********** Series : {r+1}")
- is_texture = True if "texture_path" in rnd_par.models_randomization else False
- if is_texture:
- val = rnd_par.models_randomization["texture_path"]
- l_texture = _get_list_texture(val)
- image = bpy.data.images.load(filepath=str(l_texture[r % len(l_texture)]))
-        # one random object in the frame / all the specified objects
- random_obj = random.choice(range(rnd_par.models.n_item))
- meshs = []
-        for i,o in enumerate(all_meshs): # active models
- if rnd_par.single_object and i != random_obj:
- continue
- meshs += [o]
- if is_texture:
- mats = o.get_materials()
- for mat in mats:
- # image = bpy.data.images.load(filepath=str(random.choice(l_texture)))
- mat.set_principled_shader_value("Base Color", image)
-
-        for i,o in enumerate(rnd_par.scene.objs): # scene objects
- rnd_mat = rnd_par.scene.obj_data[i]["material_randomization"]
- mats = o.get_materials() #[0]
- for mat in mats:
- val = rnd_mat["specular"]
- mat.set_principled_shader_value("Specular", random.uniform(val[0], val[1]))
- val = rnd_mat["roughness"]
- mat.set_principled_shader_value("Roughness", random.uniform(val[0], val[1]))
- val = rnd_mat["metallic"]
- mat.set_principled_shader_value("Metallic", random.uniform(val[0], val[1]))
-                if "texture_path" in rnd_mat: # path to textures (*.jpg)
- val = rnd_mat["texture_path"]
- val = _get_list_texture(val)
- image = bpy.data.images.load(filepath=str(random.choice(val)))
- mat.set_principled_shader_value("Base Color", image)
- else:
- val = rnd_mat["base_color"]
- mat.set_principled_shader_value("Base Color", np.random.uniform(val[0], val[1]))
- # mat.set_principled_shader_value("Base Color", image)
-
- # Randomly set the color and energy
- for i,l in enumerate(ls):
- current = rnd_par.scene.light_data[i]
- l.set_color(np.random.uniform(current["color_range_low"], current["color_range_high"]))
- energy = current["energy_range"]
- l.set_energy(random.uniform(energy[0], energy[1]))
-
- # Clear all key frames from the previous run
- bproc.utility.reset_keyframes()
-
- # Define a function that samples 6-DoF poses
- def sample_pose(obj: bproc.types.MeshObject):
- obj.set_location(np.random.uniform(rnd_par.loc_range_low, rnd_par.loc_range_high)) #[-1, -1, 0], [1, 1, 2]))
- obj.set_rotation_euler(bproc.sampler.uniformSO3())
-
- # Sample the poses of all shapenet objects above the ground without any collisions in-between
- bproc.object.sample_poses(meshs,
- objects_to_check_collisions = meshs + rnd_par.scene.collision_objects,
- sample_pose_func = sample_pose)
-
- # Run the simulation and fix the poses of the shapenet objects at the end
- bproc.object.simulate_physics_and_fix_final_poses(min_simulation_time=4, max_simulation_time=20, check_object_interval=1)
-
- # Find point of interest, all cam poses should look towards it
- poi = bproc.object.compute_poi(meshs)
-
- coord_max = [0.1, 0.1, 0.1]
- coord_min = [0., 0., 0.]
-
- with open(log_txt, "a") as fh:
- fh.write("*****************\n")
- fh.write(f"{r}) poi = {poi}\n")
- i = 0
- for o in meshs:
- i += 1
- loc = o.get_location()
- euler = o.get_rotation_euler()
- fh.write(f" {i} : {o.get_name()} {loc} {euler}\n")
- for j in range(3):
- if loc[j] < coord_min[j]:
- coord_min[j] = loc[j]
- if loc[j] > coord_max[j]:
- coord_max[j] = loc[j]
-
- # Sample up to X camera poses
- #an = np.random.uniform(0.78, 1.2) #1. #0.35
- for i in range(rnd_par.n_cam_pose):
- # Sample location
- location = bproc.sampler.shell(center=rnd_par.center_shell,
- radius_min=rnd_par.radius_range[0],
- radius_max=rnd_par.radius_range[1],
- elevation_min=rnd_par.elevation_range[0],
- elevation_max=rnd_par.elevation_range[1])
-            # coordinate along which the camera position will be sampled
-            j = random.randint(0, 2)
-            # one-time shift along the randomly chosen coordinate
- d = (coord_max[j] - coord_min[j]) / rnd_par.n_sample_on_pose
- if location[j] < 0:
- d = -d
- for _ in range(rnd_par.n_sample_on_pose):
- # Compute rotation based on vector going from location towards poi
- rotation_matrix = bproc.camera.rotation_from_forward_vec(poi - location, inplane_rot=np.random.uniform(-0.7854, 0.7854))
- # Add homog cam pose based on location an rotation
- cam2world_matrix = bproc.math.build_transformation_mat(location, rotation_matrix)
- bproc.camera.add_camera_pose(cam2world_matrix)
- location[j] -= d
- # render the whole pipeline
- data = bproc.renderer.render()
- # Write data to bop format
- bproc.writer.write_bop(res_dir,
- target_objects = all_meshs, # Optional[List[MeshObject]] = None
- depths = data["depth"],
- depth_scale = 1.0,
- colors = data["colors"],
- color_file_format=rnd_par.image_format,
- append_to_existing_output = (r>0),
- save_world2cam = False) # world coords are arbitrary in most real BOP datasets
- # dataset="robo_ds",
-
- models_dir = os.path.join(res_dir, DIR_MODELS)
- os.mkdir(models_dir)
-
- data = []
- for i,objn in enumerate(rnd_par.models.names):
- rec = {}
- rec["id"] = i+1
- rec["name"] = objn
- rec["model"] = os.path.join(DIR_MODELS, os.path.split(rnd_par.models.filenames[i])[1]) # путь относительный
- t = [obj.get_bound_box(local_coords=True).tolist() for obj in all_meshs if obj.get_name() == objn]
- rec["cuboid"] = t[0]
- data.append(rec)
- shutil.copy2(rnd_par.models.filenames[i], models_dir)
-        f = (os.path.splitext(rnd_par.models.filenames[i]))[0] + ".mtl" # material file
- if os.path.isfile(f):
- shutil.copy2(f, models_dir)
-
- with open(os.path.join(res_dir, FILE_RBS_INFO), "w") as fh:
- json.dump(data, fh, indent=2)
-
- """
-    !!! categories -> name is taken from category_id !!!
-    see below:
- blenderproc.python.writer : BopWriterUtility.py
- class _BopWriterUtility
- def calc_gt_coco
- ...
- CATEGORIES = [{'id': obj.get_cp('category_id'), 'name': str(obj.get_cp('category_id')), 'supercategory':
- dataset_name} for obj in dataset_objects]
-
-    therefore we replace the category name in the annotation
- """
- def change_categories_name(dir: str):
- coco_file = os.path.join(dir,FILE_GT_COCO)
- with open(coco_file, "r") as fh:
- data = json.load(fh)
- cats = data["categories"]
-
- for i,cat in enumerate(cats):
- cat["name"] = rnd_par.models.names[i] #obj_names[i]
-
- with open(coco_file, "w") as fh:
- json.dump(data, fh, indent=1)
-
- def explore(path: str):
- if not os.path.isdir(path):
- return
- folders = [
- os.path.join(path, o)
- for o in os.listdir(path)
- if os.path.isdir(os.path.join(path, o))
- ]
- for path_entry in folders:
- print(path_entry)
- if os.path.isfile(os.path.join(path_entry,FILE_GT_COCO)):
- change_categories_name(path_entry)
- else:
- explore(path_entry)
-
- if Not_Categories_Name:
- explore(res_dir)
- return 0 # success
-
-def _get_models(par, data) -> int:
- global all_meshs
-
- par.models = lambda: None
- par.models.n_item = len(data)
- if par.models.n_item == 0:
- return 0 # no models
-
-    # load the objects
- par.models.names = [] # obj_names
- par.models.filenames = [] # obj_filenames
- i = 1
- for f in data:
- nam = f["name"]
- par.models.names.append(nam)
- ff = f["fbx"] # _get_path_model(nam)
- par.models.filenames.append(ff)
- if not os.path.isfile(ff):
- print(f"Error: no such file '{ff}'")
- return -1
- # !!! dir with meshs
- par.details_dir = os.path.split(ff)[0]
-
- obj = bproc.loader.load_obj(ff)
- all_meshs += obj
-        obj[0].set_cp("category_id", i) # starting from 1
- i += 1
- return par.models.n_item
-
-def _get_scene(par, data) -> int:
- # load scene
- par.scene = lambda: None
- objs = data["objects"]
- par.scene.n_obj = len(objs)
- if par.scene.n_obj == 0:
- return 0 # empty scene
- lights = data["lights"]
- par.scene.n_light = len(lights)
- if par.scene.n_light == 0:
- return 0 # no lighting
- if len(rnd_par.details_dir) == 0:
- return 0 # no path to details
-
- par.scene.objs = []
- par.scene.collision_objects = []
- for f in objs:
- ff = _get_path_object(f["name"])
- if not os.path.isfile(ff):
- print(f"Error: no such file '{ff}'")
- return -1
- obj = bproc.loader.load_obj(ff)
- obj[0].set_cp("category_id", 999)
- coll = f["collision_shape"]
- if len(coll) > 0:
- obj[0].enable_rigidbody(False, collision_shape=coll)
- par.scene.collision_objects += obj
- par.scene.objs += obj
-
- if not par.scene.collision_objects:
- print("Collision objects not found in the scene")
- return 0
- par.scene.obj_data = objs
- par.scene.light_data = lights
- return par.scene.n_obj
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser()
- parser.add_argument("--form", required=True, help="Json-string with dataset parameters")
- parser.add_argument("--path", required=True, help="Output path")
- args = parser.parse_args()
-
- if args.form[-5:] == ".json":
- if not os.path.isfile(args.form):
- print(f"Error: no such file '{args.form}'")
- exit(-1)
- with open(args.form, "r") as f:
- j_data = f.read()
- else:
- j_data = args.form
- try:
- cfg = json.loads(j_data)
- except json.JSONDecodeError as e:
- print(f"JSon error: {e}")
- exit(-2)
-
- # output_dir = args.path
-
- ds_cfg = cfg["output"] # dataset config
- generation = ds_cfg["generation"]
- cam_pos = ds_cfg["camera_position"]
- models_randomization = ds_cfg["models_randomization"]
-
- rnd_par = lambda: None
- rnd_par.output_dir = args.path # cfg["local_path"]
-
- if not os.path.isdir(rnd_par.output_dir):
- print(f"Error: invalid path '{rnd_par.output_dir}'")
- exit(-3)
-
- rnd_par.single_object = False # True
- rnd_par.details_dir = ""
- # rnd_par.ds_name = os.path.split(rnd_par.output_dir)[1] # cfg["name"]
- rnd_par.dataset_objs = ds_cfg["datasetObjects"]["details"] # ["knight"]
- rnd_par.n_cam_pose = generation["n_cam_pose"]
- rnd_par.n_sample_on_pose = generation["n_sample_on_pose"]
- rnd_par.n_series = generation["n_series"]
- rnd_par.image_format = generation["image_format"]
- rnd_par.image_size_wh = generation["image_size_wh"]
- rnd_par.center_shell = cam_pos["center_shell"]
- rnd_par.radius_range = cam_pos["radius_range"]
- rnd_par.elevation_range = cam_pos["elevation_range"]
- rnd_par.models_randomization = models_randomization
- rnd_par.loc_range_low = models_randomization["loc_range_low"]
- rnd_par.loc_range_high = models_randomization["loc_range_high"]
-
- bproc.init()
-
- all_meshs = []
- if _get_models(rnd_par, rnd_par.dataset_objs) <= 0:
- print("Error: no models in config")
- exit(-4)
- if _get_scene(rnd_par, ds_cfg["scene"]) <= 0:
- print("Error: empty scene in config")
- exit(-5)
- exit(render())
\ No newline at end of file
diff --git a/docs/framework_debug_3part.md b/docs/framework_debug_3part.md
deleted file mode 100644
index 5bdba4b..0000000
--- a/docs/framework_debug_3part.md
+++ /dev/null
@@ -1,17 +0,0 @@
-# Framework: debugging
-
-After the object detection mechanism based on the YoloV4 method had been debugged and tested, we decided to implement a more advanced and modern method, Yolo version 8 ([YoloV8](https://github.com/ultralytics/ultralytics)).
-Building on previous experience, a ROS2 lifecycle node was taken as the basis, driven by the execution of Behavior Tree actions. While working on this module and debugging it further, the advantages of YoloV8 over YoloV4 became apparent: the version 8 weights file is about 6 MB, versus about 244 MB for version 4. Recognition accuracy on real images of the models we printed also improved (Fig. 1). As models we used a set of chess pieces printed on a 3D printer.
-
-
-Fig. 1
-
-While the Object Detection module was being debugged, the difficulty of designing and debugging this skill became apparent: besides the main logic of the ROS node, parameter passing in the behavior tree also had to be created and debugged. A scheme for generalizing the interface for arbitrary skills was therefore proposed.
-
-
-
-It uses a separate Interface Node that implements the interaction between the behavior tree execution system (BT Engine Node) and the skill library. The skills themselves are packaged as separate ROS packages with a JSON description file, which allows the interface elements, as well as the skill launch scheme, to be described declaratively (a sketch of reading such a description follows below).
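-
-As a rough, hypothetical sketch (not code from this repository), an interface node could load such a JSON description and enumerate the behavior-tree actions it exposes; the file name `skill_package.json` and the helper names are assumptions, while the `BTAction`/`name` keys mirror the SkillPackage examples found elsewhere in these documents:
-
-```python
-import json
-
-def load_skill_description(path: str) -> dict:
-    """Read a skill package description file (illustrative helper)."""
-    with open(path, "r", encoding="utf-8") as fh:
-        return json.load(fh)
-
-def list_bt_actions(skill: dict) -> list[str]:
-    """Each BTAction entry declares a name the BT engine can call."""
-    return [action["name"] for action in skill.get("BTAction", [])]
-
-if __name__ == "__main__":
-    skill = load_skill_description("skill_package.json")  # hypothetical path
-    print(list_bt_actions(skill))  # e.g. ['peConfigure', 'peStop']
-```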
-
-Such an interface node was implemented; it simplified both composing and executing behavior trees and made it easier to build the skill library itself.
-
-The first skill to use the interface node was an implementation of the 6D object pose estimation method [DOPE](https://github.com/NVlabs/Deep_Object_Pose).
diff --git a/docs/img/P_curve.png b/docs/img/P_curve.png
deleted file mode 100644
index f2a2125..0000000
Binary files a/docs/img/P_curve.png and /dev/null differ
diff --git a/docs/img/qXX7sBMbsvA.jpg b/docs/img/qXX7sBMbsvA.jpg
deleted file mode 100644
index 17a6b46..0000000
Binary files a/docs/img/qXX7sBMbsvA.jpg and /dev/null differ
diff --git a/docs/img/scheme1.jpg b/docs/img/scheme1.jpg
deleted file mode 100644
index 63c3988..0000000
Binary files a/docs/img/scheme1.jpg and /dev/null differ
diff --git a/docs/obj_detection_use_case.drawio b/docs/obj_detection_use_case.drawio
deleted file mode 100644
index d0a3322..0000000
--- a/docs/obj_detection_use_case.drawio
+++ /dev/null
@@ -1,1262 +0,0 @@
diff --git a/docs/randomization.md b/docs/randomization.md
deleted file mode 100644
index e065e77..0000000
--- a/docs/randomization.md
+++ /dev/null
@@ -1,123 +0,0 @@
-# Randomization
-
-### During robot training (Ilya Uraev)
-
-- Gravity
-- Camera position
-- Robot configuration (i.e. the joint positions)
-- Position of the object or point that has to be reached
-- Texture of the placement surface
-- To tie in with robot_builder, the number of the robot's degrees of freedom can also be randomized
-- The robot spawn positions can be randomized as well
-
-### During dataset creation (Alexander Shushpanov)
-
-- Spawn zone of the active objects
-- Camera position: radius and inclination of the placement sphere, and the center of that sphere
-- Light sources: number, type, spatial placement, color, intensity
-- Material light-reflection properties: specularity, roughness, metallic, color
-
- Hyperparameters:
-- number of series (spawns of active objects)
-- number of camera positions per series
-- number of camera shifts per pose
-
-### Combined list of randomization parameters
-
-- Position of the target (active) object(s) in the work zone
-- Camera position: radius and inclination of the placement sphere, and the center of that sphere
-- Object surface textures and/or material light-reflection properties: specularity, roughness, metallic, color
-- Light sources: number, type, spatial placement, color, intensity
-- Robot configuration (joint positions), number of degrees of freedom and initial placement
-- Gravity
-
-## Web service for dataset generation
-
-To implement the user interface of the web service, we developed a schema for describing the randomization parameters. Using it allows the parameter configuration to change depending on the task. We also plan to use this parameter-input module in reinforcement learning tasks.
-
-An example of such a schema:
-```
-ENUM T = "ObjectDetection","PoseEstimation"
-ENUM C = "","BOX","SPHERE","CAPSULE","CYLINDER","CONE","CONVEX_HULL","MESH","COMPOUND"
-ENUM L = "POINT","SUN"
-ENUM F = "JPEG","PNG"
-
-MODELS = {
- "id": ${ID:number:1},
- "name": ${NAME:string:""},
- "model": ${MODEL:string:"models/1.fbx"}
-}
-OBJECTS_SCENE = {
- "name": ${NAME:string:""},
- "collision_shape": ${enum:C:"BOX"},
- "loc_xyz": [${LOC_XYZ_1:number:0}, ${LOC_XYZ_2:number:0}, ${LOC_XYZ_3:number:0}],
- "rot_euler": [${ROT_EULER_1:number:0}, ${ROT_EULER_2:number:0}, ${ROT_EULER_3:number:0}],
- "material_randomization": {
- "specular": [${SPECULAR_1:number:0}, ${SPECULAR_2:number:1}],
- "roughness": [${ROUGHNESS_1:number:0}, ${ROUGHNESS_2:number:1}],
- "metallic": [${METALLIC_1:number:0}, ${METALLIC_2:number:1}],
- "base_color": [
- [
- ${BASE_COLOR_1:number:0},
- ${BASE_COLOR_2:number:0},
- ${BASE_COLOR_3:number:0},
- ${BASE_COLOR_4:number:1}
- ],
- [
- ${BASE_COLOR_5:number:1},
- ${BASE_COLOR_6:number:1},
- ${BASE_COLOR_7:number:1},
- ${BASE_COLOR_8:number:1}
- ]
- ]
- }
-}
-LIGHTS = {
- "id": ${ID:number:1},
- "type": ${enum:L:"POINT"},
- "loc_xyz": [${LOC_XYZ_1:number:5}, ${LOC_XYZ_2:number:5}, ${LOC_XYZ_3:number:5}],
- "rot_euler": [${ROT_EULER_1:number:-0.06}, ${ROT_EULER_2:number:0.61}, ${ROT_EULER_3:number:-0.19}],
- "color_range_low": [${COLOR_RANGE_LOW_1:number:0.5}, ${COLOR_RANGE_LOW_2:number:0.5}, ${COLOR_RANGE_LOW_3:number:0.5}],
- "color_range_high":[${COLOR_RANGE_HIGH_1:number:1}, ${COLOR_RANGE_HIGH_2:number:1}, ${COLOR_RANGE_HIGH_3:number:1}],
- "energy_range":[${ENERGY_RANGE_1:number:400},${ENERGY_RANGE_2:number:900}]
-}
-
-{
- "typedataset": ${enum:T:"ObjectDetection"},
- "dataset_path": ${DATASET_PATH:string},
- "models":${ARRAY:MODELS:[]},
- "models_randomization":{
- "loc_range_low": [${LOC_RANGE_LOW_1:number:-1}, ${LOC_RANGE_LOW_2:number:-1}, ${LOC_RANGE_LOW_3:number:0}],
- "loc_range_high": [${LOC_RANGE_HIGH_1:number:1}, ${LOC_RANGE_HIGH_2:number:1}, ${LOC_RANGE_HIGH_3:number:2}]
- },
- "scene":{
- "objects": ${ARRAY:OBJECTS_SCENE:[]},
- "lights": ${ARRAY:LIGHTS:[]},
- },
- "camera_position":{
- "center_shell": [${CENTER_SHELL_1:number:0}, ${CENTER_SHELL_2:number:0}, ${CENTER_SHELL_3:number:0}],
- "radius_range": [${RADIUS_RANGE_1:number:0.4}, ${RADIUS_RANGE_2:number:1.4}],
- "elevation_range": [${ELEVATION_RANGE_1:number:10}, ${ELEVATION_RANGE_2:number:90}]
- },
- "generation":{
- "n_cam_pose": ${N_CAM_POSE:number:5},
- "n_sample_on_pose": ${N_SAMPLE_ON_POSE:number:3},
- "n_series": ${N_SERIES:number:100},
- "image_format": ${enum:F:"jpg"},
- "image_size_wh": [${IMAGE_SIZE_WH_1:number:640}, ${IMAGE_SIZE_WH_2:number:480}]
- }
-}
-```
-
-First, the enumerations (ENUM) are declared: each is given a name and a list of possible values. Then named composite objects are described, followed by the main parameter description block. This block is a JSON dictionary of parameters that the parameter-input module produces as its output. Each key of this dictionary is annotated with a mini-schema describing the value to be entered.
-The format is:
-```
-${<variable_name>:<type>:<default_value>}
-```
-or an array of objects
-```
-${ARRAY:<object_name>:[]}
-```
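-
-As a rough illustration (not part of the source), a template written in this mini-schema can be turned into a concrete parameter dictionary by substituting the default values; the regular expression and function below are assumptions and only handle the plain `${NAME:type:default}` form, not the `enum`/`ARRAY` variants:
-
-```python
-import json
-import re
-
-# Matches ${NAME:type:default} and captures the three fields.
-PLACEHOLDER = re.compile(r"\$\{(\w+):(\w+):([^}]*)\}")
-
-def fill_defaults(template: str) -> dict:
-    """Replace every placeholder with its default value and parse the result as JSON."""
-    def repl(match: re.Match) -> str:
-        _name, _type, default = match.groups()
-        return default if default else '""'
-    return json.loads(PLACEHOLDER.sub(repl, template))
-
-snippet = '{"n_cam_pose": ${N_CAM_POSE:number:5}, "n_series": ${N_SERIES:number:100}}'
-print(fill_defaults(snippet))  # {'n_cam_pose': 5, 'n_series': 100}
-```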
-
-For our dataset generation algorithm for computer vision tasks (ObjectDetection, PoseEstimation) we chose the [BOP: Benchmark for 6D Object Pose Estimation](https://bop.felk.cvut.cz/home/) format, in its [BOP-Classic](https://bop.felk.cvut.cz/datasets/) version.
-Its annotations contain all the ground-truth data required to train neural network models both to detect objects in an image and to recognize the poses of the target objects.
\ No newline at end of file
diff --git a/docs/scene_generator.md b/docs/scene_generator.md
deleted file mode 100644
index 85df89c..0000000
--- a/docs/scene_generator.md
+++ /dev/null
@@ -1,435 +0,0 @@
-## Framework meta-model
-
-## Mapping between the Digital Twin Definition Language and the Robossembler meta-model
-
-Abstraction layers
-1. DTDL - BASETYPES, COMPLEXTYPES (Array, Map, ...), DT_TYPES
-2. ROBOSSEMBLER_CONTEXT (Represents)
-3. USE_CASE_CONTEXT (Entities)
-4. USE_CASE_INSTANCE (Product)
-
-## Comparison of the Digital Twin Definition Language and ROS 2 meta-models
-
-| Entity | DTDL | ROS | Robonomics |
-| - | - | - | - |
-| Describes the entire content of the twin, including the entities listed below | **Interface** | **Interface Spec** | - |
-| The transmitted data and its type | **Telemetry** | **Topic** | **Datalog Hash** |
-| A property describing some state of the twin; may be read-only or read/write; also covers state synchronization between components (e.g. a sensor reading written to the cloud) | **Property** | **Parameters** | **Launch Param** |
-| A function or operation that can be performed on the twin (e.g. `reboot`) | **Command** | **Service** / **Action** | **Launch** |
-| A structure of primitive data types (Array, Enum, Map, Object) for serialization (to JSON, Avro, Protobuf) | **Schema** | **IDL** | - |
-| A part of an interface (a part-of relation with some other twin) | **Component** | - | - |
-| A link to another digital twin. Relationships can carry different semantics, e.g. `contains` ("a floor contains a room"), `cools` ("hvac cools room"), `isBilledTo` ("the bill is issued to the user") | **Relationship** | - | - |
-
-
-```json
-# base value types (from JSON Schema)
-"string","number", "boolean", "array", "object";
-
-{
- "type": "object",
- "properties": {
- "REPRESENT": {
- "type": {
- "enum": [ "OBJECT_LINK", "KEY", "FILEPATH", "VALUE", "TREE", "ARRAY", "SEQUENCE" ]
- }
- }
- }
-}
-
-# representations
-ENUM REPRESENT = "OBJECT_LINK", # a string reference to an object (ENTITY)
- "KEY", # a unique string key (used as a record identifier)
- "FILEPATH", # a string with a file path
- "VALUE", # a literal value
- "TREE", # a tree representation
- "ARRAY", # an array of values
- "SEQUENCE"; # an array of references to objects of a given type
-
-# entities
-
-ENUM ENTITY = "MESH", "PART", "ASSET", "BTREE", "BTACTION", "SKILL", "DATASET", "INTERFACE", "WEIGHTS", "DEVICE";
-
-ENUM DEVICE = "ROBOT", "SENSOR";
-
-type SCENE = {
- "objects": [ { ref: "1", type: "PART" }; { ref: "2", type: "PART" }; ]
-};
-
-type PARAM = {
- "sid": \${KEY:string:""},
- "name": \${NAME:string:""},
- "represent": \${REPRESENT:Enum:"VALUE"},
- "link": \${LINK:Enum:"topic"}
-};
-
-### part surface type
-
-type MESH/SURFACE = {
- "sid": \${KEY:string:""};
- "path": { "stl": "PATH/*.stl", "brep": "PATH/*.brep", "fbx": "PATH/*.fbx", }
-};
-
-type PART = {
- "sid": \${NAME:string:""},
- "name": \${NAME:string:""},
- "pose6d": { "loc_xyz": \${XYZ:Array3:[0.0,0.0,0.0] }, "rot_xyzw": \${XYZW:Array4:[0.0,0.0,0.0,1.0]} },
- "attributes": [
- "Robossembler_NonSolid": True
- ],
- "surface": { "stl": "PATH/*.stl", "brep": "PATH/*.brep", },
- "material": "/path/to/robossembler/materials/mmm.FCMat",
- "unit_scale": \${UNIT_SCALE:number:1.0},
- "dimensions": \${Array3:[0.0,0.0,0.0]},
- "assets": { "fbx": "PATH/*.fbx", "blend": "PATH/*.blend", }
-};
-
-type DATASET = {
- "sid": \${NAME:string:""},
- "name": \${NAME:string:""},
- "objects": \${SEQUENCE:PART},
- "environment": \${SEQUENCE:PART},
- "randomisation": \${FILE}
-};
-
-type WEIGHTS = {
- "sid": \${NAME:string:""},
- "name": \${NAME:string:""},
- "file": \${FILE:string:"*.pth"},
- "epoch": \${EPOCH:number:""},
- "dataset": \${OBJECT_LINK:DATASET}
-};
-
-type TOPIC = {
- "sid": ...,
- "name": "topic_name",
- "msg": "sensor_msgs/Image",
-};
-
-DEVICE = {
- "sid": 1235,
- "name": "dev",
- "topics": \${SEQUENCE:TOPIC},
-}
-
-// DEVICE -> TOPIC LIST -> {DEVICE: {TOPIC LIST}}
-
-type POSE_ESTIMATION = {
- "object_name": \${OBJECT_LINK:PART},
- "weights": \${OBJECT_LINK:WEIGHTS},
- "topic_name": \${OBJECT_LINK:TOPIC}
-};
-
-type SKILL = {
- "sid": \${NAME:string:""},
- "name": \${NAME:string:""},
- "interface": \${INTERFACE}
-};
-
-command_LifeCycle = "run", "stop"
-
-type ASSEMBLY_SEQUENCE = \{SEQUENCE:TASK};
-
-# task1 = { source_state = []; target_state = [ p1 ] }
-# task2 = { source_state = [ p1 ]; target_state = [ p1 p2 ] }
-# task3 = { source_state = [ p1 p2 ]; target_state = [ p1 p2 p3 ] }
-
-task = { source_state = \${TREE:PART}; target_state = \${TREE:PART} }
-
-type TASK = {
- "sid": ...
- "action": \${BT_TREE}
- "source_state": \${TREE:PART} // PART
- "target_state": \${TREE:PART} // PRODUCT
-};
-
-type DOMAIN = {
- "objects": \{SEQUENCE:PART}
- "predicates": \{OBJECT_LINK:CONDITION}
- "actions": \${OBJECT_LINK:BT_ACTION}
-};
-
-type BTREE = {
- "sid": \${NAME:string:""},
- "name": \${NAME:string:""},
-};
-
-```
-
-## Device Package
-
-### Camera
-
-```json
-{
- "DevicePackage": { "name": "Robossembler", "version": "1.0", "format": "1" },
- "Module": { "name": "RealSense Dxx", "description": "ROS Wrapper for Intel(R) RealSense(TM) Cameras" },
- "Launch": { "package": "realsense2_camera", "executable": "rs_launch.py" },
- "DTwin": [
- { "interface": {
- "input": [
- { "name": "camera_namespace", "type": "STRING" }, // -- /robot_drawer/455_1/camera_info
- { "name": "camera_name", "type": "STRING" },
- { "name": "serial_port", "type": "STRING" },
- ],
- "output": [
- { "name": "camera_info", "type": "TOPIC", "topic_name": "/${camera_namespace}/${camera_name}/camera_info" },
- { "name": "pose", "type": "TOPIC", "msg": "Pose" }
- ]
- },
- }
- ],
- "Settings": [
- { "name": "camera_config", "description": "Camera Config", "type":"file", "defaultValue": "{ rgb_camera.profile: 1280x720x15 }" }
- ]
-}
-```
-
-### Robot RBS
-
-
-```json
-{
- "DevicePackage": { "name": "Robossembler", "version": "1.0", "format": "1" },
- "Module": { "name": "RBS", "description": "Main Robot" },
- "Launch": { "package": "rbs_bringup", "executable": "single_robot.launch.py" },
- "DTwin": [
- { "interface": {
- "input": [
- { "name": "robot_namespace", "type": "STRING", "defaultValue": "rbs_arm" },
- { "name": "dof", "type": "INT", "defaultValue": 6 }
- ]
- }
- }
- ],
- "Settings": [
- { "name": "robot_type", "description": "Type of robot by name", "defaultValue": "rbs_arm" }
- ]
-}
-```
-
-### Robot UR5
-
-
-```json
-{
- "DevicePackage": { "name": "Robossembler", "version": "1.0", "format": "1" },
- "Module": { "name": "UR", "description": "..." },
- "Launch": { "package": "ur_package", "executable": "ur5_single_arm.py" },
- "DTwin": [
- { "interface": {
- "input": [
- { "robot_namespace": "robot1" },
- ],
- },
- }
- ],
- "Settings": [
- { "name": "", "description": "Config", "type":"file", "defaultValue": "{}" }
- ]
-}
-```
-
-```json
-{
- "SkillPackage": { "name": "Robossembler", "version": "1.0", "format": "1" },
- "Module": { "name": "PoseEstimation", "description": "Pose Estimation skill with DOPE" },
- "Launch": { "package": "rbs_perception", "executable": "pe_dope_lc.py", "name": "lc_dope" },
- "BTAction": [
- { "name": "peConfigure",
- "type": "run",
- "interface": {
- "input": [
- { "name": "image_raw", "type": "TOPIC", "msg": "Image" },
- { "name": "camera_info", "type": "TOPIC", "msg": "CameraInfo" },
- { "name": "object_name", "type": "PART", "msgs": "Part" }, // string
- { "name": "weights", "type": "WEIGHTS", "msgs": "" },
- ],
- "output": [
- { "name": "pose_estimation", "type": "TOPIC" },
- ]
- },
- },
- { "name": "peStop", "type": "stop", "interface": { "input": [], "output": [] } }
- ],
- "Settings": [
- { "name": "publishDelay", "description": "Publish Delay", "type":"float", "defaultValue": 0.5 },
- { "name": "tf2_send_pose", "description": "Transform Pose", "type":"int", "defaultValue": 1 },
- { "name": "mesh_scale", "description": "Part Mesh Scale", "type":"float", "defaultValue": 0.001 }
- ]
-}
-```
-
-## Atomic skills
-
-```xml
-<!-- Open or close -->
-```
-
-## Пример интерфейса в DTDL
-
-```json
-{
- "@context": "...;3",
- "@id": "robossember_assembly;1",
- "@type": "Interface",
- "displayName": "Bolt",
- "contents": [
- {
- "@type": "Property",
- "name": "Label",
- "schema": "string"
- },
- {
- "@type": "Property",
- "name": "mesh",
- "writable": true,
- "schema": "string"
- },
- {
- "@type": "Telemetry",
- "name": "Pose6D",
- "writable": true,
- "schema": {
- "@type": "Object",
- "fields": [
- {
- "name": "x",
- "schema": "double"
- },
- {
- "name": "y",
- "schema": "double"
- },
- {
- "name": "z",
- "schema": "double"
- }
- ]
- }
- },
- {
- "@type": "Relationship",
- "name": "contains",
- "target": "dtmi:com:robossember_assembly:Bolt;1"
- },
- {
- "@type": "Component",
- "name": "frontCamera",
- "schema": "dtmi:com:example:Camera;3"
- },
- ]
-}
-```
-
-## Example of a scene description file exported from Blender
-
-TODO: describe the need for each of the main elements and what it is used for
-
-```json
-{
- "assets": [
- {
- "name": "robo-arm",
- "id": "629b29d7-fe15-428b-9014-6c3dde045af8",
- "model_path": "../model.urdf"
- }
- ],
- "instances": [
- // a modified URDF model
- {
- "id": "0e29084f-1190-45d0-bd59-8f4ce2591gb1",
- "name": "robo-arm-1",
- //assetId points to the parent asset
- "assetId": "629b29d7-fe15-428b-9014-6c3dde045af8",
- "pose": {
- "x": 12.0,
- "y": 1.0,
- "z": 4.0,
- "roll": 1.0,
- "pitch": 4.0,
- "yaw": 5.0
- },
- //if the internal state of the URDF model has been changed
- "tags": [
- ""
- ]
- },
- {
- "id": "0e27984f-8890-4d90-bd59-8f4ce29920f9",
- "name": "robo-arm-2",
- //assetId points to the parent asset
- "assetId": "629b29d7-fe15-428b-9014-6c3dde045af8",
- //if the model keeps its default pose
- "pose": null,
- //if the internal state of the URDF model is unchanged
- "tags": null
- },
- {
- "type": "LIGHT",
- "light_type": "SPOT",
- "power": 10.0,
- "spot_angle": 45.0,
- "name": null,
- "pose": {
- "x": 0.0,
- "y": 0.0,
- "z": 0.0,
- "roll": 0.0,
- "pitch": 0.0,
- "yaw": 0.0
- }
- }
- ]
-}
-```
diff --git a/docs/Модуль технологической подготовки.md b/docs/Модуль технологической подготовки.md
deleted file mode 100644
index 191556c..0000000
--- a/docs/Модуль технологической подготовки.md
+++ /dev/null
@@ -1,60 +0,0 @@
-
-
-# Technological preparation module
-
-Input: a STEP model of the equipment being described.
-
-1. Describing the available entities:
- - We can describe the actions performed with the machine and its states. To connect it with the outside world, some generalized object is needed whose property is the ability to affect those states, while the object itself may be anything. It seems better to describe the levers themselves rather than whoever presses them.
-
- - Objects can be described by creating zones around them and/or by creating a ghost of the equipment geometry (which may be complicated and redundant).
- - To do this, a box/cylinder comparable in size to the zone in question can be created. Using the tools of the Part workbench, an object with the desired name is created (Machine, work zone, etc.). This zone is bound to the machine geometry via parent relations.
-
-2. Describing actions
- - An actions folder is created, into which the created actions are saved.
- - The interface for creating actions is similar to the interface for defining the other entities. Through it we set references to existing actions and elements and specify their type (triggers, working bodies, final states, initial states) and their status (a yes/no choice).
- - We set references to the geometry bound to these actions (spools, stators, consumables, etc.). For this, their geometry must be imported into the model; the reference points to the specific imported file. If the part has an identifier, that identifier can be referenced instead.
-
-3. Defining states and variables
- - Variables are defined using FreeCAD's parametrization facilities; the tools for this can be taken from ASM4, using its Variables functionality and configuration panels.
- - For the states of the variables, a separate directory is created in the same way (ASM4 already provides one when a model is created).
-
-4. The result of the description is a model with an object tree whose properties hold all the required information. The geometric characteristics are saved as JSON and sent to the environments that work with geometry and physics. The actions and geometry are substituted into the corresponding sections of a PDDL template (see the sketch below).
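-
-As a purely illustrative sketch (the real PDDL template and the JSON layout of an exported action are not given in this document, so the file name and keys below are assumptions), such a substitution could look like this:
-
-```python
-import json
-from string import Template
-
-# Hypothetical PDDL action template with the sections mentioned above.
-ACTION_TEMPLATE = Template("""(:action $name
-  :parameters ($parameters)
-  :precondition (and $preconditions)
-  :effect (and $effects))""")
-
-def action_to_pddl(path: str) -> str:
-    """Fill the PDDL template from an exported action description (illustrative)."""
-    with open(path, "r", encoding="utf-8") as fh:
-        action = json.load(fh)
-    return ACTION_TEMPLATE.substitute(
-        name=action["name"],
-        parameters=" ".join(action["parameters"]),
-        preconditions=" ".join(action["preconditions"]),
-        effects=" ".join(action["effects"]),
-    )
-```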
-
-## Example of an object description
-
-Action - "Loading a 3D printer with filament"
-
- - |- Objects:
- - - 3D printer [printer_id] / a rectangular zone matching the printer's dimensions; the zone is bound to the equipment geometry
- - Workzone [printer_id] / a rectangular zone; a reference to the workzone object that holds the dimensions and position of the work zone relative to the 3D printer
- - Wirenest [printer_id] / a cylindrical zone; a reference to the wirenest object (a cylinder) that stores the orientation and position of the socket for the filament spool
- - Filament [filament_id] / a filament spool of the required model, shape and dimensions
- - Observer [observer_id] / some entity (a manipulator, a person, a camera) that the machine addresses when external manipulations need to be performed on it
- - |- Action duration, s
-
-
 - - |- Initial states:
 - - There is enough filament (no)
 - - Observer free (yes)
 - - |- During the action:
 - - Observer [observer_id] free (no)
 - - Filament spool installed (no)
 - - |- After completion:
 - - Filament spool installed (yes)
 - - Observer [observer_id] free (yes)
 - - There is enough filament (yes)
-
-
---In the Variables section we can (should we?) semi-automatically or automatically list such states, bound to yes/no values.-- (Should default starting values be specified?)
-
-
-States listed separately would also be useful as references when defining actions, since actions communicate with each other not directly but through the fulfillment of a certain set of states.
-
-
-
-
-An example of an annotated model:
-
-
\ No newline at end of file
diff --git a/freecad_workbench b/freecad_workbench
deleted file mode 160000
index 08db658..0000000
--- a/freecad_workbench
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 08db6583ce94895103c94b1b70b4846ef581e624
diff --git a/rcg_pipeline b/rcg_pipeline
deleted file mode 160000
index 605d45e..0000000
--- a/rcg_pipeline
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 605d45e85ae0ca1076553f0e496442b0f358700c
diff --git a/simulation/asp/.gitignore b/simulation/asp/.gitignore
deleted file mode 100644
index c585e19..0000000
--- a/simulation/asp/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-out
\ No newline at end of file
diff --git a/simulation/asp/helper/fs.py b/simulation/asp/helper/fs.py
deleted file mode 100644
index 4031795..0000000
--- a/simulation/asp/helper/fs.py
+++ /dev/null
@@ -1,37 +0,0 @@
-import os
-import json
-import typing
-
-
-class FS:
- @staticmethod
- def readJSON(path: str):
- return json.loads(open(path).read())
-
- @staticmethod
- def writeFile(data, filePath, fileName):
- file_to_open = filePath + fileName
-
- f = open(file_to_open, "w", encoding="utf-8", errors="ignore")
- f.write(data)
- f.close()
-
- @staticmethod
- def readFile(path: str):
- return open(path).read()
-
- @staticmethod
- def readFilesTypeFolder(pathFolder: str, fileType=".json"):
- filesJson = list(
- filter(
- lambda x: x[-len(fileType):] == fileType, os.listdir(pathFolder)
- )
- )
- return filesJson
-
-
-def listGetFirstValue(iterable, default=False, pred=None):
- return next(filter(pred, iterable), default)
-
-
-def filterModels(filterModels, filterModelsDescription: list[str]):
- models = []
- for el in filterModelsDescription:
- models.append(listGetFirstValue(filterModels, None, lambda x: x.name == el))
- return models
diff --git a/simulation/asp/helper/xmlformatter.py b/simulation/asp/helper/xmlformatter.py
deleted file mode 100644
index cf41b29..0000000
--- a/simulation/asp/helper/xmlformatter.py
+++ /dev/null
@@ -1,877 +0,0 @@
-"""
-Format and compress XML documents
-"""
-import getopt
-import re
-import sys
-import xml.parsers.expat
-
-__version__ = "0.2.4"
-
-DEFAULT_BLANKS = False
-DEFAULT_COMPRESS = False
-DEFAULT_SELFCLOSE = False
-DEFAULT_CORRECT = True
-DEFAULT_INDENT = 2
-DEFAULT_INDENT_CHAR = " "
-DEFAULT_INLINE = True
-DEFAULT_ENCODING_INPUT = None
-DEFAULT_ENCODING_OUTPUT = None
-DEFAULT_EOF_NEWLINE = False
-
-
-class Formatter:
- # Use internal encoding:
- encoding_internal = None
-
- def __init__(
- self,
- indent=DEFAULT_INDENT,
- preserve=[],
- blanks=DEFAULT_BLANKS,
- compress=DEFAULT_COMPRESS,
- selfclose=DEFAULT_SELFCLOSE,
- indent_char=DEFAULT_INDENT_CHAR,
- encoding_input=DEFAULT_ENCODING_INPUT,
- encoding_output=DEFAULT_ENCODING_OUTPUT,
- inline=DEFAULT_INLINE,
- correct=DEFAULT_CORRECT,
- eof_newline=DEFAULT_EOF_NEWLINE,
- ):
- # Minify the XML document:
- self.compress = compress
- # Use self-closing tags
- self.selfclose = selfclose
- # Correct text nodes
- self.correct = correct
- # Decode the XML document:
- self.encoding_input = self.enc_normalize(encoding_input)
- # Encode output by:
- self.encoding_output = self.enc_normalize(encoding_output)
- # Insert indent = indent*level*indent_char:
- self.indent = int(indent)
- # Indent by char:
- self.indent_char = indent_char
- # Format inline objects:
- self.inline = inline
- # Don't compress this elements and their descendants:
- self.preserve = preserve
- # Preserve blanks lines (collapse multiple into one)
- self.blanks = blanks
- # Always add a newline character at EOF
- self.eof_newline = eof_newline
-
- @property
- def encoding_effective(self, enc=None):
- if self.encoding_output:
- return self.encoding_output
- elif self.encoding_internal:
- return self.encoding_internal
- elif self.encoding_input:
- return self.encoding_input
- else:
- return "UTF-8"
-
- def enc_normalize(self, string):
- """ Format an Encoding identifier to upper case. """
- if isinstance(string, str):
- return string.upper()
- return None
-
- def enc_encode(self, strg):
- """ Encode a formatted XML document in target"""
- if sys.version_info > (3, 0):
- return strg.encode(self.encoding_effective) # v3
- return strg.decode("utf-8").encode(self.encoding_effective) # v2
-
- def enc_output(self, path, strg):
- """ Output according to encoding """
- fh = sys.stdout
- if strg is not None:
- if path is not None:
- open(path, "w+b").write(strg)
- elif sys.version_info > (3, 0):
- fh.buffer.write(strg)
- else:
- fh.write(strg)
-
- def format_string(self, xmldoc=""):
- """ Format a XML document given by xmldoc """
- token_list = Formatter.TokenList(self)
- token_list.parser.Parse(xmldoc)
- return self.enc_encode(str(token_list))
-
- def format_file(self, file):
- """ Format a XML document given by path name """
- fh = open(file, "rb")
- token_list = Formatter.TokenList(self)
- token_list.parser.ParseFile(fh)
- fh.close()
- return self.enc_encode(str(token_list))
-
- class TokenList:
- # Being in a cdata section:
- cdata_section = False
- # Lock deletion of leading whitespace:
- desc_mixed_level = None
- # Lock indenting:
- indent_level = None
- # Reference the Formatter:
- formatter = None
- # Count levels:
- level_counter = 0
- # Lock deletion of whitespaces:
- preserve_level = None
-
- def __init__(self, formatter):
- # Keep tokens in a list:
- self._list = []
- self.formatter = formatter
- self.parser = xml.parsers.expat.ParserCreate(
- encoding=self.formatter.encoding_input
- )
- self.parser.specified_attributes = 1
- self.parser.buffer_text = True
- # Push tokens to buffer:
- for pattern in [
- "XmlDecl%s",
- "ElementDecl%s",
- "AttlistDecl%s",
- "EntityDecl%s",
- "StartElement%s",
- "EndElement%s",
- "ProcessingInstruction%s",
- "CharacterData%s",
- "Comment%s",
- "Default%s",
- "StartDoctypeDecl%s",
- "EndDoctypeDecl%s",
- "StartCdataSection%s",
- "EndCdataSection%s",
- "NotationDecl%s",
- ]:
- setattr(
- self.parser, pattern % "Handler", self.xml_handler(pattern % "")
- )
-
- def __iter__(self):
- return iter(self._list)
-
- def __len__(self):
- return len(self._list)
-
- def __getitem__(self, pos):
- if 0 <= pos < len(self._list):
- return self._list[pos]
- else:
- raise IndexError
-
- def __setitem__(self, pos, value):
- if 0 <= pos < len(self._list):
- self._list[pos] = value
- else:
- raise IndexError
-
- def __str__(self):
- """ Returns the formatted XML document in UTF-8. """
- for step in ["configure", "pre_operate", "post_operate"]:
- for tk in iter(self):
- getattr(tk, step)()
- result = ""
- for tk in iter(self):
- result += str(tk)
- if self.formatter.eof_newline and not result.endswith("\n"):
- result += "\n"
- return result
-
- def append(self, tk):
- """ Add token to tokenlist. """
- tk.pos = len(self._list)
- self._list.append(tk)
-
- def level_increment(self):
- """ Increment level counter. """
- self.level_counter += 1
-
- def level_decrement(self):
- """ Decrement level counter. """
- self.level_counter -= 1
-
- def token_descendant_mixed(self, tk):
- """ Mark descendants of mixed content. """
- if tk.name == "StartElement":
- # Mark every descendant:
- if tk.content_model in [2, 3] and self.desc_mixed_level is None:
- self.desc_mixed_level = tk.level
- return False
- return self.desc_mixed_level is not None
- elif tk.name == "EndElement":
- # Stop marking every descendant:
- if tk.level is self.desc_mixed_level:
- self.desc_mixed_level = None
- elif self.desc_mixed_level is not None:
- return True
- return False
- elif self.desc_mixed_level is None:
- return False
- return self.desc_mixed_level >= tk.level - 1
-
- def sequence(self, tk, scheme=None):
- """Returns sublist of token list.
- None: next to last
- EndElement: first to previous"""
- if scheme == "EndElement" or (scheme is None and tk.end):
- return reversed(self._list[: tk.pos])
- return self._list[(tk.pos + 1) :]
-
- def token_indent(self, tk):
- if self.formatter.inline:
- return self.token_indent_inline(tk)
- """ Indent outside of text of mixed content. """
- if tk.name == "StartElement":
- # Block indenting for descendants of text and mixed content:
- if tk.content_model in [2, 3] and self.indent_level is None:
- self.indent_level = tk.level
- elif self.indent_level is not None:
- return False
- return True
- elif tk.name == "EndElement":
- # Unblock indenting for descendants of text and mixed content:
- if tk.level == self.indent_level:
- self.indent_level = None
- elif self.indent_level is None:
- return True
- return False
- return self.indent_level is None
-
- def token_indent_inline(self, tk):
- """ Indent every element content - no matter enclosed by text or mixed content. """
- for itk in iter(self.sequence(tk, "EndElement")):
- if itk.level < tk.level and itk.name == "StartElement":
- if itk.content_model == 1:
- return True
- return False
- if (
- itk.level == tk.level
- and tk.name == "EndElement"
- and itk.name == "StartElement"
- ):
- if itk.content_model == 1:
- return True
- return False
- return True
-
- def token_model(self, tk):
- """Returns code for content model.
- 0: empty
- 1: element
- 2: text
- 3: mixed"""
- eflag = tflag = 0
- for itk in iter(self.sequence(tk)):
- # Element boundary found:
- if itk.level <= tk.level:
- break
- # Direct child found:
- elif (itk.level - 1) == tk.level:
- if itk.start:
- eflag = 1
- elif itk.not_empty:
- tflag = 2
- return eflag + tflag
-
- def token_preserve(self, tk):
- """Preseve eyery descendant of an preserved element.
- 0: not locked
- 1: just (un)locked
- 2: locked"""
- # Lock preserving for StartElements:
- if tk.name == "StartElement":
- if self.preserve_level is not None:
- return 2
- if tk.arg[0] in self.formatter.preserve:
- self.preserve_level = tk.level
- return 1
- return 0
- # Unlock preserving for EndElements:
- elif tk.name == "EndElement":
- if (
- tk.arg[0] in self.formatter.preserve
- and tk.level == self.preserve_level
- ):
- self.preserve_level = None
- return 1
- elif self.preserve_level is None:
- return 0
- return 2
- return self.preserve_level is not None
-
- def whitespace_append_trailing(self, tk):
- """ Add a trailing whitespace to previous character data. """
- if self.formatter.correct and tk.leading and tk.not_empty:
- self.whitespace_append(tk, "EndElement", "StartElement", True)
-
- def whitespace_append_leading(self, tk):
- """ Add a leading whitespace to previous character data. """
- if self.formatter.correct and tk.trailing and tk.not_empty:
- self.whitespace_append(tk)
-
- def whitespace_append(
- self, tk, start="StartElement", stop="EndElement", direct=False
- ):
- """ Add a whitspace to token list. """
- for itk in self.sequence(tk, start):
- if (
- itk.empty
- or (itk.name == stop and itk.descendant_mixed is False)
- or (itk.name == start and abs(tk - itk) == 1)
- ):
- break
- elif itk.not_empty or (itk.name == start and itk.descendant_mixed):
- self.insert_empty(itk, direct)
- break
-
- def whitespace_delete_leading(self, tk):
- """ Returns True, if no next token or all empty (up to next end element)"""
- if (
- self.formatter.correct
- and tk.leading
- and not tk.preserve
- and not tk.cdata_section
- ):
- for itk in self.sequence(tk, "EndElement"):
- if itk.trailing:
- return True
- elif itk.name in ["EndElement", "CharacterData", "EndCdataSection"]:
- return False
- return True
- return False
-
- def whitespace_delete_trailing(self, tk):
- """Returns True, if no next token or all empty (up to next end element)"""
- if (
- self.formatter.correct
- and tk.trailing
- and not tk.preserve
- and not tk.cdata_section
- ):
- for itk in self.sequence(tk, "StartElement"):
- if itk.end:
- return True
- elif (
- itk.name in ["StartElement", "StartCdataSection"]
- or itk.not_empty
- ):
- return False
- return True
- return False
-
- def insert_empty(self, tk, before=True):
- """ Insert an Empty Token into token list - before or after tk. """
- if not (0 < tk.pos < (len(self) - 1)):
- return False
- ptk = self[tk.pos - 1]
- ntk = self.formatter.CharacterData(self, [" "])
- ntk.level = max(ptk.level, tk.level)
- ntk.descendant_mixed = tk.descendant_mixed
- ntk.preserve = ptk.preserve * tk.preserve
- ntk.cdata_section = ptk.cdata_section or tk.cdata_section
- if before:
- self._list.insert(tk.pos + 1, ntk)
- else:
- self._list.insert(tk.pos, ntk)
- for i in range((tk.pos - 1), len(self._list)):
- self._list[i].pos = i
-
- def xml_handler(self, key):
- """ Returns lambda function which adds token to token list"""
- return lambda *arg: self.append(getattr(self.formatter, key)(self, arg))
-
- class Token(object):
- def __init__(self, tklist, arg):
- # Reference Token List:
- self.list = tklist
- # Token data:
- self.arg = list(arg)
- # Token is placed in an CDATA section:
- self.cdata_section = False
- # Token has content model:
- self.content_model = None
- # Remove trailing whitespaces:
- self.delete_trailing = False
- # Remove leading whitespaces:
- self.delete_leading = False
- # Token is descendant of text or mixed content element:
- self.descendant_mixed = False
- # Reference to formatter:
- self.formatter = tklist.formatter
- # Insert indenting white spaces:
- self.indent = False
- # N-th generation of roots descendants:
- self.level = self.list.level_counter
- # Token class:
- self.name = self.__class__.__name__
- # Preserve white spaces within enclosed tokens:
- self.preserve = False
- # Position in token list:
- self.pos = None
-
- def __sub__(self, other):
- return self.pos - other.pos
-
- def __unicode__(self):
- return ""
-
- # Workaround, see http://lucumr.pocoo.org/2011/1/22/forwards-compatible-python/:
- if sys.version_info > (3, 0):
- __str__ = lambda x: x.__unicode__()
- else:
- __str__ = lambda x: unicode(x).encode("utf-8")
-
- @property
- def end(self):
- return self.name == "EndElement"
-
- @property
- def empty(self):
- return self.name == "CharacterData" and re.match(
- r"^[\t\s\n]*$", self.arg[0]
- )
-
- @property
- def leading(self):
- return self.name == "CharacterData" and re.search(
- r"^[\t\s\n]+", self.arg[0]
- )
-
- @property
- def not_empty(self):
- return (
- self.name == "CharacterData"
- and not self.cdata_section
- and not re.match(r"^[\t\s\n]+$", self.arg[0])
- )
-
- @property
- def trailing(self):
- return self.name == "CharacterData" and re.search(
- r"[\t\s\n]+$", self.arg[0]
- )
-
- @property
- def start(self):
- return self.name == "StartElement"
-
- @property
- def correct(self):
- return self.formatter.correct
-
- def attribute(self, key, value):
- if key and value:
- return ' %s="%s"' % (key, value)
- elif key:
- return ' %s=""' % (key)
- return ""
-
- def indent_insert(self):
- """ Indent token. """
- # Child of root and no empty node
- if (
- self.level > 0 and not (self.end and self.list[self.pos - 1].start)
- ) or ( # not empty node:
- self.end and not self.list[self.pos - 1].start
- ):
- return self.indent_create(self.level)
- return ""
-
- def indent_create(self, times=1):
- """ Returns indent string. """
- if not self.formatter.compress and self.formatter.indent:
- return "\n%s" % (
- (times * self.formatter.indent) * self.formatter.indent_char
- )
- return ""
-
- def identifier(self, systemid, publicid):
- # TODO add base parameter:
- if publicid and systemid:
- return ' PUBLIC "%s" "%s"' % (publicid, systemid)
- elif publicid:
- return ' PUBLIC "%s"' % publicid
- elif systemid:
- return ' SYSTEM "%s"' % systemid
- return ""
-
- def configure(self):
- """ Set token properties. """
- self.descendant_mixed = self.list.token_descendant_mixed(self)
- self.preserve = self.list.token_preserve(self)
- self.cdata_section = self.list.cdata_section
-
- def pre_operate(self):
- pass
-
- def post_operate(self):
- pass
-
- class AttlistDecl(Token):
- def __unicode__(self):
- str = self.indent_create()
- str += ""
- return str
-
- class CharacterData(Token):
- def __unicode__(self):
- str = self.arg[0]
- if not self.preserve and not self.cdata_section:
- # remove empty tokens always in element content!
- if self.empty and not self.descendant_mixed:
- if self.formatter.blanks and not self.formatter.compress and re.match(r"\s*\n\s*\n\s*", str):
- str = "\n"
- else:
- str = ""
- else:
- if self.correct:
- str = re.sub(r"\r\n", "\n", str)
- str = re.sub(r"\r|\n|\t", " ", str)
- str = re.sub(r"\s+", " ", str)
- if self.delete_leading:
- str = re.sub(r"^\s", "", str)
- if self.delete_trailing:
- str = re.sub(r"\s$", "", str)
- if not self.cdata_section:
- str = re.sub(r"&", "&", str)
- str = re.sub(r"<", "<", str)
- return str
-
- def pre_operate(self):
- self.list.whitespace_append_trailing(self)
- self.list.whitespace_append_leading(self)
-
- def post_operate(self):
- self.delete_leading = self.list.whitespace_delete_leading(self)
- self.delete_trailing = self.list.whitespace_delete_trailing(self)
-
- class Comment(Token):
- def __unicode__(self):
- str = ""
- if self.preserve in [0, 1] and self.indent:
- str += self.indent_insert()
- str += "" % re.sub(
- r"^[\r\n]+$", "\n", re.sub(r"^[\r\n]+", "\n", self.arg[0])
- )
- return str
-
- def configure(self):
- super(Formatter.Comment, self).configure()
- self.indent = self.list.token_indent(self)
-
- class Default(Token):
- pass
-
- class EndCdataSection(Token):
- def __unicode__(self):
- return "]]>"
-
- def configure(self):
- self.list.cdata_section = False
-
- class ElementDecl(Token):
- def __unicode__(self):
- str = self.indent_create()
- str += "" % (self.arg[0], self.evaluate_model(self.arg[1]))
- return str
-
- def evaluate_model(self, model, modelStr="", concatStr=""):
- childSeq = []
- mixed = model[0] == xml.parsers.expat.model.XML_CTYPE_MIXED
- hasChilds = len(model[3]) or mixed
- if model[0] == xml.parsers.expat.model.XML_CTYPE_EMPTY: # 1
- modelStr += " EMPTY"
- elif model[0] == xml.parsers.expat.model.XML_CTYPE_ANY: # 2
- modelStr += " ANY"
- elif model[0] == xml.parsers.expat.model.XML_CTYPE_NAME: # 4
- modelStr = "%s" % model[2] # new start
- elif model[0] in (
- xml.parsers.expat.model.XML_CTYPE_CHOICE,
- xml.parsers.expat.model.XML_CTYPE_MIXED,
- ): # 5
- concatStr = "|"
- elif model[0] == xml.parsers.expat.model.XML_CTYPE_SEQ: # 6
- concatStr = ","
- if hasChilds:
- modelStr += " ("
- if mixed:
- childSeq.append("#PCDATA")
- for child in model[3]:
- childSeq.append(self.evaluate_model(child))
- modelStr += concatStr.join(childSeq)
- if hasChilds:
- modelStr += ")"
- modelStr += {
- xml.parsers.expat.model.XML_CQUANT_NONE: "",
- xml.parsers.expat.model.XML_CQUANT_OPT: "?",
- xml.parsers.expat.model.XML_CQUANT_PLUS: "+",
- xml.parsers.expat.model.XML_CQUANT_REP: "*",
- }[model[1]]
- return modelStr
-
- class EndDoctypeDecl(Token):
- def __unicode__(self):
- str = ""
- if self.list[self.pos - 1].name != "StartDoctypeDecl":
- str += self.indent_create(0)
- str += "]"
- str += ">"
- str += self.indent_create(0)
- return str
-
- class EndElement(Token):
- def __init__(self, list, arg):
- list.level_decrement()
- super(Formatter.EndElement, self).__init__(list, arg)
-
- def __unicode__(self):
- str = ""
- # Don't close empty nodes on compression mode:
- if (
- not (self.formatter.compress or self.formatter.selfclose)
- or self.list[self.pos - 1].name != "StartElement"
- ):
- if self.preserve in [0] and self.indent:
- str += self.indent_insert()
- str += "%s>" % self.arg[0]
- return str
-
- def configure(self):
- self.descendant_mixed = self.list.token_descendant_mixed(self)
- self.preserve = self.list.token_preserve(self)
- self.indent = self.list.token_indent(self)
-
- class EntityDecl(Token):
- def __unicode__(self):
- str = self.indent_create()
- str += ""
- return str
-
- class NotationDecl(Token):
- def __unicode__(self):
- str = self.indent_create()
- str += "" % (
- self.arg[0],
- self.identifier(self.arg[2], self.arg[3]),
- )
- return str
-
- class ProcessingInstruction(Token):
- def __unicode__(self):
- str = ""
- if self.preserve in [0, 1] and self.indent:
- str += self.indent_insert()
- str += "%s %s?>" % (self.arg[0], self.arg[1])
- return str
-
- def configure(self):
- super(Formatter.ProcessingInstruction, self).configure()
- self.indent = self.list.token_indent(self)
-
- class StartCdataSection(Token):
- def __unicode__(self):
- return ""
- else:
- str += ">"
- return str
-
- def configure(self):
- self.content_model = self.list.token_model(self)
- self.descendant_mixed = self.list.token_descendant_mixed(self)
- self.preserve = self.list.token_preserve(self)
- self.indent = self.list.token_indent(self)
-
- class XmlDecl(Token):
- def __init__(self, list, arg):
- super(Formatter.XmlDecl, self).__init__(list, arg)
- if len(self.arg) > 1:
- self.formatter.encoding_internal = self.arg[1]
-
- def __unicode__(self):
- str = " -1:
- str += self.attribute("standalone", "yes")
- str += "?>\n"
- return str
-
-
-def cli_usage(msg=""):
- """ Output usage for command line tool. """
- sys.stderr.write(msg + "\n")
- sys.stderr.write(
- 'Usage: xmlformat [--preserve "pre,literal"] [--blanks]\
- [--compress] [--selfclose] [--indent num] [--indent-char char]\
- [--outfile file] [--encoding enc] [--outencoding enc]\
- [--disable-inlineformatting] [--overwrite] [--disable-correction]\
- [--eof-newline]\
- [--help] <--infile file | file | - >\n'
- )
- sys.exit(2)
-
-
-def cli():
- """ Launch xmlformatter from command line. """
- res = None
- indent = DEFAULT_INDENT
- indent_char = DEFAULT_INDENT_CHAR
- outfile = None
- overwrite = False
- preserve = []
- blanks = False
- compress = DEFAULT_COMPRESS
- selfclose = DEFAULT_SELFCLOSE
- infile = None
- encoding = DEFAULT_ENCODING_INPUT
- outencoding = DEFAULT_ENCODING_OUTPUT
- inline = DEFAULT_INLINE
- correct = DEFAULT_CORRECT
- eof_newline = DEFAULT_EOF_NEWLINE
- try:
- opts, args = getopt.getopt(
- sys.argv[1:],
- "",
- [
- "compress",
- "selfclose",
- "disable-correction",
- "disable-inlineformatting",
- "encoding=",
- "help",
- "infile=",
- "indent=",
- "indent-char=",
- "outfile=",
- "outencoding=",
- "overwrite",
- "preserve=",
- "blanks",
- "eof-newline"
- ],
- )
- except getopt.GetoptError as err:
- cli_usage(str(err))
- for key, value in opts:
- if key in ["--indent"]:
- indent = value
- elif key in ["--preserve"]:
- preserve = value.replace(",", " ").split()
- elif key in ["--blanks"]:
- blanks = True
- elif key in ["--help"]:
- cli_usage()
- elif key in ["--compress"]:
- compress = True
- elif key in ["--selfclose"]:
- selfclose = True
- elif key in ["--outfile"]:
- outfile = value
- elif key in ["--infile"]:
- infile = value
- elif key in ["--encoding"]:
- encoding = value
- elif key in ["--outencoding"]:
- outencoding = value
- elif key in ["--indent-char"]:
- indent_char = value
- elif key in ["--disable-inlineformatting"]:
- inline = False
- elif key in ["--disable-correction"]:
- correct = False
- elif key in ["--overwrite"]:
- overwrite = True
- elif key in ["--eof-newline"]:
- eof_newline = True
- try:
- formatter = Formatter(
- indent=indent,
- preserve=preserve,
- blanks=blanks,
- compress=compress,
- selfclose=selfclose,
- encoding_input=encoding,
- encoding_output=outencoding,
- indent_char=indent_char,
- inline=inline,
- correct=correct,
- eof_newline=eof_newline,
- )
- input_file = None
- if infile:
- input_file = infile
- res = formatter.format_file(input_file)
- elif len(args) > 0:
- if args[0] == "-":
- res = formatter.format_string("".join(sys.stdin.readlines()))
- else:
- input_file = args[0]
- res = formatter.format_file(input_file)
-
- except xml.parsers.expat.ExpatError as err:
- cli_usage("XML error: %s" % err)
- except IOError as err:
- cli_usage("IO error: %s" % err)
- except:
- cli_usage("Unkonwn error")
-
- if overwrite:
- formatter.enc_output(input_file, res)
- else:
- formatter.enc_output(outfile, res)
\ No newline at end of file
diff --git a/simulation/asp/main.py b/simulation/asp/main.py
deleted file mode 100644
index 7ed3455..0000000
--- a/simulation/asp/main.py
+++ /dev/null
@@ -1,53 +0,0 @@
-import argparse
-import shutil
-from src.model.enum import Enum
-from helper.fs import FS
-from src.usecases.urdf_sub_assembly_usecase import UrdfSubAssemblyUseCase
-from src.model.sdf_geometry import GeometryModel
-from src.usecases.sdf_sub_assembly_usecase import SdfSubAssemblyUseCase
-
-import os
-from pathlib import Path
-
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser()
- parser.add_argument("--generationFolder", help="FreeCad generation folder")
- parser.add_argument("--outPath", help="save SDF path")
- parser.add_argument("--world", help="adding sdf world")
- parser.add_argument("--format", help="urdf,sdf,mujoco")
- args = parser.parse_args()
-
- if args.generationFolder is None or args.outPath is None:
- parser.print_help()
- exit(1)
- outPath = args.outPath
- geometryFiles = FS.readFilesTypeFolder(args.generationFolder + "/assets/")
- assemblyStructure = FS.readJSON(args.generationFolder + "/step-structure.json")
-
- geometryModels: list[GeometryModel] = []
- for el in geometryFiles:
- geometryModels.append(
- GeometryModel.from_dict(
- FS.readJSON(args.generationFolder + "/assets/" + el)
- )
- )
- if os.path.exists(outPath + Enum.folderPath):
- shutil.rmtree(outPath + Enum.folderPath)
- Path(outPath + Enum.folderPath).mkdir(parents=True, exist_ok=True)
-
- if args.format == "sdf":
- SdfSubAssemblyUseCase().call(
- geometryModels=geometryModels,
- assembly=assemblyStructure,
- world=args.world,
- generationFolder=args.generationFolder,
- outPath=args.outPath,
- )
- if args.format == "urdf":
- UrdfSubAssemblyUseCase().call(
- geometryModels=geometryModels,
- assembly=assemblyStructure,
- world=args.world,
- generationFolder=args.generationFolder,
- outPath=args.outPath,
- )
diff --git a/simulation/asp/mocks/Cube1.json b/simulation/asp/mocks/Cube1.json
deleted file mode 100644
index 6952409..0000000
--- a/simulation/asp/mocks/Cube1.json
+++ /dev/null
@@ -1,18 +0,0 @@
-{
- "name": "Cube1",
- "ixx": "16.66666666666667",
- "ixy": "0.0",
- "ixz": "0.0",
- "iyy": "16.66666666666667",
- "izz": "16.66666666666667",
- "massSDF": "0.9999999999999998",
- "posX": "0.0",
- "posY": "-0.015",
- "posZ": "0.0",
- "eulerX": "0.0",
- "eulerY": "0.0",
- "eulerZ": "0.0",
- "iyz": "0.0",
- "stl": "/meshes/Cube1.stl",
- "link": "1554"
-}
\ No newline at end of file
diff --git a/simulation/asp/mocks/Cube2.json b/simulation/asp/mocks/Cube2.json
deleted file mode 100644
index 66d8705..0000000
--- a/simulation/asp/mocks/Cube2.json
+++ /dev/null
@@ -1,18 +0,0 @@
-{
- "name": "Cube2",
- "ixx": "16.66666666666667",
- "ixy": "0.0",
- "ixz": "-3.637978807091713e-15",
- "iyy": "16.66666666666667",
- "izz": "16.66666666666667",
- "massSDF": "0.9999999999999998",
- "posX": "0.0",
- "posY": "-0.009",
- "posZ": "0.01",
- "eulerX": "0.0",
- "eulerY": "0.0",
- "eulerZ": "0.0",
- "iyz": "-3.637978807091713e-15",
- "stl": "/meshes/Cube2.stl",
- "link": "8838"
-}
\ No newline at end of file
diff --git a/simulation/asp/mocks/sdf/include.sdf b/simulation/asp/mocks/sdf/include.sdf
deleted file mode 100644
index 089a39e..0000000
--- a/simulation/asp/mocks/sdf/include.sdf
+++ /dev/null
@@ -1,4 +0,0 @@
-<include>
- <name>{name}</name>
- <uri>{uri}</uri>
-</include>
\ No newline at end of file
diff --git a/simulation/asp/mocks/sdf/include_pose.sdf b/simulation/asp/mocks/sdf/include_pose.sdf
deleted file mode 100644
index ad43ee8..0000000
--- a/simulation/asp/mocks/sdf/include_pose.sdf
+++ /dev/null
@@ -1,5 +0,0 @@
-<include>
- <name>{name}</name>
- <uri>{uri}</uri>
- <pose>{posX} {posY} {posZ} {eulerX} {eulerY} {eulerZ}</pose>
-</include>
\ No newline at end of file
diff --git a/simulation/asp/mocks/sdf/joint_fixed.sdf b/simulation/asp/mocks/sdf/joint_fixed.sdf
deleted file mode 100644
index 8e8a45c..0000000
--- a/simulation/asp/mocks/sdf/joint_fixed.sdf
+++ /dev/null
@@ -1,7 +0,0 @@
-
- base_link
- {child}::{child}
- {posX} {posY} {posZ} {eulerX} {eulerY} {eulerZ}
-
-
-
\ No newline at end of file
diff --git a/simulation/asp/mocks/sdf/link.sdf b/simulation/asp/mocks/sdf/link.sdf
deleted file mode 100644
index 0b2f09d..0000000
--- a/simulation/asp/mocks/sdf/link.sdf
+++ /dev/null
@@ -1,36 +0,0 @@
-
- {posX} {posY} {posZ} {eulerX} {eulerY} {eulerZ}
-
- {posX} {posY} {posZ} {eulerX} {eulerY} {eulerZ}
-
- {ixx}
- {ixy}
- {ixz}
- {iyy}
- {iyz}
- {izz}
-
- {massSDF}
-
-
-
-
- model:/{stl}
-
-
-
-
-
-
- model:/{stl}
-
-
-
-
-
- {friction}
-
-
-
-
-
\ No newline at end of file
diff --git a/simulation/asp/mocks/sdf/model.config b/simulation/asp/mocks/sdf/model.config
deleted file mode 100644
index ca79d61..0000000
--- a/simulation/asp/mocks/sdf/model.config
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
- model.sdf
-
\ No newline at end of file
diff --git a/simulation/asp/mocks/sdf/model.sdf b/simulation/asp/mocks/sdf/model.sdf
deleted file mode 100644
index 2b19966..0000000
--- a/simulation/asp/mocks/sdf/model.sdf
+++ /dev/null
@@ -1,27 +0,0 @@
-
-
-
-
- 0
-
-
- model:/{stl}
-
-
-
-
-
- model:/{stl}
-
-
-
-
-
- {friction}
-
-
-
-
-
-
-
diff --git a/simulation/asp/mocks/sdf/world.sdf b/simulation/asp/mocks/sdf/world.sdf
deleted file mode 100644
index bd583fa..0000000
--- a/simulation/asp/mocks/sdf/world.sdf
+++ /dev/null
@@ -1,64 +0,0 @@
-
-
-
-
- 0 0 -9.8
- 6e-06 2.3e-05 -4.2e-05
-
-
- 0.4 0.4 0.4 1
- 0.7 0.7 0.7 1
- true
-
-
- true
-
-
-
-
- 0 0 1
- 100 100
-
-
-
-
-
-
-
-
-
-
-
-
-
- 0 0 1
- 100 100
-
-
-
- 0.8 0.8 0.8 1
- 0.8 0.8 0.8 1
- 0.8 0.8 0.8 1
-
-
- 0 0 0 0 -0 0
-
- 0 0 0 0 -0 0
- 1
-
- 1
- 0
- 0
- 1
- 0
- 1
-
-
- false
-
- 0 0 0 0 -0 0
- false
-
-
-
-
diff --git a/simulation/asp/mocks/urdf/asm.urdf b/simulation/asp/mocks/urdf/asm.urdf
deleted file mode 100644
index b8d901a..0000000
--- a/simulation/asp/mocks/urdf/asm.urdf
+++ /dev/null
@@ -1,4 +0,0 @@
-
-
-
diff --git a/simulation/asp/mocks/urdf/joint.urdf b/simulation/asp/mocks/urdf/joint.urdf
deleted file mode 100644
index 611aa12..0000000
--- a/simulation/asp/mocks/urdf/joint.urdf
+++ /dev/null
@@ -1,13 +0,0 @@
-
-
-
-
-
-
diff --git a/simulation/asp/mocks/urdf/link.urdf b/simulation/asp/mocks/urdf/link.urdf
deleted file mode 100644
index b20131f..0000000
--- a/simulation/asp/mocks/urdf/link.urdf
+++ /dev/null
@@ -1,31 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/simulation/asp/mocks/urdf/model.urdf b/simulation/asp/mocks/urdf/model.urdf
deleted file mode 100644
index 74aec10..0000000
--- a/simulation/asp/mocks/urdf/model.urdf
+++ /dev/null
@@ -1,37 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- 0.2
- 0.1
- 1 0 0
-
-
-
-
\ No newline at end of file
diff --git a/simulation/asp/requirements.txt b/simulation/asp/requirements.txt
deleted file mode 100644
index 2a50401..0000000
--- a/simulation/asp/requirements.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-argparse
-matplotlib
-pybullet
-xmlformatter
\ No newline at end of file
diff --git a/simulation/asp/src/model/asm.py b/simulation/asp/src/model/asm.py
deleted file mode 100644
index db07cb3..0000000
--- a/simulation/asp/src/model/asm.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from distutils.dir_util import copy_tree
-from src.model.enum import Enum
-
-class Assembly:
- def generateSubAssembly(self, assembly: list[str]):
- asm = {}
- inc = 0
- for el in assembly:
- asm[str("asm" + str(inc))] = {
- "part": el,
- "assembly": assembly[0:inc],
- }
- inc += 1
- return asm
-    def copy(self, generationFolder, format, outPath):
- copy_tree(generationFolder + format, outPath + Enum.folderPath)
\ No newline at end of file
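
For reference, a small usage sketch of Assembly.generateSubAssembly (part names are made up; the import path assumes the simulation/asp layout above): each asm<i> entry pairs a part with the parts that precede it in the sequence.

```python
from src.model.asm import Assembly  # assumed import path, run from simulation/asp

assembly = ["base", "gear", "cover"]  # illustrative part names
asm = Assembly().generateSubAssembly(assembly)
# {
#     "asm0": {"part": "base",  "assembly": []},
#     "asm1": {"part": "gear",  "assembly": ["base"]},
#     "asm2": {"part": "cover", "assembly": ["base", "gear"]},
# }
print(asm)
```
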
diff --git a/simulation/asp/src/model/enum.py b/simulation/asp/src/model/enum.py
deleted file mode 100644
index 9e2c5ef..0000000
--- a/simulation/asp/src/model/enum.py
+++ /dev/null
@@ -1,2 +0,0 @@
-class Enum:
- folderPath = "generation/"
diff --git a/simulation/asp/src/model/sdf_geometry.py b/simulation/asp/src/model/sdf_geometry.py
deleted file mode 100644
index 5118152..0000000
--- a/simulation/asp/src/model/sdf_geometry.py
+++ /dev/null
@@ -1,327 +0,0 @@
-import os
-from helper.fs import FS
-
-from src.model.sdf_join import SdfJoin
-import typing
-import uuid
-
-
-def from_str(x):
- assert isinstance(x, str)
- return x
-
-
-def from_none(x):
- assert x is None
- return x
-
-
-def from_union(fs, x):
- for f in fs:
- try:
- return f(x)
- except:
- pass
- assert False
-
-
-def to_class(c, x):
- assert isinstance(x, c)
- return x.to_dict()
-
-
-DELIMITER_SCALE = 10000
-
-
-class GeometryModel:
- def __init__(
- self,
- name,
- ixx,
- ixy,
- ixz,
- iyy,
- izz,
- massSDF,
- posX,
- posY,
- posZ,
- eulerX,
- eulerY,
- eulerZ,
- iyz,
- stl,
- link,
- friction,
- centerMassX,
- centerMassY,
- centerMassZ,
- ):
- self.name = name
- self.ixx = ixx
- self.ixy = ixy
- self.ixz = ixz
- self.iyy = iyy
- self.izz = izz
- self.massSDF = massSDF
- self.posX = posX
- self.posY = posY
- self.posZ = posZ
- self.eulerX = eulerX
- self.eulerY = eulerY
- self.eulerZ = eulerZ
- self.iyz = iyz
- self.stl = stl
- self.link = link
- self.friction = friction
- self.centerMassX = centerMassX
- self.centerMassY = centerMassY
- self.centerMassZ = centerMassZ
-
- @staticmethod
- def from_dict(obj):
- assert isinstance(obj, dict)
- name = from_union([from_str, from_none], obj.get("name"))
- ixx = from_union([from_str, from_none], obj.get("ixx"))
- ixy = from_union([from_str, from_none], obj.get("ixy"))
- ixz = from_union([from_str, from_none], obj.get("ixz"))
- iyy = from_union([from_str, from_none], obj.get("iyy"))
- izz = from_union([from_str, from_none], obj.get("izz"))
- massSDF = from_union([from_str, from_none], obj.get("massSDF"))
- posX = from_union([from_str, from_none], obj.get("posX"))
- posY = from_union([from_str, from_none], obj.get("posY"))
- posZ = from_union([from_str, from_none], obj.get("posZ"))
- eulerX = from_union([from_str, from_none], obj.get("eulerX"))
- eulerY = from_union([from_str, from_none], obj.get("eulerY"))
- eulerZ = from_union([from_str, from_none], obj.get("eulerZ"))
- iyz = from_union([from_str, from_none], obj.get("iyz"))
- stl = from_union([from_str, from_none], obj.get("stl"))
- link = from_union([from_str, from_none], obj.get("link"))
- friction = from_union([from_str, from_none], obj.get("friction"))
- centerMassX = from_union([from_str, from_none], obj.get("centerMassX"))
- centerMassY = from_union([from_str, from_none], obj.get("centerMassY"))
- centerMassZ = from_union([from_str, from_none], obj.get("centerMassZ"))
- return GeometryModel(
- name,
- ixx,
- ixy,
- ixz,
- iyy,
- izz,
- massSDF,
- posX,
- posY,
- posZ,
- eulerX,
- eulerY,
- eulerZ,
- iyz,
- stl,
- link,
- friction,
- centerMassX,
- centerMassY,
- centerMassZ,
- )
-
- def to_dict(self):
- result = {}
- if self.name is not None:
- result["name"] = from_union([from_str, from_none], self.name)
- if self.ixx is not None:
- result["ixx"] = from_union([from_str, from_none], self.ixx)
- if self.ixy is not None:
- result["ixy"] = from_union([from_str, from_none], self.ixy)
- if self.ixz is not None:
- result["ixz"] = from_union([from_str, from_none], self.ixz)
- if self.iyy is not None:
- result["iyy"] = from_union([from_str, from_none], self.iyy)
- if self.izz is not None:
- result["izz"] = from_union([from_str, from_none], self.izz)
- if self.massSDF is not None:
- result["massSDF"] = from_union([from_str, from_none], self.massSDF)
- if self.posX is not None:
- result["posX"] = from_union([from_str, from_none], self.posX)
- if self.posY is not None:
- result["posY"] = from_union([from_str, from_none], self.posY)
- if self.posZ is not None:
- result["posZ"] = from_union([from_str, from_none], self.posZ)
- if self.eulerX is not None:
- result["eulerX"] = from_union([from_str, from_none], self.eulerX)
- if self.eulerY is not None:
- result["eulerY"] = from_union([from_str, from_none], self.eulerY)
- if self.eulerZ is not None:
- result["eulerZ"] = from_union([from_str, from_none], self.eulerZ)
- if self.iyz is not None:
- result["iyz"] = from_union([from_str, from_none], self.iyz)
- if self.stl is not None:
- result["stl"] = from_union([from_str, from_none], self.stl)
- if self.link is not None:
- result["link"] = from_union([from_str, from_none], self.link)
- if self.friction is not None:
-            result["friction"] = from_union([from_str, from_none], self.friction)
- if self.centerMassX is not None:
- result["centerMassX"] = from_union([from_str, from_none], self.centerMassX)
- if self.centerMassY is not None:
- result["centerMassY"] = from_union([from_str, from_none], self.centerMassY)
- if self.centerMassZ is not None:
- result["centerMassZ"] = from_union([from_str, from_none], self.centerMassZ)
- return result
-
- def toJSON(self) -> str:
- return str(self.to_dict()).replace("'", '"')
-
- def toSDF(self):
- return (
- FS.readFile(
- os.path.dirname(os.path.realpath(__file__))
- + "/../../mocks/sdf/model.sdf"
- )
- .replace(
- "{name}",
- self.name,
- )
- .replace("{posX}", self.posX)
- .replace("{posY}", self.posY)
- .replace("{posZ}", self.posZ)
- .replace("{eulerX}", self.eulerX)
- .replace("{eulerY}", self.eulerY)
- .replace("{eulerZ}", self.eulerZ)
- .replace("{ixx}", self.ixx)
- .replace("{ixy}", self.ixy)
- .replace("{ixz}", self.ixz)
- .replace("{iyy}", self.iyy)
- .replace("{iyz}", self.iyz)
- .replace("{izz}", self.izz)
- .replace(
- "{massSDF}",
- self.massSDF,
- )
- .replace("{stl}", self.stl)
- .replace("{friction}", self.friction)
- )
-
- def toSdfLink(self):
- return (
- FS.readFile(
- os.path.dirname(os.path.realpath(__file__))
- + "/../../mocks/sdf/link.sdf"
- )
- .replace(
- "{name}",
- self.name,
- )
- .replace("{posX}", self.posX)
- .replace("{posY}", self.posY)
- .replace("{posZ}", self.posZ)
- .replace("{eulerX}", self.eulerX)
- .replace("{eulerY}", self.eulerY)
- .replace("{eulerZ}", self.eulerZ)
- .replace("{ixx}", self.ixx)
- .replace("{ixy}", self.ixy)
- .replace("{ixz}", self.ixz)
- .replace("{iyy}", self.iyy)
- .replace("{iyz}", self.iyz)
- .replace("{izz}", self.izz)
- .replace(
- "{massSDF}",
- self.massSDF,
- )
- .replace("{stl}", self.stl)
- .replace("{friction}", self.friction)
- )
-
- def includeLink(self, pose=False):
- if pose == False:
- return (
- FS.readFile(
- os.path.dirname(os.path.realpath(__file__))
- + "/../../mocks/sdf/include.sdf"
- )
- .replace("{name}", self.name)
- .replace("{uri}", "/" + self.name)
- )
- return (
- FS.readFile(
- os.path.dirname(os.path.realpath(__file__))
- + "/../../mocks/sdf/include_pose.sdf"
- )
- .replace("{name}", self.name)
- .replace("{uri}", "/" + self.name)
- .replace("{posX}", self.posX)
- .replace("{posY}", self.posY)
- .replace("{posZ}", self.posZ)
- .replace("{eulerX}", self.eulerX)
- .replace("{eulerY}", self.eulerY)
- .replace("{eulerZ}", self.eulerZ)
- .replace("{ixx}", self.ixx)
- .replace("{ixy}", self.ixy)
- .replace("{ixz}", self.ixz)
- .replace("{iyy}", self.iyy)
- .replace("{iyz}", self.iyz)
- .replace("{izz}", self.izz)
- )
-
- def generateSDFatJoinFixed(self, sdfModels: list["GeometryModel"]):
- sdf = '\n\n'
- sdf += ' \n'
- sdf += " 0 0 0 0 0 0\n"
- sdf += " \n"
-
- link = sdf + self.includeLink(pose=True)
- if sdfModels.__len__() == 0:
- return link
- endTagLinkInc = link.__len__()
- beginSDF = link[0:endTagLinkInc]
-
- sdfJoin = beginSDF + "\n"
-
- for el in sdfModels:
- if el.name != self.name:
- sdfJoin += el.includeLink(pose=True) + "\n"
-
- endSDF = link[endTagLinkInc : link.__len__()]
-
- for el in sdfModels:
- if el.name != self.name:
- sdfJoin += (
- SdfJoin(
- name=str(uuid.uuid4()),
- parent=self.name,
- child=el.name,
- modelAt=el,
- ).toSDF()
- + "\n"
- )
-
- sdfJoin += endSDF
- sdfJoin += ""
- return sdfJoin
-
- def toUrdf(self):
- return (
- FS.readFile(
- os.path.dirname(os.path.realpath(__file__))
- + "/../../mocks/urdf/model.urdf"
- )
- .replace("{name}", self.name)
- .replace("{name}", self.name)
- .replace("{uri}", "/" + self.name)
- .replace("{posX}", self.posX)
- .replace("{posY}", self.posY)
- .replace("{posZ}", self.posZ)
- .replace("{eulerX}", self.eulerX)
- .replace("{eulerY}", self.eulerY)
- .replace("{eulerZ}", self.eulerZ)
- .replace("{ixx}", self.ixx)
- .replace("{ixy}", self.ixy)
- .replace("{ixz}", self.ixz)
- .replace("{iyy}", self.iyy)
- .replace("{iyz}", self.iyz)
- .replace("{izz}", self.izz)
- .replace("{stl}", self.stl)
- .replace("{massSDF}", self.massSDF)
- .replace("{centerMassX}", self.centerMassX)
- .replace("{centerMassY}", self.centerMassY)
- .replace("{centerMassZ}", self.centerMassZ)
- )
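
A minimal usage sketch of GeometryModel: all field values are illustrative strings (matching the from_str parsing above), and the import path is an assumption based on this repo layout.

```python
from src.model.sdf_geometry import GeometryModel  # assumed import path, run from simulation/asp

raw = {
    "name": "cube", "massSDF": "0.1", "friction": "0.4",
    "posX": "0", "posY": "0", "posZ": "0",
    "eulerX": "0", "eulerY": "0", "eulerZ": "0",
    "ixx": "1e-5", "ixy": "0", "ixz": "0", "iyy": "1e-5", "iyz": "0", "izz": "1e-5",
    "stl": "meshes/cube.stl", "link": "cube_link",
    "centerMassX": "0", "centerMassY": "0", "centerMassZ": "0",
}

model = GeometryModel.from_dict(raw)
assert model.to_dict()["name"] == "cube"
# model.toSDF() / model.toUrdf() substitute these strings into the mock templates above.
```
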
diff --git a/simulation/asp/src/model/sdf_join.py b/simulation/asp/src/model/sdf_join.py
deleted file mode 100644
index 2e38208..0000000
--- a/simulation/asp/src/model/sdf_join.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from helper.fs import FS
-import os
-
-
-
-class SdfJoin:
-
- def __init__(self, name, parent, modelAt, child) -> None:
- self.name = name
- self.parent = parent
- self.child = child
- self.modelAt = modelAt
- pass
-
- def toSDF(self):
-        return (
-            FS.readFile(
-                os.path.dirname(os.path.realpath(__file__))
-                + '/../../mocks/sdf/joint_fixed.sdf'
-            )
-            .replace('{name}', self.name)
-            .replace('{parent}', self.parent)
-            .replace('{child}', self.child)
-            .replace('{posX}', self.modelAt.posX)
-            .replace('{posY}', self.modelAt.posY)
-            .replace('{posZ}', self.modelAt.posZ)
-            .replace('{eulerX}', self.modelAt.eulerX)
-            .replace('{eulerY}', self.modelAt.eulerY)
-            .replace('{eulerZ}', self.modelAt.eulerZ)
-            .replace('{ixx}', self.modelAt.ixx)
-            .replace('{ixy}', self.modelAt.ixy)
-            .replace('{ixz}', self.modelAt.ixz)
-            .replace('{iyy}', self.modelAt.iyy)
-            .replace('{iyz}', self.modelAt.iyz)
-            .replace('{izz}', self.modelAt.izz)
-        )
diff --git a/simulation/asp/src/usecases/formatter_usecase.py b/simulation/asp/src/usecases/formatter_usecase.py
deleted file mode 100644
index f1a9d97..0000000
--- a/simulation/asp/src/usecases/formatter_usecase.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from helper.xmlformatter import Formatter
-from src.model.enum import Enum
-from helper.fs import FS
-
-class FormatterUseCase:
- def call(outPath: str, format: str):
- formatter = Formatter(
- indent="1", indent_char="\t", encoding_output="ISO-8859-1", preserve=["literal"])
-
- files = FS.readFilesTypeFolder(
- outPath + Enum.folderPath, fileType=format)
- for el in files:
- FS.writeFile(data=str(formatter.format_file(outPath + Enum.folderPath + el),
- 'utf-8'), filePath=outPath + Enum.folderPath, fileName=el)
diff --git a/simulation/asp/src/usecases/generate_world.py b/simulation/asp/src/usecases/generate_world.py
deleted file mode 100644
index bfe9681..0000000
--- a/simulation/asp/src/usecases/generate_world.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import os
-from helper.fs import FS
-
-
-class SdfGenerateWorldUseCase:
- def call(assembly: str) -> str:
- world = FS.readFile(
- os.path.dirname(os.path.realpath(__file__)) + "/../../mocks/sdf/world.sdf"
- )
-        beginWorld = world[0 : world.find("</world>")]
- return
diff --git a/simulation/asp/src/usecases/sdf_generate_world_usecase.py b/simulation/asp/src/usecases/sdf_generate_world_usecase.py
deleted file mode 100644
index 96b842e..0000000
--- a/simulation/asp/src/usecases/sdf_generate_world_usecase.py
+++ /dev/null
@@ -1,12 +0,0 @@
-import os
-from helper.fs import FS
-
-class SdfGenerateWorldUseCase:
- def call(assembly:str) -> str:
- world = FS.readFile(os.path.dirname(os.path.realpath(__file__))
- + '/../../mocks/sdf/world.sdf')
-        beginWorld = world[0:world.find('</world>')]
-
-
-class SimpleCopyPartModel:
-    id = None
-    part = None
-    label = None
-
-    def getPart(self):
-        return self.part
-
-    def __init__(self, part) -> None:
- try:
- from random import randrange
- self.id = str(randrange(1000000))
- childObj = part
- print(part)
- __shape = Part.getShape(
- childObj, '', needSubElement=False, refine=False)
- obj = App.ActiveDocument.addObject('Part::Feature', self.id)
- obj.Shape = __shape
- self.part = obj
- self.label = obj.Label
- App.ActiveDocument.recompute()
- except Exception as e:
- print(e)
-
- def remove(self):
- App.ActiveDocument.removeObject(self.label)
-
-class MeshPartModel:
- id = None
- mesh = None
-
- def __init__(self, part) -> None:
- try:
- from random import randrange
- self.id = 'mesh' + str(randrange(1000000))
- document = App.ActiveDocument
- mesh = document.addObject("Mesh::Feature", self.id)
- shape = Part.getShape(part, "")
- mesh.Mesh = MeshPart.meshFromShape(
- Shape=shape, LinearDeflection=20, AngularDeflection=0.1, Relative=False)
- mesh.Label = self.id
- self.mesh = mesh
- except Exception as e:
- print(e)
- pass
-
- def remove(self):
- try:
- App.ActiveDocument.removeObject(self.mesh.Label)
- except Exception as e:
- print(e)
-
-
-
-class JoinMeshModel:
- id = None
- mesh = None
-
- def __init__(self, meshesPartModels: list['MeshPartModel']) -> None:
- meshes = []
- from random import randrange
- for el in meshesPartModels:
- meshes.append(el.mesh.Mesh)
-
- self.id = 'MergedMesh' + str(randrange(1000000))
- doc = App.ActiveDocument
- merged_mesh = Mesh.Mesh()
- for el in meshes:
- merged_mesh.addMesh(el)
-
- new_obj = doc.addObject("Mesh::Feature", self.id)
- new_obj.Mesh = merged_mesh
- new_obj.ViewObject.DisplayMode = "Flat Lines"
- self.mesh = new_obj
-
- def remove(self):
- try:
- App.ActiveDocument.removeObject(self.id)
- except Exception as e:
- print(e)
-
-
-class ExportAssemblyThemAllUseCase:
-
- def call(self, path:str, assemblys:list[str]):
- assembly = assemblys
- asmStructure = {}
- inc = 0
- for el in assembly:
- if (inc != 0):
- asmStructure[inc] = {
- "child": el,
- "parents": assembly[0:inc]
- }
- inc += 1
- objectsFreeCad = App.ActiveDocument.Objects
- asmSolids = {}
- for k, v in asmStructure.items():
- assemblyParentList = v['parents']
- assemblyChild = v['child']
- for el in assemblyParentList:
- for solid in objectsFreeCad:
- if (el == solid.Label):
- if (asmSolids.get(k) is None):
-
- asmSolids[k] = {'parents': [], 'child': list(
- filter(lambda x: x.Label == assemblyChild, objectsFreeCad))[0]}
-
- asmSolids[k]['parents'].append(solid)
-
- inc = 0
- for k, v in asmSolids.items():
- geometry = {"0": [], "1": []}
- if (k != 0):
- App.activeDocument().addObject("Part::Compound", "Compound")
-
- copyLinks = list(
- map(lambda el: SimpleCopyPartModel(el), v['parents']))
-
- if copyLinks != None:
- App.activeDocument().Compound.Links = list(
- map(lambda el: el.getPart(), copyLinks))
-
- object = App.activeDocument().getObject('Compound')
- boundBox = object.Shape.BoundBox
- geometry['0'].append(boundBox.XMax)
- geometry['0'].append(boundBox.YMax)
- geometry['0'].append(boundBox.ZMax)
-
- boundBoxChild = v['child'].Shape.BoundBox
- geometry['1'].append(boundBoxChild.XMax)
- geometry['1'].append(boundBoxChild.YMax)
- geometry['1'].append(boundBoxChild.ZMax)
- meshParents = []
-
- for el in v['parents']:
- meshParents.append(MeshPartModel(el))
- joinMesh = JoinMeshModel(meshParents)
- for el in meshParents:
- el.remove()
- import importOBJ
- importOBJ.export(joinMesh.mesh, path + str(1) + '.obj')
- joinMesh.remove()
- importOBJ.export(v['child'], path + str(0) + '.obj')
- FS.writeFile(json.dumps(geometry), path, 'translation.json')
-
- App.ActiveDocument.removeObject("Compound")
- for el in copyLinks:
- el.remove()
- App.activeDocument().recompute()
- inc += 1
-
-def main():
-
- env = FS.readJSON('./env.json')
- env.get('cadDoc')
- aspDir = env.get('aspDir')
- sequences = FS.readJSON(env.get('sequences')).get('sequences')
- App.openDocument(env.get('cadDoc'))
- for sequencyNumber in range(len(sequences)):
- FS.createFolder(aspDir + 'assemblys/')
- mainFolder = aspDir + 'assemblys/' + str(sequencyNumber) + '/'
- FS.createFolder(mainFolder)
- for subSequenceNumber in range(len(sequences[sequencyNumber])):
- if(subSequenceNumber != 0):
- subFolder = aspDir + 'assemblys/' + \
- str(sequencyNumber) + '/' + str(subSequenceNumber) + '/'
-
- FS.createFolder(subFolder)
- ExportAssemblyThemAllUseCase().call(path=subFolder,assemblys=sequences[sequencyNumber][0:subSequenceNumber+1])
-
- App.closeDocument(App.ActiveDocument.Name)
- freecadQTWindow = Gui.getMainWindow()
- freecadQTWindow.close()
-main()
\ No newline at end of file
diff --git a/simulation/insertion_vector_predicate/main.py b/simulation/insertion_vector_predicate/main.py
deleted file mode 100644
index f41ca5b..0000000
--- a/simulation/insertion_vector_predicate/main.py
+++ /dev/null
@@ -1,174 +0,0 @@
-import os
-import sys
-
-project_base_dir = os.path.abspath(os.path.join(
- os.path.dirname(os.path.abspath(__file__)), './')) + '/assembly/'
-
-
-sys.path.append(project_base_dir)
-sys.path.append(project_base_dir + '/baselines/')
-sys.path.append(project_base_dir + '/assets/')
-
-from scipy.spatial.transform import Rotation
-import shutil
-from spatialmath import *
-from spatialmath.base import *
-from assembly.assets.process_mesh import process_mesh
-from assembly.examples.run_joint_plan import get_planner
-from assembly.baselines.run_joint_plan import PyPlanner
-from assembly.assets.subdivide import subdivide_to_size
-import numpy as np
-import json
-import trimesh
-
-import re
-def merge_meshes(meshes):
-    # Create an empty mesh
- merged_mesh = trimesh.Trimesh()
-
-    # Merge every mesh into one
- for mesh in meshes:
- merged_mesh = trimesh.util.concatenate(
- [merged_mesh, trimesh.load(mesh)])
- i = True
- while i:
- if merged_mesh.fill_holes():
- i = False
-
-
-
-
-
- return merged_mesh
-
-
-os.environ['OMP_NUM_THREADS'] = '1'
-
-
-
-class FS:
- def readJSON(path: str):
- return json.loads((open(path)).read())
-
- def writeFile(data, filePath, fileName):
-
- file_to_open = filePath + fileName
-
- f = open(file_to_open, 'w', )
-
- f.write(data)
-
- def readFile(path: str):
- return open(path).read()
-
- def readFilesTypeFolder(pathFolder: str, fileType='.json'):
- return os.listdir(pathFolder)
-
- def readFolder(pathFolder: str):
- return list(map(lambda el: pathFolder + '/' + el, os.listdir(pathFolder)))
-
- def createFolder(path: str):
- if (not os.path.exists(path)):
- return os.mkdir(path)
-
-
-def listGetFirstValue(iterable, default=False, pred=None):
- return next(filter(pred, iterable), default)
-
-
-def filterModels(filterModels, filterModelsDescription):
- models = []
- for el in filterModelsDescription:
- models.append(listGetFirstValue(
- filterModels, None, lambda x: x.name == el))
- return models
-
-
-# mesh1 = trimesh.load('/Users/idontsudo/framework/asp/out/sdf-generation/meshes/Cube.obj')
-# mesh2 = trimesh.load('/Users/idontsudo/framework/asp/out/sdf-generation/meshes/Cube001.obj')
-
-
-# # Merge the meshes
-# merged_mesh = merge_meshes([mesh1, mesh2])
-
-# # Save the merged mesh to a file
-# merged_mesh.export('merged.obj')
-def main():
- # from argparse import ArgumentParser
- # parser = ArgumentParser()
- # parser.add_argument('--asp-path', type=str, required=True)
- # args = parser.parse_args()
- # aspDir = args.asp_dir
-
-    # # Normalize the path to the ASP generation folder
- # if (aspDir == None):
- # args.print_helper()
- # if (aspDir[aspDir.__len__() - 1] != '/'):
- # aspDir += '/'
- aspDir = '/home/idontsudo/framework/asp/out/'
- sequences = FS.readJSON(aspDir + 'sequences.json').get('sequences')
-
- assemblyDirNormalize = []
- for el in FS.readFolder(aspDir + 'assemblys'):
- for e in FS.readFolder(el):
- try:
-                # Post-processing of the .obj objects
- process_mesh(source_dir=e, target_dir=e +
- '/process/', subdivide=e, verbose=True)
- assemblyDirNormalize.append(e + '/process/')
- except Exception as e:
-                print('ERROR:')
- print(e)
-
-
- print(assemblyDirNormalize)
- for el in assemblyDirNormalize:
- asset_folder = os.path.join(project_base_dir, aspDir)
- assembly_dir = os.path.join(asset_folder, el)
- planner = get_planner('bfs')(assembly_dir, assembly_dir, 0, [
- 1], False, 'sdf', 0.05, 0.01, 100, 100, True)
-
-        # Path planning
- status, t_plan, path = planner.plan(
- 120, seed=1, return_path=True, render=False, record_path=None
- )
- coords = []
-
- for k in path:
- seMatrix = SE3(k)
- euler = seMatrix.eul()
- coord = seMatrix.A[0:3, 3]
- rot = Rotation.from_euler('xyz', euler, degrees=True).as_quat()
- coords.append({'quadrelion': [rot[0], rot[1], rot[2], rot[3]], 'xyz': [
- coord[0], coord[1], coord[2]], 'euler': [euler[0], euler[1], euler[2]]})
-        # Store the planned path in a result record
- planingObject = {
- "time": t_plan,
- "insertion_path": coords,
- "status": status,
- }
-        # Save the planning result
- FS.writeFile(json.dumps(planingObject),
- el[0:el.__len__() - 8], 'insertion_path.json')
-
- try:
- planner = PyPlanner(assembly_dir, 'process', still_ids=[1],)
- status, t_plan, path = planner.plan(
- planner_name='rrt',
- step_size=None,
- max_time=None,
- seed=1,
- return_path=True,
- simplify=False,
- render=False
- )
-
- print(f'Status: {status}, planning time: {t_plan}')
-
- if args.save_dir is not None:
- planner.save_path(path, args.save_dir, args.n_save_state)
- except Exception as e:
- print(e)
-
-
-main()
diff --git a/simulation/insertion_vector_predicate/requirements.txt b/simulation/insertion_vector_predicate/requirements.txt
deleted file mode 100644
index f52e972..0000000
--- a/simulation/insertion_vector_predicate/requirements.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-spatialmath
-scipy
-uuid
\ No newline at end of file
diff --git a/simulation/intersection_geometry_predicate/README.md b/simulation/intersection_geometry_predicate/README.md
deleted file mode 100644
index bceb3dd..0000000
--- a/simulation/intersection_geometry_predicate/README.md
+++ /dev/null
@@ -1,37 +0,0 @@
-# Intersection Geometry Predicate
-
-The geometric intersection vertices of the .obj files are checked against the permissible penetration-depth tolerance.
-
-
-### CLI arguments:
---aspPath path to the folder with the assembly assets
-
-### Output
-The predicate writes an intersection_geometry.json file
-that stores the overall result under the status key and the detailed results under the recalculations key.
-The recalculations key holds an object with the results of the intersection calculations.
-Each entry consists of:
-- names: the names of the intersecting parts
-- depth: the intersection depth
-- point: the geometric vertices of the contact point
-
-
-```JSON
-{
- "status": false,
- "recalculations": {
- "disk_bottom bolt ": [
- {
- "names": "disk_bottom bolt ",
- "depth": 0.5127948565443177,
- "point": [
- -1.972554,
- 16.442781,
- -9.208569
- ]
- }
- ]
- }
-}
-
-```
\ No newline at end of file
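
For illustration, a minimal sketch of how a downstream step might consume intersection_geometry.json; the asp_path value is a placeholder and the keys follow the description above.

```python
import json

asp_path = "/path/to/asp/assets/"  # placeholder; the folder passed via --aspPath

with open(asp_path + "intersection_geometry.json") as f:
    report = json.load(f)

if report["status"]:
    print("No intersections above the permissible depth")
else:
    # recalculations maps part-name pairs to lists of {names, depth, point} records
    for names, hits in report["recalculations"].items():
        worst = max(hits, key=lambda h: h["depth"])
        print(names, "max depth:", worst["depth"], "at", worst["point"])
```
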
diff --git a/simulation/intersection_geometry_predicate/main.py b/simulation/intersection_geometry_predicate/main.py
deleted file mode 100644
index 810ad98..0000000
--- a/simulation/intersection_geometry_predicate/main.py
+++ /dev/null
@@ -1,86 +0,0 @@
-import trimesh
-import os
-import json
-import argparse
-
-class FS:
- def readJSON(path: str):
- return json.loads((open(path)).read())
-
- def writeFile(data, filePath, fileName):
-
- file_to_open = filePath + fileName
-
- f = open(file_to_open, 'w')
- f.write(data)
- f.close()
-
- def readFile(path: str):
- return open(path).read()
-
- def readFilesTypeFolder(pathFolder: str, fileType='.json'):
- filesJson = list(
- filter(lambda x: x[-fileType.__len__():] == fileType, os.listdir(pathFolder)))
- return list(map(lambda x: pathFolder + x, filesJson))
-
-def main():
- parser = argparse.ArgumentParser()
- parser.add_argument('--aspPath', help='asp generation folder')
- args = parser.parse_args()
-
-    if args.aspPath is None:
-        parser.print_help()
-        return
-    aspPath = args.aspPath
- pathMeshes = 'sdf/meshes/'
- permissibleDepth = 0.5
-
- trimeshObjects = []
- meshes = FS.readFilesTypeFolder(aspPath + pathMeshes, '.obj')
- for el in meshes:
- trimeshObjects.append(trimesh.load(el))
-
- manager = trimesh.collision.CollisionManager()
-
- for el in range(len(trimeshObjects)):
- manager.add_object(str(meshes[el]), trimeshObjects[el])
-
- def set_to_dict(s):
- keys = list(s)
- values = [None] * len(s)
- return {k: v for k, v in zip(keys, values)}
- collisions = manager.in_collision_internal(True, True)
-
- recalculations = {}
- for el in collisions[collisions.__len__() - 1]:
- if (el.depth > permissibleDepth):
- labels = ''
- for key in set_to_dict(el.names).keys():
- label = key[key.rfind('/') + 1:key.__len__() - 4]
- labels+=label + " "
- message = {
- 'names': labels,
- 'depth': el.depth,
- 'point': el.point.tolist()
- }
- if(recalculations.get(labels) != None):
- recalculations[labels].append(message)
- else:
- recalculations[labels] = [message]
-
- if(len(list(recalculations.keys())) >= 1):
- messageError = {
- 'status':False,
- 'recalculations':recalculations
- }
- FS.writeFile(json.dumps(messageError, ensure_ascii=False, indent=4), aspPath,'intersection_geometry.json')
- else:
- message = {
- 'status':True,
- 'recalculations': None
- }
-        FS.writeFile(json.dumps(message, ensure_ascii=False, indent=4), aspPath,'intersection_geometry.json')
-
-
-
-main()
-
\ No newline at end of file
diff --git a/simulation/intersection_geometry_predicate/requirements.txt b/simulation/intersection_geometry_predicate/requirements.txt
deleted file mode 100644
index fdda232..0000000
--- a/simulation/intersection_geometry_predicate/requirements.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-argparse
-trimesh
\ No newline at end of file
diff --git a/simulation/robossembler_scene_builder/main.py b/simulation/robossembler_scene_builder/main.py
deleted file mode 100644
index 70d8cba..0000000
--- a/simulation/robossembler_scene_builder/main.py
+++ /dev/null
@@ -1,97 +0,0 @@
-from returns.pipeline import is_successful
-from typing import List, TypeVar
-from returns.result import Result, Success, Failure
-import os
-from model.robossembler_assets import (
- MappingInstanceAtModel,
- RobossemblerAssets,
- Instance,
-)
-import re
-import pathlib
-from repository.file_system import FileSystemRepository
-from model.robossembler_assets import Physics
-from argparse import ArgumentParser
-
-
-T = TypeVar("T")
-
-
-class JsonReaderAndModelMapperUseCase:
- def call(path: str, model: T) -> Result[T, str]:
- try:
-            if not re.search(r"^(.+)/([^/]+)$", path):
- return Failure("path not valid")
- if model.from_dict == None:
- return Failure("Model is not have mapping method from_dict")
- return Success(model.from_dict(FileSystemRepository.readJSON(path=path)))
- except:
- return Failure("JsonReaderAndModelMapperUseCase unknown error")
-
-
-class MappingInstanceAtModelToSdfUseCase:
- def call(instances: List[MappingInstanceAtModel]) -> Result[List[str], str]:
- try:
- return Success(list(map(lambda el: el.toSDF(), instances)))
- except:
- return Failure("MappingInstanceAtModelToSdfUseCase unknown error")
-
-
-class MappingSdfWorldToPhysicsModelUseCase:
- def call(physicModel: Physics) -> Result[List[str], str]:
- try:
- return Success(Physics.toSDF(physicModel))
- except:
- return Failure("MappingInstanceAtModelToSdfUseCase unknown error")
-
-
-class FormationOfTheSDFUseCase:
- def call(worldTag: str, modelsTags: List[str], path: str) -> Result[bool, str]:
- path = str(pathlib.Path(path).parent.resolve()) + "/"
- if modelsTags == None:
- return Failure("FormationOfTheSDFUseCase modelsTags is None")
- if worldTag == None:
- return Failure("FormationOfTheSDFUseCase worldTag is None")
-
- FileSystemRepository.writeFile(
- data=worldTag.replace("{models}", "\n".join(modelsTags)),
- filePath=path,
- fileName="world.sdf",
- )
- return Success(True)
-
-
-def main():
- parser = ArgumentParser()
- parser.add_argument("--path", help="need path .json")
- args = parser.parse_args()
-
- if args.path == None:
- parser.print_help()
- return
- path = args.path
- jsonReaderAndModelMapperUseCase = JsonReaderAndModelMapperUseCase.call(
- path=path, model=RobossemblerAssets
- )
-
- if not is_successful(jsonReaderAndModelMapperUseCase):
- return
- robossemblerAssets = jsonReaderAndModelMapperUseCase.value_or(None)
-
- instanceSdfModel = MappingInstanceAtModelToSdfUseCase.call(
- instances=robossemblerAssets.getAllAssetsInstanceAtModel()
- )
-
- sdfWorld = MappingSdfWorldToPhysicsModelUseCase.call(
- physicModel=robossemblerAssets.physics
- )
-
- FormationOfTheSDFUseCase.call(
- worldTag=sdfWorld.value_or(None),
- modelsTags=instanceSdfModel.value_or(None),
- path=path,
- )
-
-
-main()
diff --git a/simulation/robossembler_scene_builder/mocks/light_sdf.xml b/simulation/robossembler_scene_builder/mocks/light_sdf.xml
deleted file mode 100644
index 7e5aeee..0000000
--- a/simulation/robossembler_scene_builder/mocks/light_sdf.xml
+++ /dev/null
@@ -1,12 +0,0 @@
-<light name="{name_light}" type="{type_light}">
-    <pose>{x} {y} {z} {roll} {pitch} {yaw}</pose>
-    <diffuse>{r} {g} {b} {a}</diffuse>
-    <specular>.1 .1 .1 1</specular>
-    <attenuation>
-        <range>20</range>
-        <constant>0.2</constant>
-        <linear>0.8</linear>
-        <quadratic>0.01</quadratic>
-    </attenuation>
-    <cast_shadows>false</cast_shadows>
-</light>
\ No newline at end of file
diff --git a/simulation/robossembler_scene_builder/mocks/model_include_sdf.xml b/simulation/robossembler_scene_builder/mocks/model_include_sdf.xml
deleted file mode 100644
index e23a942..0000000
--- a/simulation/robossembler_scene_builder/mocks/model_include_sdf.xml
+++ /dev/null
@@ -1,7 +0,0 @@
-<include>
-    <pose>{x} {y} {z} {roll} {pitch} {yaw}</pose>
-
-    <name>{name}</name>
-    <uri>model://{uri}</uri>
-
-</include>
\ No newline at end of file
diff --git a/simulation/robossembler_scene_builder/mocks/world.xml b/simulation/robossembler_scene_builder/mocks/world.xml
deleted file mode 100644
index 7265866..0000000
--- a/simulation/robossembler_scene_builder/mocks/world.xml
+++ /dev/null
@@ -1,105 +0,0 @@
-
-
-
-
- 0.001
- 1.0
- 1000
-
-
-
-
-
-
- ogre2
-
- {gravity_x} {gravity_y} {gravity_z}
-
- 6e-06
- 2.3e-05 -4.2e-05
-
-
- 0.4 0.4 0.4 1
- 0.7 0.7 0.7 1
- false
-
-
-
-
- 3D View
- false
- docked
-
- ogre2
- scene
- 1.0 1.0 1.0
- 0.4 0.6 1.0
- 3.3 2.8 2.8 0 0.5 -2.4
-
-
-
- World stats
- false
- false
- 110
- 290
- 1
- floating
-
-
-
-
-
- true
- true
- true
- true
-
-
-
- true
- 0 0 10 0 0 0
- 0.8 0.8 0.8 1
- 0.2 0.2 0.2 1
-
- 1000
- 0.9
- 0.01
- 0.001
-
- -0.5 0.1 -0.9
-
-
- true
-
-
-
-
- 0 0 1
-
-
-
-
-
-
- 0 0 1
- 100 100
-
-
-
- 0.8 0.8 0.8 1
- 0.8 0.8 0.8 1
- 0.8 0.8 0.8 1
-
-
-
-
- {models}
-
\ No newline at end of file
diff --git a/simulation/robossembler_scene_builder/model/robossembler_assets.py b/simulation/robossembler_scene_builder/model/robossembler_assets.py
deleted file mode 100644
index c6cf484..0000000
--- a/simulation/robossembler_scene_builder/model/robossembler_assets.py
+++ /dev/null
@@ -1,394 +0,0 @@
-from dataclasses import dataclass
-import os
-from returns.result import Result, Success, Failure
-from typing import Optional, Any, List, TypeVar, Callable, Type, cast
-from enum import Enum
-
-from repository.file_system import FileSystemRepository
-
-T = TypeVar("T")
-EnumT = TypeVar("EnumT", bound=Enum)
-
-
-def from_float(x: Any) -> float:
- assert isinstance(x, (float, int)) and not isinstance(x, bool)
- return float(x)
-
-
-def from_none(x: Any) -> Any:
- return x
-
-
-def from_union(fs, x):
- for f in fs:
- try:
- return f(x)
- except:
- pass
- assert False
-
-
-def to_float(x: Any) -> float:
- assert isinstance(x, float)
- return x
-
-
-def from_str(x: Any) -> str:
- assert isinstance(x, str)
- return x
-
-
-def from_int(x: Any) -> int:
- assert isinstance(x, int) and not isinstance(x, bool)
- return x
-
-
-def from_list(f: Callable[[Any], T], x: Any) -> List[T]:
- assert isinstance(x, list)
- return [f(y) for y in x]
-
-
-def to_class(c: Type[T], x: Any) -> dict:
- assert isinstance(x, c)
- return cast(Any, x).to_dict()
-
-
-def to_enum(c: Type[EnumT], x: Any) -> EnumT:
- assert isinstance(x, c)
- return x.value
-
-
-@dataclass
-class Model:
- name: Optional[str] = None
- id: Optional[str] = None
- path: Optional[str] = None
-
- @staticmethod
- def from_dict(obj: Any) -> "Model":
- assert isinstance(obj, dict)
- name = from_union([from_str, from_none], obj.get("name"))
- id = from_union([from_str, from_none], obj.get("id"))
- path = from_union([from_str, from_none], obj.get("path"))
- return Model(name, id, path)
-
- def to_dict(self) -> dict:
- result: dict = {}
- if self.name is not None:
- result["name"] = from_union([from_str, from_none], self.name)
- if self.id is not None:
- result["id"] = from_union([from_str, from_none], self.id)
- if self.path is not None:
- result["path"] = from_union([from_str, from_none], self.path)
- return result
-
-
-@dataclass
-class Pose:
- x: Optional[float] = None
- y: Optional[float] = None
- z: Optional[float] = None
- roll: Optional[float] = None
- pitch: Optional[float] = None
- yaw: Optional[float] = None
-
- @staticmethod
- def from_dict(obj: Any) -> "Pose":
- assert isinstance(obj, dict)
- x = from_union([from_float, from_none], obj.get("x"))
- y = from_union([from_float, from_none], obj.get("y"))
- z = from_union([from_float, from_none], obj.get("z"))
- roll = from_union([from_float, from_none], obj.get("roll"))
- pitch = from_union([from_float, from_none], obj.get("pitch"))
- yaw = from_union([from_float, from_none], obj.get("yaw"))
- return Pose(x, y, z, roll, pitch, yaw)
-
- def to_dict(self) -> dict:
- result: dict = {}
- if self.x is not None:
- result["x"] = from_union([to_float, from_none], self.x)
- if self.y is not None:
- result["y"] = from_union([to_float, from_none], self.y)
- if self.z is not None:
- result["z"] = from_union([to_float, from_none], self.z)
- if self.roll is not None:
- result["roll"] = from_union([to_float, from_none], self.roll)
- if self.pitch is not None:
- result["pitch"] = from_union([to_float, from_none], self.pitch)
- if self.yaw is not None:
- result["yaw"] = from_union([to_float, from_none], self.yaw)
- return result
-
-
-class TypeEnum(Enum):
- ASSET = "asset"
- LIGHT = "light"
-
-
-@dataclass
-class Instance:
- model_name: Optional[str] = None
- model_id: Optional[str] = None
- id: Optional[str] = None
- pose: Optional[Pose] = None
- scale: Optional[int] = None
- type: Optional[TypeEnum] = None
- parent: Optional[str] = None
- light_type: Optional[str] = None
- intencity: Optional[int] = None
- diffuse: Optional[List[float]] = None
- spot_angle: Optional[int] = None
-
- @staticmethod
- def from_dict(obj: Any) -> "Instance":
- assert isinstance(obj, dict)
- model_name = from_union([from_str, from_none], obj.get("model_name"))
- model_id = from_union([from_str, from_none], obj.get("model_id"))
- id = from_union([from_str, from_none], obj.get("id"))
- pose = from_union([Pose.from_dict, from_none], obj.get("pose"))
- scale = from_union([from_int, from_none], obj.get("scale"))
- type = from_union([TypeEnum, from_none], obj.get("type"))
- parent = from_union([from_str, from_none], obj.get("parent"))
- light_type = from_union([from_str, from_none], obj.get("light_type"))
- intencity = from_union([from_int, from_none], obj.get("intencity"))
- diffuse = from_union(
- [lambda x: from_list(from_float, x), from_none], obj.get("diffuse")
- )
- spot_angle = from_union([from_int, from_none], obj.get("spot_angle"))
- return Instance(
- model_name,
- model_id,
- id,
- pose,
- scale,
- type,
- parent,
- light_type,
- intencity,
- diffuse,
- spot_angle,
- )
-
- def fromMappingInstanceAtModel(
- self, models: List[Model]
- ) -> "MappingInstanceAtModel":
- for el in models:
- if el.id == self.model_id:
- return MappingInstanceAtModel(instance=self, model=el)
-        return Failure(f"not found model at {self.model_id}")
-
- def to_dict(self) -> dict:
- result: dict = {}
- if self.model_name is not None:
- result["model_name"] = from_union([from_str, from_none], self.model_name)
- if self.model_id is not None:
- result["model_id"] = from_union([from_str, from_none], self.model_id)
- if self.id is not None:
- result["id"] = from_union([from_str, from_none], self.id)
- if self.pose is not None:
- result["pose"] = from_union(
- [lambda x: to_class(Pose, x), from_none], self.pose
- )
- if self.scale is not None:
- result["scale"] = from_union([from_int, from_none], self.scale)
- if self.type is not None:
- result["type"] = from_union(
- [lambda x: to_enum(TypeEnum, x), from_none], self.type
- )
- if self.parent is not None:
- result["parent"] = from_union([from_str, from_none], self.parent)
- if self.light_type is not None:
- result["light_type"] = from_union([from_str, from_none], self.light_type)
- if self.intencity is not None:
- result["intencity"] = from_union([from_int, from_none], self.intencity)
- if self.diffuse is not None:
- result["diffuse"] = from_union(
- [lambda x: from_list(to_float, x), from_none], self.diffuse
- )
- if self.spot_angle is not None:
- result["spot_angle"] = from_union([from_int, from_none], self.spot_angle)
- return result
-
-
-class BasePose:
- def __init__(self, x: float, y: float, z: float, **kwargs):
- self.x = x
- self.y = y
- self.z = z
-
- def toPose(self, sdfXmlMock: str):
- return (
- sdfXmlMock.replace("{x}", str(self.x))
- .replace("{y}", str(self.y))
- .replace("{z}", str(self.z))
- )
-
-
-class MappingInstanceAtModel(BasePose):
- instance: Instance
- model: Model
-
- def __init__(self, instance: Instance, model: Model) -> None:
- self.instance = instance
- self.model = model
- pass
-
- def toSDF(self):
- pose = self.instance.pose
- match self.instance.type:
- case TypeEnum.ASSET:
- mock = FileSystemRepository.readFile(
- os.path.dirname(os.path.realpath(__file__))
- + "/../mocks/model_include_sdf.xml"
- )
- # mockPose = self.toPose(mock)
- return (
- mock.replace("{name}", str(self.model.name))
- .replace("{x}", str(pose.x))
- .replace("{y}", str(pose.y))
- .replace("{z}", str(pose.z))
- .replace("{pitch}", str(pose.pitch))
- .replace("{yaw}", str(pose.yaw))
- .replace("{roll}", str(pose.roll))
- .replace("{uri}", str(self.model.path))
- )
- case TypeEnum.LIGHT:
- pathMock = (
- os.path.dirname(os.path.realpath(__file__))
- + "/../mocks/light_sdf.xml"
- )
-
- return (
- FileSystemRepository.readFile(pathMock)
- .replace("{x}", str(pose.x))
- .replace("{y}", str(pose.y))
- .replace("{z}", str(pose.z))
- .replace("{pitch}", str(pose.pitch))
- .replace("{yaw}", str(pose.yaw))
- .replace("{roll}", str(pose.roll))
- .replace("{type_light}", str(self.instance.light_type))
- .replace("{name_light}", str("132"))
-                .replace("{r}", str(self.instance.diffuse[0]))
-                .replace("{g}", str(self.instance.diffuse[1]))
-                .replace("{b}", str(self.instance.diffuse[2]))
-                .replace("{a}", str(self.instance.diffuse[3]))
- )
-
-
-@dataclass
-class Gravity:
- x: Optional[int] = None
- y: Optional[int] = None
- z: Optional[float] = None
-
- @staticmethod
- def from_dict(obj: Any) -> "Gravity":
- assert isinstance(obj, dict)
- x = from_union([from_int, from_none], obj.get("x"))
- y = from_union([from_int, from_none], obj.get("y"))
- z = from_union([from_float, from_none], obj.get("z"))
- return Gravity(x, y, z)
-
- def to_dict(self) -> dict:
- result: dict = {}
- if self.x is not None:
- result["x"] = from_union([from_int, from_none], self.x)
- if self.y is not None:
- result["y"] = from_union([from_int, from_none], self.y)
- if self.z is not None:
- result["z"] = from_union([to_float, from_none], self.z)
- return result
-
-
-@dataclass
-class Physics:
- engine_name: Optional[str] = None
- gravity: Optional[Gravity] = None
-
- @staticmethod
- def from_dict(obj: Any) -> "Physics":
- assert isinstance(obj, dict)
- engine_name = from_union([from_str, from_none], obj.get("engine_name"))
- gravity = from_union([Gravity.from_dict, from_none], obj.get("gravity"))
- return Physics(engine_name, gravity)
-
- def to_dict(self) -> dict:
- result: dict = {}
- if self.engine_name is not None:
- result["engine_name"] = from_union([from_str, from_none], self.engine_name)
- if self.gravity is not None:
- result["gravity"] = from_union(
- [lambda x: to_class(Gravity, x), from_none], self.gravity
- )
- return result
-
- def toSDF(self) -> str:
- pathMock = os.path.dirname(os.path.realpath(__file__)) + "/../mocks/world.xml"
- gravity = self.gravity
-
- return (
- FileSystemRepository.readFile(pathMock)
- .replace("{gravity_x}", str(gravity.x))
- .replace("{gravity_y}", str(gravity.y))
- .replace("{gravity_z}", str(gravity.z))
- .replace("{engine_type}", str(self.engine_name))
- )
-
-
-@dataclass
-class RobossemblerAssets:
- models: Optional[List[Model]] = None
- instances: Optional[List[Instance]] = None
- physics: Optional[Physics] = None
-
- @staticmethod
- def from_dict(obj: Any) -> "RobossemblerAssets":
- assert isinstance(obj, dict)
- models = from_union(
- [lambda x: from_list(Model.from_dict, x), from_none], obj.get("models")
- )
-
- instances = from_union(
- [lambda x: from_list(Instance.from_dict, x), from_none],
- obj.get("instances"),
- )
-
- physics = from_union([Physics.from_dict, from_none], obj.get("physics"))
- return RobossemblerAssets(models, instances, physics)
-
- def to_dict(self) -> dict:
- result: dict = {}
- if self.models is not None:
- result["models"] = from_union(
- [lambda x: from_list(lambda x: to_class(Model, x), x), from_none],
- self.models,
- )
- if self.instances is not None:
- result["instances"] = from_union(
- [lambda x: from_list(lambda x: to_class(Instance, x), x), from_none],
- self.instances,
- )
- if self.physics is not None:
- result["physics"] = from_union(
- [lambda x: to_class(Physics, x), from_none], self.physics
- )
- return result
-
- def _getAllAtType(self, type: TypeEnum) -> List[Instance]:
- return list(filter(lambda x: x.type == type, self.instances))
-
- def getAllLightInstances(self) -> List[Instance]:
- return list(
- map(
- lambda el: el.fromMappingInstanceAtModel(self.models),
- self._getAllAtType(type=TypeEnum.LIGHT),
- )
- )
-
- def getAllAssetsInstanceAtModel(self) -> List[MappingInstanceAtModel]:
- return list(
- map(
- lambda el: el.fromMappingInstanceAtModel(self.models),
- self._getAllAtType(type=TypeEnum.ASSET),
- )
- )
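
A sketch of the asset description this module parses and how the mapping helpers are meant to be used; IDs, paths and values are illustrative.

```python
from model.robossembler_assets import RobossemblerAssets  # module layout as in main.py above

data = {
    "models": [{"name": "cube", "id": "m1", "path": "models/cube"}],
    "instances": [
        {"model_id": "m1", "id": "i1", "type": "asset",
         "pose": {"x": 0.0, "y": 0.0, "z": 0.1, "roll": 0.0, "pitch": 0.0, "yaw": 0.0}},
    ],
    "physics": {"engine_name": "ogre2", "gravity": {"x": 0, "y": 0, "z": -9.8}},
}

assets = RobossemblerAssets.from_dict(data)
mappings = assets.getAllAssetsInstanceAtModel()  # list of MappingInstanceAtModel(instance, model)
# mappings[0].toSDF() renders mocks/model_include_sdf.xml with this pose and model path.
```
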
diff --git a/simulation/robossembler_scene_builder/repository/file_system.py b/simulation/robossembler_scene_builder/repository/file_system.py
deleted file mode 100644
index 4f8ca26..0000000
--- a/simulation/robossembler_scene_builder/repository/file_system.py
+++ /dev/null
@@ -1,25 +0,0 @@
-import json
-import os
-
-
-class FileSystemRepository:
- def readJSON(path: str):
- return json.loads((open(path)).read())
-
- def writeFile(data, filePath, fileName):
- file_to_open = filePath + fileName
-
- f = open(file_to_open, "w", encoding="utf-8", errors="ignore")
- f.write(data)
- f.close()
-
- def readFile(path: str):
- return open(path).read()
-
- def readFilesTypeFolder(pathFolder: str, fileType=".json"):
- filesJson = list(
- filter(
- lambda x: x[-fileType.__len__() :] == fileType, os.listdir(pathFolder)
- )
- )
- return filesJson
diff --git a/simulation/stability_process_predicate/main.py b/simulation/stability_process_predicate/main.py
deleted file mode 100644
index ac8741f..0000000
--- a/simulation/stability_process_predicate/main.py
+++ /dev/null
@@ -1,14 +0,0 @@
-import argparse
-from usecases.stability_check_usecase import StabilityCheckUseCase
-
-# python3 main.py --aspPath /Users/idontsudo/Desktop/asp-example/
-
-
-def main():
- parser = argparse.ArgumentParser()
- parser.add_argument("--aspPath", help="asp folder generation path")
- args = parser.parse_args()
- StabilityCheckUseCase().call(args.aspPath)
-
-
-main()
diff --git a/simulation/stability_process_predicate/usecases/stability_check_usecase.py b/simulation/stability_process_predicate/usecases/stability_check_usecase.py
deleted file mode 100644
index b4c1931..0000000
--- a/simulation/stability_process_predicate/usecases/stability_check_usecase.py
+++ /dev/null
@@ -1,229 +0,0 @@
-from typing import Any, List, TypeVar, Type, cast, Callable
-import numpy as np
-import pybullet as p
-import time
-import pybullet_data
-import os
-import json
-
-from time import sleep
-
-
-T = TypeVar("T")
-
-
-def from_str(x):
- assert isinstance(x, str)
- return x
-
-
-def from_float(x: Any) -> float:
- assert isinstance(x, (float, int)) and not isinstance(x, bool)
- return float(x)
-
-
-def to_float(x: Any) -> float:
- assert isinstance(x, float)
- return x
-
-
-def from_int(x: Any) -> int:
- assert isinstance(x, int) and not isinstance(x, bool)
- return x
-
-
-def to_class(c: Type[T], x: Any) -> dict:
- assert isinstance(x, c)
- return cast(Any, x).to_dict()
-
-
-def from_list(f: Callable[[Any], T], x: Any) -> List[T]:
- assert isinstance(x, list)
- return [f(y) for y in x]
-
-
-class Coords:
- x: float
- y: float
- z: float
-
- def __init__(self, x: float, y: float, z: float) -> None:
- self.x = x
- self.y = y
- self.z = z
-
- @staticmethod
- def from_dict(obj: Any) -> "Coords":
- assert isinstance(obj, dict)
- x = from_float(obj.get("x"))
- y = from_float(obj.get("y"))
- z = from_float(obj.get("z"))
- return Coords(x, y, z)
-
- def to_dict(self) -> dict:
- result: dict = {}
- result["x"] = to_float(self.x)
- result["y"] = to_float(self.y)
- result["z"] = to_float(self.z)
- return result
-
-
-class SimulatorStabilityResultModel:
- id: str
- quaternion: Coords
- position: Coords
-
-    def __init__(self, id: str, quaternion: Coords, position: Coords) -> None:
- self.id = id
- self.quaternion = quaternion
- self.position = position
-
- @staticmethod
- def from_dict(obj: Any) -> "SimulatorStabilityResultModel":
- assert isinstance(obj, dict)
- id = from_str(obj.get("id"))
- quaternion = Coords.from_dict(obj.get("quaternion"))
- position = Coords.from_dict(obj.get("position"))
- return SimulatorStabilityResultModel(id, quaternion, position)
-
- def to_dict(self) -> dict:
- result: dict = {}
- result["id"] = from_str(self.id)
- result["quaternion"] = to_class(Coords, self.quaternion)
- result["position"] = to_class(Coords, self.position)
- return result
-
-
-def SimulatorStabilityModelfromdict(s: Any) -> List[SimulatorStabilityResultModel]:
- return from_list(SimulatorStabilityResultModel.from_dict, s)
-
-
-def SimulatorStabilityModeltodict(x: List[SimulatorStabilityResultModel]) -> Any:
- return from_list(lambda x: to_class(SimulatorStabilityResultModel, x), x)
-
-
-class StabilityCheckUseCase:
- def urdfLoader(
-        self, assembly: list[str], outPath: str, urdfGeneration: dict[str, str]
- ):
- urdfs = []
- for assemblyCount in range(len(assembly)):
- urdf = urdfGeneration.get(assembly[assemblyCount])
- file_to_open = outPath + "/generation/" + str(assemblyCount) + ".urdf"
- f = open(file_to_open, "w", encoding="utf-8", errors="ignore")
- f.write(urdf)
- f.close()
- urdfs.append(os.path.abspath(f.name))
- return urdfs
-
- def executeSimulation(
- self,
- assembly: list[str],
- outPath: str,
-        urdfGeneration: dict[str, str],
- duration: int,
- ) -> list["SimulatorStabilityResultModel"]:
- p.connect(p.DIRECT)
- p.setGravity(0, 0, -10)
- p.setAdditionalSearchPath(pybullet_data.getDataPath())
- p.loadURDF("plane.urdf")
- resultCoords = []
-
- urdfs = self.urdfLoader(
- assembly=assembly, urdfGeneration=urdfGeneration, outPath=outPath
- )
- bulletIds = []
- for el in urdfs:
- id = p.loadURDF(el)
- bulletIds.append(id)
- for i in range(duration):
- if i + 200 == duration:
- inc = 0
- for bulletUUID in bulletIds:
- pos, rot = p.getBasePositionAndOrientation(bulletUUID)
- resultCoords.append(
- SimulatorStabilityResultModel(
- id=assembly[inc],
- quaternion=Coords(x=rot[0], y=rot[1], z=rot[2]),
- position=Coords(x=pos[0], y=pos[1], z=pos[2]),
- )
- )
- p.removeBody(bulletUUID)
- inc += 1
-
- p.stepSimulation()
-
- time.sleep(1.0 / 240.0)
- return resultCoords
-
- def call(self, aspPath: str):
- try:
- assemblyFolder = aspPath
- assemblesStructures = json.loads(
- (open(assemblyFolder + "sequences.json")).read()
- ).get("sequences")
-
- tasks = len(assemblesStructures) * len(assemblesStructures[0])
- taskCounter = 0
- urdfGeneration = json.loads(
- (open(assemblyFolder + "generation/urdf-generation.json")).read()
- )
- for activeAssemblyNumber in range(len(assemblesStructures)):
- pathSaveResultAssemblyFolder = (
- aspPath + "stability" + "/" + str(activeAssemblyNumber + 1) + "/"
- )
- if not os.path.exists(pathSaveResultAssemblyFolder):
- os.makedirs(pathSaveResultAssemblyFolder)
-
- for subAssemblyNumber in range(
- len(assemblesStructures[activeAssemblyNumber])
- ):
- taskCounter += 1
- subAssembly = assemblesStructures[activeAssemblyNumber][
- 0 : subAssemblyNumber + 1
- ]
- print(subAssembly)
-
- if subAssembly == [
- "disk_top",
- "disk_middel",
- ]:
- asm = []
- for el in subAssembly:
- asm.append(el)
-
- resultSimulationStates = self.executeSimulation(
- assembly=asm,
- outPath=aspPath,
- urdfGeneration=urdfGeneration,
- duration=1000,
- )
-
- pathSaveResultSubAssemblyFolder = (
- aspPath
- + "stability"
- + "/"
- + str(activeAssemblyNumber + 1)
- + "/"
- + str(subAssemblyNumber)
- + "/"
- )
- if not os.path.exists(pathSaveResultSubAssemblyFolder):
- os.makedirs(pathSaveResultSubAssemblyFolder)
- results = {}
- for state in resultSimulationStates:
- results[state.id] = state.to_dict()
- f = open(
- pathSaveResultSubAssemblyFolder
- + "/"
- + "motion_result.json",
- "w",
- encoding="utf-8",
- errors="ignore",
- )
- f.write(json.dumps(results, ensure_ascii=False, indent=4))
- f.close()
- percentageOfCompletion = taskCounter / tasks * 100
- print("process complete: " + str(percentageOfCompletion) + "%")
- except Exception as e:
- print(e)
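
For reference, a short sketch of reading back a motion_result.json written above (the path is illustrative); each entry follows SimulatorStabilityResultModel.to_dict().

```python
import json

# <aspPath>/stability/<assembly>/<subassembly>/motion_result.json
with open("/path/to/asp/stability/1/1/motion_result.json") as f:
    results = json.load(f)

for part_id, state in results.items():
    pos = state["position"]    # {"x": ..., "y": ..., "z": ...}
    rot = state["quaternion"]  # {"x": ..., "y": ..., "z": ...}
    print(part_id, "rested at z =", pos["z"], "orientation:", rot)
```
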
diff --git a/test_models/asm_reductor.FCStd b/test_models/asm_reductor.FCStd
deleted file mode 100644
index 51d828c..0000000
Binary files a/test_models/asm_reductor.FCStd and /dev/null differ
diff --git a/test_models/assembly_settings.json b/test_models/assembly_settings.json
deleted file mode 100644
index 1ffac35..0000000
--- a/test_models/assembly_settings.json
+++ /dev/null
@@ -1,34 +0,0 @@
-[
- {
- "Name": "Fastener_Set",
- "Type": "fastener_set",
- "Parent": "body_down",
- "Child": "body_up",
- "Fasteners": [
- "bolt4",
- "bolt",
- "bolt2",
- "bolt3"
- ]
- },
- {
- "Name": "Assembly_Sequence",
- "Type": "asm_sequence",
- "Parent": "body_down",
- "Child": "sol_gear"
- },
- {
- "Name": "Clearance_Constraint",
- "Type": "clearance",
- "PartName": [
- "planet_gear002",
- "planet_gear005",
- "planet_gear004",
- "planet_gear003",
- "planet_gear",
- "output_shaft",
- "sol_gear"
- ],
- "MaxClearance": 1.0
- }
-]
\ No newline at end of file
diff --git a/test_models/assembly_settings_test_reductor.json b/test_models/assembly_settings_test_reductor.json
deleted file mode 100644
index 1ffac35..0000000
--- a/test_models/assembly_settings_test_reductor.json
+++ /dev/null
@@ -1,34 +0,0 @@
-[
- {
- "Name": "Fastener_Set",
- "Type": "fastener_set",
- "Parent": "body_down",
- "Child": "body_up",
- "Fasteners": [
- "bolt4",
- "bolt",
- "bolt2",
- "bolt3"
- ]
- },
- {
- "Name": "Assembly_Sequence",
- "Type": "asm_sequence",
- "Parent": "body_down",
- "Child": "sol_gear"
- },
- {
- "Name": "Clearance_Constraint",
- "Type": "clearance",
- "PartName": [
- "planet_gear002",
- "planet_gear005",
- "planet_gear004",
- "planet_gear003",
- "planet_gear",
- "output_shaft",
- "sol_gear"
- ],
- "MaxClearance": 1.0
- }
-]
\ No newline at end of file
diff --git a/test_models/crux_hotend.FCStd b/test_models/crux_hotend.FCStd
deleted file mode 100644
index f854ab7..0000000
Binary files a/test_models/crux_hotend.FCStd and /dev/null differ
diff --git a/test_models/desk_table.FCStd b/test_models/desk_table.FCStd
deleted file mode 100644
index 8b3c384..0000000
Binary files a/test_models/desk_table.FCStd and /dev/null differ
diff --git a/test_models/sequences.json b/test_models/sequences.json
deleted file mode 100644
index 07d5216..0000000
--- a/test_models/sequences.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "sequences": [
-
- "body_down",
- "sol_gear",
- "output_shaft",
- "planet_gear",
- "planet_gear002"
- ]
-}
\ No newline at end of file
diff --git a/test_models/table_pc.FCStd b/test_models/table_pc.FCStd
deleted file mode 100644
index ce0788f..0000000
Binary files a/test_models/table_pc.FCStd and /dev/null differ
diff --git a/test_models/test_reductor.FCStd b/test_models/test_reductor.FCStd
deleted file mode 100644
index 6ee9991..0000000
Binary files a/test_models/test_reductor.FCStd and /dev/null differ
diff --git a/train_models/models_dope.py b/train_models/models_dope.py
deleted file mode 100755
index 0c89004..0000000
--- a/train_models/models_dope.py
+++ /dev/null
@@ -1,196 +0,0 @@
-"""
-NVIDIA from jtremblay@gmail.com
-"""
-
-# Networks
-import torch
-import torch.nn as nn
-import torch.nn.parallel
-import torch.utils.data
-import torchvision.models as models
-
-
-class DopeNetwork(nn.Module):
- def __init__(
- self,
- pretrained=False,
- numBeliefMap=9,
- numAffinity=16,
- stop_at_stage=6, # number of stages to process (if less than total number of stages)
- ):
- super(DopeNetwork, self).__init__()
-
- self.stop_at_stage = stop_at_stage
-
-        vgg_full = models.vgg19(pretrained=pretrained).features
- self.vgg = nn.Sequential()
- for i_layer in range(24):
- self.vgg.add_module(str(i_layer), vgg_full[i_layer])
-
- # Add some layers
- i_layer = 23
- self.vgg.add_module(
- str(i_layer), nn.Conv2d(512, 256, kernel_size=3, stride=1, padding=1)
- )
- self.vgg.add_module(str(i_layer + 1), nn.ReLU(inplace=True))
- self.vgg.add_module(
- str(i_layer + 2), nn.Conv2d(256, 128, kernel_size=3, stride=1, padding=1)
- )
- self.vgg.add_module(str(i_layer + 3), nn.ReLU(inplace=True))
-
- # print('---Belief------------------------------------------------')
- # _2 are the belief map stages
- self.m1_2 = DopeNetwork.create_stage(128, numBeliefMap, True)
- self.m2_2 = DopeNetwork.create_stage(
- 128 + numBeliefMap + numAffinity, numBeliefMap, False
- )
- self.m3_2 = DopeNetwork.create_stage(
- 128 + numBeliefMap + numAffinity, numBeliefMap, False
- )
- self.m4_2 = DopeNetwork.create_stage(
- 128 + numBeliefMap + numAffinity, numBeliefMap, False
- )
- self.m5_2 = DopeNetwork.create_stage(
- 128 + numBeliefMap + numAffinity, numBeliefMap, False
- )
- self.m6_2 = DopeNetwork.create_stage(
- 128 + numBeliefMap + numAffinity, numBeliefMap, False
- )
-
- # print('---Affinity----------------------------------------------')
- # _1 are the affinity map stages
- self.m1_1 = DopeNetwork.create_stage(128, numAffinity, True)
- self.m2_1 = DopeNetwork.create_stage(
- 128 + numBeliefMap + numAffinity, numAffinity, False
- )
- self.m3_1 = DopeNetwork.create_stage(
- 128 + numBeliefMap + numAffinity, numAffinity, False
- )
- self.m4_1 = DopeNetwork.create_stage(
- 128 + numBeliefMap + numAffinity, numAffinity, False
- )
- self.m5_1 = DopeNetwork.create_stage(
- 128 + numBeliefMap + numAffinity, numAffinity, False
- )
- self.m6_1 = DopeNetwork.create_stage(
- 128 + numBeliefMap + numAffinity, numAffinity, False
- )
-
- def forward(self, x):
- """Runs inference on the neural network"""
-
- out1 = self.vgg(x)
-
- out1_2 = self.m1_2(out1)
- out1_1 = self.m1_1(out1)
-
- if self.stop_at_stage == 1:
- return [out1_2], [out1_1]
-
- out2 = torch.cat([out1_2, out1_1, out1], 1)
- out2_2 = self.m2_2(out2)
- out2_1 = self.m2_1(out2)
-
- if self.stop_at_stage == 2:
- return [out1_2, out2_2], [out1_1, out2_1]
-
- out3 = torch.cat([out2_2, out2_1, out1], 1)
- out3_2 = self.m3_2(out3)
- out3_1 = self.m3_1(out3)
-
- if self.stop_at_stage == 3:
- return [out1_2, out2_2, out3_2], [out1_1, out2_1, out3_1]
-
- out4 = torch.cat([out3_2, out3_1, out1], 1)
- out4_2 = self.m4_2(out4)
- out4_1 = self.m4_1(out4)
-
- if self.stop_at_stage == 4:
- return [out1_2, out2_2, out3_2, out4_2], [out1_1, out2_1, out3_1, out4_1]
-
- out5 = torch.cat([out4_2, out4_1, out1], 1)
- out5_2 = self.m5_2(out5)
- out5_1 = self.m5_1(out5)
-
- if self.stop_at_stage == 5:
- return [out1_2, out2_2, out3_2, out4_2, out5_2], [
- out1_1,
- out2_1,
- out3_1,
- out4_1,
- out5_1,
- ]
-
- out6 = torch.cat([out5_2, out5_1, out1], 1)
- out6_2 = self.m6_2(out6)
- out6_1 = self.m6_1(out6)
-
- return [out1_2, out2_2, out3_2, out4_2, out5_2, out6_2], [
- out1_1,
- out2_1,
- out3_1,
- out4_1,
- out5_1,
- out6_1,
- ]
-
- @staticmethod
- def create_stage(in_channels, out_channels, first=False):
- """Create the neural network layers for a single stage."""
-
- model = nn.Sequential()
- mid_channels = 128
- if first:
- padding = 1
- kernel = 3
- count = 6
- final_channels = 512
- else:
- padding = 3
- kernel = 7
- count = 10
- final_channels = mid_channels
-
- # First convolution
- model.add_module(
- "0",
- nn.Conv2d(
- in_channels, mid_channels, kernel_size=kernel, stride=1, padding=padding
- ),
- )
-
- # Middle convolutions
- i = 1
- while i < count - 1:
- model.add_module(str(i), nn.ReLU(inplace=True))
- i += 1
- model.add_module(
- str(i),
- nn.Conv2d(
- mid_channels,
- mid_channels,
- kernel_size=kernel,
- stride=1,
- padding=padding,
- ),
- )
- i += 1
-
- # Penultimate convolution
- model.add_module(str(i), nn.ReLU(inplace=True))
- i += 1
- model.add_module(
- str(i), nn.Conv2d(mid_channels, final_channels, kernel_size=1, stride=1)
- )
- i += 1
-
- # Last convolution
- model.add_module(str(i), nn.ReLU(inplace=True))
- i += 1
- model.add_module(
- str(i), nn.Conv2d(final_channels, out_channels, kernel_size=1, stride=1)
- )
- i += 1
-
- return model
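
A quick smoke-test sketch for DopeNetwork; the input resolution is an arbitrary choice, and it assumes DopeNetwork is importable from models_dope.py and a torchvision version that still accepts the pretrained argument used above.

```python
import torch
from models_dope import DopeNetwork  # assumed module name

net = DopeNetwork(numBeliefMap=9, numAffinity=16, stop_at_stage=2)
net.eval()

with torch.no_grad():
    x = torch.randn(1, 3, 400, 400)         # dummy RGB batch
    beliefs, affinities = net(x)            # one tensor per processed stage
    print(len(beliefs), beliefs[-1].shape)  # 2 stages, torch.Size([1, 9, 50, 50])
```
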
diff --git a/train_models/rbs_train.py b/train_models/rbs_train.py
deleted file mode 100644
index 2a78759..0000000
--- a/train_models/rbs_train.py
+++ /dev/null
@@ -1,29 +0,0 @@
-"""
- rbs_train
- General task: web-service pipeline
- Implemented function: training a neural network model on a given BOP dataset
-
- python3 $PYTHON_EDUCATION --path /Users/idontsudo/webservice/server/build/public/7065d6b6-c8a3-48c5-9679-bb8f3a690296 \
- --name test1234 --datasetName 32123213
-
- 27.04.2024 @shalenikol release 0.1
-"""
-import argparse
-from train_Yolo import train_YoloV8
-from train_Dope import train_Dope_i
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser()
- parser.add_argument("--path", required=True, help="Path for dataset")
- parser.add_argument("--name", required=True, help="String with result weights name")
- parser.add_argument("--datasetName", required=True, help="String with dataset name")
- parser.add_argument("--outpath", default="weights", help="Output path for weights")
- parser.add_argument("--type", default="ObjectDetection", help="Type of implementation")
- parser.add_argument("--epoch", default=3, type=int, help="How many training epochs")
- parser.add_argument('--pretrain', action="store_true", help="Use pretraining")
- args = parser.parse_args()
-
- if args.type == "ObjectDetection":
- train_YoloV8(args.path, args.name, args.datasetName, args.outpath, args.epoch, args.pretrain)
- else:
- train_Dope_i(args.path, args.name, args.datasetName, args.outpath, args.epoch, args.pretrain)
diff --git a/train_models/train_Dope.py b/train_models/train_Dope.py
deleted file mode 100644
index f9908bc..0000000
--- a/train_models/train_Dope.py
+++ /dev/null
@@ -1,542 +0,0 @@
-"""
- train_Dope
- General task: object pose estimation
- Implemented function: training the DOPE neural network model on a given BOP dataset
-
- python3 $PYTHON_EDUCATION --path /Users/user/webservice/server/build/public/7065d6b6-c8a3-48c5-9679-bb8f3a690296 \
- --name test1234 --datasetName 32123213
-
- 08.05.2024 @shalenikol release 0.1
-"""
-import os
-import json
-import shutil
-import numpy as np
-import transforms3d as t3d
-
-FILE_RBS_INFO = "rbs_info.json"
-FILE_CAMERA = "camera.json"
-FILE_GT = "scene_gt.json"
-FILE_GT_COCO = "scene_gt_coco.json"
-FILE_GT_INFO = "scene_gt_info.json"
-
-FILE_MODEL = "epoch"
-EXT_MODEL = ".pth"
-EXT_RGB = "jpg"
-DIR_ROOT_DS = "dataset_dope"
-DIR_TRAIN_OUT = "out_weights"
-
-MODEL_SCALE = 1000 # source models are in metres; convert to millimetres (as DOPE expects)
-
-# Own_Numbering_Files = True # image file naming: use our own sequential numbering
-nn_image = 0
-K_intrinsic = []
-model_info = []
-camera_data = {}
-im_width = 0
-
-nb_update_network = 0
-# [
-# [min(x), min(y), min(z)],
-# [min(x), max(y), min(z)],
-# [min(x), max(y), max(z)],
-# [min(x), min(y), max(z)],
-# [max(x), min(y), max(z)],
-# [max(x), max(y), min(z)],
-# [max(x), max(y), max(z)],
-# [max(x), min(y), max(z)],
-# [xc, yc, zc] # min + (max - min) / 2
-# ]
-
-def trans_3Dto2D_point_in_camera(xyz, K_m, R_m2c, t_m2c):
- """
- xyz : 3D coordinates of the point
- K_m : 3x3 camera intrinsic matrix
- R_m2c : 3x3 rotation matrix (model to camera)
- t_m2c : 3x1 translation vector (model to camera)
- return [u,v]
- """
- K = np.array(K_m)
- r = np.array(R_m2c)
- r.shape = (3, 3)
- t = np.array(t_m2c)
- t.shape = (3, 1)
- T = np.concatenate((r, t), axis=1)
-
- P_m = np.array(xyz)
- P_m.resize(4)
- P_m[-1] = 1.0
- P_m.shape = (4, 1)
-
- # Project (X, Y, Z, 1) into the camera's coordinate system
- P_c = T @ P_m # 3x1
- # Apply camera intrinsics to map (Xc, Yc, Zc) to p=(x, y, z)
- p = K @ P_c
- # Normalize by z to get (u,v,1)
- uv = (p / p[2][0])[:-1]
- return uv.flatten().tolist()
-
-def gt_parse(path: str, out_dir: str):
- global nn_image
- with open(os.path.join(path, FILE_GT_COCO), "r") as fh:
- coco_data = json.load(fh)
- with open(os.path.join(path, FILE_GT), "r") as fh:
- gt_data = json.load(fh)
- with open(os.path.join(path, FILE_GT_INFO), "r") as fh:
- gt_info = json.load(fh)
-
- for img in coco_data["images"]:
- rgb_file = os.path.join(path, img["file_name"])
- if os.path.isfile(rgb_file):
- # if Own_Numbering_Files:
- ext = os.path.splitext(rgb_file)[1] # only ext
- f = f"{nn_image:06}"
- out_img = os.path.join(out_dir, f + ext)
- # else:
- # f = os.path.split(rgb_file)[1] # filename with extension
- # f = os.path.splitext(f)[0] # only filename
- # out_img = out_dir
- shutil.copy2(rgb_file, out_img)
- out_file = os.path.join(out_dir,f+".json")
- nn_image += 1
-
- # full annotation of the one image
- all_data = camera_data.copy()
- cat_names = {obj["id"]: obj["name"] for obj in coco_data["categories"]}
- id_img = img["id"] # 0, 1, 2 ...
- sid_img = str(id_img) # "0", "1", "2" ...
- img_info = gt_info[sid_img]
- img_gt = gt_data[sid_img]
- img_idx = 0 # object index on the image
- objs = []
- for ann in coco_data["annotations"]:
- if ann["image_id"] == id_img:
- item = ann["category_id"]
- obj_data = {}
- obj_data["class"] = cat_names[item]
- x, y, width, height = ann["bbox"]
- obj_data["bounding_box"] = {"top_left":[x,y], "bottom_right":[x+width,y+height]}
-
- # visibility from FILE_GT_INFO
- item_info = img_info[img_idx]
- obj_data["visibility"] = item_info["visib_fract"]
-
- # location from FILE_GT
- item_gt = img_gt[img_idx]
- obj_id = item_gt["obj_id"] - 1 # index with 0
- cam_R_m2c = item_gt["cam_R_m2c"]
- cam_t_m2c = item_gt["cam_t_m2c"]
- obj_data["location"] = cam_t_m2c
- q = t3d.quaternions.mat2quat(np.array(cam_R_m2c))
- obj_data["quaternion_xyzw"] = [q[1], q[2], q[3], q[0]]
-
- cuboid_xyz = model_info[obj_id]
- obj_data["projected_cuboid"] = [
- trans_3Dto2D_point_in_camera(cub, K_intrinsic, cam_R_m2c, cam_t_m2c)
- for cub in cuboid_xyz
- ]
-
- objs.append(obj_data)
- img_idx += 1
-
- all_data["objects"] = objs
- with open(out_file, "w") as fh:
- json.dump(all_data, fh, indent=2)
-
-def explore(path: str, res_dir: str):
- if not os.path.isdir(path):
- return
- folders = [
- os.path.join(path, o)
- for o in os.listdir(path)
- if os.path.isdir(os.path.join(path, o))
- ]
- for path_entry in folders:
- if os.path.isfile(os.path.join(path_entry,FILE_GT_COCO)) and \
- os.path.isfile(os.path.join(path_entry,FILE_GT_INFO)) and \
- os.path.isfile(os.path.join(path_entry,FILE_GT)):
- gt_parse(path_entry, res_dir)
- else:
- explore(path_entry, res_dir)
-
-def BOP2DOPE_dataset(dpath: str, out_dir: str) -> str:
- """ Convert BOP-dataset to YOLO format for train """
- res_dir = os.path.join(out_dir, DIR_ROOT_DS)
- if os.path.isdir(res_dir):
- shutil.rmtree(res_dir)
- os.mkdir(res_dir)
-
- explore(dpath, res_dir)
-
- return out_dir
-
-def train(dopepath:str, wname:str, epochs:int, pretrain: bool, lname: list):
- import random
- # try:
- import configparser as configparser
- # except ImportError:
- # import ConfigParser as configparser
- import torch
- # import torch.nn.parallel
- import torch.optim as optim
- import torch.utils.data
- import torchvision.transforms as transforms
- from torch.autograd import Variable
- import datetime
- from tensorboardX import SummaryWriter
-
- from models_dope import DopeNetwork
- from utils_dope import CleanVisiiDopeLoader #, VisualizeBeliefMap, save_image
-
- import warnings
- warnings.filterwarnings("ignore")
-
- os.environ["CUDA_VISIBLE_DEVICES"] = "0,1,2,3,4,5,6,7"
-
- torch.autograd.set_detect_anomaly(False)
- torch.autograd.profiler.profile(False)
- torch.autograd.gradcheck = False
- torch.backends.cudnn.benchmark = True
-
- start_time = datetime.datetime.now()
- print("start:", start_time.strftime("%m/%d/%Y, %H:%M:%S"))
-
- res_model = os.path.join(dopepath, wname + EXT_MODEL)
-
- local_rank = 0
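- # a bare lambda is used below as a lightweight attribute container for the
- # training options, in place of an argparse.Namespace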
- opt = lambda: None
- opt.use_s3 = False
- opt.train_buckets = []
- opt.endpoint = None
- opt.lr=0.0001
- opt.loginterval=100
- opt.sigma=0.5 # 4
- opt.nbupdates=None
- # opt.save=False
- # opt.option="default"
- # opt.gpuids=[0]
-
- opt.namefile=FILE_MODEL
- opt.workers=8
- opt.batchsize=16
-
- opt.data = [os.path.join(dopepath, DIR_ROOT_DS)]
- opt.outf = os.path.join(dopepath, DIR_TRAIN_OUT)
- opt.object = lname #["fork"]
- opt.exts = [EXT_RGB]
- # opt.imagesize = im_width
- opt.epochs = epochs
- opt.pretrained = pretrain
- opt.net_path = res_model if pretrain else None
- opt.manualseed = random.randint(1, 10000)
-
- # # Validate Arguments
- # if opt.use_s3 and (opt.train_buckets is None or opt.endpoint is None):
- # raise ValueError(
- # "--train_buckets and --endpoint must be specified if training with data from s3 bucket."
- # )
- # if not opt.use_s3 and opt.data is None:
- # raise ValueError("--data field must be specified.")
-
- os.makedirs(opt.outf, exist_ok=True)
-
- # if local_rank == 0:
- # writer = SummaryWriter(opt.outf + "/runs/")
- random.seed(opt.manualseed)
- torch.cuda.set_device(local_rank)
- # torch.distributed.init_process_group(backend="nccl", init_method="env://")
- torch.manual_seed(opt.manualseed)
- torch.cuda.manual_seed_all(opt.manualseed)
-
- # # Data Augmentation
- # if not opt.save:
- # contrast = 0.2
- # brightness = 0.2
- # noise = 0.1
- # normal_imgs = [0.59, 0.25]
- # transform = transforms.Compose(
- # [
- # AddRandomContrast(0.2),
- # AddRandomBrightness(0.2),
- # transforms.Resize(opt.imagesize),
- # ]
- # )
- # else:
- # contrast = 0.00001
- # brightness = 0.00001
- # noise = 0.00001
- # normal_imgs = None
- # transform = transforms.Compose(
- # [transforms.Resize(opt.imagesize), transforms.ToTensor()]
- # )
-
- # Load Model
- net = DopeNetwork()
- output_size = 50
- # opt.sigma = 0.5
-
- train_dataset = CleanVisiiDopeLoader(
- opt.data,
- sigma=opt.sigma,
- output_size=output_size,
- extensions=opt.exts,
- objects=opt.object,
- use_s3=opt.use_s3,
- buckets=opt.train_buckets,
- endpoint_url=opt.endpoint,
- )
- trainingdata = torch.utils.data.DataLoader(
- train_dataset,
- batch_size=opt.batchsize,
- shuffle=True,
- num_workers=opt.workers,
- pin_memory=True,
- )
- if trainingdata is not None:
- print(f"training data: {len(trainingdata)} batches")
-
- print("Loading Model...")
- net = net.cuda()
- # net = torch.nn.parallel.DistributedDataParallel(
- # net.cuda(), device_ids=[local_rank], output_device=local_rank
- # )
- if opt.pretrained:
- if opt.net_path is not None:
- net.load_state_dict(torch.load(opt.net_path))
- else:
- print("Error: Did not specify path to pretrained weights.")
- quit()
-
- parameters = filter(lambda p: p.requires_grad, net.parameters())
- optimizer = optim.Adam(parameters, lr=opt.lr)
-
- print("ready to train!")
-
- global nb_update_network
- nb_update_network = 0
- # best_results = {"epoch": None, "passed": None, "add_mean": None, "add_std": None}
-
- scaler = torch.cuda.amp.GradScaler()
-
- def _runnetwork(epoch, train_loader): #, syn=False
- global nb_update_network
- # net
- net.train()
-
- loss_avg_to_log = {}
- loss_avg_to_log["loss"] = []
- loss_avg_to_log["loss_affinities"] = []
- loss_avg_to_log["loss_belief"] = []
- loss_avg_to_log["loss_class"] = []
- for batch_idx, targets in enumerate(train_loader):
- optimizer.zero_grad()
-
- data = Variable(targets["img"].cuda())
- target_belief = Variable(targets["beliefs"].cuda())
- target_affinities = Variable(targets["affinities"].cuda())
-
- output_belief, output_aff = net(data)
-
- loss = None
-
- loss_belief = torch.tensor(0).float().cuda()
- loss_affinities = torch.tensor(0).float().cuda()
- loss_class = torch.tensor(0).float().cuda()
-
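- # every stage's belief and affinity maps contribute a mean-squared error to the total loss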
- for stage in range(len(output_aff)): # iterate over the network's stage outputs
- loss_affinities += (
- (output_aff[stage] - target_affinities)
- * (output_aff[stage] - target_affinities)
- ).mean()
-
- loss_belief += (
- (output_belief[stage] - target_belief)
- * (output_belief[stage] - target_belief)
- ).mean()
-
- loss = loss_affinities + loss_belief
-
- # if batch_idx == 0:
- # post = "train"
- # if local_rank == 0:
- # for i_output in range(1):
- # # input images
- # writer.add_image(
- # f"{post}_input_{i_output}",
- # targets["img_original"][i_output],
- # epoch,
- # dataformats="CWH",
- # )
- # # belief maps gt
- # imgs = VisualizeBeliefMap(target_belief[i_output])
- # img, grid = save_image(
- # imgs, "some_img.png", mean=0, std=1, nrow=3, save=False
- # )
- # writer.add_image(
- # f"{post}_belief_ground_truth_{i_output}",
- # grid,
- # epoch,
- # dataformats="CWH",
- # )
- # # belief maps guess
- # imgs = VisualizeBeliefMap(output_belief[-1][i_output])
- # img, grid = save_image(
- # imgs, "some_img.png", mean=0, std=1, nrow=3, save=False
- # )
- # writer.add_image(
- # f"{post}_belief_guess_{i_output}",
- # grid,
- # epoch,
- # dataformats="CWH",
- # )
-
- loss.backward()
-
- optimizer.step()
-
- nb_update_network += 1
-
- # log the loss
- loss_avg_to_log["loss"].append(loss.item())
- loss_avg_to_log["loss_class"].append(loss_class.item())
- loss_avg_to_log["loss_affinities"].append(loss_affinities.item())
- loss_avg_to_log["loss_belief"].append(loss_belief.item())
-
- if batch_idx % opt.loginterval == 0:
- print(
- "Train Epoch: {} [{}/{} ({:.0f}%)] \tLoss: {:.15f} \tLocal Rank: {}".format(
- epoch,
- batch_idx * len(data),
- len(train_loader.dataset),
- 100.0 * batch_idx / len(train_loader),
- loss.item(),
- local_rank,
- )
- )
- # # log the loss values
- # if local_rank == 0:
- # writer.add_scalar("loss/train_loss", np.mean(loss_avg_to_log["loss"]), epoch)
- # writer.add_scalar("loss/train_cls", np.mean(loss_avg_to_log["loss_class"]), epoch)
- # writer.add_scalar("loss/train_aff", np.mean(loss_avg_to_log["loss_affinities"]), epoch)
- # writer.add_scalar("loss/train_bel", np.mean(loss_avg_to_log["loss_belief"]), epoch)
-
- for epoch in range(1, opt.epochs + 1):
-
- _runnetwork(epoch, trainingdata)
-
- try:
- if local_rank == 0:
- torch.save(
- net.state_dict(),
- f"{opt.outf}/{opt.namefile}_{str(epoch).zfill(3)}.pth",
- )
- except Exception as e:
- print(f"Encountered Exception: {e}")
-
- if opt.nbupdates is not None and nb_update_network > int(opt.nbupdates):
- break
-
- # if local_rank == 0:
- # save result model
- torch.save(net.state_dict(), res_model) #os.path.join(dopepath, wname + EXT_MODEL))
- # else:
- # torch.save(
- # net.state_dict(),
- # f"{opt.outf}/{opt.namefile}_{str(epoch).zfill(3)}_rank_{local_rank}.pth",
- # )
-
- print("end:", datetime.datetime.now().strftime("%m/%d/%Y, %H:%M:%S"))
- print("Total time taken: ", str(datetime.datetime.now() - start_time).split(".")[0])
-
-def train_Dope_i(path:str, wname:str, dname:str, outpath:str, epochs:int, pretrain: bool):
- """ Main procedure for train DOPE model """
- global K_intrinsic, model_info, camera_data, im_width
-
- if not os.path.isdir(outpath):
- print(f"Invalid output path '{outpath}'")
- exit(-1)
- out_dir = os.path.join(outpath, wname)
- ds_path = os.path.join(path, dname)
-
- if not os.path.isdir(ds_path):
- print(f"{ds_path} : no BOP directory")
- return ""
-
- camera_json = os.path.join(ds_path, FILE_CAMERA)
- if not os.path.isfile(camera_json):
- print(f"{camera_json} : no intrinsic camera file")
- return ""
-
- rbs_info = os.path.join(ds_path, FILE_RBS_INFO)
- if not os.path.isfile(rbs_info):
- print(f"{rbs_info} : no dataset info file")
- return ""
-
- camera_data = {}
- with open(camera_json, "r") as fh:
- data = json.load(fh)
- keys = ["cx","cy","fx","fy"]
- intrinsic = {k: data[k] for k in keys}
- im_height = data["height"]
- im_width = data["width"]
- camera_data["camera_data"] = dict(intrinsic=intrinsic, height=im_height, width=im_width)
- K_intrinsic = [
- [data["fx"], 0.0, data["cx"]],
- [0.0, data["fy"], data["cy"]],
- [0.0, 0.0, 1.0]
- ]
- # calc cuboid + center
- with open(rbs_info, "r") as fh:
- info = json.load(fh)
- # list of object names
- list_name = list(map(lambda x: x["name"], info))
- # in FILE_RBS_INFO the models are numbered in ascending order
- model_info = []
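- # each model_info entry stacks the cuboid corners with the computed centre;
- # these are the keypoints later projected into "projected_cuboid"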
- for m_info in info:
- cub = np.array(m_info["cuboid"]) * MODEL_SCALE
- xyz_min = cub.min(axis=0)
- xyz_max = cub.max(axis=0)
- # [xc, yc, zc] # min + (max - min) / 2
- center = []
- for i in range(3):
- center.append(xyz_min[i] + (xyz_max[i]- xyz_min[i]) / 2)
- c = np.array(center, ndmin=2)
- model_info.append(np.append(cub, c, axis=0))
-
- if pretrain:
- # resume training
- if not os.path.isdir(out_dir):
- print(f"No dir '{out_dir}'")
- exit(-2)
- dpath = out_dir
- # model_path = os.path.join(dpath, wname + ".pt")
- else:
- # train from scratch
- if not os.path.isdir(out_dir):
- os.mkdir(out_dir)
-
- dpath = BOP2DOPE_dataset(ds_path, out_dir)
- if len(dpath) == 0:
- print(f"Error in convert dataset '{ds_path}' to '{outpath}'")
- exit(-4)
- # model_path = os.path.join(dpath, FILE_BASEMODEL)
-
- # results = f"python train.py --local_rank 0 --data {dpath} --object fork" \
- # + f" -e {epochs} --batchsize 16 --exts jpg --imagesize 640 --pretrained" \
- # + " --net_path /home/shalenikol/fork_work/dope_training/output/weights_2996/net_epoch_47.pth"
- # print(results)
- train(dpath, wname, epochs, pretrain, list_name)
-
-import argparse
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser()
- parser.add_argument("--path", required=True, help="Path for dataset")
- parser.add_argument("--name", required=True, help="String with result weights name")
- parser.add_argument("--datasetName", required=True, help="String with dataset name")
- parser.add_argument("--outpath", default="weights", help="Output path for weights")
- parser.add_argument("--epoch", default=3, help="How many training epochs")
- parser.add_argument('--pretrain', action="store_true", help="Use pretraining")
- args = parser.parse_args()
-
- train_Dope_i(args.path, args.name, args.datasetName, args.outpath, args.epoch, args.pretrain)
diff --git a/train_models/train_Yolo.py b/train_models/train_Yolo.py
deleted file mode 100644
index 1eaf7a0..0000000
--- a/train_models/train_Yolo.py
+++ /dev/null
@@ -1,181 +0,0 @@
-"""
- train_Yolo
- General task: object detection
- Implemented function: training a YoloV8 neural network model on a given BOP dataset
-
- python3 $PYTHON_TRAIN --path /Users/idontsudo/webservice/server/build/public/7065d6b6-c8a3-48c5-9679-bb8f3a690296/datasets \
- --name test123 --datasetName ds213 --outpath /Users/idontsudo/webservice/server/build/public/7065d6b6-c8a3-48c5-9679-bb8f3a690296/weights
-
- 27.04.2024 @shalenikol release 0.1
-"""
-import os
-import shutil
-import json
-import yaml
-
-from ultralytics import YOLO
-# from ultralytics.utils.metrics import DetMetrics
-
-FILE_BASEMODEL = "yolov8n.pt"
-FILE_RBS_INFO = "rbs_info.json"
-FILE_RBS_TRAIN = "rbs_train.yaml"
-FILE_GT_COCO = "scene_gt_coco.json"
-FILE_L_TRAIN = "i_train.txt"
-FILE_L_VAL = "i_val.txt"
-FILE_TRAIN_RES = "weights/last.pt"
-DIR_ROOT_DS = "datasets"
-DIR_COCO_DS = "rbs_coco"
-DIR_RGB_DS = "images"
-DIR_LABELS_DS = "labels"
-
-SZ_SERIES = 15 # every SZ_SERIES-th image goes to the validation split, the rest to training
-
-nn_image = 0
-f1 = f2 = None
-
-def convert2relative(height, width, bbox):
- """ YOLO format use relative coordinates for annotation """
- x, y, w, h = bbox
- x += w/2
- y += h/2
- return x/width, y/height, w/width, h/height
-
-def gt_parse(path: str, out_dir: str):
- global nn_image, f1, f2
- with open(os.path.join(path, FILE_GT_COCO), "r") as fh:
- coco_data = json.load(fh)
-
- for img in coco_data["images"]:
- rgb_file = os.path.join(path, img["file_name"])
- if os.path.isfile(rgb_file):
- ext = os.path.splitext(rgb_file)[1] # only ext
- f = f"{nn_image:06}"
- out_img = os.path.join(out_dir, DIR_RGB_DS, f + ext)
- shutil.copy2(rgb_file, out_img)
-
- # write the bbox label files
- img_id = img["id"]
- with open(os.path.join(out_dir, DIR_LABELS_DS, f + ".txt"), "w") as fh:
- for i in coco_data["annotations"]:
- if i["image_id"] == img_id:
- cat_id = i["category_id"]
- if cat_id < 999:
- bbox = i["bbox"]
- im_h = i["height"]
- im_w = i["width"]
- rel = convert2relative(im_h,im_w,bbox)
- # format: <class_id> <x_center> <y_center> <width> <height> (relative values)
- fh.write(f"{cat_id-1} {rel[0]} {rel[1]} {rel[2]} {rel[3]}\n") # category from 0
-
- nn_image += 1
- line = os.path.join("./", DIR_RGB_DS, f + ext) + "\n"
- if nn_image % SZ_SERIES == 0:
- f2.write(line)
- else:
- f1.write(line)
-
-def explore(path: str, res_dir: str):
- if not os.path.isdir(path):
- return
- folders = [
- os.path.join(path, o)
- for o in os.listdir(path)
- if os.path.isdir(os.path.join(path, o))
- ]
- for path_entry in folders:
- if os.path.isfile(os.path.join(path_entry,FILE_GT_COCO)):
- gt_parse(path_entry, res_dir)
- else:
- explore(path_entry, res_dir)
-
-def BOP2Yolo_dataset(dpath: str, out_dir: str, lname: list) -> str:
- """ Convert BOP-dataset to YOLO format for train """
- cfg_yaml = os.path.join(out_dir, FILE_RBS_TRAIN)
- p = os.path.join(out_dir, DIR_ROOT_DS, DIR_COCO_DS)
- cfg_data = {"path": p, "train": FILE_L_TRAIN, "val": FILE_L_VAL}
- cfg_data["names"] = {i:x for i,x in enumerate(lname)}
- with open(cfg_yaml, "w") as fh:
- yaml.dump(cfg_data, fh)
-
- res_dir = os.path.join(out_dir, DIR_ROOT_DS)
- if not os.path.isdir(res_dir):
- os.mkdir(res_dir)
-
- res_dir = os.path.join(res_dir, DIR_COCO_DS)
- if not os.path.isdir(res_dir):
- os.mkdir(res_dir)
-
- p = os.path.join(res_dir, DIR_RGB_DS)
- if not os.path.isdir(p):
- os.mkdir(p)
- p = os.path.join(res_dir, DIR_LABELS_DS)
- if not os.path.isdir(p):
- os.mkdir(p)
-
- global f1, f2
- f1 = open(os.path.join(res_dir, FILE_L_TRAIN), "w")
- f2 = open(os.path.join(res_dir, FILE_L_VAL), "w")
- explore(dpath, res_dir)
- f1.close()
- f2.close()
-
- return out_dir
-
-def train_YoloV8(path:str, wname:str, dname:str, outpath:str, epochs:int, pretrain: bool):
- """ Main procedure for train YOLOv8 model """
- if not os.path.isdir(outpath):
- print(f"Invalid output path '{outpath}'")
- exit(-1)
- out_dir = os.path.join(outpath, wname)
-
- if pretrain:
- # resume training
- if not os.path.isdir(out_dir):
- print(f"No dir '{out_dir}'")
- exit(-2)
- dpath = out_dir
- model_path = os.path.join(dpath, wname + ".pt")
- else:
- # train from scratch
- if not os.path.isdir(out_dir):
- os.mkdir(out_dir)
-
- ds_path = os.path.join(path, dname)
- rbs_info = os.path.join(ds_path, FILE_RBS_INFO)
- if not os.path.isfile(rbs_info):
- print(f"{rbs_info} : no dataset description file")
- exit(-3)
-
- with open(rbs_info, "r") as fh:
- y = json.load(fh)
- # list of object names
- list_name = list(map(lambda x: x["name"], y))
-
- dpath = BOP2Yolo_dataset(ds_path, out_dir, list_name)
- if len(dpath) == 0:
- print(f"Error in convert dataset '{ds_path}' to '{outpath}'")
- exit(-4)
- model_path = os.path.join(dpath, FILE_BASEMODEL)
-
- model = YOLO(model_path)
- results = model.train(data=os.path.join(dpath, FILE_RBS_TRAIN), epochs=epochs, project=out_dir)
- wf = os.path.join(results.save_dir, FILE_TRAIN_RES)
- if not os.path.isfile(wf):
- print(f"Error in train: no result file '{wf}'")
- exit(-5)
-
- shutil.copy2(wf, os.path.join(dpath, wname + ".pt"))
- shutil.rmtree(results.save_dir)
-
-if __name__ == "__main__":
- import argparse
- parser = argparse.ArgumentParser()
- parser.add_argument("--path", required=True, help="Path for dataset")
- parser.add_argument("--name", required=True, help="String with result weights name")
- parser.add_argument("--datasetName", required=True, help="String with dataset name")
- parser.add_argument("--outpath", default="weights", help="Output path for weights")
- parser.add_argument("--epoch", default=3, type=int, help="How many training epochs")
- parser.add_argument('--pretrain', action="store_true", help="Use pretraining")
- args = parser.parse_args()
-
- train_YoloV8(args.path, args.name, args.datasetName, args.outpath, args.epoch, args.pretrain)
diff --git a/train_models/utils_dope.py b/train_models/utils_dope.py
deleted file mode 100755
index 55ab058..0000000
--- a/train_models/utils_dope.py
+++ /dev/null
@@ -1,967 +0,0 @@
-"""
-NVIDIA from jtremblay@gmail.com
-"""
-import numpy as np
-import torch
-
-import os
-
-import torch
-import torch.nn as nn
-import torch.nn.parallel
-
-import torch.utils.data
-
-import torchvision.transforms as transforms
-
-import torch.utils.data as data
-import glob
-import os
-import boto3
-import io
-
-from PIL import Image
-from PIL import ImageDraw
-from PIL import ImageEnhance
-
-from math import acos
-from math import sqrt
-from math import pi
-
-from os.path import exists, basename
-import json
-from os.path import join
-
-import albumentations as A
-
-
-def default_loader(path):
- return Image.open(path).convert("RGB")
-
-
-def length(v):
- return sqrt(v[0] ** 2 + v[1] ** 2)
-
-
-def dot_product(v, w):
- return v[0] * w[0] + v[1] * w[1]
-
-
-def normalize(v):
- norm = np.linalg.norm(v, ord=1)
- if norm == 0:
- norm = np.finfo(v.dtype).eps
- return v / norm
-
-
-def determinant(v, w):
- return v[0] * w[1] - v[1] * w[0]
-
-
-def inner_angle(v, w):
- cosx = dot_product(v, w) / (length(v) * length(w))
- rad = acos(cosx) # in radians
- return rad * 180 / pi # returns degrees
-
-
-def py_ang(A, B=(1, 0)):
- inner = inner_angle(A, B)
- det = determinant(A, B)
- if (
- det < 0
- ): # this is a property of the det. If the det < 0 then B is clockwise of A
- return inner
- else: # if the det > 0 then A is immediately clockwise of B
- return 360 - inner
-
-
-import colorsys, math
-
-
-def append_dot(extensions):
- res = []
-
- for ext in extensions:
- if not ext.startswith("."):
- res.append(f".{ext}")
- else:
- res.append(ext)
-
- return res
-
-
-def loadimages(root, extensions=["png"]):
- imgs = []
- extensions = append_dot(extensions)
-
- def add_json_files(
- path,
- ):
- for ext in extensions:
- for file in os.listdir(path):
- imgpath = os.path.join(path, file)
- if (
- imgpath.endswith(ext)
- and exists(imgpath)
- and exists(imgpath.replace(ext, ".json"))
- ):
- imgs.append(
- (
- imgpath,
- imgpath.replace(path, "").replace("/", ""),
- imgpath.replace(ext, ".json"),
- )
- )
-
- def explore(path):
- if not os.path.isdir(path):
- return
- folders = [
- os.path.join(path, o)
- for o in os.listdir(path)
- if os.path.isdir(os.path.join(path, o))
- ]
-
- for path_entry in folders:
- explore(path_entry)
-
- add_json_files(path)
-
- explore(root)
-
- return imgs
-
-
-def loadweights(root):
- if root.endswith(".pth") and os.path.isfile(root):
- return [root]
- else:
- weights = [
- os.path.join(root, f)
- for f in os.listdir(root)
- if os.path.isfile(os.path.join(root, f)) and f.endswith(".pth")
- ]
-
- weights.sort()
- return weights
-
-
-def loadimages_inference(root, extensions):
- imgs, imgsname = [], []
- extensions = append_dot(extensions)
-
- def add_imgs(
- path,
- ):
- for ext in extensions:
- for file in os.listdir(path):
- imgpath = os.path.join(path, file)
- if imgpath.endswith(ext) and exists(imgpath):
- imgs.append(imgpath)
- imgsname.append(imgpath.replace(root, ""))
-
- def explore(path):
- if not os.path.isdir(path):
- return
- folders = [
- os.path.join(path, o)
- for o in os.listdir(path)
- if os.path.isdir(os.path.join(path, o))
- ]
-
- for path_entry in folders:
- explore(path_entry)
-
- add_imgs(path)
-
- explore(root)
-
- return imgs, imgsname
-
-
-class CleanVisiiDopeLoader(data.Dataset):
- def __init__(
- self,
- path_dataset,
- objects=None,
- sigma=1,
- output_size=400,
- extensions=["png"],
- debug=False,
- use_s3=False,
- buckets=[],
- endpoint_url=None,
- ):
- ###################
- self.path_dataset = path_dataset
- self.objects_interest = objects
- self.sigma = sigma
- self.output_size = output_size
- self.extensions = append_dot(extensions)
- self.debug = debug
- ###################
-
- self.imgs = []
- self.s3_buckets = {}
- self.use_s3 = use_s3
-
- if self.use_s3:
- self.session = boto3.Session()
- self.s3 = self.session.resource(
- service_name="s3", endpoint_url=endpoint_url
- )
-
- for bucket_name in buckets:
- try:
- self.s3_buckets[bucket_name] = self.s3.Bucket(bucket_name)
- except Exception as e:
- print(
- f"Error trying to load bucket {bucket_name} for training data:",
- e,
- )
-
- for bucket in self.s3_buckets:
- bucket_objects = [
- str(obj.key) for obj in self.s3_buckets[bucket].objects.all()
- ]
-
- jsons = set([json for json in bucket_objects if json.endswith(".json")])
- imgs = [
- img
- for img in bucket_objects
- if img.endswith(tuple(self.extensions))
- ]
-
- for ext in self.extensions:
- for img in imgs:
- # Only add images that have a ground truth file
- if img.endswith(ext) and img.replace(ext, ".json") in jsons:
- # (img key, bucket name, json key)
- self.imgs.append((img, bucket, img.replace(ext, ".json")))
-
- else:
- for path_look in path_dataset:
- self.imgs += loadimages(path_look, extensions=self.extensions)
-
- # np.random.shuffle(self.imgs)
- print("Number of Training Images:", len(self.imgs))
- print(self.imgs)
-
- if debug:
- print("Debuging will be save in debug/")
- if os.path.isdir("debug"):
- print(f'folder {"debug"}/ exists')
- else:
- os.mkdir("debug")
- print(f'created folder {"debug"}/')
-
- def __len__(self):
- return len(self.imgs)
-
- def __getitem__(self, index):
-
- # load the data
- if self.use_s3:
- img_key, bucket, json_key = self.imgs[index]
- mem_img = io.BytesIO()
-
- object_img = self.s3_buckets[bucket].Object(img_key)
- object_img.download_fileobj(mem_img)
-
- img = np.array(Image.open(mem_img).convert("RGB"))
-
- object_json = self.s3_buckets[bucket].Object(json_key)
- data_json = json.load(object_json.get()["Body"])
-
- img_name = img_key[:-3]
-
- else:
- path_img, img_name, path_json = self.imgs[index]
-
- # load the image
- img = np.array(Image.open(path_img).convert("RGB"))
-
- # load the json file
- with open(path_json) as f:
- data_json = json.load(f)
-
- all_projected_cuboid_keypoints = []
-
- # load the projected cuboid keypoints
- for obj in data_json["objects"]:
- if (
- self.objects_interest is not None
- and not obj["class"] in self.objects_interest
- ):
- continue
- # load the projected_cuboid_keypoints
- # 06.02.2024 @shalenikol
- # if obj["visibility_image"] > 0:
- if obj["visibility"] > 0:
- projected_cuboid_keypoints = obj["projected_cuboid"]
- # FAT dataset only has 8 corners for 'projected_cuboid'
- if len(projected_cuboid_keypoints) == 8:
- projected_cuboid_keypoints.append(obj["projected_cuboid_centroid"])
- else:
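- # objects that are not visible get all keypoints placed at (-100, -100),
- # far outside the image, so they contribute empty belief/affinity targets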
- projected_cuboid_keypoints = [
- [-100, -100],
- [-100, -100],
- [-100, -100],
- [-100, -100],
- [-100, -100],
- [-100, -100],
- [-100, -100],
- [-100, -100],
- [-100, -100],
- ]
- all_projected_cuboid_keypoints.append(projected_cuboid_keypoints)
-
- if len(all_projected_cuboid_keypoints) == 0:
- all_projected_cuboid_keypoints = [
- [
- [-100, -100],
- [-100, -100],
- [-100, -100],
- [-100, -100],
- [-100, -100],
- [-100, -100],
- [-100, -100],
- [-100, -100],
- [-100, -100],
- ]
- ]
-
- # flatten the keypoints
- flatten_projected_cuboid = []
- for obj in all_projected_cuboid_keypoints:
- for p in obj:
- flatten_projected_cuboid.append(p)
-
- #######
- if self.debug:
- img_to_save = Image.fromarray(img)
- draw = ImageDraw.Draw(img_to_save)
-
- for ip, p in enumerate(flatten_projected_cuboid):
- draw.ellipse(
- (int(p[0]) - 2, int(p[1]) - 2, int(p[0]) + 2, int(p[1]) + 2),
- fill="green",
- )
-
- img_to_save.save(f"debug/{img_name.replace('.png','_original.png')}")
- #######
-
- # data augmentation
- transform = A.Compose(
- [
- A.RandomCrop(width=400, height=400),
- A.Rotate(limit=180),
- A.RandomBrightnessContrast(
- brightness_limit=0.2, contrast_limit=0.15, p=1
- ),
- A.GaussNoise(p=1),
- ],
- keypoint_params=A.KeypointParams(format="xy", remove_invisible=False),
- )
- transformed = transform(image=img, keypoints=flatten_projected_cuboid)
- img_transformed = transformed["image"]
- flatten_projected_cuboid_transformed = transformed["keypoints"]
-
- #######
-
- # transform to the final output
- if not self.output_size == 400:
- transform = A.Compose(
- [
- A.Resize(width=self.output_size, height=self.output_size),
- ],
- keypoint_params=A.KeypointParams(format="xy", remove_invisible=False),
- )
- transformed = transform(
- image=img_transformed, keypoints=flatten_projected_cuboid_transformed
- )
- img_transformed_output_size = transformed["image"]
- flatten_projected_cuboid_transformed_output_size = transformed["keypoints"]
-
- else:
- img_transformed_output_size = img_transformed
- flatten_projected_cuboid_transformed_output_size = (
- flatten_projected_cuboid_transformed
- )
-
- #######
- if self.debug:
- img_transformed_saving = Image.fromarray(img_transformed)
-
- draw = ImageDraw.Draw(img_transformed_saving)
-
- for ip, p in enumerate(flatten_projected_cuboid_transformed):
- draw.ellipse(
- (int(p[0]) - 2, int(p[1]) - 2, int(p[0]) + 2, int(p[1]) + 2),
- fill="green",
- )
-
- img_transformed_saving.save(
- f"debug/{img_name.replace('.png','_transformed.png')}"
- )
- #######
-
- # update the keypoints list
- # obj x keypoint_id x (x,y)
- i_all = 0
- for i_obj, obj in enumerate(all_projected_cuboid_keypoints):
- for i_p, point in enumerate(obj):
- all_projected_cuboid_keypoints[i_obj][
- i_p
- ] = flatten_projected_cuboid_transformed_output_size[i_all]
- i_all += 1
-
- # generate the belief maps
- beliefs = CreateBeliefMap(
- size=int(self.output_size),
- pointsBelief=all_projected_cuboid_keypoints,
- sigma=self.sigma,
- nbpoints=9,
- save=False,
- )
- beliefs = torch.from_numpy(np.array(beliefs))
- # generate affinity fields with centroid.
- affinities = GenerateMapAffinity(
- size=int(self.output_size),
- nb_vertex=8,
- pointsInterest=all_projected_cuboid_keypoints,
- objects_centroid=np.array(all_projected_cuboid_keypoints)[:, -1].tolist(),
- scale=1,
- )
-
- # prepare for the image tensors
- normalize_tensor = transforms.Compose(
- [
- transforms.ToTensor(),
- transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)),
- ]
- )
- to_tensor = transforms.Compose(
- [
- transforms.ToTensor(),
- ]
- )
- img_tensor = normalize_tensor(Image.fromarray(img_transformed))
- img_original = to_tensor(img_transformed)
-
- ########
- if self.debug:
- imgs = VisualizeBeliefMap(beliefs)
- img, grid = save_image(
- imgs,
- f"debug/{img_name.replace('.png','_beliefs.png')}",
- mean=0,
- std=1,
- nrow=3,
- save=True,
- )
- imgs = VisualizeAffinityMap(affinities)
- save_image(
- imgs,
- f"debug/{img_name.replace('.png','_affinities.png')}",
- mean=0,
- std=1,
- nrow=3,
- save=True,
- )
- ########
- img_tensor[torch.isnan(img_tensor)] = 0
- affinities[torch.isnan(affinities)] = 0
- beliefs[torch.isnan(beliefs)] = 0
-
- img_tensor[torch.isinf(img_tensor)] = 0
- affinities[torch.isinf(affinities)] = 0
- beliefs[torch.isinf(beliefs)] = 0
-
- return {
- "img": img_tensor,
- "affinities": torch.clamp(affinities, -1, 1),
- "beliefs": torch.clamp(beliefs, 0, 1),
- "file_name": img_name,
- "img_original": img_original,
- }
-
-
-def VisualizeAffinityMap(
- tensor,
- # tensor of (len(keypoints)*2)xwxh
- threshold_norm_vector=0.4,
- # how long does the vector has to be to be drawn
- points=None,
- # list of points to draw in white on top of the image
- factor=1.0,
- # by how much the image was reduced, scale factor
- translation=(0, 0)
- # by how much the points were moved
- # return len(keypoints)x3xwxh # stack of images
-):
- images = torch.zeros(tensor.shape[0] // 2, 3, tensor.shape[1], tensor.shape[2])
- for i_image in range(0, tensor.shape[0], 2): # could be read as i_keypoint
-
- indices = (
- torch.abs(tensor[i_image, :, :]) + torch.abs(tensor[i_image + 1, :, :])
- > threshold_norm_vector
- ).nonzero()
-
- for indice in indices:
-
- i, j = indice
-
- angle_vector = np.array([tensor[i_image, i, j], tensor[i_image + 1, i, j]])
- if length(angle_vector) > threshold_norm_vector:
- angle = py_ang(angle_vector)
- c = colorsys.hsv_to_rgb(angle / 360, 1, 1)
- else:
- c = [0, 0, 0]
- for i_c in range(3):
- images[i_image // 2, i_c, i, j] = c[i_c]
- if not points is None:
- point = points[i_image // 2]
-
- print(
- int(point[1] * factor + translation[1]),
- int(point[0] * factor + translation[0]),
- )
- images[
- i_image // 2,
- :,
- int(point[1] * factor + translation[1])
- - 1 : int(point[1] * factor + translation[1])
- + 1,
- int(point[0] * factor + translation[0])
- - 1 : int(point[0] * factor + translation[0])
- + 1,
- ] = 1
-
- return images
-
-
-def VisualizeBeliefMap(
- tensor,
- # tensor of len(keypoints)xwxh
- points=None,
- # list of points to draw on top of the image
- factor=1.0,
- # by how much the image was reduced, scale factor
- translation=(0, 0)
- # by how much the points were moved
- # return len(keypoints)x3xwxh # stack of images in torch tensor
-):
- images = torch.zeros(tensor.shape[0], 3, tensor.shape[1], tensor.shape[2])
- for i_image in range(0, tensor.shape[0]): # could be read as i_keypoint
-
- belief = tensor[i_image].clone()
- belief -= float(torch.min(belief).item())
- belief /= float(torch.max(belief).item())
-
- belief = torch.clamp(belief, 0, 1)
- belief = torch.cat(
- [belief.unsqueeze(0), belief.unsqueeze(0), belief.unsqueeze(0)]
- ).unsqueeze(0)
-
- images[i_image] = belief
-
- return images
-
-
-def GenerateMapAffinity(
- size, nb_vertex, pointsInterest, objects_centroid, scale, save=False
-):
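- # Each affinity field stores, for pixels around a cuboid corner, a unit vector
- # pointing from that corner towards the object's centroid.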
- # Apply the downscale right now, so the vectors are correct.
-
- img_affinity = Image.new("RGB", (int(size / scale), int(size / scale)), "black")
- # create the empty tensors
- totensor = transforms.Compose([transforms.ToTensor()])
-
- affinities = []
- for i_points in range(nb_vertex):
- affinities.append(torch.zeros(2, int(size / scale), int(size / scale)))
-
- for i_pointsImage in range(len(pointsInterest)):
- pointsImage = pointsInterest[i_pointsImage]
- center = objects_centroid[i_pointsImage]
- for i_points in range(nb_vertex):
- point = pointsImage[i_points]
-
- affinity_pair, img_affinity = getAfinityCenter(
- int(size / scale),
- int(size / scale),
- tuple((np.array(pointsImage[i_points]) / scale).tolist()),
- tuple((np.array(center) / scale).tolist()),
- img_affinity=img_affinity,
- radius=1,
- )
-
- affinities[i_points] = (affinities[i_points] + affinity_pair) / 2
-
- # Normalizing
- v = affinities[i_points].numpy()
-
- xvec = v[0]
- yvec = v[1]
-
- norms = np.sqrt(xvec * xvec + yvec * yvec)
- nonzero = norms > 0
-
- xvec[nonzero] /= norms[nonzero]
- yvec[nonzero] /= norms[nonzero]
-
- affinities[i_points] = torch.from_numpy(np.concatenate([[xvec], [yvec]]))
- affinities = torch.cat(affinities, 0)
-
- return affinities
-
-
-def getAfinityCenter(
- width, height, point, center, radius=7, tensor=None, img_affinity=None
-):
- """
- Create the affinity map
- """
- if tensor is None:
- tensor = torch.zeros(2, height, width).float()
-
- # create the canvas for the afinity output
- imgAffinity = Image.new("RGB", (width, height), "black")
- totensor = transforms.Compose([transforms.ToTensor()])
- draw = ImageDraw.Draw(imgAffinity)
- r1 = radius
- p = point
- draw.ellipse((p[0] - r1, p[1] - r1, p[0] + r1, p[1] + r1), (255, 255, 255))
-
- del draw
-
- # compute the array to add the afinity
- array = (np.array(imgAffinity) / 255)[:, :, 0]
-
- angle_vector = np.array(center) - np.array(point)
- angle_vector = normalize(angle_vector)
- affinity = np.concatenate([[array * angle_vector[0]], [array * angle_vector[1]]])
-
- if not img_affinity is None:
- # find the angle vector
- if length(angle_vector) > 0:
- angle = py_ang(angle_vector)
- else:
- angle = 0
- c = np.array(colorsys.hsv_to_rgb(angle / 360, 1, 1)) * 255
- draw = ImageDraw.Draw(img_affinity)
- draw.ellipse(
- (p[0] - r1, p[1] - r1, p[0] + r1, p[1] + r1),
- fill=(int(c[0]), int(c[1]), int(c[2])),
- )
- del draw
- re = torch.from_numpy(affinity).float() + tensor
- return re, img_affinity
-
-
-def CreateBeliefMap(size, pointsBelief, nbpoints, sigma=16, save=False):
- # Create the belief maps in the points
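- # For every keypoint index, draw a 2D Gaussian (std = sigma) at each object's
- # keypoint position; where objects overlap, the per-pixel maximum is kept.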
- beliefsImg = []
- for numb_point in range(nbpoints):
- array = np.zeros([size, size])
- out = np.zeros([size, size])
-
- for point in pointsBelief:
- p = [point[numb_point][1], point[numb_point][0]]
- w = int(sigma * 2)
- if p[0] - w >= 0 and p[0] + w < size and p[1] - w >= 0 and p[1] + w < size:
- for i in range(int(p[0]) - w, int(p[0]) + w + 1):
- for j in range(int(p[1]) - w, int(p[1]) + w + 1):
-
- # if there is already a point there.
- array[i, j] = max(
- np.exp(
- -(
- ((i - p[0]) ** 2 + (j - p[1]) ** 2)
- / (2 * (sigma**2))
- )
- ),
- array[i, j],
- )
-
- beliefsImg.append(array.copy())
-
- if save:
- stack = np.stack([array, array, array], axis=0).transpose(2, 1, 0)
- imgBelief = Image.fromarray((stack * 255).astype("uint8"))
- imgBelief.save("debug/{}.png".format(numb_point))
- return beliefsImg
-
-
-def crop(img, i, j, h, w):
- """Crop the given PIL.Image.
- Args:
- img (PIL.Image): Image to be cropped.
- i: Upper pixel coordinate.
- j: Left pixel coordinate.
- h: Height of the cropped image.
- w: Width of the cropped image.
- Returns:
- PIL.Image: Cropped image.
- """
- return img.crop((j, i, j + w, i + h))
-
-
-class AddRandomContrast(object):
- """
- Apply some random image filters from PIL
- """
-
- def __init__(self, sigma=0.1):
- self.sigma = sigma
-
- def __call__(self, im):
-
- contrast = ImageEnhance.Contrast(im)
-
- im = contrast.enhance(np.random.normal(1, self.sigma))
-
- return im
-
-
-class AddRandomBrightness(object):
- """
- Apply some random image filters from PIL
- """
-
- def __init__(self, sigma=0.1):
- self.sigma = sigma
-
- def __call__(self, im):
-
- contrast = ImageEnhance.Brightness(im)
- im = contrast.enhance(np.random.normal(1, self.sigma))
- return im
-
-
-class AddNoise(object):
- """Given mean: (R, G, B) and std: (R, G, B),
- will normalize each channel of the torch.*Tensor, i.e.
- channel = (channel - mean) / std
- """
-
- def __init__(self, std=0.1):
- self.std = std
-
- def __call__(self, tensor):
- # TODO: make efficient
- t = torch.FloatTensor(tensor.size()).normal_(0, self.std)
-
- t = tensor.add(t)
- t = torch.clamp(t, -1, 1) # this is expansive
- return t
-
-
-irange = range
-
-
-def make_grid(
- tensor,
- nrow=8,
- padding=2,
- normalize=False,
- range=None,
- scale_each=False,
- pad_value=0,
-):
- """Make a grid of images.
- Args:
- tensor (Tensor or list): 4D mini-batch Tensor of shape (B x C x H x W)
- or a list of images all of the same size.
- nrow (int, optional): Number of images displayed in each row of the grid.
- The Final grid size is (B / nrow, nrow). Default is 8.
- padding (int, optional): amount of padding. Default is 2.
- normalize (bool, optional): If True, shift the image to the range (0, 1),
- by subtracting the minimum and dividing by the maximum pixel value.
- range (tuple, optional): tuple (min, max) where min and max are numbers,
- then these numbers are used to normalize the image. By default, min and max
- are computed from the tensor.
- scale_each (bool, optional): If True, scale each image in the batch of
- images separately rather than the (min, max) over all images.
- pad_value (float, optional): Value for the padded pixels.
- Example:
- See this notebook `here `_
- """
- if not (
- torch.is_tensor(tensor)
- or (isinstance(tensor, list) and all(torch.is_tensor(t) for t in tensor))
- ):
- raise TypeError(
- "tensor or list of tensors expected, got {}".format(type(tensor))
- )
-
- # if list of tensors, convert to a 4D mini-batch Tensor
- if isinstance(tensor, list):
- tensor = torch.stack(tensor, dim=0)
-
- if tensor.dim() == 2: # single image H x W
- tensor = tensor.view(1, tensor.size(0), tensor.size(1))
- if tensor.dim() == 3: # single image
- if tensor.size(0) == 1: # if single-channel, convert to 3-channel
- tensor = torch.cat((tensor, tensor, tensor), 0)
- tensor = tensor.view(1, tensor.size(0), tensor.size(1), tensor.size(2))
-
- if tensor.dim() == 4 and tensor.size(1) == 1: # single-channel images
- tensor = torch.cat((tensor, tensor, tensor), 1)
-
- if normalize is True:
- tensor = tensor.clone() # avoid modifying tensor in-place
- if range is not None:
- assert isinstance(
- range, tuple
- ), "range has to be a tuple (min, max) if specified. min and max are numbers"
-
- def norm_ip(img, min, max):
- img.clamp_(min=min, max=max)
- img.add_(-min).div_(max - min + 1e-5)
-
- def norm_range(t, range):
- if range is not None:
- norm_ip(t, range[0], range[1])
- else:
- norm_ip(t, float(t.min()), float(t.max()))
-
- if scale_each is True:
- for t in tensor: # loop over mini-batch dimension
- norm_range(t, range)
- else:
- norm_range(tensor, range)
-
- if tensor.size(0) == 1:
- return tensor.squeeze()
-
- # make the mini-batch of images into a grid
- nmaps = tensor.size(0)
- xmaps = min(nrow, nmaps)
- ymaps = int(math.ceil(float(nmaps) / xmaps))
- height, width = int(tensor.size(2) + padding), int(tensor.size(3) + padding)
- grid = tensor.new(3, height * ymaps + padding, width * xmaps + padding).fill_(
- pad_value
- )
- k = 0
- for y in irange(ymaps):
- for x in irange(xmaps):
- if k >= nmaps:
- break
- grid.narrow(1, y * height + padding, height - padding).narrow(
- 2, x * width + padding, width - padding
- ).copy_(tensor[k])
- k = k + 1
- return grid
-
-
-def save_image(tensor, filename, nrow=4, padding=2, mean=None, std=None, save=True):
- """
- Saves a given Tensor into an image file.
- If given a mini-batch tensor, will save the tensor as a grid of images.
- """
- from PIL import Image
-
- tensor = tensor.cpu()
- grid = make_grid(tensor, nrow=nrow, padding=10, pad_value=1)
- if not mean is None:
- # ndarr = grid.mul(std).add(mean).mul(255).byte().transpose(0,2).transpose(0,1).numpy()
- ndarr = (
- grid.mul(std)
- .add(mean)
- .mul(255)
- .byte()
- .transpose(0, 2)
- .transpose(0, 1)
- .numpy()
- )
- else:
- ndarr = (
- grid.mul(0.5)
- .add(0.5)
- .mul(255)
- .byte()
- .transpose(0, 2)
- .transpose(0, 1)
- .numpy()
- )
- im = Image.fromarray(ndarr)
- if save is True:
- im.save(filename)
- return im, grid
-
-
-from PIL import ImageDraw, Image, ImageFont
-import json
-
-
-class Draw(object):
- """Drawing helper class to visualize the neural network output"""
-
- def __init__(self, im):
- """
- :param im: The image to draw in.
- """
- self.draw = ImageDraw.Draw(im)
- self.width = im.size[0]
-
- def draw_line(self, point1, point2, line_color, line_width=2):
- """Draws line on image"""
- if point1 is not None and point2 is not None:
- self.draw.line([point1, point2], fill=line_color, width=line_width)
-
- def draw_dot(self, point, point_color, point_radius):
- """Draws dot (filled circle) on image"""
- if point is not None:
- xy = [
- point[0] - point_radius,
- point[1] - point_radius,
- point[0] + point_radius,
- point[1] + point_radius,
- ]
- self.draw.ellipse(xy, fill=point_color, outline=point_color)
-
- def draw_text(self, point, text, text_color):
- """Draws text on image"""
- if point is not None:
- self.draw.text(point, text, fill=text_color, font=ImageFont.truetype("misc/arial.ttf", self.width // 50))
-
- def draw_cube(self, points, color=(0, 255, 0)):
- """
- Draws cube with a thick solid line across
- the front top edge and an X on the top face.
- """
- # draw front
- self.draw_line(points[0], points[1], color)
- self.draw_line(points[1], points[2], color)
- self.draw_line(points[3], points[2], color)
- self.draw_line(points[3], points[0], color)
-
- # draw back
- self.draw_line(points[4], points[5], color)
- self.draw_line(points[6], points[5], color)
- self.draw_line(points[6], points[7], color)
- self.draw_line(points[4], points[7], color)
-
- # draw sides
- self.draw_line(points[0], points[4], color)
- self.draw_line(points[7], points[3], color)
- self.draw_line(points[5], points[1], color)
- self.draw_line(points[2], points[6], color)
-
- # draw dots
- self.draw_dot(points[0], point_color=color, point_radius=4)
- self.draw_dot(points[1], point_color=color, point_radius=4)
-
- # draw x on the top
- self.draw_line(points[0], points[5], color)
- self.draw_line(points[1], points[4], color)
-
- # Draw center
- self.draw_dot(points[8], point_color=color, point_radius=6)
-
- for i in range(9):
- self.draw_text(points[i], str(i), (255, 0, 0))
-
-
diff --git a/utils/README.md b/utils/README.md
deleted file mode 100644
index 68336c1..0000000
--- a/utils/README.md
+++ /dev/null
@@ -1,16 +0,0 @@
-## cg.utils
-
-Common utilities shared by all modules of the framework.
-
-### cmd_proc.py
-
-Runs a command-line program with the given parameters and returns its output.
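-
-Example (an illustrative call, not taken from the original docs): `cmd_proc("python3", "script.py", path="/tmp/ds")` runs `python3 script.py --path /tmp/ds` and returns its stdout as a string.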
-
-### custom_parser.py
-
-A thin wrapper around ArgumentParser.
-Makes it possible to pass command-line arguments to scripts run inside Blender and FreeCAD.
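-
-Example (illustrative): in `blender --python my_script.py -- --path /tmp`, the arguments after `--` are parsed by the script rather than by Blender, as described in the module's docstring.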
-
-### get_interfaces.py
-
-Retrieves information about the ROS2 topics of a digital twin or skill package.
\ No newline at end of file
diff --git a/utils/cmd_proc.py b/utils/cmd_proc.py
deleted file mode 100644
index 6ea6ae4..0000000
--- a/utils/cmd_proc.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# coding: utf-8
-# Copyright (C) 2023 Ilia Kurochkin
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-'''
-DESCRIPTION.
-Run a command-line program with positional args and keyword args.
-The program's stdout is returned as the result.
-'''
-__version__ = '0.1'
-
-import subprocess
-
-
-def cmd_proc(*args, **kwargs):
- command = list(args)
- for akey, aval in kwargs.items():
- command.append(f'--{akey}')
- command.append(str(aval))
-
- return subprocess.run(command,
- check=True,
- stdout=subprocess.PIPE,
- encoding='utf-8').stdout
diff --git a/utils/custom_parser.py b/utils/custom_parser.py
deleted file mode 100644
index 6ae4a2a..0000000
--- a/utils/custom_parser.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# coding: utf-8
-# original code https://blender.stackexchange.com/a/134596
-# Copyright (C) 2023 Ilia Kurochkin
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-'''
-DESCRIPTION.
-Custom parser for Blender and FreeCAD.
-This is a thin wrapper around the standard ArgumentParser.
-'''
-__version__ = '0.1'
-
-import argparse
-import sys
-
-
-class CustomArgumentParser(argparse.ArgumentParser):
- """
- This class is identical to its superclass, except for the parse_args
- method (see docstring). It resolves the ambiguity generated when calling
- Blender from the CLI with a python script, and both Blender and the script
- have arguments. E.g., the following call will make Blender crash because
- it will try to process the script's -a and -b flags:
- >>> blender --python my_script.py -a 1 -b 2
-
- To bypass this issue this class uses the fact that Blender will ignore all
- arguments given after a double-dash ('--'). The approach is that all
- arguments before '--' go to Blender, arguments after go to the script.
- The following calls work fine:
- >>> blender --python my_script.py -- -a 1 -b 2
- >>> blender --python my_script.py --
- """
-
- @staticmethod
- def _get_argv_after_doubledash():
- """
- Given the sys.argv as a list of strings, this method returns the
- sublist right after the '--' element (if present, otherwise returns
- an empty list).
- """
- try:
- idx = sys.argv.index("--")
- return sys.argv[idx+1:] # the list after '--'
- except ValueError as e: # '--' not in the list:
- return None
-
- # overrides superclass
- def parse_args(self, args=None, namespace=None):
- """
- This method is expected to behave identically as in the superclass,
- except that the sys.argv list will be pre-processed using
- _get_argv_after_doubledash before. See the docstring of the class for
- usage examples and details.
- """
- return super().parse_args(
- args=args or self._get_argv_after_doubledash(),
- namespace=namespace
- )
-
- def parse_known_args(self, args=None, namespace=None):
- ''' Parse only known args '''
- return super().parse_known_args(
- args=args or self._get_argv_after_doubledash(),
- namespace=namespace
- )
diff --git a/utils/get_interfaces.py b/utils/get_interfaces.py
deleted file mode 100644
index 1b83915..0000000
--- a/utils/get_interfaces.py
+++ /dev/null
@@ -1,68 +0,0 @@
-import argparse
-import os
-import json
-import subprocess
-import signal
-import time
-
-from ros2cli.node.strategy import NodeStrategy
-from ros2topic.api import get_topic_names_and_types
-
-OUTPUT_FILE = "topics.json"
-TOPICS_FILTER = ["/parameter_events", "/rosout"]
-
-def get_script_args(cfg):
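- # cfg is expected to provide "command", "package" and "executable"; an
- # illustrative payload: {"command": "ros2 run", "package": "<pkg>", "executable": "<node>"}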
- args = cfg["command"].split()
- args.append(cfg["package"])
- args.append(cfg["executable"])
- return args
-
-def get_topics(filename, path):
- jsonpath = os.path.join(path, filename)
-
- with NodeStrategy({}) as node:
- topic_names_and_types = get_topic_names_and_types(node=node, include_hidden_topics=False)
-
- topic_info = []
- for (topic_name, topic_types) in topic_names_and_types:
- if not topic_name in TOPICS_FILTER:
- topic_info.append({"name": topic_name, "type": topic_types[0]})
-
- print(f"---> number of topics: {len(topic_info)}")
-
- j_data = {"topics": topic_info}
- with open(jsonpath, "w") as fh:
- json.dump(j_data, fh, indent=2)
-
- for topic in topic_info:
- print(topic["name"])
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser()
- parser.add_argument("--package", required=True, help="Json-string/file with package parameters")
- parser.add_argument("--path", default="", help="Output path")
- parser.add_argument("--json", default=OUTPUT_FILE, help="Output file name in json-format")
- parser.add_argument('--delay', default=5, type=int, help="Delay in seconds")
- args = parser.parse_args()
-
- if args.package[-5:] == ".json":
- if not os.path.isfile(args.package):
- print(f"Error: no such file '{args.package}'")
- exit(-1)
- with open(args.package, "r") as f:
- j_data = f.read()
- else:
- j_data = args.package
- try:
- cfg = json.loads(j_data)
- except json.JSONDecodeError as e:
- print(f"JSon error: {e}")
- exit(-2)
-
- cmd = get_script_args(cfg)
- process = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-
- time.sleep(args.delay)
- get_topics(args.json, args.path)
-
- process.send_signal(signal.SIGINT)
diff --git a/utils/ros2_topic_to_json.py b/utils/ros2_topic_to_json.py
deleted file mode 100644
index ffb1630..0000000
--- a/utils/ros2_topic_to_json.py
+++ /dev/null
@@ -1,72 +0,0 @@
-"""
- ROS2_Topic_to_json
- ROS 2 script that writes the list of available topics to a JSON file
-
- From https://github.com/ros2/ros2cli/blob/humble/ros2topic/ros2topic/verb/list.py
-
- @shalenikol release 0.1
-"""
-"""
-usage: python ros2_topic_to_json.py [-h] [--jsonpath JSONPATH]
-
-options:
- -h, --help show this help message and exit
- --jsonpath JSONPATH Output file in json-format
-"""
-import argparse
-import json
-from ros2cli.node.strategy import NodeStrategy
-from ros2topic.api import get_topic_names_and_types
-
-OUTPUT_FILE = "topics.json"
-
-# def show_topic_info(topic_info, is_publisher):
-# message = ('Published' if is_publisher else 'Subscribed') + ' topics:\n'
-# for (topic_name, topic_types, pub_count, sub_count) in topic_info:
-# count = pub_count if is_publisher else sub_count
-# if count:
-# topic_types_formatted = ', '.join(topic_types)
-# count_str = str(count) + ' ' + ('publisher' if is_publisher else 'subscriber') \
-# + ('s' if count > 1 else '')
-# message += f' * {topic_name} [{topic_types_formatted}] {count_str}\n'
-# return message
-
-def main(args, jsonpath):
- topic_info = []
- with NodeStrategy(args) as node:
- topic_names_and_types = get_topic_names_and_types(
- node=node,
- include_hidden_topics=False)
- for (topic_name, topic_types) in topic_names_and_types:
- # if args.verbose:
- # pub_count = node.count_publishers(topic_name)
- # sub_count = node.count_subscribers(topic_name)
- # topic_info.append((topic_name, topic_types, pub_count, sub_count))
- # else:
- topic_info.append((topic_name, topic_types))
-
- # if args.count_topics:
- print(f"---> number of topics: {len(topic_names_and_types)}")
-
- j_data = {"topics": topic_info}
- with open(jsonpath, "w") as fh:
- json.dump(j_data, fh, indent=2)
- # elif topic_names_and_types:
- # if args.verbose:
- # print(show_topic_info(topic_info, is_publisher=True))
- # print(show_topic_info(topic_info, is_publisher=False))
- # else:
- for (topic_name, topic_types) in topic_info:
- msg = "{topic_name}"
- # topic_types_formatted = ', '.join(topic_types)
- # if args.show_types:
- # msg += ' [{topic_types_formatted}]'
- print(msg.format_map(locals()))
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser()
- parser.add_argument("--jsonpath", default=OUTPUT_FILE, help="Output file in json-format")
- m_args = parser.parse_args()
-
- args = {}
- main(args, m_args.jsonpath)
\ No newline at end of file