Compare commits


1 commit

Author SHA1 Message Date
brothermechanic
b26334fe4a
CG Pipeline: add global readme 2023-12-08 17:26:06 +03:00
359 changed files with 78829 additions and 7213 deletions

6
.gitignore vendored
@@ -112,8 +112,4 @@ install_plugin_cad.sh
*#
.#*
\#*\#
out/
#freecad_workbench
freecad_workbench/freecad/update_workbench.sh
*.FCBak
out/

10
.gitmodules vendored
@@ -1,9 +1,3 @@
[submodule "rcg_pipeline"]
path = rcg_pipeline
url = https://gitlab.com/robossembler/rcg-pipeline.git
[submodule "freecad_workbench"]
path = freecad_workbench
url = https://gitlab.com/robossembler/robossembler-freecad-workbench.git
[submodule "simulation/insertion_vector_predicate/assembly"]
path = simulation/insertion_vector_predicate/assembly
[submodule "insertion_vector_predicate/assembly"]
path = insertion_vector_predicate/assembly
url = https://github.com/yunshengtian/Assemble-Them-All

105
ObjectDetection/README.md Normal file
@@ -0,0 +1,105 @@
# How to run
The [BlenderProc](https://github.com/DLR-RM/BlenderProc) package must be installed.
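BlenderProc is distributed on PyPI, so a typical installation (a minimal sketch, assuming Python 3 and pip are already available) is:
```
pip install blenderproc
```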
## Creating a YoloV4-format dataset for a given object
Command to run:
```
blenderproc run obj2Yolov4dataset.py [obj] [output_dir] [--imgs 1]
```
- obj: the *.obj file describing the object
- output_dir: output directory
- --imgs 1: number of output images
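For example, the following run renders 100 images of the script's default asset (the paths are the script defaults; the image count is illustrative):
```
blenderproc run obj2Yolov4dataset.py resources/robossembler-asset.obj output --imgs 100
```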
## Creating a YoloV4-format dataset for a set of given objects in a given scene
Command to run:
```
blenderproc run objs2Yolov4dataset.py [scene] [obj_path] [output_dir] [vhacd_path] [--imgs 1]
```
- scene: path to the scene description file (*.blend)
- obj_path: path to the directory with the *.obj description files of the objects to be detected
- output_dir: output directory
- vhacd_path: directory where vhacd should be (or already is) installed (default: blenderproc_resources/vhacd)
- --imgs 1: number of rendering series (15 images each) in the output (e.g. with imgs=100 you get 1500 images)
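For example, with the script's default paths (illustrative values), the run below produces 100 series, i.e. 1500 images:
```
blenderproc run objs2Yolov4dataset.py resources/sklad.blend resources/in_obj output blenderproc_resources/vhacd --imgs 100
```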
The scene description file must contain a plane named 'floor' onto which the objects to be detected will be sampled.
The [darknet](https://github.com/AlexeyAB/darknet) package must be built for the target software and hardware (CPU, GPU, ...); a build sketch follows below.
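A minimal build sketch for darknet, assuming a Linux host; the GPU, CUDNN and OPENCV Makefile flags are optional and depend on the available hardware and libraries:
```
git clone https://github.com/AlexeyAB/darknet
cd darknet
make GPU=1 CUDNN=1 OPENCV=1
```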
---
## Training the neural network and obtaining its weights file
Command to run:
```
darknet detector train [data] [cfg] [weight]
```
- data: dataset description file (*.data)
- cfg: neural network configuration file
- weight: neural network weights file
For training, download the file with pre-trained weights (162 MB): [yolov4.conv.137](https://github.com/AlexeyAB/darknet/releases/download/darknet_yolo_v3_optimal/yolov4.conv.137)
Different numbers of detected objects in the dataset require their own [data](https://gitlab.com/robossembler/framework/-/blob/master/ObjectDetection/yolov4_objs2.data) and [cfg](https://gitlab.com/robossembler/framework/-/blob/master/ObjectDetection/yolov4_objs2.cfg) files.
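A sketch of a full training call, assuming the data and cfg files linked above sit in the working directory and the pre-trained weights are downloaded next to them:
```
wget https://github.com/AlexeyAB/darknet/releases/download/darknet_yolo_v3_optimal/yolov4.conv.137
darknet detector train yolov4_objs2.data yolov4_objs2.cfg yolov4.conv.137
```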
---
## Command for detecting objects with the trained network weights
* variant 1 (the file t.txt contains a list of images):
```
darknet detector test yolov4_objs2.data yolov4_test.cfg yolov4_objs2_final.weights -dont_show -ext_output < t.txt > res.txt
```
* variant 2 (000015.jpg is a test image):
```
darknet detector test yolov4_objs2.data yolov4_test.cfg yolov4_objs2_final.weights -dont_show -ext_output 000015.jpg > res.txt
```
* variant 3 (the file t.txt contains a list of images):
```
darknet detector test yolov4_objs2.data yolov4_test.cfg yolov4_objs2_final.weights -dont_show -ext_output -out res.json < t.txt
```
The res.txt file after running variant 2:
> net.optimized_memory = 0
> mini_batch = 1, batch = 1, time_steps = 1, train = 0
> Create CUDA-stream - 0
> Create cudnn-handle 0
> nms_kind: greedynms (1), beta = 0.600000
> nms_kind: greedynms (1), beta = 0.600000
> nms_kind: greedynms (1), beta = 0.600000
>
> seen 64, trained: 768 K-images (12 Kilo-batches_64)
> Detection layer: 139 - type = 28
> Detection layer: 150 - type = 28
> Detection layer: 161 - type = 28
>000015.jpg: Predicted in 620.357000 milli-seconds.
>fork.001: 94% (left_x: 145 top_y: -0 width: 38 height: 18)
>asm_element_edge.001: 28% (left_x: 195 top_y: 320 width: 40 height: 61)
>start_link.001: 87% (left_x: 197 top_y: 313 width: 39 height: 68)
>doking_link.001: 99% (left_x: 290 top_y: 220 width: 32 height: 21)
>start_link.001: 90% (left_x: 342 top_y: 198 width: 33 height: 34)
>doking_link.001: 80% (left_x: 342 top_y: 198 width: 32 height: 34)
>assemb_link.001: 100% (left_x: 426 top_y: 410 width: 45 height: 61)
The res.json file after running variant 3:
>[
{
"frame_id":1,
"filename":"img_test/000001.jpg",
"objects": [
{"class_id":5, "name":"asm_element_edge.001", "relative_coordinates":{"center_x":0.498933, "center_y":0.502946, "width":0.083075, "height":0.073736}, "confidence":0.999638},
{"class_id":4, "name":"grip-tool.001", "relative_coordinates":{"center_x":0.858856, "center_y":0.031339, "width":0.043919, "height":0.064563}, "confidence":0.996551}
]
},
{
"frame_id":2,
"filename":"img_test/000002.jpg",
"objects": [
{"class_id":1, "name":"start_link.001", "relative_coordinates":{"center_x":0.926026, "center_y":0.728457, "width":0.104029, "height":0.132757}, "confidence":0.995811},
{"class_id":0, "name":"assemb_link.001", "relative_coordinates":{"center_x":0.280403, "center_y":0.129059, "width":0.029980, "height":0.025067}, "confidence":0.916782}
]
}

@@ -0,0 +1,144 @@
import blenderproc as bproc
"""
obj2Yolov4dataset
Overall task: object detection
Implemented function: creation of a YoloV4-format dataset for a given object (*.obj)
Uses the blenderproc module
24.01.2023 @shalenikol release 0.1
22.02.2023 @shalenikol release 0.2 fixed the x,y calculation in convert2relative
"""
import numpy as np
import argparse
import random
import os
import shutil
import json
def convert2relative(height, width, bbox):
"""
YOLO format uses relative coordinates for annotations
"""
x, y, w, h = bbox
x += w/2
y += h/2
return x/width, y/height, w/width, h/height
parser = argparse.ArgumentParser()
parser.add_argument('scene', nargs='?', default="resources/robossembler-asset.obj", help="Path to the object file.")
parser.add_argument('output_dir', nargs='?', default="output", help="Path to where the final files, will be saved")
parser.add_argument('--imgs', default=1, type=int, help="The number of times the objects should be rendered.")
args = parser.parse_args()
if not os.path.isdir(args.output_dir):
os.mkdir(args.output_dir)
bproc.init()
# load the objects into the scene
obj = bproc.loader.load_obj(args.scene)[0]
obj.set_cp("category_id", 1)
# Randomly perturbate the material of the object
mat = obj.get_materials()[0]
mat.set_principled_shader_value("Specular", random.uniform(0, 1))
mat.set_principled_shader_value("Roughness", random.uniform(0, 1))
mat.set_principled_shader_value("Base Color", np.random.uniform([0, 0, 0, 1], [1, 1, 1, 1]))
mat.set_principled_shader_value("Metallic", random.uniform(0, 1))
# Create a new light
light = bproc.types.Light()
light.set_type("POINT")
# Sample its location around the object
light.set_location(bproc.sampler.shell(
center=obj.get_location(),
radius_min=1,
radius_max=5,
elevation_min=1,
elevation_max=89
))
# Randomly set the color and energy
light.set_color(np.random.uniform([0.5, 0.5, 0.5], [1, 1, 1]))
light.set_energy(random.uniform(100, 1000))
bproc.camera.set_resolution(640, 480)
# Sample five camera poses
poses = 0
tries = 0
while tries < 10000 and poses < args.imgs:
# Sample random camera location around the object
location = bproc.sampler.shell(
center=obj.get_location(),
radius_min=1,
radius_max=4,
elevation_min=1,
elevation_max=89
)
# Compute rotation based on a look-at point placed randomly around the object
lookat_point = obj.get_location() + np.random.uniform([-0.5, -0.5, -0.5], [0.5, 0.5, 0.5])
rotation_matrix = bproc.camera.rotation_from_forward_vec(lookat_point - location, inplane_rot=np.random.uniform(-0.7854, 0.7854))
# Add homogeneous camera pose based on location and rotation
cam2world_matrix = bproc.math.build_transformation_mat(location, rotation_matrix)
# Only add camera pose if object is still visible
if obj in bproc.camera.visible_objects(cam2world_matrix):
bproc.camera.add_camera_pose(cam2world_matrix)
poses += 1
tries += 1
# Enable transparency so the background becomes transparent
bproc.renderer.set_output_format(enable_transparency=True)
# add segmentation masks (per class and per instance)
bproc.renderer.enable_segmentation_output(map_by=["category_id", "instance", "name"])
# Render RGB images
data = bproc.renderer.render()
# Write data to coco file
res_dir = os.path.join(args.output_dir, 'coco_data')
bproc.writer.write_coco_annotations(res_dir,
instance_segmaps=data["instance_segmaps"],
instance_attribute_maps=data["instance_attribute_maps"],
color_file_format='JPEG',
colors=data["colors"],
append_to_existing_output=True)
# load the annotations
with open(os.path.join(res_dir,"coco_annotations.json"), "r") as fh:
y = json.load(fh)
# list of object names
with open(os.path.join(res_dir,"obj.names"), "w") as fh:
for cat in y["categories"]:
fh.write(cat["name"]+"\n")
# create or clear the data folder for the dataset
res_data = os.path.join(res_dir, 'data')
if os.path.isdir(res_data):
for f in os.listdir(res_data):
os.remove(os.path.join(res_data, f))
else:
os.mkdir(res_data)
# list of image file names
s = []
with open(os.path.join(res_dir,"images.txt"), "w") as fh:
for i in y["images"]:
filename = i["file_name"]
shutil.copy(os.path.join(res_dir,filename),res_data)
fh.write(filename.replace('images','data')+"\n")
s.append((os.path.split(filename))[1])
# it is assumed that "images" and "annotations" follow in the same order
c = 0
for i in y["annotations"]:
bbox = i["bbox"]
im_h = i["height"]
im_w = i["width"]
rel = convert2relative(im_h,im_w,bbox)
fn = (os.path.splitext(s[c]))[0] # file name only
with open(os.path.join(res_data,fn+".txt"), "w") as fh:
# format: <target> <x-center> <y-center> <width> <height>
fh.write("0 "+'{:-f} {:-f} {:-f} {:-f}'.format(rel[0],rel[1],rel[2],rel[3])+"\n")
c += 1

@@ -0,0 +1,296 @@
import blenderproc as bproc
"""
objs2Yolov4dataset
Overall task: object detection
Implemented function: creation of a YoloV4-format dataset for a set of given objects (*.obj) in a given scene (*.blend)
Uses the blenderproc module
17.02.2023 @shalenikol release 0.1
22.02.2023 @shalenikol release 0.2 fixed the x,y calculation in convert2relative
"""
import sys
import numpy as np
import argparse
import random
import os
import shutil
import json
def convert2relative(height, width, bbox):
"""
YOLO format uses relative coordinates for annotations
"""
x, y, w, h = bbox
x += w/2
y += h/2
return x/width, y/height, w/width, h/height
parser = argparse.ArgumentParser()
parser.add_argument('scene', nargs='?', default="resources/sklad.blend", help="Path to the scene object.")
parser.add_argument('obj_path', nargs='?', default="resources/in_obj", help="Path to the object files.")
parser.add_argument('output_dir', nargs='?', default="output", help="Path to where the final files, will be saved")
parser.add_argument('vhacd_path', nargs='?', default="blenderproc_resources/vhacd", help="The directory in which vhacd should be installed or is already installed.")
parser.add_argument('--imgs', default=2, type=int, help="The number of times the objects should be rendered.")
args = parser.parse_args()
if not os.path.isdir(args.obj_path):
print(f"{args.obj_path} : no object directory")
sys.exit()
if not os.path.isdir(args.output_dir):
os.mkdir(args.output_dir)
bproc.init()
# ? load the lights from the scene
#cam = bproc.loader.load_blend(args.scene, data_blocks=["cameras"])
#lights = bproc.loader.load_blend(args.scene, data_blocks=["lights"])
# load the objects
list_files = os.listdir(args.obj_path)
meshs = []
i = 0
for f in list_files:
if (os.path.splitext(f))[1] == ".obj":
f = os.path.join(args.obj_path, f) # path to the object file
if os.path.isfile(f):
meshs += bproc.loader.load_obj(f)
i += 1
if i == 0:
print("Objects not found")
sys.exit()
for i,o in enumerate(meshs):
o.set_cp("category_id", i+1)
# load the scene
scene = bproc.loader.load_blend(args.scene, data_blocks=["objects"])
#scene = bproc.loader.load_obj(args.scene)
# find the floor
floor = None
for o in scene:
o.set_cp("category_id", 999)
s = o.get_name()
if s.find("floor") >= 0:
floor = o
if floor == None:
print("Floor not found in the scene")
sys.exit()
floor.enable_rigidbody(False, collision_shape='BOX')
objs = meshs + scene
for obj in meshs:
# Make the object actively participate in the physics simulation
obj.enable_rigidbody(active=True, collision_shape="COMPOUND")
# Also use convex decomposition as collision shapes
obj.build_convex_decomposition_collision_shape(args.vhacd_path)
with open(os.path.join(args.output_dir,"res.txt"), "w") as fh:
# fh.write(str(type(scene[0]))+"\n")
i = 0
for o in objs:
i += 1
loc = o.get_location()
euler = o.get_rotation_euler()
fh.write(f"{i} : {o.get_name()} {loc} {euler}\n")
# define a light and set its location and energy level
light = bproc.types.Light()
light.set_type("POINT")
light.set_location([5, -5, 5])
#light.set_energy(900)
#light.set_color([0.7, 0.7, 0.7])
light1 = bproc.types.Light(name="light1")
light1.set_type("SUN")
light1.set_location([0, 0, 0])
light1.set_rotation_euler([-0.063, 0.6177, -0.1985])
#light1.set_energy(7)
light1.set_color([1, 1, 1])
"""
# Sample its location around the object
light.set_location(bproc.sampler.shell(
center=obj.get_location(),
radius_min=2.5,
radius_max=5,
elevation_min=1,
elevation_max=89
))
"""
# define the camera intrinsics
bproc.camera.set_intrinsics_from_blender_params(1, 640, 480, lens_unit="FOV")
bproc.renderer.enable_segmentation_output(map_by=["category_id", "instance", "name"])
res_dir = os.path.join(args.output_dir, 'coco_data')
# Rendering loop
n_cam_location = 5 # number of random camera locations
n_cam_poses = 3 # number of samples for each camera location
# Do multiple times: Position the shapenet objects using the physics simulator and render X images with random camera poses
for r in range(args.imgs):
# Randomly set the color and energy
light.set_color(np.random.uniform([0.5, 0.5, 0.5], [1, 1, 1]))
light.set_energy(random.uniform(500, 1000))
light1.set_energy(random.uniform(3, 11))
for i,o in enumerate(objs):
mat = o.get_materials()[0]
mat.set_principled_shader_value("Specular", random.uniform(0, 1))
mat.set_principled_shader_value("Roughness", random.uniform(0, 1))
mat.set_principled_shader_value("Base Color", np.random.uniform([0, 0, 0, 1], [1, 1, 1, 1]))
mat.set_principled_shader_value("Metallic", random.uniform(0, 1))
# Clear all key frames from the previous run
bproc.utility.reset_keyframes()
# Define a function that samples 6-DoF poses
def sample_pose(obj: bproc.types.MeshObject):
obj.set_location(np.random.uniform([-1, -1.5, 0.2], [1, 2, 1.2])) #[-1, -1, 0], [1, 1, 2]))
obj.set_rotation_euler(bproc.sampler.uniformSO3())
# Sample the poses of all shapenet objects above the ground without any collisions in-between
bproc.object.sample_poses(meshs, objects_to_check_collisions = meshs + [floor], sample_pose_func = sample_pose)
# Run the simulation and fix the poses of the shapenet objects at the end
bproc.object.simulate_physics_and_fix_final_poses(min_simulation_time=4, max_simulation_time=20, check_object_interval=1)
# Find point of interest, all cam poses should look towards it
poi = bproc.object.compute_poi(meshs)
coord_max = [0.1, 0.1, 0.1]
coord_min = [0., 0., 0.]
with open(os.path.join(args.output_dir,"res.txt"), "a") as fh:
fh.write("*****************\n")
fh.write(f"{r}) poi = {poi}\n")
i = 0
for o in meshs:
i += 1
loc = o.get_location()
euler = o.get_rotation_euler()
fh.write(f" {i} : {o.get_name()} {loc} {euler}\n")
for j in range(3):
if loc[j] < coord_min[j]:
coord_min[j] = loc[j]
if loc[j] > coord_max[j]:
coord_max[j] = loc[j]
# Sample up to X camera poses
#an = np.random.uniform(0.78, 1.2) #1. #0.35
for i in range(n_cam_location):
# Sample location
location = bproc.sampler.shell(center=[0, 0, 0],
radius_min=1.1,
radius_max=3.3,
elevation_min=5,
elevation_max=89)
# coordinate along which the camera position will be sampled
j = random.randint(0, 2)
# one-off shift along the randomly chosen coordinate
d = (coord_max[j] - coord_min[j]) / n_cam_poses
if location[j] < 0:
d = -d
for k in range(n_cam_poses):
# Compute rotation based on vector going from location towards poi
rotation_matrix = bproc.camera.rotation_from_forward_vec(poi - location, inplane_rot=np.random.uniform(-0.7854, 0.7854))
# Add homogeneous camera pose based on location and rotation
cam2world_matrix = bproc.math.build_transformation_mat(location, rotation_matrix)
bproc.camera.add_camera_pose(cam2world_matrix)
location[j] -= d
#world_matrix = bproc.math.build_transformation_mat([2.3, -0.4, 0.66], [1.396, 0., an])
#bproc.camera.add_camera_pose(world_matrix)
#an += 0.2
# render the whole pipeline
data = bproc.renderer.render()
# Write data to coco file
bproc.writer.write_coco_annotations(res_dir,
instance_segmaps=data["instance_segmaps"],
instance_attribute_maps=data["instance_attribute_maps"],
color_file_format='JPEG',
colors=data["colors"],
append_to_existing_output=True)
# load the annotations
with open(os.path.join(res_dir,"coco_annotations.json"), "r") as fh:
y = json.load(fh)
# list of object names
n_obj = 0
obj_list = []
with open(os.path.join(res_dir,"obj.names"), "w") as fh:
for cat in y["categories"]:
if cat["id"] < 999:
n = cat["name"]
i = cat["id"]
obj_list.append([n,i,n_obj])
fh.write(n+"\n")
n_obj += 1
# create or clear the data folder for the dataset
res_data = os.path.join(res_dir, 'data')
if os.path.isdir(res_data):
for f in os.listdir(res_data):
os.remove(os.path.join(res_data, f))
else:
os.mkdir(res_data)
# list of image file names
fn_image = os.path.join(res_dir,"images.txt")
img_list = []
with open(fn_image, "w") as fh:
for i in y["images"]:
filename = i["file_name"]
shutil.copy(os.path.join(res_dir,filename),res_data)
fh.write(filename.replace('images','data')+"\n")
img_list.append([i["id"], (os.path.split(filename))[1]])
# create two lists of file names: train and valid
n_image_in_series = n_cam_location * n_cam_poses # number of images per series
i = 0
fh = open(fn_image, "r")
f1 = open(os.path.join(res_dir,"i_train.txt"), "w")
f2 = open(os.path.join(res_dir,"i_val.txt"), "w")
for line in fh:
i += 1
if i % n_image_in_series == 0:
f2.write(line)
else:
f1.write(line)
fh.close()
f1.close()
f2.close()
# fill in the bbox label files
for i in y["annotations"]:
cat_id = i["category_id"]
if cat_id < 999:
im_id = i["image_id"]
bbox = i["bbox"]
im_h = i["height"]
im_w = i["width"]
rel = convert2relative(im_h,im_w,bbox)
# find the index of the entry with the required image
j = next(k for k, (x, _) in enumerate(img_list) if x == im_id)
filename = img_list[j][1]
fn = (os.path.splitext(filename))[0] # file name only
with open(os.path.join(res_data,fn+".txt"), "a") as fh:
# find the index of the entry with the required object
j = next(k for k, (_, x, _) in enumerate(obj_list) if x == cat_id)
# format: <target> <x-center> <y-center> <width> <height>
fh.write(f"{obj_list[j][2]} {rel[0]} {rel[1]} {rel[2]} {rel[3]}\n")
# create the dataset description file for darknet
with open(os.path.join(res_dir,"yolov4_objs2.data"), "w") as fh:
fh.write(f"classes = {n_obj}\n")
fh.write("train = i_train.txt\n")
fh.write("valid = i_val.txt\n")
fh.write("names = obj.names\n")
fh.write("backup = backup\n")
fh.write("eval = coco\n")

File diff suppressed because it is too large.

@@ -0,0 +1,7 @@
classes= 1
train = i_train.txt
valid = i_val.txt
names = obj.names
backup = backup
eval=coco

File diff suppressed because it is too large.

@@ -0,0 +1,7 @@
classes= 6
train = i_train.txt
valid = i_val.txt
names = obj.names
backup = backup
eval=coco

File diff suppressed because it is too large.

0
PoseEstimation/.gitkeep Normal file
@@ -0,0 +1,44 @@
---
id: BOP_dataset
title: Script for creating a BOP dataset
---
## Input data structure:
```
<example_dir>/
input_obj/asm_element_edge.mtl # material file
input_obj/asm_element_edge.obj # mesh object
input_obj/fork.mtl
input_obj/fork.obj
input_obj/...
resources/sklad.blend # scene file
objs2BOPdataset.py # this script
```
## Example command to run the script:
```
cd <example_dir>/
blenderproc run objs2BOPdataset.py resources/sklad.blend input_obj output --imgs 333
```
- resources/sklad.blend : scene file
- input_obj : directory with the mesh files
- output : output directory
- imgs : number of packages of 9 frames each (in the example, 333 * 9 = 2997)
## Structure of the resulting BOP dataset:
```
output/
bop_data/
train_pbr/
000000/
depth/... # depth files
mask/... # mask files
mask_visib/... # visibility mask files
rgb/... # RGB image files
scene_camera.json
scene_gt.json
scene_gt_coco.json
scene_gt_info.json
camera.json # camera intrinsics (for the entire dataset)
res.txt # log of dataset package creation
```

@@ -0,0 +1,261 @@
import blenderproc as bproc
"""
objs2BOPdataset
Overall task: 6D pose estimation
Implemented function: creation of a BOP-format dataset for a set of given objects (*.obj) in a given scene (*.blend)
Uses the blenderproc module
29.08.2023 @shalenikol release 0.1
12.10.2023 @shalenikol release 0.2
"""
import sys
import numpy as np
import argparse
import random
import os
import shutil
import json
Not_Categories_Name = True # the category name is not present in the annotation
def convert2relative(height, width, bbox):
"""
YOLO format uses relative coordinates for annotations
"""
x, y, w, h = bbox
x += w/2
y += h/2
return x/width, y/height, w/width, h/height
parser = argparse.ArgumentParser()
parser.add_argument('scene', nargs='?', default="resources/sklad.blend", help="Path to the scene object.")
parser.add_argument('obj_path', nargs='?', default="resources/in_obj", help="Path to the object files.")
parser.add_argument('output_dir', nargs='?', default="output", help="Path to where the final files, will be saved")
parser.add_argument('vhacd_path', nargs='?', default="blenderproc_resources/vhacd", help="The directory in which vhacd should be installed or is already installed.")
parser.add_argument('-single_object', nargs='?', type= bool, default=True, help="One object per frame.")
parser.add_argument('--imgs', default=2, type=int, help="The number of times the objects should be rendered.")
args = parser.parse_args()
if not os.path.isdir(args.obj_path):
print(f"{args.obj_path} : no object directory")
sys.exit()
if not os.path.isdir(args.output_dir):
os.mkdir(args.output_dir)
single_object = args.single_object
bproc.init()
# ? load the lights from the scene
#cam = bproc.loader.load_blend(args.scene, data_blocks=["cameras"])
#lights = bproc.loader.load_blend(args.scene, data_blocks=["lights"])
# load the objects
list_files = os.listdir(args.obj_path)
obj_names = []
obj_filenames = []
all_meshs = []
nObj = 0
for f in list_files:
if (os.path.splitext(f))[1] == ".obj":
f = os.path.join(args.obj_path, f) # path to the object file
if os.path.isfile(f):
obj = bproc.loader.load_obj(f)
all_meshs += obj
obj_names += [obj[0].get_name()]
obj_filenames += [f]
nObj += 1
if nObj == 0:
print("Objects not found")
sys.exit()
for i,obj in enumerate(all_meshs):
#print(f"{i} *** {obj}")
obj.set_cp("category_id", i+1)
# load the scene
scene = bproc.loader.load_blend(args.scene, data_blocks=["objects"])
# find the collision objects (floor, etc.)
obj_type = ["floor", "obj"]
collision_objects = []
#floor = None
for o in scene:
o.set_cp("category_id", 999)
s = o.get_name()
for type in obj_type:
if s.find(type) >= 0:
collision_objects += [o]
o.enable_rigidbody(False, collision_shape='BOX')
if not collision_objects:
print("Collision objects not found in the scene")
sys.exit()
#floor.enable_rigidbody(False, collision_shape='BOX')
for obj in all_meshs:
# Make the object actively participate in the physics simulation
obj.enable_rigidbody(active=True, collision_shape="COMPOUND")
# Also use convex decomposition as collision shapes
obj.build_convex_decomposition_collision_shape(args.vhacd_path)
objs = all_meshs + scene
with open(os.path.join(args.output_dir,"res.txt"), "w") as fh:
# fh.write(str(type(scene[0]))+"\n")
i = 0
for o in objs:
i += 1
loc = o.get_location()
euler = o.get_rotation_euler()
fh.write(f"{i} : {o.get_name()} {loc} {euler} category_id = {o.get_cp('category_id')}\n")
# define a light and set its location and energy level
light = bproc.types.Light()
light.set_type("POINT")
light.set_location([5, -5, 5])
#light.set_energy(900)
#light.set_color([0.7, 0.7, 0.7])
light1 = bproc.types.Light(name="light1")
light1.set_type("SUN")
light1.set_location([0, 0, 0])
light1.set_rotation_euler([-0.063, 0.6177, -0.1985])
#light1.set_energy(7)
light1.set_color([1, 1, 1])
# define the camera intrinsics
bproc.camera.set_intrinsics_from_blender_params(1, 640, 480, lens_unit="FOV")
# add segmentation masks (per class and per instance)
bproc.renderer.enable_segmentation_output(map_by=["category_id", "instance", "name"])
#bproc.renderer.enable_segmentation_output(map_by=["category_id", "instance", "name", "bop_dataset_name"],
# default_values={"category_id": 0, "bop_dataset_name": None})
# activate depth rendering
bproc.renderer.enable_depth_output(activate_antialiasing=False)
res_dir = os.path.join(args.output_dir, "bop_data")
if os.path.isdir(res_dir):
shutil.rmtree(res_dir)
# Rendering loop
n_cam_location = 3 #5 # number of random camera locations
n_cam_poses = 3 #3 # number of samples for each camera location
# Do multiple times: Position the shapenet objects using the physics simulator and render X images with random camera poses
for r in range(args.imgs):
# one random object in the frame / all the given objects
meshs = [random.choice(all_meshs)] if single_object else all_meshs[:]
# Randomly set the color and energy
light.set_color(np.random.uniform([0.5, 0.5, 0.5], [1, 1, 1]))
light.set_energy(random.uniform(500, 1000))
light1.set_energy(random.uniform(3, 11))
for i,o in enumerate(meshs): #objs
mat = o.get_materials()[0]
mat.set_principled_shader_value("Specular", random.uniform(0, 1))
mat.set_principled_shader_value("Roughness", random.uniform(0, 1))
mat.set_principled_shader_value("Base Color", np.random.uniform([0, 0, 0, 1], [1, 1, 1, 1]))
mat.set_principled_shader_value("Metallic", random.uniform(0, 1))
# Clear all key frames from the previous run
bproc.utility.reset_keyframes()
# Define a function that samples 6-DoF poses
def sample_pose(obj: bproc.types.MeshObject):
obj.set_location(np.random.uniform([-1, -1.5, 0.2], [1, 2, 1.2])) #[-1, -1, 0], [1, 1, 2]))
obj.set_rotation_euler(bproc.sampler.uniformSO3())
# Sample the poses of all shapenet objects above the ground without any collisions in-between
#bproc.object.sample_poses(meshs, objects_to_check_collisions = meshs + [floor], sample_pose_func = sample_pose)
bproc.object.sample_poses(meshs, objects_to_check_collisions = meshs + collision_objects, sample_pose_func = sample_pose)
# Run the simulation and fix the poses of the shapenet objects at the end
bproc.object.simulate_physics_and_fix_final_poses(min_simulation_time=4, max_simulation_time=20, check_object_interval=1)
# Find point of interest, all cam poses should look towards it
poi = bproc.object.compute_poi(meshs)
coord_max = [0.1, 0.1, 0.1]
coord_min = [0., 0., 0.]
with open(os.path.join(args.output_dir,"res.txt"), "a") as fh:
fh.write("*****************\n")
fh.write(f"{r}) poi = {poi}\n")
i = 0
for o in meshs:
i += 1
loc = o.get_location()
euler = o.get_rotation_euler()
fh.write(f" {i} : {o.get_name()} {loc} {euler}\n")
for j in range(3):
if loc[j] < coord_min[j]:
coord_min[j] = loc[j]
if loc[j] > coord_max[j]:
coord_max[j] = loc[j]
# Sample up to X camera poses
#an = np.random.uniform(0.78, 1.2) #1. #0.35
for i in range(n_cam_location):
# Sample location
location = bproc.sampler.shell(center=[0, 0, 0],
radius_min=1.1,
radius_max=2.2,
elevation_min=5,
elevation_max=89)
# coordinate along which the camera position will be sampled
j = random.randint(0, 2)
# one-off shift along the randomly chosen coordinate
d = (coord_max[j] - coord_min[j]) / n_cam_poses
if location[j] < 0:
d = -d
for k in range(n_cam_poses):
# Compute rotation based on vector going from location towards poi
rotation_matrix = bproc.camera.rotation_from_forward_vec(poi - location, inplane_rot=np.random.uniform(-0.7854, 0.7854))
# Add homogeneous camera pose based on location and rotation
cam2world_matrix = bproc.math.build_transformation_mat(location, rotation_matrix)
bproc.camera.add_camera_pose(cam2world_matrix)
location[j] -= d
#world_matrix = bproc.math.build_transformation_mat([2.3, -0.4, 0.66], [1.396, 0., an])
#bproc.camera.add_camera_pose(world_matrix)
#an += 0.2
# render the whole pipeline
data = bproc.renderer.render()
# Write data to bop format
bproc.writer.write_bop(res_dir,
target_objects = all_meshs, # Optional[List[MeshObject]] = None
depths = data["depth"],
depth_scale = 1.0,
colors = data["colors"],
color_file_format='JPEG',
append_to_existing_output = (r>0),
save_world2cam = False) # world coords are arbitrary in most real BOP datasets
# dataset="robo_ds",
"""
!!! categories -> name is taken from category_id !!!
see below:
blenderproc.python.writer : BopWriterUtility.py
class _BopWriterUtility
def calc_gt_coco
...
CATEGORIES = [{'id': obj.get_cp('category_id'), 'name': str(obj.get_cp('category_id')), 'supercategory':
dataset_name} for obj in dataset_objects]
therefore the category names in the annotation are replaced
"""
if Not_Categories_Name:
coco_file = os.path.join(res_dir,"train_pbr/000000/scene_gt_coco.json")
with open(coco_file, "r") as fh:
data = json.load(fh)
cats = data["categories"]
#print(f"type(cat) = {type(cat)} cat : {cat}")
i = 0
for cat in cats:
cat["name"] = obj_names[i]
i += 1
#print(cat)
with open(coco_file, "w") as fh:
json.dump(data, fh, indent=0)

@@ -1,7 +1,5 @@
# The Robossembler Framework
The Robossembler Framework is intended for automating the development of control programs for robot manipulators, debugging them in virtual environments, and evaluating their performance.
The framework consists of the following functional modules
@@ -23,9 +21,14 @@
- __Stable feasibility predicate__. Holds for an assembly sequence when the assembly reaches a stable state at each stage.
- __Degrees-of-freedom predicate__. Built from the already generated assembly graph(s); indicates along which degrees of freedom a part can be moved.
# Scene generation
TODO: write a description
[example scene description file](docs/scene_generator)
# Computer graphics (CG) technology
General functions of the CG module:
- transfer of 3D models from CAD to CG,
- polygonal modeling / conversion of mesh objects,
- rebuilding of materials into a PBR representation,
- baking of mesh object textures,
- assembly of CG assets from objects and textures,
- export of polygonal objects with textures to universal formats,
- general-purpose utilities,
- applied tools for editing 3D models.

26
asp-review-app/.gitignore vendored Normal file
@@ -0,0 +1,26 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
node_modules
/.pnp
.pnp.js
# testing
/coverage
# production
/build
# misc
.DS_Store
.env.local
.env.development.local
.env.test.local
.env.production.local
npm-debug.log*
yarn-debug.log*
yarn-error.log*
**/node_modules
server/public/
**/computed/

2719
asp-review-app/server/package-lock.json generated Normal file

File diff suppressed because it is too large.

@@ -0,0 +1,45 @@
{
"name": "express-typescript",
"version": "1.0.0",
"description": "",
"main": "index.js",
"type": "module",
"scripts": {
"build": "npx tsc",
"start": "npx tsc && node --experimental-specifier-resolution=node dist/server.js",
"dev": "nodemon --exec ts-node --esm --transpileOnly ./src/server.ts"
},
"keywords": [],
"author": "",
"license": "ISC",
"devDependencies": {
"@types/compression": "^1.7.2",
"@types/cors": "^2.8.13",
"@types/express": "^4.17.17",
"@types/express-fileupload": "^1.4.1",
"@types/mongoose": "^5.11.97",
"@types/node": "^17.0.45",
"typescript": "^4.9.5"
},
"dependencies": {
"body-parser": "^1.20.2",
"class-transformer": "^0.5.1",
"class-validator": "^0.14.0",
"compression": "^1.7.4",
"concurrently": "^8.0.1",
"cors": "^2.8.5",
"decompress": "^4.2.1",
"express": "^4.18.2",
"express-cross": "^1.0.0",
"express-fileupload": "^1.4.0",
"first-di": "^1.0.11",
"form-data": "^4.0.0",
"lodash": "^4.17.21",
"morgan": "^1.10.0",
"multer": "^1.4.5-lts.1",
"node-stream-zip": "^1.15.0",
"nodemon": "^2.0.22",
"shelljs": "^0.8.5",
"ts-node": "^10.9.1"
}
}

@@ -0,0 +1,71 @@
import express from "express";
import compression from "compression";
import cors from "cors";
import { Routes } from "./core/interfaces/router";
import bodyParser from "body-parser";
import fileUpload from "express-fileupload";
import { DevEnv } from "./core/env/env";
import path from 'path';
import { locator } from "./core/di/register_di";
export const dirname = path.resolve();
const corsOptions = {
origin: process.env.CORS_ALLOW_ORIGIN || '*',
methods: ['GET', 'PUT', 'POST', 'DELETE', 'OPTIONS'],
allowedHeaders: ['Content-Type', 'Authorization']
};
export class App {
public app: express.Application;
public port: string | number;
public env: string;
constructor(routes: Routes[], port) {
this.app = express();
this.port = port;
this.env = process.env.NODE_ENV || "development";
this.initializeMiddleware();
this.initializeRoutes(routes);
this.loadAppDependencies();
}
public listen() {
this.app.listen(this.port, () => {
console.info(`=================================`);
console.info(`======= ENV: ${this.env} =======`);
console.info(`🚀 App listening on the port ${this.port}`);
console.info(`=================================`);
});
}
public getServer() {
return this.app;
}
private initializeMiddleware() {
this.app.use(
cors(corsOptions)
);
this.app.use(compression());
this.app.use(express.json());
this.app.use(express.urlencoded({ extended: true }));
this.app.use(bodyParser.json());
this.app.use(bodyParser.urlencoded({ extended: true }));
this.app.use(express.static(dirname + '/public/'));
this.app.use(fileUpload({
createParentPath: true
}));
}
private initializeRoutes(routes: Routes[]) {
routes.forEach((route) => {
this.app.use("/", route.router);
});
}
loadAppDependencies() {
locator(new DevEnv());
}
}

@@ -0,0 +1,28 @@
import { override } from "first-di";
import { Env } from "../env/env";
import { AssemblyController } from "../../features/assembly_create/assembly_create_controller";
import { AssemblyPreviewsController } from "../../features/assembly_previews/assembly_previews_controller";
import { EntityRepository } from "../repository/entity_repository";
import { ZipRepository } from "../repository/zip_repository";
import { ComputeRepository } from "../repository/compute_repository";
export const locator = (env: Env) => {
// override(Env, env)
registerController(env)
registerRepository(env)
};
const registerRepository = (env:Env) => {
override(ZipRepository, ZipRepository);
override(EntityRepository, EntityRepository);
override(ComputeRepository,ComputeRepository);
}
const registerController = (env: Env) => {
override(AssemblyController,AssemblyController)
override(AssemblyPreviewsController, AssemblyPreviewsController)
}

@@ -0,0 +1,10 @@
export class HttpException extends Error {
public status: number;
public message: string;
constructor(status: number, message: string) {
super(message);
this.status = status;
this.message = message;
}
}

@@ -0,0 +1,191 @@
interface MemoOptions<F extends Fn, S extends unknown[] = unknown[]> {
serialize?: (...args: Parameters<F>) => S;
}
interface MemoAsyncOptions<F extends Fn> extends MemoOptions<F> {
external?: {
get: (args: Parameters<F>) => Promise<Awaited<ReturnType<F>> | undefined | null>;
set: (args: Parameters<F>, value: Awaited<ReturnType<F>>) => Promise<void>;
remove: (args: Parameters<F>) => Promise<void>;
clear: () => Promise<void>;
};
}
type Fn = (...params: any[]) => any;
type AsyncFn = (...params: any[]) => Promise<any>;
interface MemoFunc<F extends Fn> {
(...args: Parameters<F>): ReturnType<F>;
get(...args: Parameters<F>): ReturnType<F>;
raw(...args: Parameters<F>): ReturnType<F>;
clear(...args: Parameters<F> | []): void | Promise<void>;
}
export const enum State {
Empty,
Ok,
Waiting,
Error
}
export interface Node<T extends Fn> {
state: State;
value: ReturnType<T> | undefined;
error: unknown;
primitive: Map<any, Node<T>>;
reference: WeakMap<any, Node<T>>;
callbacks?: Set<{ res: (value: ReturnType<T>) => void; rej: (error: unknown) => void }>;
}
function makeNode<T extends Fn>(): Node<T> {
return {
state: State.Empty,
value: undefined,
error: undefined,
primitive: new Map(),
reference: new WeakMap()
};
}
function clearNode<T extends Fn>(node: Node<T> | undefined) {
if (node) {
node.state = State.Empty;
node.value = undefined;
node.error = undefined;
node.primitive = new Map();
node.reference = new WeakMap();
}
}
function isPrimitiveType(value: unknown) {
return (typeof value !== 'object' && typeof value !== 'function') || value === null;
}
function walkBase<T extends Fn, P extends any[] = Parameters<T>>(
node: Node<T>,
args: P,
hooks: { makeNode: () => Node<T> | undefined }
): Node<T> | undefined {
let cur = node;
for (const arg of args) {
if (isPrimitiveType(arg)) {
if (cur.primitive.has(arg)) {
cur = cur.primitive.get(arg)!;
} else {
const newNode = hooks.makeNode();
if (newNode) {
cur.primitive.set(arg, newNode);
cur = newNode;
} else {
return undefined;
}
}
} else {
if (cur.reference.has(arg)) {
cur = cur.reference.get(arg)!;
} else {
const newNode = hooks.makeNode();
if (newNode) {
cur.reference.set(arg, newNode);
cur = newNode;
} else {
return undefined;
}
}
}
}
return cur;
}
function walkAndCreate<T extends Fn, P extends any[] = Parameters<T>>(
node: Node<T>,
args: P
) {
return walkBase(node, args, { makeNode })!;
}
function walkOrBreak<T extends Fn, P extends any[] = Parameters<T>>(node: Node<T>, args: P) {
return walkBase(node, args, { makeNode: () => undefined });
}
export function memoAsync<F extends AsyncFn>(
fn: F,
options: MemoAsyncOptions<F> = {}
): MemoFunc<F> {
const root = makeNode<F>();
const memoFunc = async function (...args: Parameters<F>) {
const path = options.serialize ? options.serialize(...args) : args;
const cur = walkAndCreate<F, any[]>(root, path);
if (cur.state === State.Ok) {
return cur.value;
} else if (cur.state === State.Error) {
throw cur.error;
} else if (cur.state === State.Waiting) {
return new Promise((res, rej) => {
if (!cur.callbacks) {
cur.callbacks = new Set();
}
cur.callbacks!.add({ res, rej });
});
} else {
try {
cur.state = State.Waiting;
const external = options.external ? await options.external.get(args) : undefined;
const value = external !== undefined && external !== null ? external : await fn(...args);
cur.state = State.Ok;
cur.value = value;
if (options.external) {
await options.external.set(args, value);
}
for (const callback of cur.callbacks ?? []) {
callback.res(value);
}
return value;
} catch (error) {
cur.state = State.Error;
cur.error = error;
for (const callback of cur.callbacks ?? []) {
callback.rej(error);
}
throw error;
}
}
} as MemoFunc<F>;
memoFunc.get = (...args) => {
return memoFunc(...args);
};
memoFunc.raw = (...args) => {
return fn(...args) as ReturnType<F>;
};
memoFunc.clear = async (...args) => {
if (args.length === 0) {
clearNode(root);
if (options.external) {
await options.external.clear();
}
} else {
const cur = walkOrBreak<F>(root, args as Parameters<F>);
clearNode(cur);
if (options.external) {
await options.external.remove(args as Parameters<F>);
}
}
};
return memoFunc;
}

@@ -0,0 +1,6 @@
import { Router } from "express";
export interface Routes {
path?: string;
router: Router;
}

@@ -0,0 +1,25 @@
import { HttpException } from '../exceptions/HttpException';
import { plainToClass } from 'class-transformer';
import { validate, ValidationError } from 'class-validator';
import { RequestHandler } from 'express';
const validationMiddleware = (
type: any,
value = 'body',
skipMissingProperties = false,
whitelist = true,
forbidNonWhitelisted = true,
): RequestHandler => {
return (req, res, next) => {
validate(plainToClass(type, req[value]), { skipMissingProperties, whitelist, forbidNonWhitelisted }).then((errors: ValidationError[]) => {
if (errors.length > 0) {
const message = errors.map((error: ValidationError) => Object.values(error.constraints)).join(', ');
next(new HttpException(400, message));
} else {
next();
}
});
};
};
export default validationMiddleware;

@@ -0,0 +1,76 @@
import { reflection } from 'first-di';
import "reflect-metadata";
import { promises as fs } from 'fs';
import { async } from 'node-stream-zip';
import * as cp from 'child_process';
import path from 'path';
async function exec(cmd: string, opts: (cp.ExecOptions & { trim?: boolean }) = {}): Promise<string> {
return new Promise((c, e) => {
cp.exec(cmd, { env: process.env, ...opts }, (err, stdout) => err ? e(err) : c(opts.trim ? stdout.trim() : stdout));
});
}
@reflection
export class ComputeRepository {
public computedAdjaxedMatrix = async (outPath: string, cadEntity: string, entityId: string) => {
const envPath = '/home/idontsudo/t/framework/asp-review-app/server/computed/geometric_feasibility_predicate/env.json'
const computedScript = '/home/idontsudo/t/framework/asp-review-app/server/computed/geometric_feasibility_predicate/main.py'
const computedComand = 'freecadcmd'
const env = JSON.parse((await fs.readFile(envPath)).toString())
env['cadFilePath'] = cadEntity
env['outPath'] = outPath
await fs.writeFile(envPath, JSON.stringify(env))
// console.log(this._computedPath(computedScript))
exec(computedComand + ' ' + computedScript, { cwd: this._computedPath(computedScript) }).then((data) => {
console.log(data)
})
this.cadGeneration(cadEntity, entityId, outPath)
// if (stderr) {
// console.log(stderr)
// }
// console.log(stdout)
};
public computedWriteStability = async (assemblyFolder: string, buildNumber: string, id: string) => {
const computedScript = '/home/idontsudo/t/framework/cad_stability_input/main.py'
const computedComand = 'freecad'
const envPath = '/home/idontsudo/t/framework/cad_stability_input/env.json'
const env = JSON.parse((await fs.readFile(envPath)).toString())
env.assemblyFolder = assemblyFolder
env['projectId'] = id
env['buildNumber'] = buildNumber
env['assemblyFolder'] = assemblyFolder
env['resultURL'] = 'http://localhost:3002/assembly/stabilty/create/?id=' + id + '&' + 'buildNumber=' + buildNumber
await fs.writeFile(envPath, JSON.stringify(env))
await exec(computedComand + ' ' + computedScript, { cwd: this._computedPath(computedScript) })
}
private _computedPath(f: string) {
const file = path.basename(f);
const absolutPath = path.resolve(f)
return absolutPath.replace(file, '')
}
public cadGeneration = async (cadEntity, entity: string, outPath: string,) => {
const computedScript = '/home/idontsudo/t/framework/cad_generation/main.py'
const computedComand = 'freecad'
const envPath = '/home/idontsudo/t/framework/cad_generation/env.json'
const env = JSON.parse((await fs.readFile(envPath)).toString())
env.doc = cadEntity
env.projectId = entity
env.resultURL = "http://localhost:3002/assembly/save/out"
await fs.writeFile(envPath, JSON.stringify(env))
// /stabilty/create
exec(computedComand + ' ' + computedScript, { cwd: this._computedPath(computedScript) }).then((data) => {
console.log(data)
})
}
}

@@ -0,0 +1,87 @@
import { promises as fs } from 'fs';
import { dirname } from '../../app';
import fsSync from "fs";
import { autowired, reflection } from 'first-di';
import "reflect-metadata";
import { ComputeRepository } from './compute_repository';
import { ZipRepository } from './zip_repository';
@reflection
export class EntityRepository {
@autowired()
private readonly computedRepository: ComputeRepository;
@autowired()
private readonly zipRepository: ZipRepository;
private path: String = dirname + '/public/'
private getFileName(file: String) {
return file.slice(0, file.indexOf('.'))
}
public async getDir(path) {
return this._fullPath(await fs.readdir(path + ''), duplicatedDelete(this.path, path))
}
public isExistDirPath(path: String): boolean {
return fsSync.existsSync(path + '')
}
public async saveRootEntity(buffer: Buffer, name: string) {
const filePath = this.path + this.getFileName(name) + '/'
if (this.isExistDirPath(filePath)) {
await fs.rm(filePath, { recursive: true })
}
await fs.mkdir(filePath);
await fs.writeFile(filePath + name, buffer);
this.computedRepository.computedAdjaxedMatrix(filePath, filePath + name, this.getFileName(name))
}
public async getAllRootEntity() {
return await fs.readdir('' + this.path)
}
public async getEntityStorage(entity: string): Promise<String[]> | undefined {
return this._fullPath(await fs.readdir(this.path + entity), entity + '/')
}
private _fullPath(folderPath, helpElement = '') {
return folderPath.map((el) => this.path + helpElement + el)
}
public async readJson<T>(path) {
return JSON.parse((await fs.readFile(path)).toString())
}
public async saveGeration(data: Buffer, id: String) {
const rootFolderPath = '' + this.path + id + '/'
console.log(rootFolderPath)
this.zipRepository.archive(rootFolderPath, data)
}
public computedStability(id: string, buildNumber: string) {
const assemblyFolder = this.path + id + '/generation/'
this.computedRepository.computedWriteStability(assemblyFolder, buildNumber, id)
}
public async saveStability(zip: Buffer, id:string, buildNumber:string) {
const filePath = await this.zipRepository.archive(this.path as string, zip)
// const buildNumber = data['buildNumber']
const assemblyFolder = this.path + id + '/generation/stability/'
if (!this.isExistDirPath(assemblyFolder)) {
await fs.mkdir(assemblyFolder);
}
await this.zipRepository.archive(assemblyFolder as string, zip, buildNumber)
fs.rmdir(filePath + '/', { recursive: true})
}
}
function duplicatedDelete(strChild: String, strMain: String) {
let result = ''
for (let i = 0; i < strMain.length; i++) {
if (!(strMain[i] === strChild[i])) {
result += strMain[i]
}
}
return result
}

@@ -0,0 +1,13 @@
import StreamZip from 'node-stream-zip';
import { promises as fs } from 'fs';
import decompress from 'decompress'
export class ZipRepository {
public async archive(outhPath: string, zipFile: Buffer, name='generation') {
const entry = outhPath + 'archive.zip'
await fs.writeFile(entry, zipFile)
await decompress(entry, outhPath + name);
fs.rm(entry)
return outhPath + name
}
}

@@ -0,0 +1,5 @@
import { AssemblyRoute } from "../../features/assembly_create/assembly_create_route";
import { AssemblyPreviewsRoute } from "../../features/assembly_previews/assembly_previews_route";
export const routes = [new AssemblyRoute(), new AssemblyPreviewsRoute()];

@@ -0,0 +1,101 @@
import { NextFunction, Request, Response } from "express";
import { autowired } from "first-di";
import { async } from "node-stream-zip";
import { EntityRepository } from "../../core/repository/entity_repository";
import { IFile } from "./model/zip_files_model";
export class AssemblyController {
@autowired()
private readonly entityRepository: EntityRepository;
public createRootEntity = (
req: Request,
res: Response,
next: NextFunction
) => {
const file = req.files;
const cadFile = file["freecad"] as IFile;
this.entityRepository.saveRootEntity(cadFile.data, cadFile.name);
res.status(200).json("ok");
return;
};
public getAllAssembly = (
req: Request,
res: Response,
next: NextFunction
): void => { };
public createAssembly = (
req: Request,
res: Response,
next: NextFunction
): void => {
try {
const file = req.files.freecad as IFile;
const buffer = file.data as Buffer;
this.entityRepository.saveRootEntity(file.data, file.name);
res.sendStatus(200);
} catch (error) {
next(error);
}
};
public test = (req: Request,
res: Response,
next: NextFunction) => {
try {
const file = req.files;
const generation = file["zip"] as IFile;
const id = 'cubes';
this.entityRepository.saveGeration(generation.data, id)
res.sendStatus(200);
} catch (error) {
next(error);
}
}
public stabilityComputed = async (
req: Request,
res: Response,
next: NextFunction
) => {
try {
// const file = req.files;
console.log(req.body)
const id = req.body.id;
// console.log(req.query.id)
const buildNumber = req.body.buildNumber;
console.log(buildNumber)
console.log(id)
// const generation = file["zip"] as IFile;
// const id = 'cubes';
await this.entityRepository.computedStability(id, buildNumber)
res.sendStatus(200);
} catch (error) {
next(error);
}
}
public stabilityCreate = (
req: Request,
res: Response,
next: NextFunction
) => {
try {
const files = req.files;
const zip = files['zip'] as IFile
const query = req.query as any
this.entityRepository.saveStability(zip.data, query.id, query.buildNumber)
res.sendStatus(200);
} catch (error) {
next(error);
}
}
}

@@ -0,0 +1,46 @@
import express, { Router } from "express";
import { Routes } from "../../core/interfaces/router";
import { autowired } from "first-di";
import { AssemblyController } from "./assembly_create_controller";
import validationMiddleware from "../../core/middlewares/ValidationMiddleware";
import { CadFilesModel } from "./model/zip_files_model";
export class AssemblyRoute implements Routes {
public path = "/assembly";
public router = Router();
@autowired()
private readonly assemblyController: AssemblyController;
constructor() {
this.initializeRoutes();
}
private initializeRoutes() {
this.router.post(
`${this.path}`,
validationMiddleware(CadFilesModel, "files"),
this.assemblyController.createAssembly
);
this.router.post(
`${this.path}/save/out`,
// validationMiddleware(CadFilesModel, "files"),
this.assemblyController.test
);
this.router.get(`${this.path}`, this.assemblyController.getAllAssembly);
this.router.post(
`${this.path}/create`,
this.assemblyController.createRootEntity
);
this.router.post(
`${this.path}/stability/write/computed`,
this.assemblyController.stabilityComputed
);
this.router.post(
`${this.path}/stabilty/create/`,
this.assemblyController.stabilityCreate
);
}
}

@@ -0,0 +1,23 @@
import { IsArray, IsObject } from "class-validator";
export interface IFile {
name: string,
data: Buffer,
size: Number,
encoding: string,
tempFilePath: string,
truncated: Boolean,
mimetype: string,
md5: string,
}
interface ICadFileModel {
freecad: IFile;
}
export class CadFilesModel implements ICadFileModel {
@IsObject()
public freecad: IFile;
}

@@ -0,0 +1,156 @@
import { NextFunction, Request, Response } from "express";
import { autowired } from "first-di";
import { EntityRepository } from "../../core/repository/entity_repository";
import { port } from "../../server";
import { memoAsync } from "../../core/helper/memorization";
export class AssemblyPreviewsController {
@autowired()
private readonly entityRepository: EntityRepository;
public getAllAssembly = async (
req: Request,
res: Response,
next: NextFunction
): Promise<void> => {
try {
res.send(await this.entityRepository.getAllRootEntity());
} catch (error) {
next(error);
}
};
public getAssemblySubsequenceById = async (
req: Request,
res: Response,
next: NextFunction
): Promise<void> => {
try {
const entity = await this.entityRepository.getEntityStorage(
req.params.id
);
const aspUsage = Number(req.query.count) - 1;
if (entity === undefined) {
res.status(404).json("entity not found");
return;
}
res.json(
await this._assemblyCompute(
aspUsage,
entity,
this.entityRepository,
req.hostname,
req.params.id
)
);
} catch (error) {
next(error);
}
};
public getAssemblyInsertionSequenceById = async (
req: Request,
res: Response,
next: NextFunction
) => {
const entity = await this.entityRepository.getEntityStorage(req.params.id);
const aspUsage = Number(req.query.count);
const assemblyFolder = entity.find((el) => {
return el.match("assembly");
});
const asmCountFolder = "0000" + aspUsage;
const assemblyDirPath = assemblyFolder + "/" + asmCountFolder;
if (!this.entityRepository.isExistDirPath(assemblyDirPath)) {
return res.status(400).json({ error: "bad request" });
}
const assemblyProcessDir = await this.entityRepository.getDir(
assemblyDirPath + "/process/"
);
const firstObj = assemblyProcessDir.find((el) => {
return el.match("1.obj");
});
const zeroObj = await assemblyProcessDir.find((el) => {
return el.match("0.obj");
});
const insertions = await this.entityRepository.readJson(
assemblyDirPath + "/" + "insertion_path.json"
);
if (
insertions === undefined ||
zeroObj === undefined ||
firstObj === undefined
) {
res.status(400).json({ error: "bad" });
return;
}
res.json({
offset: aspUsage,
count: 4,
parent: `http://${req.hostname}:${port}/${
req.params.id
}/assembly/${asmCountFolder}/${0}.obj`,
child: `http://${req.hostname}:${port}/${
req.params.id
}/assembly/${asmCountFolder}/${1}.obj`,
insertions: insertions,
});
return;
};
private async _assemblyCompute(
id: number,
entityFolder: Array<String>,
repository: EntityRepository,
host: string,
entity: string
) {
const assemblySequence = entityFolder.find((el) => {
return el.match("step-structure.json");
});
const assembly: Array<String> = await repository.readJson<Array<String>>(
assemblySequence
);
if (id == 0) {
return {
assembly: [
`http://${host}:${port}/${entity}/sdf/meshes/${assembly[id]}.obj`,
],
offset: 1,
count: assemblySequence.length,
};
} else {
const assemblyIndexed = assembly
.map((_item, index) => {
if (index <= id) {
return index;
}
})
.filter((el) => el != undefined);
return {
assembly: assemblyIndexed.map((el) => {
return `http://${host}:${port}/${entity}/sdf/meshes/${assembly[el]}.obj`;
}),
count: assemblyIndexed.length,
offset: assembly.length,
};
}
}
}

@@ -0,0 +1,29 @@
import express, { Router } from 'express';
import { Routes } from '../../core/interfaces/router';
import { autowired } from 'first-di';
// import { AssemblyController } from './assembly_create_controller';
import path from 'path';
import { dirname } from '../../app';
import validationMiddleware from '../../core/middlewares/ValidationMiddleware';
import { AssemblyPreviewsController } from './assembly_previews_controller';
// import { CadFilesModel } from './model/zip_files_model';
export class AssemblyPreviewsRoute implements Routes {
public path = '/assembly/preview/';
public router = Router();
@autowired()
private readonly assemblyPreviewsController: AssemblyPreviewsController;
constructor() {
this.initializeRoutes();
}
private initializeRoutes() {
this.router.get(`${this.path}`, this.assemblyPreviewsController.getAllAssembly);
// this.router.get(`${this.path}`)
this.router.get(`${this.path}subsequence/:id`, this.assemblyPreviewsController.getAssemblySubsequenceById)
this.router.get(`${this.path}insertion_sequence/:id`, this.assemblyPreviewsController.getAssemblyInsertionSequenceById)
// this.router.post(`${this.path}`, validationMiddleware(CadFilesModel, 'files'), this.assemblyController.createAssembly)
// this.router.get(`${this.path}`, this.assemblyController.getAllAssembly)
}
}

@@ -0,0 +1,14 @@
import { App } from "./app";
import { routes } from "./core/routes/routes";
import "reflect-metadata";
export const port = 3002
const app = new App(routes,port);
function main() {
app.listen();
}
main();

@@ -0,0 +1,28 @@
{
"compileOnSave": false,
"compilerOptions": {
"target": "es2017",
"lib": ["es2017", "esnext.asynciterable"],
"typeRoots": ["node_modules/@types"],
"allowSyntheticDefaultImports": true,
"experimentalDecorators": true,
"emitDecoratorMetadata": true,
"forceConsistentCasingInFileNames": true,
"moduleResolution": "node",
"module": "ESNext",
"pretty": true,
"sourceMap": true,
"declaration": true,
"outDir": "./dist",
"allowJs": true,
"noEmit": false,
"esModuleInterop": true,
"resolveJsonModule": true,
},
"ts-node": {
"esm": true,
"experimentalSpecifierResolution": "node",
},
"include": ["src/**/*.ts", "src/**/*.json", ".env"],
"exclude": ["node_modules"]
}

@@ -0,0 +1,46 @@
# Getting Started with Create React App
This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app).
## Available Scripts
In the project directory, you can run:
### `yarn start`
Runs the app in the development mode.\
Open [http://localhost:3000](http://localhost:3000) to view it in the browser.
The page will reload if you make edits.\
You will also see any lint errors in the console.
### `yarn test`
Launches the test runner in the interactive watch mode.\
See the section about [running tests](https://facebook.github.io/create-react-app/docs/running-tests) for more information.
### `yarn build`
Builds the app for production to the `build` folder.\
It correctly bundles React in production mode and optimizes the build for the best performance.
The build is minified and the filenames include the hashes.\
Your app is ready to be deployed!
See the section about [deployment](https://facebook.github.io/create-react-app/docs/deployment) for more information.
### `yarn eject`
**Note: this is a one-way operation. Once you `eject`, you can't go back!**
If you aren't satisfied with the build tool and configuration choices, you can `eject` at any time. This command will remove the single build dependency from your project.
Instead, it will copy all the configuration files and the transitive dependencies (webpack, Babel, ESLint, etc) right into your project so you have full control over them. All of the commands except `eject` will still work, but they will point to the copied scripts so you can tweak them. At this point you're on your own.
You don't have to ever use `eject`. The curated feature set is suitable for small and middle deployments, and you shouldn't feel obligated to use this feature. However we understand that this tool wouldn't be useful if you couldn't customize it when you are ready for it.
## Learn More
You can learn more in the [Create React App documentation](https://facebook.github.io/create-react-app/docs/getting-started).
To learn React, check out the [React documentation](https://reactjs.org/).
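Note that the `package.json` added later in this diff wires these scripts slightly differently: the development server is started with `yarn dev` (which runs `node scripts/start.js`), while `yarn build` and `yarn test` run `node scripts/build.js` and `node scripts/test.js` respectively, so `yarn start` as described above may not be available in this project.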

View file

@ -0,0 +1,104 @@
'use strict';
const fs = require('fs');
const path = require('path');
const paths = require('./paths');
// Make sure that including paths.js after env.js will read .env variables.
delete require.cache[require.resolve('./paths')];
const NODE_ENV = process.env.NODE_ENV;
if (!NODE_ENV) {
throw new Error(
'The NODE_ENV environment variable is required but was not specified.'
);
}
// https://github.com/bkeepers/dotenv#what-other-env-files-can-i-use
const dotenvFiles = [
`${paths.dotenv}.${NODE_ENV}.local`,
// Don't include `.env.local` for `test` environment
// since normally you expect tests to produce the same
// results for everyone
NODE_ENV !== 'test' && `${paths.dotenv}.local`,
`${paths.dotenv}.${NODE_ENV}`,
paths.dotenv,
].filter(Boolean);
// Load environment variables from .env* files. Suppress warnings using silent
// if this file is missing. dotenv will never modify any environment variables
// that have already been set. Variable expansion is supported in .env files.
// https://github.com/motdotla/dotenv
// https://github.com/motdotla/dotenv-expand
dotenvFiles.forEach(dotenvFile => {
if (fs.existsSync(dotenvFile)) {
require('dotenv-expand')(
require('dotenv').config({
path: dotenvFile,
})
);
}
});
// We support resolving modules according to `NODE_PATH`.
// This lets you use absolute paths in imports inside large monorepos:
// https://github.com/facebook/create-react-app/issues/253.
// It works similar to `NODE_PATH` in Node itself:
// https://nodejs.org/api/modules.html#modules_loading_from_the_global_folders
// Note that unlike in Node, only *relative* paths from `NODE_PATH` are honored.
// Otherwise, we risk importing Node.js core modules into an app instead of webpack shims.
// https://github.com/facebook/create-react-app/issues/1023#issuecomment-265344421
// We also resolve them to make sure all tools using them work consistently.
const appDirectory = fs.realpathSync(process.cwd());
process.env.NODE_PATH = (process.env.NODE_PATH || '')
.split(path.delimiter)
.filter(folder => folder && !path.isAbsolute(folder))
.map(folder => path.resolve(appDirectory, folder))
.join(path.delimiter);
// Grab NODE_ENV and REACT_APP_* environment variables and prepare them to be
// injected into the application via DefinePlugin in webpack configuration.
const REACT_APP = /^REACT_APP_/i;
function getClientEnvironment(publicUrl) {
const raw = Object.keys(process.env)
.filter(key => REACT_APP.test(key))
.reduce(
(env, key) => {
env[key] = process.env[key];
return env;
},
{
// Useful for determining whether we're running in production mode.
// Most importantly, it switches React into the correct mode.
NODE_ENV: process.env.NODE_ENV || 'development',
// Useful for resolving the correct path to static assets in `public`.
// For example, <img src={process.env.PUBLIC_URL + '/img/logo.png'} />.
// This should only be used as an escape hatch. Normally you would put
// images into the `src` and `import` them in code to get their paths.
PUBLIC_URL: publicUrl,
// We support configuring the sockjs pathname during development.
// These settings let a developer run multiple simultaneous projects.
// They are used as the connection `hostname`, `pathname` and `port`
// in webpackHotDevClient. They are used as the `sockHost`, `sockPath`
// and `sockPort` options in webpack-dev-server.
WDS_SOCKET_HOST: process.env.WDS_SOCKET_HOST,
WDS_SOCKET_PATH: process.env.WDS_SOCKET_PATH,
WDS_SOCKET_PORT: process.env.WDS_SOCKET_PORT,
// Whether or not react-refresh is enabled.
// It is defined here so it is available in the webpackHotDevClient.
FAST_REFRESH: process.env.FAST_REFRESH !== 'false',
}
);
// Stringify all values so we can feed into webpack DefinePlugin
const stringified = {
'process.env': Object.keys(raw).reduce((env, key) => {
env[key] = JSON.stringify(raw[key]);
return env;
}, {}),
};
return { raw, stringified };
}
module.exports = getClientEnvironment;

View file

@ -0,0 +1,66 @@
'use strict';
const fs = require('fs');
const path = require('path');
const crypto = require('crypto');
const chalk = require('react-dev-utils/chalk');
const paths = require('./paths');
// Ensure the certificate and key provided are valid and if not
// throw an easy to debug error
function validateKeyAndCerts({ cert, key, keyFile, crtFile }) {
let encrypted;
try {
// publicEncrypt will throw an error with an invalid cert
encrypted = crypto.publicEncrypt(cert, Buffer.from('test'));
} catch (err) {
throw new Error(
`The certificate "${chalk.yellow(crtFile)}" is invalid.\n${err.message}`
);
}
try {
// privateDecrypt will throw an error with an invalid key
crypto.privateDecrypt(key, encrypted);
} catch (err) {
throw new Error(
`The certificate key "${chalk.yellow(keyFile)}" is invalid.\n${
err.message
}`
);
}
}
// Read file and throw an error if it doesn't exist
function readEnvFile(file, type) {
if (!fs.existsSync(file)) {
throw new Error(
`You specified ${chalk.cyan(
type
)} in your env, but the file "${chalk.yellow(file)}" can't be found.`
);
}
return fs.readFileSync(file);
}
// Get the https config
// Return cert files if provided in env, otherwise just true or false
function getHttpsConfig() {
const { SSL_CRT_FILE, SSL_KEY_FILE, HTTPS } = process.env;
const isHttps = HTTPS === 'true';
if (isHttps && SSL_CRT_FILE && SSL_KEY_FILE) {
const crtFile = path.resolve(paths.appPath, SSL_CRT_FILE);
const keyFile = path.resolve(paths.appPath, SSL_KEY_FILE);
const config = {
cert: readEnvFile(crtFile, 'SSL_CRT_FILE'),
key: readEnvFile(keyFile, 'SSL_KEY_FILE'),
};
validateKeyAndCerts({ ...config, keyFile, crtFile });
return config;
}
return isHttps;
}
module.exports = getHttpsConfig;

View file

@ -0,0 +1,29 @@
'use strict';
const babelJest = require('babel-jest').default;
const hasJsxRuntime = (() => {
if (process.env.DISABLE_NEW_JSX_TRANSFORM === 'true') {
return false;
}
try {
require.resolve('react/jsx-runtime');
return true;
} catch (e) {
return false;
}
})();
module.exports = babelJest.createTransformer({
presets: [
[
require.resolve('babel-preset-react-app'),
{
runtime: hasJsxRuntime ? 'automatic' : 'classic',
},
],
],
babelrc: false,
configFile: false,
});

View file

@ -0,0 +1,14 @@
'use strict';
// This is a custom Jest transformer turning style imports into empty objects.
// http://facebook.github.io/jest/docs/en/webpack.html
module.exports = {
process() {
return 'module.exports = {};';
},
getCacheKey() {
// The output is always the same.
return 'cssTransform';
},
};

View file

@ -0,0 +1,40 @@
'use strict';
const path = require('path');
const camelcase = require('camelcase');
// This is a custom Jest transformer turning file imports into filenames.
// http://facebook.github.io/jest/docs/en/webpack.html
module.exports = {
process(src, filename) {
const assetFilename = JSON.stringify(path.basename(filename));
if (filename.match(/\.svg$/)) {
// Based on how SVGR generates a component name:
// https://github.com/smooth-code/svgr/blob/01b194cf967347d43d4cbe6b434404731b87cf27/packages/core/src/state.js#L6
const pascalCaseFilename = camelcase(path.parse(filename).name, {
pascalCase: true,
});
const componentName = `Svg${pascalCaseFilename}`;
return `const React = require('react');
module.exports = {
__esModule: true,
default: ${assetFilename},
ReactComponent: React.forwardRef(function ${componentName}(props, ref) {
return {
$$typeof: Symbol.for('react.element'),
type: 'svg',
ref: ref,
key: null,
props: Object.assign({}, props, {
children: ${assetFilename}
})
};
}),
};`;
}
return `module.exports = ${assetFilename};`;
},
};

View file

@ -0,0 +1,134 @@
'use strict';
const fs = require('fs');
const path = require('path');
const paths = require('./paths');
const chalk = require('react-dev-utils/chalk');
const resolve = require('resolve');
/**
* Get additional module paths based on the baseUrl of a compilerOptions object.
*
* @param {Object} options
*/
function getAdditionalModulePaths(options = {}) {
const baseUrl = options.baseUrl;
if (!baseUrl) {
return '';
}
const baseUrlResolved = path.resolve(paths.appPath, baseUrl);
// We don't need to do anything if `baseUrl` is set to `node_modules`. This is
// the default behavior.
if (path.relative(paths.appNodeModules, baseUrlResolved) === '') {
return null;
}
// Allow the user set the `baseUrl` to `appSrc`.
if (path.relative(paths.appSrc, baseUrlResolved) === '') {
return [paths.appSrc];
}
// If the path is equal to the root directory we ignore it here.
// We don't want to allow importing from the root directly as source files are
// not transpiled outside of `src`. We do allow importing them with the
// absolute path (e.g. `src/Components/Button.js`) but we set that up with
// an alias.
if (path.relative(paths.appPath, baseUrlResolved) === '') {
return null;
}
// Otherwise, throw an error.
throw new Error(
chalk.red.bold(
"Your project's `baseUrl` can only be set to `src` or `node_modules`." +
' Create React App does not support other values at this time.'
)
);
}
/**
* Get webpack aliases based on the baseUrl of a compilerOptions object.
*
* @param {*} options
*/
function getWebpackAliases(options = {}) {
const baseUrl = options.baseUrl;
if (!baseUrl) {
return {};
}
const baseUrlResolved = path.resolve(paths.appPath, baseUrl);
if (path.relative(paths.appPath, baseUrlResolved) === '') {
return {
src: paths.appSrc,
};
}
}
/**
* Get jest aliases based on the baseUrl of a compilerOptions object.
*
* @param {*} options
*/
function getJestAliases(options = {}) {
const baseUrl = options.baseUrl;
if (!baseUrl) {
return {};
}
const baseUrlResolved = path.resolve(paths.appPath, baseUrl);
if (path.relative(paths.appPath, baseUrlResolved) === '') {
return {
'^src/(.*)$': '<rootDir>/src/$1',
};
}
}
function getModules() {
// Check if TypeScript is setup
const hasTsConfig = fs.existsSync(paths.appTsConfig);
const hasJsConfig = fs.existsSync(paths.appJsConfig);
if (hasTsConfig && hasJsConfig) {
throw new Error(
'You have both a tsconfig.json and a jsconfig.json. If you are using TypeScript please remove your jsconfig.json file.'
);
}
let config;
// If there's a tsconfig.json we assume it's a
// TypeScript project and set up the config
// based on tsconfig.json
if (hasTsConfig) {
const ts = require(resolve.sync('typescript', {
basedir: paths.appNodeModules,
}));
config = ts.readConfigFile(paths.appTsConfig, ts.sys.readFile).config;
// Otherwise we'll check if there is jsconfig.json
// for non TS projects.
} else if (hasJsConfig) {
config = require(paths.appJsConfig);
}
config = config || {};
const options = config.compilerOptions || {};
const additionalModulePaths = getAdditionalModulePaths(options);
return {
additionalModulePaths: additionalModulePaths,
webpackAliases: getWebpackAliases(options),
jestAliases: getJestAliases(options),
hasTsConfig,
};
}
module.exports = getModules();

View file

@ -0,0 +1,77 @@
'use strict';
const path = require('path');
const fs = require('fs');
const getPublicUrlOrPath = require('react-dev-utils/getPublicUrlOrPath');
// Make sure any symlinks in the project folder are resolved:
// https://github.com/facebook/create-react-app/issues/637
const appDirectory = fs.realpathSync(process.cwd());
const resolveApp = relativePath => path.resolve(appDirectory, relativePath);
// We use `PUBLIC_URL` environment variable or "homepage" field to infer
// "public path" at which the app is served.
// webpack needs to know it to put the right <script> hrefs into HTML even in
// single-page apps that may serve index.html for nested URLs like /todos/42.
// We can't use a relative path in HTML because we don't want to load something
// like /todos/42/static/js/bundle.7289d.js. We have to know the root.
const publicUrlOrPath = getPublicUrlOrPath(
process.env.NODE_ENV === 'development',
require(resolveApp('package.json')).homepage,
process.env.PUBLIC_URL
);
const buildPath = process.env.BUILD_PATH || 'build';
const moduleFileExtensions = [
'web.mjs',
'mjs',
'web.js',
'js',
'web.ts',
'ts',
'web.tsx',
'tsx',
'json',
'web.jsx',
'jsx',
];
// Resolve file paths in the same order as webpack
const resolveModule = (resolveFn, filePath) => {
const extension = moduleFileExtensions.find(extension =>
fs.existsSync(resolveFn(`${filePath}.${extension}`))
);
if (extension) {
return resolveFn(`${filePath}.${extension}`);
}
return resolveFn(`${filePath}.js`);
};
// config after eject: we're in ./config/
module.exports = {
dotenv: resolveApp('.env'),
appPath: resolveApp('.'),
appBuild: resolveApp(buildPath),
appPublic: resolveApp('public'),
appHtml: resolveApp('public/index.html'),
appIndexJs: resolveModule(resolveApp, 'src/index'),
appPackageJson: resolveApp('package.json'),
appSrc: resolveApp('src'),
appTsConfig: resolveApp('tsconfig.json'),
appJsConfig: resolveApp('jsconfig.json'),
yarnLockFile: resolveApp('yarn.lock'),
testsSetup: resolveModule(resolveApp, 'src/setupTests'),
proxySetup: resolveApp('src/setupProxy.js'),
appNodeModules: resolveApp('node_modules'),
appWebpackCache: resolveApp('node_modules/.cache'),
appTsBuildInfoFile: resolveApp('node_modules/.cache/tsconfig.tsbuildinfo'),
swSrc: resolveModule(resolveApp, 'src/service-worker'),
publicUrlOrPath,
};
module.exports.moduleFileExtensions = moduleFileExtensions;

View file

@ -0,0 +1,755 @@
'use strict';
const fs = require('fs');
const path = require('path');
const webpack = require('webpack');
const resolve = require('resolve');
const HtmlWebpackPlugin = require('html-webpack-plugin');
const CaseSensitivePathsPlugin = require('case-sensitive-paths-webpack-plugin');
const InlineChunkHtmlPlugin = require('react-dev-utils/InlineChunkHtmlPlugin');
const TerserPlugin = require('terser-webpack-plugin');
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
const CssMinimizerPlugin = require('css-minimizer-webpack-plugin');
const { WebpackManifestPlugin } = require('webpack-manifest-plugin');
const InterpolateHtmlPlugin = require('react-dev-utils/InterpolateHtmlPlugin');
const WorkboxWebpackPlugin = require('workbox-webpack-plugin');
const ModuleScopePlugin = require('react-dev-utils/ModuleScopePlugin');
const getCSSModuleLocalIdent = require('react-dev-utils/getCSSModuleLocalIdent');
const ESLintPlugin = require('eslint-webpack-plugin');
const paths = require('./paths');
const modules = require('./modules');
const getClientEnvironment = require('./env');
const ModuleNotFoundPlugin = require('react-dev-utils/ModuleNotFoundPlugin');
const ForkTsCheckerWebpackPlugin =
process.env.TSC_COMPILE_ON_ERROR === 'true'
? require('react-dev-utils/ForkTsCheckerWarningWebpackPlugin')
: require('react-dev-utils/ForkTsCheckerWebpackPlugin');
const ReactRefreshWebpackPlugin = require('@pmmmwh/react-refresh-webpack-plugin');
const createEnvironmentHash = require('./webpack/persistentCache/createEnvironmentHash');
// Source maps are resource heavy and can cause out of memory issue for large source files.
const shouldUseSourceMap = process.env.GENERATE_SOURCEMAP !== 'false';
const reactRefreshRuntimeEntry = require.resolve('react-refresh/runtime');
const reactRefreshWebpackPluginRuntimeEntry = require.resolve(
'@pmmmwh/react-refresh-webpack-plugin'
);
const babelRuntimeEntry = require.resolve('babel-preset-react-app');
const babelRuntimeEntryHelpers = require.resolve(
'@babel/runtime/helpers/esm/assertThisInitialized',
{ paths: [babelRuntimeEntry] }
);
const babelRuntimeRegenerator = require.resolve('@babel/runtime/regenerator', {
paths: [babelRuntimeEntry],
});
// Some apps do not need the benefits of saving a web request, so not inlining the chunk
// makes for a smoother build process.
const shouldInlineRuntimeChunk = process.env.INLINE_RUNTIME_CHUNK !== 'false';
const emitErrorsAsWarnings = process.env.ESLINT_NO_DEV_ERRORS === 'true';
const disableESLintPlugin = process.env.DISABLE_ESLINT_PLUGIN === 'true';
const imageInlineSizeLimit = parseInt(
process.env.IMAGE_INLINE_SIZE_LIMIT || '10000'
);
// Check if TypeScript is setup
const useTypeScript = fs.existsSync(paths.appTsConfig);
// Check if Tailwind config exists
const useTailwind = fs.existsSync(
path.join(paths.appPath, 'tailwind.config.js')
);
// Get the path to the uncompiled service worker (if it exists).
const swSrc = paths.swSrc;
// style files regexes
const cssRegex = /\.css$/;
const cssModuleRegex = /\.module\.css$/;
const sassRegex = /\.(scss|sass)$/;
const sassModuleRegex = /\.module\.(scss|sass)$/;
const hasJsxRuntime = (() => {
if (process.env.DISABLE_NEW_JSX_TRANSFORM === 'true') {
return false;
}
try {
require.resolve('react/jsx-runtime');
return true;
} catch (e) {
return false;
}
})();
// This is the production and development configuration.
// It is focused on developer experience, fast rebuilds, and a minimal bundle.
module.exports = function (webpackEnv) {
const isEnvDevelopment = webpackEnv === 'development';
const isEnvProduction = webpackEnv === 'production';
// Variable used for enabling profiling in Production
// passed into alias object. Uses a flag if passed into the build command
const isEnvProductionProfile =
isEnvProduction && process.argv.includes('--profile');
// We will provide `paths.publicUrlOrPath` to our app
// as %PUBLIC_URL% in `index.html` and `process.env.PUBLIC_URL` in JavaScript.
// Omit trailing slash as %PUBLIC_URL%/xyz looks better than %PUBLIC_URL%xyz.
// Get environment variables to inject into our app.
const env = getClientEnvironment(paths.publicUrlOrPath.slice(0, -1));
const shouldUseReactRefresh = env.raw.FAST_REFRESH;
// common function to get style loaders
const getStyleLoaders = (cssOptions, preProcessor) => {
const loaders = [
isEnvDevelopment && require.resolve('style-loader'),
isEnvProduction && {
loader: MiniCssExtractPlugin.loader,
// css is located in `static/css`, use '../../' to locate index.html folder
// in production `paths.publicUrlOrPath` can be a relative path
options: paths.publicUrlOrPath.startsWith('.')
? { publicPath: '../../' }
: {},
},
{
loader: require.resolve('css-loader'),
options: cssOptions,
},
{
// Options for PostCSS as we reference these options twice
// Adds vendor prefixing based on your specified browser support in
// package.json
loader: require.resolve('postcss-loader'),
options: {
postcssOptions: {
// Necessary for external CSS imports to work
// https://github.com/facebook/create-react-app/issues/2677
ident: 'postcss',
config: false,
plugins: !useTailwind
? [
'postcss-flexbugs-fixes',
[
'postcss-preset-env',
{
autoprefixer: {
flexbox: 'no-2009',
},
stage: 3,
},
],
// Adds PostCSS Normalize as the reset css with default options,
// so that it honors browserslist config in package.json
// which in turn lets users customize the target behavior as per their needs.
'postcss-normalize',
]
: [
'tailwindcss',
'postcss-flexbugs-fixes',
[
'postcss-preset-env',
{
autoprefixer: {
flexbox: 'no-2009',
},
stage: 3,
},
],
],
},
sourceMap: isEnvProduction ? shouldUseSourceMap : isEnvDevelopment,
},
},
].filter(Boolean);
if (preProcessor) {
loaders.push(
{
loader: require.resolve('resolve-url-loader'),
options: {
sourceMap: isEnvProduction ? shouldUseSourceMap : isEnvDevelopment,
root: paths.appSrc,
},
},
{
loader: require.resolve(preProcessor),
options: {
sourceMap: true,
},
}
);
}
return loaders;
};
return {
target: ['browserslist'],
// Webpack noise constrained to errors and warnings
stats: 'errors-warnings',
mode: isEnvProduction ? 'production' : isEnvDevelopment && 'development',
// Stop compilation early in production
bail: isEnvProduction,
devtool: isEnvProduction
? shouldUseSourceMap
? 'source-map'
: false
: isEnvDevelopment && 'cheap-module-source-map',
// These are the "entry points" to our application.
// This means they will be the "root" imports that are included in JS bundle.
entry: paths.appIndexJs,
output: {
// The build folder.
path: paths.appBuild,
// Add /* filename */ comments to generated require()s in the output.
pathinfo: isEnvDevelopment,
// There will be one main bundle, and one file per asynchronous chunk.
// In development, it does not produce real files.
filename: isEnvProduction
? 'static/js/[name].[contenthash:8].js'
: isEnvDevelopment && 'static/js/bundle.js',
// There are also additional JS chunk files if you use code splitting.
chunkFilename: isEnvProduction
? 'static/js/[name].[contenthash:8].chunk.js'
: isEnvDevelopment && 'static/js/[name].chunk.js',
assetModuleFilename: 'static/media/[name].[hash][ext]',
// webpack uses `publicPath` to determine where the app is being served from.
// It requires a trailing slash, or the file assets will get an incorrect path.
// We inferred the "public path" (such as / or /my-project) from homepage.
publicPath: paths.publicUrlOrPath,
// Point sourcemap entries to original disk location (format as URL on Windows)
devtoolModuleFilenameTemplate: isEnvProduction
? info =>
path
.relative(paths.appSrc, info.absoluteResourcePath)
.replace(/\\/g, '/')
: isEnvDevelopment &&
(info => path.resolve(info.absoluteResourcePath).replace(/\\/g, '/')),
},
cache: {
type: 'filesystem',
version: createEnvironmentHash(env.raw),
cacheDirectory: paths.appWebpackCache,
store: 'pack',
buildDependencies: {
defaultWebpack: ['webpack/lib/'],
config: [__filename],
tsconfig: [paths.appTsConfig, paths.appJsConfig].filter(f =>
fs.existsSync(f)
),
},
},
infrastructureLogging: {
level: 'none',
},
optimization: {
minimize: isEnvProduction,
minimizer: [
// This is only used in production mode
new TerserPlugin({
terserOptions: {
parse: {
// We want terser to parse ecma 8 code. However, we don't want it
// to apply any minification steps that turns valid ecma 5 code
// into invalid ecma 5 code. This is why the 'compress' and 'output'
// sections only apply transformations that are ecma 5 safe
// https://github.com/facebook/create-react-app/pull/4234
ecma: 8,
},
compress: {
ecma: 5,
warnings: false,
// Disabled because of an issue with Uglify breaking seemingly valid code:
// https://github.com/facebook/create-react-app/issues/2376
// Pending further investigation:
// https://github.com/mishoo/UglifyJS2/issues/2011
comparisons: false,
// Disabled because of an issue with Terser breaking valid code:
// https://github.com/facebook/create-react-app/issues/5250
// Pending further investigation:
// https://github.com/terser-js/terser/issues/120
inline: 2,
},
mangle: {
safari10: true,
},
// Added for profiling in devtools
keep_classnames: isEnvProductionProfile,
keep_fnames: isEnvProductionProfile,
output: {
ecma: 5,
comments: false,
// Turned on because emoji and regex is not minified properly using default
// https://github.com/facebook/create-react-app/issues/2488
ascii_only: true,
},
},
}),
// This is only used in production mode
new CssMinimizerPlugin(),
],
},
resolve: {
// This allows you to set a fallback for where webpack should look for modules.
// We placed these paths second because we want `node_modules` to "win"
// if there are any conflicts. This matches Node resolution mechanism.
// https://github.com/facebook/create-react-app/issues/253
modules: ['node_modules', paths.appNodeModules].concat(
modules.additionalModulePaths || []
),
// These are the reasonable defaults supported by the Node ecosystem.
// We also include JSX as a common component filename extension to support
// some tools, although we do not recommend using it, see:
// https://github.com/facebook/create-react-app/issues/290
// `web` extension prefixes have been added for better support
// for React Native Web.
extensions: paths.moduleFileExtensions
.map(ext => `.${ext}`)
.filter(ext => useTypeScript || !ext.includes('ts')),
alias: {
// Support React Native Web
// https://www.smashingmagazine.com/2016/08/a-glimpse-into-the-future-with-react-native-for-web/
'react-native': 'react-native-web',
// Allows for better profiling with ReactDevTools
...(isEnvProductionProfile && {
'react-dom$': 'react-dom/profiling',
'scheduler/tracing': 'scheduler/tracing-profiling',
}),
...(modules.webpackAliases || {}),
},
plugins: [
// Prevents users from importing files from outside of src/ (or node_modules/).
// This often causes confusion because we only process files within src/ with babel.
// To fix this, we prevent you from importing files out of src/ -- if you'd like to,
// please link the files into your node_modules/ and let module-resolution kick in.
// Make sure your source files are compiled, as they will not be processed in any way.
new ModuleScopePlugin(paths.appSrc, [
paths.appPackageJson,
reactRefreshRuntimeEntry,
reactRefreshWebpackPluginRuntimeEntry,
babelRuntimeEntry,
babelRuntimeEntryHelpers,
babelRuntimeRegenerator,
]),
],
},
module: {
strictExportPresence: true,
rules: [
// Handle node_modules packages that contain sourcemaps
shouldUseSourceMap && {
enforce: 'pre',
exclude: /@babel(?:\/|\\{1,2})runtime/,
test: /\.(js|mjs|jsx|ts|tsx|css)$/,
loader: require.resolve('source-map-loader'),
},
{
// "oneOf" will traverse all following loaders until one will
// match the requirements. When no loader matches it will fall
// back to the "file" loader at the end of the loader list.
oneOf: [
// TODO: Merge this config once `image/avif` is in the mime-db
// https://github.com/jshttp/mime-db
{
test: [/\.avif$/],
type: 'asset',
mimetype: 'image/avif',
parser: {
dataUrlCondition: {
maxSize: imageInlineSizeLimit,
},
},
},
// "url" loader works like "file" loader except that it embeds assets
// smaller than specified limit in bytes as data URLs to avoid requests.
// A missing `test` is equivalent to a match.
{
test: [/\.bmp$/, /\.gif$/, /\.jpe?g$/, /\.png$/],
type: 'asset',
parser: {
dataUrlCondition: {
maxSize: imageInlineSizeLimit,
},
},
},
{
test: /\.svg$/,
use: [
{
loader: require.resolve('@svgr/webpack'),
options: {
prettier: false,
svgo: false,
svgoConfig: {
plugins: [{ removeViewBox: false }],
},
titleProp: true,
ref: true,
},
},
{
loader: require.resolve('file-loader'),
options: {
name: 'static/media/[name].[hash].[ext]',
},
},
],
issuer: {
and: [/\.(ts|tsx|js|jsx|md|mdx)$/],
},
},
// Process application JS with Babel.
// The preset includes JSX, Flow, TypeScript, and some ESnext features.
{
test: /\.(js|mjs|jsx|ts|tsx)$/,
include: paths.appSrc,
loader: require.resolve('babel-loader'),
options: {
customize: require.resolve(
'babel-preset-react-app/webpack-overrides'
),
presets: [
[
require.resolve('babel-preset-react-app'),
{
runtime: hasJsxRuntime ? 'automatic' : 'classic',
},
],
],
plugins: [
isEnvDevelopment &&
shouldUseReactRefresh &&
require.resolve('react-refresh/babel'),
].filter(Boolean),
// This is a feature of `babel-loader` for webpack (not Babel itself).
// It enables caching results in ./node_modules/.cache/babel-loader/
// directory for faster rebuilds.
cacheDirectory: true,
// See #6846 for context on why cacheCompression is disabled
cacheCompression: false,
compact: isEnvProduction,
},
},
// Process any JS outside of the app with Babel.
// Unlike the application JS, we only compile the standard ES features.
{
test: /\.(js|mjs)$/,
exclude: /@babel(?:\/|\\{1,2})runtime/,
loader: require.resolve('babel-loader'),
options: {
babelrc: false,
configFile: false,
compact: false,
presets: [
[
require.resolve('babel-preset-react-app/dependencies'),
{ helpers: true },
],
],
cacheDirectory: true,
// See #6846 for context on why cacheCompression is disabled
cacheCompression: false,
// Babel sourcemaps are needed for debugging into node_modules
// code. Without the options below, debuggers like VSCode
// show incorrect code and set breakpoints on the wrong lines.
sourceMaps: shouldUseSourceMap,
inputSourceMap: shouldUseSourceMap,
},
},
// "postcss" loader applies autoprefixer to our CSS.
// "css" loader resolves paths in CSS and adds assets as dependencies.
// "style" loader turns CSS into JS modules that inject <style> tags.
// In production, we use MiniCSSExtractPlugin to extract that CSS
// to a file, but in development "style" loader enables hot editing
// of CSS.
// By default we support CSS Modules with the extension .module.css
{
test: cssRegex,
exclude: cssModuleRegex,
use: getStyleLoaders({
importLoaders: 1,
sourceMap: isEnvProduction
? shouldUseSourceMap
: isEnvDevelopment,
modules: {
mode: 'icss',
},
}),
// Don't consider CSS imports dead code even if the
// containing package claims to have no side effects.
// Remove this when webpack adds a warning or an error for this.
// See https://github.com/webpack/webpack/issues/6571
sideEffects: true,
},
// Adds support for CSS Modules (https://github.com/css-modules/css-modules)
// using the extension .module.css
{
test: cssModuleRegex,
use: getStyleLoaders({
importLoaders: 1,
sourceMap: isEnvProduction
? shouldUseSourceMap
: isEnvDevelopment,
modules: {
mode: 'local',
getLocalIdent: getCSSModuleLocalIdent,
},
}),
},
// Opt-in support for SASS (using .scss or .sass extensions).
// By default we support SASS Modules with the
// extensions .module.scss or .module.sass
{
test: sassRegex,
exclude: sassModuleRegex,
use: getStyleLoaders(
{
importLoaders: 3,
sourceMap: isEnvProduction
? shouldUseSourceMap
: isEnvDevelopment,
modules: {
mode: 'icss',
},
},
'sass-loader'
),
// Don't consider CSS imports dead code even if the
// containing package claims to have no side effects.
// Remove this when webpack adds a warning or an error for this.
// See https://github.com/webpack/webpack/issues/6571
sideEffects: true,
},
// Adds support for CSS Modules, but using SASS
// using the extension .module.scss or .module.sass
{
test: sassModuleRegex,
use: getStyleLoaders(
{
importLoaders: 3,
sourceMap: isEnvProduction
? shouldUseSourceMap
: isEnvDevelopment,
modules: {
mode: 'local',
getLocalIdent: getCSSModuleLocalIdent,
},
},
'sass-loader'
),
},
// "file" loader makes sure those assets get served by WebpackDevServer.
// When you `import` an asset, you get its (virtual) filename.
// In production, they would get copied to the `build` folder.
// This loader doesn't use a "test" so it will catch all modules
// that fall through the other loaders.
{
// Exclude `js` files to keep "css" loader working as it injects
// its runtime that would otherwise be processed through "file" loader.
// Also exclude `html` and `json` extensions so they get processed
// by webpack's internal loaders.
exclude: [/^$/, /\.(js|mjs|jsx|ts|tsx)$/, /\.html$/, /\.json$/],
type: 'asset/resource',
},
// ** STOP ** Are you adding a new loader?
// Make sure to add the new loader(s) before the "file" loader.
],
},
].filter(Boolean),
},
plugins: [
// Generates an `index.html` file with the <script> injected.
new HtmlWebpackPlugin(
Object.assign(
{},
{
inject: true,
template: paths.appHtml,
},
isEnvProduction
? {
minify: {
removeComments: true,
collapseWhitespace: true,
removeRedundantAttributes: true,
useShortDoctype: true,
removeEmptyAttributes: true,
removeStyleLinkTypeAttributes: true,
keepClosingSlash: true,
minifyJS: true,
minifyCSS: true,
minifyURLs: true,
},
}
: undefined
)
),
// Inlines the webpack runtime script. This script is too small to warrant
// a network request.
// https://github.com/facebook/create-react-app/issues/5358
isEnvProduction &&
shouldInlineRuntimeChunk &&
new InlineChunkHtmlPlugin(HtmlWebpackPlugin, [/runtime-.+[.]js/]),
// Makes some environment variables available in index.html.
// The public URL is available as %PUBLIC_URL% in index.html, e.g.:
// <link rel="icon" href="%PUBLIC_URL%/favicon.ico">
// It will be an empty string unless you specify "homepage"
// in `package.json`, in which case it will be the pathname of that URL.
new InterpolateHtmlPlugin(HtmlWebpackPlugin, env.raw),
// This gives some necessary context to module not found errors, such as
// the requesting resource.
new ModuleNotFoundPlugin(paths.appPath),
// Makes some environment variables available to the JS code, for example:
// if (process.env.NODE_ENV === 'production') { ... }. See `./env.js`.
// It is absolutely essential that NODE_ENV is set to production
// during a production build.
// Otherwise React will be compiled in the very slow development mode.
new webpack.DefinePlugin(env.stringified),
// Experimental hot reloading for React .
// https://github.com/facebook/react/tree/main/packages/react-refresh
isEnvDevelopment &&
shouldUseReactRefresh &&
new ReactRefreshWebpackPlugin({
overlay: false,
}),
// Watcher doesn't work well if you mistype casing in a path so we use
// a plugin that prints an error when you attempt to do this.
// See https://github.com/facebook/create-react-app/issues/240
isEnvDevelopment && new CaseSensitivePathsPlugin(),
isEnvProduction &&
new MiniCssExtractPlugin({
// Options similar to the same options in webpackOptions.output
// both options are optional
filename: 'static/css/[name].[contenthash:8].css',
chunkFilename: 'static/css/[name].[contenthash:8].chunk.css',
}),
// Generate an asset manifest file with the following content:
// - "files" key: Mapping of all asset filenames to their corresponding
// output file so that tools can pick it up without having to parse
// `index.html`
// - "entrypoints" key: Array of files which are included in `index.html`,
// can be used to reconstruct the HTML if necessary
new WebpackManifestPlugin({
fileName: 'asset-manifest.json',
publicPath: paths.publicUrlOrPath,
generate: (seed, files, entrypoints) => {
const manifestFiles = files.reduce((manifest, file) => {
manifest[file.name] = file.path;
return manifest;
}, seed);
const entrypointFiles = entrypoints.main.filter(
fileName => !fileName.endsWith('.map')
);
return {
files: manifestFiles,
entrypoints: entrypointFiles,
};
},
}),
// Moment.js is an extremely popular library that bundles large locale files
// by default due to how webpack interprets its code. This is a practical
// solution that requires the user to opt into importing specific locales.
// https://github.com/jmblog/how-to-optimize-momentjs-with-webpack
// You can remove this if you don't use Moment.js:
new webpack.IgnorePlugin({
resourceRegExp: /^\.\/locale$/,
contextRegExp: /moment$/,
}),
// Generate a service worker script that will precache, and keep up to date,
// the HTML & assets that are part of the webpack build.
isEnvProduction &&
fs.existsSync(swSrc) &&
new WorkboxWebpackPlugin.InjectManifest({
swSrc,
dontCacheBustURLsMatching: /\.[0-9a-f]{8}\./,
exclude: [/\.map$/, /asset-manifest\.json$/, /LICENSE/],
// Bump up the default maximum size (2mb) that's precached,
// to make lazy-loading failure scenarios less likely.
// See https://github.com/cra-template/pwa/issues/13#issuecomment-722667270
maximumFileSizeToCacheInBytes: 5 * 1024 * 1024,
}),
// TypeScript type checking
useTypeScript &&
new ForkTsCheckerWebpackPlugin({
async: isEnvDevelopment,
typescript: {
typescriptPath: resolve.sync('typescript', {
basedir: paths.appNodeModules,
}),
configOverwrite: {
compilerOptions: {
sourceMap: isEnvProduction
? shouldUseSourceMap
: isEnvDevelopment,
skipLibCheck: true,
inlineSourceMap: false,
declarationMap: false,
noEmit: true,
incremental: true,
tsBuildInfoFile: paths.appTsBuildInfoFile,
},
},
context: paths.appPath,
diagnosticOptions: {
syntactic: true,
},
mode: 'write-references',
// profile: true,
},
issue: {
// This one is specifically to match during CI tests,
// as micromatch doesn't match
// '../cra-template-typescript/template/src/App.tsx'
// otherwise.
include: [
{ file: '../**/src/**/*.{ts,tsx}' },
{ file: '**/src/**/*.{ts,tsx}' },
],
exclude: [
{ file: '**/src/**/__tests__/**' },
{ file: '**/src/**/?(*.){spec|test}.*' },
{ file: '**/src/setupProxy.*' },
{ file: '**/src/setupTests.*' },
],
},
logger: {
infrastructure: 'silent',
},
}),
!disableESLintPlugin &&
new ESLintPlugin({
// Plugin options
extensions: ['js', 'mjs', 'jsx', 'ts', 'tsx'],
formatter: require.resolve('react-dev-utils/eslintFormatter'),
eslintPath: require.resolve('eslint'),
failOnError: !(isEnvDevelopment && emitErrorsAsWarnings),
context: paths.appSrc,
cache: true,
cacheLocation: path.resolve(
paths.appNodeModules,
'.cache/.eslintcache'
),
// ESLint class options
cwd: paths.appPath,
resolvePluginsRelativeTo: __dirname,
baseConfig: {
extends: [require.resolve('eslint-config-react-app/base')],
rules: {
...(!hasJsxRuntime && {
'react/react-in-jsx-scope': 'error',
}),
},
},
}),
].filter(Boolean),
// Turn off performance processing because we utilize
// our own hints via the FileSizeReporter
performance: false,
};
};

View file

@ -0,0 +1,9 @@
'use strict';
const { createHash } = require('crypto');
module.exports = env => {
const hash = createHash('md5');
hash.update(JSON.stringify(env));
return hash.digest('hex');
};

View file

@ -0,0 +1,127 @@
'use strict';
const fs = require('fs');
const evalSourceMapMiddleware = require('react-dev-utils/evalSourceMapMiddleware');
const noopServiceWorkerMiddleware = require('react-dev-utils/noopServiceWorkerMiddleware');
const ignoredFiles = require('react-dev-utils/ignoredFiles');
const redirectServedPath = require('react-dev-utils/redirectServedPathMiddleware');
const paths = require('./paths');
const getHttpsConfig = require('./getHttpsConfig');
const host = process.env.HOST || '0.0.0.0';
const sockHost = process.env.WDS_SOCKET_HOST;
const sockPath = process.env.WDS_SOCKET_PATH; // default: '/ws'
const sockPort = process.env.WDS_SOCKET_PORT;
module.exports = function (proxy, allowedHost) {
const disableFirewall =
!proxy || process.env.DANGEROUSLY_DISABLE_HOST_CHECK === 'true';
return {
// WebpackDevServer 2.4.3 introduced a security fix that prevents remote
// websites from potentially accessing local content through DNS rebinding:
// https://github.com/webpack/webpack-dev-server/issues/887
// https://medium.com/webpack/webpack-dev-server-middleware-security-issues-1489d950874a
// However, it made several existing use cases such as development in cloud
// environment or subdomains in development significantly more complicated:
// https://github.com/facebook/create-react-app/issues/2271
// https://github.com/facebook/create-react-app/issues/2233
// While we're investigating better solutions, for now we will take a
// compromise. Since our WDS configuration only serves files in the `public`
// folder we won't consider accessing them a vulnerability. However, if you
// use the `proxy` feature, it gets more dangerous because it can expose
// remote code execution vulnerabilities in backends like Django and Rails.
// So we will disable the host check normally, but enable it if you have
// specified the `proxy` setting. Finally, we let you override it if you
// really know what you're doing with a special environment variable.
// Note: ["localhost", ".localhost"] will support subdomains - but we might
// want to allow setting the allowedHosts manually for more complex setups
allowedHosts: disableFirewall ? 'all' : [allowedHost],
headers: {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Methods': '*',
'Access-Control-Allow-Headers': '*',
},
// Enable gzip compression of generated files.
compress: true,
static: {
// By default WebpackDevServer serves physical files from current directory
// in addition to all the virtual build products that it serves from memory.
// This is confusing because those files won't automatically be available in
// production build folder unless we copy them. However, copying the whole
// project directory is dangerous because we may expose sensitive files.
// Instead, we establish a convention that only files in `public` directory
// get served. Our build script will copy `public` into the `build` folder.
// In `index.html`, you can get URL of `public` folder with %PUBLIC_URL%:
// <link rel="icon" href="%PUBLIC_URL%/favicon.ico">
// In JavaScript code, you can access it with `process.env.PUBLIC_URL`.
// Note that we only recommend to use `public` folder as an escape hatch
// for files like `favicon.ico`, `manifest.json`, and libraries that are
// for some reason broken when imported through webpack. If you just want to
// use an image, put it in `src` and `import` it from JavaScript instead.
directory: paths.appPublic,
publicPath: [paths.publicUrlOrPath],
// By default files from `contentBase` will not trigger a page reload.
watch: {
// Reportedly, this avoids CPU overload on some systems.
// https://github.com/facebook/create-react-app/issues/293
// src/node_modules is not ignored to support absolute imports
// https://github.com/facebook/create-react-app/issues/1065
ignored: ignoredFiles(paths.appSrc),
},
},
client: {
webSocketURL: {
// Enable custom sockjs pathname for websocket connection to hot reloading server.
// Enable custom sockjs hostname, pathname and port for websocket connection
// to hot reloading server.
hostname: sockHost,
pathname: sockPath,
port: sockPort,
},
overlay: {
errors: true,
warnings: false,
},
},
devMiddleware: {
// It is important to tell WebpackDevServer to use the same "publicPath" path as
// we specified in the webpack config. When homepage is '.', default to serving
// from the root.
// remove last slash so user can land on `/test` instead of `/test/`
publicPath: paths.publicUrlOrPath.slice(0, -1),
},
https: getHttpsConfig(),
host,
historyApiFallback: {
// Paths with dots should still use the history fallback.
// See https://github.com/facebook/create-react-app/issues/387.
disableDotRule: true,
index: paths.publicUrlOrPath,
},
// `proxy` is run between `before` and `after` `webpack-dev-server` hooks
proxy,
onBeforeSetupMiddleware(devServer) {
// Keep `evalSourceMapMiddleware`
// middlewares before `redirectServedPath` otherwise will not have any effect
// This lets us fetch source contents from webpack for the error overlay
devServer.app.use(evalSourceMapMiddleware(devServer));
if (fs.existsSync(paths.proxySetup)) {
// This registers user provided middleware for proxy reasons
require(paths.proxySetup)(devServer.app);
}
},
onAfterSetupMiddleware(devServer) {
// Redirect to `PUBLIC_URL` or `homepage` from `package.json` if url not match
devServer.app.use(redirectServedPath(paths.publicUrlOrPath));
// This service worker file is effectively a 'no-op' that will reset any
// previous service worker registered for the same host:port combination.
// We do this in development to avoid hitting the production cache if
// it used the same host and port.
// https://github.com/facebook/create-react-app/issues/2272#issuecomment-302832432
devServer.app.use(noopServiceWorkerMiddleware(paths.publicUrlOrPath));
},
};
};

asp-review-app/ui/package-lock.json (generated, +17226): file diff suppressed because it is too large.

View file

@ -0,0 +1,165 @@
{
"name": "i18next",
"version": "0.1.0",
"private": true,
"dependencies": {
"@babel/core": "^7.16.0",
"@pmmmwh/react-refresh-webpack-plugin": "^0.5.3",
"@react-three/drei": "^9.65.3",
"@svgr/webpack": "^5.5.0",
"@testing-library/jest-dom": "^5.14.1",
"@testing-library/react": "^13.0.0",
"@testing-library/user-event": "^13.2.1",
"@types/jest": "^27.0.1",
"@types/node": "^16.7.13",
"@types/react": "18.0.25",
"@types/react-dom": "18.0.9",
"antd": "^5.5.2",
"babel-jest": "^27.4.2",
"babel-loader": "^8.2.3",
"babel-plugin-named-asset-import": "^0.3.8",
"babel-preset-react-app": "^10.0.1",
"bfj": "^7.0.2",
"browserslist": "^4.18.1",
"camelcase": "^6.2.1",
"case-sensitive-paths-webpack-plugin": "^2.4.0",
"css-loader": "^6.5.1",
"css-minimizer-webpack-plugin": "^3.2.0",
"dotenv": "^10.0.0",
"dotenv-expand": "^5.1.0",
"eslint": "^8.3.0",
"eslint-config-react-app": "^7.0.1",
"eslint-webpack-plugin": "^3.1.1",
"file-loader": "^6.2.0",
"fs-extra": "^10.0.0",
"html-webpack-plugin": "^5.5.0",
"i18next": "^22.4.14",
"i18next-browser-languagedetector": "^7.0.1",
"identity-obj-proxy": "^3.0.0",
"jest": "^27.4.3",
"jest-resolve": "^27.4.2",
"jest-watch-typeahead": "^1.0.0",
"localforage": "^1.10.0",
"match-sorter": "^6.3.1",
"mini-css-extract-plugin": "^2.4.5",
"mobx": "^6.9.0",
"mobx-react": "^7.6.0",
"postcss": "^8.4.4",
"postcss-flexbugs-fixes": "^5.0.2",
"postcss-loader": "^6.2.1",
"postcss-normalize": "^10.0.1",
"postcss-preset-env": "^7.0.1",
"prompts": "^2.4.2",
"react": "18.0.0",
"react-app-polyfill": "^3.0.0",
"react-dev-utils": "^12.0.1",
"react-dom": "18.0.0",
"react-i18next": "^12.2.0",
"react-refresh": "^0.11.0",
"react-router-dom": "^6.11.2",
"react-three-fiber": "^6.0.13",
"resolve": "^1.20.0",
"resolve-url-loader": "^4.0.0",
"rete": "2.0.0-beta.9",
"rete-area-plugin": "2.0.0-beta.12",
"rete-connection-plugin": "2.0.0-beta.16",
"rete-react-render-plugin": "2.0.0-beta.22",
"rete-render-utils": "2.0.0-beta.12",
"sass-loader": "^12.3.0",
"semver": "^7.3.5",
"sort-by": "^1.2.0",
"source-map-loader": "^3.0.0",
"style-loader": "^3.3.1",
"tailwindcss": "^3.0.2",
"terser-webpack-plugin": "^5.2.5",
"three": "^0.151.3",
"typescript": "^4.4.2",
"web-vitals": "^2.1.0",
"webpack": "^5.64.4",
"webpack-dev-server": "^4.6.0",
"webpack-manifest-plugin": "^4.0.2",
"workbox-webpack-plugin": "^6.4.1"
},
"scripts": {
"dev": "node scripts/start.js",
"build": "node scripts/build.js",
"test": "node scripts/test.js"
},
"eslintConfig": {
"extends": [
"react-app",
"react-app/jest"
]
},
"browserslist": {
"production": [
">0.2%",
"not dead",
"not op_mini all"
],
"development": [
"last 1 chrome version",
"last 1 firefox version",
"last 1 safari version"
]
},
"devDependencies": {
"@types/three": "^0.150.1"
},
"jest": {
"roots": [
"<rootDir>/src"
],
"collectCoverageFrom": [
"src/**/*.{js,jsx,ts,tsx}",
"!src/**/*.d.ts"
],
"setupFiles": [
"react-app-polyfill/jsdom"
],
"setupFilesAfterEnv": [
"<rootDir>/src/setupTests.ts"
],
"testMatch": [
"<rootDir>/src/**/__tests__/**/*.{js,jsx,ts,tsx}",
"<rootDir>/src/**/*.{spec,test}.{js,jsx,ts,tsx}"
],
"testEnvironment": "jsdom",
"transform": {
"^.+\\.(js|jsx|mjs|cjs|ts|tsx)$": "<rootDir>/config/jest/babelTransform.js",
"^.+\\.css$": "<rootDir>/config/jest/cssTransform.js",
"^(?!.*\\.(js|jsx|mjs|cjs|ts|tsx|css|json)$)": "<rootDir>/config/jest/fileTransform.js"
},
"transformIgnorePatterns": [
"[/\\\\]node_modules[/\\\\].+\\.(js|jsx|mjs|cjs|ts|tsx)$",
"^.+\\.module\\.(css|sass|scss)$"
],
"modulePaths": [],
"moduleNameMapper": {
"^react-native$": "react-native-web",
"^.+\\.module\\.(css|sass|scss)$": "identity-obj-proxy"
},
"moduleFileExtensions": [
"web.js",
"js",
"web.ts",
"ts",
"web.tsx",
"tsx",
"json",
"web.jsx",
"jsx",
"node"
],
"watchPlugins": [
"jest-watch-typeahead/filename",
"jest-watch-typeahead/testname"
],
"resetMocks": true
},
"babel": {
"presets": [
"react-app"
]
}
}

Binary file not shown (added; size 3.8 KiB).

View file

@ -0,0 +1,43 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<link rel="icon" href="%PUBLIC_URL%/favicon.ico" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<meta name="theme-color" content="#000000" />
<meta
name="description"
content="Web site created using create-react-app"
/>
<link rel="apple-touch-icon" href="%PUBLIC_URL%/logo192.png" />
<!--
manifest.json provides metadata used when your web app is installed on a
user's mobile device or desktop. See https://developers.google.com/web/fundamentals/web-app-manifest/
-->
<link rel="manifest" href="%PUBLIC_URL%/manifest.json" />
<!--
Notice the use of %PUBLIC_URL% in the tags above.
It will be replaced with the URL of the `public` folder during the build.
Only files inside the `public` folder can be referenced from the HTML.
Unlike "/favicon.ico" or "favicon.ico", "%PUBLIC_URL%/favicon.ico" will
work correctly both with client-side routing and a non-root public URL.
Learn how to configure a non-root public URL by running `npm run build`.
-->
<title>React App</title>
</head>
<body>
<noscript>You need to enable JavaScript to run this app.</noscript>
<div class="root" id="root"></div>
<!--
This HTML file is a template.
If you open it directly in the browser, you will see an empty page.
You can add webfonts, meta tags, or analytics to this file.
The build step will place the bundled scripts into the <body> tag.
To begin the development, run `npm start` or `yarn start`.
To create a production bundle, use `npm run build` or `yarn build`.
-->
</body>
</html>

Binary file not shown (added; size 5.2 KiB).

Binary file not shown (added; size 9.4 KiB).

View file

@ -0,0 +1,25 @@
{
"short_name": "React App",
"name": "Create React App Sample",
"icons": [
{
"src": "favicon.ico",
"sizes": "64x64 32x32 24x24 16x16",
"type": "image/x-icon"
},
{
"src": "logo192.png",
"type": "image/png",
"sizes": "192x192"
},
{
"src": "logo512.png",
"type": "image/png",
"sizes": "512x512"
}
],
"start_url": ".",
"display": "standalone",
"theme_color": "#000000",
"background_color": "#ffffff"
}

View file

@ -0,0 +1,3 @@
# https://www.robotstxt.org/robotstxt.html
User-agent: *
Disallow:

View file

@ -0,0 +1,217 @@
'use strict';
// Do this as the first thing so that any code reading it knows the right env.
process.env.BABEL_ENV = 'production';
process.env.NODE_ENV = 'production';
// Makes the script crash on unhandled rejections instead of silently
// ignoring them. In the future, promise rejections that are not handled will
// terminate the Node.js process with a non-zero exit code.
process.on('unhandledRejection', err => {
throw err;
});
// Ensure environment variables are read.
require('../config/env');
const path = require('path');
const chalk = require('react-dev-utils/chalk');
const fs = require('fs-extra');
const bfj = require('bfj');
const webpack = require('webpack');
const configFactory = require('../config/webpack.config');
const paths = require('../config/paths');
const checkRequiredFiles = require('react-dev-utils/checkRequiredFiles');
const formatWebpackMessages = require('react-dev-utils/formatWebpackMessages');
const printHostingInstructions = require('react-dev-utils/printHostingInstructions');
const FileSizeReporter = require('react-dev-utils/FileSizeReporter');
const printBuildError = require('react-dev-utils/printBuildError');
const measureFileSizesBeforeBuild =
FileSizeReporter.measureFileSizesBeforeBuild;
const printFileSizesAfterBuild = FileSizeReporter.printFileSizesAfterBuild;
const useYarn = fs.existsSync(paths.yarnLockFile);
// These sizes are pretty large. We'll warn for bundles exceeding them.
const WARN_AFTER_BUNDLE_GZIP_SIZE = 512 * 1024;
const WARN_AFTER_CHUNK_GZIP_SIZE = 1024 * 1024;
const isInteractive = process.stdout.isTTY;
// Warn and crash if required files are missing
if (!checkRequiredFiles([paths.appHtml, paths.appIndexJs])) {
process.exit(1);
}
const argv = process.argv.slice(2);
const writeStatsJson = argv.indexOf('--stats') !== -1;
// Generate configuration
const config = configFactory('production');
// We require that you explicitly set browsers and do not fall back to
// browserslist defaults.
const { checkBrowsers } = require('react-dev-utils/browsersHelper');
checkBrowsers(paths.appPath, isInteractive)
.then(() => {
// First, read the current file sizes in build directory.
// This lets us display how much they changed later.
return measureFileSizesBeforeBuild(paths.appBuild);
})
.then(previousFileSizes => {
// Remove all content but keep the directory so that
// if you're in it, you don't end up in Trash
fs.emptyDirSync(paths.appBuild);
// Merge with the public folder
copyPublicFolder();
// Start the webpack build
return build(previousFileSizes);
})
.then(
({ stats, previousFileSizes, warnings }) => {
if (warnings.length) {
console.log(chalk.yellow('Compiled with warnings.\n'));
console.log(warnings.join('\n\n'));
console.log(
'\nSearch for the ' +
chalk.underline(chalk.yellow('keywords')) +
' to learn more about each warning.'
);
console.log(
'To ignore, add ' +
chalk.cyan('// eslint-disable-next-line') +
' to the line before.\n'
);
} else {
console.log(chalk.green('Compiled successfully.\n'));
}
console.log('File sizes after gzip:\n');
printFileSizesAfterBuild(
stats,
previousFileSizes,
paths.appBuild,
WARN_AFTER_BUNDLE_GZIP_SIZE,
WARN_AFTER_CHUNK_GZIP_SIZE
);
console.log();
const appPackage = require(paths.appPackageJson);
const publicUrl = paths.publicUrlOrPath;
const publicPath = config.output.publicPath;
const buildFolder = path.relative(process.cwd(), paths.appBuild);
printHostingInstructions(
appPackage,
publicUrl,
publicPath,
buildFolder,
useYarn
);
},
err => {
const tscCompileOnError = process.env.TSC_COMPILE_ON_ERROR === 'true';
if (tscCompileOnError) {
console.log(
chalk.yellow(
'Compiled with the following type errors (you may want to check these before deploying your app):\n'
)
);
printBuildError(err);
} else {
console.log(chalk.red('Failed to compile.\n'));
printBuildError(err);
process.exit(1);
}
}
)
.catch(err => {
if (err && err.message) {
console.log(err.message);
}
process.exit(1);
});
// Create the production build and print the deployment instructions.
function build(previousFileSizes) {
console.log('Creating an optimized production build...');
const compiler = webpack(config);
return new Promise((resolve, reject) => {
compiler.run((err, stats) => {
let messages;
if (err) {
if (!err.message) {
return reject(err);
}
let errMessage = err.message;
// Add additional information for postcss errors
if (Object.prototype.hasOwnProperty.call(err, 'postcssNode')) {
errMessage +=
'\nCompileError: Begins at CSS selector ' +
err['postcssNode'].selector;
}
messages = formatWebpackMessages({
errors: [errMessage],
warnings: [],
});
} else {
messages = formatWebpackMessages(
stats.toJson({ all: false, warnings: true, errors: true })
);
}
if (messages.errors.length) {
// Only keep the first error. Others are often indicative
// of the same problem, but confuse the reader with noise.
if (messages.errors.length > 1) {
messages.errors.length = 1;
}
return reject(new Error(messages.errors.join('\n\n')));
}
if (
process.env.CI &&
(typeof process.env.CI !== 'string' ||
process.env.CI.toLowerCase() !== 'false') &&
messages.warnings.length
) {
// Ignore sourcemap warnings in CI builds. See #8227 for more info.
const filteredWarnings = messages.warnings.filter(
w => !/Failed to parse source map/.test(w)
);
if (filteredWarnings.length) {
console.log(
chalk.yellow(
'\nTreating warnings as errors because process.env.CI = true.\n' +
'Most CI servers set it automatically.\n'
)
);
return reject(new Error(filteredWarnings.join('\n\n')));
}
}
const resolveArgs = {
stats,
previousFileSizes,
warnings: messages.warnings,
};
if (writeStatsJson) {
return bfj
.write(paths.appBuild + '/bundle-stats.json', stats.toJson())
.then(() => resolve(resolveArgs))
.catch(error => reject(new Error(error)));
}
return resolve(resolveArgs);
});
});
}
function copyPublicFolder() {
fs.copySync(paths.appPublic, paths.appBuild, {
dereference: true,
filter: file => file !== paths.appHtml,
});
}

View file

@ -0,0 +1,154 @@
'use strict';
// Do this as the first thing so that any code reading it knows the right env.
process.env.BABEL_ENV = 'development';
process.env.NODE_ENV = 'development';
// Makes the script crash on unhandled rejections instead of silently
// ignoring them. In the future, promise rejections that are not handled will
// terminate the Node.js process with a non-zero exit code.
process.on('unhandledRejection', err => {
throw err;
});
// Ensure environment variables are read.
require('../config/env');
const fs = require('fs');
const chalk = require('react-dev-utils/chalk');
const webpack = require('webpack');
const WebpackDevServer = require('webpack-dev-server');
const clearConsole = require('react-dev-utils/clearConsole');
const checkRequiredFiles = require('react-dev-utils/checkRequiredFiles');
const {
choosePort,
createCompiler,
prepareProxy,
prepareUrls,
} = require('react-dev-utils/WebpackDevServerUtils');
const openBrowser = require('react-dev-utils/openBrowser');
const semver = require('semver');
const paths = require('../config/paths');
const configFactory = require('../config/webpack.config');
const createDevServerConfig = require('../config/webpackDevServer.config');
const getClientEnvironment = require('../config/env');
const react = require(require.resolve('react', { paths: [paths.appPath] }));
const env = getClientEnvironment(paths.publicUrlOrPath.slice(0, -1));
const useYarn = fs.existsSync(paths.yarnLockFile);
const isInteractive = process.stdout.isTTY;
// Warn and crash if required files are missing
if (!checkRequiredFiles([paths.appHtml, paths.appIndexJs])) {
process.exit(1);
}
// Tools like Cloud9 rely on this.
const DEFAULT_PORT = parseInt(process.env.PORT, 10) || 3000;
const HOST = process.env.HOST || '0.0.0.0';
if (process.env.HOST) {
console.log(
chalk.cyan(
`Attempting to bind to HOST environment variable: ${chalk.yellow(
chalk.bold(process.env.HOST)
)}`
)
);
console.log(
`If this was unintentional, check that you haven't mistakenly set it in your shell.`
);
console.log(
`Learn more here: ${chalk.yellow('https://cra.link/advanced-config')}`
);
console.log();
}
// We require that you explicitly set browsers and do not fall back to
// browserslist defaults.
const { checkBrowsers } = require('react-dev-utils/browsersHelper');
checkBrowsers(paths.appPath, isInteractive)
.then(() => {
// We attempt to use the default port but if it is busy, we offer the user to
// run on a different port. `choosePort()` Promise resolves to the next free port.
return choosePort(HOST, DEFAULT_PORT);
})
.then(port => {
if (port == null) {
// We have not found a port.
return;
}
const config = configFactory('development');
const protocol = process.env.HTTPS === 'true' ? 'https' : 'http';
const appName = require(paths.appPackageJson).name;
const useTypeScript = fs.existsSync(paths.appTsConfig);
const urls = prepareUrls(
protocol,
HOST,
port,
paths.publicUrlOrPath.slice(0, -1)
);
// Create a webpack compiler that is configured with custom messages.
const compiler = createCompiler({
appName,
config,
urls,
useYarn,
useTypeScript,
webpack,
});
// Load proxy config
const proxySetting = require(paths.appPackageJson).proxy;
const proxyConfig = prepareProxy(
proxySetting,
paths.appPublic,
paths.publicUrlOrPath
);
// Serve webpack assets generated by the compiler over a web server.
const serverConfig = {
...createDevServerConfig(proxyConfig, urls.lanUrlForConfig),
host: HOST,
port,
};
const devServer = new WebpackDevServer(serverConfig, compiler);
// Launch WebpackDevServer.
devServer.startCallback(() => {
if (isInteractive) {
clearConsole();
}
if (env.raw.FAST_REFRESH && semver.lt(react.version, '16.10.0')) {
console.log(
chalk.yellow(
`Fast Refresh requires React 16.10 or higher. You are using React ${react.version}.`
)
);
}
console.log(chalk.cyan('Starting the development server...\n'));
openBrowser(urls.localUrlForBrowser);
});
['SIGINT', 'SIGTERM'].forEach(function (sig) {
process.on(sig, function () {
devServer.close();
process.exit();
});
});
if (process.env.CI !== 'true') {
// Gracefully exit when stdin ends
process.stdin.on('end', function () {
devServer.close();
process.exit();
});
}
})
.catch(err => {
if (err && err.message) {
console.log(err.message);
}
process.exit(1);
});


@ -0,0 +1,52 @@
'use strict';
// Do this as the first thing so that any code reading it knows the right env.
process.env.BABEL_ENV = 'test';
process.env.NODE_ENV = 'test';
process.env.PUBLIC_URL = '';
// Makes the script crash on unhandled rejections instead of silently
// ignoring them. In the future, promise rejections that are not handled will
// terminate the Node.js process with a non-zero exit code.
process.on('unhandledRejection', err => {
throw err;
});
// Ensure environment variables are read.
require('../config/env');
const jest = require('jest');
const execSync = require('child_process').execSync;
let argv = process.argv.slice(2);
function isInGitRepository() {
try {
execSync('git rev-parse --is-inside-work-tree', { stdio: 'ignore' });
return true;
} catch (e) {
return false;
}
}
function isInMercurialRepository() {
try {
execSync('hg --cwd . root', { stdio: 'ignore' });
return true;
} catch (e) {
return false;
}
}
// Watch unless on CI or explicitly running all tests
if (
!process.env.CI &&
argv.indexOf('--watchAll') === -1 &&
argv.indexOf('--watchAll=false') === -1
) {
// https://github.com/facebook/create-react-app/issues/5210
const hasSourceControl = isInGitRepository() || isInMercurialRepository();
argv.push(hasSourceControl ? '--watch' : '--watchAll');
}
jest.run(argv);


@ -0,0 +1,44 @@
.canvas{
width: 100vw;
height: 100vh;
display: block;
}
.root{
overflow-y: hidden;
}
.centeredDiv{
width: 100vw;
display: flex;
justify-content: center;
}
.projects-container{
width: 100%;
background-color: aliceblue;
display: flex;
flex-direction: column;
justify-content: space-evenly;
align-items: center;
overflow-y:hidden;
}
.centeredContainer{
display: flex;
flex-direction: column;
align-content: center;
align-items: center;
}
label {
background-color: indigo;
color: white;
padding: 0.5rem;
font-family: sans-serif;
border-radius: 0.3rem;
cursor: pointer;
margin-top: 1rem;
}
#file-chosen{
margin-left: 0.3rem;
font-family: sans-serif;
}


@ -0,0 +1,10 @@
// @ts-nocheck
import {ReactComponent as SolidSvg} from "./assets/solid.svg";
import {ReactComponent as PartSvg} from "./assets/part.svg";
export const svg = {SolidSvg, PartSvg}
export { svg as SVG };


@ -0,0 +1,35 @@
export enum HttpMethod {
GET = 'GET',
POST = 'POST'
}
export enum HttpRoute {
insertionPath = '/assembly/preview/insertion_sequence/',
assemblyPreviewPath = '/assembly/preview/subsequence/',
projects = '/assembly/preview',
createProject = '/assembly/create',
ajaxMatrix = 'matrix.json'
}
export class HttpRepository {
static server = 'http://localhost:3002'
static async jsonRequest<T>(method: HttpMethod, url: string, data?: any): Promise<T> {
const reqInit = {
'body': data,
'method': method,
'headers': { 'Content-Type': 'application/json' },
}
if (data !== undefined) {
reqInit['body'] = JSON.stringify(data)
}
return (await fetch(this.server + url, reqInit)).json()
}
static async request<T>(method: HttpMethod, url: string, data?: any): Promise<T> {
const reqInit = {
'body': data,
'method': method,
}
if (data !== undefined) {
reqInit['body'] = data
}
return (await fetch(this.server + url, reqInit)).json()
}
}
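A minimal usage sketch of this repository class (assuming the routes declared above; the import path and helper names are illustrative and not part of the commit):

```
import { HttpMethod, HttpRepository, HttpRoute } from "./http_repository";

// Fetch the list of projects exposed at /assembly/preview.
async function listProjects(): Promise<Array<String>> {
  return HttpRepository.jsonRequest<Array<String>>(HttpMethod.GET, HttpRoute.projects);
}

// Upload a FreeCAD file to /assembly/create as multipart form data,
// using the raw request() helper so no JSON Content-Type header is set.
async function uploadCadFile(file: File): Promise<unknown> {
  const formData = new FormData();
  formData.append("freecad", file, file.name);
  return HttpRepository.request(HttpMethod.POST, HttpRoute.createProject, formData);
}
```

This mirrors how the project list screen and the upload form further down call the same class.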


@ -0,0 +1 @@
export {}


@ -0,0 +1,89 @@
import * as React from "react";
import { useEffect, useState } from "react";
import {
HttpMethod,
HttpRepository,
HttpRoute,
} from "../../core/repository/http_repository";
import { Button } from "antd";
import { Typography } from "antd";
import { Card } from "antd";
import { createProjectRoute } from "../create_project/create_project";
import { useNavigate } from "react-router-dom";
import { pathAjaxTopologyScreen } from "../topology_ajax_preview/topology_ajax_preview";
import { pathStabilityScreen } from "../stability_preview/stability_preview";
const { Text, Link, Title } = Typography;
function LinkCreateProjectPage() {
const navigate = useNavigate();
return (
<Link
style={{ paddingLeft: "10px" }}
onClick={() => {
navigate(createProjectRoute);
}}
>
<>Add a new project?</>
</Link>
);
}
export const ProjectsPath = "/";
export const ProjectScreen: React.FunctionComponent = () => {
const [projects, setProjects] = useState<Array<String>>([]);
const navigate = useNavigate();
useEffect(() => {
async function fetchData() {
setProjects(
await HttpRepository.jsonRequest<Array<String>>(
HttpMethod.GET,
HttpRoute.projects
)
);
}
fetchData();
}, []);
return (
<>
<div className="centeredDiv">
<Title>Projects</Title>
</div>
<div>
{projects.length === 0 ? (
<div className="centeredDiv">
<Text>No projects found</Text>
<div>
<LinkCreateProjectPage />
</div>
</div>
) : (
<div></div>
)}
</div>
<div className="projects-container">
{projects.map((el) => {
return (
<>
<Card style={{ width: 300 }}>
<div>{el}</div>
<Button onClick={() => {
navigate(pathAjaxTopologyScreen + el);
}} > Preview topology ajax computed </Button>
<Button onClick={() => {
navigate(pathStabilityScreen + el);
}} > Preview stability computed </Button>
<Button> Preview insert Path </Button>
<Button>Preview assembly logical </Button>
</Card>
</>
);
})}
<div> {projects.length === 0 ? <></> : <LinkCreateProjectPage />} </div>
</div>
</>
);
};


@ -0,0 +1,198 @@
import * as React from "react";
import {
DirectionalLight,
Object3D,
PerspectiveCamera,
Scene,
WebGLRenderer,
AmbientLight,
Vector3,
Group,
Quaternion,
} from "three";
import { OrbitControls } from "three/examples/jsm/controls/OrbitControls";
import { OBJLoader } from "three/examples/jsm/loaders/OBJLoader";
import CSS from "csstype";
import {
HttpMethod,
HttpRepository,
HttpRoute,
} from "../../core/repository/http_repository";
import { useParams } from "react-router-dom";
const canvasStyle: CSS.Properties = {
backgroundColor: "rgb(151 41 41 / 85%)",
};
export const AssemblyPreviewInsertVectorPath = "/insertion_vector/";
export interface AssemblyPreviewInsertionPathModel {
offset: number;
count: number;
parent: string;
child: string;
insertions: Insertions;
}
export interface Insertions {
time: number;
insertion_path: InsertionPath[];
status: string;
}
export interface InsertionPath {
quadrelion: number[];
xyz: number[];
euler: number[];
}
export function AssemblyPreviewInsertVector() {
const container = new Object3D();
const canvasRef = React.useRef<HTMLCanvasElement>(null);
const scene = new Scene();
const camera = new PerspectiveCamera(
80,
window.innerWidth / window.innerHeight,
0.1,
1000
);
let renderId = 1;
let assemblyCounter: undefined | Number = undefined;
let params = useParams().id;
React.useEffect(() => {
const renderer = new WebGLRenderer({
canvas: canvasRef.current as HTMLCanvasElement,
antialias: true,
alpha: true,
});
camera.position.set(2, 1, 2);
const directionalLight = new DirectionalLight(0xffffff, 0.2);
directionalLight.castShadow = true;
directionalLight.position.set(-1, 2, 4);
scene.add(directionalLight);
const ambientLight = new AmbientLight(0xffffff, 0.7);
scene.add(ambientLight);
container.position.set(0, 0, 0);
renderer.setSize(window.innerWidth, window.innerHeight);
const onResize = () => {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer!.setSize(window.innerWidth, window.innerHeight);
};
window.addEventListener("resize", onResize, false);
new OrbitControls(camera, renderer.domElement);
renderer!.setAnimationLoop(() => {
renderer!.render(scene, camera);
});
renderObject(1, params!);
});
async function renderObject(renderId: Number, projectId: String) {
const assemblyResponse =
await HttpRepository.jsonRequest<AssemblyPreviewInsertionPathModel>(
HttpMethod.GET,
`${HttpRoute.insertionPath}${projectId}?count=${renderId}`
);
const objectControl = (
await loadObject([assemblyResponse.child, assemblyResponse.parent])
)[1];
function assemblyAnimate(objectId: Number, coords: InsertionPath, b:boolean) {
const object = scene.getObjectById(objectId as number);
const r = 1
object?.position.set(coords.xyz[0] * r, coords.xyz[1] * r, coords.xyz[2] * r);
object?.setRotationFromQuaternion(
new Quaternion(
coords.quadrelion[0],
coords.quadrelion[1],
coords.quadrelion[2],
coords.quadrelion[3]
)
);
console.log(object?.position)
}
function timer(ms: number) {
return new Promise((res) => setTimeout(res, ms));
}
const b = true
async function load(id: Number, len: number) {
for (var i = 0; i < len; i++) {
assemblyAnimate(objectControl, assemblyResponse.insertions.insertion_path[i], b);
await timer(3);
}
}
assemblyResponse.insertions.insertion_path = assemblyResponse.insertions.insertion_path.reverse()
load(objectControl, assemblyResponse.insertions.insertion_path.length);
}
async function click() {
renderId = renderId + 1;
if (assemblyCounter === renderId) {
renderId = 1;
}
scene.clear();
renderObject(renderId, params!);
}
async function loadObject(objectList: string[]): Promise<Number[]> {
const promises: Array<Promise<Group>> = [];
objectList.forEach((e) => {
const fbxLoader = new OBJLoader();
promises.push(fbxLoader.loadAsync(e));
});
const objects = await Promise.all(promises);
const result: Array<Number> = [];
for (let i = 0; objects.length > i; i++) {
const el = objects[i];
container.add(el);
scene.add(container);
result.push(el.id);
const directionalLight = new DirectionalLight(0xffffff, 0.2);
directionalLight.castShadow = true;
directionalLight.position.set(container.position.x - 10,container.position.y - 10,container.position.z - 10);
scene.add(directionalLight);
container.position.set(0, 0, 0);
fitCameraToCenteredObject(camera, container);
}
return result;
}
function fitCameraToCenteredObject(
camera: PerspectiveCamera,
object: Object3D
) {
const dist = 20;
const vector = new Vector3();
camera.getWorldDirection(vector);
vector.multiplyScalar(dist);
vector.add(camera.position);
object.position.set(vector.x, vector.y, vector.z);
object.setRotationFromQuaternion(camera.quaternion);
}
return (
<>
<div className="loader">
<div onClick={() => click()}>next</div>
<canvas style={canvasStyle} ref={canvasRef} />
</div>
</>
);
}
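For reference, a sketch of the payload this screen expects from `HttpRoute.insertionPath` (field names follow the interfaces above; the concrete values are made up for illustration):

```
const sample: AssemblyPreviewInsertionPathModel = {
  offset: 0,
  count: 3,
  parent: "/project/parent.obj",
  child: "/project/child.obj",
  insertions: {
    time: 1.25,
    status: "ok",
    insertion_path: [
      // one trajectory step: quaternion components, position (xyz) and Euler angles
      { quadrelion: [0, 0, 0, 1], xyz: [0, 0, 0.1], euler: [0, 0, 0] },
    ],
  },
};
```

The animation loop walks `insertion_path` in reverse order and applies each step's position and quaternion to the moving part.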


@ -0,0 +1,141 @@
import React, { useEffect } from "react";
import {
DirectionalLight,
Object3D,
PerspectiveCamera,
Scene,
WebGLRenderer,
AmbientLight,
Vector3,
} from "three";
import { OrbitControls } from "three/examples/jsm/controls/OrbitControls";
import { OBJLoader } from "three/examples/jsm/loaders/OBJLoader";
import CSS from "csstype";
import { useParams } from "react-router-dom";
import { HttpMethod, HttpRepository, HttpRoute } from "../../core/repository/http_repository";
const canvasStyle: CSS.Properties = {
backgroundColor: "rgb(151 41 41 / 85%)",
};
export interface AssemblyPreviewStructure {
assembly: string[];
offset: number;
count: number;
}
export const AssemblyPreviewSubsequencePath = "/123/";
export const AssemblyPreviewSubsequence = () => {
const container = new Object3D();
const canvasRef = React.useRef<HTMLCanvasElement>(null);
const scene = new Scene();
const camera = new PerspectiveCamera(
80,
window.innerWidth / window.innerHeight,
0.1,
1000
);
let renderId = 1;
let assemblyCounter: undefined | Number = undefined;
let params = useParams().id;
useEffect(() => {
const renderer = new WebGLRenderer({
canvas: canvasRef.current as HTMLCanvasElement,
antialias: true,
alpha: true,
});
camera.position.set(2, 1, 2);
const directionalLight = new DirectionalLight(0xffffff, 0.2);
directionalLight.castShadow = true;
directionalLight.position.set(-1, 2, 4);
scene.add(directionalLight);
const ambientLight = new AmbientLight(0xffffff, 0.7);
scene.add(ambientLight);
container.position.set(0, 0, 0);
renderer.setSize(window.innerWidth, window.innerHeight);
const onResize = () => {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer!.setSize(window.innerWidth, window.innerHeight);
};
window.addEventListener("resize", onResize, false);
new OrbitControls(camera, renderer.domElement);
renderer!.setAnimationLoop(() => {
renderer!.render(scene, camera);
});
renderObject(1, params!);
} );
async function renderObject(renderId: Number,projectId:string ) {
const assemblyResponse =
await HttpRepository.jsonRequest<AssemblyPreviewStructure>(
HttpMethod.GET,
`${HttpRoute.assemblyPreviewPath}${projectId}?count=${renderId}`
);
assemblyCounter = assemblyResponse.count;
loadObject(assemblyResponse.assembly);
}
async function click() {
renderId = renderId + 1;
console.log(assemblyCounter);
console.log(renderId);
if (assemblyCounter === renderId) {
renderId = 1;
}
renderObject(renderId, params!);
}
function loadObject(objectList: string[]) {
objectList.forEach((el) => {
const fbxLoader = new OBJLoader();
fbxLoader.load(
el,
(object) => {
object.scale.x = 0.3;
object.scale.y = 0.3;
object.scale.z = 0.3;
object.rotation.x = -Math.PI / 2;
object.position.y = -30;
container.add(object);
scene.add(container);
fitCameraToCenteredObject(camera, container);
},
(xhr) => {
console.log((xhr.loaded / xhr.total) * 100 + "% loaded");
},
(error) => {
console.log(error);
}
);
});
}
function fitCameraToCenteredObject(
camera: PerspectiveCamera,
object: Object3D
) {
const dist = 50;
const vector = new Vector3();
camera.getWorldDirection(vector);
vector.multiplyScalar(dist);
vector.add(camera.position);
object.position.set(vector.x, vector.y, vector.z);
object.setRotationFromQuaternion(camera.quaternion);
}
return <canvas onClick={() => click()} style={canvasStyle} ref={canvasRef} />;
};


@ -0,0 +1,72 @@
import { Spin, Typography } from "antd";
import * as React from "react";
import { useNavigate } from "react-router-dom";
import {
HttpMethod,
HttpRepository,
HttpRoute,
} from "../../core/repository/http_repository";
import { pathStabilityScreen } from "../stability_preview/stability_preview";
const { Title } = Typography;
export const createProjectRoute = "/new_project";
const UploadButton = () => {
const navigate = useNavigate();
const [isLoading, setLoading] = React.useState<boolean>(false);
const handleImageChange = function (e: React.ChangeEvent<HTMLInputElement>) {
const fileList = e.target.files;
if (!fileList) return;
let file = fileList[0] as File;
uploadFile(file);
};
const uploadFile = async (file: File) => {
if (file) {
const formData = new FormData();
formData.append("freecad", file, file.name);
setLoading(true);
await HttpRepository.request(
HttpMethod.POST,
HttpRoute.createProject,
formData
);
setLoading(false);
navigate(pathStabilityScreen)
}
};
return isLoading ? (
<>
<Spin />
</>
) : (
<label htmlFor="photo">
<input
accept=".FCStd"
style={{ display: "none" }}
id="photo"
name="photo"
type="file"
multiple={false}
onChange={handleImageChange}
/>
Choose Cad file
</label>
);
};
export default function CreateProject() {
return (
<div className="centeredContainer">
<div className="centeredDiv">
<Title>Create new project</Title>
</div>
<div style={{ paddingTop: "10px" }}>
<UploadButton />
</div>
</div>
);
}


@ -0,0 +1,57 @@
import { Button } from 'antd';
import * as React from 'react';
import { useParams } from 'react-router-dom';
import { HttpRepository, HttpMethod, HttpRoute } from '../../core/repository/http_repository';
export const pathStabilityScreen = '/stability/preview/usecase/'
interface IStabilityCheckResponce {
status: "rejected" | "fulfilled";
value: undefined | string;
index: number;
}
interface IStability {
status: boolean;
detail: string;
}
export const StabilityPreviewScreen: React.FunctionComponent = () => {
const id = useParams().id
const [stabilityResult, setStability] = React.useState<IStability[] | null>(null);
React.useEffect(() => {
const stabilityCheck = async () => {
const result = await HttpRepository.jsonRequest<Array<string>>(HttpMethod.GET, '/' + id + '/generation/step-structure.json')
const promises = []
for (let i = 0; i !== result.length; i++) {
const stabilitySubId = i + 1
promises.push(HttpRepository.jsonRequest<Array<string>>(HttpMethod.GET, '/' + id + '/generation/stability/' + stabilitySubId + '/geometry.json'))
}
const stabilityCheck = await (await Promise.allSettled(promises)).map<IStability>((element, index) => {
return {
status: element.status === 'fulfilled' ? true : false,
detail: result[index],
}
})
setStability(stabilityCheck)
};
stabilityCheck()
}, []);
return (<div>
{stabilityResult != null ? (<>
{stabilityResult.map((el, index) => {
return (<div><div>{el.detail}</div> <div>{el.status ? (<>Success</>) : (<><Button onClick={async () => {
await HttpRepository.jsonRequest(HttpMethod.POST, '/assembly/stability/write/computed', {
"id": id,
"buildNumber": (index + 1).toString()
})
}}>need input </Button></>)}</div> </div>)
})}
</>) : (<div>loading</div>)}
</div>);
};


@ -0,0 +1,48 @@
import * as React from 'react';
import { useParams } from 'react-router-dom';
import { HttpRepository, HttpMethod, HttpRoute } from '../../core/repository/http_repository';
export const pathAjaxTopologyScreen = '/topology/adjax/usecase/'
export interface IAdjaxMatrix {
allParts: string[];
firstDetail: string;
matrix: StringMap;
matrixError: StringMap | null;
}
interface StringMap { [key: string]: string; }
export const MatrixTopologyAdjaxScreen: React.FunctionComponent = () => {
const [matrix, setMatrix] = React.useState<IAdjaxMatrix | null>(null);
const param = useParams().id
React.useEffect(() => {
async function fetchData() {
setMatrix(
await HttpRepository.jsonRequest<IAdjaxMatrix>(
HttpMethod.GET,
'/' + param + '/' + HttpRoute.ajaxMatrix
)
);
}
fetchData();
}, []);
return (<div>
{matrix === null ? (<>loaded</>) : (<>
{matrix.matrixError != null ? (<>
{Object.keys(matrix.matrixError).map((keyName, i) => {
const m = matrix.matrixError as StringMap;
return (
<div key={i}>
<div>{m[keyName]}</div>
</div>
)
})}
</>) : (<>Success</>)}
</>)}
</div>);
};

asp-review-app/ui/src/global.d.ts

@ -0,0 +1,14 @@
/// <reference types="react-scripts" />
import { resources, defaultNS } from './i18n';
declare module 'i18next' {
interface CustomTypeOptions {
defaultNS: typeof defaultNS;
resources: typeof resources['en'];
}
}
declare module "*.svg" {
import { ReactElement, SVGProps } from "react";
const content: (props: SVGProps<SVGElement>) => ReactElement;
export default content;
}


@ -0,0 +1,13 @@
body {
margin: 0;
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen',
'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue',
sans-serif;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
}
code {
font-family: source-code-pro, Menlo, Monaco, Consolas, 'Courier New',
monospace;
}


@ -0,0 +1,50 @@
import { render } from "react-dom";
import "./App.css";
import "./index.css";
import { createBrowserRouter, RouterProvider } from "react-router-dom";
import {
AssemblyPreviewInsertVector,
AssemblyPreviewInsertVectorPath,
} from "./features/assembly_preview_insert_vector/Assembly_preview_insert_vector_screen";
import {
ProjectScreen,
ProjectsPath,
} from "./features/all_project/all_project_screen";
import {
AssemblyPreviewSubsequence,
AssemblyPreviewSubsequencePath,
} from "./features/assembly_preview_subsequence/assembly_preview_subsequence_screen";
import CreateProject, { createProjectRoute } from "./features/create_project/create_project";
import { pathAjaxTopologyScreen, MatrixTopologyAdjaxScreen } from "./features/topology_ajax_preview/topology_ajax_preview";
import { pathStabilityScreen, StabilityPreviewScreen } from "./features/stability_preview/stability_preview";
const rootElement = document.getElementById("root");
const router = createBrowserRouter([
{
path: ProjectsPath,
element: <ProjectScreen />,
},
{
path:createProjectRoute,
element:<CreateProject/>
},
{
path: AssemblyPreviewSubsequencePath + ":id",
element: <AssemblyPreviewSubsequence />,
},
{
path: AssemblyPreviewInsertVectorPath + ":id",
element: <AssemblyPreviewInsertVector />,
},
{
path: pathAjaxTopologyScreen + ":id",
element:<MatrixTopologyAdjaxScreen/>
},
{
path: pathStabilityScreen + ':id',
element:<StabilityPreviewScreen/>
}
]);
render(<RouterProvider router={router} />, rootElement);


@ -0,0 +1,15 @@
import { ReportHandler } from 'web-vitals';
const reportWebVitals = (onPerfEntry?: ReportHandler) => {
if (onPerfEntry && onPerfEntry instanceof Function) {
import('web-vitals').then(({ getCLS, getFID, getFCP, getLCP, getTTFB }) => {
getCLS(onPerfEntry);
getFID(onPerfEntry);
getFCP(onPerfEntry);
getLCP(onPerfEntry);
getTTFB(onPerfEntry);
});
}
};
export default reportWebVitals;
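A typical way to wire this helper up (standard Create React App boilerplate; the call site shown here is illustrative and not part of the commit):

```
import reportWebVitals from "./reportWebVitals";

// Log Core Web Vitals (CLS, FID, FCP, LCP, TTFB) to the console;
// a real deployment might send them to an analytics endpoint instead.
reportWebVitals(console.log);
```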


@ -0,0 +1,27 @@
{
"compilerOptions": {
"target": "ES6",
"lib": [
"dom",
"dom.iterable",
"esnext"
],
"allowJs": true,
"skipLibCheck": true,
"esModuleInterop": true,
"experimentalDecorators": true,
"allowSyntheticDefaultImports": true,
"strict": true,
"forceConsistentCasingInFileNames": true,
"noFallthroughCasesInSwitch": true,
"module": "esnext",
"moduleResolution": "node",
"resolveJsonModule": true,
"isolatedModules": true,
"noEmit": true,
"jsx": "react-jsx"
},
"include": [
"src"
]
}

asp-review-app/ui/yarn.lock
File diff suppressed because it is too large.


@ -8,21 +8,18 @@ class FS:
return json.loads((open(path)).read())
def writeFile(data, filePath, fileName):
file_to_open = filePath + fileName
f = open(file_to_open, "w", encoding="utf-8", errors="ignore")
f = open(file_to_open, 'w', )
f.write(data)
f.close()
def readFile(path: str):
def readFile(path:str):
return open(path).read()
def readFilesTypeFolder(pathFolder: str, fileType=".json"):
def readFilesTypeFolder(pathFolder: str, fileType = '.json'):
filesJson = list(
filter(
lambda x: x[-fileType.__len__() :] == fileType, os.listdir(pathFolder)
)
)
filter(lambda x: x[-fileType.__len__():] == fileType, os.listdir(pathFolder)))
return filesJson
@ -33,5 +30,6 @@ def listGetFirstValue(iterable, default=False, pred=None):
def filterModels(filterModels, filterModelsDescription: list[str]):
models = []
for el in filterModelsDescription:
models.append(listGetFirstValue(filterModels, None, lambda x: x.name == el))
models.append(listGetFirstValue(
filterModels, None, lambda x: x.name == el))
return models

asp/main.py

@ -0,0 +1,45 @@
import argparse
import shutil
from helper.fs import FS
from src.usecases.urdf_sub_assembly_usecase import UrdfSubAssemblyUseCase
from src.model.sdf_geometry import GeometryModel
from src.usecases.sdf_sub_assembly_usecase import SdfSubAssemblyUseCase
import os
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--generationFolder', help='FreeCad generation folder')
parser.add_argument('--outPath', help='save SDF path')
parser.add_argument('--world', help='adding sdf world')
parser.add_argument('--format', help='urdf,sdf,mujoco')
args = parser.parse_args()
if args.generationFolder == None or args.outPath == None:
parser.print_help()
outPath = args.outPath
geometryFiles = FS.readFilesTypeFolder(args.generationFolder + '/assets/')
assemblyStructure = FS.readJSON(
args.generationFolder + '/step-structure.json')
geometryModels: list[GeometryModel] = []
for el in geometryFiles:
geometryModels.append(GeometryModel.from_dict(
FS.readJSON(args.generationFolder + '/assets/' + el)))
# if os.path.exists(outPath + 'sdf-generation/'):
# shutil.rmtree(path=outPath + 'sdf-generation/')
if (args.format == 'sdf'):
SdfSubAssemblyUseCase().call(
geometryModels=geometryModels, assembly=assemblyStructure,
world=args.world,
generationFolder=args.generationFolder,
outPath=args.outPath
)
if (args.format == 'urdf'):
UrdfSubAssemblyUseCase().call(
geometryModels=geometryModels, assembly=assemblyStructure,
world=args.world,
generationFolder=args.generationFolder,
outPath=args.outPath
)


@ -1,7 +1,14 @@
<sdf version='1.7'>
<world name='empty'>
<physics name='1ms' type='ignored'>
<max_step_size>0.001</max_step_size>
<real_time_factor>1</real_time_factor>
<real_time_update_rate>1000</real_time_update_rate>
</physics>
<plugin name='ignition::gazebo::systems::Physics' filename='ignition-gazebo-physics-system'/>
<plugin name='ignition::gazebo::systems::UserCommands' filename='ignition-gazebo-user-commands-system'/>
<plugin name='ignition::gazebo::systems::SceneBroadcaster' filename='ignition-gazebo-scene-broadcaster-system'/>
<plugin name='ignition::gazebo::systems::Contact' filename='ignition-gazebo-contact-system'/>
<gravity>0 0 -9.8</gravity>
<magnetic_field>6e-06 2.3e-05 -4.2e-05</magnetic_field>
<atmosphere type='adiabatic'/>
@ -59,6 +66,6 @@
<pose>0 0 0 0 -0 0</pose>
<self_collide>false</self_collide>
</model>
</world>
</sdf>

asp/src/model/enum.py

@ -0,0 +1,2 @@
class Enum:
folderPath = 'sdf-generation/';


@ -0,0 +1,181 @@
import os
from helper.fs import FS
from src.model.sdf_join import SdfJoin
import typing
import uuid
def from_str(x):
assert isinstance(x, str)
return x
def from_none(x):
assert x is None
return x
def from_union(fs, x):
for f in fs:
try:
return f(x)
except:
pass
assert False
def to_class(c, x):
assert isinstance(x, c)
return x.to_dict()
DELIMITER_SCALE = 10000
class GeometryModel:
def __init__(self, name, ixx, ixy, ixz, iyy, izz, massSDF, posX, posY, posZ, eulerX, eulerY, eulerZ, iyz, stl, link, friction, centerMassX, centerMassY, centerMassZ):
self.name = name
self.ixx = ixx
self.ixy = ixy
self.ixz = ixz
self.iyy = iyy
self.izz = izz
self.massSDF = massSDF
self.posX = posX
self.posY = posY
self.posZ = posZ
self.eulerX = eulerX
self.eulerY = eulerY
self.eulerZ = eulerZ
self.iyz = iyz
self.stl = stl
self.link = link
self.friction = friction
self.centerMassX = centerMassX
self.centerMassY = centerMassY
self.centerMassZ = centerMassZ
@staticmethod
def from_dict(obj):
assert isinstance(obj, dict)
name = from_union([from_str, from_none], obj.get("name"))
ixx = from_union([from_str, from_none], obj.get("ixx"))
ixy = from_union([from_str, from_none], obj.get("ixy"))
ixz = from_union([from_str, from_none], obj.get("ixz"))
iyy = from_union([from_str, from_none], obj.get("iyy"))
izz = from_union([from_str, from_none], obj.get("izz"))
massSDF = from_union([from_str, from_none], obj.get("massSDF"))
posX = from_union([from_str, from_none], obj.get("posX"))
posY = from_union([from_str, from_none], obj.get("posY"))
posZ = from_union([from_str, from_none], obj.get("posZ"))
eulerX = from_union([from_str, from_none], obj.get("eulerX"))
eulerY = from_union([from_str, from_none], obj.get("eulerY"))
eulerZ = from_union([from_str, from_none], obj.get("eulerZ"))
iyz = from_union([from_str, from_none], obj.get("iyz"))
stl = from_union([from_str, from_none], obj.get("stl"))
link = from_union([from_str, from_none], obj.get('link'))
friction = from_union([from_str, from_none], obj.get("friction"))
centerMassX = from_union([from_str, from_none], obj.get("centerMassX"))
centerMassY = from_union([from_str, from_none], obj.get("centerMassY"))
centerMassZ = from_union([from_str, from_none], obj.get("centerMassZ"))
return GeometryModel(name, ixx, ixy, ixz, iyy, izz, massSDF, posX, posY, posZ, eulerX, eulerY, eulerZ, iyz, stl, link, friction, centerMassX, centerMassY, centerMassZ)
def to_dict(self):
result = {}
if self.name is not None:
result["name"] = from_union([from_str, from_none], self.name)
if self.ixx is not None:
result["ixx"] = from_union([from_str, from_none], self.ixx)
if self.ixy is not None:
result["ixy"] = from_union([from_str, from_none], self.ixy)
if self.ixz is not None:
result["ixz"] = from_union([from_str, from_none], self.ixz)
if self.iyy is not None:
result["iyy"] = from_union([from_str, from_none], self.iyy)
if self.izz is not None:
result["izz"] = from_union([from_str, from_none], self.izz)
if self.massSDF is not None:
result["massSDF"] = from_union([from_str, from_none], self.massSDF)
if self.posX is not None:
result["posX"] = from_union([from_str, from_none], self.posX)
if self.posY is not None:
result["posY"] = from_union([from_str, from_none], self.posY)
if self.posZ is not None:
result["posZ"] = from_union([from_str, from_none], self.posZ)
if self.eulerX is not None:
result["eulerX"] = from_union([from_str, from_none], self.eulerX)
if self.eulerY is not None:
result["eulerY"] = from_union([from_str, from_none], self.eulerY)
if self.eulerZ is not None:
result["eulerZ"] = from_union([from_str, from_none], self.eulerZ)
if self.iyz is not None:
result["iyz"] = from_union([from_str, from_none], self.iyz)
if self.stl is not None:
result["stl"] = from_union([from_str, from_none], self.stl)
if self.link is not None:
result['link'] = from_union([from_str, from_none], self.link)
if self.friction is not None:
result["friction"] = from_union([from_str, from_none], self.eulerZ)
if self.centerMassX is not None:
result['centerMassX'] = from_union(
[from_str, from_none], self.centerMassX)
if self.centerMassY is not None:
result['centerMassY'] = from_union(
[from_str, from_none], self.centerMassY)
if self.centerMassZ is not None:
result['centerMassZ'] = from_union(
[from_str, from_none], self.centerMassZ)
return result
def toJSON(self) -> str:
return str(self.to_dict()).replace('\'', '"')
def toSDF(self):
return FS.readFile(os.path.dirname(os.path.realpath(__file__))
+ '/../../mocks/sdf/model.sdf').replace('{name}', self.name,).replace('{posX}', self.posX).replace('{posY}', self.posY).replace('{posZ}', self.posZ).replace('{eulerX}', self.eulerX).replace('{eulerY}', self.eulerY).replace('{eulerZ}', self.eulerZ).replace('{ixx}', self.ixx).replace('{ixy}', self.ixy).replace('{ixz}', self.ixz).replace('{iyy}', self.iyy).replace('{iyz}', self.iyz).replace('{izz}', self.izz).replace('{massSDF}', self.massSDF,).replace('{stl}', self.stl).replace('{friction}', self.friction)
def toSdfLink(self):
return FS.readFile(os.path.dirname(os.path.realpath(__file__))
+ '/../../mocks/sdf/link.sdf').replace('{name}', self.name,).replace('{posX}', self.posX).replace('{posY}', self.posY).replace('{posZ}', self.posZ).replace('{eulerX}', self.eulerX).replace('{eulerY}', self.eulerY).replace('{eulerZ}', self.eulerZ).replace('{ixx}', self.ixx).replace('{ixy}', self.ixy).replace('{ixz}', self.ixz).replace('{iyy}', self.iyy).replace('{iyz}', self.iyz).replace('{izz}', self.izz).replace('{massSDF}', self.massSDF,).replace('{stl}', self.stl).replace('{friction}', self.friction)
def includeLink(self, pose=False):
if (pose == False):
return FS.readFile(os.path.dirname(os.path.realpath(__file__))
+ '/../../mocks/sdf/include.sdf').replace('{name}', self.name).replace('{uri}', '/' + self.name)
return FS.readFile(os.path.dirname(os.path.realpath(__file__))
+ '/../../mocks/sdf/include_pose.sdf').replace('{name}', self.name).replace('{uri}', '/' + self.name).replace('{posX}', self.posX).replace('{posY}', self.posY).replace('{posZ}', self.posZ).replace('{eulerX}', self.eulerX).replace('{eulerY}', self.eulerY).replace('{eulerZ}', self.eulerZ).replace('{ixx}', self.ixx).replace('{ixy}', self.ixy).replace('{ixz}', self.ixz).replace('{iyy}', self.iyy).replace('{iyz}', self.iyz).replace('{izz}', self.izz)
def generateSDFatJoinFixed(self, sdfModels: list['GeometryModel']):
sdf = '\n<model name="assembly">\n'
sdf += ' <link name="base_link">\n'
sdf += " <pose>0 0 0 0 0 0</pose>\n"
sdf += " </link>\n"
link = sdf + self.includeLink(pose=True)
if sdfModels.__len__() == 0:
return link
endTagLinkInc = link.__len__()
beginSDF = link[0: endTagLinkInc]
sdfJoin = beginSDF + '\n'
for el in sdfModels:
if el.name != self.name:
sdfJoin += el.includeLink(pose=True) + '\n'
endSDF = link[endTagLinkInc:link.__len__()]
for el in sdfModels:
if el.name != self.name:
sdfJoin += SdfJoin(name=str(uuid.uuid4()),
parent=self.name, child=el.name, modelAt=el).toSDF() + '\n'
sdfJoin += endSDF
sdfJoin += '</model>'
return sdfJoin
def toUrdf(self):
return FS.readFile(os.path.dirname(os.path.realpath(__file__))
+ '/../../mocks/urdf/model.urdf').replace('{name}', self.name).replace('{name}', self.name).replace('{uri}', '/' + self.name).replace('{posX}', self.posX).replace('{posY}', self.posY).replace('{posZ}', self.posZ).replace('{eulerX}', self.eulerX).replace('{eulerY}', self.eulerY).replace('{eulerZ}', self.eulerZ).replace('{ixx}', self.ixx).replace('{ixy}', self.ixy).replace('{ixz}', self.ixz).replace('{iyy}', self.iyy).replace('{iyz}', self.iyz).replace('{izz}', self.izz).replace('{stl}', '/' + self.stl).replace('{massSDF}', self.massSDF).replace('{centerMassX}', self.centerMassX).replace('{centerMassY}', self.centerMassY).replace('{centerMassZ}', self.centerMassZ)


@ -0,0 +1,12 @@
import os
from helper.fs import FS
class SdfGenerateWorldUseCase:
def call(assembly:str) -> str:
world = FS.readFile(os.path.dirname(os.path.realpath(__file__))
+ '/../../mocks/sdf/world.sdf')
beginWorld = world[0:world.find('</world') - 1]
endWorld = world[world.find('</world') - 1: world.__len__()]
return beginWorld + assembly + endWorld


@ -0,0 +1,57 @@
import os
from typing import Optional
from helper.fs import FS
from helper.fs import filterModels, listGetFirstValue
from src.model.asm import Assembly
from src.model.enum import Enum
from src.usecases.formatter_usecase import FormatterUseCase
from src.usecases.sdf_generate_world_usecase import SdfGenerateWorldUseCase
from src.model.sdf_geometry import GeometryModel
from distutils.dir_util import copy_tree
SDF_FILE_FORMAT = '.sdf'
CONFIG_PATH = os.path.dirname(os.path.realpath(
__file__)) + '/../../mocks/sdf/model.config'
class SdfSubAssemblyUseCase(Assembly):
def call(self, geometryModels: list[GeometryModel], assembly: list[str], outPath: str, generationFolder: str, world: bool):
asm = {}
generateSubAssemblyModels = self.generateSubAssembly(assembly)
inc = 0
for key, value in generateSubAssemblyModels.items():
inc += 1
if value['assembly'].__len__() != 0:
model: Optional[GeometryModel] = listGetFirstValue(
geometryModels, None, lambda x: x.name == value['assembly'][0])
if model != None:
asm[key] = {"assembly": model.generateSDFatJoinFixed(filterModels(geometryModels, value['assembly'])), "part": (
listGetFirstValue(geometryModels, None, lambda x: x.name == value['part'])).includeLink()}
self.copy(generationFolder=
generationFolder, format='/sdf', outPath=outPath)
dirPath = outPath + Enum.folderPath
for el in geometryModels:
path = dirPath + el.name + '/'
os.makedirs(path)
FS.writeFile(data=el.toSDF(), filePath=path,
fileName='/model' + SDF_FILE_FORMAT)
FS.writeFile(data=FS.readFile(CONFIG_PATH),
filePath=path, fileName='/model' + '.config')
for key, v in asm.items():
FS.writeFile(data=v['assembly'], filePath=dirPath,
fileName='/' + key + SDF_FILE_FORMAT)
else:
for key, v in asm.items():
FS.writeFile(data=SdfGenerateWorldUseCase.call(v['assembly']), filePath=dirPath,
fileName='/' + key + SDF_FILE_FORMAT)
FormatterUseCase.call(outPath=outPath, format=SDF_FILE_FORMAT)


@ -0,0 +1,19 @@
from helper.fs import FS
from src.model.enum import Enum
from src.model.asm import Assembly
from src.model.sdf_geometry import GeometryModel
import json
import re
URDF_FILE_FORMAT = '.urdf'
URDF_GENERATOR_FILE = 'urdf-generation' + '.json'
class UrdfSubAssemblyUseCase(Assembly):
def call(self, geometryModels: list[GeometryModel], assembly: list[str], outPath: str, generationFolder: str, world: bool):
dirPath = generationFolder + Enum.folderPath
asm = {}
for el in geometryModels:
asm[el.name] = el.toUrdf()
FS.writeFile(data=json.dumps(asm,indent=4),
fileName=URDF_GENERATOR_FILE, filePath=dirPath)

cad_generation/env.json

@ -0,0 +1,6 @@
{
"doc": "/home/idontsudo/framework/asp/out/disk_and_axis_n.FCStd",
"out": "/home/idontsudo/framework/asp/out",
"resultURL": "http://localhost:3002/assembly/save/out",
"projectId": "cubes"
}

Some files were not shown because too many files have changed in this diff.