Merge branch 'master' of https://gitlab.com/robossembler/framework into 59-freecad2pddl
This commit is contained in:
commit
62949e8292
107 changed files with 44329 additions and 135 deletions
3
.gitmodules
vendored
Normal file
3
.gitmodules
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
[submodule "insertion_vector_predicate/assembly"]
|
||||
path = insertion_vector_predicate/assembly
|
||||
url = https://github.com/yunshengtian/Assemble-Them-All
|
24
asp-review-app/.gitignore
vendored
Normal file
24
asp-review-app/.gitignore
vendored
Normal file
|
@ -0,0 +1,24 @@
|
|||
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
|
||||
|
||||
# dependencies
|
||||
node_modules
|
||||
/.pnp
|
||||
.pnp.js
|
||||
|
||||
# testing
|
||||
/coverage
|
||||
|
||||
# production
|
||||
/build
|
||||
|
||||
# misc
|
||||
.DS_Store
|
||||
.env.local
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
**/node_modules
|
2219
asp-review-app/server/package-lock.json
generated
Normal file
2219
asp-review-app/server/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load diff
43
asp-review-app/server/package.json
Normal file
43
asp-review-app/server/package.json
Normal file
|
@ -0,0 +1,43 @@
|
|||
{
|
||||
"name": "express-typescript",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"build": "npx tsc",
|
||||
"start": "node dist/index.js",
|
||||
"dev": "nodemon --exec ts-node --esm --transpileOnly ./src/server.ts"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"devDependencies": {
|
||||
"@types/compression": "^1.7.2",
|
||||
"@types/cors": "^2.8.13",
|
||||
"@types/express": "^4.17.17",
|
||||
"@types/express-fileupload": "^1.4.1",
|
||||
"@types/mongoose": "^5.11.97",
|
||||
"@types/node": "^17.0.45",
|
||||
"typescript": "^4.9.5"
|
||||
},
|
||||
"dependencies": {
|
||||
"body-parser": "^1.20.2",
|
||||
"class-transformer": "^0.5.1",
|
||||
"class-validator": "^0.14.0",
|
||||
"compression": "^1.7.4",
|
||||
"concurrently": "^8.0.1",
|
||||
"cors": "^2.8.5",
|
||||
"express": "^4.18.2",
|
||||
"express-cross": "^1.0.0",
|
||||
"express-fileupload": "^1.4.0",
|
||||
"first-di": "^1.0.11",
|
||||
"form-data": "^4.0.0",
|
||||
"lodash": "^4.17.21",
|
||||
"morgan": "^1.10.0",
|
||||
"multer": "^1.4.5-lts.1",
|
||||
"node-stream-zip": "^1.15.0",
|
||||
"nodemon": "^2.0.22",
|
||||
"ts-node": "^10.9.1"
|
||||
}
|
||||
}
|
69
asp-review-app/server/src/app.ts
Normal file
69
asp-review-app/server/src/app.ts
Normal file
|
@ -0,0 +1,69 @@
|
|||
import express from "express";
|
||||
import compression from "compression";
|
||||
import cors from "cors";
|
||||
import { Routes } from "./core/interfaces/router";
|
||||
|
||||
import bodyParser from "body-parser";
|
||||
import fileUpload from "express-fileupload";
|
||||
import { DevEnv } from "./core/env/env";
|
||||
import path from 'path';
|
||||
import { locator } from "./core/di/register_di";
|
||||
export const dirname = path.resolve();
|
||||
|
||||
const corsOptions = {
|
||||
origin: process.env.CORS_ALLOW_ORIGIN || '*',
|
||||
methods: ['GET', 'PUT', 'POST', 'DELETE', 'OPTIONS'],
|
||||
allowedHeaders: ['Content-Type', 'Authorization']
|
||||
};
|
||||
export class App {
|
||||
public app: express.Application;
|
||||
public port: string | number;
|
||||
public env: string;
|
||||
constructor(routes: Routes[], port) {
|
||||
this.app = express();
|
||||
this.port = port;
|
||||
this.env = process.env.NODE_ENV || "development";
|
||||
this.initializeMiddleware();
|
||||
this.initializeRoutes(routes);
|
||||
this.loadAppDependencies();
|
||||
}
|
||||
|
||||
public listen() {
|
||||
this.app.listen(this.port, () => {
|
||||
console.info(`=================================`);
|
||||
console.info(`======= ENV: ${this.env} =======`);
|
||||
console.info(`🚀 App listening on the port ${this.port}`);
|
||||
console.info(`=================================`);
|
||||
});
|
||||
}
|
||||
|
||||
public getServer() {
|
||||
return this.app;
|
||||
}
|
||||
|
||||
private initializeMiddleware() {
|
||||
this.app.use(
|
||||
cors(corsOptions)
|
||||
);
|
||||
this.app.use(compression());
|
||||
this.app.use(express.json());
|
||||
this.app.use(express.urlencoded({ extended: true }));
|
||||
this.app.use(bodyParser.json());
|
||||
this.app.use(bodyParser.urlencoded({ extended: true }));
|
||||
console.log(dirname + '/public/')
|
||||
this.app.use(express.static(dirname + '/public/'));
|
||||
this.app.use(fileUpload({
|
||||
createParentPath: true
|
||||
}));
|
||||
}
|
||||
|
||||
private initializeRoutes(routes: Routes[]) {
|
||||
routes.forEach((route) => {
|
||||
this.app.use("/", route.router);
|
||||
});
|
||||
}
|
||||
loadAppDependencies() {
|
||||
|
||||
locator(new DevEnv());
|
||||
}
|
||||
}
|
24
asp-review-app/server/src/core/di/register_di.ts
Normal file
24
asp-review-app/server/src/core/di/register_di.ts
Normal file
|
@ -0,0 +1,24 @@
|
|||
|
||||
import { override } from "first-di";
|
||||
import { Env } from "../env/env";
|
||||
import { AssemblyController } from "../../features/assembly_create/assembly_create_controller";
|
||||
import { AssemblyPreviewsController } from "../../features/assembly_previews/assembly_previews_controller";
|
||||
import { EntityRepository } from "../repository/entity_repository";
|
||||
import { ZipRepository } from "../repository/zip_repository";
|
||||
|
||||
|
||||
export const locator = (env: Env) => {
|
||||
// override(Env, env)
|
||||
registerRepository(env)
|
||||
registerController(env)
|
||||
};
|
||||
const registerRepository = (env:Env) => {
|
||||
override(ZipRepository, ZipRepository);
|
||||
override(EntityRepository, EntityRepository)
|
||||
|
||||
}
|
||||
const registerController = (env: Env) => {
|
||||
override(AssemblyController,AssemblyController)
|
||||
override(AssemblyPreviewsController, AssemblyController)
|
||||
|
||||
}
|
10
asp-review-app/server/src/core/exceptions/HttpException.ts
Normal file
10
asp-review-app/server/src/core/exceptions/HttpException.ts
Normal file
|
@ -0,0 +1,10 @@
|
|||
export class HttpException extends Error {
|
||||
public status: number;
|
||||
public message: string;
|
||||
|
||||
constructor(status: number, message: string) {
|
||||
super(message);
|
||||
this.status = status;
|
||||
this.message = message;
|
||||
}
|
||||
}
|
202
asp-review-app/server/src/core/helper/memorization.ts
Normal file
202
asp-review-app/server/src/core/helper/memorization.ts
Normal file
|
@ -0,0 +1,202 @@
|
|||
interface MemoOptions<F extends Fn, S extends unknown[] = unknown[]> {
|
||||
/**
|
||||
* Serialize the function call arguments
|
||||
* This is used to identify cache key
|
||||
*/
|
||||
serialize?: (...args: Parameters<F>) => S;
|
||||
}
|
||||
interface MemoAsyncOptions<F extends Fn> extends MemoOptions<F> {
|
||||
external?: {
|
||||
get: (args: Parameters<F>) => Promise<Awaited<ReturnType<F>> | undefined | null>;
|
||||
|
||||
set: (args: Parameters<F>, value: Awaited<ReturnType<F>>) => Promise<void>;
|
||||
|
||||
remove: (args: Parameters<F>) => Promise<void>;
|
||||
|
||||
clear: () => Promise<void>;
|
||||
};
|
||||
}
|
||||
|
||||
type Fn = (...params: any[]) => any;
|
||||
|
||||
type AsyncFn = (...params: any[]) => Promise<any>;
|
||||
|
||||
interface MemoFunc<F extends Fn> {
|
||||
// Call the target function, if cache is valid, return cache
|
||||
(...args: Parameters<F>): ReturnType<F>;
|
||||
|
||||
// Same with this function
|
||||
get(...args: Parameters<F>): ReturnType<F>;
|
||||
|
||||
// Call the raw function and skip cache
|
||||
raw(...args: Parameters<F>): ReturnType<F>;
|
||||
|
||||
// Clear cache
|
||||
clear(...args: Parameters<F> | []): void | Promise<void>;
|
||||
}
|
||||
|
||||
export const enum State {
|
||||
Empty,
|
||||
Ok,
|
||||
Waiting,
|
||||
Error
|
||||
}
|
||||
|
||||
export interface Node<T extends Fn> {
|
||||
state: State;
|
||||
value: ReturnType<T> | undefined;
|
||||
error: unknown;
|
||||
primitive: Map<any, Node<T>>;
|
||||
reference: WeakMap<any, Node<T>>;
|
||||
callbacks?: Set<{ res: (value: ReturnType<T>) => void; rej: (error: unknown) => void }>;
|
||||
}
|
||||
|
||||
function makeNode<T extends Fn>(): Node<T> {
|
||||
return {
|
||||
state: State.Empty,
|
||||
value: undefined,
|
||||
error: undefined,
|
||||
primitive: new Map(),
|
||||
reference: new WeakMap()
|
||||
};
|
||||
}
|
||||
|
||||
function clearNode<T extends Fn>(node: Node<T> | undefined) {
|
||||
if (node) {
|
||||
node.state = State.Empty;
|
||||
node.value = undefined;
|
||||
node.error = undefined;
|
||||
node.primitive = new Map();
|
||||
node.reference = new WeakMap();
|
||||
}
|
||||
}
|
||||
function isPrimitiveType(value: unknown) {
|
||||
return (typeof value !== 'object' && typeof value !== 'function') || value === null;
|
||||
}
|
||||
function walkBase<T extends Fn, P extends any[] = Parameters<T>>(
|
||||
node: Node<T>,
|
||||
args: P,
|
||||
hooks: { makeNode: () => Node<T> | undefined }
|
||||
): Node<T> | undefined {
|
||||
let cur = node;
|
||||
for (const arg of args) {
|
||||
if (isPrimitiveType(arg)) {
|
||||
if (cur.primitive.has(arg)) {
|
||||
cur = cur.primitive.get(arg)!;
|
||||
} else {
|
||||
const newNode = hooks.makeNode();
|
||||
if (newNode) {
|
||||
cur.primitive.set(arg, newNode);
|
||||
cur = newNode;
|
||||
} else {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (cur.reference.has(arg)) {
|
||||
cur = cur.reference.get(arg)!;
|
||||
} else {
|
||||
const newNode = hooks.makeNode();
|
||||
if (newNode) {
|
||||
cur.reference.set(arg, newNode);
|
||||
cur = newNode;
|
||||
} else {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return cur;
|
||||
}
|
||||
|
||||
function walkAndCreate<T extends Fn, P extends any[] = Parameters<T>>(
|
||||
node: Node<T>,
|
||||
args: P
|
||||
) {
|
||||
return walkBase(node, args, { makeNode })!;
|
||||
}
|
||||
|
||||
function walkOrBreak<T extends Fn, P extends any[] = Parameters<T>>(node: Node<T>, args: P) {
|
||||
return walkBase(node, args, { makeNode: () => undefined });
|
||||
}
|
||||
export function memoAsync<F extends AsyncFn>(
|
||||
fn: F,
|
||||
options: MemoAsyncOptions<F> = {}
|
||||
): MemoFunc<F> {
|
||||
const root = makeNode<F>();
|
||||
|
||||
const memoFunc = async function (...args: Parameters<F>) {
|
||||
// Serialize args
|
||||
const path = options.serialize ? options.serialize(...args) : args;
|
||||
const cur = walkAndCreate<F, any[]>(root, path);
|
||||
|
||||
if (cur.state === State.Ok) {
|
||||
return cur.value;
|
||||
} else if (cur.state === State.Error) {
|
||||
throw cur.error;
|
||||
} else if (cur.state === State.Waiting) {
|
||||
return new Promise((res, rej) => {
|
||||
if (!cur.callbacks) {
|
||||
cur.callbacks = new Set();
|
||||
}
|
||||
cur.callbacks!.add({ res, rej });
|
||||
});
|
||||
} else {
|
||||
try {
|
||||
cur.state = State.Waiting;
|
||||
|
||||
const external = options.external ? await options.external.get(args) : undefined;
|
||||
const value = external !== undefined && external !== null ? external : await fn(...args);
|
||||
|
||||
cur.state = State.Ok;
|
||||
cur.value = value;
|
||||
|
||||
if (options.external) {
|
||||
await options.external.set(args, value);
|
||||
}
|
||||
|
||||
// Resolve other waiting callbacks
|
||||
for (const callback of cur.callbacks ?? []) {
|
||||
callback.res(value);
|
||||
}
|
||||
|
||||
return value;
|
||||
} catch (error) {
|
||||
cur.state = State.Error;
|
||||
cur.error = error;
|
||||
|
||||
// Reject other waiting callbacks
|
||||
for (const callback of cur.callbacks ?? []) {
|
||||
callback.rej(error);
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
} as MemoFunc<F>;
|
||||
|
||||
memoFunc.get = (...args) => {
|
||||
return memoFunc(...args);
|
||||
};
|
||||
|
||||
memoFunc.raw = (...args) => {
|
||||
return fn(...args) as ReturnType<F>;
|
||||
};
|
||||
|
||||
memoFunc.clear = async (...args) => {
|
||||
if (args.length === 0) {
|
||||
clearNode(root);
|
||||
if (options.external) {
|
||||
await options.external.clear();
|
||||
}
|
||||
} else {
|
||||
const cur = walkOrBreak<F>(root, args as Parameters<F>);
|
||||
clearNode(cur);
|
||||
if (options.external) {
|
||||
await options.external.remove(args as Parameters<F>);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
return memoFunc;
|
||||
}
|
6
asp-review-app/server/src/core/interfaces/router.ts
Normal file
6
asp-review-app/server/src/core/interfaces/router.ts
Normal file
|
@ -0,0 +1,6 @@
|
|||
import { Router } from "express";
|
||||
|
||||
export interface Routes {
|
||||
path?: string;
|
||||
router: Router;
|
||||
}
|
|
@ -0,0 +1,26 @@
|
|||
import { HttpException } from '../exceptions/HttpException';
|
||||
import { plainToClass } from 'class-transformer';
|
||||
import { validate, ValidationError } from 'class-validator';
|
||||
import { RequestHandler } from 'express';
|
||||
|
||||
const validationMiddleware = (
|
||||
type: any,
|
||||
value = 'body',
|
||||
skipMissingProperties = false,
|
||||
whitelist = true,
|
||||
forbidNonWhitelisted = true,
|
||||
): RequestHandler => {
|
||||
return (req, res, next) => {
|
||||
console.log(req[value])
|
||||
validate(plainToClass(type, req[value]), { skipMissingProperties, whitelist, forbidNonWhitelisted }).then((errors: ValidationError[]) => {
|
||||
if (errors.length > 0) {
|
||||
const message = errors.map((error: ValidationError) => Object.values(error.constraints)).join(', ');
|
||||
next(new HttpException(400, message));
|
||||
} else {
|
||||
next();
|
||||
}
|
||||
});
|
||||
};
|
||||
};
|
||||
|
||||
export default validationMiddleware;
|
|
@ -0,0 +1,3 @@
|
|||
export class ComputeRepository{
|
||||
|
||||
}
|
|
@ -0,0 +1,50 @@
|
|||
import { promises as fs } from 'fs';
|
||||
import { dirname } from '../../app';
|
||||
import fsSync from "fs";
|
||||
import { constants } from 'buffer';
|
||||
|
||||
export class EntityRepository {
|
||||
private path: String = dirname + '/public/'
|
||||
private getFileName(file: String) {
|
||||
|
||||
return file.slice(0, file.indexOf('.'))
|
||||
}
|
||||
public async getDir(path){
|
||||
return this._fullPath(await fs.readdir(path + ''), duplicatedDelete(this.path, path))
|
||||
}
|
||||
public isExistDirPath(path:String):boolean{
|
||||
return fsSync.existsSync(path + '')
|
||||
}
|
||||
public async saveRootEntity(buffer: Buffer, name: string) {
|
||||
const filePath = this.path + this.getFileName(name) + '/'
|
||||
|
||||
if (this.isExistDirPath(filePath)) {
|
||||
await fs.rm(filePath, { recursive: true })
|
||||
}
|
||||
await fs.mkdir(filePath);
|
||||
await fs.writeFile(filePath + name, buffer);
|
||||
}
|
||||
public async getAllRootEntity() {
|
||||
return await fs.readdir('' + this.path)
|
||||
}
|
||||
public async getEntityStorage(entity: string):Promise<String[]> | undefined {
|
||||
return this._fullPath(await fs.readdir(this.path + entity), entity + '/' )
|
||||
}
|
||||
|
||||
private _fullPath(folderPath,helpElement = '') {
|
||||
return folderPath.map((el) => this.path + helpElement + el )
|
||||
}
|
||||
public async readJson<T>(path) {
|
||||
|
||||
return JSON.parse((await fs.readFile(path)).toString())
|
||||
}
|
||||
}
|
||||
function duplicatedDelete(strChild:String,strMain:String){
|
||||
let result = ''
|
||||
for(let i = 0;i < strMain.length; i++){
|
||||
if(!(strMain[i] === strChild[i])){
|
||||
result+=strMain[i]
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
|
@ -0,0 +1,3 @@
|
|||
export class ZipRepository {
|
||||
|
||||
}
|
5
asp-review-app/server/src/core/routes/routes.ts
Normal file
5
asp-review-app/server/src/core/routes/routes.ts
Normal file
|
@ -0,0 +1,5 @@
|
|||
import { AssemblyRoute } from "../../features/assembly_create/assembly_create_route";
|
||||
import { AssemblyPreviewsRoute } from "../../features/assembly_previews/assembly_previews_route";
|
||||
|
||||
|
||||
export const routes = [new AssemblyRoute(), new AssemblyPreviewsRoute()];
|
|
@ -0,0 +1,36 @@
|
|||
import { NextFunction, Request, Response } from 'express';
|
||||
import { autowired } from 'first-di';
|
||||
|
||||
import "reflect-metadata";
|
||||
import { dirname } from '../../app';
|
||||
import { EntityRepository } from '../../core/repository/entity_repository';
|
||||
import { IFile } from './model/zip_files_model';
|
||||
|
||||
|
||||
export class AssemblyController {
|
||||
public getAllAssembly = (req: Request, res: Response, next: NextFunction): void => {
|
||||
throw new Error('Method not implemented.');
|
||||
}
|
||||
|
||||
|
||||
@autowired()
|
||||
private readonly fsRepository: EntityRepository;
|
||||
|
||||
public createAssembly = (req: Request, res: Response, next: NextFunction): void => {
|
||||
try {
|
||||
const file = req.files.freecad as IFile;
|
||||
const buffer = file.data as Buffer;
|
||||
console.log(file.data)
|
||||
// console.log(files.freecad.data)
|
||||
// const filePath = dirname + '/' + files.freecad.name as string;
|
||||
this.fsRepository.saveRootEntity(file.data, file.name)
|
||||
// console.log(filePath)
|
||||
|
||||
res.sendStatus(200);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
|
||||
import express, { Router } from 'express';
|
||||
import { Routes } from '../../core/interfaces/router';
|
||||
import { autowired } from 'first-di';
|
||||
import { AssemblyController } from './assembly_create_controller';
|
||||
import path from 'path';
|
||||
import { dirname } from '../../app';
|
||||
import validationMiddleware from '../../core/middlewares/ValidationMiddleware';
|
||||
import { CadFilesModel } from './model/zip_files_model';
|
||||
|
||||
export class AssemblyRoute implements Routes {
|
||||
public path = '/assembly';
|
||||
public router = Router();
|
||||
@autowired()
|
||||
private readonly assemblyController: AssemblyController;
|
||||
constructor() {
|
||||
this.initializeRoutes();
|
||||
}
|
||||
|
||||
private initializeRoutes() {
|
||||
// this.router.use(`${this.path}`, express.static(path.join(dirname, '../../public')));
|
||||
this.router.post(`${this.path}`, validationMiddleware(CadFilesModel, 'files'), this.assemblyController.createAssembly)
|
||||
|
||||
this.router.get(`${this.path}`, this.assemblyController.getAllAssembly)
|
||||
}
|
||||
}
|
|
@ -0,0 +1,20 @@
|
|||
import { IsArray, IsObject } from "class-validator";
|
||||
export interface IFile {
|
||||
name: string,
|
||||
data: Buffer,
|
||||
size: Number,
|
||||
encoding: string,
|
||||
tempFilePath: string,
|
||||
truncated: Boolean,
|
||||
mimetype: string,
|
||||
md5: string,
|
||||
}
|
||||
interface ICadFileModel {
|
||||
freecad: IFile;
|
||||
}
|
||||
export class CadFilesModel implements ICadFileModel {
|
||||
@IsObject()
|
||||
public freecad: IFile;
|
||||
}
|
||||
|
||||
|
|
@ -0,0 +1,151 @@
|
|||
import { NextFunction, Request, Response } from "express";
|
||||
import { autowired } from "first-di";
|
||||
import { EntityRepository } from "../../core/repository/entity_repository";
|
||||
import { port } from "../../server";
|
||||
import { memoAsync } from "../../core/helper/memorization";
|
||||
|
||||
import "reflect-metadata";
|
||||
import { async } from "node-stream-zip";
|
||||
|
||||
export class AssemblyPreviewsController {
|
||||
@autowired()
|
||||
private readonly entityRepository: EntityRepository;
|
||||
|
||||
public getAllAssembly = async (
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<void> => {
|
||||
try {
|
||||
res.send(await this.entityRepository.getAllRootEntity());
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
};
|
||||
|
||||
public getAssemblySubsequenceById = async (
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<void> => {
|
||||
try {
|
||||
const entity = await this.entityRepository.getEntityStorage(
|
||||
req.params.id
|
||||
);
|
||||
const aspUsage = Number(req.query.count) - 1;
|
||||
console.log(aspUsage);
|
||||
if (entity === undefined) {
|
||||
res.status(404).json("entity not found");
|
||||
return;
|
||||
}
|
||||
|
||||
res.json(
|
||||
await this._assemblyCompute(
|
||||
aspUsage,
|
||||
entity,
|
||||
this.entityRepository,
|
||||
req.hostname,
|
||||
req.params.id
|
||||
)
|
||||
);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
};
|
||||
|
||||
public getAssemblyInsertionSequenceById = async (
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
) => {
|
||||
const entity = await this.entityRepository.getEntityStorage(req.params.id);
|
||||
const aspUsage = Number(req.query.count);
|
||||
const assemblyFolder = entity.find((el) => {
|
||||
return el.match("assembly");
|
||||
});
|
||||
const asmCountFolder = "0000" + aspUsage;
|
||||
|
||||
const assemblyDirPath = assemblyFolder + "/" + asmCountFolder;
|
||||
|
||||
if (!this.entityRepository.isExistDirPath(assemblyDirPath)) {
|
||||
return res.status(400).json({ error: "bad request" });
|
||||
}
|
||||
const assemblyProcessDir = await this.entityRepository.getDir(
|
||||
assemblyDirPath + "/process/"
|
||||
);
|
||||
const firstObj = assemblyProcessDir.find((el) => {
|
||||
return el.match("1.obj");
|
||||
});
|
||||
const zeroObj = await assemblyProcessDir.find((el) => {
|
||||
return el.match("0.obj");
|
||||
});
|
||||
|
||||
const insertions = await this.entityRepository.readJson(
|
||||
assemblyDirPath + "/" + "insertion_path.json"
|
||||
);
|
||||
|
||||
if (
|
||||
insertions === undefined ||
|
||||
zeroObj === undefined ||
|
||||
firstObj === undefined
|
||||
) {
|
||||
res.status(400).json({ error: "bad" });
|
||||
return;
|
||||
}
|
||||
|
||||
res.json({
|
||||
offset: aspUsage,
|
||||
count: 4,
|
||||
parent: `http://${req.hostname}:${port}/${
|
||||
req.params.id
|
||||
}/assembly/${asmCountFolder}/${0}.obj`,
|
||||
|
||||
child: `http://${req.hostname}:${port}/${
|
||||
req.params.id
|
||||
}/assembly/${asmCountFolder}/${1}.obj`,
|
||||
|
||||
insertions: insertions,
|
||||
});
|
||||
return;
|
||||
};
|
||||
private async _assemblyCompute(
|
||||
id: number,
|
||||
entityFolder: Array<String>,
|
||||
repository: EntityRepository,
|
||||
host: string,
|
||||
entity: string
|
||||
) {
|
||||
const assemblySequence = entityFolder.find((el) => {
|
||||
return el.match("step-structure.json");
|
||||
});
|
||||
|
||||
const assembly: Array<String> = await repository.readJson<Array<String>>(
|
||||
assemblySequence
|
||||
);
|
||||
|
||||
if (id == 0) {
|
||||
return {
|
||||
assembly: [
|
||||
`http://${host}:${port}/${entity}/sdf/meshes/${assembly[id]}.obj`,
|
||||
],
|
||||
offset: 1,
|
||||
count: assemblySequence.length,
|
||||
};
|
||||
} else {
|
||||
const assemblyIndexed = assembly
|
||||
.map((_item, index) => {
|
||||
if (index <= id) {
|
||||
return index;
|
||||
}
|
||||
})
|
||||
.filter((el) => el != undefined);
|
||||
return {
|
||||
assembly: assemblyIndexed.map((el) => {
|
||||
return `http://${host}:${port}/${entity}/sdf/meshes/${assembly[el]}.obj`;
|
||||
}),
|
||||
count: assemblyIndexed.length,
|
||||
offset: assembly.length,
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,29 @@
|
|||
import express, { Router } from 'express';
|
||||
import { Routes } from '../../core/interfaces/router';
|
||||
import { autowired } from 'first-di';
|
||||
// import { AssemblyController } from './assembly_create_controller';
|
||||
import path from 'path';
|
||||
import { dirname } from '../../app';
|
||||
import validationMiddleware from '../../core/middlewares/ValidationMiddleware';
|
||||
import { AssemblyPreviewsController } from './assembly_previews_controller';
|
||||
// import { CadFilesModel } from './model/zip_files_model';
|
||||
|
||||
export class AssemblyPreviewsRoute implements Routes {
|
||||
public path = '/assembly/preview/';
|
||||
public router = Router();
|
||||
@autowired()
|
||||
private readonly assemblyController: AssemblyPreviewsController;
|
||||
constructor() {
|
||||
this.initializeRoutes();
|
||||
}
|
||||
|
||||
private initializeRoutes() {
|
||||
this.router.get(`${this.path}`, this.assemblyController.getAllAssembly);
|
||||
// this.router.get(`${this.path}`)
|
||||
this.router.get(`${this.path}subsequence/:id`, this.assemblyController.getAssemblySubsequenceById)
|
||||
this.router.get(`${this.path}insertion_sequence/:id`, this.assemblyController.getAssemblyInsertionSequenceById)
|
||||
// this.router.post(`${this.path}`, validationMiddleware(CadFilesModel, 'files'), this.assemblyController.createAssembly)
|
||||
|
||||
// this.router.get(`${this.path}`, this.assemblyController.getAllAssembly)
|
||||
}
|
||||
}
|
14
asp-review-app/server/src/server.ts
Normal file
14
asp-review-app/server/src/server.ts
Normal file
|
@ -0,0 +1,14 @@
|
|||
import { App } from "./app";
|
||||
import { routes } from "./core/routes/routes";
|
||||
import "reflect-metadata";
|
||||
|
||||
export const port = 3002
|
||||
|
||||
|
||||
const app = new App(routes,port);
|
||||
|
||||
|
||||
function main() {
|
||||
app.listen();
|
||||
}
|
||||
main();
|
28
asp-review-app/server/tsconfig.json
Normal file
28
asp-review-app/server/tsconfig.json
Normal file
|
@ -0,0 +1,28 @@
|
|||
{
|
||||
"compileOnSave": false,
|
||||
"compilerOptions": {
|
||||
"target": "es2017",
|
||||
"lib": ["es2017", "esnext.asynciterable"],
|
||||
"typeRoots": ["node_modules/@types"],
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"experimentalDecorators": true,
|
||||
"emitDecoratorMetadata": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"moduleResolution": "node",
|
||||
"module": "ESNext",
|
||||
"pretty": true,
|
||||
"sourceMap": true,
|
||||
"declaration": true,
|
||||
"outDir": "./dist",
|
||||
"allowJs": true,
|
||||
"noEmit": false,
|
||||
"esModuleInterop": true,
|
||||
"resolveJsonModule": true,
|
||||
},
|
||||
"ts-node": {
|
||||
"esm": true,
|
||||
"experimentalSpecifierResolution": "node",
|
||||
},
|
||||
"include": ["src/**/*.ts", "src/**/*.json", ".env"],
|
||||
"exclude": ["node_modules"]
|
||||
}
|
46
asp-review-app/ui/README.md
Normal file
46
asp-review-app/ui/README.md
Normal file
|
@ -0,0 +1,46 @@
|
|||
# Getting Started with Create React App
|
||||
|
||||
This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app).
|
||||
|
||||
## Available Scripts
|
||||
|
||||
In the project directory, you can run:
|
||||
|
||||
### `yarn start`
|
||||
|
||||
Runs the app in the development mode.\
|
||||
Open [http://localhost:3000](http://localhost:3000) to view it in the browser.
|
||||
|
||||
The page will reload if you make edits.\
|
||||
You will also see any lint errors in the console.
|
||||
|
||||
### `yarn test`
|
||||
|
||||
Launches the test runner in the interactive watch mode.\
|
||||
See the section about [running tests](https://facebook.github.io/create-react-app/docs/running-tests) for more information.
|
||||
|
||||
### `yarn build`
|
||||
|
||||
Builds the app for production to the `build` folder.\
|
||||
It correctly bundles React in production mode and optimizes the build for the best performance.
|
||||
|
||||
The build is minified and the filenames include the hashes.\
|
||||
Your app is ready to be deployed!
|
||||
|
||||
See the section about [deployment](https://facebook.github.io/create-react-app/docs/deployment) for more information.
|
||||
|
||||
### `yarn eject`
|
||||
|
||||
**Note: this is a one-way operation. Once you `eject`, you can’t go back!**
|
||||
|
||||
If you aren’t satisfied with the build tool and configuration choices, you can `eject` at any time. This command will remove the single build dependency from your project.
|
||||
|
||||
Instead, it will copy all the configuration files and the transitive dependencies (webpack, Babel, ESLint, etc) right into your project so you have full control over them. All of the commands except `eject` will still work, but they will point to the copied scripts so you can tweak them. At this point you’re on your own.
|
||||
|
||||
You don’t have to ever use `eject`. The curated feature set is suitable for small and middle deployments, and you shouldn’t feel obligated to use this feature. However we understand that this tool wouldn’t be useful if you couldn’t customize it when you are ready for it.
|
||||
|
||||
## Learn More
|
||||
|
||||
You can learn more in the [Create React App documentation](https://facebook.github.io/create-react-app/docs/getting-started).
|
||||
|
||||
To learn React, check out the [React documentation](https://reactjs.org/).
|
104
asp-review-app/ui/config/env.js
Normal file
104
asp-review-app/ui/config/env.js
Normal file
|
@ -0,0 +1,104 @@
|
|||
'use strict';

const fs = require('fs');
const path = require('path');
const paths = require('./paths');

// Make sure that including paths.js after env.js will read .env variables.
delete require.cache[require.resolve('./paths')];

const NODE_ENV = process.env.NODE_ENV;
if (!NODE_ENV) {
  throw new Error(
    'The NODE_ENV environment variable is required but was not specified.'
  );
}

// https://github.com/bkeepers/dotenv#what-other-env-files-can-i-use
const dotenvFiles = [
  `${paths.dotenv}.${NODE_ENV}.local`,
  // `.env.local` is skipped for `test` so tests produce the same
  // results for everyone.
  NODE_ENV !== 'test' && `${paths.dotenv}.local`,
  `${paths.dotenv}.${NODE_ENV}`,
  paths.dotenv,
].filter(Boolean);

// Load each .env* file that exists. dotenv never overrides variables that
// are already set, and variable expansion is supported.
// https://github.com/motdotla/dotenv
// https://github.com/motdotla/dotenv-expand
for (const dotenvFile of dotenvFiles) {
  if (fs.existsSync(dotenvFile)) {
    require('dotenv-expand')(
      require('dotenv').config({ path: dotenvFile })
    );
  }
}

// Support resolving modules according to `NODE_PATH` (absolute-path entries
// are dropped so Node core modules are never shadowed by webpack shims).
// https://github.com/facebook/create-react-app/issues/253
const appDirectory = fs.realpathSync(process.cwd());
process.env.NODE_PATH = (process.env.NODE_PATH || '')
  .split(path.delimiter)
  .filter(folder => folder && !path.isAbsolute(folder))
  .map(folder => path.resolve(appDirectory, folder))
  .join(path.delimiter);

// Grab NODE_ENV and REACT_APP_* environment variables and prepare them to be
// injected into the application via DefinePlugin in webpack configuration.
const REACT_APP = /^REACT_APP_/i;

function getClientEnvironment(publicUrl) {
  // Base values first, then every REACT_APP_* variable on top.
  const raw = {
    // Useful for determining whether we’re running in production mode.
    NODE_ENV: process.env.NODE_ENV || 'development',
    // Correct path to static assets in `public` (escape hatch only).
    PUBLIC_URL: publicUrl,
    // sockjs connection settings for webpack-dev-server.
    WDS_SOCKET_HOST: process.env.WDS_SOCKET_HOST,
    WDS_SOCKET_PATH: process.env.WDS_SOCKET_PATH,
    WDS_SOCKET_PORT: process.env.WDS_SOCKET_PORT,
    // Whether or not react-refresh is enabled.
    FAST_REFRESH: process.env.FAST_REFRESH !== 'false',
  };
  for (const key of Object.keys(process.env)) {
    if (REACT_APP.test(key)) {
      raw[key] = process.env[key];
    }
  }

  // Stringify all values so we can feed into webpack DefinePlugin.
  const stringified = {
    'process.env': Object.keys(raw).reduce((env, key) => {
      env[key] = JSON.stringify(raw[key]);
      return env;
    }, {}),
  };

  return { raw, stringified };
}

module.exports = getClientEnvironment;
|
66
asp-review-app/ui/config/getHttpsConfig.js
Normal file
66
asp-review-app/ui/config/getHttpsConfig.js
Normal file
|
@ -0,0 +1,66 @@
|
|||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const crypto = require('crypto');
|
||||
const chalk = require('react-dev-utils/chalk');
|
||||
const paths = require('./paths');
|
||||
|
||||
// Ensure the certificate and key provided are valid and if not
// throw an easy to debug error
function validateKeyAndCerts({ cert, key, keyFile, crtFile }) {
  // Build the user-facing error for an invalid cert or key file.
  const invalid = (label, file, err) =>
    new Error(
      `The certificate${label} "${chalk.yellow(file)}" is invalid.\n${err.message}`
    );

  let encrypted;
  try {
    // publicEncrypt will throw an error with an invalid cert
    encrypted = crypto.publicEncrypt(cert, Buffer.from('test'));
  } catch (err) {
    throw invalid('', crtFile, err);
  }

  try {
    // privateDecrypt will throw an error with an invalid key
    crypto.privateDecrypt(key, encrypted);
  } catch (err) {
    throw invalid(' key', keyFile, err);
  }
}
|
||||
|
||||
// Read file and throw an error if it doesn't exist
function readEnvFile(file, type) {
  // Happy path first: hand back the raw file contents as a Buffer.
  if (fs.existsSync(file)) {
    return fs.readFileSync(file);
  }
  throw new Error(
    `You specified ${chalk.cyan(
      type
    )} in your env, but the file "${chalk.yellow(file)}" can't be found.`
  );
}
|
||||
|
||||
// Get the https config
// Return cert files if provided in env, otherwise just true or false
function getHttpsConfig() {
  const { SSL_CRT_FILE, SSL_KEY_FILE, HTTPS } = process.env;
  const isHttps = HTTPS === 'true';

  // Without both cert files all we can report is whether HTTPS was requested.
  if (!isHttps || !SSL_CRT_FILE || !SSL_KEY_FILE) {
    return isHttps;
  }

  const crtFile = path.resolve(paths.appPath, SSL_CRT_FILE);
  const keyFile = path.resolve(paths.appPath, SSL_KEY_FILE);
  const config = {
    cert: readEnvFile(crtFile, 'SSL_CRT_FILE'),
    key: readEnvFile(keyFile, 'SSL_KEY_FILE'),
  };

  // Fail fast with a readable error if the pair is not usable.
  validateKeyAndCerts({ ...config, keyFile, crtFile });
  return config;
}

module.exports = getHttpsConfig;
|
29
asp-review-app/ui/config/jest/babelTransform.js
Normal file
29
asp-review-app/ui/config/jest/babelTransform.js
Normal file
|
@ -0,0 +1,29 @@
|
|||
'use strict';
|
||||
|
||||
const babelJest = require('babel-jest').default;
|
||||
|
||||
const hasJsxRuntime = (() => {
|
||||
if (process.env.DISABLE_NEW_JSX_TRANSFORM === 'true') {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
require.resolve('react/jsx-runtime');
|
||||
return true;
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
})();
|
||||
|
||||
module.exports = babelJest.createTransformer({
|
||||
presets: [
|
||||
[
|
||||
require.resolve('babel-preset-react-app'),
|
||||
{
|
||||
runtime: hasJsxRuntime ? 'automatic' : 'classic',
|
||||
},
|
||||
],
|
||||
],
|
||||
babelrc: false,
|
||||
configFile: false,
|
||||
});
|
14
asp-review-app/ui/config/jest/cssTransform.js
Normal file
14
asp-review-app/ui/config/jest/cssTransform.js
Normal file
|
@ -0,0 +1,14 @@
|
|||
'use strict';
|
||||
|
||||
// This is a custom Jest transformer turning style imports into empty objects.
|
||||
// http://facebook.github.io/jest/docs/en/webpack.html
|
||||
|
||||
module.exports = {
|
||||
process() {
|
||||
return 'module.exports = {};';
|
||||
},
|
||||
getCacheKey() {
|
||||
// The output is always the same.
|
||||
return 'cssTransform';
|
||||
},
|
||||
};
|
40
asp-review-app/ui/config/jest/fileTransform.js
Normal file
40
asp-review-app/ui/config/jest/fileTransform.js
Normal file
|
@ -0,0 +1,40 @@
|
|||
'use strict';
|
||||
|
||||
const path = require('path');
|
||||
const camelcase = require('camelcase');
|
||||
|
||||
// This is a custom Jest transformer turning file imports into filenames.
// http://facebook.github.io/jest/docs/en/webpack.html
module.exports = {
  // Replace an asset import with a module exporting its basename; SVGs
  // additionally get a mock `ReactComponent` export mirroring SVGR's output.
  process(src, filename) {
    const assetFilename = JSON.stringify(path.basename(filename));

    if (filename.match(/\.svg$/)) {
      // Based on how SVGR generates a component name:
      // https://github.com/smooth-code/svgr/blob/01b194cf967347d43d4cbe6b434404731b87cf27/packages/core/src/state.js#L6
      const pascalCaseFilename = camelcase(path.parse(filename).name, {
        pascalCase: true,
      });
      const componentName = `Svg${pascalCaseFilename}`;
      // NOTE: the template below is emitted verbatim as the mock module's
      // source; it fakes a React element object without importing react-dom.
      return `const React = require('react');
      module.exports = {
        __esModule: true,
        default: ${assetFilename},
        ReactComponent: React.forwardRef(function ${componentName}(props, ref) {
          return {
            $$typeof: Symbol.for('react.element'),
            type: 'svg',
            ref: ref,
            key: null,
            props: Object.assign({}, props, {
              children: ${assetFilename}
            })
          };
        }),
      };`;
    }

    // Non-SVG assets just resolve to their basename string.
    return `module.exports = ${assetFilename};`;
  },
};
|
134
asp-review-app/ui/config/modules.js
Normal file
134
asp-review-app/ui/config/modules.js
Normal file
|
@ -0,0 +1,134 @@
|
|||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const paths = require('./paths');
|
||||
const chalk = require('react-dev-utils/chalk');
|
||||
const resolve = require('resolve');
|
||||
|
||||
/**
 * Get additional module paths based on the baseUrl of a compilerOptions object.
 *
 * @param {Object} options
 */
function getAdditionalModulePaths(options = {}) {
  const { baseUrl } = options;

  // No baseUrl configured — webpack gets no extra module paths.
  if (!baseUrl) {
    return '';
  }

  const baseUrlResolved = path.resolve(paths.appPath, baseUrl);

  // True when `target` and the resolved baseUrl are the same directory.
  const pointsAt = target => path.relative(target, baseUrlResolved) === '';

  // `node_modules` is already the default resolution root — nothing to add.
  if (pointsAt(paths.appNodeModules)) {
    return null;
  }

  // Allow the user set the `baseUrl` to `appSrc`.
  if (pointsAt(paths.appSrc)) {
    return [paths.appSrc];
  }

  // The project root itself is ignored here: files outside `src` are not
  // transpiled, and root-absolute imports are handled via an alias instead.
  if (pointsAt(paths.appPath)) {
    return null;
  }

  // Any other directory is unsupported.
  throw new Error(
    chalk.red.bold(
      "Your project's `baseUrl` can only be set to `src` or `node_modules`." +
        ' Create React App does not support other values at this time.'
    )
  );
}
|
||||
|
||||
/**
 * Get webpack aliases based on the baseUrl of a compilerOptions object.
 *
 * @param {*} options
 */
function getWebpackAliases(options = {}) {
  const { baseUrl } = options;

  // No baseUrl — no aliases.
  if (!baseUrl) {
    return {};
  }

  const resolvedBase = path.resolve(paths.appPath, baseUrl);

  // A baseUrl equal to the project root maps `src` to the app source dir;
  // any other value intentionally yields undefined (as before).
  if (path.relative(paths.appPath, resolvedBase) === '') {
    return {
      src: paths.appSrc,
    };
  }
}
|
||||
|
||||
/**
 * Get jest aliases based on the baseUrl of a compilerOptions object.
 *
 * @param {*} options
 */
function getJestAliases(options = {}) {
  const { baseUrl } = options;

  // No baseUrl — no module-name mappings.
  if (!baseUrl) {
    return {};
  }

  const resolvedBase = path.resolve(paths.appPath, baseUrl);

  // Only a baseUrl equal to the project root gets the `src/` mapping;
  // any other value intentionally yields undefined (as before).
  if (path.relative(paths.appPath, resolvedBase) === '') {
    return {
      '^src/(.*)$': '<rootDir>/src/$1',
    };
  }
}
|
||||
|
||||
// Derive module-resolution settings (extra paths and aliases) from the
// project's tsconfig.json or jsconfig.json `compilerOptions.baseUrl`.
function getModules() {
  // Check if TypeScript is setup
  const hasTsConfig = fs.existsSync(paths.appTsConfig);
  const hasJsConfig = fs.existsSync(paths.appJsConfig);

  // Having both configs is ambiguous — refuse to guess which one wins.
  if (hasTsConfig && hasJsConfig) {
    throw new Error(
      'You have both a tsconfig.json and a jsconfig.json. If you are using TypeScript please remove your jsconfig.json file.'
    );
  }

  let config;

  // If there's a tsconfig.json we assume it's a
  // TypeScript project and set up the config
  // based on tsconfig.json
  if (hasTsConfig) {
    // Use the project's own TypeScript installation so parsing matches the
    // compiler version actually in use (handles extends, comments, etc.).
    const ts = require(resolve.sync('typescript', {
      basedir: paths.appNodeModules,
    }));
    config = ts.readConfigFile(paths.appTsConfig, ts.sys.readFile).config;
    // Otherwise we'll check if there is jsconfig.json
    // for non TS projects.
  } else if (hasJsConfig) {
    config = require(paths.appJsConfig);
  }

  // Neither config present: fall back to empty options.
  config = config || {};
  const options = config.compilerOptions || {};

  const additionalModulePaths = getAdditionalModulePaths(options);

  return {
    additionalModulePaths: additionalModulePaths,
    webpackAliases: getWebpackAliases(options),
    jestAliases: getJestAliases(options),
    hasTsConfig,
  };
}

module.exports = getModules();
|
77
asp-review-app/ui/config/paths.js
Normal file
77
asp-review-app/ui/config/paths.js
Normal file
|
@ -0,0 +1,77 @@
|
|||
'use strict';
|
||||
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const getPublicUrlOrPath = require('react-dev-utils/getPublicUrlOrPath');
|
||||
|
||||
// Make sure any symlinks in the project folder are resolved:
// https://github.com/facebook/create-react-app/issues/637
const appDirectory = fs.realpathSync(process.cwd());
// Resolve a path relative to the (symlink-resolved) project root.
const resolveApp = relativePath => path.resolve(appDirectory, relativePath);

// We use `PUBLIC_URL` environment variable or "homepage" field to infer
// "public path" at which the app is served.
// webpack needs to know it to put the right <script> hrefs into HTML even in
// single-page apps that may serve index.html for nested URLs like /todos/42.
// We can't use a relative path in HTML because we don't want to load something
// like /todos/42/static/js/bundle.7289d.js. We have to know the root.
const publicUrlOrPath = getPublicUrlOrPath(
  process.env.NODE_ENV === 'development',
  require(resolveApp('package.json')).homepage,
  process.env.PUBLIC_URL
);

// Build output directory; overridable via the BUILD_PATH env variable.
const buildPath = process.env.BUILD_PATH || 'build';
|
||||
|
||||
// Extensions webpack tries, in priority order (`web.*` first for
// React Native Web compatibility).
const moduleFileExtensions = [
  'web.mjs',
  'mjs',
  'web.js',
  'js',
  'web.ts',
  'ts',
  'web.tsx',
  'tsx',
  'json',
  'web.jsx',
  'jsx',
];

// Resolve file paths in the same order as webpack
const resolveModule = (resolveFn, filePath) => {
  // First extension whose resolved file actually exists on disk wins;
  // when none exists we still fall back to the `.js` path (as before).
  const match = moduleFileExtensions.find(ext =>
    fs.existsSync(resolveFn(`${filePath}.${ext}`))
  );
  return resolveFn(`${filePath}.${match || 'js'}`);
};
|
||||
|
||||
// config after eject: we're in ./config/
module.exports = {
  // Env file consumed by config/env.js.
  dotenv: resolveApp('.env'),
  // Project root.
  appPath: resolveApp('.'),
  // Build output directory (honors the BUILD_PATH override above).
  appBuild: resolveApp(buildPath),
  appPublic: resolveApp('public'),
  appHtml: resolveApp('public/index.html'),
  // Entry point; extension resolved in webpack order via resolveModule.
  appIndexJs: resolveModule(resolveApp, 'src/index'),
  appPackageJson: resolveApp('package.json'),
  appSrc: resolveApp('src'),
  appTsConfig: resolveApp('tsconfig.json'),
  appJsConfig: resolveApp('jsconfig.json'),
  yarnLockFile: resolveApp('yarn.lock'),
  testsSetup: resolveModule(resolveApp, 'src/setupTests'),
  proxySetup: resolveApp('src/setupProxy.js'),
  appNodeModules: resolveApp('node_modules'),
  appWebpackCache: resolveApp('node_modules/.cache'),
  appTsBuildInfoFile: resolveApp('node_modules/.cache/tsconfig.tsbuildinfo'),
  // Uncompiled service worker source (may not exist; see webpack config).
  swSrc: resolveModule(resolveApp, 'src/service-worker'),
  publicUrlOrPath,
};

// Also export the extension list so webpack/jest configs can reuse it.
module.exports.moduleFileExtensions = moduleFileExtensions;
|
755
asp-review-app/ui/config/webpack.config.js
Normal file
755
asp-review-app/ui/config/webpack.config.js
Normal file
|
@ -0,0 +1,755 @@
|
|||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const webpack = require('webpack');
|
||||
const resolve = require('resolve');
|
||||
const HtmlWebpackPlugin = require('html-webpack-plugin');
|
||||
const CaseSensitivePathsPlugin = require('case-sensitive-paths-webpack-plugin');
|
||||
const InlineChunkHtmlPlugin = require('react-dev-utils/InlineChunkHtmlPlugin');
|
||||
const TerserPlugin = require('terser-webpack-plugin');
|
||||
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
|
||||
const CssMinimizerPlugin = require('css-minimizer-webpack-plugin');
|
||||
const { WebpackManifestPlugin } = require('webpack-manifest-plugin');
|
||||
const InterpolateHtmlPlugin = require('react-dev-utils/InterpolateHtmlPlugin');
|
||||
const WorkboxWebpackPlugin = require('workbox-webpack-plugin');
|
||||
const ModuleScopePlugin = require('react-dev-utils/ModuleScopePlugin');
|
||||
const getCSSModuleLocalIdent = require('react-dev-utils/getCSSModuleLocalIdent');
|
||||
const ESLintPlugin = require('eslint-webpack-plugin');
|
||||
const paths = require('./paths');
|
||||
const modules = require('./modules');
|
||||
const getClientEnvironment = require('./env');
|
||||
const ModuleNotFoundPlugin = require('react-dev-utils/ModuleNotFoundPlugin');
|
||||
const ForkTsCheckerWebpackPlugin =
|
||||
process.env.TSC_COMPILE_ON_ERROR === 'true'
|
||||
? require('react-dev-utils/ForkTsCheckerWarningWebpackPlugin')
|
||||
: require('react-dev-utils/ForkTsCheckerWebpackPlugin');
|
||||
const ReactRefreshWebpackPlugin = require('@pmmmwh/react-refresh-webpack-plugin');
|
||||
|
||||
const createEnvironmentHash = require('./webpack/persistentCache/createEnvironmentHash');
|
||||
|
||||
// Source maps are resource heavy and can cause out of memory issue for large source files.
const shouldUseSourceMap = process.env.GENERATE_SOURCEMAP !== 'false';

// Absolute paths of toolchain runtime helpers that live outside src/ but
// must still be allowed through ModuleScopePlugin below.
const reactRefreshRuntimeEntry = require.resolve('react-refresh/runtime');
const reactRefreshWebpackPluginRuntimeEntry = require.resolve(
  '@pmmmwh/react-refresh-webpack-plugin'
);
const babelRuntimeEntry = require.resolve('babel-preset-react-app');
const babelRuntimeEntryHelpers = require.resolve(
  '@babel/runtime/helpers/esm/assertThisInitialized',
  { paths: [babelRuntimeEntry] }
);
const babelRuntimeRegenerator = require.resolve('@babel/runtime/regenerator', {
  paths: [babelRuntimeEntry],
});

// Some apps do not need the benefits of saving a web request, so not inlining the chunk
// makes for a smoother build process.
const shouldInlineRuntimeChunk = process.env.INLINE_RUNTIME_CHUNK !== 'false';

// ESLint behavior toggles driven by environment flags.
const emitErrorsAsWarnings = process.env.ESLINT_NO_DEV_ERRORS === 'true';
const disableESLintPlugin = process.env.DISABLE_ESLINT_PLUGIN === 'true';

// Images below this many bytes are inlined as data URLs instead of emitted files.
const imageInlineSizeLimit = parseInt(
  process.env.IMAGE_INLINE_SIZE_LIMIT || '10000'
);

// Check if TypeScript is setup
const useTypeScript = fs.existsSync(paths.appTsConfig);

// Check if Tailwind config exists
const useTailwind = fs.existsSync(
  path.join(paths.appPath, 'tailwind.config.js')
);

// Get the path to the uncompiled service worker (if it exists).
const swSrc = paths.swSrc;

// style files regexes
const cssRegex = /\.css$/;
const cssModuleRegex = /\.module\.css$/;
const sassRegex = /\.(scss|sass)$/;
const sassModuleRegex = /\.module\.(scss|sass)$/;

// Detect the automatic JSX transform (React 17+); DISABLE_NEW_JSX_TRANSFORM
// forces the classic runtime.
const hasJsxRuntime = (() => {
  if (process.env.DISABLE_NEW_JSX_TRANSFORM === 'true') {
    return false;
  }

  try {
    require.resolve('react/jsx-runtime');
    return true;
  } catch (e) {
    return false;
  }
})();
|
||||
|
||||
// This is the production and development configuration.
|
||||
// It is focused on developer experience, fast rebuilds, and a minimal bundle.
|
||||
module.exports = function (webpackEnv) {
|
||||
const isEnvDevelopment = webpackEnv === 'development';
|
||||
const isEnvProduction = webpackEnv === 'production';
|
||||
|
||||
// Variable used for enabling profiling in Production
|
||||
// passed into alias object. Uses a flag if passed into the build command
|
||||
const isEnvProductionProfile =
|
||||
isEnvProduction && process.argv.includes('--profile');
|
||||
|
||||
// We will provide `paths.publicUrlOrPath` to our app
|
||||
// as %PUBLIC_URL% in `index.html` and `process.env.PUBLIC_URL` in JavaScript.
|
||||
// Omit trailing slash as %PUBLIC_URL%/xyz looks better than %PUBLIC_URL%xyz.
|
||||
// Get environment variables to inject into our app.
|
||||
const env = getClientEnvironment(paths.publicUrlOrPath.slice(0, -1));
|
||||
|
||||
const shouldUseReactRefresh = env.raw.FAST_REFRESH;
|
||||
|
||||
// common function to get style loaders
|
||||
const getStyleLoaders = (cssOptions, preProcessor) => {
|
||||
const loaders = [
|
||||
isEnvDevelopment && require.resolve('style-loader'),
|
||||
isEnvProduction && {
|
||||
loader: MiniCssExtractPlugin.loader,
|
||||
// css is located in `static/css`, use '../../' to locate index.html folder
|
||||
// in production `paths.publicUrlOrPath` can be a relative path
|
||||
options: paths.publicUrlOrPath.startsWith('.')
|
||||
? { publicPath: '../../' }
|
||||
: {},
|
||||
},
|
||||
{
|
||||
loader: require.resolve('css-loader'),
|
||||
options: cssOptions,
|
||||
},
|
||||
{
|
||||
// Options for PostCSS as we reference these options twice
|
||||
// Adds vendor prefixing based on your specified browser support in
|
||||
// package.json
|
||||
loader: require.resolve('postcss-loader'),
|
||||
options: {
|
||||
postcssOptions: {
|
||||
// Necessary for external CSS imports to work
|
||||
// https://github.com/facebook/create-react-app/issues/2677
|
||||
ident: 'postcss',
|
||||
config: false,
|
||||
plugins: !useTailwind
|
||||
? [
|
||||
'postcss-flexbugs-fixes',
|
||||
[
|
||||
'postcss-preset-env',
|
||||
{
|
||||
autoprefixer: {
|
||||
flexbox: 'no-2009',
|
||||
},
|
||||
stage: 3,
|
||||
},
|
||||
],
|
||||
// Adds PostCSS Normalize as the reset css with default options,
|
||||
// so that it honors browserslist config in package.json
|
||||
// which in turn let's users customize the target behavior as per their needs.
|
||||
'postcss-normalize',
|
||||
]
|
||||
: [
|
||||
'tailwindcss',
|
||||
'postcss-flexbugs-fixes',
|
||||
[
|
||||
'postcss-preset-env',
|
||||
{
|
||||
autoprefixer: {
|
||||
flexbox: 'no-2009',
|
||||
},
|
||||
stage: 3,
|
||||
},
|
||||
],
|
||||
],
|
||||
},
|
||||
sourceMap: isEnvProduction ? shouldUseSourceMap : isEnvDevelopment,
|
||||
},
|
||||
},
|
||||
].filter(Boolean);
|
||||
if (preProcessor) {
|
||||
loaders.push(
|
||||
{
|
||||
loader: require.resolve('resolve-url-loader'),
|
||||
options: {
|
||||
sourceMap: isEnvProduction ? shouldUseSourceMap : isEnvDevelopment,
|
||||
root: paths.appSrc,
|
||||
},
|
||||
},
|
||||
{
|
||||
loader: require.resolve(preProcessor),
|
||||
options: {
|
||||
sourceMap: true,
|
||||
},
|
||||
}
|
||||
);
|
||||
}
|
||||
return loaders;
|
||||
};
|
||||
|
||||
return {
|
||||
target: ['browserslist'],
|
||||
// Webpack noise constrained to errors and warnings
|
||||
stats: 'errors-warnings',
|
||||
mode: isEnvProduction ? 'production' : isEnvDevelopment && 'development',
|
||||
// Stop compilation early in production
|
||||
bail: isEnvProduction,
|
||||
devtool: isEnvProduction
|
||||
? shouldUseSourceMap
|
||||
? 'source-map'
|
||||
: false
|
||||
: isEnvDevelopment && 'cheap-module-source-map',
|
||||
// These are the "entry points" to our application.
|
||||
// This means they will be the "root" imports that are included in JS bundle.
|
||||
entry: paths.appIndexJs,
|
||||
output: {
|
||||
// The build folder.
|
||||
path: paths.appBuild,
|
||||
// Add /* filename */ comments to generated require()s in the output.
|
||||
pathinfo: isEnvDevelopment,
|
||||
// There will be one main bundle, and one file per asynchronous chunk.
|
||||
// In development, it does not produce real files.
|
||||
filename: isEnvProduction
|
||||
? 'static/js/[name].[contenthash:8].js'
|
||||
: isEnvDevelopment && 'static/js/bundle.js',
|
||||
// There are also additional JS chunk files if you use code splitting.
|
||||
chunkFilename: isEnvProduction
|
||||
? 'static/js/[name].[contenthash:8].chunk.js'
|
||||
: isEnvDevelopment && 'static/js/[name].chunk.js',
|
||||
assetModuleFilename: 'static/media/[name].[hash][ext]',
|
||||
// webpack uses `publicPath` to determine where the app is being served from.
|
||||
// It requires a trailing slash, or the file assets will get an incorrect path.
|
||||
// We inferred the "public path" (such as / or /my-project) from homepage.
|
||||
publicPath: paths.publicUrlOrPath,
|
||||
// Point sourcemap entries to original disk location (format as URL on Windows)
|
||||
devtoolModuleFilenameTemplate: isEnvProduction
|
||||
? info =>
|
||||
path
|
||||
.relative(paths.appSrc, info.absoluteResourcePath)
|
||||
.replace(/\\/g, '/')
|
||||
: isEnvDevelopment &&
|
||||
(info => path.resolve(info.absoluteResourcePath).replace(/\\/g, '/')),
|
||||
},
|
||||
cache: {
|
||||
type: 'filesystem',
|
||||
version: createEnvironmentHash(env.raw),
|
||||
cacheDirectory: paths.appWebpackCache,
|
||||
store: 'pack',
|
||||
buildDependencies: {
|
||||
defaultWebpack: ['webpack/lib/'],
|
||||
config: [__filename],
|
||||
tsconfig: [paths.appTsConfig, paths.appJsConfig].filter(f =>
|
||||
fs.existsSync(f)
|
||||
),
|
||||
},
|
||||
},
|
||||
infrastructureLogging: {
|
||||
level: 'none',
|
||||
},
|
||||
optimization: {
|
||||
minimize: isEnvProduction,
|
||||
minimizer: [
|
||||
// This is only used in production mode
|
||||
new TerserPlugin({
|
||||
terserOptions: {
|
||||
parse: {
|
||||
// We want terser to parse ecma 8 code. However, we don't want it
|
||||
// to apply any minification steps that turns valid ecma 5 code
|
||||
// into invalid ecma 5 code. This is why the 'compress' and 'output'
|
||||
// sections only apply transformations that are ecma 5 safe
|
||||
// https://github.com/facebook/create-react-app/pull/4234
|
||||
ecma: 8,
|
||||
},
|
||||
compress: {
|
||||
ecma: 5,
|
||||
warnings: false,
|
||||
// Disabled because of an issue with Uglify breaking seemingly valid code:
|
||||
// https://github.com/facebook/create-react-app/issues/2376
|
||||
// Pending further investigation:
|
||||
// https://github.com/mishoo/UglifyJS2/issues/2011
|
||||
comparisons: false,
|
||||
// Disabled because of an issue with Terser breaking valid code:
|
||||
// https://github.com/facebook/create-react-app/issues/5250
|
||||
// Pending further investigation:
|
||||
// https://github.com/terser-js/terser/issues/120
|
||||
inline: 2,
|
||||
},
|
||||
mangle: {
|
||||
safari10: true,
|
||||
},
|
||||
// Added for profiling in devtools
|
||||
keep_classnames: isEnvProductionProfile,
|
||||
keep_fnames: isEnvProductionProfile,
|
||||
output: {
|
||||
ecma: 5,
|
||||
comments: false,
|
||||
// Turned on because emoji and regex is not minified properly using default
|
||||
// https://github.com/facebook/create-react-app/issues/2488
|
||||
ascii_only: true,
|
||||
},
|
||||
},
|
||||
}),
|
||||
// This is only used in production mode
|
||||
new CssMinimizerPlugin(),
|
||||
],
|
||||
},
|
||||
resolve: {
|
||||
// This allows you to set a fallback for where webpack should look for modules.
|
||||
// We placed these paths second because we want `node_modules` to "win"
|
||||
// if there are any conflicts. This matches Node resolution mechanism.
|
||||
// https://github.com/facebook/create-react-app/issues/253
|
||||
modules: ['node_modules', paths.appNodeModules].concat(
|
||||
modules.additionalModulePaths || []
|
||||
),
|
||||
// These are the reasonable defaults supported by the Node ecosystem.
|
||||
// We also include JSX as a common component filename extension to support
|
||||
// some tools, although we do not recommend using it, see:
|
||||
// https://github.com/facebook/create-react-app/issues/290
|
||||
// `web` extension prefixes have been added for better support
|
||||
// for React Native Web.
|
||||
extensions: paths.moduleFileExtensions
|
||||
.map(ext => `.${ext}`)
|
||||
.filter(ext => useTypeScript || !ext.includes('ts')),
|
||||
alias: {
|
||||
// Support React Native Web
|
||||
// https://www.smashingmagazine.com/2016/08/a-glimpse-into-the-future-with-react-native-for-web/
|
||||
'react-native': 'react-native-web',
|
||||
// Allows for better profiling with ReactDevTools
|
||||
...(isEnvProductionProfile && {
|
||||
'react-dom$': 'react-dom/profiling',
|
||||
'scheduler/tracing': 'scheduler/tracing-profiling',
|
||||
}),
|
||||
...(modules.webpackAliases || {}),
|
||||
},
|
||||
plugins: [
|
||||
// Prevents users from importing files from outside of src/ (or node_modules/).
|
||||
// This often causes confusion because we only process files within src/ with babel.
|
||||
// To fix this, we prevent you from importing files out of src/ -- if you'd like to,
|
||||
// please link the files into your node_modules/ and let module-resolution kick in.
|
||||
// Make sure your source files are compiled, as they will not be processed in any way.
|
||||
new ModuleScopePlugin(paths.appSrc, [
|
||||
paths.appPackageJson,
|
||||
reactRefreshRuntimeEntry,
|
||||
reactRefreshWebpackPluginRuntimeEntry,
|
||||
babelRuntimeEntry,
|
||||
babelRuntimeEntryHelpers,
|
||||
babelRuntimeRegenerator,
|
||||
]),
|
||||
],
|
||||
},
|
||||
module: {
|
||||
strictExportPresence: true,
|
||||
rules: [
|
||||
// Handle node_modules packages that contain sourcemaps
|
||||
shouldUseSourceMap && {
|
||||
enforce: 'pre',
|
||||
exclude: /@babel(?:\/|\\{1,2})runtime/,
|
||||
test: /\.(js|mjs|jsx|ts|tsx|css)$/,
|
||||
loader: require.resolve('source-map-loader'),
|
||||
},
|
||||
{
|
||||
// "oneOf" will traverse all following loaders until one will
|
||||
// match the requirements. When no loader matches it will fall
|
||||
// back to the "file" loader at the end of the loader list.
|
||||
oneOf: [
|
||||
// TODO: Merge this config once `image/avif` is in the mime-db
|
||||
// https://github.com/jshttp/mime-db
|
||||
{
|
||||
test: [/\.avif$/],
|
||||
type: 'asset',
|
||||
mimetype: 'image/avif',
|
||||
parser: {
|
||||
dataUrlCondition: {
|
||||
maxSize: imageInlineSizeLimit,
|
||||
},
|
||||
},
|
||||
},
|
||||
// "url" loader works like "file" loader except that it embeds assets
|
||||
// smaller than specified limit in bytes as data URLs to avoid requests.
|
||||
// A missing `test` is equivalent to a match.
|
||||
{
|
||||
test: [/\.bmp$/, /\.gif$/, /\.jpe?g$/, /\.png$/],
|
||||
type: 'asset',
|
||||
parser: {
|
||||
dataUrlCondition: {
|
||||
maxSize: imageInlineSizeLimit,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
test: /\.svg$/,
|
||||
use: [
|
||||
{
|
||||
loader: require.resolve('@svgr/webpack'),
|
||||
options: {
|
||||
prettier: false,
|
||||
svgo: false,
|
||||
svgoConfig: {
|
||||
plugins: [{ removeViewBox: false }],
|
||||
},
|
||||
titleProp: true,
|
||||
ref: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
loader: require.resolve('file-loader'),
|
||||
options: {
|
||||
name: 'static/media/[name].[hash].[ext]',
|
||||
},
|
||||
},
|
||||
],
|
||||
issuer: {
|
||||
and: [/\.(ts|tsx|js|jsx|md|mdx)$/],
|
||||
},
|
||||
},
|
||||
// Process application JS with Babel.
|
||||
// The preset includes JSX, Flow, TypeScript, and some ESnext features.
|
||||
{
|
||||
test: /\.(js|mjs|jsx|ts|tsx)$/,
|
||||
include: paths.appSrc,
|
||||
loader: require.resolve('babel-loader'),
|
||||
options: {
|
||||
customize: require.resolve(
|
||||
'babel-preset-react-app/webpack-overrides'
|
||||
),
|
||||
presets: [
|
||||
[
|
||||
require.resolve('babel-preset-react-app'),
|
||||
{
|
||||
runtime: hasJsxRuntime ? 'automatic' : 'classic',
|
||||
},
|
||||
],
|
||||
],
|
||||
|
||||
plugins: [
|
||||
isEnvDevelopment &&
|
||||
shouldUseReactRefresh &&
|
||||
require.resolve('react-refresh/babel'),
|
||||
].filter(Boolean),
|
||||
// This is a feature of `babel-loader` for webpack (not Babel itself).
|
||||
// It enables caching results in ./node_modules/.cache/babel-loader/
|
||||
// directory for faster rebuilds.
|
||||
cacheDirectory: true,
|
||||
// See #6846 for context on why cacheCompression is disabled
|
||||
cacheCompression: false,
|
||||
compact: isEnvProduction,
|
||||
},
|
||||
},
|
||||
// Process any JS outside of the app with Babel.
|
||||
// Unlike the application JS, we only compile the standard ES features.
|
||||
{
|
||||
test: /\.(js|mjs)$/,
|
||||
exclude: /@babel(?:\/|\\{1,2})runtime/,
|
||||
loader: require.resolve('babel-loader'),
|
||||
options: {
|
||||
babelrc: false,
|
||||
configFile: false,
|
||||
compact: false,
|
||||
presets: [
|
||||
[
|
||||
require.resolve('babel-preset-react-app/dependencies'),
|
||||
{ helpers: true },
|
||||
],
|
||||
],
|
||||
cacheDirectory: true,
|
||||
// See #6846 for context on why cacheCompression is disabled
|
||||
cacheCompression: false,
|
||||
|
||||
// Babel sourcemaps are needed for debugging into node_modules
|
||||
// code. Without the options below, debuggers like VSCode
|
||||
// show incorrect code and set breakpoints on the wrong lines.
|
||||
sourceMaps: shouldUseSourceMap,
|
||||
inputSourceMap: shouldUseSourceMap,
|
||||
},
|
||||
},
|
||||
// "postcss" loader applies autoprefixer to our CSS.
|
||||
// "css" loader resolves paths in CSS and adds assets as dependencies.
|
||||
// "style" loader turns CSS into JS modules that inject <style> tags.
|
||||
// In production, we use MiniCSSExtractPlugin to extract that CSS
|
||||
// to a file, but in development "style" loader enables hot editing
|
||||
// of CSS.
|
||||
// By default we support CSS Modules with the extension .module.css
|
||||
{
|
||||
test: cssRegex,
|
||||
exclude: cssModuleRegex,
|
||||
use: getStyleLoaders({
|
||||
importLoaders: 1,
|
||||
sourceMap: isEnvProduction
|
||||
? shouldUseSourceMap
|
||||
: isEnvDevelopment,
|
||||
modules: {
|
||||
mode: 'icss',
|
||||
},
|
||||
}),
|
||||
// Don't consider CSS imports dead code even if the
|
||||
// containing package claims to have no side effects.
|
||||
// Remove this when webpack adds a warning or an error for this.
|
||||
// See https://github.com/webpack/webpack/issues/6571
|
||||
sideEffects: true,
|
||||
},
|
||||
// Adds support for CSS Modules (https://github.com/css-modules/css-modules)
|
||||
// using the extension .module.css
|
||||
{
|
||||
test: cssModuleRegex,
|
||||
use: getStyleLoaders({
|
||||
importLoaders: 1,
|
||||
sourceMap: isEnvProduction
|
||||
? shouldUseSourceMap
|
||||
: isEnvDevelopment,
|
||||
modules: {
|
||||
mode: 'local',
|
||||
getLocalIdent: getCSSModuleLocalIdent,
|
||||
},
|
||||
}),
|
||||
},
|
||||
// Opt-in support for SASS (using .scss or .sass extensions).
|
||||
// By default we support SASS Modules with the
|
||||
// extensions .module.scss or .module.sass
|
||||
{
|
||||
test: sassRegex,
|
||||
exclude: sassModuleRegex,
|
||||
use: getStyleLoaders(
|
||||
{
|
||||
importLoaders: 3,
|
||||
sourceMap: isEnvProduction
|
||||
? shouldUseSourceMap
|
||||
: isEnvDevelopment,
|
||||
modules: {
|
||||
mode: 'icss',
|
||||
},
|
||||
},
|
||||
'sass-loader'
|
||||
),
|
||||
// Don't consider CSS imports dead code even if the
|
||||
// containing package claims to have no side effects.
|
||||
// Remove this when webpack adds a warning or an error for this.
|
||||
// See https://github.com/webpack/webpack/issues/6571
|
||||
sideEffects: true,
|
||||
},
|
||||
// Adds support for CSS Modules, but using SASS
|
||||
// using the extension .module.scss or .module.sass
|
||||
{
|
||||
test: sassModuleRegex,
|
||||
use: getStyleLoaders(
|
||||
{
|
||||
importLoaders: 3,
|
||||
sourceMap: isEnvProduction
|
||||
? shouldUseSourceMap
|
||||
: isEnvDevelopment,
|
||||
modules: {
|
||||
mode: 'local',
|
||||
getLocalIdent: getCSSModuleLocalIdent,
|
||||
},
|
||||
},
|
||||
'sass-loader'
|
||||
),
|
||||
},
|
||||
// "file" loader makes sure those assets get served by WebpackDevServer.
|
||||
// When you `import` an asset, you get its (virtual) filename.
|
||||
// In production, they would get copied to the `build` folder.
|
||||
// This loader doesn't use a "test" so it will catch all modules
|
||||
// that fall through the other loaders.
|
||||
{
|
||||
// Exclude `js` files to keep "css" loader working as it injects
|
||||
// its runtime that would otherwise be processed through "file" loader.
|
||||
// Also exclude `html` and `json` extensions so they get processed
|
||||
// by webpacks internal loaders.
|
||||
exclude: [/^$/, /\.(js|mjs|jsx|ts|tsx)$/, /\.html$/, /\.json$/],
|
||||
type: 'asset/resource',
|
||||
},
|
||||
// ** STOP ** Are you adding a new loader?
|
||||
// Make sure to add the new loader(s) before the "file" loader.
|
||||
],
|
||||
},
|
||||
].filter(Boolean),
|
||||
},
|
||||
plugins: [
|
||||
// Generates an `index.html` file with the <script> injected.
|
||||
new HtmlWebpackPlugin(
|
||||
Object.assign(
|
||||
{},
|
||||
{
|
||||
inject: true,
|
||||
template: paths.appHtml,
|
||||
},
|
||||
isEnvProduction
|
||||
? {
|
||||
minify: {
|
||||
removeComments: true,
|
||||
collapseWhitespace: true,
|
||||
removeRedundantAttributes: true,
|
||||
useShortDoctype: true,
|
||||
removeEmptyAttributes: true,
|
||||
removeStyleLinkTypeAttributes: true,
|
||||
keepClosingSlash: true,
|
||||
minifyJS: true,
|
||||
minifyCSS: true,
|
||||
minifyURLs: true,
|
||||
},
|
||||
}
|
||||
: undefined
|
||||
)
|
||||
),
|
||||
// Inlines the webpack runtime script. This script is too small to warrant
|
||||
// a network request.
|
||||
// https://github.com/facebook/create-react-app/issues/5358
|
||||
isEnvProduction &&
|
||||
shouldInlineRuntimeChunk &&
|
||||
new InlineChunkHtmlPlugin(HtmlWebpackPlugin, [/runtime-.+[.]js/]),
|
||||
// Makes some environment variables available in index.html.
|
||||
// The public URL is available as %PUBLIC_URL% in index.html, e.g.:
|
||||
// <link rel="icon" href="%PUBLIC_URL%/favicon.ico">
|
||||
// It will be an empty string unless you specify "homepage"
|
||||
// in `package.json`, in which case it will be the pathname of that URL.
|
||||
new InterpolateHtmlPlugin(HtmlWebpackPlugin, env.raw),
|
||||
// This gives some necessary context to module not found errors, such as
|
||||
// the requesting resource.
|
||||
new ModuleNotFoundPlugin(paths.appPath),
|
||||
// Makes some environment variables available to the JS code, for example:
|
||||
// if (process.env.NODE_ENV === 'production') { ... }. See `./env.js`.
|
||||
// It is absolutely essential that NODE_ENV is set to production
|
||||
// during a production build.
|
||||
// Otherwise React will be compiled in the very slow development mode.
|
||||
new webpack.DefinePlugin(env.stringified),
|
||||
// Experimental hot reloading for React .
|
||||
// https://github.com/facebook/react/tree/main/packages/react-refresh
|
||||
isEnvDevelopment &&
|
||||
shouldUseReactRefresh &&
|
||||
new ReactRefreshWebpackPlugin({
|
||||
overlay: false,
|
||||
}),
|
||||
// Watcher doesn't work well if you mistype casing in a path so we use
|
||||
// a plugin that prints an error when you attempt to do this.
|
||||
// See https://github.com/facebook/create-react-app/issues/240
|
||||
isEnvDevelopment && new CaseSensitivePathsPlugin(),
|
||||
isEnvProduction &&
|
||||
new MiniCssExtractPlugin({
|
||||
// Options similar to the same options in webpackOptions.output
|
||||
// both options are optional
|
||||
filename: 'static/css/[name].[contenthash:8].css',
|
||||
chunkFilename: 'static/css/[name].[contenthash:8].chunk.css',
|
||||
}),
|
||||
// Generate an asset manifest file with the following content:
|
||||
// - "files" key: Mapping of all asset filenames to their corresponding
|
||||
// output file so that tools can pick it up without having to parse
|
||||
// `index.html`
|
||||
// - "entrypoints" key: Array of files which are included in `index.html`,
|
||||
// can be used to reconstruct the HTML if necessary
|
||||
new WebpackManifestPlugin({
|
||||
fileName: 'asset-manifest.json',
|
||||
publicPath: paths.publicUrlOrPath,
|
||||
generate: (seed, files, entrypoints) => {
|
||||
const manifestFiles = files.reduce((manifest, file) => {
|
||||
manifest[file.name] = file.path;
|
||||
return manifest;
|
||||
}, seed);
|
||||
const entrypointFiles = entrypoints.main.filter(
|
||||
fileName => !fileName.endsWith('.map')
|
||||
);
|
||||
|
||||
return {
|
||||
files: manifestFiles,
|
||||
entrypoints: entrypointFiles,
|
||||
};
|
||||
},
|
||||
}),
|
||||
// Moment.js is an extremely popular library that bundles large locale files
|
||||
// by default due to how webpack interprets its code. This is a practical
|
||||
// solution that requires the user to opt into importing specific locales.
|
||||
// https://github.com/jmblog/how-to-optimize-momentjs-with-webpack
|
||||
// You can remove this if you don't use Moment.js:
|
||||
new webpack.IgnorePlugin({
|
||||
resourceRegExp: /^\.\/locale$/,
|
||||
contextRegExp: /moment$/,
|
||||
}),
|
||||
// Generate a service worker script that will precache, and keep up to date,
|
||||
// the HTML & assets that are part of the webpack build.
|
||||
isEnvProduction &&
|
||||
fs.existsSync(swSrc) &&
|
||||
new WorkboxWebpackPlugin.InjectManifest({
|
||||
swSrc,
|
||||
dontCacheBustURLsMatching: /\.[0-9a-f]{8}\./,
|
||||
exclude: [/\.map$/, /asset-manifest\.json$/, /LICENSE/],
|
||||
// Bump up the default maximum size (2mb) that's precached,
|
||||
// to make lazy-loading failure scenarios less likely.
|
||||
// See https://github.com/cra-template/pwa/issues/13#issuecomment-722667270
|
||||
maximumFileSizeToCacheInBytes: 5 * 1024 * 1024,
|
||||
}),
|
||||
// TypeScript type checking
|
||||
useTypeScript &&
|
||||
new ForkTsCheckerWebpackPlugin({
|
||||
async: isEnvDevelopment,
|
||||
typescript: {
|
||||
typescriptPath: resolve.sync('typescript', {
|
||||
basedir: paths.appNodeModules,
|
||||
}),
|
||||
configOverwrite: {
|
||||
compilerOptions: {
|
||||
sourceMap: isEnvProduction
|
||||
? shouldUseSourceMap
|
||||
: isEnvDevelopment,
|
||||
skipLibCheck: true,
|
||||
inlineSourceMap: false,
|
||||
declarationMap: false,
|
||||
noEmit: true,
|
||||
incremental: true,
|
||||
tsBuildInfoFile: paths.appTsBuildInfoFile,
|
||||
},
|
||||
},
|
||||
context: paths.appPath,
|
||||
diagnosticOptions: {
|
||||
syntactic: true,
|
||||
},
|
||||
mode: 'write-references',
|
||||
// profile: true,
|
||||
},
|
||||
issue: {
|
||||
// This one is specifically to match during CI tests,
|
||||
// as micromatch doesn't match
|
||||
// '../cra-template-typescript/template/src/App.tsx'
|
||||
// otherwise.
|
||||
include: [
|
||||
{ file: '../**/src/**/*.{ts,tsx}' },
|
||||
{ file: '**/src/**/*.{ts,tsx}' },
|
||||
],
|
||||
exclude: [
|
||||
{ file: '**/src/**/__tests__/**' },
|
||||
{ file: '**/src/**/?(*.){spec|test}.*' },
|
||||
{ file: '**/src/setupProxy.*' },
|
||||
{ file: '**/src/setupTests.*' },
|
||||
],
|
||||
},
|
||||
logger: {
|
||||
infrastructure: 'silent',
|
||||
},
|
||||
}),
|
||||
!disableESLintPlugin &&
|
||||
new ESLintPlugin({
|
||||
// Plugin options
|
||||
extensions: ['js', 'mjs', 'jsx', 'ts', 'tsx'],
|
||||
formatter: require.resolve('react-dev-utils/eslintFormatter'),
|
||||
eslintPath: require.resolve('eslint'),
|
||||
failOnError: !(isEnvDevelopment && emitErrorsAsWarnings),
|
||||
context: paths.appSrc,
|
||||
cache: true,
|
||||
cacheLocation: path.resolve(
|
||||
paths.appNodeModules,
|
||||
'.cache/.eslintcache'
|
||||
),
|
||||
// ESLint class options
|
||||
cwd: paths.appPath,
|
||||
resolvePluginsRelativeTo: __dirname,
|
||||
baseConfig: {
|
||||
extends: [require.resolve('eslint-config-react-app/base')],
|
||||
rules: {
|
||||
...(!hasJsxRuntime && {
|
||||
'react/react-in-jsx-scope': 'error',
|
||||
}),
|
||||
},
|
||||
},
|
||||
}),
|
||||
].filter(Boolean),
|
||||
// Turn off performance processing because we utilize
|
||||
// our own hints via the FileSizeReporter
|
||||
performance: false,
|
||||
};
|
||||
};
|
|
@ -0,0 +1,9 @@
|
|||
'use strict';
|
||||
const { createHash } = require('crypto');
|
||||
|
||||
module.exports = env => {
|
||||
const hash = createHash('md5');
|
||||
hash.update(JSON.stringify(env));
|
||||
|
||||
return hash.digest('hex');
|
||||
};
|
127
asp-review-app/ui/config/webpackDevServer.config.js
Normal file
127
asp-review-app/ui/config/webpackDevServer.config.js
Normal file
|
@ -0,0 +1,127 @@
|
|||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const evalSourceMapMiddleware = require('react-dev-utils/evalSourceMapMiddleware');
|
||||
const noopServiceWorkerMiddleware = require('react-dev-utils/noopServiceWorkerMiddleware');
|
||||
const ignoredFiles = require('react-dev-utils/ignoredFiles');
|
||||
const redirectServedPath = require('react-dev-utils/redirectServedPathMiddleware');
|
||||
const paths = require('./paths');
|
||||
const getHttpsConfig = require('./getHttpsConfig');
|
||||
|
||||
const host = process.env.HOST || '0.0.0.0';
|
||||
const sockHost = process.env.WDS_SOCKET_HOST;
|
||||
const sockPath = process.env.WDS_SOCKET_PATH; // default: '/ws'
|
||||
const sockPort = process.env.WDS_SOCKET_PORT;
|
||||
|
||||
module.exports = function (proxy, allowedHost) {
|
||||
const disableFirewall =
|
||||
!proxy || process.env.DANGEROUSLY_DISABLE_HOST_CHECK === 'true';
|
||||
return {
|
||||
// WebpackDevServer 2.4.3 introduced a security fix that prevents remote
|
||||
// websites from potentially accessing local content through DNS rebinding:
|
||||
// https://github.com/webpack/webpack-dev-server/issues/887
|
||||
// https://medium.com/webpack/webpack-dev-server-middleware-security-issues-1489d950874a
|
||||
// However, it made several existing use cases such as development in cloud
|
||||
// environment or subdomains in development significantly more complicated:
|
||||
// https://github.com/facebook/create-react-app/issues/2271
|
||||
// https://github.com/facebook/create-react-app/issues/2233
|
||||
// While we're investigating better solutions, for now we will take a
|
||||
// compromise. Since our WDS configuration only serves files in the `public`
|
||||
// folder we won't consider accessing them a vulnerability. However, if you
|
||||
// use the `proxy` feature, it gets more dangerous because it can expose
|
||||
// remote code execution vulnerabilities in backends like Django and Rails.
|
||||
// So we will disable the host check normally, but enable it if you have
|
||||
// specified the `proxy` setting. Finally, we let you override it if you
|
||||
// really know what you're doing with a special environment variable.
|
||||
// Note: ["localhost", ".localhost"] will support subdomains - but we might
|
||||
// want to allow setting the allowedHosts manually for more complex setups
|
||||
allowedHosts: disableFirewall ? 'all' : [allowedHost],
|
||||
headers: {
|
||||
'Access-Control-Allow-Origin': '*',
|
||||
'Access-Control-Allow-Methods': '*',
|
||||
'Access-Control-Allow-Headers': '*',
|
||||
},
|
||||
// Enable gzip compression of generated files.
|
||||
compress: true,
|
||||
static: {
|
||||
// By default WebpackDevServer serves physical files from current directory
|
||||
// in addition to all the virtual build products that it serves from memory.
|
||||
// This is confusing because those files won’t automatically be available in
|
||||
// production build folder unless we copy them. However, copying the whole
|
||||
// project directory is dangerous because we may expose sensitive files.
|
||||
// Instead, we establish a convention that only files in `public` directory
|
||||
// get served. Our build script will copy `public` into the `build` folder.
|
||||
// In `index.html`, you can get URL of `public` folder with %PUBLIC_URL%:
|
||||
// <link rel="icon" href="%PUBLIC_URL%/favicon.ico">
|
||||
// In JavaScript code, you can access it with `process.env.PUBLIC_URL`.
|
||||
// Note that we only recommend to use `public` folder as an escape hatch
|
||||
// for files like `favicon.ico`, `manifest.json`, and libraries that are
|
||||
// for some reason broken when imported through webpack. If you just want to
|
||||
// use an image, put it in `src` and `import` it from JavaScript instead.
|
||||
directory: paths.appPublic,
|
||||
publicPath: [paths.publicUrlOrPath],
|
||||
// By default files from `contentBase` will not trigger a page reload.
|
||||
watch: {
|
||||
// Reportedly, this avoids CPU overload on some systems.
|
||||
// https://github.com/facebook/create-react-app/issues/293
|
||||
// src/node_modules is not ignored to support absolute imports
|
||||
// https://github.com/facebook/create-react-app/issues/1065
|
||||
ignored: ignoredFiles(paths.appSrc),
|
||||
},
|
||||
},
|
||||
client: {
|
||||
webSocketURL: {
|
||||
// Enable custom sockjs pathname for websocket connection to hot reloading server.
|
||||
// Enable custom sockjs hostname, pathname and port for websocket connection
|
||||
// to hot reloading server.
|
||||
hostname: sockHost,
|
||||
pathname: sockPath,
|
||||
port: sockPort,
|
||||
},
|
||||
overlay: {
|
||||
errors: true,
|
||||
warnings: false,
|
||||
},
|
||||
},
|
||||
devMiddleware: {
|
||||
// It is important to tell WebpackDevServer to use the same "publicPath" path as
|
||||
// we specified in the webpack config. When homepage is '.', default to serving
|
||||
// from the root.
|
||||
// remove last slash so user can land on `/test` instead of `/test/`
|
||||
publicPath: paths.publicUrlOrPath.slice(0, -1),
|
||||
},
|
||||
|
||||
https: getHttpsConfig(),
|
||||
host,
|
||||
historyApiFallback: {
|
||||
// Paths with dots should still use the history fallback.
|
||||
// See https://github.com/facebook/create-react-app/issues/387.
|
||||
disableDotRule: true,
|
||||
index: paths.publicUrlOrPath,
|
||||
},
|
||||
// `proxy` is run between `before` and `after` `webpack-dev-server` hooks
|
||||
proxy,
|
||||
onBeforeSetupMiddleware(devServer) {
|
||||
// Keep `evalSourceMapMiddleware`
|
||||
// middlewares before `redirectServedPath` otherwise will not have any effect
|
||||
// This lets us fetch source contents from webpack for the error overlay
|
||||
devServer.app.use(evalSourceMapMiddleware(devServer));
|
||||
|
||||
if (fs.existsSync(paths.proxySetup)) {
|
||||
// This registers user provided middleware for proxy reasons
|
||||
require(paths.proxySetup)(devServer.app);
|
||||
}
|
||||
},
|
||||
onAfterSetupMiddleware(devServer) {
|
||||
// Redirect to `PUBLIC_URL` or `homepage` from `package.json` if url not match
|
||||
devServer.app.use(redirectServedPath(paths.publicUrlOrPath));
|
||||
|
||||
// This service worker file is effectively a 'no-op' that will reset any
|
||||
// previous service worker registered for the same host:port combination.
|
||||
// We do this in development to avoid hitting the production cache if
|
||||
// it used the same host and port.
|
||||
// https://github.com/facebook/create-react-app/issues/2272#issuecomment-302832432
|
||||
devServer.app.use(noopServiceWorkerMiddleware(paths.publicUrlOrPath));
|
||||
},
|
||||
};
|
||||
};
|
17016
asp-review-app/ui/package-lock.json
generated
Normal file
17016
asp-review-app/ui/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load diff
160
asp-review-app/ui/package.json
Normal file
160
asp-review-app/ui/package.json
Normal file
|
@ -0,0 +1,160 @@
|
|||
{
|
||||
"name": "i18next",
|
||||
"version": "0.1.0",
|
||||
"private": true,
|
||||
"dependencies": {
|
||||
"@babel/core": "^7.16.0",
|
||||
"@pmmmwh/react-refresh-webpack-plugin": "^0.5.3",
|
||||
"@react-three/drei": "^9.65.3",
|
||||
"@svgr/webpack": "^5.5.0",
|
||||
"@testing-library/jest-dom": "^5.14.1",
|
||||
"@testing-library/react": "^13.0.0",
|
||||
"@testing-library/user-event": "^13.2.1",
|
||||
"@types/jest": "^27.0.1",
|
||||
"@types/node": "^16.7.13",
|
||||
"@types/react": "^18.0.0",
|
||||
"@types/react-dom": "^18.0.0",
|
||||
"antd": "^5.5.2",
|
||||
"babel-jest": "^27.4.2",
|
||||
"babel-loader": "^8.2.3",
|
||||
"babel-plugin-named-asset-import": "^0.3.8",
|
||||
"babel-preset-react-app": "^10.0.1",
|
||||
"bfj": "^7.0.2",
|
||||
"browserslist": "^4.18.1",
|
||||
"camelcase": "^6.2.1",
|
||||
"case-sensitive-paths-webpack-plugin": "^2.4.0",
|
||||
"css-loader": "^6.5.1",
|
||||
"css-minimizer-webpack-plugin": "^3.2.0",
|
||||
"dotenv": "^10.0.0",
|
||||
"dotenv-expand": "^5.1.0",
|
||||
"eslint": "^8.3.0",
|
||||
"eslint-config-react-app": "^7.0.1",
|
||||
"eslint-webpack-plugin": "^3.1.1",
|
||||
"file-loader": "^6.2.0",
|
||||
"fs-extra": "^10.0.0",
|
||||
"html-webpack-plugin": "^5.5.0",
|
||||
"i18next": "^22.4.14",
|
||||
"i18next-browser-languagedetector": "^7.0.1",
|
||||
"identity-obj-proxy": "^3.0.0",
|
||||
"jest": "^27.4.3",
|
||||
"jest-resolve": "^27.4.2",
|
||||
"jest-watch-typeahead": "^1.0.0",
|
||||
"localforage": "^1.10.0",
|
||||
"match-sorter": "^6.3.1",
|
||||
"mini-css-extract-plugin": "^2.4.5",
|
||||
"mobx": "^6.9.0",
|
||||
"mobx-react": "^7.6.0",
|
||||
"postcss": "^8.4.4",
|
||||
"postcss-flexbugs-fixes": "^5.0.2",
|
||||
"postcss-loader": "^6.2.1",
|
||||
"postcss-normalize": "^10.0.1",
|
||||
"postcss-preset-env": "^7.0.1",
|
||||
"prompts": "^2.4.2",
|
||||
"react": "^16.12.0",
|
||||
"react-app-polyfill": "^3.0.0",
|
||||
"react-dev-utils": "^12.0.1",
|
||||
"react-dom": "^16.12.0",
|
||||
"react-i18next": "^12.2.0",
|
||||
"react-refresh": "^0.11.0",
|
||||
"react-router-dom": "^6.11.2",
|
||||
"react-three-fiber": "^6.0.13",
|
||||
"resolve": "^1.20.0",
|
||||
"resolve-url-loader": "^4.0.0",
|
||||
"sass-loader": "^12.3.0",
|
||||
"semver": "^7.3.5",
|
||||
"sort-by": "^1.2.0",
|
||||
"source-map-loader": "^3.0.0",
|
||||
"style-loader": "^3.3.1",
|
||||
"tailwindcss": "^3.0.2",
|
||||
"terser-webpack-plugin": "^5.2.5",
|
||||
"three": "^0.151.3",
|
||||
"typescript": "^4.4.2",
|
||||
"web-vitals": "^2.1.0",
|
||||
"webpack": "^5.64.4",
|
||||
"webpack-dev-server": "^4.6.0",
|
||||
"webpack-manifest-plugin": "^4.0.2",
|
||||
"workbox-webpack-plugin": "^6.4.1"
|
||||
},
|
||||
"scripts": {
|
||||
"dev": "node scripts/start.js",
|
||||
"build": "node scripts/build.js",
|
||||
"test": "node scripts/test.js"
|
||||
},
|
||||
"eslintConfig": {
|
||||
"extends": [
|
||||
"react-app",
|
||||
"react-app/jest"
|
||||
]
|
||||
},
|
||||
"browserslist": {
|
||||
"production": [
|
||||
">0.2%",
|
||||
"not dead",
|
||||
"not op_mini all"
|
||||
],
|
||||
"development": [
|
||||
"last 1 chrome version",
|
||||
"last 1 firefox version",
|
||||
"last 1 safari version"
|
||||
]
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/three": "^0.150.1"
|
||||
},
|
||||
"jest": {
|
||||
"roots": [
|
||||
"<rootDir>/src"
|
||||
],
|
||||
"collectCoverageFrom": [
|
||||
"src/**/*.{js,jsx,ts,tsx}",
|
||||
"!src/**/*.d.ts"
|
||||
],
|
||||
"setupFiles": [
|
||||
"react-app-polyfill/jsdom"
|
||||
],
|
||||
"setupFilesAfterEnv": [
|
||||
"<rootDir>/src/setupTests.ts"
|
||||
],
|
||||
"testMatch": [
|
||||
"<rootDir>/src/**/__tests__/**/*.{js,jsx,ts,tsx}",
|
||||
"<rootDir>/src/**/*.{spec,test}.{js,jsx,ts,tsx}"
|
||||
],
|
||||
"testEnvironment": "jsdom",
|
||||
"transform": {
|
||||
"^.+\\.(js|jsx|mjs|cjs|ts|tsx)$": "<rootDir>/config/jest/babelTransform.js",
|
||||
"^.+\\.css$": "<rootDir>/config/jest/cssTransform.js",
|
||||
"^(?!.*\\.(js|jsx|mjs|cjs|ts|tsx|css|json)$)": "<rootDir>/config/jest/fileTransform.js"
|
||||
},
|
||||
"transformIgnorePatterns": [
|
||||
"[/\\\\]node_modules[/\\\\].+\\.(js|jsx|mjs|cjs|ts|tsx)$",
|
||||
"^.+\\.module\\.(css|sass|scss)$"
|
||||
],
|
||||
"modulePaths": [],
|
||||
"moduleNameMapper": {
|
||||
"^react-native$": "react-native-web",
|
||||
"^.+\\.module\\.(css|sass|scss)$": "identity-obj-proxy"
|
||||
},
|
||||
"moduleFileExtensions": [
|
||||
"web.js",
|
||||
"js",
|
||||
"web.ts",
|
||||
"ts",
|
||||
"web.tsx",
|
||||
"tsx",
|
||||
"json",
|
||||
"web.jsx",
|
||||
"jsx",
|
||||
"node"
|
||||
],
|
||||
"watchPlugins": [
|
||||
"jest-watch-typeahead/filename",
|
||||
"jest-watch-typeahead/testname"
|
||||
],
|
||||
"resetMocks": true
|
||||
},
|
||||
"babel": {
|
||||
"presets": [
|
||||
"react-app"
|
||||
]
|
||||
}
|
||||
}
|
BIN
asp-review-app/ui/public/favicon.ico
Normal file
BIN
asp-review-app/ui/public/favicon.ico
Normal file
Binary file not shown.
After Width: | Height: | Size: 3.8 KiB |
43
asp-review-app/ui/public/index.html
Normal file
43
asp-review-app/ui/public/index.html
Normal file
|
@ -0,0 +1,43 @@
|
|||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<link rel="icon" href="%PUBLIC_URL%/favicon.ico" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||
<meta name="theme-color" content="#000000" />
|
||||
<meta
|
||||
name="description"
|
||||
content="Web site created using create-react-app"
|
||||
/>
|
||||
<link rel="apple-touch-icon" href="%PUBLIC_URL%/logo192.png" />
|
||||
<!--
|
||||
manifest.json provides metadata used when your web app is installed on a
|
||||
user's mobile device or desktop. See https://developers.google.com/web/fundamentals/web-app-manifest/
|
||||
-->
|
||||
<link rel="manifest" href="%PUBLIC_URL%/manifest.json" />
|
||||
<!--
|
||||
Notice the use of %PUBLIC_URL% in the tags above.
|
||||
It will be replaced with the URL of the `public` folder during the build.
|
||||
Only files inside the `public` folder can be referenced from the HTML.
|
||||
|
||||
Unlike "/favicon.ico" or "favicon.ico", "%PUBLIC_URL%/favicon.ico" will
|
||||
work correctly both with client-side routing and a non-root public URL.
|
||||
Learn how to configure a non-root public URL by running `npm run build`.
|
||||
-->
|
||||
<title>React App</title>
|
||||
</head>
|
||||
<body>
|
||||
<noscript>You need to enable JavaScript to run this app.</noscript>
|
||||
<div class="root" id="root"></div>
|
||||
<!--
|
||||
This HTML file is a template.
|
||||
If you open it directly in the browser, you will see an empty page.
|
||||
|
||||
You can add webfonts, meta tags, or analytics to this file.
|
||||
The build step will place the bundled scripts into the <body> tag.
|
||||
|
||||
To begin the development, run `npm start` or `yarn start`.
|
||||
To create a production bundle, use `npm run build` or `yarn build`.
|
||||
-->
|
||||
</body>
|
||||
</html>
|
BIN
asp-review-app/ui/public/logo192.png
Normal file
BIN
asp-review-app/ui/public/logo192.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 5.2 KiB |
BIN
asp-review-app/ui/public/logo512.png
Normal file
BIN
asp-review-app/ui/public/logo512.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 9.4 KiB |
25
asp-review-app/ui/public/manifest.json
Normal file
25
asp-review-app/ui/public/manifest.json
Normal file
|
@ -0,0 +1,25 @@
|
|||
{
|
||||
"short_name": "React App",
|
||||
"name": "Create React App Sample",
|
||||
"icons": [
|
||||
{
|
||||
"src": "favicon.ico",
|
||||
"sizes": "64x64 32x32 24x24 16x16",
|
||||
"type": "image/x-icon"
|
||||
},
|
||||
{
|
||||
"src": "logo192.png",
|
||||
"type": "image/png",
|
||||
"sizes": "192x192"
|
||||
},
|
||||
{
|
||||
"src": "logo512.png",
|
||||
"type": "image/png",
|
||||
"sizes": "512x512"
|
||||
}
|
||||
],
|
||||
"start_url": ".",
|
||||
"display": "standalone",
|
||||
"theme_color": "#000000",
|
||||
"background_color": "#ffffff"
|
||||
}
|
3
asp-review-app/ui/public/robots.txt
Normal file
3
asp-review-app/ui/public/robots.txt
Normal file
|
@ -0,0 +1,3 @@
|
|||
# https://www.robotstxt.org/robotstxt.html
|
||||
User-agent: *
|
||||
Disallow:
|
217
asp-review-app/ui/scripts/build.js
Normal file
217
asp-review-app/ui/scripts/build.js
Normal file
|
@ -0,0 +1,217 @@
|
|||
'use strict';
|
||||
|
||||
// Do this as the first thing so that any code reading it knows the right env.
|
||||
process.env.BABEL_ENV = 'production';
|
||||
process.env.NODE_ENV = 'production';
|
||||
|
||||
// Makes the script crash on unhandled rejections instead of silently
|
||||
// ignoring them. In the future, promise rejections that are not handled will
|
||||
// terminate the Node.js process with a non-zero exit code.
|
||||
process.on('unhandledRejection', err => {
|
||||
throw err;
|
||||
});
|
||||
|
||||
// Ensure environment variables are read.
|
||||
require('../config/env');
|
||||
|
||||
const path = require('path');
|
||||
const chalk = require('react-dev-utils/chalk');
|
||||
const fs = require('fs-extra');
|
||||
const bfj = require('bfj');
|
||||
const webpack = require('webpack');
|
||||
const configFactory = require('../config/webpack.config');
|
||||
const paths = require('../config/paths');
|
||||
const checkRequiredFiles = require('react-dev-utils/checkRequiredFiles');
|
||||
const formatWebpackMessages = require('react-dev-utils/formatWebpackMessages');
|
||||
const printHostingInstructions = require('react-dev-utils/printHostingInstructions');
|
||||
const FileSizeReporter = require('react-dev-utils/FileSizeReporter');
|
||||
const printBuildError = require('react-dev-utils/printBuildError');
|
||||
|
||||
const measureFileSizesBeforeBuild =
|
||||
FileSizeReporter.measureFileSizesBeforeBuild;
|
||||
const printFileSizesAfterBuild = FileSizeReporter.printFileSizesAfterBuild;
|
||||
const useYarn = fs.existsSync(paths.yarnLockFile);
|
||||
|
||||
// These sizes are pretty large. We'll warn for bundles exceeding them.
|
||||
const WARN_AFTER_BUNDLE_GZIP_SIZE = 512 * 1024;
|
||||
const WARN_AFTER_CHUNK_GZIP_SIZE = 1024 * 1024;
|
||||
|
||||
const isInteractive = process.stdout.isTTY;
|
||||
|
||||
// Warn and crash if required files are missing
|
||||
if (!checkRequiredFiles([paths.appHtml, paths.appIndexJs])) {
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const argv = process.argv.slice(2);
|
||||
const writeStatsJson = argv.indexOf('--stats') !== -1;
|
||||
|
||||
// Generate configuration
|
||||
const config = configFactory('production');
|
||||
|
||||
// We require that you explicitly set browsers and do not fall back to
|
||||
// browserslist defaults.
|
||||
const { checkBrowsers } = require('react-dev-utils/browsersHelper');
|
||||
checkBrowsers(paths.appPath, isInteractive)
|
||||
.then(() => {
|
||||
// First, read the current file sizes in build directory.
|
||||
// This lets us display how much they changed later.
|
||||
return measureFileSizesBeforeBuild(paths.appBuild);
|
||||
})
|
||||
.then(previousFileSizes => {
|
||||
// Remove all content but keep the directory so that
|
||||
// if you're in it, you don't end up in Trash
|
||||
fs.emptyDirSync(paths.appBuild);
|
||||
// Merge with the public folder
|
||||
copyPublicFolder();
|
||||
// Start the webpack build
|
||||
return build(previousFileSizes);
|
||||
})
|
||||
.then(
|
||||
({ stats, previousFileSizes, warnings }) => {
|
||||
if (warnings.length) {
|
||||
console.log(chalk.yellow('Compiled with warnings.\n'));
|
||||
console.log(warnings.join('\n\n'));
|
||||
console.log(
|
||||
'\nSearch for the ' +
|
||||
chalk.underline(chalk.yellow('keywords')) +
|
||||
' to learn more about each warning.'
|
||||
);
|
||||
console.log(
|
||||
'To ignore, add ' +
|
||||
chalk.cyan('// eslint-disable-next-line') +
|
||||
' to the line before.\n'
|
||||
);
|
||||
} else {
|
||||
console.log(chalk.green('Compiled successfully.\n'));
|
||||
}
|
||||
|
||||
console.log('File sizes after gzip:\n');
|
||||
printFileSizesAfterBuild(
|
||||
stats,
|
||||
previousFileSizes,
|
||||
paths.appBuild,
|
||||
WARN_AFTER_BUNDLE_GZIP_SIZE,
|
||||
WARN_AFTER_CHUNK_GZIP_SIZE
|
||||
);
|
||||
console.log();
|
||||
|
||||
const appPackage = require(paths.appPackageJson);
|
||||
const publicUrl = paths.publicUrlOrPath;
|
||||
const publicPath = config.output.publicPath;
|
||||
const buildFolder = path.relative(process.cwd(), paths.appBuild);
|
||||
printHostingInstructions(
|
||||
appPackage,
|
||||
publicUrl,
|
||||
publicPath,
|
||||
buildFolder,
|
||||
useYarn
|
||||
);
|
||||
},
|
||||
err => {
|
||||
const tscCompileOnError = process.env.TSC_COMPILE_ON_ERROR === 'true';
|
||||
if (tscCompileOnError) {
|
||||
console.log(
|
||||
chalk.yellow(
|
||||
'Compiled with the following type errors (you may want to check these before deploying your app):\n'
|
||||
)
|
||||
);
|
||||
printBuildError(err);
|
||||
} else {
|
||||
console.log(chalk.red('Failed to compile.\n'));
|
||||
printBuildError(err);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
)
|
||||
.catch(err => {
|
||||
if (err && err.message) {
|
||||
console.log(err.message);
|
||||
}
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
// Create the production build and print the deployment instructions.
// Resolves with { stats, previousFileSizes, warnings } on success and
// rejects with an Error whose message aggregates compiler errors.
function build(previousFileSizes) {
  console.log('Creating an optimized production build...');

  const compiler = webpack(config);
  return new Promise((resolve, reject) => {
    compiler.run((err, stats) => {
      let messages;
      if (err) {
        // A fatal compiler error (not a compilation error inside the bundle).
        if (!err.message) {
          return reject(err);
        }

        let errMessage = err.message;

        // Add additional information for postcss errors
        if (Object.prototype.hasOwnProperty.call(err, 'postcssNode')) {
          errMessage +=
            '\nCompileError: Begins at CSS selector ' +
            err['postcssNode'].selector;
        }

        messages = formatWebpackMessages({
          errors: [errMessage],
          warnings: [],
        });
      } else {
        // Normal path: pull only warnings/errors out of the stats object.
        messages = formatWebpackMessages(
          stats.toJson({ all: false, warnings: true, errors: true })
        );
      }
      if (messages.errors.length) {
        // Only keep the first error. Others are often indicative
        // of the same problem, but confuse the reader with noise.
        if (messages.errors.length > 1) {
          messages.errors.length = 1;
        }
        return reject(new Error(messages.errors.join('\n\n')));
      }
      // On CI (any CI value except the literal string 'false'),
      // warnings are promoted to build failures.
      if (
        process.env.CI &&
        (typeof process.env.CI !== 'string' ||
          process.env.CI.toLowerCase() !== 'false') &&
        messages.warnings.length
      ) {
        // Ignore sourcemap warnings in CI builds. See #8227 for more info.
        const filteredWarnings = messages.warnings.filter(
          w => !/Failed to parse source map/.test(w)
        );
        if (filteredWarnings.length) {
          console.log(
            chalk.yellow(
              '\nTreating warnings as errors because process.env.CI = true.\n' +
                'Most CI servers set it automatically.\n'
            )
          );
          return reject(new Error(filteredWarnings.join('\n\n')));
        }
      }

      const resolveArgs = {
        stats,
        previousFileSizes,
        warnings: messages.warnings,
      };

      // Optionally dump full webpack stats for bundle analysis tools.
      if (writeStatsJson) {
        return bfj
          .write(paths.appBuild + '/bundle-stats.json', stats.toJson())
          .then(() => resolve(resolveArgs))
          .catch(error => reject(new Error(error)));
      }

      return resolve(resolveArgs);
    });
  });
}
|
||||
|
||||
// Copy everything from public/ into the build output, except index.html,
// which webpack emits itself (via HtmlWebpackPlugin).
function copyPublicFolder() {
  const copyOptions = {
    dereference: true, // follow symlinks so the build folder is self-contained
    filter: sourcePath => sourcePath !== paths.appHtml,
  };
  fs.copySync(paths.appPublic, paths.appBuild, copyOptions);
}
|
154
asp-review-app/ui/scripts/start.js
Normal file
154
asp-review-app/ui/scripts/start.js
Normal file
|
@ -0,0 +1,154 @@
|
|||
'use strict';

// Do this as the first thing so that any code reading it knows the right env.
// (Must run before any require() below that branches on NODE_ENV.)
process.env.BABEL_ENV = 'development';
process.env.NODE_ENV = 'development';

// Makes the script crash on unhandled rejections instead of silently
// ignoring them. In the future, promise rejections that are not handled will
// terminate the Node.js process with a non-zero exit code.
process.on('unhandledRejection', err => {
  throw err;
});

// Ensure environment variables are read.
require('../config/env');

const fs = require('fs');
const chalk = require('react-dev-utils/chalk');
const webpack = require('webpack');
const WebpackDevServer = require('webpack-dev-server');
const clearConsole = require('react-dev-utils/clearConsole');
const checkRequiredFiles = require('react-dev-utils/checkRequiredFiles');
const {
  choosePort,
  createCompiler,
  prepareProxy,
  prepareUrls,
} = require('react-dev-utils/WebpackDevServerUtils');
const openBrowser = require('react-dev-utils/openBrowser');
const semver = require('semver');
const paths = require('../config/paths');
const configFactory = require('../config/webpack.config');
const createDevServerConfig = require('../config/webpackDevServer.config');
const getClientEnvironment = require('../config/env');
// Resolve the app's own copy of React so version checks match what it bundles.
const react = require(require.resolve('react', { paths: [paths.appPath] }));

const env = getClientEnvironment(paths.publicUrlOrPath.slice(0, -1));
const useYarn = fs.existsSync(paths.yarnLockFile);
const isInteractive = process.stdout.isTTY;

// Warn and crash if required files are missing
if (!checkRequiredFiles([paths.appHtml, paths.appIndexJs])) {
  process.exit(1);
}

// Tools like Cloud9 rely on this.
const DEFAULT_PORT = parseInt(process.env.PORT, 10) || 3000;
const HOST = process.env.HOST || '0.0.0.0';

if (process.env.HOST) {
  console.log(
    chalk.cyan(
      `Attempting to bind to HOST environment variable: ${chalk.yellow(
        chalk.bold(process.env.HOST)
      )}`
    )
  );
  console.log(
    `If this was unintentional, check that you haven't mistakenly set it in your shell.`
  );
  console.log(
    `Learn more here: ${chalk.yellow('https://cra.link/advanced-config')}`
  );
  console.log();
}

// We require that you explicitly set browsers and do not fall back to
// browserslist defaults.
const { checkBrowsers } = require('react-dev-utils/browsersHelper');
checkBrowsers(paths.appPath, isInteractive)
  .then(() => {
    // We attempt to use the default port but if it is busy, we offer the user to
    // run on a different port. `choosePort()` Promise resolves to the next free port.
    return choosePort(HOST, DEFAULT_PORT);
  })
  .then(port => {
    if (port == null) {
      // We have not found a port.
      return;
    }

    const config = configFactory('development');
    const protocol = process.env.HTTPS === 'true' ? 'https' : 'http';
    const appName = require(paths.appPackageJson).name;

    const useTypeScript = fs.existsSync(paths.appTsConfig);
    const urls = prepareUrls(
      protocol,
      HOST,
      port,
      paths.publicUrlOrPath.slice(0, -1)
    );
    // Create a webpack compiler that is configured with custom messages.
    const compiler = createCompiler({
      appName,
      config,
      urls,
      useYarn,
      useTypeScript,
      webpack,
    });
    // Load proxy config
    const proxySetting = require(paths.appPackageJson).proxy;
    const proxyConfig = prepareProxy(
      proxySetting,
      paths.appPublic,
      paths.publicUrlOrPath
    );
    // Serve webpack assets generated by the compiler over a web server.
    const serverConfig = {
      ...createDevServerConfig(proxyConfig, urls.lanUrlForConfig),
      host: HOST,
      port,
    };
    const devServer = new WebpackDevServer(serverConfig, compiler);
    // Launch WebpackDevServer.
    devServer.startCallback(() => {
      if (isInteractive) {
        clearConsole();
      }

      // Fast Refresh needs react >= 16.10; warn (but continue) otherwise.
      if (env.raw.FAST_REFRESH && semver.lt(react.version, '16.10.0')) {
        console.log(
          chalk.yellow(
            `Fast Refresh requires React 16.10 or higher. You are using React ${react.version}.`
          )
        );
      }

      console.log(chalk.cyan('Starting the development server...\n'));
      openBrowser(urls.localUrlForBrowser);
    });

    // Shut the dev server down cleanly on termination signals.
    ['SIGINT', 'SIGTERM'].forEach(function (sig) {
      process.on(sig, function () {
        devServer.close();
        process.exit();
      });
    });

    if (process.env.CI !== 'true') {
      // Gracefully exit when stdin ends
      process.stdin.on('end', function () {
        devServer.close();
        process.exit();
      });
    }
  })
  .catch(err => {
    if (err && err.message) {
      console.log(err.message);
    }
    process.exit(1);
  });
|
52
asp-review-app/ui/scripts/test.js
Normal file
52
asp-review-app/ui/scripts/test.js
Normal file
|
@ -0,0 +1,52 @@
|
|||
'use strict';

// Do this as the first thing so that any code reading it knows the right env.
process.env.BABEL_ENV = 'test';
process.env.NODE_ENV = 'test';
process.env.PUBLIC_URL = '';

// Makes the script crash on unhandled rejections instead of silently
// ignoring them. In the future, promise rejections that are not handled will
// terminate the Node.js process with a non-zero exit code.
process.on('unhandledRejection', err => {
  throw err;
});

// Ensure environment variables are read.
require('../config/env');

const jest = require('jest');
const execSync = require('child_process').execSync;
let argv = process.argv.slice(2);

// True when the current working directory is inside a git work tree.
function isInGitRepository() {
  try {
    execSync('git rev-parse --is-inside-work-tree', { stdio: 'ignore' });
    return true;
  } catch (e) {
    return false;
  }
}

// True when the current working directory is inside a Mercurial repository.
function isInMercurialRepository() {
  try {
    execSync('hg --cwd . root', { stdio: 'ignore' });
    return true;
  } catch (e) {
    return false;
  }
}

// Watch unless on CI or explicitly running all tests
if (
  !process.env.CI &&
  argv.indexOf('--watchAll') === -1 &&
  argv.indexOf('--watchAll=false') === -1
) {
  // https://github.com/facebook/create-react-app/issues/5210
  // Jest's --watch mode needs source control to detect changed files;
  // fall back to --watchAll when no repository is present.
  const hasSourceControl = isInGitRepository() || isInMercurialRepository();
  argv.push(hasSourceControl ? '--watch' : '--watchAll');
}


jest.run(argv);
|
12
asp-review-app/ui/src/App.css
Normal file
12
asp-review-app/ui/src/App.css
Normal file
|
@ -0,0 +1,12 @@
|
|||
.canvas{
|
||||
width: 100vw;
|
||||
height: 100vh;
|
||||
display: block;
|
||||
}
|
||||
|
||||
|
||||
.root{
|
||||
display: flex;
|
||||
}
|
||||
|
||||
|
10
asp-review-app/ui/src/core/core.tsx
Normal file
10
asp-review-app/ui/src/core/core.tsx
Normal file
|
@ -0,0 +1,10 @@
|
|||
// @ts-nocheck
// Central re-export of SVG assets as React components (CRA's SVGR transform
// provides the `ReactComponent` named export for .svg imports).
import {ReactComponent as SolidSvg} from "./assets/solid.svg";
import {ReactComponent as PartSvg} from "./assets/part.svg";

// Keyed collection of the icon components available to the UI.
export const svg = {SolidSvg, PartSvg}

// Alias kept so callers may import either the lowercase or uppercase name.
export { svg as SVG };
|
22
asp-review-app/ui/src/core/repository/http_repository.ts
Normal file
22
asp-review-app/ui/src/core/repository/http_repository.ts
Normal file
|
@ -0,0 +1,22 @@
|
|||
export enum HttpMethod {
|
||||
GET = 'GET',
|
||||
POST = 'POST'
|
||||
}
|
||||
export enum HttpRoute{
|
||||
insertionPath = '/assembly/preview/insertion_sequence/',
|
||||
assemblyPreviewPath = '/assembly/preview/subsequence/',
|
||||
projects = '/assembly/preview'
|
||||
}
|
||||
export class HttpRepository {
|
||||
static server = 'http://localhost:3002'
|
||||
static async jsonRequest<T>(method: HttpMethod, url: string, data?: any):Promise<T> {
|
||||
const reqInit = {
|
||||
'body': data,
|
||||
'method': method,
|
||||
}
|
||||
if (data !== undefined) {
|
||||
reqInit['body'] = JSON.stringify(data)
|
||||
}
|
||||
return (await fetch(this.server + url, reqInit)).json()
|
||||
}
|
||||
}
|
|
@ -0,0 +1 @@
|
|||
export {}
|
|
@ -0,0 +1,39 @@
|
|||
import * as React from "react";
|
||||
import { useEffect, useState } from "react";
|
||||
import {
|
||||
HttpMethod,
|
||||
HttpRepository,
|
||||
HttpRoute,
|
||||
} from "../../core/repository/http_repository";
|
||||
import { Button } from "antd";
|
||||
export const ProjectsPath = '/'
|
||||
export const ProjectScreen: React.FunctionComponent = () => {
|
||||
const [projects, setProjects] = useState<Array<String>>([]);
|
||||
|
||||
useEffect(() => {
|
||||
async function fetchData() {
|
||||
setProjects(
|
||||
await HttpRepository.jsonRequest<Array<String>>(
|
||||
HttpMethod.GET,
|
||||
HttpRoute.projects
|
||||
)
|
||||
);
|
||||
}
|
||||
fetchData();
|
||||
}, []);
|
||||
return (
|
||||
<div>
|
||||
<div>Projects</div>
|
||||
<div>
|
||||
{projects.map((el) => {
|
||||
return (
|
||||
<>
|
||||
<div>{el}</div> <Button> Preview insert Path </Button>
|
||||
<Button>Preview assembly logical </Button>{" "}
|
||||
</>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
|
@ -0,0 +1,198 @@
|
|||
import * as React from "react";
import {
  DirectionalLight,
  Object3D,
  PerspectiveCamera,
  Scene,
  WebGLRenderer,
  AmbientLight,
  Vector3,
  Group,
  Quaternion,
} from "three";
import { OrbitControls } from "three/examples/jsm/controls/OrbitControls";
import { OBJLoader } from "three/examples/jsm/loaders/OBJLoader";
import CSS from "csstype";
import {
  HttpMethod,
  HttpRepository,
  HttpRoute,
} from "../../core/repository/http_repository";
import { useParams } from "react-router-dom";


// Inline style for the full-screen WebGL canvas.
const canvasStyle: CSS.Properties = {
  backgroundColor: "rgb(151 41 41 / 85%)",
};

// Route prefix for this screen; a project id is appended as :id.
export const AssemblyPreviewInsertVectorPath = "/insertion_vector/";

// Response shape of the insertion-sequence endpoint.
export interface AssemblyPreviewInsertionPathModel {
  offset: number;
  count: number;
  parent: string;  // URL of the parent part's .obj mesh
  child: string;   // URL of the child part's .obj mesh
  insertions: Insertions;
}

export interface Insertions {
  time: number;
  insertion_path: InsertionPath[];
  status: string;
}

// One pose along the insertion trajectory.
export interface InsertionPath {
  // "quadrelion" is the backend's field name; used as quaternion [x, y, z, w]
  // in assemblyAnimate below. Do not rename: it must match the JSON schema.
  quadrelion: number[];
  xyz: number[];
  euler: number[];
}

/**
 * Screen that renders two OBJ meshes (child + parent) with three.js and
 * animates the parent along the insertion trajectory fetched from the server.
 * Clicking "next" advances to the next insertion sequence.
 */
export function AssemblyPreviewInsertVector() {
  const container = new Object3D();
  const canvasRef = React.useRef<HTMLCanvasElement>(null);
  const scene = new Scene();
  const camera = new PerspectiveCamera(
    80,
    window.innerWidth / window.innerHeight,
    0.1,
    1000
  );
  // Index of the insertion sequence currently shown (1-based).
  let renderId = 1;
  // Total sequence count reported by the server; used to wrap renderId.
  let assemblyCounter: undefined | Number = undefined;
  // Project id taken from the :id route parameter.
  let params = useParams().id;

  // NOTE(review): no dependency array, so this effect (renderer setup plus
  // renderObject) re-runs on every render — confirm this is intended.
  React.useEffect(() => {
    const renderer = new WebGLRenderer({
      canvas: canvasRef.current as HTMLCanvasElement,
      antialias: true,
      alpha: true,
    });

    camera.position.set(2, 1, 2);

    const directionalLight = new DirectionalLight(0xffffff, 0.2);
    directionalLight.castShadow = true;
    directionalLight.position.set(-1, 2, 4);
    scene.add(directionalLight);

    const ambientLight = new AmbientLight(0xffffff, 0.7);
    scene.add(ambientLight);
    container.position.set(0, 0, 0);

    renderer.setSize(window.innerWidth, window.innerHeight);

    // Keep camera aspect and canvas size in sync with the window.
    const onResize = () => {
      camera.aspect = window.innerWidth / window.innerHeight;
      camera.updateProjectionMatrix();
      renderer!.setSize(window.innerWidth, window.innerHeight);
    };

    window.addEventListener("resize", onResize, false);
    new OrbitControls(camera, renderer.domElement);

    renderer!.setAnimationLoop(() => {
      renderer!.render(scene, camera);
    });


    renderObject(1, params!);
  });

  // Fetches one insertion sequence, loads both meshes, and plays the
  // trajectory on the second loaded object (the parent — see loadObject order).
  async function renderObject(renderId: Number, projectId: String) {
    const assemblyResponse =
      await HttpRepository.jsonRequest<AssemblyPreviewInsertionPathModel>(
        HttpMethod.GET,
        `${HttpRoute.insertionPath}${projectId}?count=${renderId}`
      );
    // loadObject returns ids in input order [child, parent]; index 1 = parent.
    const objectControl = (
      await loadObject([assemblyResponse.child, assemblyResponse.parent])
    )[1];


    // Applies one trajectory pose (position + quaternion) to the scene object.
    // The third flag `b` is currently unused by the body.
    function assemblyAnimate(objectId: Number, coords: InsertionPath, b:boolean) {
      const object = scene.getObjectById(objectId as number);
      const r = 1
      object?.position.set(coords.xyz[0] * r, coords.xyz[1] * r, coords.xyz[2] * r);
      object?.setRotationFromQuaternion(
        new Quaternion(
          coords.quadrelion[0],
          coords.quadrelion[1],
          coords.quadrelion[2],
          coords.quadrelion[3]
        )
      );
      console.log(object?.position)
    }
    // Promise-based sleep helper.
    function timer(ms: number) {
      return new Promise((res) => setTimeout(res, ms));
    }
    const b = true
    // Steps through the trajectory, one pose every ~3 ms.
    async function load(id: Number, len: number) {
      for (var i = 0; i < len; i++) {

        assemblyAnimate(objectControl, assemblyResponse.insertions.insertion_path[i], b);
        await timer(3);
      }
    }

    // The server delivers the path end-to-start; reverse to animate insertion.
    assemblyResponse.insertions.insertion_path = assemblyResponse.insertions.insertion_path.reverse()
    load(objectControl, assemblyResponse.insertions.insertion_path.length);
  }

  // "next" button handler: advance (wrapping) and re-render the scene.
  async function click() {
    renderId = renderId + 1;

    if (assemblyCounter === renderId) {
      renderId = 1;
    }
    scene.clear();
    renderObject(renderId, params!);
  }

  // Loads each .obj URL, adds it to the shared container, and returns the
  // three.js object ids in the same order as the input URLs.
  async function loadObject(objectList: string[]): Promise<Number[]> {
    const promises: Array<Promise<Group>> = [];
    objectList.forEach((e) => {
      const fbxLoader = new OBJLoader();
      promises.push(fbxLoader.loadAsync(e));
    });

    const objects = await Promise.all(promises);
    const result: Array<Number> = [];
    for (let i = 0; objects.length > i; i++) {
      const el = objects[i];
      container.add(el);
      scene.add(container);
      result.push(el.id);
      // One extra light per loaded object, offset behind the container.
      const directionalLight = new DirectionalLight(0xffffff, 0.2);
      directionalLight.castShadow = true;
      directionalLight.position.set(container.position.x - 10,container.position.y - 10,container.position.z - 10);
      scene.add(directionalLight);
      container.position.set(0, 0, 0);
      fitCameraToCenteredObject(camera, container);
    }
    return result;
  }
  // Places the object `dist` units in front of the camera, facing it.
  function fitCameraToCenteredObject(
    camera: PerspectiveCamera,
    object: Object3D
  ) {
    const dist = 20;
    const vector = new Vector3();

    camera.getWorldDirection(vector);

    vector.multiplyScalar(dist);
    vector.add(camera.position);

    object.position.set(vector.x, vector.y, vector.z);
    object.setRotationFromQuaternion(camera.quaternion);
  }
  return (
    <>
      <div className="loader">
        <div onClick={() => click()}>next</div>
        <canvas style={canvasStyle} ref={canvasRef} />
      </div>
    </>
  );
}
|
|
@ -0,0 +1,141 @@
|
|||
import React, { useEffect } from "react";
import {
  DirectionalLight,
  Object3D,
  PerspectiveCamera,
  Scene,
  WebGLRenderer,
  AmbientLight,
  Vector3,
} from "three";
import { OrbitControls } from "three/examples/jsm/controls/OrbitControls";
import { OBJLoader } from "three/examples/jsm/loaders/OBJLoader";
import CSS from "csstype";

import { useParams } from "react-router-dom";
import { HttpMethod, HttpRepository, HttpRoute } from "../../core/repository/http_repository";

// Inline style for the full-screen WebGL canvas.
const canvasStyle: CSS.Properties = {
  backgroundColor: "rgb(151 41 41 / 85%)",
};

// Response shape of the subsequence endpoint: mesh URLs plus paging info.
export interface AssemblyPreviewStructure {
  assembly: string[]; // URLs of the .obj meshes in this assembly step
  offset: number;
  count: number;      // total number of steps available
}

// Route prefix for this screen; a project id is appended as :id.
export const AssemblyPreviewSubsequencePath = "/123/";

/**
 * Screen that shows one step of an assembly subsequence as a three.js scene.
 * Clicking the canvas advances to the next step (wrapping at `count`).
 */
export const AssemblyPreviewSubsequence = () => {
  const container = new Object3D();
  const canvasRef = React.useRef<HTMLCanvasElement>(null);
  const scene = new Scene();
  const camera = new PerspectiveCamera(
    80,
    window.innerWidth / window.innerHeight,
    0.1,
    1000
  );
  // Index of the assembly step currently shown (1-based).
  let renderId = 1;
  // Total step count reported by the server; used to wrap renderId.
  let assemblyCounter: undefined | Number = undefined;
  // Project id taken from the :id route parameter.
  let params = useParams().id;

  // NOTE(review): no dependency array, so this setup re-runs on every
  // render — confirm this is intended.
  useEffect(() => {
    const renderer = new WebGLRenderer({
      canvas: canvasRef.current as HTMLCanvasElement,
      antialias: true,
      alpha: true,
    });

    camera.position.set(2, 1, 2);

    const directionalLight = new DirectionalLight(0xffffff, 0.2);
    directionalLight.castShadow = true;
    directionalLight.position.set(-1, 2, 4);
    scene.add(directionalLight);

    const ambientLight = new AmbientLight(0xffffff, 0.7);
    scene.add(ambientLight);
    container.position.set(0, 0, 0);

    renderer.setSize(window.innerWidth, window.innerHeight);

    // Keep camera aspect and canvas size in sync with the window.
    const onResize = () => {
      camera.aspect = window.innerWidth / window.innerHeight;
      camera.updateProjectionMatrix();
      renderer!.setSize(window.innerWidth, window.innerHeight);
    };

    window.addEventListener("resize", onResize, false);
    new OrbitControls(camera, renderer.domElement);

    renderer!.setAnimationLoop(() => {
      renderer!.render(scene, camera);
    });
    renderObject(1, params!);
  } );

  // Fetches the given step and loads its meshes into the scene.
  async function renderObject(renderId: Number,projectId:string ) {
    const assemblyResponse =
      await HttpRepository.jsonRequest<AssemblyPreviewStructure>(
        HttpMethod.GET,
        `${HttpRoute.assemblyPreviewPath}${projectId}?count=${renderId}`
      );
    assemblyCounter = assemblyResponse.count;

    loadObject(assemblyResponse.assembly);
  }

  // Canvas click handler: advance (wrapping) and render the next step.
  // NOTE(review): unlike the insertion-vector screen, the scene is not
  // cleared before re-rendering — meshes accumulate; confirm intended.
  async function click() {
    renderId = renderId + 1;
    console.log(assemblyCounter);
    console.log(renderId);
    if (assemblyCounter === renderId) {
      renderId = 1;
    }
    renderObject(renderId, params!);
  }

  // Loads each .obj URL, scales/orients it, and adds it to the shared container.
  function loadObject(objectList: string[]) {
    objectList.forEach((el) => {
      const fbxLoader = new OBJLoader();
      fbxLoader.load(
        el,
        (object) => {
          object.scale.x = 0.3;
          object.scale.y = 0.3;
          object.scale.z = 0.3;
          object.rotation.x = -Math.PI / 2;
          object.position.y = -30;
          container.add(object);
          scene.add(container);

          fitCameraToCenteredObject(camera, container);
        },
        (xhr) => {
          // Progress callback: log percentage loaded.
          console.log((xhr.loaded / xhr.total) * 100 + "% loaded");
        },
        (error) => {
          console.log(error);
        }
      );
    });
  }
  // Places the object `dist` units in front of the camera, facing it.
  function fitCameraToCenteredObject(
    camera: PerspectiveCamera,
    object: Object3D
  ) {
    const dist = 50;
    const vector = new Vector3();

    camera.getWorldDirection(vector);

    vector.multiplyScalar(dist);
    vector.add(camera.position);

    object.position.set(vector.x, vector.y, vector.z);
    object.setRotationFromQuaternion(camera.quaternion);
  }
  return <canvas onClick={() => click()} style={canvasStyle} ref={canvasRef} />;
};
|
14
asp-review-app/ui/src/global.d.ts
vendored
Normal file
14
asp-review-app/ui/src/global.d.ts
vendored
Normal file
|
@ -0,0 +1,14 @@
|
|||
/// <reference types="react-scripts" />
import { resources, defaultNS } from './i18n';

// Teach i18next the shape of our translation resources so that t() keys
// are statically checked against the English resource bundle.
declare module 'i18next' {
  interface CustomTypeOptions {
    defaultNS: typeof defaultNS;
    resources: typeof resources['en'];
  }
}
// Allow importing *.svg files as React components (SVGR default export).
declare module "*.svg" {
  import { ReactElement, SVGProps } from "react";
  const content: (props: SVGProps<SVGElement>) => ReactElement;
  export default content;
}
|
13
asp-review-app/ui/src/index.css
Normal file
13
asp-review-app/ui/src/index.css
Normal file
|
@ -0,0 +1,13 @@
|
|||
body {
|
||||
margin: 0;
|
||||
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen',
|
||||
'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue',
|
||||
sans-serif;
|
||||
-webkit-font-smoothing: antialiased;
|
||||
-moz-osx-font-smoothing: grayscale;
|
||||
}
|
||||
|
||||
code {
|
||||
font-family: source-code-pro, Menlo, Monaco, Consolas, 'Courier New',
|
||||
monospace;
|
||||
}
|
34
asp-review-app/ui/src/index.tsx
Normal file
34
asp-review-app/ui/src/index.tsx
Normal file
|
@ -0,0 +1,34 @@
|
|||
import { render } from "react-dom";
import "./App.css";
import "./index.css";
import { createBrowserRouter, RouterProvider } from "react-router-dom";
import {
  AssemblyPreviewInsertVector,
  AssemblyPreviewInsertVectorPath,
} from "./features/assembly_preview_insert_vector/Assembly_preview_insert_vector_screen";
import {
  ProjectScreen,
  ProjectsPath,
} from "./features/all_project/all_project_screen";
import {
  AssemblyPreviewSubsequence,
  AssemblyPreviewSubsequencePath,
} from "./features/assembly_preview_subsequence/assembly_preview_subsequence_screen";

// Mount point declared in public/index.html.
const rootElement = document.getElementById("root");

// Application route table: the project list plus the two preview screens,
// each of which receives a project id as the :id path parameter.
const router = createBrowserRouter([
  {
    path: ProjectsPath,
    element: <ProjectScreen />,
  },
  {
    path: AssemblyPreviewSubsequencePath + ":id",
    element: <AssemblyPreviewSubsequence />,
  },
  {
    path: AssemblyPreviewInsertVectorPath + ":id",
    element: <AssemblyPreviewInsertVector />,
  },
]);
// NOTE(review): ReactDOM.render is the legacy (pre-React-18) API; consider
// createRoot from react-dom/client if the installed React is 18+.
render(<RouterProvider router={router} />, rootElement);
|
15
asp-review-app/ui/src/reportWebVitals.ts
Normal file
15
asp-review-app/ui/src/reportWebVitals.ts
Normal file
|
@ -0,0 +1,15 @@
|
|||
import { ReportHandler } from 'web-vitals';
|
||||
|
||||
const reportWebVitals = (onPerfEntry?: ReportHandler) => {
|
||||
if (onPerfEntry && onPerfEntry instanceof Function) {
|
||||
import('web-vitals').then(({ getCLS, getFID, getFCP, getLCP, getTTFB }) => {
|
||||
getCLS(onPerfEntry);
|
||||
getFID(onPerfEntry);
|
||||
getFCP(onPerfEntry);
|
||||
getLCP(onPerfEntry);
|
||||
getTTFB(onPerfEntry);
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
export default reportWebVitals;
|
27
asp-review-app/ui/tsconfig.json
Normal file
27
asp-review-app/ui/tsconfig.json
Normal file
|
@ -0,0 +1,27 @@
|
|||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES6",
|
||||
"lib": [
|
||||
"dom",
|
||||
"dom.iterable",
|
||||
"esnext"
|
||||
],
|
||||
"allowJs": true,
|
||||
"skipLibCheck": true,
|
||||
"esModuleInterop": true,
|
||||
"experimentalDecorators": true,
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"strict": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"module": "esnext",
|
||||
"moduleResolution": "node",
|
||||
"resolveJsonModule": true,
|
||||
"isolatedModules": true,
|
||||
"noEmit": true,
|
||||
"jsx": "react-jsx"
|
||||
},
|
||||
"include": [
|
||||
"src"
|
||||
]
|
||||
}
|
10491
asp-review-app/ui/yarn.lock
Normal file
10491
asp-review-app/ui/yarn.lock
Normal file
File diff suppressed because it is too large
Load diff
5
asp/requirements.txt
Normal file
5
asp/requirements.txt
Normal file
|
@ -0,0 +1,5 @@
|
|||
argparse
matplotlib
pybullet
xmlformatter
|
|
@ -5,10 +5,9 @@ OBJ mesh exporter.
|
|||
Exports all objects in scene.
|
||||
You can set export path and subdir.
|
||||
"""
|
||||
__version__ = "0.1"
|
||||
__version__ = "0.2"
|
||||
|
||||
import logging
|
||||
import sys
|
||||
import bpy
|
||||
import os
|
||||
|
||||
|
@ -16,19 +15,22 @@ logger = logging.getLogger(__name__)
|
|||
logging.basicConfig(level=logging.INFO)
|
||||
|
||||
|
||||
def export_obj(path, subdir=""):
|
||||
def export_obj(path, subdir="", filename=None):
|
||||
""" OBJ mesh exporter. Exports all objects in scene. """
|
||||
for ob in bpy.context.scene.objects:
|
||||
# deselect all but just one object and make it active
|
||||
bpy.ops.object.select_all(action='DESELECT')
|
||||
ob.select_set(state=True)
|
||||
bpy.context.view_layer.objects.active = ob
|
||||
filename = bpy.context.active_object.name
|
||||
if not filename:
|
||||
filename = bpy.context.active_object.name
|
||||
if not filename.endswith('.obj'):
|
||||
filename = (filename + '.obj')
|
||||
# export obj
|
||||
obj_path = os.path.join(path, subdir).replace('\\', '/')
|
||||
if not os.path.isdir(obj_path):
|
||||
os.makedirs(obj_path)
|
||||
outpath = os.path.join(obj_path, filename)
|
||||
logger.debug('vizual:', outpath)
|
||||
logger.debug('Exporting to %s', outpath)
|
||||
|
||||
bpy.ops.wm.obj_export(filepath=outpath, forward_axis='Y', up_axis='Z', global_scale=1000, apply_modifiers=True, export_selected_objects=True, export_uv=True, export_normals=True, export_colors=False, export_materials=True, export_pbr_extensions=False, path_mode='AUTO', export_triangulated_mesh=True)
|
||||
return bpy.ops.wm.obj_export(filepath=outpath, forward_axis='Y', up_axis='Z', global_scale=1000, apply_modifiers=True, export_selected_objects=True, export_uv=True, export_normals=True, export_colors=False, export_materials=True, export_pbr_extensions=False, path_mode='AUTO', export_triangulated_mesh=True)
|
||||
|
|
1
cg/blender/import_mesh/README.md
Normal file
1
cg/blender/import_mesh/README.md
Normal file
|
@ -0,0 +1 @@
|
|||
## Модули импорта для Blender
|
7
cg/blender/import_mesh/__init__.py
Normal file
7
cg/blender/import_mesh/__init__.py
Normal file
|
@ -0,0 +1,7 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
DESCRIPTION.
|
||||
Blender export modules.
|
||||
Modules export all objects in the scene.
|
||||
You can set export path and subdir.
|
||||
"""
|
29
cg/blender/import_mesh/obj.py
Normal file
29
cg/blender/import_mesh/obj.py
Normal file
|
@ -0,0 +1,29 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
DESCRIPTION.
|
||||
OBJ mesh importer.
|
||||
Import files in blender scene.
|
||||
"""
|
||||
__version__ = "0.2"
|
||||
|
||||
import logging
|
||||
import bpy
|
||||
import os
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
|
||||
|
||||
def import_obj(path):
    """Import *.obj geometry into the current Blender scene.

    ``path`` may point either at a single ``.obj`` file or at a directory,
    in which case every ``.obj`` file inside it is imported (sorted by name).
    Returns the ``bpy`` operator result, or ``None`` (after logging) when the
    path is neither a ``.obj`` file nor a directory.
    """
    # Start from a clean selection so only the imported objects end up selected.
    bpy.ops.object.select_all(action='DESELECT')

    normalized = path.replace('\\', '/')

    # Single-file import.
    if os.path.isfile(normalized) and normalized.endswith('.obj'):
        return bpy.ops.wm.obj_import(
            filepath=normalized, global_scale=0.001, clamp_size=0,
            forward_axis='Y', up_axis='Z')

    # Whole-directory import: every .obj entry, sorted by file name.
    if os.path.isdir(normalized):
        obj_list = [
            dict(name=entry)
            for entry in sorted(os.listdir(normalized))
            if entry.endswith('.obj')
        ]
        return bpy.ops.wm.obj_import(
            directory=normalized, files=obj_list, global_scale=0.001,
            clamp_size=0, forward_axis='Y', up_axis='Z')

    # logger.info() returns None, so callers receive None on bad input.
    return logger.info("Path must be a directory or *.obj file!")
|
||||
|
|
@ -315,26 +315,24 @@ uidir = os.path.join(FreeCAD.getUserAppDataDir(),
|
|||
"Mod", __workbenchname__, "UI")
|
||||
icondir = os.path.join(uidir, "icons")
|
||||
|
||||
# Tools.spawnClassCommand("FrameCommand",
|
||||
# makeFrame,
|
||||
# {"Pixmap": str(os.path.join(icondir, "frame.svg")),
|
||||
# "MenuText": "Make a free frame",
|
||||
# "ToolTip": "Make a freestanding reference frame."})
|
||||
|
||||
Tools.spawnClassCommand("BoMGeneration",
|
||||
run_BoM_list,
|
||||
{"Pixmap": str(os.path.join(icondir, "BoMList.svg")),
|
||||
"MenuText": "Generate Bill of Materials",
|
||||
"ToolTip": "Press the button to create big BoM"})
|
||||
# Tools.spawnClassCommand("ASM4StructureParsing",
|
||||
# Asm4StructureParseUseCase().initParse,
|
||||
# {"Pixmap": str(os.path.join(icondir, "assembly4.svg")),
|
||||
# "MenuText": "Make a ASM4 parsing",
|
||||
# "ToolTip": "Make a ASM4 1"})
|
||||
# Tools.spawnClassCommand("SelectedPartFrameCommand",
|
||||
# makeSelectedPartFrames,
|
||||
# {"Pixmap": str(os.path.join(icondir, "partframe.svg")),
|
||||
# "MenuText": "selected parts frames",
|
||||
# "ToolTip": "Make selected parts frames."})
|
||||
Tools.spawnClassCommand("FrameCommand",
|
||||
makeFrame,
|
||||
{"Pixmap": str(os.path.join(icondir, "frame.svg")),
|
||||
"MenuText": "Make a free frame",
|
||||
"ToolTip": "Make a freestanding reference frame."})
|
||||
|
||||
|
||||
Tools.spawnClassCommand("SelectedPartFrameCommand",
|
||||
makeSelectedPartFrames,
|
||||
{"Pixmap": str(os.path.join(icondir, "partframe.svg")),
|
||||
"MenuText": "selected parts frames",
|
||||
"ToolTip": "Make selected parts frames."})
|
||||
|
||||
Tools.spawnClassCommand("AllPartFramesCommand",
|
||||
makeAllPartFrames,
|
||||
|
|
|
@ -47,7 +47,8 @@ class Frames(Workbench):
|
|||
self.toolcommands = [
|
||||
"ExportPlacementAndPropertiesCommand",
|
||||
"ExportGazeboModels",
|
||||
"InsertGraspPose"
|
||||
"InsertGraspPose",
|
||||
"ASM4StructureParsing"
|
||||
]
|
||||
self.appendToolbar(f"{__class__.__name__} Frames", self.framecommands)
|
||||
self.appendToolbar(f"{__class__.__name__} Tools", self.toolcommands)
|
||||
|
|
|
@ -17,6 +17,7 @@ import json # For exporting part infos
|
|||
import os # for safer path handling
|
||||
import GazeboExport
|
||||
import GraspPose
|
||||
from scenarios.robossembler_freecad_export_scenario import RobossemblerFreeCadExportScenario
|
||||
if FreeCAD.GuiUp:
|
||||
import FreeCADGui
|
||||
from PySide import QtGui
|
||||
|
@ -568,6 +569,12 @@ spawnClassCommand("InsertGraspPose",
|
|||
"MenuText": "Insert Grasp Pose",
|
||||
"ToolTip": "Insert Grasp Pose for Selected Part"})
|
||||
|
||||
spawnClassCommand("ASM4StructureParsing",
|
||||
RobossemblerFreeCadExportScenario().call,
|
||||
{"Pixmap": str(os.path.join(icondir, "assembly4.svg")),
|
||||
"MenuText": "Make a ASM4 parsing",
|
||||
"ToolTip": "Make a ASM4 1"})
|
||||
|
||||
|
||||
###################################################################
|
||||
# Information from primitive type
|
||||
|
|
9697
cg/freecad/Frames/axis.gcode
Normal file
9697
cg/freecad/Frames/axis.gcode
Normal file
File diff suppressed because it is too large
Load diff
31
cg/freecad/Frames/file.py
Normal file
31
cg/freecad/Frames/file.py
Normal file
|
@ -0,0 +1,31 @@
|
|||
|
||||
import os
import subprocess
# BUG FIX: `import subprocess` appeared twice and `import time` was unused;
# kept a single subprocess import (os retained — do not drop existing imports).
# import FreeCAD
# import Mesh


import gcoder


# gcode_path = '/home/mark-voltov/Winder/axis.gcode'

# gcode.main(gcode)


# Paths are machine-specific; presumably this script is a local experiment
# driving gcoder.py on a fixed winding-job file — TODO parameterize.
file_path = '/home/mark-voltov/Winder/axis.gcode'
gcoder_path = '/home/mark-voltov/GitProjects/framework/cg/freecad/Frames/gcoder.py'


# Read the raw G-code lines for in-process parsing.
with open(file_path, 'r') as file:
    gcode = file.readlines()

# print(gcode)

# Parse in-process (result discarded, as in the original).
gcoder.LightGCode(gcode)

# BUG FIX: the original `cmd = [file_path gcoder_path]` was a SyntaxError
# (missing comma) and did not name an interpreter. The intent — per the
# commented line below — is to run gcoder.py on the .gcode file.
cmd = ['python3', gcoder_path, file_path]
subprocess.run(cmd)
# subprocess.run( '/home/mark-voltov/Winder/axis.gcode', '/home/mark-voltov/GitProjects/framework/cg/freecad/Frames/gcoder.py')
|
60
cg/freecad/Frames/freecad2pddl.py
Normal file
60
cg/freecad/Frames/freecad2pddl.py
Normal file
|
@ -0,0 +1,60 @@
|
|||
# import FreeCAD as App

# doc = App.ActiveDocument

# Generate PDDL (comments translated from Russian):
# - import a set of objects from the FreeCAD build tree
# - emit a predicates template for a 3D-printing planning domain

# BUG FIX: the module previously contained a second, bare `predicates =`
# assignment followed by a mis-quoted parenthesised block, which made the
# whole file a SyntaxError. The single valid definition is kept and printed.
predicates = ';; Предикаты \n(:predicates \n(at ?c - component ?location - component) \n(printed ?c - component) \n(has-material ?m - material) \n(compatible ?m - material ?c - component))'

print(predicates)

# Reference sketch of the full domain, kept for later work:
#
# ;; Predicates
# (:predicates
#     (at ?c - component ?location - component)
#     (printed ?c - component)
#     (has-material ?m - material)
#     (compatible ?m - material ?c - component))
#
# ;; Actions
# (:action load-material
#     :parameters (?m - material ?c - component)
#     :precondition (and (at ?m ?c) (compatible ?m ?c))
#     :effect (has-material ?m))
#
# (:action unload-material
#     :parameters (?m - material ?c - component)
#     :precondition (has-material ?m)
#     :effect (and (not (has-material ?m)) (at ?m ?c)))
#
# (:action print-component
#     :parameters (?c - component)
#     :precondition (and (at ?c ?printer) (has-material ?m) (compatible ?m ?c))
#     :effect (printed ?c))
#
# ;; Goal
# (:goal (forall (?c - component) (printed ?c))))
|
769
cg/freecad/Frames/gcoder.py
Normal file
769
cg/freecad/Frames/gcoder.py
Normal file
|
@ -0,0 +1,769 @@
|
|||
#!/usr/bin/env python3
|
||||
#
|
||||
# This file is part of the Printrun suite.
|
||||
#
|
||||
# Printrun is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Printrun is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# # along with Printrun. If not, see <http://www.gnu.org/licenses/>.
|
||||
# Добавлен файл программы, проводящей анализ файла .gcode и на основе полученных данных вычисляющая материальные и временные затраты на 3д-печать.
|
||||
# Программа вычисляет габариты задания, длину затрачиваемой нити филамента, длительность маршрута движения головки принтера и длительность печати.
|
||||
# Эксперименты показывают, что на данный момент есть расхождение между реальным временем и вычисленным с помощью программы,0 в зависимости от файла, на 20-40% (недооценка вычисленной длительности в сравнении с реальной). Остальные результаты выглядят адекватными, но проверить их сложнее.
|
||||
|
||||
|
||||
|
||||
import sys
|
||||
import re
|
||||
import math
|
||||
import datetime
|
||||
import logging
|
||||
from array import array
|
||||
|
||||
gcode_parsed_args = ["x", "y", "e", "f", "z", "i", "j"]
|
||||
gcode_parsed_nonargs = 'gtmnd'
|
||||
to_parse = "".join(gcode_parsed_args) + gcode_parsed_nonargs
|
||||
gcode_exp = re.compile("\([^\(\)]*\)|;.*|[/\*].*\n|([%s])\s*([-+]?[0-9]*\.?[0-9]*)" % to_parse)
|
||||
gcode_strip_comment_exp = re.compile("\([^\(\)]*\)|;.*|[/\*].*\n")
|
||||
m114_exp = re.compile("\([^\(\)]*\)|[/\*].*\n|([XYZ]):?([-+]?[0-9]*\.?[0-9]*)")
|
||||
specific_exp = "(?:\([^\(\)]*\))|(?:;.*)|(?:[/\*].*\n)|(%s[-+]?[0-9]*\.?[0-9]*)"
|
||||
move_gcodes = ["G0", "G1", "G2", "G3"]
|
||||
|
||||
class PyLine:
    """Pure-Python fallback for a parsed G-code line.

    Used when the optimized ``gcoder_line`` extension is unavailable.
    Attributes are filled in lazily by ``split``/``parse_coordinates``;
    any slot never assigned reads as ``None`` via ``__getattr__``.
    """

    __slots__ = ('x', 'y', 'z', 'e', 'f', 'i', 'j',
                 'raw', 'command', 'is_move',
                 'relative', 'relative_e',
                 'current_x', 'current_y', 'current_z', 'extruding',
                 'current_tool',
                 'gcview_end_vertex')

    def __init__(self, l):
        # Keep the raw source text; everything else is parsed on demand.
        self.raw = l

    def __getattr__(self, name):
        # Only called for slots that were never set — treat them as None
        # instead of raising AttributeError.
        return None
|
||||
|
||||
class PyLightLine:
    """Memory-light fallback line: stores only the raw text and the command.

    Same lazy-None attribute behavior as PyLine, but without per-line
    coordinate slots.
    """

    __slots__ = ('raw', 'command')

    def __init__(self, l):
        self.raw = l

    def __getattr__(self, name):
        # Unset slots read as None instead of raising AttributeError.
        return None
|
||||
|
||||
# Prefer the compiled gcoder_line implementation; fall back to the
# pure-Python line classes when it cannot be imported.
try:
    from . import gcoder_line
except Exception as import_error:
    logging.warning("Memory-efficient GCoder implementation unavailable: %s" % import_error)
    Line = PyLine
    LightLine = PyLightLine
else:
    Line = gcoder_line.GLine
    LightLine = gcoder_line.GLightLine
|
||||
|
||||
def find_specific_code(line, code):
    """Return the numeric argument of the given code letter (e.g. "S", "P")
    found in line.raw, or None when the line carries no such word."""
    pattern = specific_exp % code
    hits = [group for group in re.findall(pattern, line.raw) if group]
    return float(hits[0][1:]) if hits else None
|
||||
|
||||
def S(line):
    # S word of the line (fan speed / temperature / spindle, depending on
    # the command), or None when absent.
    return find_specific_code(line, "S")
|
||||
|
||||
def P(line):
    # P word of the line (e.g. dwell time in ms for G4), or None when absent.
    return find_specific_code(line, "P")
|
||||
|
||||
def split(line):
    """Tokenize line.raw into (letter, value) pairs.

    Side effects: sets line.command (normalized, e.g. "G1") and
    line.is_move. Returns the token list, or [line.raw] when the line
    cannot be parsed at all.
    """
    tokens = gcode_exp.findall(line.raw.lower())
    # A leading N word is just a line number — discard it.
    if tokens and tokens[0][0] == "n":
        del tokens[0]
    if not tokens:
        line.command = line.raw
        line.is_move = False
        logging.warning("raw G-Code line \"%s\" could not be parsed" % line.raw)
        return [line.raw]
    letter, number = tokens[0]
    line.command = letter.upper() + number
    line.is_move = line.command in move_gcodes
    return tokens
|
||||
|
||||
def parse_coordinates(line, split_raw, imperial = False, force = False):
    """Copy parsed argument words onto line attributes (line.x, line.y, ...).

    Inch values are converted to millimetres when imperial is True.
    Non-G commands are skipped unless force is True.
    """
    if not force and line.command[0] != "G":
        return
    scale = 25.4 if imperial else 1
    for letter, value in split_raw:
        # Skip command letters (g/t/m/n/d) and empty captures.
        if letter not in gcode_parsed_nonargs and value:
            setattr(line, letter, scale * float(value))
|
||||
|
||||
class Layer(list):
    """A list of G-code lines printed at a single height."""

    __slots__ = ("duration", "z")

    def __init__(self, lines, z = None):
        super().__init__(lines)
        self.z = z          # layer height; None when not yet known
        self.duration = 0   # seconds spent in this layer, accumulated by the parser
|
||||
|
||||
class GCode:
    """In-memory model of a G-code program.

    Parses raw lines into Line objects, groups them into Layers, and computes
    bounding box, filament usage and an estimated print duration.
    NOTE(review): indentation reconstructed from a whitespace-mangled source;
    per the file header, duration estimates undershoot real prints by 20-40%.
    """

    line_class = Line

    # Parse results (filled by prepare()/_preprocess()).
    lines = None
    layers = None
    all_layers = None
    layer_idxs = None
    line_idxs = None
    append_layer = None
    append_layer_id = None

    # Parser state flags.
    imperial = False
    cutting = False
    relative = False
    relative_e = False
    current_tool = 0
    # Home position: current absolute position counted from machine origin
    home_x = 0
    home_y = 0
    home_z = 0
    # Current position: current absolute position counted from machine origin
    current_x = 0
    current_y = 0
    current_z = 0
    # For E this is the absolute position from machine start
    current_e = 0
    current_e_multi=[0]
    total_e = 0
    total_e_multi=[0]
    max_e = 0
    max_e_multi=[0]
    # Current feedrate
    current_f = 0
    # Offset: current offset between the machine origin and the machine current
    # absolute coordinate system (as shifted by G92s)
    offset_x = 0
    offset_y = 0
    offset_z = 0
    offset_e = 0
    offset_e_multi = [0]

    # Expected behavior:
    # - G28 X => X axis is homed, offset_x <- 0, current_x <- home_x
    # - G92 Xk => X axis does not move, so current_x does not change
    #   and offset_x <- current_x - k,
    # - absolute G1 Xk => X axis moves, current_x <- offset_x + k
    # How to get...
    # current abs X from machine origin: current_x
    # current abs X in machine current coordinate system: current_x - offset_x

    filament_length = None
    filament_length_multi=[0]
    duration = None
    xmin = None
    xmax = None
    ymin = None
    ymax = None
    zmin = None
    zmax = None
    width = None
    depth = None
    height = None

    est_layer_height = None

    # abs_x is the current absolute X in machine current coordinate system
    # (after the various G92 transformations) and can be used to store the
    # absolute position of the head at a given time
    def _get_abs_x(self):
        return self.current_x - self.offset_x
    abs_x = property(_get_abs_x)

    def _get_abs_y(self):
        return self.current_y - self.offset_y
    abs_y = property(_get_abs_y)

    def _get_abs_z(self):
        return self.current_z - self.offset_z
    abs_z = property(_get_abs_z)

    def _get_abs_e(self):
        return self.current_e - self.offset_e
    abs_e = property(_get_abs_e)

    def _get_abs_e_multi(self,i):
        return self.current_e_multi[i] - self.offset_e_multi[i]
    # NOTE(review): this re-binds abs_e to the same single-extruder getter as
    # above — looks like a copy-paste leftover; _get_abs_e_multi is unexposed.
    abs_e = property(_get_abs_e)

    def _get_abs_pos(self):
        return (self.abs_x, self.abs_y, self.abs_z)
    abs_pos = property(_get_abs_pos)

    def _get_current_pos(self):
        return (self.current_x, self.current_y, self.current_z)
    current_pos = property(_get_current_pos)

    def _get_home_pos(self):
        return (self.home_x, self.home_y, self.home_z)

    def _set_home_pos(self, home_pos):
        if home_pos:
            self.home_x, self.home_y, self.home_z = home_pos
    home_pos = property(_get_home_pos, _set_home_pos)

    def _get_layers_count(self):
        return len(self.all_zs)
    layers_count = property(_get_layers_count)

    def __init__(self, data = None, home_pos = None,
                 layer_callback = None, deferred = False,
                 cutting_as_extrusion = False):
        """Build a model from an iterable of raw G-code strings.

        With deferred=True, parsing is skipped; call prepare() later.
        cutting_as_extrusion marks moves made while a spindle/laser is on
        (M3/M4) as extruding, for display purposes.
        """
        self.cutting_as_extrusion = cutting_as_extrusion
        if not deferred:
            self.prepare(data, home_pos, layer_callback)

    def prepare(self, data = None, home_pos = None, layer_callback = None):
        """Parse data into Line objects and build layers; with no data,
        initialize empty structures instead."""
        self.home_pos = home_pos
        if data:
            line_class = self.line_class
            self.lines = [line_class(l2) for l2 in
                          (l.strip() for l in data)
                          if l2]
            self._preprocess(build_layers = True,
                             layer_callback = layer_callback)
        else:
            self.lines = []
            self.append_layer_id = 0
            self.append_layer = Layer([])
            self.all_layers = [self.append_layer]
            self.all_zs = set()
            self.layers = {}
            self.layer_idxs = array('I', [])
            self.line_idxs = array('I', [])

    def has_index(self, i):
        # True when i is a valid global line index.
        return i < len(self)

    def __len__(self):
        return len(self.line_idxs)

    def __iter__(self):
        return self.lines.__iter__()

    def prepend_to_layer(self, commands, layer_idx):
        """Insert raw command strings at the start of layer layer_idx,
        updating the flat line list and the index arrays."""
        # Prepend commands in reverse order
        commands = [c.strip() for c in commands[::-1] if c.strip()]
        layer = self.all_layers[layer_idx]
        # Find start index to append lines
        # and end index to append new indices
        start_index = self.layer_idxs.index(layer_idx)
        for i in range(start_index, len(self.layer_idxs)):
            if self.layer_idxs[i] != layer_idx:
                end_index = i
                break
        else:
            end_index = i + 1
        end_line = self.line_idxs[end_index - 1]
        for i, command in enumerate(commands):
            gline = Line(command)
            # Split to get command
            split(gline)
            # Force is_move to False
            gline.is_move = False
            # Insert gline at beginning of layer
            layer.insert(0, gline)
            # Insert gline at beginning of list
            self.lines.insert(start_index, gline)
            # Update indices arrays & global gcodes list
            self.layer_idxs.insert(end_index + i, layer_idx)
            self.line_idxs.insert(end_index + i, end_line + i + 1)
        return commands[::-1]

    def rewrite_layer(self, commands, layer_idx):
        """Replace the whole contents of layer layer_idx with the given raw
        command strings, rebuilding the affected index ranges."""
        # Prepend commands in reverse order
        commands = [c.strip() for c in commands[::-1] if c.strip()]
        layer = self.all_layers[layer_idx]
        # Find start index to append lines
        # and end index to append new indices
        start_index = self.layer_idxs.index(layer_idx)
        for i in range(start_index, len(self.layer_idxs)):
            if self.layer_idxs[i] != layer_idx:
                end_index = i
                break
        else:
            end_index = i + 1
        self.layer_idxs = self.layer_idxs[:start_index] + array('I', len(commands) * [layer_idx]) + self.layer_idxs[end_index:]
        self.line_idxs = self.line_idxs[:start_index] + array('I', range(len(commands))) + self.line_idxs[end_index:]
        del self.lines[start_index:end_index]
        del layer[:]
        for i, command in enumerate(commands):
            gline = Line(command)
            # Split to get command
            split(gline)
            # Force is_move to False
            gline.is_move = False
            # Insert gline at beginning of layer
            layer.insert(0, gline)
            # Insert gline at beginning of list
            self.lines.insert(start_index, gline)
        return commands[::-1]

    def append(self, command, store = True):
        """Parse a single raw command and (optionally) store it in the
        trailing append layer. Returns the created Line, or None for
        whitespace-only input."""
        command = command.strip()
        if not command:
            return
        gline = Line(command)
        self._preprocess([gline])
        if store:
            self.lines.append(gline)
            self.append_layer.append(gline)
            self.layer_idxs.append(self.append_layer_id)
            self.line_idxs.append(len(self.append_layer)-1)
        return gline

    def _preprocess(self, lines = None, build_layers = False,
                    layer_callback = None):
        """Checks for imperial/relativeness settings and tool changes"""
        if not lines:
            lines = self.lines
        # Mirror instance state into locals for speed inside the hot loop.
        imperial = self.imperial
        relative = self.relative
        relative_e = self.relative_e
        current_tool = self.current_tool
        current_x = self.current_x
        current_y = self.current_y
        current_z = self.current_z
        offset_x = self.offset_x
        offset_y = self.offset_y
        offset_z = self.offset_z

        # Extrusion computation
        current_e = self.current_e
        offset_e = self.offset_e
        total_e = self.total_e
        max_e = self.max_e
        cutting = self.cutting

        current_e_multi = self.current_e_multi[current_tool]
        offset_e_multi = self.offset_e_multi[current_tool]
        total_e_multi = self.total_e_multi[current_tool]
        max_e_multi = self.max_e_multi[current_tool]

        # Store this one out of the build_layers scope for efficiency
        cur_layer_has_extrusion = False

        # Initialize layers and other global computations
        if build_layers:
            # Bounding box computation
            xmin = float("inf")
            ymin = float("inf")
            zmin = 0
            xmax = float("-inf")
            ymax = float("-inf")
            zmax = float("-inf")
            # Also compute extrusion-only values
            xmin_e = float("inf")
            ymin_e = float("inf")
            xmax_e = float("-inf")
            ymax_e = float("-inf")

            # Duration estimation
            # TODO:
            # get device caps from firmware: max speed, acceleration/axis
            # (including extruder)
            # calculate the maximum move duration accounting for above ;)
            lastx = lasty = lastz = None
            laste = lastf = 0
            lastdx = 0
            lastdy = 0
            x = y = e = f = 0.0
            currenttravel = 0.0
            moveduration = 0.0
            totalduration = 0.0
            acceleration = 2000.0  # mm/s^2
            layerbeginduration = 0.0

            # Initialize layers
            all_layers = self.all_layers = []
            all_zs = self.all_zs = set()
            layer_idxs = self.layer_idxs = []
            line_idxs = self.line_idxs = []

        last_layer_z = None
        prev_z = None
        cur_z = None
        cur_lines = []

        def append_lines(lines, isEnd):
            # Flush cur_lines into the current (or a new) Layer.
            if not build_layers:
                return
            nonlocal layerbeginduration, last_layer_z
            if cur_layer_has_extrusion and prev_z != last_layer_z \
               or not all_layers:
                layer = Layer([], prev_z)
                last_layer_z = prev_z
                finished_layer = len(all_layers)-1 if all_layers else None
                all_layers.append(layer)
                all_zs.add(prev_z)
            else:
                layer = all_layers[-1]
                finished_layer = None
            layer_id = len(all_layers)-1
            layer_line = len(layer)
            for i, ln in enumerate(lines):
                layer.append(ln)
                layer_idxs.append(layer_id)
                line_idxs.append(layer_line+i)
            layer.duration += totalduration - layerbeginduration
            layerbeginduration = totalduration
            if layer_callback:
                # we finish a layer when inserting the next
                if finished_layer is not None:
                    layer_callback(self, finished_layer)
                # notify about end layer, there will not be next
                if isEnd:
                    layer_callback(self, layer_id)

        if self.line_class != Line:
            get_line = lambda l: Line(l.raw)
        else:
            get_line = lambda l: l
        for true_line in lines:
            # # Parse line
            # Use a heavy copy of the light line to preprocess
            line = get_line(true_line)
            split_raw = split(line)
            if line.command:
                # Update properties
                if line.is_move:
                    line.relative = relative
                    line.relative_e = relative_e
                    line.current_tool = current_tool
                elif line.command == "G20":
                    imperial = True
                elif line.command == "G21":
                    imperial = False
                elif line.command == "G90":
                    relative = False
                    relative_e = False
                elif line.command == "G91":
                    relative = True
                    relative_e = True
                elif line.command == "M82":
                    relative_e = False
                elif line.command == "M83":
                    relative_e = True
                elif line.command[0] == "T":
                    try:
                        current_tool = int(line.command[1:])
                    except:
                        pass #handle T? by treating it as no tool change
                    while current_tool+1 > len(self.current_e_multi):
                        self.current_e_multi+=[0]
                        self.offset_e_multi+=[0]
                        self.total_e_multi+=[0]
                        self.max_e_multi+=[0]
                elif line.command == "M3" or line.command == "M4":
                    cutting = True
                elif line.command == "M5":
                    cutting = False

                # Re-read per-tool extrusion state (tool may have changed).
                current_e_multi = self.current_e_multi[current_tool]
                offset_e_multi = self.offset_e_multi[current_tool]
                total_e_multi = self.total_e_multi[current_tool]
                max_e_multi = self.max_e_multi[current_tool]

                if line.command[0] == "G":
                    parse_coordinates(line, split_raw, imperial)

                # Compute current position
                if line.is_move:
                    x = line.x
                    y = line.y
                    z = line.z

                    if line.f is not None:
                        self.current_f = line.f

                    if line.relative:
                        x = current_x + (x or 0)
                        y = current_y + (y or 0)
                        z = current_z + (z or 0)
                    else:
                        if x is not None: x = x + offset_x
                        if y is not None: y = y + offset_y
                        if z is not None: z = z + offset_z

                    if x is not None: current_x = x
                    if y is not None: current_y = y
                    if z is not None: current_z = z

                elif line.command == "G28":
                    home_all = not any([line.x, line.y, line.z])
                    if home_all or line.x is not None:
                        offset_x = 0
                        current_x = self.home_x
                    if home_all or line.y is not None:
                        offset_y = 0
                        current_y = self.home_y
                    if home_all or line.z is not None:
                        offset_z = 0
                        current_z = self.home_z

                elif line.command == "G92":
                    if line.x is not None: offset_x = current_x - line.x
                    if line.y is not None: offset_y = current_y - line.y
                    if line.z is not None: offset_z = current_z - line.z

                line.current_x = current_x
                line.current_y = current_y
                line.current_z = current_z

                # # Process extrusion
                if line.e is not None:
                    if line.is_move:
                        if line.relative_e:
                            line.extruding = line.e > 0
                            total_e += line.e
                            current_e += line.e
                            total_e_multi += line.e
                            current_e_multi += line.e
                        else:
                            new_e = line.e + offset_e
                            line.extruding = new_e > current_e
                            total_e += new_e - current_e
                            current_e = new_e
                            new_e_multi = line.e + offset_e_multi
                            total_e_multi += new_e_multi - current_e_multi
                            current_e_multi = new_e_multi

                        max_e = max(max_e, total_e)
                        max_e_multi=max(max_e_multi, total_e_multi)
                        cur_layer_has_extrusion |= line.extruding and (line.x is not None or line.y is not None)
                    elif line.command == "G92":
                        offset_e = current_e - line.e
                        offset_e_multi = current_e_multi - line.e
                if cutting and self.cutting_as_extrusion:
                    line.extruding = True

                self.current_e_multi[current_tool]=current_e_multi
                self.offset_e_multi[current_tool]=offset_e_multi
                self.max_e_multi[current_tool]=max_e_multi
                self.total_e_multi[current_tool]=total_e_multi

                # # Create layers and perform global computations
                if build_layers:
                    # Update bounding box
                    if line.is_move:
                        if line.extruding:
                            if line.current_x is not None:
                                # G0 X10 ; G1 X20 E5 results in 10..20 even as G0 is not extruding
                                xmin_e = min(xmin_e, line.current_x, xmin_e if lastx is None else lastx)
                                xmax_e = max(xmax_e, line.current_x, xmax_e if lastx is None else lastx)
                            if line.current_y is not None:
                                ymin_e = min(ymin_e, line.current_y, ymin_e if lasty is None else lasty)
                                ymax_e = max(ymax_e, line.current_y, ymax_e if lasty is None else lasty)
                        if max_e <= 0:
                            if line.current_x is not None:
                                xmin = min(xmin, line.current_x)
                                xmax = max(xmax, line.current_x)
                            if line.current_y is not None:
                                ymin = min(ymin, line.current_y)
                                ymax = max(ymax, line.current_y)

                    # Compute duration
                    if line.command == "G0" or line.command == "G1":
                        x = line.x if line.x is not None else (lastx or 0)
                        y = line.y if line.y is not None else (lasty or 0)
                        z = line.z if line.z is not None else (lastz or 0)
                        e = line.e if line.e is not None else laste
                        # mm/s vs mm/m => divide by 60
                        f = line.f / 60.0 if line.f is not None else lastf

                        # given last feedrate and current feedrate calculate the
                        # distance needed to achieve current feedrate.
                        # if travel is longer than req'd distance, then subtract
                        # distance to achieve full speed, and add the time it took
                        # to get there.
                        # then calculate the time taken to complete the remaining
                        # distance

                        # FIXME: this code has been proven to be super wrong when 2
                        # subsquent moves are in opposite directions, as requested
                        # speed is constant but printer has to fully decellerate
                        # and reaccelerate
                        # The following code tries to fix it by forcing a full
                        # reacceleration if this move is in the opposite direction
                        # of the previous one
                        dx = x - (lastx or 0)
                        dy = y - (lasty or 0)
                        if dx * lastdx + dy * lastdy <= 0:
                            lastf = 0

                        currenttravel = math.hypot(dx, dy)
                        if currenttravel == 0:
                            if line.z is not None:
                                currenttravel = abs(line.z) if line.relative else abs(line.z - (lastz or 0))
                            elif line.e is not None:
                                currenttravel = abs(line.e) if line.relative_e else abs(line.e - laste)
                        # Feedrate hasn't changed, no acceleration/decceleration planned
                        if f == lastf:
                            moveduration = currenttravel / f if f != 0 else 0.
                        else:
                            # FIXME: review this better
                            # this looks wrong : there's little chance that the feedrate we'll decelerate to is the previous feedrate
                            # shouldn't we instead look at three consecutive moves ?
                            distance = 2 * abs(((lastf + f) * (f - lastf) * 0.5) / acceleration)  # multiply by 2 because we have to accelerate and decelerate
                            if distance <= currenttravel and lastf + f != 0 and f != 0:
                                moveduration = 2 * distance / (lastf + f)  # This is distance / mean(lastf, f)
                                moveduration += (currenttravel - distance) / f
                            else:
                                moveduration = 2 * currenttravel / (lastf + f)  # This is currenttravel / mean(lastf, f)
                                # FIXME: probably a little bit optimistic, but probably a much better estimate than the previous one:
                                # moveduration = math.sqrt(2 * distance / acceleration) # probably buggy : not taking actual travel into account

                        lastdx = dx
                        lastdy = dy

                        totalduration += moveduration

                        lastx = x
                        lasty = y
                        lastz = z
                        laste = e
                        lastf = f
                    elif line.command == "G4":
                        moveduration = P(line)
                        if moveduration:
                            moveduration /= 1000.0
                            totalduration += moveduration

                    # FIXME : looks like this needs to be tested with "lift Z on move"
                    if line.z is not None:
                        if line.command == "G92":
                            cur_z = line.z
                        elif line.is_move:
                            if line.relative and cur_z is not None:
                                cur_z += line.z
                            else:
                                cur_z = line.z

                    if cur_z != prev_z and cur_layer_has_extrusion:
                        append_lines(cur_lines, False)
                        cur_lines = []
                        cur_layer_has_extrusion = False

            if build_layers:
                cur_lines.append(true_line)
                prev_z = cur_z
        # ## Loop done

        # Store current status
        self.imperial = imperial
        self.relative = relative
        self.relative_e = relative_e
        self.current_tool = current_tool
        self.current_x = current_x
        self.current_y = current_y
        self.current_z = current_z
        self.offset_x = offset_x
        self.offset_y = offset_y
        self.offset_z = offset_z
        self.current_e = current_e
        self.offset_e = offset_e
        self.max_e = max_e
        self.total_e = total_e
        self.current_e_multi[current_tool]=current_e_multi
        self.offset_e_multi[current_tool]=offset_e_multi
        self.max_e_multi[current_tool]=max_e_multi
        self.total_e_multi[current_tool]=total_e_multi
        self.cutting = cutting

        # Finalize layers
        if build_layers:
            if cur_lines:
                append_lines(cur_lines, True)

            self.append_layer_id = len(all_layers)
            self.append_layer = Layer([])
            self.append_layer.duration = 0
            all_layers.append(self.append_layer)
            self.layer_idxs = array('I', layer_idxs)
            self.line_idxs = array('I', line_idxs)

            # Compute bounding box
            all_zs = self.all_zs.union({zmin}).difference({None})
            zmin = min(all_zs)
            zmax = max(all_zs)

            self.filament_length = self.max_e
            while len(self.filament_length_multi)<len(self.max_e_multi):
                self.filament_length_multi+=[0]
            for i in enumerate(self.max_e_multi):
                self.filament_length_multi[i[0]]=i[1]

            # With extrusion present, report the extrusion-only bounding box;
            # otherwise fall back to the travel bounding box.
            if self.filament_length > 0:
                self.xmin = xmin_e if not math.isinf(xmin_e) else 0
                self.xmax = xmax_e if not math.isinf(xmax_e) else 0
                self.ymin = ymin_e if not math.isinf(ymin_e) else 0
                self.ymax = ymax_e if not math.isinf(ymax_e) else 0
            else:
                self.xmin = xmin if not math.isinf(xmin) else 0
                self.xmax = xmax if not math.isinf(xmax) else 0
                self.ymin = ymin if not math.isinf(ymin) else 0
                self.ymax = ymax if not math.isinf(ymax) else 0
            self.zmin = zmin if not math.isinf(zmin) else 0
            self.zmax = zmax if not math.isinf(zmax) else 0
            self.width = self.xmax - self.xmin
            self.depth = self.ymax - self.ymin
            self.height = self.zmax - self.zmin

            # Finalize duration
            totaltime = datetime.timedelta(seconds = int(totalduration))
            self.duration = totaltime

    def idxs(self, i):
        # Map a global line index to (layer index, index within layer).
        return self.layer_idxs[i], self.line_idxs[i]

    def estimate_duration(self):
        # Returns (number of layers, datetime.timedelta print estimate).
        return self.layers_count, self.duration
||||
class LightGCode(GCode):
    # Memory-light variant: uses LightLine, which keeps only raw text and
    # command per line instead of full per-line coordinate state.
    line_class = LightLine
|
||||
|
||||
def main():
    """CLI entry point: parse the G-code file named on the command line and
    print its dimensions, filament usage, layer count and duration estimate."""
    if len(sys.argv) < 2:
        print("usage: %s filename.gcode" % sys.argv[0])
        return

    print("Line object size:", sys.getsizeof(Line("G0 X0")))
    print("Light line object size:", sys.getsizeof(LightLine("G0 X0")))
    # BUG FIX: mode "rU" was removed in Python 3.11 (raises ValueError); text
    # mode already performs universal newline translation. Also close the
    # file handle, which the original leaked.
    with open(sys.argv[1], "r") as gcode_file:
        gcode = GCode(gcode_file)

    print("Dimensions:")
    xdims = (gcode.xmin, gcode.xmax, gcode.width)
    print("\tX: %0.02f - %0.02f (%0.02f)" % xdims)
    ydims = (gcode.ymin, gcode.ymax, gcode.depth)
    print("\tY: %0.02f - %0.02f (%0.02f)" % ydims)
    zdims = (gcode.zmin, gcode.zmax, gcode.height)
    print("\tZ: %0.02f - %0.02f (%0.02f)" % zdims)
    print("Filament used: %0.02fmm" % gcode.filament_length)
    for tool, length in enumerate(gcode.filament_length_multi):
        print("E%d %0.02fmm" % (tool, length))
    print("Number of layers: %d" % gcode.layers_count)
    print("Estimated duration: %s" % gcode.estimate_duration()[1])

if __name__ == '__main__':
    main()
|
34
cg/freecad/Frames/model/connected_part_model.py
Normal file
34
cg/freecad/Frames/model/connected_part_model.py
Normal file
|
@ -0,0 +1,34 @@
|
|||
import BOPTools.JoinFeatures
|
||||
import FreeCAD as App
|
||||
import uuid
|
||||
|
||||
|
||||
class ConnectedPartModel:
    """Wraps a BOPTools "Connect" join feature built from one or more
    FreeCAD part objects."""

    name = None   # never assigned here; presumably set by callers — TODO confirm
    id = None     # generated feature name: 'part' + uuid4
    solid = None  # the created Connect document object (None on failure)

    def __init__(self, part) -> None:
        # `part` is a FreeCAD document object or a list of them.
        # Errors are printed and swallowed, leaving `solid` as None.
        try:
            self.id ='part' + str(uuid.uuid4())
            j = BOPTools.JoinFeatures.makeConnect(name=self.id)
            if (type(part) is list):
                j.Objects = part
            else:
                j.Objects = [part]
            # Execute the join immediately and clear the touched flag so the
            # later recompute does not redo it.
            j.Proxy.execute(j)
            j.purgeTouched()
            self.solid = j
            App.ActiveDocument.recompute()
        except Exception as e:
            print(e)
            pass

    def remove(self):
        # Best-effort removal of the join feature from the active document.
        # NOTE(review): Document.removeObject expects the object's Name;
        # passing Label only works while Label == Name — confirm.
        try:
            App.ActiveDocument.removeObject(self.solid.Label)
        except Exception as e:
            print(e)
||||
|
||||
|
|
@ -10,3 +10,4 @@ class FolderGenerator(Enum):
|
|||
MESHES = 'meshes'
|
||||
ASSETS = 'assets'
|
||||
SDF = 'sdf'
|
||||
ASSEMBlY = 'assembly'
|
||||
|
|
33
cg/freecad/Frames/model/join_mesh_model.py
Normal file
33
cg/freecad/Frames/model/join_mesh_model.py
Normal file
|
@ -0,0 +1,33 @@
|
|||
import FreeCAD
import Mesh
import FreeCAD as App
from model.mesh_part_model import MeshPartModel


class JoinMeshModel:
    """Merges the meshes of several MeshPartModel wrappers into one
    Mesh::Feature document object."""

    # Unique document object name: 'MergedMesh' + random int; set in __init__.
    id = None
    # The created Mesh::Feature object.
    mesh = None

    def __init__(self, meshesPartModels: list['MeshPartModel']) -> None:
        """Create a single merged mesh feature from every model in the list.

        Fixes over the original: the redundant function-local `import Mesh`
        (already imported at module level) and the unused `document` local
        were removed; behavior is unchanged.
        """
        from random import randrange
        # Collect the underlying Mesh data of each wrapper.
        meshes = [el.mesh.Mesh for el in meshesPartModels]

        self.id = 'MergedMesh' + str(randrange(1000000))
        merged_mesh = Mesh.Mesh()
        for el in meshes:
            merged_mesh.addMesh(el)

        new_obj = App.activeDocument().addObject("Mesh::Feature", self.id)
        new_obj.Mesh = merged_mesh
        new_obj.ViewObject.DisplayMode = "Flat Lines"  # Set display mode to flat lines
        self.mesh = new_obj

    def remove(self):
        """Remove the merged mesh feature from the active document
        (best effort: errors are printed, not raised)."""
        try:
            App.ActiveDocument.removeObject(self.id)
        except Exception as e:
            print(e)
32
cg/freecad/Frames/model/mesh_part_model.py
Normal file
32
cg/freecad/Frames/model/mesh_part_model.py
Normal file
|
@ -0,0 +1,32 @@
|
|||
import FreeCAD as App
import uuid
import Mesh
import Part
import PartGui
import MeshPart


class MeshPartModel:
    """Tessellates a solid part into a Mesh::Feature in the active document."""

    # Unique document object name: 'mesh' + random int; set in __init__.
    id = None
    # The created Mesh::Feature, or None if meshing failed.
    mesh = None

    def __init__(self, part) -> None:
        """Mesh `part`'s shape and add the result to the active document.

        Best-effort: on any FreeCAD error the exception is printed and
        `mesh` stays None.
        """
        try:
            from random import randrange
            self.id = 'mesh' + str(randrange(1000000))
            document = App.ActiveDocument
            mesh = document.addObject("Mesh::Feature", self.id)
            shape = Part.getShape(part, "")
            # LinearDeflection/AngularDeflection control mesh coarseness.
            # NOTE(review): LinearDeflection=20 is very coarse if units are
            # mm — confirm it is intentional for these models.
            mesh.Mesh = MeshPart.meshFromShape(
                Shape=shape, LinearDeflection=20, AngularDeflection=0.1, Relative=False)
            mesh.Label = self.id
            self.mesh = mesh
        except Exception as e:
            print(e)
            pass

    def remove(self):
        """Delete the mesh feature from the active document (best effort).

        NOTE(review): removeObject expects the internal *Name*; Label matches
        here only because it was explicitly set to self.id above — confirm.
        """
        try:
            App.ActiveDocument.removeObject(self.mesh.Label)
        except Exception as e:
            print(e)
|
31
cg/freecad/Frames/model/simple_copy_part_model.py
Normal file
31
cg/freecad/Frames/model/simple_copy_part_model.py
Normal file
|
@ -0,0 +1,31 @@
|
|||
import FreeCAD as App
import Part


class SimpleCopyPartModel:
    """Creates a non-parametric ('simple') copy of a part's shape as a new
    Part::Feature in the active document."""

    id = None        # random numeric string used as the new object's name
    copyLink = None  # unused placeholder — never assigned in this class
    label = None     # Label of the created Part::Feature
    part = None      # the created Part::Feature object, or None on failure

    def getPart(self):
        """Return the copied Part::Feature (None if construction failed)."""
        return self.part

    def __init__(self, part) -> None:
        """Copy `part`'s geometry into a fresh Part::Feature (best effort)."""
        try:
            from random import randrange
            self.id = str(randrange(1000000))
            childObj = part
            print(part)
            # getShape detaches the geometry from the source's parametric
            # history, yielding a plain shape.
            __shape = Part.getShape(
                childObj, '', needSubElement=False, refine=False)
            obj = App.ActiveDocument.addObject('Part::Feature', self.id)
            obj.Shape = __shape
            self.part = obj
            self.label = obj.Label
            App.ActiveDocument.recompute()
        except Exception as e:
            print(e)

    def remove(self):
        """Delete the copy from the active document.

        NOTE(review): removeObject expects the internal *Name*; passing the
        Label works only while Label == Name — confirm no renames occur.
        """
        App.ActiveDocument.removeObject(self.label)
98
cg/freecad/Frames/printETA.py
Normal file
98
cg/freecad/Frames/printETA.py
Normal file
|
@ -0,0 +1,98 @@
|
|||
|
||||
import os
|
||||
import subprocess
|
||||
import time
|
||||
|
||||
import FreeCAD
|
||||
import Mesh
|
||||
|
||||
|
||||
import gcoder
|
||||
|
||||
|
||||
def export_to_stl(doc, output_dir):
    """Export every Part::Feature in `doc` as an STL file into `output_dir`.

    Each object's shape is tessellated and written as '<Label>.stl'.
    Objects whose tessellation yields no points are skipped.
    """
    # Bug fix: 'Part' was referenced below but never imported anywhere in
    # this script, so the isinstance check raised NameError at runtime.
    import Part

    objects = doc.Objects

    for obj in objects:
        if isinstance(obj, Part.Feature):
            stl_path = os.path.join(output_dir, obj.Label + ".stl")
            # tessellate(0.1) returns (points, facets) at that deflection.
            mesh = obj.Shape.tessellate(0.1)

            # Build a Mesh object from the tessellated geometry.
            mesh_obj = Mesh.Mesh(mesh)

            # Only export meshes that actually contain data.
            if len(mesh_obj.Points) > 0:
                mesh_obj.write(stl_path)
|
||||
def create_gcode(stl_dir, output_dir, config_path="/home/mark-voltov/my_config.ini"):
    """Slice every .stl file in `stl_dir` into a .gcode file in `output_dir`.

    Args:
        stl_dir: directory scanned (non-recursively) for '*.stl' files.
        output_dir: directory receiving the generated '*.gcode' files.
        config_path: Slic3r profile to load.  Parameterized so the script is
            no longer tied to one user's home directory; the old hard-coded
            path is kept as the default for backward compatibility.
    """
    stl_files = [f for f in os.listdir(stl_dir) if f.endswith(".stl")]

    for stl_file in stl_files:
        stl_path = os.path.join(stl_dir, stl_file)

        gcode_file = stl_file.replace(".stl", ".gcode")
        gcode_path = os.path.join(output_dir, gcode_file)

        # Slic3r CLI (Ubuntu 'slic3r' package): load profile, emit G-code.
        cmd = ["slic3r", "--load", config_path, "-o", gcode_path, stl_path]

        # check=True: a failed slice now aborts loudly instead of being
        # silently ignored and producing a missing/stale .gcode file.
        subprocess.run(cmd, check=True)
|
||||
def get_print_duration(gcode_dir):
    """Sum estimated print durations over all .gcode files in `gcode_dir`.

    Scans each file for the first '; estimated printing time' comment and
    converts its value (assumed to be minutes) to seconds.

    Returns:
        Total estimated duration in seconds (0 if no estimates are found).
    """
    # Bug fix: a stray 'gcoder.G' expression stood here — a no-op at best,
    # and an AttributeError (module has no such attribute) at worst.  It
    # contributed nothing to the computation and has been removed.
    gcode_files = [f for f in os.listdir(gcode_dir) if f.endswith(".gcode")]

    total_duration = 0

    for gcode_file in gcode_files:
        gcode_path = os.path.join(gcode_dir, gcode_file)

        with open(gcode_path, "r") as file:
            lines = file.readlines()

        for line in lines:
            if line.startswith("; estimated printing time"):
                # NOTE(review): assumes the line looks like
                # '; estimated printing time: <minutes>'.  Slicer variants
                # that emit '= 1h 2m 3s' would raise ValueError here —
                # confirm the format your Slic3r version produces.
                duration = float(line.split(":")[1].strip()) * 60
                total_duration += duration
                break

    return total_duration
|
||||
|
||||
# Script body: export the active FreeCAD document to STL files, slice each
# one with Slic3r, then report the total estimated print duration.

# Bug fix: this script imports 'FreeCAD' (not 'FreeCAD as App'), so the
# original 'App.ActiveDocument' reference raised NameError at runtime.
file_location = FreeCAD.ActiveDocument.FileName
file_name = os.path.basename(file_location)  # e.g. eds_report.csv
location = os.path.dirname(file_location)


# Path to the FreeCAD document
doc_path = file_location

# Directory for the generated STL files
stl_dir = location + '/stl'

# Directory for the generated G-code files
gcode_dir = location + '/gcode'

# Open the FreeCAD document
doc = FreeCAD.open(doc_path)

# Export the model to STL files
export_to_stl(doc, stl_dir)
print("STL файлы успешно созданы.")

# Create G-code files with Slic3r
create_gcode(stl_dir, gcode_dir)
print("G-code файлы успешно созданы.")

# Get the estimated print duration
print_duration = get_print_duration(gcode_dir)
print("Оценочная длительность печати: {} секунд.".format(print_duration))

# Close the FreeCAD document.
# Bug fix: FreeCAD.closeDocument takes the document *name* (a string), not
# the Document object itself.
FreeCAD.closeDocument(doc.Name)
||||
|
|
@ -1,4 +1,7 @@
|
|||
import FreeCAD
|
||||
from usecases.export_assembly_them_all_usecase import ExportAssemblyThemAllUseCase
|
||||
|
||||
from usecases.export_usecase import EXPORT_TYPES
|
||||
from usecases.export_usecase import ExportUseCase
|
||||
from usecases.get_sdf_geometry_usecase import SdfGeometryUseCase
|
||||
from usecases.assembly_parse_usecase import AssemblyParseUseCase
|
||||
|
@ -41,16 +44,17 @@ class RobossemblerFreeCadExportScenario:
|
|||
os.makedirs(directory)
|
||||
|
||||
__objs__ = FreeCAD.ActiveDocument.RootObjects
|
||||
|
||||
os.makedirs(directory + '/' + FolderGenerator.ASSETS.value)
|
||||
os.makedirs(directory + '/' + FolderGenerator.SDF.value)
|
||||
os.makedirs(directory + '/' + FolderGenerator.SDF.value + '/' + FolderGenerator.MESHES.value)
|
||||
directoryExport = directory + '/'
|
||||
os.makedirs(directoryExport + FolderGenerator.ASSETS.value)
|
||||
os.makedirs(directoryExport + FolderGenerator.SDF.value)
|
||||
os.makedirs(directoryExport + FolderGenerator.SDF.value + '/' + FolderGenerator.MESHES.value)
|
||||
os.makedirs(directoryExport + FolderGenerator.ASSEMBlY.value)
|
||||
f = open(directory + "/step-structure.json", "w")
|
||||
f.write(AssemblyParseUseCase().toJson())
|
||||
f.close()
|
||||
self.geometry(directory)
|
||||
|
||||
ImportGui.export(__objs__, directory + '/' + 'assembly.step')
|
||||
ExportAssemblyThemAllUseCase().call(directoryExport)
|
||||
ImportGui.export(__objs__, directoryExport + 'assembly.step')
|
||||
|
||||
shutil.make_archive(directory, 'zip', directory)
|
||||
|
||||
|
@ -58,8 +62,8 @@ class RobossemblerFreeCadExportScenario:
|
|||
return True
|
||||
|
||||
def geometry(self, outPutsPath: str):
|
||||
meshesExportUseCase = ExportUseCase.call(outPutsPath)
|
||||
for el in SdfGeometryUseCase.call(meshesExportUseCase):
|
||||
exportUseCase = ExportUseCase.call(outPutsPath,EXPORT_TYPES.OBJ)
|
||||
for el in SdfGeometryUseCase().call(exportUseCase):
|
||||
FS.writeFile(el.toJSON(), outPutsPath +
|
||||
'/' + FolderGenerator.ASSETS.value + '/', el.name + '.json',)
|
||||
|
||||
|
|
|
@ -1,20 +1,40 @@
|
|||
import FreeCAD as App
|
||||
from helper.is_solid import is_object_solid
|
||||
|
||||
def is_object_solid(obj):
    """Return True when `obj` is a plain document object carrying at least
    one solid.

    Group containers, objects without a Shape, and shapes with no Solids
    all report False.
    """
    if not isinstance(obj, App.DocumentObject) or hasattr(obj, 'Group'):
        return False

    shape = getattr(obj, 'Shape', None)
    # (A check on Shape.Mass was present here historically but disabled.)
    if shape is None or not hasattr(shape, 'Solids'):
        return False

    return len(shape.Solids) > 0
|
||||
|
||||
class AssemblyParseUseCase:
|
||||
_parts = []
|
||||
|
||||
_asm = []
|
||||
|
||||
    def getAsm(self):
        # Accessor for the collected assembly labels (class-level list).
        return self._asm
||||
|
||||
    def __init__(self) -> None:
        """Parse the active document, retrying once if nothing was collected.

        NOTE(review): _asm is a *class-level* list shared by all instances,
        so constructing a second instance appends into the same list —
        confirm that accumulation is intended.
        """
        self.initParse()
        if (self._asm.__len__() == 0):
            self.initParse()
        pass
||||
|
||||
|
||||
|
||||
    def initParse(self):
        # Record the Label of every solid object in the active document.
        for el in App.ActiveDocument.Objects:
            if (is_object_solid(el)):
                self._asm.append(el.Label)
||||
|
||||
def toJson(self):
|
||||
|
@ -28,15 +48,11 @@ class AssemblyParseUseCase:
|
|||
if groupLink.get(el.Label) == None:
|
||||
groupLink[el.Label] = []
|
||||
for i in el.Group:
|
||||
|
||||
if str(i).find('Pad') != -1:
|
||||
groupLink[el.Label].append(i)
|
||||
if groupLink.__len__() == 0:
|
||||
return None
|
||||
return None
|
||||
return groupLink
|
||||
|
||||
    def getLinkedProperty(self):
        # Alias of getAsm(): returns the shared class-level label list.
        return self._asm
|
||||
|
||||
|
||||
|
||||
|
|
|
@ -0,0 +1,92 @@
|
|||
|
||||
|
||||
from typing import List
|
||||
import FreeCAD as App
|
||||
import Part
|
||||
from model.join_mesh_model import JoinMeshModel
|
||||
from model.mesh_part_model import MeshPartModel
|
||||
from helper.fs import FS
|
||||
from helper.is_solid import is_object_solid
|
||||
from model.simple_copy_part_model import SimpleCopyPartModel
|
||||
from model.files_generator import FolderGenerator
|
||||
from usecases.assembly_parse_usecase import AssemblyParseUseCase
|
||||
import importOBJ
|
||||
import os
|
||||
import json
|
||||
|
||||
|
||||
class ExportAssemblyThemAllUseCase:
    """Exports the assembly in 'Assemble Them All' layout: for each assembly
    step k > 0, a folder '0000<k>' containing 1.obj (the merged mesh of all
    parts assembled so far), 0.obj (the next part to insert), and
    translation.json (bounding-box max coordinates of both)."""

    def call(self, path):
        """Write the per-step OBJ meshes and translation data under
        `path` + the assembly folder.  Mutates the active FreeCAD document
        (creates and removes Compound/copy/mesh objects as it goes)."""
        # Ordered list of solid labels, as parsed from the active document.
        assembly = AssemblyParseUseCase().getAsm()
        # Step index -> {'child': label to insert, 'parents': labels already
        # assembled before this step}.  Step 0 has no parents and is skipped.
        asmStructure = {}
        inc = 0
        for el in assembly:
            if (inc != 0):
                asmStructure[inc] = {
                    "child": el,
                    "parents": assembly[0:inc]
                }
            inc += 1
        objectsFreeCad = App.ActiveDocument.Objects
        # Same structure as asmStructure, but resolved from labels to the
        # actual document objects.
        asmSolids = {}
        for k, v in asmStructure.items():
            assemblyParentList = v['parents']
            assemblyChild = v['child']
            for el in assemblyParentList:
                for solid in objectsFreeCad:
                    if (el == solid.Label):
                        if (asmSolids.get(k) is None):

                            asmSolids[k] = {'parents': [], 'child': list(
                                filter(lambda x: x.Label == assemblyChild, objectsFreeCad))[0]}

                        asmSolids[k]['parents'].append(solid)

        inc = 0
        for k, v in asmSolids.items():
            # geometry['0']: bbox max of the assembled-so-far compound;
            # geometry['1']: bbox max of the child part.
            geometry = {"0": [], "1": []}
            if (k != 0):
                # Temporary compound of simple copies of all parent parts,
                # used only to measure the combined bounding box.
                App.activeDocument().addObject("Part::Compound", "Compound")

                copyLinks = list(
                    map(lambda el: SimpleCopyPartModel(el), v['parents']))

                if copyLinks != None:
                    App.activeDocument().Compound.Links = list(
                        map(lambda el: el.getPart(), copyLinks))

                object = App.activeDocument().getObject('Compound')
                boundBox = object.Shape.BoundBox
                geometry['0'].append(boundBox.XMax)
                geometry['0'].append(boundBox.YMax)
                geometry['0'].append(boundBox.ZMax)

                # One output folder per step, e.g. '<path>assembly/00001'.
                os.makedirs(
                    path + FolderGenerator.ASSEMBlY.value + '/' + '0000' + str(k))
                boundBoxChild = v['child'].Shape.BoundBox
                geometry['1'].append(boundBoxChild.XMax)
                geometry['1'].append(boundBoxChild.YMax)
                geometry['1'].append(boundBoxChild.ZMax)
                meshParents = []

                # Mesh every parent, merge into one mesh, export as 1.obj,
                # then clean the temporary mesh objects back out.
                for el in v['parents']:
                    meshParents.append(MeshPartModel(el))
                joinMesh = JoinMeshModel(meshParents)
                for el in meshParents:
                    el.remove()
                importOBJ.export(joinMesh.mesh, path + FolderGenerator.ASSEMBlY.value +
                                 '/' + '0000' + str(k) + '/' + str(1) + '.obj')
                joinMesh.remove()
                importOBJ.export(v['child'], path + FolderGenerator.ASSEMBlY.value +
                                 '/' + '0000' + str(k) + '/' + str(0) + '.obj')
                FS.writeFile(json.dumps(geometry), path + FolderGenerator.ASSEMBlY.value +
                             '/' + '0000' + str(k) + '/', 'translation.json')

                # Remove the temporary compound and the simple copies.
                App.ActiveDocument.removeObject("Compound")
                for el in copyLinks:
                    el.remove()
                App.activeDocument().recompute()
            inc += 1
||||
|
||||
|
|
@ -1,16 +1,36 @@
|
|||
# import importDAE
|
||||
import importDAE
|
||||
import importOBJ
|
||||
import Mesh
|
||||
import FreeCAD as App
|
||||
from model.files_generator import FolderGenerator
|
||||
from helper.is_solid import is_object_solid
|
||||
import Mesh
|
||||
from enum import Enum
|
||||
|
||||
class EXPORT_TYPES(Enum):
    # Mesh export formats understood by ExportUseCase.call().
    STL = 'STL'
    # NOTE(review): 'DAO' presumably means the Collada .dae format ('DAE');
    # the member name is kept as-is because callers reference EXPORT_TYPES.DAO.
    DAO = 'DAO'
    OBJ = 'OBJ'
|
||||
|
||||
|
||||
class ExportUseCase:
|
||||
def call(path):
|
||||
def call(path: str, type: EXPORT_TYPES):
|
||||
meshes = {}
|
||||
for el in App.ActiveDocument.Objects:
|
||||
if (is_object_solid(el)):
|
||||
Mesh.export([el], path + '/' + FolderGenerator.SDF.value +
|
||||
'/' + FolderGenerator.MESHES.value + '/' + el.Label + '.dae')
|
||||
meshes[el.Label] = '/' + FolderGenerator.MESHES.value + \
|
||||
'/' + el.Label + '.dae'
|
||||
match type.value:
|
||||
case EXPORT_TYPES.STL.value:
|
||||
Mesh.export([el], path + '/' + FolderGenerator.SDF.value +
|
||||
'/' + FolderGenerator.MESHES.value + '/' + el.Label + '.stl')
|
||||
meshes[el.Label] = '/' + FolderGenerator.MESHES.value + \
|
||||
'/' + el.Label + '.stl'
|
||||
|
||||
case EXPORT_TYPES.DAO.value:
|
||||
importDAE.export([el], path + '/' + FolderGenerator.SDF.value +
|
||||
'/' + FolderGenerator.MESHES.value + '/' + el.Label + '.dae')
|
||||
case EXPORT_TYPES.OBJ.value:
|
||||
importOBJ.export([el], path + '/' + FolderGenerator.SDF.value +
|
||||
'/' + FolderGenerator.MESHES.value + '/' + el.Label + '.obj')
|
||||
meshes[el.Label] = '/' + FolderGenerator.MESHES.value + \
|
||||
'/' + el.Label + '.obj'
|
||||
|
||||
return meshes
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue