Remaining modules: production stock-in, sales stock-out

2025-05-20 16:07:49 +08:00
parent 933ddab8f3
commit b1d8dec263
299 changed files with 38798 additions and 0 deletions

66
node_modules/js-binary-schema-parser/lib/index.js generated vendored Normal file

@@ -0,0 +1,66 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.loop = exports.conditional = exports.parse = void 0;
var parse = function parse(stream, schema) {
var result = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
var parent = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : result;
if (Array.isArray(schema)) {
schema.forEach(function (partSchema) {
return parse(stream, partSchema, result, parent);
});
} else if (typeof schema === 'function') {
schema(stream, result, parent, parse);
} else {
var key = Object.keys(schema)[0];
if (Array.isArray(schema[key])) {
parent[key] = {};
parse(stream, schema[key], result, parent[key]);
} else {
parent[key] = schema[key](stream, result, parent, parse);
}
}
return result;
};
exports.parse = parse;
var conditional = function conditional(schema, conditionFunc) {
return function (stream, result, parent, parse) {
if (conditionFunc(stream, result, parent)) {
parse(stream, schema, result, parent);
}
};
};
exports.conditional = conditional;
var loop = function loop(schema, continueFunc) {
return function (stream, result, parent, parse) {
var arr = [];
var lastStreamPos = stream.pos;
while (continueFunc(stream, result, parent)) {
var newParent = {};
parse(stream, schema, result, newParent);
// If the whole file has been consumed without a terminator, the stream
// position stops advancing; bail out here to avoid an infinite loop.
if (stream.pos === lastStreamPos) {
break;
}
lastStreamPos = stream.pos;
arr.push(newParent);
}
return arr;
};
};
exports.loop = loop;
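
The three exports above (parse, conditional, loop) compose into a small declarative binary parser. A minimal usage sketch follows; it is not part of the vendored file and assumes standard Node resolution of these lib/ paths plus the uint8 helpers defined in the next file:

var parser = require("js-binary-schema-parser/lib");
var uint8 = require("js-binary-schema-parser/lib/parsers/uint8");

// Schema: a 2-byte tag, a little-endian 16-bit length, then `length` payload bytes.
var demoSchema = [{
  tag: uint8.readString(2)
}, {
  length: uint8.readUnsigned(true)
}, {
  payload: function (stream, result) {
    return uint8.readBytes(result.length)(stream);
  }
}];

var stream = uint8.buildStream(new Uint8Array([0x48, 0x49, 0x02, 0x00, 0xaa, 0xbb]));
console.log(parser.parse(stream, demoSchema));
// -> { tag: 'HI', length: 2, payload: Uint8Array(2) [ 170, 187 ] }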

122
node_modules/js-binary-schema-parser/lib/parsers/uint8.js generated vendored Normal file

@@ -0,0 +1,122 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.readBits = exports.readArray = exports.readUnsigned = exports.readString = exports.peekBytes = exports.readBytes = exports.peekByte = exports.readByte = exports.buildStream = void 0;
// Default stream and parsers for the Uint8Array data type
var buildStream = function buildStream(uint8Data) {
return {
data: uint8Data,
pos: 0
};
};
exports.buildStream = buildStream;
var readByte = function readByte() {
return function (stream) {
return stream.data[stream.pos++];
};
};
exports.readByte = readByte;
var peekByte = function peekByte() {
var offset = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 0;
return function (stream) {
return stream.data[stream.pos + offset];
};
};
exports.peekByte = peekByte;
var readBytes = function readBytes(length) {
return function (stream) {
return stream.data.subarray(stream.pos, stream.pos += length);
};
};
exports.readBytes = readBytes;
var peekBytes = function peekBytes(length) {
return function (stream) {
return stream.data.subarray(stream.pos, stream.pos + length);
};
};
exports.peekBytes = peekBytes;
var readString = function readString(length) {
return function (stream) {
return Array.from(readBytes(length)(stream)).map(function (value) {
return String.fromCharCode(value);
}).join('');
};
};
exports.readString = readString;
var readUnsigned = function readUnsigned(littleEndian) {
return function (stream) {
var bytes = readBytes(2)(stream);
return littleEndian ? (bytes[1] << 8) + bytes[0] : (bytes[0] << 8) + bytes[1];
};
};
exports.readUnsigned = readUnsigned;
var readArray = function readArray(byteSize, totalOrFunc) {
return function (stream, result, parent) {
var total = typeof totalOrFunc === 'function' ? totalOrFunc(stream, result, parent) : totalOrFunc;
var parser = readBytes(byteSize);
var arr = new Array(total);
for (var i = 0; i < total; i++) {
arr[i] = parser(stream);
}
return arr;
};
};
exports.readArray = readArray;
// Sum the value of `length` bits starting at `startIndex`, most significant bit first.
var subBitsTotal = function subBitsTotal(bits, startIndex, length) {
var result = 0;
for (var i = 0; i < length; i++) {
result += bits[startIndex + i] && Math.pow(2, length - i - 1);
}
return result;
};
var readBits = function readBits(schema) {
return function (stream) {
var _byte = readByte()(stream);
// convert the byte to a bit array, most significant bit first
var bits = new Array(8);
for (var i = 0; i < 8; i++) {
bits[7 - i] = !!(_byte & 1 << i);
}
// convert the bit array to values based on the schema
return Object.keys(schema).reduce(function (res, key) {
var def = schema[key];
if (def.length) {
res[key] = subBitsTotal(bits, def.index, def.length);
} else {
res[key] = bits[def.index];
}
return res;
}, {});
};
};
exports.readBits = readBits;
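
The readBits helper is the least obvious parser here: it unpacks one byte into named bit fields, most significant bit first. A short sketch, not part of the vendored file, using the same field layout the GIF schema below applies to the logical screen descriptor's packed byte:

var uint8 = require("js-binary-schema-parser/lib/parsers/uint8");

// 0xf7 = 0b11110111: color table flag set, resolution 7, not sorted, size field 7.
var stream = uint8.buildStream(new Uint8Array([0xf7]));
var packed = uint8.readBits({
  exists: { index: 0 },
  resolution: { index: 1, length: 3 },
  sort: { index: 4 },
  size: { index: 5, length: 3 }
})(stream);
console.log(packed); // -> { exists: true, resolution: 7, sort: false, size: 7 }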

219
node_modules/js-binary-schema-parser/lib/schemas/gif.js generated vendored Normal file

@@ -0,0 +1,219 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports["default"] = void 0;
var _ = require("../");
var _uint = require("../parsers/uint8");
// a set of 0x00 terminated subblocks
var subBlocksSchema = {
blocks: function blocks(stream) {
var terminator = 0x00;
var chunks = [];
var streamSize = stream.data.length;
var total = 0;
for (var size = (0, _uint.readByte)()(stream); size !== terminator; size = (0, _uint.readByte)()(stream)) {
// In corrupted files with no proper terminator, readByte can return
// undefined once the stream is exhausted; break to avoid looping forever.
if (!size) break;
// Clamp the final chunk if the declared size runs past the end of the data.
if (stream.pos + size >= streamSize) {
var availableSize = streamSize - stream.pos;
chunks.push((0, _uint.readBytes)(availableSize)(stream));
total += availableSize;
break;
}
chunks.push((0, _uint.readBytes)(size)(stream));
total += size;
}
var result = new Uint8Array(total);
var offset = 0;
for (var i = 0; i < chunks.length; i++) {
result.set(chunks[i], offset);
offset += chunks[i].length;
}
return result;
}
}; // graphic control extension (GCE)
var gceSchema = (0, _.conditional)({
gce: [{
codes: (0, _uint.readBytes)(2)
}, {
byteSize: (0, _uint.readByte)()
}, {
extras: (0, _uint.readBits)({
future: {
index: 0,
length: 3
},
disposal: {
index: 3,
length: 3
},
userInput: {
index: 6
},
transparentColorGiven: {
index: 7
}
})
}, {
delay: (0, _uint.readUnsigned)(true)
}, {
transparentColorIndex: (0, _uint.readByte)()
}, {
terminator: (0, _uint.readByte)()
}]
}, function (stream) {
var codes = (0, _uint.peekBytes)(2)(stream);
return codes[0] === 0x21 && codes[1] === 0xf9;
}); // image pipeline block
var imageSchema = (0, _.conditional)({
image: [{
code: (0, _uint.readByte)()
}, {
descriptor: [{
left: (0, _uint.readUnsigned)(true)
}, {
top: (0, _uint.readUnsigned)(true)
}, {
width: (0, _uint.readUnsigned)(true)
}, {
height: (0, _uint.readUnsigned)(true)
}, {
lct: (0, _uint.readBits)({
exists: {
index: 0
},
interlaced: {
index: 1
},
sort: {
index: 2
},
future: {
index: 3,
length: 2
},
size: {
index: 5,
length: 3
}
})
}]
}, (0, _.conditional)({
lct: (0, _uint.readArray)(3, function (stream, result, parent) {
return Math.pow(2, parent.descriptor.lct.size + 1);
})
}, function (stream, result, parent) {
return parent.descriptor.lct.exists;
}), {
data: [{
minCodeSize: (0, _uint.readByte)()
}, subBlocksSchema]
}]
}, function (stream) {
return (0, _uint.peekByte)()(stream) === 0x2c;
}); // plain text block
var textSchema = (0, _.conditional)({
text: [{
codes: (0, _uint.readBytes)(2)
}, {
blockSize: (0, _uint.readByte)()
}, {
preData: function preData(stream, result, parent) {
return (0, _uint.readBytes)(parent.text.blockSize)(stream);
}
}, subBlocksSchema]
}, function (stream) {
var codes = (0, _uint.peekBytes)(2)(stream);
return codes[0] === 0x21 && codes[1] === 0x01;
}); // application block
var applicationSchema = (0, _.conditional)({
application: [{
codes: (0, _uint.readBytes)(2)
}, {
blockSize: (0, _uint.readByte)()
}, {
id: function id(stream, result, parent) {
return (0, _uint.readString)(parent.blockSize)(stream);
}
}, subBlocksSchema]
}, function (stream) {
var codes = (0, _uint.peekBytes)(2)(stream);
return codes[0] === 0x21 && codes[1] === 0xff;
}); // comment block
var commentSchema = (0, _.conditional)({
comment: [{
codes: (0, _uint.readBytes)(2)
}, subBlocksSchema]
}, function (stream) {
var codes = (0, _uint.peekBytes)(2)(stream);
return codes[0] === 0x21 && codes[1] === 0xfe;
});
var schema = [{
header: [{
signature: (0, _uint.readString)(3)
}, {
version: (0, _uint.readString)(3)
}]
}, {
lsd: [{
width: (0, _uint.readUnsigned)(true)
}, {
height: (0, _uint.readUnsigned)(true)
}, {
gct: (0, _uint.readBits)({
exists: {
index: 0
},
resolution: {
index: 1,
length: 3
},
sort: {
index: 4
},
size: {
index: 5,
length: 3
}
})
}, {
backgroundColorIndex: (0, _uint.readByte)()
}, {
pixelAspectRatio: (0, _uint.readByte)()
}]
}, (0, _.conditional)({
gct: (0, _uint.readArray)(3, function (stream, result) {
return Math.pow(2, result.lsd.gct.size + 1);
})
}, function (stream, result) {
return result.lsd.gct.exists;
}), // content frames
{
frames: (0, _.loop)([gceSchema, applicationSchema, commentSchema, imageSchema, textSchema], function (stream) {
var nextCode = (0, _uint.peekByte)()(stream); // rather than check for a terminator, we should check for the existence
// of an ext or image block to avoid infinite loops
//var terminator = 0x3B;
//return nextCode !== terminator;
return nextCode === 0x21 || nextCode === 0x2c;
})
}];
var _default = schema;
exports["default"] = _default;