Commit 582fe3ed by 蒋勇

d

parent 667a860c

Too many changes to show.

To preserve performance only 1000 of 1000+ files are displayed.

../atob/bin/atob.js
\ No newline at end of file
../autoprefixer/bin/autoprefixer-info
\ No newline at end of file
../browserslist/cli.js
\ No newline at end of file
../cardinal/bin/cdl.js
\ No newline at end of file
../clean-css/bin/cleancss
\ No newline at end of file
../color-support/bin.js
\ No newline at end of file
{
"development": {
"username": "root",
"password": null,
"database": "database_development",
"host": "127.0.0.1",
"dialect": "mysql"
},
"test": {
"username": "root",
"password": null,
"database": "database_test",
"host": "127.0.0.1",
"dialect": "mysql"
},
"production": {
"username": "root",
"password": null,
"database": "database_production",
"host": "127.0.0.1",
"dialect": "mysql"
}
}
../element-theme/bin/element-theme
\ No newline at end of file
../escodegen/bin/escodegen.js
\ No newline at end of file
../escodegen/bin/esgenerate.js
\ No newline at end of file
../esprima/bin/esparse.js
\ No newline at end of file
../esprima/bin/esvalidate.js
\ No newline at end of file
../element-theme/bin/element-theme
\ No newline at end of file
../extract-zip/cli.js
\ No newline at end of file
../gulp/bin/gulp.js
\ No newline at end of file
../in-publish/in-install.js
\ No newline at end of file
../in-publish/in-publish.js
\ No newline at end of file
../mime/cli.js
\ No newline at end of file
../mkdirp/bin/cmd.js
\ No newline at end of file
'use strict';
const fs = require('fs');
const path = require('path');
const Sequelize = require('sequelize');
const basename = path.basename(__filename);
const env = process.env.NODE_ENV || 'development';
const config = require(__dirname + '/../config/config.json')[env];
const db = {};
let sequelize;
if (config.use_env_variable) {
sequelize = new Sequelize(process.env[config.use_env_variable], config);
} else {
sequelize = new Sequelize(config.database, config.username, config.password, config);
}
fs
.readdirSync(__dirname)
.filter(file => {
return (file.indexOf('.') !== 0) && (file !== basename) && (file.slice(-3) === '.js');
})
.forEach(file => {
const model = sequelize['import'](path.join(__dirname, file));
db[model.name] = model;
});
Object.keys(db).forEach(modelName => {
if (db[modelName].associate) {
db[modelName].associate(db);
}
});
db.sequelize = sequelize;
db.Sequelize = Sequelize;
module.exports = db;
../node-gyp/bin/node-gyp.js
\ No newline at end of file
../node-sass/bin/node-sass
\ No newline at end of file
../nopt/bin/nopt.js
\ No newline at end of file
../in-publish/not-in-install.js
\ No newline at end of file
../in-publish/not-in-publish.js
\ No newline at end of file
../os-name/cli.js
\ No newline at end of file
../osx-release/cli.js
\ No newline at end of file
../_pinyin@2.8.3@pinyin/bin/pinyin
\ No newline at end of file
../rimraf/bin.js
\ No newline at end of file
../sass-graph/bin/sassgraph
\ No newline at end of file
../semver/bin/semver
\ No newline at end of file
../_sequelize-cli@4.1.1@sequelize-cli/lib/sequelize
\ No newline at end of file
../sshpk/bin/sshpk-conv
\ No newline at end of file
../sshpk/bin/sshpk-sign
\ No newline at end of file
../sshpk/bin/sshpk-verify
\ No newline at end of file
../strip-bom/cli.js
\ No newline at end of file
../strip-indent/cli.js
\ No newline at end of file
../typescript/bin/tsc
\ No newline at end of file
../typescript/bin/tsserver
\ No newline at end of file
../user-home/cli.js
\ No newline at end of file
../uuid/bin/uuid
\ No newline at end of file
../which/bin/which
\ No newline at end of file
All packages installed at Fri Jan 11 2019 19:10:08 GMT+0800 (CST)
\ No newline at end of file
Recently updated (since 2019-09-05)
2019-09-11
→ @alicloud/pop-core@1.7.7 › httpx@2.2.0 › @types/node@^12.0.2(12.7.5) (13:46:39)
../_@alicloud_pop-core@1.7.7@@alicloud/pop-core
\ No newline at end of file
../_@mrmlnc_readdir-enhanced@2.2.1@@mrmlnc/readdir-enhanced
\ No newline at end of file
../_@nodelib_fs.scandir@2.1.1@@nodelib/fs.scandir
\ No newline at end of file
../_@nodelib_fs.stat@2.0.1@@nodelib/fs.stat
\ No newline at end of file
../_@nodelib_fs.walk@1.2.2@@nodelib/fs.walk
\ No newline at end of file
../_@sindresorhus_is@0.7.0@@sindresorhus/is
\ No newline at end of file
../_@types_commander@2.12.2@@types/commander
\ No newline at end of file
../_@types_events@3.0.0@@types/events
\ No newline at end of file
MIT License
Copyright (c) Microsoft Corporation. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE
# Installation
> `npm install --save @types/geojson`
# Summary
This package contains type definitions for GeoJSON Format Specification Revision (http://geojson.org/).
# Details
Files were exported from https://www.github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/geojson
Additional Details
* Last updated: Wed, 01 Nov 2017 15:55:40 GMT
* Dependencies: none
* Global values: GeoJSON
# Credits
These definitions were written by Jacob Bruun <https://github.com/cobster>, Arne Schubert <https://github.com/atd-schubert>.
// Type definitions for GeoJSON Format Specification Revision 1.0
// Project: http://geojson.org/
// Definitions by: Jacob Bruun <https://github.com/cobster>
// Arne Schubert <https://github.com/atd-schubert>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 2.3
export as namespace GeoJSON;
/***
* http://geojson.org/geojson-spec.html#geojson-objects
*/
export interface GeoJsonObject {
type: string;
bbox?: number[];
crs?: CoordinateReferenceSystem;
}
/***
* http://geojson.org/geojson-spec.html#positions
*/
export type Position = number[];
/***
* http://geojson.org/geojson-spec.html#geometry-objects
*/
export interface DirectGeometryObject extends GeoJsonObject {
coordinates: Position[][][] | Position[][] | Position[] | Position;
}
/**
* GeometryObject supports geometry collection as well
*/
export type GeometryObject = DirectGeometryObject | GeometryCollection;
/***
* http://geojson.org/geojson-spec.html#point
*/
export interface Point extends DirectGeometryObject {
type: "Point";
coordinates: Position;
}
/***
* http://geojson.org/geojson-spec.html#multipoint
*/
export interface MultiPoint extends DirectGeometryObject {
type: "MultiPoint";
coordinates: Position[];
}
/***
* http://geojson.org/geojson-spec.html#linestring
*/
export interface LineString extends DirectGeometryObject {
type: "LineString";
coordinates: Position[];
}
/***
* http://geojson.org/geojson-spec.html#multilinestring
*/
export interface MultiLineString extends DirectGeometryObject {
type: "MultiLineString";
coordinates: Position[][];
}
/***
* http://geojson.org/geojson-spec.html#polygon
*/
export interface Polygon extends DirectGeometryObject {
type: "Polygon";
coordinates: Position[][];
}
/***
* http://geojson.org/geojson-spec.html#multipolygon
*/
export interface MultiPolygon extends DirectGeometryObject {
type: "MultiPolygon";
coordinates: Position[][][];
}
/***
* http://geojson.org/geojson-spec.html#geometry-collection
*/
export interface GeometryCollection extends GeoJsonObject {
type: "GeometryCollection";
geometries: GeometryObject[];
}
/***
* https://tools.ietf.org/html/rfc7946#section-3.2
*/
export interface Feature<G extends GeometryObject, P = any> extends GeoJsonObject {
type: "Feature";
geometry: G;
properties: P;
id?: string | number;
}
/***
* http://geojson.org/geojson-spec.html#feature-collection-objects
*/
export interface FeatureCollection<G extends GeometryObject, P = any> extends GeoJsonObject {
type: "FeatureCollection";
features: Array<Feature<G, P>>;
}
/***
* http://geojson.org/geojson-spec.html#coordinate-reference-system-objects
*/
export interface CoordinateReferenceSystem {
type: string;
properties: any;
}
export interface NamedCoordinateReferenceSystem extends CoordinateReferenceSystem {
properties: { name: string };
}
export interface LinkedCoordinateReferenceSystem extends CoordinateReferenceSystem {
properties: { href: string; type: string };
}
{
"_from": "@types/geojson@^1.0.0",
"_id": "@types/geojson@1.0.6",
"_inBundle": false,
"_integrity": "sha512-Xqg/lIZMrUd0VRmSRbCAewtwGZiAk3mEUDvV4op1tGl+LvyPcb/MIOSxTl9z+9+J+R4/vpjiCAT4xeKzH9ji1w==",
"_location": "/@types/geojson",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "@types/geojson@^1.0.0",
"name": "@types/geojson",
"escapedName": "@types%2fgeojson",
"scope": "@types",
"rawSpec": "^1.0.0",
"saveSpec": null,
"fetchSpec": "^1.0.0"
},
"_requiredBy": [
"/terraformer",
"/terraformer-wkt-parser"
],
"_resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-1.0.6.tgz",
"_shasum": "3e02972728c69248c2af08d60a48cbb8680fffdf",
"_spec": "@types/geojson@^1.0.0",
"_where": "/usr/devws/OMC/scratch-web/node_modules/terraformer-wkt-parser",
"bundleDependencies": false,
"contributors": [
{
"name": "Jacob Bruun",
"url": "https://github.com/cobster"
},
{
"name": "Arne Schubert",
"url": "https://github.com/atd-schubert"
}
],
"dependencies": {},
"deprecated": false,
"description": "TypeScript definitions for GeoJSON Format Specification Revision",
"license": "MIT",
"main": "",
"name": "@types/geojson",
"repository": {
"type": "git",
"url": "https://www.github.com/DefinitelyTyped/DefinitelyTyped.git"
},
"scripts": {},
"typeScriptVersion": "2.3",
"typesPublisherContentHash": "2a374692a48615d90fde45b274e247a1ec98647d93fe7c7ee355386108689bcd",
"version": "1.0.6"
}
../_@types_glob@7.1.1@@types/glob
\ No newline at end of file
../_@types_minimatch@3.0.3@@types/minimatch
\ No newline at end of file
MIT License
Copyright (c) Microsoft Corporation. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE
# Installation
> `npm install --save @types/node`
# Summary
This package contains type definitions for Node.js (http://nodejs.org/).
# Details
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped.git/tree/master/types/node
Additional Details
* Last updated: Fri, 18 May 2018 21:07:30 GMT
* Dependencies: none
* Global values: Buffer, NodeJS, SlowBuffer, Symbol, __dirname, __filename, clearImmediate, clearInterval, clearTimeout, console, exports, global, module, process, require, setImmediate, setInterval, setTimeout
# Credits
These definitions were written by Microsoft TypeScript <http://typescriptlang.org>, DefinitelyTyped <https://github.com/DefinitelyTyped/DefinitelyTyped>, Parambir Singh <https://github.com/parambirs>, Christian Vaagland Tellnes <https://github.com/tellnes>, Wilco Bakker <https://github.com/WilcoBakker>, Nicolas Voigt <https://github.com/octo-sniffle>, Chigozirim C. <https://github.com/smac89>, Flarna <https://github.com/Flarna>, Mariusz Wiktorczyk <https://github.com/mwiktorczyk>, wwwy3y3 <https://github.com/wwwy3y3>, Deividas Bakanas <https://github.com/DeividasBakanas>, Kelvin Jin <https://github.com/kjin>, Alvis HT Tang <https://github.com/alvis>, Sebastian Silbermann <https://github.com/eps1lon>, Hannes Magnusson <https://github.com/Hannes-Magnusson-CK>, Alberto Schiabel <https://github.com/jkomyno>, Klaus Meinhardt <https://github.com/ajafff>, Huw <https://github.com/hoo29>, Nicolas Even <https://github.com/n-e>, Bruno Scheufler <https://github.com/brunoscheufler>, Mohsen Azimi <https://github.com/mohsen1>, Hoàng Văn Khải <https://github.com/KSXGitHub>, Alexander T. <https://github.com/a-tarasyuk>, Lishude <https://github.com/islishude>.
This source diff could not be displayed because it is too large.
{
"_from": "@types/node@*",
"_id": "@types/node@10.1.2",
"_inBundle": false,
"_integrity": "sha512-bjk1RIeZBCe/WukrFToIVegOf91Pebr8cXYBwLBIsfiGWVQ+ifwWsT59H3RxrWzWrzd1l/Amk1/ioY5Fq3/bpA==",
"_location": "/@types/node",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "@types/node@*",
"name": "@types/node",
"escapedName": "@types%2fnode",
"scope": "@types",
"rawSpec": "*",
"saveSpec": null,
"fetchSpec": "*"
},
"_requiredBy": [
"/wkx"
],
"_resolved": "https://registry.npmjs.org/@types/node/-/node-10.1.2.tgz",
"_shasum": "1b928a0baa408fc8ae3ac012cc81375addc147c6",
"_spec": "@types/node@*",
"_where": "/usr/devws/OMC/scratch-web/node_modules/wkx",
"bugs": {
"url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git/issues"
},
"bundleDependencies": false,
"contributors": [
{
"name": "Microsoft TypeScript",
"url": "http://typescriptlang.org"
},
{
"name": "DefinitelyTyped",
"url": "https://github.com/DefinitelyTyped/DefinitelyTyped"
},
{
"name": "Parambir Singh",
"url": "https://github.com/parambirs"
},
{
"name": "Christian Vaagland Tellnes",
"url": "https://github.com/tellnes"
},
{
"name": "Wilco Bakker",
"url": "https://github.com/WilcoBakker"
},
{
"name": "Nicolas Voigt",
"url": "https://github.com/octo-sniffle"
},
{
"name": "Chigozirim C.",
"url": "https://github.com/smac89"
},
{
"name": "Flarna",
"url": "https://github.com/Flarna"
},
{
"name": "Mariusz Wiktorczyk",
"url": "https://github.com/mwiktorczyk"
},
{
"name": "wwwy3y3",
"url": "https://github.com/wwwy3y3"
},
{
"name": "Deividas Bakanas",
"url": "https://github.com/DeividasBakanas"
},
{
"name": "Kelvin Jin",
"url": "https://github.com/kjin"
},
{
"name": "Alvis HT Tang",
"url": "https://github.com/alvis"
},
{
"name": "Sebastian Silbermann",
"url": "https://github.com/eps1lon"
},
{
"name": "Hannes Magnusson",
"url": "https://github.com/Hannes-Magnusson-CK"
},
{
"name": "Alberto Schiabel",
"url": "https://github.com/jkomyno"
},
{
"name": "Klaus Meinhardt",
"url": "https://github.com/ajafff"
},
{
"name": "Huw",
"url": "https://github.com/hoo29"
},
{
"name": "Nicolas Even",
"url": "https://github.com/n-e"
},
{
"name": "Bruno Scheufler",
"url": "https://github.com/brunoscheufler"
},
{
"name": "Mohsen Azimi",
"url": "https://github.com/mohsen1"
},
{
"name": "Hoàng Văn Khải",
"url": "https://github.com/KSXGitHub"
},
{
"name": "Alexander T.",
"url": "https://github.com/a-tarasyuk"
},
{
"name": "Lishude",
"url": "https://github.com/islishude"
}
],
"dependencies": {},
"deprecated": false,
"description": "TypeScript definitions for Node.js",
"homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped.git#readme",
"license": "MIT",
"main": "",
"name": "@types/node",
"repository": {
"type": "git",
"url": "git+https://github.com/DefinitelyTyped/DefinitelyTyped.git.git"
},
"scripts": {},
"typeScriptVersion": "2.0",
"typesPublisherContentHash": "c5964fb92b3570a29eac4893d444e4d649ee35ceac81d26885ce171625f7375f",
"version": "10.1.2"
}
../_@types_q@1.5.2@@types/q
\ No newline at end of file
../_@types_semver@5.5.0@@types/semver
\ No newline at end of file
language: node_js
node_js:
- 0.6
- 0.8
\ No newline at end of file
Copyright © 2011-2012, Paul Vorbach.
Copyright © 2009, Jeff Mott.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
* Neither the name Crypto-JS nor the names of its contributors may be used to
endorse or promote products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# MD5
[![build status](https://secure.travis-ci.org/pvorb/node-md5.png)](http://travis-ci.org/pvorb/node-md5)
A JavaScript function for hashing messages with MD5.
## Installation
You can use this package on the server side as well as the client side.
### [Node.js](http://nodejs.org/):
~~~
npm install md5
~~~
## API
~~~ javascript
md5(message)
~~~
* `message` -- `String` or `Buffer`
* returns `String`
## Usage
~~~ javascript
var md5 = require('md5');
console.log(md5('message'));
~~~
This will print the following:
~~~
78e731027d8fd50ed642340b7c9a63b3
~~~
It supports Buffers, too:
~~~ javascript
var fs = require('fs');
var md5 = require('md5');
fs.readFile('example.txt', function(err, buf) {
console.log(md5(buf));
});
~~~
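The implementation further down in this commit also accepts an options object (`asBytes`, `asString`, and `encoding`), which the API section above doesn't spell out. A small sketch based on that source and on the bundled tests:
~~~ javascript
var md5 = require('md5');

// Hex string (the default)
md5('message');                      // '78e731027d8fd50ed642340b7c9a63b3'

// Raw digest as an array of byte values
md5('message', { asBytes: true });

// Digest as a binary-encoded string, which can be hashed again
// by passing { encoding: 'binary' }
var raw = md5('abc', { asString: true });
md5(raw + 'a', { encoding: 'binary' });
~~~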
## Versions
Before version 2.0.0 there were two packages called md5 on npm, one lowercase,
one uppercase (the one you're looking at). As of version 2.0.0, all new versions
of this module will go to lowercase [md5](https://www.npmjs.com/package/md5) on
npm. To use the correct version, users of this module will have to change their
code from `require('MD5')` to `require('md5')` if they want to use versions >=
2.0.0.
## Bugs and Issues
If you encounter any bugs or issues, feel free to open an issue at
[github](https://github.com/pvorb/node-md5/issues).
## Credits
This package is based on the work of Jeff Mott, who did a pure JS implementation
of the MD5 algorithm that was published by Ronald L. Rivest in 1991. I needed an
npm package of the algorithm, so I used Jeff’s implementation for this package.
The original implementation can be found in the
[CryptoJS](http://code.google.com/p/crypto-js/) project.
## License
~~~
Copyright © 2011-2015, Paul Vorbach.
Copyright © 2009, Jeff Mott.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
* Neither the name Crypto-JS nor the names of its contributors may be used to
endorse or promote products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
~~~
(function(){
var crypt = require('crypt'),
utf8 = require('charenc').utf8,
isBuffer = require('is-buffer'),
bin = require('charenc').bin,
// The core
md5 = function (message, options) {
// Convert to byte array
if (message.constructor == String)
if (options && options.encoding === 'binary')
message = bin.stringToBytes(message);
else
message = utf8.stringToBytes(message);
else if (isBuffer(message))
message = Array.prototype.slice.call(message, 0);
else if (!Array.isArray(message))
message = message.toString();
// else, assume byte array already
var m = crypt.bytesToWords(message),
l = message.length * 8,
a = 1732584193,
b = -271733879,
c = -1732584194,
d = 271733878;
// Swap endian
for (var i = 0; i < m.length; i++) {
m[i] = ((m[i] << 8) | (m[i] >>> 24)) & 0x00FF00FF |
((m[i] << 24) | (m[i] >>> 8)) & 0xFF00FF00;
}
// Padding
m[l >>> 5] |= 0x80 << (l % 32);
m[(((l + 64) >>> 9) << 4) + 14] = l;
// Method shortcuts
var FF = md5._ff,
GG = md5._gg,
HH = md5._hh,
II = md5._ii;
for (var i = 0; i < m.length; i += 16) {
var aa = a,
bb = b,
cc = c,
dd = d;
a = FF(a, b, c, d, m[i+ 0], 7, -680876936);
d = FF(d, a, b, c, m[i+ 1], 12, -389564586);
c = FF(c, d, a, b, m[i+ 2], 17, 606105819);
b = FF(b, c, d, a, m[i+ 3], 22, -1044525330);
a = FF(a, b, c, d, m[i+ 4], 7, -176418897);
d = FF(d, a, b, c, m[i+ 5], 12, 1200080426);
c = FF(c, d, a, b, m[i+ 6], 17, -1473231341);
b = FF(b, c, d, a, m[i+ 7], 22, -45705983);
a = FF(a, b, c, d, m[i+ 8], 7, 1770035416);
d = FF(d, a, b, c, m[i+ 9], 12, -1958414417);
c = FF(c, d, a, b, m[i+10], 17, -42063);
b = FF(b, c, d, a, m[i+11], 22, -1990404162);
a = FF(a, b, c, d, m[i+12], 7, 1804603682);
d = FF(d, a, b, c, m[i+13], 12, -40341101);
c = FF(c, d, a, b, m[i+14], 17, -1502002290);
b = FF(b, c, d, a, m[i+15], 22, 1236535329);
a = GG(a, b, c, d, m[i+ 1], 5, -165796510);
d = GG(d, a, b, c, m[i+ 6], 9, -1069501632);
c = GG(c, d, a, b, m[i+11], 14, 643717713);
b = GG(b, c, d, a, m[i+ 0], 20, -373897302);
a = GG(a, b, c, d, m[i+ 5], 5, -701558691);
d = GG(d, a, b, c, m[i+10], 9, 38016083);
c = GG(c, d, a, b, m[i+15], 14, -660478335);
b = GG(b, c, d, a, m[i+ 4], 20, -405537848);
a = GG(a, b, c, d, m[i+ 9], 5, 568446438);
d = GG(d, a, b, c, m[i+14], 9, -1019803690);
c = GG(c, d, a, b, m[i+ 3], 14, -187363961);
b = GG(b, c, d, a, m[i+ 8], 20, 1163531501);
a = GG(a, b, c, d, m[i+13], 5, -1444681467);
d = GG(d, a, b, c, m[i+ 2], 9, -51403784);
c = GG(c, d, a, b, m[i+ 7], 14, 1735328473);
b = GG(b, c, d, a, m[i+12], 20, -1926607734);
a = HH(a, b, c, d, m[i+ 5], 4, -378558);
d = HH(d, a, b, c, m[i+ 8], 11, -2022574463);
c = HH(c, d, a, b, m[i+11], 16, 1839030562);
b = HH(b, c, d, a, m[i+14], 23, -35309556);
a = HH(a, b, c, d, m[i+ 1], 4, -1530992060);
d = HH(d, a, b, c, m[i+ 4], 11, 1272893353);
c = HH(c, d, a, b, m[i+ 7], 16, -155497632);
b = HH(b, c, d, a, m[i+10], 23, -1094730640);
a = HH(a, b, c, d, m[i+13], 4, 681279174);
d = HH(d, a, b, c, m[i+ 0], 11, -358537222);
c = HH(c, d, a, b, m[i+ 3], 16, -722521979);
b = HH(b, c, d, a, m[i+ 6], 23, 76029189);
a = HH(a, b, c, d, m[i+ 9], 4, -640364487);
d = HH(d, a, b, c, m[i+12], 11, -421815835);
c = HH(c, d, a, b, m[i+15], 16, 530742520);
b = HH(b, c, d, a, m[i+ 2], 23, -995338651);
a = II(a, b, c, d, m[i+ 0], 6, -198630844);
d = II(d, a, b, c, m[i+ 7], 10, 1126891415);
c = II(c, d, a, b, m[i+14], 15, -1416354905);
b = II(b, c, d, a, m[i+ 5], 21, -57434055);
a = II(a, b, c, d, m[i+12], 6, 1700485571);
d = II(d, a, b, c, m[i+ 3], 10, -1894986606);
c = II(c, d, a, b, m[i+10], 15, -1051523);
b = II(b, c, d, a, m[i+ 1], 21, -2054922799);
a = II(a, b, c, d, m[i+ 8], 6, 1873313359);
d = II(d, a, b, c, m[i+15], 10, -30611744);
c = II(c, d, a, b, m[i+ 6], 15, -1560198380);
b = II(b, c, d, a, m[i+13], 21, 1309151649);
a = II(a, b, c, d, m[i+ 4], 6, -145523070);
d = II(d, a, b, c, m[i+11], 10, -1120210379);
c = II(c, d, a, b, m[i+ 2], 15, 718787259);
b = II(b, c, d, a, m[i+ 9], 21, -343485551);
a = (a + aa) >>> 0;
b = (b + bb) >>> 0;
c = (c + cc) >>> 0;
d = (d + dd) >>> 0;
}
return crypt.endian([a, b, c, d]);
};
// Auxiliary functions
md5._ff = function (a, b, c, d, x, s, t) {
var n = a + (b & c | ~b & d) + (x >>> 0) + t;
return ((n << s) | (n >>> (32 - s))) + b;
};
md5._gg = function (a, b, c, d, x, s, t) {
var n = a + (b & d | c & ~d) + (x >>> 0) + t;
return ((n << s) | (n >>> (32 - s))) + b;
};
md5._hh = function (a, b, c, d, x, s, t) {
var n = a + (b ^ c ^ d) + (x >>> 0) + t;
return ((n << s) | (n >>> (32 - s))) + b;
};
md5._ii = function (a, b, c, d, x, s, t) {
var n = a + (c ^ (b | ~d)) + (x >>> 0) + t;
return ((n << s) | (n >>> (32 - s))) + b;
};
// Package private blocksize
md5._blocksize = 16;
md5._digestsize = 16;
module.exports = function (message, options) {
if (message === undefined || message === null)
throw new Error('Illegal argument ' + message);
var digestbytes = crypt.wordsToBytes(md5(message, options));
return options && options.asBytes ? digestbytes :
options && options.asString ? bin.bytesToString(digestbytes) :
crypt.bytesToHex(digestbytes);
};
})();
{
"_from": "md5@^2.0.0",
"_id": "md5@2.2.1",
"_inBundle": false,
"_integrity": "sha1-U6s41f48iJG6RlMp6iP6wFQBJvk=",
"_location": "/md5",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "md5@^2.0.0",
"name": "md5",
"escapedName": "md5",
"rawSpec": "^2.0.0",
"saveSpec": null,
"fetchSpec": "^2.0.0"
},
"_requiredBy": [
"/wx-pay"
],
"_resolved": "https://registry.npmjs.org/md5/-/md5-2.2.1.tgz",
"_shasum": "53ab38d5fe3c8891ba465329ea23fac0540126f9",
"_spec": "md5@^2.0.0",
"_where": "/usr/devws/OMC/scratch-web/node_modules/wx-pay",
"author": {
"name": "Paul Vorbach",
"email": "paul@vorba.ch",
"url": "http://paul.vorba.ch"
},
"bugs": {
"url": "https://github.com/pvorb/node-md5/issues"
},
"bundleDependencies": false,
"contributors": [
{
"name": "salba"
}
],
"dependencies": {
"charenc": "~0.0.1",
"crypt": "~0.0.1",
"is-buffer": "~1.1.1"
},
"deprecated": false,
"description": "js function for hashing messages with MD5",
"devDependencies": {
"mocha": "~2.3.4"
},
"homepage": "https://github.com/pvorb/node-md5#readme",
"license": "BSD-3-Clause",
"main": "md5.js",
"name": "md5",
"optionalDependencies": {},
"repository": {
"type": "git",
"url": "git://github.com/pvorb/node-md5.git"
},
"scripts": {
"test": "mocha"
},
"tags": [
"md5",
"hash",
"encryption",
"native",
"message digest"
],
"version": "2.2.1"
}
var md5 = require('./md5.js');
var assert = require('assert');
describe('md5', function () {
it('should throw an error for `undefined`', function() {
assert.throws(function() {
md5(undefined);
});
});
it('should throw an error for `null`', function() {
assert.throws(function() {
md5(null);
});
});
it('should return the expected MD5 hash for "message"', function() {
assert.equal('78e731027d8fd50ed642340b7c9a63b3', md5('message'));
});
it('should not return the same hash for random numbers twice', function() {
var msg1 = Math.floor((Math.random() * 100000) + 1) + (new Date).getTime();
var msg2 = Math.floor((Math.random() * 100000) + 1) + (new Date).getTime();
if (msg1 !== msg2) {
assert.notEqual(md5(msg1), md5(msg2));
} else {
assert.equal(md5(msg1), md5(msg1));
}
});
it('should support Node.js Buffers', function() {
var buffer = new Buffer('message áßäöü', 'utf8');
assert.equal(md5(buffer), md5('message áßäöü'));
})
it('should be able to use a binary encoded string', function() {
var hash1 = md5('abc', { asString: true });
var hash2 = md5(hash1 + 'a', { asString: true, encoding : 'binary' });
var hash3 = md5(hash1 + 'a', { encoding : 'binary' });
assert.equal(hash3, '131f0ac52813044f5110e4aec638c169');
});
});
## 1.0.2 (2017-01-16)
Feature:
- Attach the error response data to the returned error object: err.data = resData
\ No newline at end of file
# @alicloud/pop-core
The core SDK of POP API.
[![NPM version][npm-image]][npm-url]
[![build status][travis-image]][travis-url]
[![codecov][cov-image]][cov-url]
[npm-image]: https://img.shields.io/npm/v/@alicloud/pop-core.svg?style=flat-square
[npm-url]: https://npmjs.org/package/@alicloud/pop-core
[travis-image]: https://img.shields.io/travis/aliyun/openapi-core-nodejs-sdk/master.svg?style=flat-square
[travis-url]: https://travis-ci.org/aliyun/openapi-core-nodejs-sdk
[cov-image]: https://codecov.io/gh/aliyun/openapi-core-nodejs-sdk/branch/master/graph/badge.svg
[cov-url]: https://codecov.io/gh/aliyun/openapi-core-nodejs-sdk
## Installation
Install it and add it to the package.json dependencies.
```sh
$ npm install @alicloud/pop-core -S
```
## Prerequisite
Node.js >= 8.x
### Notes
You must know your `AK` (`accessKeyId`/`accessKeySecret`), as well as the cloud product's `endpoint` and `apiVersion`.
For example, for the ECS OpenAPI (https://help.aliyun.com/document_detail/25490.html), the API version is `2014-05-26`.
The endpoint list can be found [here](https://help.aliyun.com/document_detail/25489.html); the central endpoint is ecs.aliyuncs.com. Add the protocol, `http` or `https`, so the endpoint becomes e.g. `http://ecs.aliyuncs.com/`.
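Putting those values together, a minimal sketch (the `DescribeRegions` action is only an illustration; the general request pattern is shown in the Usage section below):
```js
var RPCClient = require('@alicloud/pop-core').RPCClient;

// ECS endpoint and apiVersion from the note above; the action name is illustrative.
var ecs = new RPCClient({
  accessKeyId: '<accessKeyId>',
  accessKeySecret: '<accessKeySecret>',
  endpoint: 'https://ecs.aliyuncs.com/',
  apiVersion: '2014-05-26'
});

ecs.request('DescribeRegions', {}).then(function (result) {
  console.log(result);
}, function (err) {
  console.error(err);
});
```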
## Online Demo
**[API Explorer](https://api.aliyun.com)** lets you call cloud product OpenAPIs online, dynamically generates SDK example code, and provides quick API lookup, which significantly lowers the barrier to using the cloud APIs. **It is highly recommended**.
<a href="https://api.aliyun.com" target="api_explorer">
<img src="https://img.alicdn.com/tfs/TB12GX6zW6qK1RjSZFmXXX0PFXa-744-122.png" width="180" />
</a>
## Usage
The RPC style client:
```js
var RPCClient = require('@alicloud/pop-core').RPCClient;
var client = new RPCClient({
accessKeyId: '<accessKeyId>',
accessKeySecret: '<accessKeySecret>',
endpoint: '<endpoint>',
apiVersion: '<apiVersion>'
});
// => returns Promise
client.request(action, params);
// co/yield, async/await
// options
client.request(action, params, {
timeout: 3000, // default 3000 ms
formatAction: true, // default true, upper-case the first letter of the action name
formatParams: true, // default true, upper-case the first letter of each parameter name
method: 'GET', // set the http method, default is GET
headers: {}, // set the http request headers
});
```
The ROA style client:
```js
var ROAClient = require('@alicloud/pop-core').ROAClient;
var client = new ROAClient({
accessKeyId: '<accessKeyId>',
accessKeySecret: '<secretAccessKey>',
endpoint: '<endpoint>',
apiVersion: '<apiVersion>'
});
// => returns Promise
// request(HTTPMethod, uriPath, queries, body, headers, options);
// options => {timeout}
client.request('GET', '/regions');
// co/yield, async/await
```
### Custom opts
We offer two ways to customize request opts.
One way is to pass opts through the client constructor. Treat opts passed through the constructor as default custom opts, because all requests will use them.
```js
var client = new RPCClient({
accessKeyId: '<accessKeyId>',
accessKeySecret: '<accessKeySecret>',
endpoint: '<endpoint>',
apiVersion: '<apiVersion>',
opts: {
timeout: 3000
}
});
```
The other way is to pass opts as a parameter of the request call. Use this when you only want to apply opts to specific calls.
```js
client.request(action, params, {
timeout: 3000
});
```
When both ways are used, the opts are merged; for any key present in both, the value passed to the request call overrides the value passed to the constructor.
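For example (a sketch based on the rpc.js source included later in this commit):
```js
var client = new RPCClient({
  accessKeyId: '<accessKeyId>',
  accessKeySecret: '<accessKeySecret>',
  endpoint: '<endpoint>',
  apiVersion: '<apiVersion>',
  opts: {
    timeout: 3000,   // constructor-level default
    method: 'POST'
  }
});

// This call still uses method 'POST' from the constructor opts,
// but its timeout of 10000 overrides the constructor's 3000.
client.request(action, params, {
  timeout: 10000
});
```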
### Http Proxy Support
```js
var tunnel = require('tunnel-agent');
var RPCClient = require('@alicloud/pop-core').RPCClient;
var client = new RPCClient({
accessKeyId: '<accessKeyId>',
accessKeySecret: '<accessKeySecret>',
endpoint: '<endpoint>',
apiVersion: '<apiVersion>'
});
client.request(action, params, {
agent: tunnel.httpOverHttp({
proxy: {
host: 'host',
port: port
}
})
});
```
## License
The MIT License
'use strict';
module.exports = require('./lib/rpc');
module.exports.ROAClient = require('./lib/roa');
module.exports.RPCClient = require('./lib/rpc');
'use strict';
const os = require('os');
const pkg = require('../package.json');
exports.DEFAULT_UA = `AlibabaCloud (${os.platform()}; ${os.arch()}) ` +
`Node.js/${process.version} Core/${pkg.version}`;
exports.DEFAULT_CLIENT = `Node.js(${process.version}), ${pkg.name}: ${pkg.version}`;
'use strict';
const assert = require('assert');
const url = require('url');
const querystring = require('querystring');
const kitx = require('kitx');
const httpx = require('httpx');
const xml2js = require('xml2js');
const JSON = require('json-bigint');
const debug = require('debug')('roa');
const helper = require('./helper');
function filter(value) {
return value.replace(/[\t\n\r\f]/g, ' ');
}
function keyLowerify(headers) {
const keys = Object.keys(headers);
const newHeaders = {};
for (let i = 0; i < keys.length; i++) {
const key = keys[i];
newHeaders[key.toLowerCase()] = headers[key];
}
return newHeaders;
}
function getCanonicalizedHeaders(headers) {
const prefix = 'x-acs-';
const keys = Object.keys(headers);
const canonicalizedKeys = [];
for (let i = 0; i < keys.length; i++) {
const key = keys[i];
if (key.startsWith(prefix)) {
canonicalizedKeys.push(key);
}
}
canonicalizedKeys.sort();
var result = '';
for (let i = 0; i < canonicalizedKeys.length; i++) {
const key = canonicalizedKeys[i];
result += `${key}:${filter(headers[key]).trim()}\n`;
}
return result;
}
function getCanonicalizedResource(uriPattern, query) {
const keys = Object.keys(query).sort();
if (keys.length === 0) {
return uriPattern;
}
var result = [];
for (var i = 0; i < keys.length; i++) {
const key = keys[i];
result.push(`${key}=${query[key]}`);
}
return `${uriPattern}?${result.join('&')}`;
}
function buildStringToSign(method, uriPattern, headers, query) {
const accept = headers['accept'];
const contentMD5 = headers['content-md5'] || '';
const contentType = headers['content-type'] || '';
const date = headers['date'] || '';
const header = `${method}\n${accept}\n${contentMD5}\n${contentType}\n${date}\n`;
const canonicalizedHeaders = getCanonicalizedHeaders(headers);
const canonicalizedResource = getCanonicalizedResource(uriPattern, query);
return `${header}${canonicalizedHeaders}${canonicalizedResource}`;
}
function parseXML(xml) {
const parser = new xml2js.Parser({
// explicitArray: false
});
return new Promise((resolve, reject) => {
parser.parseString(xml, (err, result) => {
if (err) {
return reject(err);
}
resolve(result);
});
});
}
class ACSError extends Error {
constructor(err) {
const message = err.Message[0];
const code = err.Code[0];
const hostid = err.HostId[0];
const requestid = err.RequestId[0];
super(`${message} hostid: ${hostid}, requestid: ${requestid}`);
this.code = code;
}
}
class ROAClient {
constructor(config) {
assert(config, 'must pass "config"');
assert(config.endpoint, 'must pass "config.endpoint"');
if (!config.endpoint.startsWith('https://') &&
!config.endpoint.startsWith('http://')) {
throw new Error(`"config.endpoint" must start with 'https://' or 'http://'.`);
}
assert(config.apiVersion, 'must pass "config.apiVersion"');
assert(config.accessKeyId, 'must pass "config.accessKeyId"');
assert(config.accessKeySecret, 'must pass "config.accessKeySecret"');
this.endpoint = config.endpoint;
this.apiVersion = config.apiVersion;
this.accessKeyId = config.accessKeyId;
this.accessKeySecret = config.accessKeySecret;
this.securityToken = config.securityToken;
this.host = url.parse(this.endpoint).hostname;
this.opts = config.opts;
var httpModule = this.endpoint.startsWith('https://') ? require('https') : require('http');
this.keepAliveAgent = new httpModule.Agent({
keepAlive: true,
keepAliveMsecs: 3000
});
}
buildHeaders() {
const now = new Date();
var defaultHeaders = {
accept: 'application/json',
date: now.toGMTString(),
host: this.host,
'x-acs-signature-nonce': kitx.makeNonce(),
'x-acs-signature-method': 'HMAC-SHA1',
'x-acs-signature-version': '1.0',
'x-acs-version': this.apiVersion,
'user-agent': helper.DEFAULT_UA,
'x-sdk-client': helper.DEFAULT_CLIENT
};
if (this.securityToken) {
defaultHeaders['x-acs-accesskey-id'] = this.accessKeyId;
defaultHeaders['x-acs-security-token'] = this.securityToken;
}
return defaultHeaders;
}
signature(stringToSign) {
const utf8Buff = Buffer.from(stringToSign, 'utf8');
return kitx.sha1(utf8Buff, this.accessKeySecret, 'base64');
}
buildAuthorization(stringToSign) {
return `acs ${this.accessKeyId}:${this.signature(stringToSign)}`;
}
request(method, uriPattern, query = {}, body = '', headers = {}, opts = {}) {
var postBody = null;
var mixHeaders = Object.assign(this.buildHeaders(), keyLowerify(headers));
if (body) {
postBody = Buffer.from(body, 'utf8');
mixHeaders['content-md5'] = kitx.md5(postBody, 'base64');
mixHeaders['content-length'] = postBody.length;
}
var url = `${this.endpoint}${uriPattern}`;
if (Object.keys(query).length) {
url += `?${querystring.stringify(query)}`;
}
const stringToSign = buildStringToSign(method, uriPattern, mixHeaders, query);
debug('stringToSign: %s', stringToSign);
mixHeaders['authorization'] = this.buildAuthorization(stringToSign);
const options = Object.assign({
method,
agent: this.keepAliveAgent,
headers: mixHeaders,
data: postBody
}, this.opts, opts);
return httpx.request(url, options).then((response) => {
return httpx.read(response, 'utf8').then((body) => {
// Return raw body
if (opts.rawBody) {
return body;
}
const contentType = response.headers['content-type'] || '';
// JSON
if (contentType.startsWith('application/json')) {
const statusCode = response.statusCode;
if (statusCode === 204) {
return body;
}
var result;
try {
result = JSON.parse(body);
} catch (err) {
err.name = 'FormatError';
err.message = 'parse response to json error';
err.body = body;
throw err;
}
if (statusCode >= 400) {
const errorMessage = result.Message || result.errorMsg || '';
const errorCode = result.Code || result.errorCode || '';
const requestId = result.RequestId || '';
var err = new Error(`code: ${statusCode}, ${errorMessage}, requestid: ${requestId}`);
err.name = `${errorCode}Error`;
err.statusCode = statusCode;
err.result = result;
err.code = errorCode;
throw err;
}
return result;
}
if (contentType.startsWith('text/xml')) {
return parseXML(body).then((result) => {
if (result.Error) {
throw new ACSError(result.Error);
}
return result;
});
}
return body;
});
});
}
put(path, query, body, headers, options) {
return this.request('PUT', path, query, body, headers, options);
}
post(path, query, body, headers, options) {
return this.request('POST', path, query, body, headers, options);
}
get(path, query, headers, options) {
return this.request('GET', path, query, '', headers, options);
}
delete(path, query, headers, options) {
return this.request('DELETE', path, query, '', headers, options);
}
}
module.exports = ROAClient;
// Type definitions for [~THE LIBRARY NAME~] [~OPTIONAL VERSION NUMBER~]
// Project: [~THE PROJECT NAME~]
// Definitions by: [~YOUR NAME~] <[~A URL FOR YOU~]>
/*~ This is the module template file for class modules.
*~ You should rename it to index.d.ts and place it in a folder with the same name as the module.
*~ For example, if you were writing a file for "super-greeter", this
*~ file should be 'super-greeter/index.d.ts'
*/
/*~ Note that ES6 modules cannot directly export class objects.
*~ This file should be imported using the CommonJS-style:
*~ import x = require('someLibrary');
*~
*~ Refer to the documentation to understand common
*~ workarounds for this limitation of ES6 modules.
*/
/*~ This declaration specifies that the class constructor function
*~ is the exported object from the file
*/
export = RPCClient;
/*~ Write your module's methods and properties in this class */
declare class RPCClient {
constructor(config: RPCClient.Config);
request<T>(action: String, params: Object, options?: Object): Promise<T>;
}
/*~ If you want to expose types from your module as well, you can
*~ place them in this block.
*/
declare namespace RPCClient {
export interface Config {
endpoint: string;
apiVersion: string;
accessKeyId: string;
accessKeySecret: string;
codes?: (string | number)[];
opts?: object;
}
}
'use strict';
const assert = require('assert');
const httpx = require('httpx');
const kitx = require('kitx');
const JSON = require('json-bigint');
const helper = require('./helper');
function firstLetterUpper(str) {
return str.slice(0, 1).toUpperCase() + str.slice(1);
}
function formatParams(params) {
var keys = Object.keys(params);
var newParams = {};
for (var i = 0; i < keys.length; i++) {
var key = keys[i];
newParams[firstLetterUpper(key)] = params[key];
}
return newParams;
}
function timestamp() {
var date = new Date();
var YYYY = date.getUTCFullYear();
var MM = kitx.pad2(date.getUTCMonth() + 1);
var DD = kitx.pad2(date.getUTCDate());
var HH = kitx.pad2(date.getUTCHours());
var mm = kitx.pad2(date.getUTCMinutes());
var ss = kitx.pad2(date.getUTCSeconds());
// drop the milliseconds part
return `${YYYY}-${MM}-${DD}T${HH}:${mm}:${ss}Z`;
}
function encode(str) {
var result = encodeURIComponent(str);
return result.replace(/\!/g, '%21')
.replace(/\'/g, '%27')
.replace(/\(/g, '%28')
.replace(/\)/g, '%29')
.replace(/\*/g, '%2A');
}
function replaceRepeatList(target, key, repeat) {
for (var i = 0; i < repeat.length; i++) {
var item = repeat[i];
if (item && typeof item === 'object') {
const keys = Object.keys(item);
for (var j = 0; j < keys.length; j++) {
target[`${key}.${i + 1}.${keys[j]}`] = item[keys[j]];
}
} else {
target[`${key}.${i + 1}`] = item;
}
}
}
function flatParams(params) {
var target = {};
var keys = Object.keys(params);
for (let i = 0; i < keys.length; i++) {
var key = keys[i];
var value = params[key];
if (Array.isArray(value)) {
replaceRepeatList(target, key, value);
} else {
target[key] = value;
}
}
return target;
}
function normalize(params) {
var list = [];
var flated = flatParams(params);
var keys = Object.keys(flated).sort();
for (let i = 0; i < keys.length; i++) {
var key = keys[i];
var value = flated[key];
list.push([encode(key), encode(value)]); // push [encodedKey, encodedValue] pairs
}
return list;
}
function canonicalize(normalized) {
var fields = [];
for (var i = 0; i < normalized.length; i++) {
var [key, value] = normalized[i];
fields.push(key + '=' + value);
}
return fields.join('&');
}
class RPCClient {
constructor(config, verbose) {
assert(config, 'must pass "config"');
assert(config.endpoint, 'must pass "config.endpoint"');
if (!config.endpoint.startsWith('https://') &&
!config.endpoint.startsWith('http://')) {
throw new Error(`"config.endpoint" must start with 'https://' or 'http://'.`);
}
assert(config.apiVersion, 'must pass "config.apiVersion"');
assert(config.accessKeyId, 'must pass "config.accessKeyId"');
var accessKeySecret = config.secretAccessKey || config.accessKeySecret;
assert(accessKeySecret, 'must pass "config.accessKeySecret"');
if (config.endpoint.endsWith('/')) {
config.endpoint = config.endpoint.slice(0, -1);
}
this.endpoint = config.endpoint;
this.apiVersion = config.apiVersion;
this.accessKeyId = config.accessKeyId;
this.accessKeySecret = accessKeySecret;
this.securityToken = config.securityToken;
this.verbose = verbose === true;
// response codes not in this set will cause an error to be thrown
this.codes = new Set([200, '200', 'OK', 'Success']);
if (config.codes) {
// merge user-supplied codes
for (var elem of config.codes) {
this.codes.add(elem);
}
}
this.opts = config.opts || {};
var httpModule = this.endpoint.startsWith('https://')
? require('https') : require('http');
this.keepAliveAgent = new httpModule.Agent({
keepAlive: true,
keepAliveMsecs: 3000
});
}
request(action, params = {}, opts = {}) {
// 1. compose params and opts
opts = Object.assign({
headers: {
'x-sdk-client': helper.DEFAULT_CLIENT,
'user-agent': helper.DEFAULT_UA
}
}, this.opts, opts);
// format the action unless formatAction is false
if (opts.formatAction !== false) {
action = firstLetterUpper(action);
}
// format the params unless formatParams is false
if (opts.formatParams !== false) {
params = formatParams(params);
}
var defaults = this._buildParams();
params = Object.assign({Action: action}, defaults, params);
// 2. calculate signature
var method = (opts.method || 'GET').toUpperCase();
var normalized = normalize(params);
var canonicalized = canonicalize(normalized);
// 2.1 get string to sign
var stringToSign = `${method}&${encode('/')}&${encode(canonicalized)}`;
// 2.2 get signature
const key = this.accessKeySecret + '&';
var signature = kitx.sha1(stringToSign, key, 'base64');
// add signature
normalized.push(['Signature', encode(signature)]);
// 3. generate final url
const url = opts.method === 'POST' ? `${this.endpoint}/` : `${this.endpoint}/?${canonicalize(normalized)}`;
// 4. send request
var entry = {
url: url,
request: null,
response: null
};
if (opts && !opts.agent) {
opts.agent = this.keepAliveAgent;
}
if (opts.method === 'POST') {
opts.headers = opts.headers || {};
opts.headers['content-type'] = 'application/x-www-form-urlencoded';
opts.data = canonicalize(normalized);
}
return httpx.request(url, opts).then((response) => {
entry.request = {
headers: response.req._headers
};
entry.response = {
statusCode: response.statusCode,
headers: response.headers
};
return httpx.read(response);
}).then((buffer) => {
var json = JSON.parse(buffer);
if (json.Code && !this.codes.has(json.Code)) {
var err = new Error(`${json.Message}, URL: ${url}`);
err.name = json.Code + 'Error';
err.data = json;
err.code = json.Code;
err.url = url;
err.entry = entry;
return Promise.reject(err);
}
if (this.verbose) {
return [json, entry];
}
return json;
});
}
_buildParams() {
var defaultParams = {
Format: 'JSON',
SignatureMethod: 'HMAC-SHA1',
SignatureNonce: kitx.makeNonce(),
SignatureVersion: '1.0',
Timestamp: timestamp(),
AccessKeyId: this.accessKeyId,
Version: this.apiVersion,
};
if (this.securityToken) {
defaultParams.SecurityToken = this.securityToken;
}
return defaultParams;
}
}
module.exports = RPCClient;
../../../_json-bigint@0.2.3@json-bigint
\ No newline at end of file
{
"name": "@alicloud/pop-core",
"version": "1.7.7",
"description": "AliCloud POP SDK core",
"main": "index.js",
"types": "lib/rpc.d.ts",
"scripts": {
"lint": "eslint --fix lib test",
"test": "mocha -R spec test/*.test.js",
"test-cov": "nyc -r=html -r=text -r=lcov mocha -t 3000 -R spec test/*.test.js",
"test-integration": "mocha -R spec test/*.integration.js",
"ci": "npm run lint && npm run test-cov && codecov"
},
"keywords": [
"Aliyun",
"AliCloud",
"OpenAPI",
"POP",
"SDK",
"Core"
],
"author": "Jackson Tian",
"license": "MIT",
"dependencies": {
"debug": "^3.1.0",
"httpx": "^2.1.2",
"json-bigint": "^0.2.3",
"kitx": "^1.2.1",
"xml2js": "^0.4.17"
},
"files": [
"lib",
"index.js"
],
"devDependencies": {
"codecov": "^3.0.4",
"eslint": "^3.13.0",
"expect.js": "^0.3.1",
"mocha": "^4",
"muk": "^0.5.3",
"nyc": "^12.0.2",
"rewire": "^4.0.1"
},
"directories": {
"test": "test"
},
"repository": {
"type": "git",
"url": "git+https://github.com/aliyun/openapi-core-nodejs-sdk.git"
},
"bugs": {
"url": "https://github.com/aliyun/openapi-core-nodejs-sdk/issues"
},
"homepage": "https://github.com/aliyun/openapi-core-nodejs-sdk#readme",
"__npminstall_done": "Thu Sep 12 2019 15:55:12 GMT+0800 (CST)",
"_from": "@alicloud/pop-core@1.7.7",
"_resolved": "https://registry.npm.taobao.org/@alicloud/pop-core/download/@alicloud/pop-core-1.7.7.tgz"
}
\ No newline at end of file
# Change Log
All notable changes will be documented in this file.
`readdir-enhanced` adheres to [Semantic Versioning](http://semver.org/).
## [v2.2.0](https://github.com/BigstickCarpet/readdir-enhanced/tree/v2.2.0) (2018-01-09)
- Refactored the codebase to use ES6 syntax (Node v4.x compatible)
- You can now provide [your own implementation](https://github.com/BigstickCarpet/readdir-enhanced#custom-fs-methods) for the [filesystem module](https://nodejs.org/api/fs.html) that's used by `readdir-enhanced`. Just set the `fs` option to your implementation. Thanks to [@mrmlnc](https://github.com/mrmlnc) for the idea and [the PR](https://github.com/BigstickCarpet/readdir-enhanced/pull/10)!
- [Better error handling](https://github.com/BigstickCarpet/readdir-enhanced/commit/0d330b68524bafbdeae11566a3e8af1bc3f184bf), especially around user-specified logic, such as `options.deep`, `options.filter`, and `options.fs`
[Full Changelog](https://github.com/BigstickCarpet/readdir-enhanced/compare/v2.1.0...v2.2.0)
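A minimal sketch of the `fs` option described above (the facade shape of `readdir`, `stat`, and `lstat` comes from the normalize-options source further down in this commit; wrapping Node's own `fs` is just an illustration):
```js
const fs = require('fs');
const readdir = require('readdir-enhanced');

readdir('some/dir', {
  fs: {
    // Supply your own implementations, e.g. to add logging or read a virtual filesystem.
    readdir: (dir, callback) => fs.readdir(dir, callback),
    stat: (path, callback) => fs.stat(path, callback),
    lstat: (path, callback) => fs.lstat(path, callback)
  }
}).then(files => console.log(files));
```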
## [v2.1.0](https://github.com/BigstickCarpet/readdir-enhanced/tree/v2.1.0) (2017-12-01)
- The `fs.Stats` objects now include a `depth` property, which indicates the number of subdirectories beneath the base path. Thanks to [@mrmlnc](https://github.com/mrmlnc) for [the PR](https://github.com/BigstickCarpet/readdir-enhanced/pull/8)!
[Full Changelog](https://github.com/BigstickCarpet/readdir-enhanced/compare/v2.0.0...v2.1.0)
## [v2.0.0](https://github.com/BigstickCarpet/readdir-enhanced/tree/v2.0.0) (2017-11-15)
- Dropped support for Node v0.x, which is no longer actively maintained. Please upgrade to Node 4 or newer.
[Full Changelog](https://github.com/BigstickCarpet/readdir-enhanced/compare/v1.5.0...v2.0.0)
## [v1.5.0](https://github.com/BigstickCarpet/readdir-enhanced/tree/v1.5.0) (2017-04-10)
The [`deep` option](README.md#deep) can now be set to a [regular expression](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp), a [glob pattern](https://github.com/isaacs/node-glob#glob-primer), or a function, which allows you to customize which subdirectories get crawled. Of course, you can also still set the `deep` option to `true` to crawl _all_ subdirectories, or a number if you just want to limit the recursion depth.
[Full Changelog](https://github.com/BigstickCarpet/readdir-enhanced/compare/v1.4.0...v1.5.0)
## [v1.4.0](https://github.com/BigstickCarpet/readdir-enhanced/tree/v1.4.0) (2016-08-26)
The [`filter` option](README.md#filter) can now be set to a regular expression or a glob pattern string, which simplifies filtering based on file names. Of course, you can still set the `filter` option to a function if you need to perform more advanced filtering based on the [`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats) of each file.
[Full Changelog](https://github.com/BigstickCarpet/readdir-enhanced/compare/v1.3.4...v1.4.0)
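The two entries above can be combined. A small sketch (paths and patterns are illustrative; `stats.depth` is the property added in v2.1.0 above):
```js
const readdir = require('readdir-enhanced');

// Filter by a glob pattern and limit recursion with a number...
readdir('some/dir', { deep: 2, filter: '**/*.js' })
  .then(files => console.log(files));

// ...or make both decisions with functions that receive fs.Stats objects.
readdir('some/dir', {
  deep: stats => stats.depth < 2,      // only crawl shallow subdirectories
  filter: stats => stats.isFile()
}).then(files => console.log(files));
```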
## [v1.3.4](https://github.com/BigstickCarpet/readdir-enhanced/tree/v1.3.4) (2016-08-26)
As of this release, `readdir-enhanced` is fully tested on all major Node versions (0.x, 4.x, 5.x, 6.x) on [linux](https://travis-ci.org/BigstickCarpet/readdir-enhanced) and [Windows](https://ci.appveyor.com/project/BigstickCarpet/readdir-enhanced/branch/master), with [nearly 100% code coverage](https://coveralls.io/github/BigstickCarpet/readdir-enhanced?branch=master). I do all of my local development and testing on MacOS, so that's covered too.
[Full Changelog](https://github.com/BigstickCarpet/readdir-enhanced/compare/v1.0.1...v1.3.4)
The MIT License (MIT)
Copyright (c) 2016 James Messinger
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
.
\ No newline at end of file
'use strict';
module.exports = asyncForEach;
/**
* Simultaneously processes all items in the given array.
*
* @param {array} array - The array to iterate over
* @param {function} iterator - The function to call for each item in the array
* @param {function} done - The function to call when all iterators have completed
*/
function asyncForEach (array, iterator, done) {
if (array.length === 0) {
// NOTE: Normally a bad idea to mix sync and async, but it's safe here because
// of the way that this method is currently used by DirectoryReader.
done();
return;
}
// Simultaneously process all items in the array.
let pending = array.length;
array.forEach(item => {
iterator(item, () => {
if (--pending === 0) {
done();
}
});
});
}
'use strict';
module.exports = readdirAsync;
const maybe = require('call-me-maybe');
const DirectoryReader = require('../directory-reader');
let asyncFacade = {
fs: require('fs'),
forEach: require('./for-each'),
async: true
};
/**
* Returns the buffered output from an asynchronous {@link DirectoryReader},
* via an error-first callback or a {@link Promise}.
*
* @param {string} dir
* @param {object} [options]
* @param {function} [callback]
* @param {object} internalOptions
*/
function readdirAsync (dir, options, callback, internalOptions) {
if (typeof options === 'function') {
callback = options;
options = undefined;
}
return maybe(callback, new Promise(((resolve, reject) => {
let results = [];
internalOptions.facade = asyncFacade;
let reader = new DirectoryReader(dir, options, internalOptions);
let stream = reader.stream;
stream.on('error', err => {
reject(err);
stream.pause();
});
stream.on('data', result => {
results.push(result);
});
stream.on('end', () => {
resolve(results);
});
})));
}
'use strict';
let call = module.exports = {
safe: safeCall,
once: callOnce,
};
/**
* Calls a function with the given arguments, and ensures that the error-first callback is _always_
* invoked exactly once, even if the function throws an error.
*
* @param {function} fn - The function to invoke
* @param {...*} args - The arguments to pass to the function. The final argument must be a callback function.
*/
function safeCall (fn, args) {
// Get the function arguments as an array
args = Array.prototype.slice.call(arguments, 1);
// Replace the callback function with a wrapper that ensures it will only be called once
let callback = call.once(args.pop());
args.push(callback);
try {
fn.apply(null, args);
}
catch (err) {
callback(err);
}
}
/**
* Returns a wrapper function that ensures the given callback function is only called once.
* Subsequent calls are ignored, unless the first argument is an Error, in which case the
* error is thrown.
*
* @param {function} fn - The function that should only be called once
* @returns {function}
*/
function callOnce (fn) {
let fulfilled = false;
return function onceWrapper (err) {
if (!fulfilled) {
fulfilled = true;
return fn.apply(this, arguments);
}
else if (err) {
// The callback has already been called, but now an error has occurred
// (most likely inside the callback function). So re-throw the error,
// so it gets handled further up the call stack
throw err;
}
};
}
'use strict';
const readdirSync = require('./sync');
const readdirAsync = require('./async');
const readdirStream = require('./stream');
module.exports = exports = readdirAsyncPath;
exports.readdir = exports.readdirAsync = exports.async = readdirAsyncPath;
exports.readdirAsyncStat = exports.async.stat = readdirAsyncStat;
exports.readdirStream = exports.stream = readdirStreamPath;
exports.readdirStreamStat = exports.stream.stat = readdirStreamStat;
exports.readdirSync = exports.sync = readdirSyncPath;
exports.readdirSyncStat = exports.sync.stat = readdirSyncStat;
/**
* Synchronous readdir that returns an array of string paths.
*
* @param {string} dir
* @param {object} [options]
* @returns {string[]}
*/
function readdirSyncPath (dir, options) {
return readdirSync(dir, options, {});
}
/**
* Synchronous readdir that returns results as an array of {@link fs.Stats} objects
*
* @param {string} dir
* @param {object} [options]
* @returns {fs.Stats[]}
*/
function readdirSyncStat (dir, options) {
return readdirSync(dir, options, { stats: true });
}
/**
* Asynchronous readdir (accepts an error-first callback or returns a {@link Promise}).
* Results are an array of path strings.
*
* @param {string} dir
* @param {object} [options]
* @param {function} [callback]
* @returns {Promise<string[]>}
*/
function readdirAsyncPath (dir, options, callback) {
return readdirAsync(dir, options, callback, {});
}
/**
* Asynchronous readdir (accepts an error-first callback or returns a {@link Promise}).
* Results are an array of {@link fs.Stats} objects.
*
* @param {string} dir
* @param {object} [options]
* @param {function} [callback]
* @returns {Promise<fs.Stats[]>}
*/
function readdirAsyncStat (dir, options, callback) {
return readdirAsync(dir, options, callback, { stats: true });
}
/**
* Asynchronous readdir that returns a {@link stream.Readable} (which is also an {@link EventEmitter}).
* All stream data events ("data", "file", "directory", "symlink") are passed a path string.
*
* @param {string} dir
* @param {object} [options]
* @returns {stream.Readable}
*/
function readdirStreamPath (dir, options) {
return readdirStream(dir, options, {});
}
/**
* Asynchronous readdir that returns a {@link stream.Readable} (which is also an {@link EventEmitter}).
* All stream data events ("data", "file", "directory", "symlink") are passed an {@link fs.Stats} object.
*
* @param {string} dir
* @param {object} [options]
* @returns {stream.Readable}
*/
function readdirStreamStat (dir, options) {
return readdirStream(dir, options, { stats: true });
}
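// Usage sketch (only runs when this entry point is executed directly). All six
// exports above take the same `dir` and `options` arguments; the "." directory
// is illustrative.
if (require.main === module) {
  // Promise of path strings
  exports.async('.')
    .then(paths => console.log('async:', paths.length))
    .catch(console.error);
  // Synchronous array of fs.Stats objects
  let stats = exports.sync.stat('.');
  console.log('sync.stat:', stats.length);
  // Readable stream; per the JSDoc above, "data" events receive path strings
  exports.stream('.')
    .on('data', entry => console.log('stream:', entry))
    .on('error', console.error);
}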
'use strict';
const path = require('path');
const globToRegExp = require('glob-to-regexp');
module.exports = normalizeOptions;
let isWindows = /^win/.test(process.platform);
/**
* @typedef {Object} FSFacade
* @property {fs.readdir} readdir
* @property {fs.stat} stat
* @property {fs.lstat} lstat
*/
/**
* Validates and normalizes the options argument
*
* @param {object} [options] - User-specified options, if any
* @param {object} internalOptions - Internal options that aren't part of the public API
*
* @param {number|boolean|function} [options.deep]
* The number of directories to recursively traverse. Any falsy value or negative number will
* default to zero, so only the top-level contents will be returned. Set to `true` or `Infinity`
* to traverse all subdirectories. Or provide a function that accepts a {@link fs.Stats} object
* and returns a truthy value if the directory's contents should be crawled.
*
* @param {function|string|RegExp} [options.filter]
* A function that accepts a {@link fs.Stats} object and returns a truthy value if the data should
* be returned. Or a RegExp or glob string pattern, to filter by file name.
*
* @param {string} [options.sep]
* The path separator to use. By default, the OS-specific separator will be used, but this can be
* set to a specific value to ensure consistency across platforms.
*
* @param {string} [options.basePath]
* The base path to prepend to each result. If empty, then all results will be relative to `dir`.
*
* @param {FSFacade} [options.fs]
* Synchronous or asynchronous facades for Node.js File System module
*
* @param {object} [internalOptions.facade]
* Synchronous or asynchronous facades for various methods, including for the Node.js File System module
*
* @param {boolean} [internalOptions.emit]
* Indicates whether the reader should emit "file", "directory", and "symlink" events
*
* @param {boolean} [internalOptions.stats]
* Indicates whether the reader should emit {@link fs.Stats} objects instead of path strings
*
* @returns {object}
*/
function normalizeOptions (options, internalOptions) {
if (options === null || options === undefined) {
options = {};
}
else if (typeof options !== 'object') {
throw new TypeError('options must be an object');
}
let recurseDepth, recurseFn, recurseRegExp, recurseGlob, deep = options.deep;
if (deep === null || deep === undefined) {
recurseDepth = 0;
}
else if (typeof deep === 'boolean') {
recurseDepth = deep ? Infinity : 0;
}
else if (typeof deep === 'number') {
if (deep < 0 || isNaN(deep)) {
throw new Error('options.deep must be a positive number');
}
else if (Math.floor(deep) !== deep) {
throw new Error('options.deep must be an integer');
}
else {
recurseDepth = deep;
}
}
else if (typeof deep === 'function') {
recurseDepth = Infinity;
recurseFn = deep;
}
else if (deep instanceof RegExp) {
recurseDepth = Infinity;
recurseRegExp = deep;
}
else if (typeof deep === 'string' && deep.length > 0) {
recurseDepth = Infinity;
recurseGlob = globToRegExp(deep, { extended: true, globstar: true });
}
else {
throw new TypeError('options.deep must be a boolean, number, function, regular expression, or glob pattern');
}
let filterFn, filterRegExp, filterGlob, filter = options.filter;
if (filter !== null && filter !== undefined) {
if (typeof filter === 'function') {
filterFn = filter;
}
else if (filter instanceof RegExp) {
filterRegExp = filter;
}
else if (typeof filter === 'string' && filter.length > 0) {
filterGlob = globToRegExp(filter, { extended: true, globstar: true });
}
else {
throw new TypeError('options.filter must be a function, regular expression, or glob pattern');
}
}
let sep = options.sep;
if (sep === null || sep === undefined) {
sep = path.sep;
}
else if (typeof sep !== 'string') {
throw new TypeError('options.sep must be a string');
}
let basePath = options.basePath;
if (basePath === null || basePath === undefined) {
basePath = '';
}
else if (typeof basePath === 'string') {
// Append a path separator to the basePath, if necessary
if (basePath && basePath.slice(-1) !== sep) {
basePath += sep;
}
}
else {
throw new TypeError('options.basePath must be a string');
}
// Convert the basePath to POSIX (forward slashes)
// so that glob pattern matching works consistently, even on Windows
let posixBasePath = basePath;
if (posixBasePath && sep !== '/') {
posixBasePath = posixBasePath.replace(new RegExp('\\' + sep, 'g'), '/');
/* istanbul ignore if */
if (isWindows) {
// Convert Windows root paths (C:\) and UNCs (\\) to POSIX root paths
posixBasePath = posixBasePath.replace(/^([a-zA-Z]\:\/|\/\/)/, '/');
}
}
// Determine which facade methods to use
let facade;
if (options.fs === null || options.fs === undefined) {
// The user didn't provide their own facades, so use our internal ones
facade = internalOptions.facade;
}
else if (typeof options.fs === 'object') {
// Merge the internal facade methods with the user-provided `fs` facades
facade = Object.assign({}, internalOptions.facade);
facade.fs = Object.assign({}, internalOptions.facade.fs, options.fs);
}
else {
throw new TypeError('options.fs must be an object');
}
return {
recurseDepth,
recurseFn,
recurseRegExp,
recurseGlob,
filterFn,
filterRegExp,
filterGlob,
sep,
basePath,
posixBasePath,
facade,
emit: !!internalOptions.emit,
stats: !!internalOptions.stats,
};
}
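// Usage sketch (only runs when this file is executed directly). Shows how the
// different forms of `options.deep` and `options.filter` are normalized; the
// facade object is a minimal stand-in for the internal async/sync facades.
if (require.main === module) {
  let facade = { fs: require('fs') };
  console.log(normalizeOptions({ deep: true }, { facade }).recurseDepth);        // Infinity
  console.log(normalizeOptions({ deep: 2 }, { facade }).recurseDepth);           // 2
  console.log(normalizeOptions({ deep: 'sub/**' }, { facade }).recurseGlob);     // RegExp built from the glob
  console.log(normalizeOptions({ filter: /\.js$/ }, { facade }).filterRegExp);   // /\.js$/
}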
'use strict';
const call = require('./call');
module.exports = stat;
/**
* Retrieves the {@link fs.Stats} for the given path. If the path is a symbolic link,
* then the Stats of the symlink's target are returned instead. If the symlink is broken,
* then the Stats of the symlink itself are returned.
*
* @param {object} fs - Synchronous or asynchronous facade for the "fs" module
* @param {string} path - The path to return stats for
* @param {function} callback
*/
function stat (fs, path, callback) {
let isSymLink = false;
call.safe(fs.lstat, path, (err, lstats) => {
if (err) {
// fs.lstat threw an error
return callback(err);
}
try {
isSymLink = lstats.isSymbolicLink();
}
catch (err2) {
// lstats.isSymbolicLink() threw an error
// (probably because fs.lstat returned an invalid result)
return callback(err2);
}
if (isSymLink) {
// Try to resolve the symlink
symlinkStat(fs, path, lstats, callback);
}
else {
// It's not a symlink, so return the stats as-is
callback(null, lstats);
}
});
}
/**
* Retrieves the {@link fs.Stats} for the target of the given symlink.
* If the symlink is broken, then the Stats of the symlink itself are returned.
*
* @param {object} fs - Synchronous or asynchronous facade for the "fs" module
* @param {string} path - The path of the symlink to return stats for
* @param {object} lstats - The stats of the symlink
* @param {function} callback
*/
function symlinkStat (fs, path, lstats, callback) {
call.safe(fs.stat, path, (err, stats) => {
if (err) {
// The symlink is broken, so return the stats for the link itself
return callback(null, lstats);
}
try {
// Return the stats for the resolved symlink target,
// and override the `isSymbolicLink` method to indicate that it's a symlink
stats.isSymbolicLink = () => true;
}
catch (err2) {
// Setting stats.isSymbolicLink threw an error
// (probably because fs.stat returned an invalid result)
return callback(err2);
}
callback(null, stats);
});
}
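// Usage sketch (only runs when this file is executed directly). Node's
// callback-based `fs` module already matches the facade shape (`lstat` plus
// `stat`), so the helper can be exercised against the current directory.
if (require.main === module) {
  stat(require('fs'), '.', (err, stats) => {
    if (err) {
      console.error(err);
    }
    else {
      console.log('isDirectory:', stats.isDirectory(), 'isSymbolicLink:', stats.isSymbolicLink());
    }
  });
}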
'use strict';
module.exports = readdirStream;
const DirectoryReader = require('../directory-reader');
let streamFacade = {
fs: require('fs'),
forEach: require('../async/for-each'),
async: true
};
/**
* Returns the {@link stream.Readable} of an asynchronous {@link DirectoryReader}.
*
* @param {string} dir
* @param {object} [options]
* @param {object} internalOptions - Internal options that aren't part of the public API
*/
function readdirStream (dir, options, internalOptions) {
internalOptions.facade = streamFacade;
let reader = new DirectoryReader(dir, options, internalOptions);
return reader.stream;
}
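// Usage sketch (only runs when this file is executed directly). The returned
// Readable emits the usual stream events; the empty object is the required
// internal-options argument.
if (require.main === module) {
  readdirStream('.', { deep: false }, {})
    .on('data', entry => console.log('entry:', entry))
    .on('end', () => console.log('done'))
    .on('error', console.error);
}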
'use strict';
module.exports = syncForEach;
/**
* A facade that allows {@link Array.forEach} to be called as though it were asynchronous.
*
* @param {array} array - The array to iterate over
* @param {function} iterator - The function to call for each item in the array
* @param {function} done - The function to call when all iterators have completed
*/
function syncForEach (array, iterator, done) {
array.forEach(item => {
iterator(item, () => {
// Note: No error-handling here because this is currently only ever called
// by DirectoryReader, which never passes an `error` parameter to the callback.
// Instead, DirectoryReader emits an "error" event if an error occurs.
});
});
done();
}
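// Usage sketch (only runs when this file is executed directly). This facade is
// paired with the synchronous `fs` facade, so every iterator callback has
// already run by the time `done()` is invoked after the loop.
if (require.main === module) {
  syncForEach([1, 2, 3], (n, next) => {
    console.log('item', n);
    next();
  }, () => {
    console.log('all items processed');
  });
}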
'use strict';
const fs = require('fs');
const call = require('../call');
/**
* A facade around {@link fs.readdirSync} that allows it to be called
* the same way as {@link fs.readdir}.
*
* @param {string} dir
* @param {function} callback
*/
exports.readdir = function (dir, callback) {
// Make sure the callback is only called once
callback = call.once(callback);
try {
let items = fs.readdirSync(dir);
callback(null, items);
}
catch (err) {
callback(err);
}
};
/**
* A facade around {@link fs.statSync} that allows it to be called
* the same way as {@link fs.stat}.
*
* @param {string} path
* @param {function} callback
*/
exports.stat = function (path, callback) {
// Make sure the callback is only called once
callback = call.once(callback);
try {
let stats = fs.statSync(path);
callback(null, stats);
}
catch (err) {
callback(err);
}
};
/**
* A facade around {@link fs.lstatSync} that allows it to be called
* the same way as {@link fs.lstat}.
*
* @param {string} path
* @param {function} callback
*/
exports.lstat = function (path, callback) {
// Make sure the callback is only called once
callback = call.once(callback);
try {
let stats = fs.lstatSync(path);
callback(null, stats);
}
catch (err) {
callback(err);
}
};
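// Usage sketch (only runs when this file is executed directly). Each facade
// method mirrors its asynchronous counterpart's callback signature, so callers
// never need to branch on sync vs. async.
if (require.main === module) {
  exports.readdir('.', (err, items) => {
    console.log('readdir:', err || items.length);
  });
  exports.lstat('.', (err, stats) => {
    console.log('lstat isDirectory:', err || stats.isDirectory());
  });
}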
'use strict';
module.exports = readdirSync;
const DirectoryReader = require('../directory-reader');
let syncFacade = {
fs: require('./fs'),
forEach: require('./for-each'),
sync: true
};
/**
* Returns the buffered output from a synchronous {@link DirectoryReader}.
*
* @param {string} dir
* @param {object} [options]
* @param {object} internalOptions - Internal options that aren't part of the public API
*/
function readdirSync (dir, options, internalOptions) {
internalOptions.facade = syncFacade;
let reader = new DirectoryReader(dir, options, internalOptions);
let stream = reader.stream;
let results = [];
let data = stream.read();
while (data !== null) {
results.push(data);
data = stream.read();
}
return results;
}
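// Usage sketch (only runs when this file is executed directly). The empty
// object is the internal-options argument; `{ stats: true }` switches results
// from path strings to fs.Stats objects.
if (require.main === module) {
  let paths = readdirSync('.', { deep: false }, {});
  console.log('paths:', paths.length);
  let stats = readdirSync('.', { deep: false }, { stats: true });
  console.log('first entry is a file:', stats.length > 0 && stats[0].isFile());
}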