Administrator / ai · Commits
Commit efd1cb7d authored Dec 29, 2021 by Ahmad Nemati
init
parent c8cfdf0b
Showing 5 changed files with 212 additions and 5 deletions (+212 -5)
ai.js            +2   -2
brain.js         +22  -2
brainDetail.bat  +1   -0
brainDetail.js   +183 -0
run.js           +4   -1
ai.js  (View file @ efd1cb7d)
...
@@ -85,7 +85,7 @@ let obj
     obj = d
     obj.signal = data[i]
     obj.maxReserve = maxReserve
     obj.maxAllocatedMoney = maxAllocatedMoney
...
@@ -224,7 +224,7 @@ let obj
     if (needPrintData)
     {
-        fs.writeFile('detail/' + data[0].signal, JSON.stringify(saveData, null, 2), 'utf8', function (err) {
+        fs.writeFile('detail/' + opt.platform + '&' + type + '.json', JSON.stringify(saveData, null, 2), 'utf8', function (err) {
         });
     }
...
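The visible change here is the output path: each detail dump is now keyed by opt.platform and type instead of the first row's signal, which lines up with the platform + '&' + type keys that brainDetail.js builds below. A minimal sketch of the resulting file name, using hypothetical values for opt.platform and type (not taken from this commit):

    // Hypothetical values, for illustration only:
    let opt = { platform: 'config1-BTCUSDT.csv' }
    let type = 'long'
    // Old name:  'detail/' + data[0].signal
    // New name:  'detail/' + opt.platform + '&' + type + '.json'
    console.log('detail/' + opt.platform + '&' + type + '.json')
    // -> detail/config1-BTCUSDT.csv&long.json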
brain.js  (View file @ efd1cb7d)
...
@@ -18,6 +18,17 @@ async function init() {
     {
     }
+    try {
+        await fs.mkdirSync('detailsJson')
+    } catch (e) {
+    }
     for (let i = 0; i < cores; i++) {
         let res = await fs.existsSync(prefix + i)
...
@@ -74,7 +85,7 @@ async function check() {
             await fs.renameSync('allFiles/' + files[i], worker + '/' + files[i])
             await makeStatusNew(worker, '1')
             //console.log('node run.js ' + worker +' '+configFileName)
-            shell.exec('node run.js ' + worker + ' ' + configFileName, {async: true});
+            shell.exec('node run.js ' + worker + ' ' + configFileName + ' ' + JSON.stringify([]), {async: true});
         }
...
@@ -130,7 +141,7 @@ async function copyAllFiles()
     }
     }
+    await writeFileDetail([])
     init()
 }
...
@@ -148,6 +159,15 @@ function makeStatusNew(directory, status) {
     });
 }
+function writeFileDetail(data) {
+    return new Promise(function (resolve, reject) {
+        fs.writeFile('detail.json', JSON.stringify(data), 'utf8', function (err) {
+            if (err) reject(err);
+            else resolve(true);
+        });
+    });
+}
 async function findOffWorker() {
     let arr = []
...
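The reworked shell.exec call appends a third, JSON-stringified argument (an empty array here) to the run.js command line; run.js, changed further below, reads it as args[2] before overwriting it with the contents of detail.json. A minimal sketch of how a spawned worker process would see those arguments, assuming the same invocation shape and hypothetical worker/config names:

    // Sketch of the child-side view of: node run.js work3 config1 []
    // ('work3' and 'config1' are hypothetical names, not from this commit.)
    var args = process.argv.slice(2);
    let directory = args[0];         // 'work3'
    let configFileName = args[1];    // 'config1'
    let checks = args[2];            // '[]' (JSON-stringified array)
    console.log(JSON.parse(checks)); // -> []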
brainDetail.bat  0 → 100644  (View file @ efd1cb7d)
node --max-old-space-size=9999999999 brainDetail.js
\ No newline at end of file
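Node's --max-old-space-size flag is given in megabytes, so the committed value is far larger than any real machine's memory and in practice the heap limit ends up bounded by what is actually available. A value sized to the machine, e.g. an 8 GB old space, would be the more conventional form:

    node --max-old-space-size=8192 brainDetail.js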
brainDetail.js  0 → 100644  (View file @ efd1cb7d)
let fs = require('fs')
let _ = require('lodash')
var shell = require('shelljs');
let prefix = 'work'
let cores = 15

//init()
copyAllFiles()

async function init() {
    try {
        await fs.mkdirSync('detail')
    } catch (e) {
    }
    try {
        await fs.mkdirSync('detailsJson')
    } catch (e) {
    }
    for (let i = 0; i < cores; i++) {
        let res = await fs.existsSync(prefix + i)
        if (!res) {
            await fs.mkdirSync(prefix + i)
        }
        await makeStatus(prefix + i)
    }
    check()
}

function makeStatus(directory) {
    return new Promise(function (resolve, reject) {
        fs.writeFile(directory + '/work' + '.txt', '0', 'utf8', function (err) {
            if (err) reject(err);
            else resolve(true);
        });
    });
}

async function check() {
    let files = await fs.readdirSync('allFiles')
    if (files.length === 0)
        process.exit(0)
    let size = await fs.read
    for (let i = 0; i < files.length; i++) {
        if (!files[i].includes('csv')) {
            await fs.unlinkSync('allFiles/' + files[i])
            continue
        }
        let stats = await fs.statSync('allFiles/' + files[i])
        if (stats.size < 1000) {
            await fs.unlinkSync('allFiles/' + files[i])
            continue
        }
        // console.log(stats.size)
        let worker = await findOffWorker()
        // console.log(worker)
        let configFileName = files[i].split('-')[0]
        if (worker !== null) {
            await fs.renameSync('allFiles/' + files[i], worker + '/' + files[i])
            await makeStatusNew(worker, '1')
            //console.log('node run.js ' + worker +' '+configFileName)
            // console.log('node run.js ' + worker + ' ' + configFileName +' '+JSON.stringify(checks))
            shell.exec('node run.js ' + worker + ' ' + configFileName, {async: true});
        }
    }
    await sleep(250)
    return check()
    // console.log(files)
}

function writeFileDetail(data) {
    return new Promise(function (resolve, reject) {
        fs.writeFile('detail.json', JSON.stringify(data), 'utf8', function (err) {
            if (err) reject(err);
            else resolve(true);
        });
    });
}

async function copyAllFiles() {
    try {
        await fs.mkdirSync('detail')
    } catch (e) {
    }
    try {
        await fs.mkdirSync('detailsJson')
    } catch (e) {
    }
    let configs = await fs.readdirSync('detailsJson')
    // console.log(configs)
    let arr = []
    for (let z = 0; z < configs.length; z++) {
        if (!configs[z].includes('json')) {
            await fs.unlinkSync('detailsJson/' + configs[z])
            continue
        }
        let res = await fs.readFileSync('./detailsJson/' + configs[z], 'utf8');
        res = JSON.parse(res)
        let obj = {}
        for (let i = 0; i < res.length; i++) {
            console.log(res[i].platform.split('-'))
            obj.configFile = res[i].platform.split('-')[0]
            obj.csvFile = res[i].platform.split('-')[1]
            obj.check = res[i].platform + '&' + res[i].type
            arr.push(obj.check)
        }
        // let files = await fs.readdirSync('files')
        // let size = await fs.read
        // for (let i = 0; i < files.length; i++) {
        //
        //
        console.log('files/' + obj.csvFile)
        await fs.copyFileSync('files/' + obj.csvFile, 'allFiles' + '/' + obj.configFile + '-' + obj.csvFile)
        //
        //
        // }
    }
    await writeFileDetail(arr)
    init()
}

function sleep(millis) {
    return new Promise(resolve => setTimeout(resolve, millis));
}

function makeStatusNew(directory, status) {
    return new Promise(function (resolve, reject) {
        fs.writeFile(directory + '/work' + '.txt', status, 'utf8', function (err) {
            if (err) reject(err);
            else resolve(true);
        });
    });
}

async function findOffWorker() {
    let arr = []
    for (let i = 0; i < cores; i++) {
        let res = await fs.readFileSync('./' + prefix + i + '/work.txt', 'utf8');
        if (res.includes('0'))
            arr.push(prefix + i)
    }
    arr = _.shuffle(arr)
    if (arr.length !== 0)
        return arr[0]
    else return null
}
\ No newline at end of file
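For orientation, this new file is a small dispatcher: copyAllFiles() turns every entry in detailsJson/ into a '<configFile>-<csvFile>' job file under allFiles/ and records the '<platform>&<type>' keys in detail.json, init() prepares the workN directories whose work.txt holds '0' (idle) or '1' (busy), and check() hands each job to an idle worker with node run.js. A minimal sketch of the file-naming round trip, using a hypothetical job name in the shape copyAllFiles() produces:

    // Hypothetical '<configFile>-<csvFile>' job name, for illustration only.
    let name = 'config1-BTCUSDT.csv'
    let configFileName = name.split('-')[0]  // 'config1'
    let csvPart = name.split('-')[1]         // 'BTCUSDT.csv'
    // check() would then launch: node run.js <workerDir> config1
    console.log(configFileName, csvPart)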
run.js  (View file @ efd1cb7d)
...
@@ -4,6 +4,7 @@ let _ = require('lodash')
 let directory
 let configFileName
 let csvFile
+let checks = []
 let drawDown = [30, 40, 50]
 //const perf = require('execution-time')();
 run()
...
@@ -12,9 +13,11 @@ async function run() {
     var args = process.argv.slice(2);
     directory = args[0]
     configFileName = args[1]
+    checks = args[2]
     await makeStatus(directory, '1')
     // perf.start()
     let configs = await fs.readFileSync('./configs/' + configFileName, 'utf8')
+    checks = await fs.readFileSync('./detail.json', 'utf8')
     configs = JSON.parse(configs)
     let arr = []
     let files = await fs.readdirSync('./' + directory)
...
@@ -22,7 +25,7 @@ async function run() {
     for (let i = 0; i < files.length; i++)
         if (files[i].includes('.csv')) {
             csvFile = files[i]
-            arr.push(csv.parse(directory, files[i], configs.data, configs.risk, configs.pfTrade, configs.side, configs.riskFreeLevel, configs.detail, configs.onlyShortLong))
+            arr.push(csv.parse(directory, files[i], configs.data, configs.risk, configs.pfTrade, configs.side, configs.riskFreeLevel, JSON.parse(checks), configs.onlyShortLong))
         }
     arr = await Promise.all(arr)
...
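Net effect of these three hunks: checks is declared, seeded from argv, then replaced with the contents of detail.json, and the parsed array takes the slot that configs.detail used to occupy in the csv.parse call. A minimal sketch of that detail.json round trip, with hypothetical entries in the '<platform>&<type>' shape that brainDetail.js writes:

    let fs = require('fs')

    // Hypothetical detail.json contents (same shape as writeFileDetail(arr) in brainDetail.js):
    let saved = ['config1-BTCUSDT.csv&long', 'config2-ETHUSDT.csv&short']
    fs.writeFileSync('detail.json', JSON.stringify(saved), 'utf8')

    // run.js side: read the file back and hand the parsed array on to csv.parse
    let checks = fs.readFileSync('./detail.json', 'utf8')
    console.log(JSON.parse(checks))  // -> the same two-entry array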