Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Submit feedback
Sign in
Toggle navigation
N
node-bigstream
Project
Project
Details
Activity
Releases
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
3
Merge Requests
3
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
bs
node-bigstream
Commits
822c6e2f
Commit
822c6e2f
authored
Nov 20, 2017
by
Kamron Aroonrua
💬
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
update plugins
parent
ceabb20d
Hide whitespace changes
Inline
Side-by-side
Showing
10 changed files
with
602 additions
and
180 deletions
+602
-180
job-creator.js
plugins/di/di-gistda-air/job-creator.js
+149
-0
package.json
plugins/di/di-gistda-air/package.json
+3
-1
perform.js
plugins/di/di-gistda-air/perform.js
+142
-131
perform.js
plugins/di/di-http-request/perform.js
+1
-1
index.js
plugins/di/di-sftp-filesync/index.js
+13
-0
package.json
plugins/di/di-sftp-filesync/package.json
+14
-0
perform.js
plugins/di/di-sftp-filesync/perform.js
+140
-0
perform.js
plugins/do/do-storage/perform.js
+10
-0
perform.js
plugins/dt/dt-gistda-air/perform.js
+127
-46
test_queue_receive.js
test/test_queue_receive.js
+3
-1
No files found.
plugins/di/di-gistda-air/job-creator.js
0 → 100644
View file @
822c6e2f
// FTP source configuration for the GISTDA air-quality feed.
// SECURITY NOTE(review): credentials and absolute local paths are hard-coded;
// they should come from configuration/environment, not source control.
var Client = require('ftp');
var async = require('async');
var fs = require('fs');

var host = "203.150.19.51";
var port = "21";
var user = "bs";
var pwd = "UF13kczHdCPXpBb";
var main_folder = "GISTDA_SOS_DATA";
var init_observed_date = "2017-10-18";
var init_observed_time = "09:00:00";
var di_plugin = "sftp-filesync";
var data_source = "gistda-air";
var job_path = "/Users/apple/Project@Nectec/i-bitz/jobs/gistda_air_job";

// BUG FIX: previously `var stationTable = require('hashTable')` was immediately
// re-declared as the instance (`var stationTable = new stationTable()`), shadowing
// the constructor; and the npm package is published lower-case as 'hashtable'
// (see this package.json), so the mixed-case specifier fails on case-sensitive
// filesystems such as Linux.
var HashTable = require('hashtable');

// Connection options for the node-ftp client below.
var config = {
    host: host,
    port: port,
    user: user,
    password: pwd
};
var c = new Client();
// Maps a station's ftp_folder_mapping -> { latitude, longitude }.
var stationTable = new HashTable();
// Load the station profile GeoJSON and index every station's coordinates by its
// FTP folder name, so create_job_profile() can look them up per station.
// NOTE(review): path is machine-specific and the read is asynchronous — the FTP
// 'ready' handler can fire before this table is populated; confirm ordering.
fs.readFile('/Users/apple/Project@Nectec/i-bitz/data_sample/FromKPrasong/Gistda_Air_Station_Profile.json', function (err, data) {
    if (err) throw err;
    var profile = JSON.parse(data);
    var features = profile.features;
    for (var i = 0; i < features.length; i++) {
        var properties = features[i].properties;
        // Key is the station's FTP folder name (e.g. "STATION1_KORAT").
        stationTable.put(properties.ftp_folder_mapping, {
            latitude: properties.lat,
            longitude: properties.long
        });
    }
    // var location = stationTable.get('STATION1_KORAT');
});
// Once the FTP session is up, list the station folders under main_folder and
// emit three job profiles per station: one for the .dat sensor file and one for
// each of the TOP/BOTTOM camera image folders.
c.on('ready', function () {
    c.list(main_folder, function (err, list) {
        if (err) throw err;
        // NOTE(review): entries are not filtered by type here, so any plain file
        // in main_folder would also get job profiles — confirm the folder only
        // contains STATIONxx_* directories.
        async.eachSeries(list, function (element, callback) {
            // create job profile for .dat
            create_job_profile(element.name, element.name, element.name, false);
            // prepare the image path
            // Derive the numeric station id from a name like "STATION1_KORAT":
            // strip the "STATION" prefix, then keep everything before the first '_'.
            var station_no = element.name;
            station_no = station_no.replace("STATION", "");
            var n = station_no.search(/_/i);
            station_no = station_no.substr(0, n);
            // Camera images live under <station>/PIC<no>/TOP<no> and .../BOTTOM<no>.
            var image_path_top = element.name + "/PIC" + station_no + "/TOP" + station_no;
            var filename_top = element.name + "-PIC" + station_no + "-TOP" + station_no;
            create_job_profile(element.name, image_path_top, filename_top, true);
            var image_path_bottom = element.name + "/PIC" + station_no + "/BOTTOM" + station_no;
            var filename_bottom = element.name + "-PIC" + station_no + "-BOTTOM" + station_no;
            create_job_profile(element.name, image_path_bottom, filename_bottom, true);
            console.log(element.name + ", " + image_path_top + " vs " + image_path_bottom);
            callback();
        });
    });
});
// Open the FTP session; the 'ready' handler registered above then drives
// job-profile generation.
c.connect(config);

// Hourly cron trigger shared (by reference) by every generated job profile.
var trigger = {
    "type": "cron",
    "cmd": "0 * * * *"
};

// Connection/bootstrap parameters embedded into each job's data_in section.
// create_job_profile() later adds the per-job "path" key to this same object.
var param = {
    "source": data_source,
    "url": host,
    "port": port,
    "user": user,
    "password": pwd,
    "init_observed_date": init_observed_date,
    "init_observed_time": init_observed_time
};
function
create_job_profile
(
station_id
,
path
,
filename
,
is_image
)
{
var
job
=
{};
var
job_id
=
"sds."
+
data_source
+
"-"
+
filename
;
job
[
"job_id"
]
=
job_id
;
job
[
"active"
]
=
true
;
job
[
"trigger"
]
=
trigger
;
var
location
=
stationTable
.
get
(
station_id
);
var
data_in
=
{};
data_in
[
"type"
]
=
data_source
;
var
profile
=
{};
profile
[
"station_id"
]
=
station_id
;
profile
[
"latitude"
]
=
location
.
latitude
;
profile
[
"longitude"
]
=
location
.
longitude
;
data_in
[
"profile"
]
=
profile
;
param
[
"path"
]
=
main_folder
+
"/"
+
path
;
data_in
[
"param"
]
=
param
;
if
(
!
is_image
)
{
var
data_transform
=
{};
data_transform
[
"type"
]
=
data_source
;
}
else
{
var
data_transform
=
[];
var
transfrom1
=
{};
transfrom1
[
"type"
]
=
data_source
;
var
script
=
{};
script
[
"script"
]
=
"data=Array.isArray(src.data)?src.data.pop():src.dat"
;
var
transfrom2
=
{};
transfrom2
[
"type"
]
=
"transform"
;
transfrom2
[
"param"
]
=
script
;
data_transform
.
push
(
transfrom1
);
data_transform
.
push
(
transfrom2
);
}
var
data_out
=
{};
data_out
[
"type"
]
=
"storage"
;
// data_out["type"] = "dir";
var
data_out_param
=
{};
data_out_param
[
"storage_name"
]
=
"sds.gistda-air"
;
// data_out_param["path"] = "/Users/Naiyana/testdata";
data_out
[
"param"
]
=
data_out_param
;
job
[
"data_in"
]
=
data_in
;
job
[
"data_transform"
]
=
data_transform
;
job
[
"data_out"
]
=
data_out
;
fs
.
writeFile
(
job_path
+
"/"
+
job_id
+
".json"
,
JSON
.
stringify
(
job
),
function
(
err
)
{
if
(
err
)
{
return
console
.
log
(
err
);
}
console
.
log
(
"The file was saved!"
);
});
}
plugins/di/di-gistda-air/package.json
View file @
822c6e2f
...
...
@@ -16,6 +16,8 @@
"path"
:
"0.12.7"
,
"fs"
:
"0.0.1-security"
,
"dateformat"
:
"2.0.0"
,
"ftp"
:
"0.3.10"
"ftp"
:
"0.3.10"
,
"moment"
:
"2.18.1"
,
"hashtable"
:
"2.0.2"
}
}
plugins/di/di-gistda-air/perform.js
View file @
822c6e2f
var
path
=
require
(
'path'
);
var
fs
=
require
(
'fs'
);
var
async
=
require
(
'async'
);
var
dateFormat
=
require
(
'dateformat'
);
var
Client
=
require
(
'ftp'
);
// di-gistda-air perform (pre-refactor version kept in this diff).
// Lists the FTP root, downloads every *.dat file (excluding "debug" files)
// newer than the persisted 'lasttransaction' timestamp, accumulates them into
// `result.data`, and reports via response.success/reject/error.
//
// context  : pipeline context (jobconfig, transaction, task.memstore).
// response : success(result, output_type) | reject() | error(err).
function execute_function(context, response){
    var job_id = context.jobconfig.job_id;
    var transaction_id = context.transaction.id;
    var param = context.jobconfig.data_in.param;
    var memstore = context.task.memstore
    var output_type = 'object/gistda-air'
    // FTP credentials come straight from the job's data_in.param block.
    var config = {
        host: param.url,
        port: param.port,
        user: param.user,
        password: param.password
    };
    let result = {
        "object_type": param.source,
        "data":[]
    };
    // Newest file date seen this run; persisted as the next 'lasttransaction'.
    let maxdate;
    var c = new Client();
    c.on('ready', function() {
        c.list(function(err, list) {
            // NOTE(review): `throw` inside these async callbacks is not caught by
            // the caller — it will take down the process rather than reach
            // response.error(); confirm this is the intended failure mode.
            if (err) throw err;
            memstore.getItem('lasttransaction', function(err, value) {
                if (err) throw err;
                var latestDate;
                if (typeof value == 'undefined') {
                    // First run: start from the configured initial observation time.
                    var latestDateStr = param.init_observed_date + ' ' + param.init_observed_time; //'2016-12-20T10:00:00+04:00';
                    latestDate = new Date(latestDateStr);
                } else {
                    // var date = value.substring(0, 10);
                    // var time = value.substring(11,19)
                    // latestDate = new Date(date + ' ' + time);
                    latestDate = new Date(value);
                }
                console.log(value + " !!! " + latestDate);
                // Process listing entries one at a time so downloads are serialized.
                async.eachSeries(list, function(element, callback) {
                    if (typeof element !== 'undefined') {
                        if (element.type !== 'd') { // filter out directories
                            var filename = element.name;
                            var filedate = element.date;
                            var filetype = element.type;
                            // Only non-debug .dat files qualify.
                            if (path.extname(filename) === '.dat' && filename.indexOf("debug") == -1) {
                                if (filedate - latestDate > 0) { // filter out old files
                                    c.get(filename, function(err, stream) {
                                        if (err) throw err;
                                        var data = '';
                                        stream.setEncoding('utf8');
                                        console.log("downloading .... : " + filename + ", " + dateFormat(filedate, "isoDateTime"));
                                        stream.on('data', function(chunk) {
                                            // download each individual chunk of the file
                                            if (chunk != '') data = data + chunk;
                                        });
                                        stream.on('end', function() {
                                            // insert a data file
                                            result.data.push({
                                                "filename": filename,
                                                "value": data
                                            });
                                            // Track the newest file date seen so far.
                                            if (typeof maxdate == 'undefined') {
                                                maxdate = filedate;
                                            } else {
                                                if (filedate - maxdate > 0) {
                                                    maxdate = filedate;
                                                }
                                            }
                                            // Persist progress before moving to the next entry.
                                            memstore.setItem('lasttransaction', dateFormat(maxdate, "isoDateTime"), function(err){
                                                if (err) throw err;
                                                callback();
                                            });
                                        });
                                        // stream.pipe(fs.createWriteStream(filename));
                                    });
                                } else {
                                    async.setImmediate(callback); //callback(null);
                                }
                            } else {
                                async.setImmediate(callback); //callback(null);
                            }
                        } else async.setImmediate(callback); //callback(null);
                    } else async.setImmediate(callback); //callback(null);
                },
                function(err) {
                    // Final callback: report the batch (or lack of one) and hang up.
                    if (err) {
                        response.error(err);
                    } else {
                        if (result.data.length == 0) response.reject(); // for no data
                        else response.success(result, output_type);
                        c.end();
                    }
                }); // async close
            }); // memstore close
        });
    });
    c.connect(config);
    //response.reject();
}
module.exports = execute_function;
var
path
=
require
(
'path'
);
var
fs
=
require
(
'fs'
);
var
async
=
require
(
'async'
);
var
dateFormat
=
require
(
'dateformat'
);
var
Client
=
require
(
'ftp'
);
// di-gistda-air perform (refactored version in this diff).
// Lists param.path on the FTP server and downloads files newer than the
// per-path cursor ("<path>-lasttransaction" in memstore): qualifying *.dat
// sensor files (Every_5m, or non-debug MS700) and *.jpg camera images.
// Each downloaded file is annotated with the station profile
// (station_id/latitude/longitude) carried in jobconfig.data_in.profile.
//
// context  : pipeline context (jobconfig, transaction, task.memstore).
// response : success(result, output_type) | reject() | error(err).
function execute_function(context, response){
    var job_id = context.jobconfig.job_id;
    var transaction_id = context.transaction.id;
    var profile = context.jobconfig.data_in.profile;
    var param = context.jobconfig.data_in.param;
    var memstore = context.task.memstore
    var output_type = 'object/gistda-air'
    var config = {
        host: param.url,
        port: param.port,
        user: param.user,
        password: param.password
    };
    let result = {
        "object_type": param.source,
        "data":[]
    };
    // Newest file date seen this run; persisted as the next cursor value.
    let maxdate;
    var c = new Client();
    // Per-path cursor key so each station/folder job tracks its own progress.
    var key = param.path + '-lasttransaction';
    c.on('ready', function() {
        c.list(param.path, function(err, list) {
            // NOTE(review): `throw` inside these async callbacks bypasses
            // response.error() — confirm crashing the worker is acceptable.
            if (err) throw err;
            memstore.getItem(key, function(err, value) {
                if (err) throw err;
                var latestDate;
                if (!value) {
                    // First run: start from the configured initial observation time.
                    var latestDateStr = param.init_observed_date + ' ' + param.init_observed_time; //'2016-12-20T10:00:00+04:00';
                    latestDate = new Date(latestDateStr);
                } else {
                    latestDate = new Date(value);
                }
                // Process listing entries one at a time so downloads are serialized.
                async.eachSeries(list, function(element, callback) {
                    if (typeof element !== 'undefined') {
                        if (element.type !== 'd') { // filter out directories
                            var filename = element.name;
                            var filedate = element.date;
                            var filetype = element.type;
                            // if ((path.extname(filename) === '.dat' || path.extname(filename) === '.jpg') && filename.indexOf("debug") == -1) {
                            // Qualify: .dat files from the Every_5m feed, or non-debug
                            // MS700 .dat files, or any .jpg camera image.
                            if ((path.extname(filename) === '.dat' && (filename.indexOf("Every_5m") > 0 || (filename.indexOf("MS700") > 0 && filename.indexOf("debug") == -1))) || path.extname(filename) === '.jpg') {
                                var type = 'text';
                                if (path.extname(filename) === '.jpg') type = 'image';
                                if (filedate - latestDate > 0) { // filter out old files
                                    c.get(param.path + "/" + filename, function(err, stream) {
                                        if (err) throw err;
                                        var data = '';
                                        // NOTE(review): utf8 decoding is applied to .jpg
                                        // payloads too — verify binary data survives this.
                                        stream.setEncoding('utf8');
                                        console.log("downloading .... : " + filename + ", " + dateFormat(filedate, "isoDateTime"));
                                        stream.on('data', function(chunk) {
                                            // download each individual chunk of the file
                                            if (chunk != '') data = data + chunk;
                                        });
                                        stream.on('end', function() {
                                            // insert a data file, tagged with the station profile
                                            result.data.push({
                                                "filename": filename,
                                                "station_id": profile.station_id,
                                                "latitude": profile.latitude,
                                                "longitude": profile.longitude,
                                                "type": type,
                                                "observeddatetime": dateFormat(filedate, 'yyyy-mm-dd HH:MM:ss'),
                                                "value": data
                                            });
                                            // Track the newest file date seen so far.
                                            if (typeof maxdate == 'undefined') {
                                                maxdate = filedate;
                                            } else {
                                                if (filedate - maxdate > 0) {
                                                    maxdate = filedate;
                                                }
                                            }
                                            // Persist progress before moving to the next entry.
                                            memstore.setItem(key, dateFormat(maxdate, 'yyyy-mm-dd HH:MM:ss'), function(err){
                                                if (err) throw err;
                                                callback();
                                            });
                                        });
                                        // stream.pipe(fs.createWriteStream(filename));
                                    });
                                } else {
                                    async.setImmediate(callback); //callback(null);
                                }
                            } else {
                                async.setImmediate(callback); //callback(null);
                            }
                        } else async.setImmediate(callback); //callback(null);
                    } else async.setImmediate(callback); //callback(null);
                },
                function(err) {
                    // Final callback: report the batch (or lack of one) and hang up.
                    if (err) {
                        response.error(err);
                    } else {
                        if (result.data.length == 0) response.reject(); // for no data
                        else response.success(result, output_type);
                        c.end();
                    }
                }); // async close
            }); // memstore close
        });
    });
    c.connect(config);
    //response.reject();
}
module.exports = execute_function;
plugins/di/di-http-request/perform.js
View file @
822c6e2f
...
...
@@ -10,7 +10,7 @@ function execute_function(context,response){
var
url
=
param
.
url
;
var
reject
=
true
;
if
(
param
.
reject
==
'false'
){
reject
=
false
;}
if
(
param
.
reject
==
false
){
reject
=
false
;}
var
encode
=
'utf8'
;
if
(
param
.
encoding
==
'binary'
){
...
...
plugins/di/di-sftp-filesync/index.js
0 → 100644
View file @
822c6e2f
var
util
=
require
(
'util'
);
var
DIPlugin
=
require
(
'../di-plugin'
);
function
DITask
(
context
){
DIPlugin
.
call
(
this
,
context
);
this
.
name
=
"sftp-filesync"
;
this
.
output_type
=
""
;
}
util
.
inherits
(
DITask
,
DIPlugin
);
DITask
.
prototype
.
perform
=
require
(
'./perform'
);
module
.
exports
=
DITask
;
plugins/di/di-sftp-filesync/package.json
0 → 100644
View file @
822c6e2f
{
"name"
:
"di-sftp-filesync"
,
"version"
:
"1.0.0"
,
"description"
:
""
,
"main"
:
"index.js"
,
"scripts"
:
{
"test"
:
"echo
\"
Error: no test specified
\"
&& exit 1"
},
"author"
:
""
,
"license"
:
"ISC"
,
"dependencies"
:
{
"ssh2-sftp-client"
:
"^2.0.1"
}
}
plugins/di/di-sftp-filesync/perform.js
0 → 100644
View file @
822c6e2f
var
Client
=
require
(
"ssh2-sftp-client"
);
var
path
=
require
(
'path'
);
/**
 * di-sftp-filesync perform: pull the single oldest not-yet-synced regular file
 * from an SFTP directory and hand its contents to the pipeline.
 *
 * param fields read: host, port (default 22), username (default ""), password
 * (default ""), dir (default "~"), encoding ("binary" -> Buffer result,
 * otherwise utf8 string), continue (only an explicit boolean false disables
 * the continue flag), filter ({ext, filetype} — comma-separated lists or
 * arrays matched against the file extension by rulesMatch()).
 *
 * Sync state is persisted in memstore under 'lastmodify' as {fname, tts}
 * (tts = modifyTime of the last delivered file); "continue": true in the
 * success meta signals that more matching files remain.
 */
function perform_function(context, response){
    var job_id = context.jobconfig.job_id;
    var transaction_id = context.transaction.id;
    var param = context.jobconfig.data_in.param;
    // NOTE(review): other DI plugins in this repo read context.task.memstore —
    // confirm context.job.memstore is intentional here.
    var memstore = context.job.memstore;
    var output_type = 'object'
    var prm_host = param.host;
    var prm_port = param.port || 22;
    var prm_user = param.username || "";
    var prm_pass = param.password || "";
    var prm_dir = param.dir || "~";
    var prm_encoding = param.encoding || "binary";
    var prm_continue = (typeof param.continue == 'boolean' && param.continue.toString() == 'false') ? false : true;
    //filter.ext|filetype
    var prm_filter = param.filter || {};
    var meta = {};
    // Sync cursor: name and modifyTime (ms) of the last file delivered.
    var last_mod = {'fname': '', 'tts': 0};
    var fs_continue = false;
    // BUG FIX: `new Buffer(0)` is deprecated and unsafe; use Buffer.alloc().
    var buff_out = Buffer.alloc(0);
    // Load the persisted cursor (best effort — on error we fall back to tts 0).
    memstore.getItem('lastmodify', function(err, value){
        if (value){
            last_mod = value;
        }
        getData();
    });
    // Connect, pick the oldest new file that matches the filters, stream it down.
    function getData(){
        var sftp = new Client();
        sftp.connect({
            host: prm_host,
            port: prm_port,
            username: prm_user,
            password: prm_pass
        }).then(() => {
            return sftp.list(prm_dir + '/');
        }).then((fList) => {
            var f_target = null;
            var last_tts = 0;
            var sync_list = [];
            fList.forEach((file) => {
                // Regular files only ('-'), newer than the cursor, passing filters.
                if (file.modifyTime > last_mod.tts && file.type == '-' && rulesMatch(prm_filter, file)){
                    sync_list.push(file);
                    // Keep the OLDEST candidate so files are delivered in order,
                    // one per run.
                    if (f_target == null || (file.modifyTime < f_target.modifyTime)){
                        f_target = file;
                        last_tts = file.modifyTime;
                        last_mod.fname = file.name;
                    }
                }
            });
            // More than one pending file means the scheduler should run us again.
            if (sync_list.length > 1 && prm_continue){
                fs_continue = true;
            }
            last_mod.tts = last_tts;
            if (f_target){
                meta = {
                    'filename': f_target.name,
                    'fileext': path.extname(f_target.name),
                    'filesize': f_target.size,
                    'modify_ts': Math.round(f_target.modifyTime / 1000)
                };
                return sftp.get(prm_dir + '/' + f_target.name, null, null);
            } else {
                return null;
            }
        }).then((data) => {
            if (data){
                // Accumulate the download stream into buff_out.
                data.on('data', (dat) => {
                    buff_out = Buffer.concat([buff_out, dat]);
                });
                data.on('end', () => {
                    sftp.end();
                    // Persist the advanced cursor, then report the payload.
                    memstore.setItem('lastmodify', last_mod, function(err){
                        var result = (prm_encoding == 'binary') ? buff_out : buff_out.toString('utf8');
                        response.success(result, {"meta": meta, "continue": fs_continue});
                    });
                });
            } else {
                // Nothing new to sync this run.
                sftp.end();
                response.reject();
            }
        }).catch((err) => {
            sftp.end();
            response.error(err);
            console.log(err, 'catch error');
        });
    }
}
/**
 * Decide whether a directory entry passes the configured filename filters.
 *
 * r  - filter object; r.ext and/or r.filetype may each be an array of
 *      extensions or a comma-separated string (e.g. ".dat,.jpg").
 * fd - directory entry; only fd.name is inspected.
 *
 * Returns true when every present rule accepts the file's extension.
 * NOTE(review): the filetype rule ALSO matches on the file extension, exactly
 * like r.ext — if it was meant to check fd.type, that is a latent bug; confirm.
 */
function rulesMatch(r, fd) {
    var fname = fd.name;
    var fext = path.extname(fname);
    var ret = true;
    if (r.ext) {
        var extlist = Array.isArray(r.ext) ? r.ext : r.ext.split(',');
        if (extlist.indexOf(fext) < 0) {
            ret = false;
        }
    }
    if (r.filetype) {
        var ftlist = Array.isArray(r.filetype) ? r.filetype : r.filetype.split(',');
        if (ftlist.indexOf(fext) < 0) {
            ret = false;
        }
    }
    return ret;
}
module
.
exports
=
perform_function
;
plugins/do/do-storage/perform.js
View file @
822c6e2f
...
...
@@ -31,6 +31,15 @@ function perform_function(context,request,response){
"_ts"
:
Math
.
round
((
new
Date
).
getTime
()
/
1000
)
}
if
(
meta
&&
typeof
meta
==
'object'
)
{
Object
.
keys
(
meta
).
forEach
((
item
)
=>
{
if
(
!
item
.
startsWith
(
'_'
)){
dc_meta
[
item
]
=
meta
[
item
];
}
});
}
var
idx
=
0
;
async
.
whilst
(
function
()
{
return
idx
<
data
.
length
;
},
...
...
@@ -61,6 +70,7 @@ function perform_function(context,request,response){
if
(
!
err
){
response
.
success
();
}
else
{
console
.
log
(
err
);
response
.
error
(
"storage error"
);
}
}
...
...
plugins/dt/dt-gistda-air/perform.js
View file @
822c6e2f
var
async
=
require
(
'async'
);
function
perform_function
(
context
,
request
,
response
){
var
job_id
=
context
.
jobconfig
.
job_id
;
var
transaction_id
=
context
.
transaction
.
id
;
...
...
@@ -12,53 +14,132 @@ function perform_function(context,request,response){
var
nfiles
=
data
.
data
.
length
;
var
i
=
0
;
while
(
i
<
nfiles
)
{
var
filename
=
data
.
data
[
i
].
filename
;
var
filecontent
=
data
.
data
[
i
].
value
;
var
arr
=
filecontent
.
toString
().
split
(
"
\
r
\n
"
);
var
arr_type
=
arr
[
1
].
split
(
","
);
var
arr_unit
=
arr
[
2
].
split
(
","
);
var
arr_value_type
=
arr
[
3
].
split
(
","
);
var
ndata
=
arr_type
.
length
;
var
col
=
1
;
let
_result
=
{
"object_type"
:
"sds"
,
"station_id"
:
filename
,
// need to change to exact station
"latitude"
:
""
,
"longitude"
;
""
,
"altitude"
:
""
,
"data"
:[]
};
while
(
col
<
ndata
)
{
var
row
=
4
;
let
values
=
[];
while
(
row
<
arr
.
length
-
1
)
{
var
rdata
=
arr
[
row
].
split
(
","
);
values
.
push
({
"observeddatetime"
:
rdata
[
0
].
replace
(
'"'
,
''
).
replace
(
'"'
,
''
),
"value"
:
rdata
[
col
]
});
row
++
;
}
_result
.
data
.
push
({
"type"
:
arr_type
[
col
].
replace
(
'"'
,
''
).
replace
(
'"'
,
''
),
"unit"
:
arr_unit
[
col
].
replace
(
'"'
,
''
).
replace
(
'"'
,
''
),
"value_type"
:
arr_value_type
[
col
].
replace
(
'"'
,
''
).
replace
(
'"'
,
''
),
"values"
:
values
});
col
++
;
}
result
.
push
(
_result
);
i
++
;
}
response
.
success
(
result
,
output_type
);
//response.reject();
//response.error("error message")
async
.
whilst
(
function
()
{
return
i
<
nfiles
;
},
function
(
callback
)
{
var
filename
=
data
.
data
[
i
].
filename
;
var
station_id
=
data
.
data
[
i
].
station_id
;
var
latitude
=
data
.
data
[
i
].
latitude
;
var
longitude
=
data
.
data
[
i
].
longitude
;
var
data_type
=
data
.
data
[
i
].
type
;
var
filecontent
=
data
.
data
[
i
].
value
;
var
observeddatetime
=
data
.
data
[
i
].
observeddatetime
;
i
++
;
let
_result
=
{
"object_type"
:
"sds"
,
"station_id"
:
station_id
,
// need to change to exact station
"latitude"
:
latitude
,
"longitude"
:
longitude
,
"data"
:[]
};
if
(
data_type
==
'text'
)
{
_result
=
perform_text
(
_result
,
filecontent
);
result
.
push
(
_result
);
callback
();
}
else
if
(
data_type
==
'image'
)
{
getImage
(
filecontent
).
then
((
base64
)
=>
{
var
values
=
[];
var
avalue
=
{};
avalue
[
"observeddatetime"
]
=
observeddatetime
;
avalue
[
"value"
]
=
base64
;
values
.
push
(
avalue
);
_result
.
data
.
push
({
"type"
:
"image"
,
"values"
:
values
});
result
.
push
(
_result
);
callback
();
}).
catch
((
err
)
=>
{
throw
err
});
}
},
function
(
err
,
n
)
{
response
.
success
(
result
,
output_type
);
}
);
// while (i < nfiles) {
// var filename = data.data[i].filename;
// var data_type = data.data[i].type;
// var filecontent = data.data[i].value;
// let _result = {
// "object_type":"sds",
// "station_id" : filename, // need to change to exact station
// "latitude":"",
// "longitude":"",
// "altitude":"",
// "data":[]
// };
// if (data_type == 'text')
// _result = perform_text(_result, filecontent);
// else if (data_type == 'image') {
// // _result = perform_image(_result, filecontent);
// getImage(filecontent).then((base64) => {
// _result.data.push({"values":base64});
// console.log("after get image");
// }).catch((err) => {
// throw err
// });
// }
// console.log("will result");
// result.push(_result);
// i++;
// }
// response.success(result,output_type);
// //response.reject();
// //response.error("error message")
}
/**
 * Parse a GISTDA .dat text payload into _result.data.
 *
 * Layout assumed (CRLF-terminated): row 0 header, row 1 quoted column types,
 * row 2 quoted units, row 3 quoted value types, rows 4.. data rows whose first
 * cell is a quoted timestamp; the final line (after the trailing CRLF) is
 * skipped. Column 0 is the timestamp, so series start at column 1.
 *
 * _result     - accumulator object with a `data` array (mutated and returned).
 * filecontent - raw file content (anything with a toString()).
 */
function perform_text(_result, filecontent) {
    var lines = filecontent.toString().split("\r\n");
    // Remove the first two double-quote characters (the surrounding quotes).
    var stripQuotes = function (s) {
        return s.replace('"', '').replace('"', '');
    };
    var typeRow = lines[1].split(",");
    var unitRow = lines[2].split(",");
    var valueTypeRow = lines[3].split(",");
    for (var col = 1; col < typeRow.length; col++) {
        let values = [];
        // Last line is the empty remainder after the trailing CRLF — skip it.
        for (var row = 4; row < lines.length - 1; row++) {
            var cells = lines[row].split(",");
            values.push({
                "observeddatetime": stripQuotes(cells[0]),
                "value": cells[col]
            });
        }
        _result.data.push({
            "type": stripQuotes(typeRow[col]),
            "unit": stripQuotes(unitRow[col]),
            "value_type": stripQuotes(valueTypeRow[col]),
            "values": values
        });
    }
    return _result;
}
// Convert an image payload to a base64 data URI and append it to _result.data.
// NOTE(review): this is broken/dead code — getImage() is asynchronous, so
// _result is returned BEFORE the .then() pushes anything (race), and the
// `throw` inside .catch() becomes an unhandled promise rejection. The caller
// in perform_function now calls getImage() directly instead; kept only for
// interface compatibility.
function perform_image(_result, filecontent) {
    getImage(filecontent).then((base64) => {
        _result.data.push({
            "values": base64
        });
    }).catch((err) => {
        throw err
    });
    return _result;
}
/**
 * Encode raw image file content as a base64 "data:image" URI.
 *
 * filecontent - raw bytes/string of the image file.
 * Returns a Promise resolving to "data:image;base64,<base64>".
 *
 * Fixes: `new Buffer(...)` is deprecated/unsafe -> Buffer.from(); the explicit
 * `new Promise` wrapper around an immediately-resolved value is replaced by
 * Promise.resolve() (same resolved value, same thenable interface).
 */
function getImage(filecontent) {
    return Promise.resolve(
        "data:image;base64," + Buffer.from(filecontent).toString('base64')
    );
}
module
.
exports
=
perform_function
;
test/test_queue_receive.js
View file @
822c6e2f
var
ctx
=
require
(
'../context'
);
var
amqp_cfg
=
ctx
.
config
.
amqp
;
var
AMQP_URL
=
'amqp://bigmaster.igridproject.info'
;
var
QueueReceiver
=
ctx
.
getLib
(
'lib/amqp/queuereceiver'
);
var
server
=
new
QueueReceiver
({
url
:
amqp_cfg
.
url
,
url
:
AMQP_URL
,
name
:
'bs_jobs_queue'
});
server
.
set_execute_function
(
function
(
data
,
callback
){
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment