Elphel / tensorflow-feed-from-gpu · Commits
Commit 1873e403 authored Jan 16, 2020 by Oleg Dzhimiev
not too many changes
parent ed88f719
Showing 3 changed files with 13 additions and 8 deletions (+13, -8)
README.md (+1, -1)
inference_base.cpp (+3, -1)
main.cpp (+9, -6)
README.md
@@ -8,4 +8,4 @@ From **Eclipse (2019-12)**:
 * File > Open Projects from File System...
 * Directory... > navigate to project's root > Finish
-Tried a few times - indexer wouldn't work sometimes.
+Tried importing a few times - indexer does not work sometimes.
inference_base.cpp
@@ -49,6 +49,7 @@ int InferenceBase::Init(string videoStream)
         return -1;
     }
+    /*
     if (ReadGraph() != 0)
     {
         LOG(ERROR) << "Could not load inference graph";
@@ -68,6 +69,7 @@ int InferenceBase::Init(string videoStream)
     auto formatStruct = d_reader->format();
     width = formatStruct.width;
     height = formatStruct.height;
+    */
     isInitialized = true;
     return 0;
@@ -118,4 +120,4 @@ void InferenceBase::RunInferenceOnStream()
             LOG(INFO) << "Speed: " << to_string(fps).substr(0, 5);
         }
     }
-}
\ No newline at end of file
+}
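The commit comments out the graph-loading and video-reader setup inside Init() with a /* ... */ block. For reference only, a minimal sketch of what a ReadGraph()-style helper typically does with the standard TensorFlow C++ API; the name ReadGraphSketch, the graph_path argument, and the return codes are placeholders, not taken from this repository:

    // Sketch only: a typical frozen-graph loader using the TensorFlow C++ API.
    #include <memory>
    #include <string>
    #include "tensorflow/core/platform/env.h"
    #include "tensorflow/core/platform/logging.h"
    #include "tensorflow/core/public/session.h"

    int ReadGraphSketch(const std::string &graph_path,
                        std::unique_ptr<tensorflow::Session> &session) {
        tensorflow::GraphDef graph_def;
        // Read the frozen .pb file from disk into a GraphDef.
        tensorflow::Status status = tensorflow::ReadBinaryProto(
            tensorflow::Env::Default(), graph_path, &graph_def);
        if (!status.ok()) {
            LOG(ERROR) << "Could not load inference graph: " << status.ToString();
            return -1;
        }
        // Create a session and attach the graph to it.
        session.reset(tensorflow::NewSession(tensorflow::SessionOptions()));
        status = session->Create(graph_def);
        if (!status.ok()) {
            LOG(ERROR) << "Could not create session: " << status.ToString();
            return -1;
        }
        return 0;
    }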
main.cpp
@@ -21,12 +21,10 @@ int main(int argc, char *argv[])
         return -1;
     }
     cout << "Hello world!\n";
     const String keys =
         "{d display |1 | view video while objects are detected}"
         //"{t tensorrt|false | use tensorrt}"
-        "{i int8|false| use INT8 (requires callibration)}"
+        //"{i int8|false| use INT8 (requires callibration)}"
         "{v video | | video for detection}"
         "{graph ||frozen graph location}"
         "{labels ||trained labels filelocation}";
@@ -38,16 +36,21 @@ int main(int argc, char *argv[])
     int showWindow = parser.get<int>("d");
     String video_file = parser.get<String>("v");
     //bool is_tensor_rt = parser.get<bool>("t");
-    bool is_int8 = parser.get<bool>("i");
+    // bool is_int8 = parser.get<bool>("i");
     String LABELS = parser.get<String>("labels");
     String GRAPH = parser.get<String>("graph");
     unique_ptr<InferenceBase> infer((InferenceBase *) new InferenceTensorflow(LABELS, GRAPH));
     infer->set_debug(showWindow);
     cout << "Init()\n";
     infer->Init(video_file);
     infer->RunInferenceOnStream();
     // never reached?
     cout << "EXIT 0\n";
     //infer->RunInferenceOnStream();
     return 0;
 }
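The keys string above follows OpenCV's cv::CommandLineParser syntax ("{name alias | default | help}"). As a self-contained illustration of how such options are read, a minimal sketch; the option names mirror the ones above, but the program itself is illustrative and not part of this repository:

    // Sketch only: how a "{d display |1 | ...}" key string is consumed
    // by cv::CommandLineParser.
    #include <iostream>
    #include <opencv2/core.hpp>

    int main(int argc, char *argv[]) {
        const cv::String keys =
            "{d display |1 | view video while objects are detected}"
            "{v video   |  | video for detection}";
        cv::CommandLineParser parser(argc, argv, keys);
        int showWindow = parser.get<int>("d");                 // -d=0 or --display=0
        cv::String video_file = parser.get<cv::String>("v");   // -v=... or --video=...
        if (!parser.check()) {   // reports conversion/parsing errors, if any
            parser.printErrors();
            return -1;
        }
        std::cout << "display=" << showWindow << ", video=" << video_file << "\n";
        return 0;
    }

Invoked, for example, as ./demo --display=0 --video=test.mp4.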