Elphel / elphel-apps-autocampars

Commit 1b799ea6, authored Nov 18, 2016 by Andrey Filippov

    making http requests really parallel

parent 2877732d

Showing 1 changed file with 40 additions and 98 deletions:
src/autocampars.py (+40, -98)
@@ -35,35 +35,26 @@ import os
 import urlparse
 import urllib
 import time
-import socket
-import shutil
+#import shutil
 import sys
 import subprocess
-import select
 import urllib2
 import xml.etree.ElementTree as ET
+import threading
+import Queue
 import time
 import random

 PYDIR =       "/usr/local/bin"
 VERILOG_DIR = "/usr/local/verilog"
 WWW_SCRIPT =  "autocampars.py"
-# shout(pydir+"/test_mcntrl.py @"+verilogdir+"/hargs-eyesis")
-"""
-TEST_URLS = ["http://xmission.com",
-             "http://www3.elphel.com",
-             "http://wiki.elphel.com"]
-"""
+TIMEOUT = 20 # seconds
 def process_py393(args):
-#    print ('process_py393 (',args,')')
-#    print (args['include'][0])
     cmd = PYDIR + '/test_mcntrl.py'
-#    print('0. calling ', cmd)
     try:
         cmd += ' @' + VERILOG_DIR + '/' + args['include'][0]
     except:
         pass
-#    print('1. calling ', cmd)
     try:
         if len(args['other']) > 0:
             cmd += ' ' + args['other'][0]
@@ -76,21 +67,16 @@ def process_py393(args):
     fmt = """<?xml version="1.0"?>
 <result>%d</result>
 """
 #    print('fmt=',fmt)
-    """
-    b=0;
-    with open ('/dev/urandom','rb') as randf:
-        for _ in range(4):
-            b = (b << 8)+ord(randf.read(1))
-    random.seed(b)
-    time.sleep(random.random()*5.0)
-    """
     print (fmt%(rslt))
-#    print ('<!--random.random()*5.0 = ',random.random()*5.0,' -->')
     print ('<!--process_py393 (',args,')-->')
     print ('<!-- called ',cmd,'-->')
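The camera-side handler replies with the one-element XML document built from fmt above; a controller can recover the integer status the same way the (now removed) remote_py393 did. A minimal sketch, assuming a reply string shaped like that <result> document:

    import xml.etree.ElementTree as ET

    reply = '<?xml version="1.0"?>\n<result>0</result>\n'   # example reply body
    try:
        status = int(ET.fromstring(reply).text)             # text of the <result> element
    except (ET.ParseError, ValueError):
        status = -1                                         # same fallback remote_py393 used
    print (status)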
 def remote_parallel393(args, host, timeout=0): #fp in sec
-    if not isinstance(host,(list,tuple)):
-        host = [host]
+    if not isinstance(args,(list,tuple)):
+        host = [host]
+    else:
+        host = [host]*len(args)
     if not isinstance(args,(list,tuple)):
         args = [args]*len(host)
     argshosts = zip(args,host)
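The loop that follows (first lines of the next hunk) turns each (args, host) pair into a GET request against the camera's copy of this script. For a single pair, the URL looks roughly like this (hypothetical address and parameters):

    import urllib

    host = "192.168.0.161"                                  # hypothetical camera address
    args = {"cmd": "py393", "include": "hargs-eyesis"}      # hypothetical query parameters
    url = "http://" + host + "/" + "autocampars.py" + "?" + urllib.urlencode(args)
    print (url)   # e.g. http://192.168.0.161/autocampars.py?cmd=py393&include=hargs-eyesis
                  # (parameter order may vary)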
@@ -100,92 +86,48 @@ def remote_parallel393(args, host, timeout=0): #fp in sec
     for i in argshosts:
         i[0]['cmd'] = 'py393'
         urls.append("http://"+i[1]+"/"+WWW_SCRIPT+"?"+urllib.urlencode(i[0]))
-    """
-    try:
-        urls += TEST_URLS
-    except:
-        pass
-    print ('urls=',urls)
-    """
     return remote_parallel_urls(urls=urls, timeout=timeout)

-def remote_parallel_urls(urls, timeout=0): #fp in sec
-    rqs = []
-    pending = [True]*len(urls)
-    rslts = [None]*len(urls)
-    for url in urls:
-        time.sleep(random.random()*1.0)
-        rqs.append(urllib2.urlopen(url))
-    while any(pending): #implement timeout
-        if (timeout):
-#        if True:
-            ready_to_read, _, _ = select.select( #ready_to_write, in_error
-                  [rqs[i] for i,t in enumerate(pending) if t], # potential_readers,
-                  [],                                          # potential_writers,
-                  [],                                          # potential_errs,
-                  timeout)
-        else:
-            ready_to_read, _, _ = select.select( #ready_to_write, in_error
-                  [rqs[i] for i,t in enumerate(pending) if t], # potential_readers,
-                  [],                                          # potential_writers,
-                  [])                                          # potential_errs,
-        print ('len(ready_to_read)= ',len(ready_to_read))
-        if not ready_to_read: # timeout
-#            break
-            print ('==== @ ',time.time())
-            continue
-        #find indices of the ready
-        for fd in ready_to_read:
-            fi = rqs.index(fd)
-            print ('--Got response from ',urls[fi],' (',fi,') @ ',time.time())
-            rslts[fi] = r = fd.read()
-            pending[fi] = False
-    print ('results=',rslts)
-    return rslts #if None in rslts - likely timeout happened
-
-def remote_py393(args, host):
-    print ("remote_py393(",args,',',host,')')
-    if isinstance(host,(list,tuple)):
-        rslts = remote_parallel393(args, host)
-    else:
-        args['cmd'] = 'py393'
-        uea = urllib.urlencode(args)
-        url = "http://"+host+"/"+WWW_SCRIPT+"?"+uea
-        print ("opening url:", url)
-        rslts = [urllib2.urlopen(url).read()]
-    print ('Received:',rslts)
-    for i, r in enumerate(rslts):
-        try:
-            rslts[i] = int(ET.fromstring(r).text)
-        except:
-            rslts[i] = -1
-    return rslts
+def remote_parallel_urls(urls, timeout=0): #timeout will restart for each next url
+    def read_url(index, queue, url):
+        queue.put((index, urllib2.urlopen(url).read()))
+    queue = Queue.Queue()
+    for index, url in enumerate(urls):
+        thread = threading.Thread(target=read_url, args=(index, queue, url))
+        thread.daemon = True
+        thread.start()
+    rslts = [None]*len(urls)
+    if not timeout:
+        timeout = None
+    for _ in urls:
+        try:
+            rslt = queue.get(block=True, timeout=timeout)
+            rslts[rslt[0]] = rslt[1]
+        except:
+            break
+    return rslts #if None in rslts - likely timeout happened
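The replacement above is the point of the commit: each urllib2.urlopen() now runs in its own daemon thread and pushes (index, body) onto a Queue, so the blocking connect and read phases overlap instead of effectively serializing as they did when urlopen() was called from a single loop before select() ever saw the sockets. A standalone sketch of the same pattern (hypothetical URLs, not the project's code), timed to show that wall time tracks the slowest response rather than the sum:

    import threading, Queue, time, urllib2

    def fetch_all(urls, timeout=None):
        def worker(index, queue, url):
            queue.put((index, urllib2.urlopen(url).read()))      # runs in its own thread
        queue = Queue.Queue()
        for index, url in enumerate(urls):
            t = threading.Thread(target=worker, args=(index, queue, url))
            t.daemon = True                                      # don't keep the process alive for stuck requests
            t.start()
        rslts = [None] * len(urls)
        for _ in urls:
            try:
                index, body = queue.get(block=True, timeout=timeout)
                rslts[index] = body
            except Queue.Empty:                                  # timeout expired with requests still pending
                break
        return rslts

    start = time.time()
    bodies = fetch_all(["http://example.com"] * 3, timeout=20)   # hypothetical targets
    print ("fetched %d of %d in %.2f s" % (sum(b is not None for b in bodies),
                                           3, time.time() - start))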
 def remote_wget(url, host, timeout=0): #split_list_arg(sys.argv[1]))
     print ("remote_wget(",args,',',host,')')
     if not isinstance(host,(list,tuple)):
         host = [host]
-    if (len(host) > 1) or (len(url) > 1):
-        if len(host) > len(url):
-            url += [url[-1]]*(len(host)-len(url))
-        elif len(url) > len(url):
-            host += [host[-1]]*(len(url)-len(host))
-        argshosts = zip(args,host)
-        print ('remote_wget(args=',args,' host=',host,')')
-        print ('argshosts=',argshosts)
-        urls = []
-        for i in argshosts:
-            urls.append("http://"+i[1]+"/"+i[0])
-        rslts = remote_parallel_urls(urls=urls, timeout=timeout)
-    else:
-        print ("opening url:", url)
-        rslts = [urllib2.urlopen(url).read()]
-        print ('Received:'+rslts)
+    if len(host) > len(url):
+        url += [url[-1]]*(len(host)-len(url))
+    elif len(url) > len(url):
+        host += [host[-1]]*(len(url)-len(host))
+    argshosts = zip(args,host)
+    print ('remote_parallel393(args=',args,' host=',host,')')
+    print ('argshosts=',argshosts)
+    urls = []
+    for i in argshosts:
+        urls.append("http://"+i[1]+"/"+i[0])
+    rslts = remote_parallel_urls(urls=urls, timeout=timeout)
 # parse results here
     return rslts

 def process_http():
     try:
@@ -222,7 +164,7 @@ def process_cmdline():
         except:
             pass
         print ('args=',args)
-        remote_py393(args, split_list_arg(sys.argv[1]))
+        remote_parallel393(args=args, host=split_list_arg(sys.argv[1]), timeout=TIMEOUT)
     elif sys.argv[2] == "wget":
         urls = sys.argv[3:]
         remote_wget(urls, split_list_arg(sys.argv[1]))
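On the command line, the py393 path now goes straight to remote_parallel393 with the module-level TIMEOUT. Roughly, for a hypothetical first argument "192.168.0.161,192.168.0.162", and assuming split_list_arg (not shown in this diff) splits it into a host list, the new dispatch line amounts to:

    hosts = ["192.168.0.161", "192.168.0.162"]    # assumed result of split_list_arg(sys.argv[1])
    rslts = remote_parallel393(args=args,         # args as assembled earlier in process_cmdline
                               host=hosts,
                               timeout=TIMEOUT)   # one reply body (or None on timeout) per host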