2013-03-02 00:05:03 +01:00
|
|
|
# Python class for controlling wpa_supplicant
|
2014-01-03 13:34:59 +01:00
|
|
|
# Copyright (c) 2013-2014, Jouni Malinen <j@w1.fi>
|
2013-03-02 00:05:03 +01:00
|
|
|
#
|
|
|
|
# This software may be distributed under the terms of the BSD license.
|
|
|
|
# See README for more details.
|
|
|
|
|
|
|
|
import os
|
|
|
|
import time
|
|
|
|
import logging
|
2014-03-08 20:25:47 +01:00
|
|
|
import binascii
|
2013-03-09 12:36:35 +01:00
|
|
|
import re
|
2014-03-08 20:25:47 +01:00
|
|
|
import struct
|
2013-09-28 16:31:54 +02:00
|
|
|
import subprocess
|
2013-03-02 00:05:03 +01:00
|
|
|
import wpaspy
|
|
|
|
|
2013-10-31 11:46:42 +01:00
|
|
|
logger = logging.getLogger()
|
2013-03-02 00:05:03 +01:00
|
|
|
wpas_ctrl = '/var/run/wpa_supplicant'
|
|
|
|
|
|
|
|
class WpaSupplicant:
|
2013-12-30 22:08:25 +01:00
|
|
|
def __init__(self, ifname=None, global_iface=None):
|
2013-03-09 16:34:08 +01:00
|
|
|
self.group_ifname = None
|
2014-10-19 19:56:36 +02:00
|
|
|
self.gctrl_mon = None
|
2013-12-30 22:08:25 +01:00
|
|
|
if ifname:
|
|
|
|
self.set_ifname(ifname)
|
|
|
|
else:
|
|
|
|
self.ifname = None
|
2013-03-02 00:05:03 +01:00
|
|
|
|
2013-06-30 23:13:11 +02:00
|
|
|
self.global_iface = global_iface
|
|
|
|
if global_iface:
|
|
|
|
self.global_ctrl = wpaspy.Ctrl(global_iface)
|
|
|
|
self.global_mon = wpaspy.Ctrl(global_iface)
|
|
|
|
self.global_mon.attach()
|
|
|
|
|
2013-12-30 22:08:25 +01:00
|
|
|
def set_ifname(self, ifname):
    """Attach command and monitor control sockets for the given interface."""
    self.ifname = ifname
    path = os.path.join(wpas_ctrl, ifname)
    self.ctrl = wpaspy.Ctrl(path)
    self.mon = wpaspy.Ctrl(path)
    self.mon.attach()
|
|
|
|
|
|
|
|
def remove_ifname(self):
|
|
|
|
if self.ifname:
|
|
|
|
self.mon.detach()
|
|
|
|
self.mon = None
|
|
|
|
self.ctrl = None
|
|
|
|
self.ifname = None
|
|
|
|
|
2014-04-15 23:27:27 +02:00
|
|
|
def interface_add(self, ifname, config="", driver="nl80211", drv_params=None):
|
2013-12-30 22:08:25 +01:00
|
|
|
try:
|
|
|
|
groups = subprocess.check_output(["id"])
|
|
|
|
group = "admin" if "(admin)" in groups else "adm"
|
|
|
|
except Exception, e:
|
|
|
|
group = "admin"
|
2014-04-15 23:27:27 +02:00
|
|
|
cmd = "INTERFACE_ADD " + ifname + "\t" + config + "\t" + driver + "\tDIR=/var/run/wpa_supplicant GROUP=" + group
|
2013-12-30 23:17:02 +01:00
|
|
|
if drv_params:
|
|
|
|
cmd = cmd + '\t' + drv_params
|
2013-12-30 22:08:25 +01:00
|
|
|
if "FAIL" in self.global_request(cmd):
|
|
|
|
raise Exception("Failed to add a dynamic wpa_supplicant interface")
|
|
|
|
self.set_ifname(ifname)
|
|
|
|
|
|
|
|
def interface_remove(self, ifname):
|
|
|
|
self.remove_ifname()
|
|
|
|
self.global_request("INTERFACE_REMOVE " + ifname)
|
|
|
|
|
2013-03-02 00:05:03 +01:00
|
|
|
def request(self, cmd):
    """Send a control interface command on this interface and return the reply."""
    logger.debug(self.ifname + ": CTRL: " + cmd)
    return self.ctrl.request(cmd)
|
|
|
|
|
2013-06-30 23:13:11 +02:00
|
|
|
def global_request(self, cmd):
|
|
|
|
if self.global_iface is None:
|
|
|
|
self.request(cmd)
|
|
|
|
else:
|
2013-12-30 22:08:25 +01:00
|
|
|
ifname = self.ifname or self.global_iface
|
2014-04-29 13:46:09 +02:00
|
|
|
logger.debug(ifname + ": CTRL(global): " + cmd)
|
2013-06-30 23:13:11 +02:00
|
|
|
return self.global_ctrl.request(cmd)
|
|
|
|
|
2013-03-09 16:34:08 +01:00
|
|
|
def group_request(self, cmd):
|
|
|
|
if self.group_ifname and self.group_ifname != self.ifname:
|
|
|
|
logger.debug(self.group_ifname + ": CTRL: " + cmd)
|
|
|
|
gctrl = wpaspy.Ctrl(os.path.join(wpas_ctrl, self.group_ifname))
|
|
|
|
return gctrl.request(cmd)
|
|
|
|
return self.request(cmd)
|
|
|
|
|
2013-03-02 00:05:03 +01:00
|
|
|
def ping(self):
|
|
|
|
return "PONG" in self.request("PING")
|
|
|
|
|
2014-04-29 13:46:09 +02:00
|
|
|
def global_ping(self):
|
|
|
|
return "PONG" in self.global_request("PING")
|
|
|
|
|
2013-03-02 00:05:03 +01:00
|
|
|
def reset(self):
    """Return the interface to a known clean state between test cases.

    Flushes wpa_supplicant state, restores default configuration values,
    closes any leftover P2P group monitor socket and waits for an ongoing
    driver scan to finish before returning.
    """
    self.dump_monitor()
    res = self.request("FLUSH")
    if not "OK" in res:
        logger.info("FLUSH to " + self.ifname + " failed: " + res)
    # Stop WPS ER and restore defaults that test cases commonly change.
    self.request("WPS_ER_STOP")
    self.request("SET pmf 0")
    self.request("SET external_sim 0")
    self.request("SET hessid 00:00:00:00:00:00")
    self.request("SET access_network_type 15")
    self.request("SET p2p_add_cli_chan 0")
    self.request("SET p2p_no_go_freq ")
    self.request("SET p2p_pref_chan ")
    self.request("SET p2p_no_group_iface 1")
    self.request("SET p2p_go_intent 7")
    self.request("SET ignore_old_scan_res 0")
    # Close any monitor socket left over from a previous P2P group.
    if self.gctrl_mon:
        try:
            self.gctrl_mon.detach()
        except:
            pass
        self.gctrl_mon = None
    self.group_ifname = None
    self.dump_monitor()

    # Wait (up to 60 s) for any scan the driver still has in flight; a
    # lingering scan could pollute the next test case's results.
    iter = 0
    while iter < 60:
        state = self.get_driver_status_field("scan_state")
        if "SCAN_STARTED" in state or "SCAN_REQUESTED" in state:
            logger.info(self.ifname + ": Waiting for scan operation to complete before continuing")
            time.sleep(1)
        else:
            break
        iter = iter + 1
    if iter == 60:
        # Last resort: bounce the interface to clear kernel scan state.
        logger.error(self.ifname + ": Driver scan state did not clear")
        print "Trying to clear cfg80211/mac80211 scan state"
        try:
            cmd = ["sudo", "ifconfig", self.ifname, "down"]
            subprocess.call(cmd)
        except subprocess.CalledProcessError, e:
            logger.info("ifconfig failed: " + str(e.returncode))
            logger.info(e.output)
        try:
            cmd = ["sudo", "ifconfig", self.ifname, "up"]
            subprocess.call(cmd)
        except subprocess.CalledProcessError, e:
            logger.info("ifconfig failed: " + str(e.returncode))
            logger.info(e.output)
    if iter > 0:
        # The ongoing scan could have discovered BSSes or P2P peers
        logger.info("Run FLUSH again since scan was in progress")
        self.request("FLUSH")
        self.dump_monitor()

    if not self.ping():
        logger.info("No PING response from " + self.ifname + " after reset")
|
2013-03-02 00:05:03 +01:00
|
|
|
|
2013-03-09 18:01:56 +01:00
|
|
|
def add_network(self):
|
|
|
|
id = self.request("ADD_NETWORK")
|
|
|
|
if "FAIL" in id:
|
|
|
|
raise Exception("ADD_NETWORK failed")
|
|
|
|
return int(id)
|
|
|
|
|
|
|
|
def remove_network(self, id):
|
|
|
|
id = self.request("REMOVE_NETWORK " + str(id))
|
|
|
|
if "FAIL" in id:
|
|
|
|
raise Exception("REMOVE_NETWORK failed")
|
|
|
|
return None
|
|
|
|
|
2014-02-03 23:08:18 +01:00
|
|
|
def get_network(self, id, field):
|
|
|
|
res = self.request("GET_NETWORK " + str(id) + " " + field)
|
|
|
|
if res == "FAIL\n":
|
|
|
|
return None
|
|
|
|
return res
|
|
|
|
|
2013-03-09 18:01:56 +01:00
|
|
|
def set_network(self, id, field, value):
|
|
|
|
res = self.request("SET_NETWORK " + str(id) + " " + field + " " + value)
|
|
|
|
if "FAIL" in res:
|
|
|
|
raise Exception("SET_NETWORK failed")
|
|
|
|
return None
|
|
|
|
|
|
|
|
def set_network_quoted(self, id, field, value):
|
|
|
|
res = self.request("SET_NETWORK " + str(id) + " " + field + ' "' + value + '"')
|
|
|
|
if "FAIL" in res:
|
|
|
|
raise Exception("SET_NETWORK failed")
|
|
|
|
return None
|
|
|
|
|
2013-11-06 22:35:19 +01:00
|
|
|
def list_networks(self):
|
|
|
|
res = self.request("LIST_NETWORKS")
|
|
|
|
lines = res.splitlines()
|
|
|
|
networks = []
|
|
|
|
for l in lines:
|
|
|
|
if "network id" in l:
|
|
|
|
continue
|
|
|
|
[id,ssid,bssid,flags] = l.split('\t')
|
|
|
|
network = {}
|
|
|
|
network['id'] = id
|
|
|
|
network['ssid'] = ssid
|
|
|
|
network['bssid'] = bssid
|
|
|
|
network['flags'] = flags
|
|
|
|
networks.append(network)
|
|
|
|
return networks
|
|
|
|
|
2014-04-12 17:47:48 +02:00
|
|
|
def hs20_enable(self, auto_interworking=False):
|
2013-10-29 13:20:29 +01:00
|
|
|
self.request("SET interworking 1")
|
|
|
|
self.request("SET hs20 1")
|
2014-04-12 17:47:48 +02:00
|
|
|
if auto_interworking:
|
|
|
|
self.request("SET auto_interworking 1")
|
|
|
|
else:
|
|
|
|
self.request("SET auto_interworking 0")
|
2013-10-29 13:20:29 +01:00
|
|
|
|
2013-04-01 00:01:24 +02:00
|
|
|
def add_cred(self):
|
|
|
|
id = self.request("ADD_CRED")
|
|
|
|
if "FAIL" in id:
|
|
|
|
raise Exception("ADD_CRED failed")
|
|
|
|
return int(id)
|
|
|
|
|
|
|
|
def remove_cred(self, id):
|
|
|
|
id = self.request("REMOVE_CRED " + str(id))
|
|
|
|
if "FAIL" in id:
|
|
|
|
raise Exception("REMOVE_CRED failed")
|
|
|
|
return None
|
|
|
|
|
|
|
|
def set_cred(self, id, field, value):
|
|
|
|
res = self.request("SET_CRED " + str(id) + " " + field + " " + value)
|
|
|
|
if "FAIL" in res:
|
|
|
|
raise Exception("SET_CRED failed")
|
|
|
|
return None
|
|
|
|
|
|
|
|
def set_cred_quoted(self, id, field, value):
|
|
|
|
res = self.request("SET_CRED " + str(id) + " " + field + ' "' + value + '"')
|
|
|
|
if "FAIL" in res:
|
|
|
|
raise Exception("SET_CRED failed")
|
|
|
|
return None
|
|
|
|
|
2014-04-04 21:50:46 +02:00
|
|
|
def get_cred(self, id, field):
|
|
|
|
return self.request("GET_CRED " + str(id) + " " + field)
|
|
|
|
|
2013-11-04 12:09:46 +01:00
|
|
|
def add_cred_values(self, params):
|
2013-10-29 13:20:29 +01:00
|
|
|
id = self.add_cred()
|
2013-11-04 12:09:46 +01:00
|
|
|
|
2013-11-04 12:22:55 +01:00
|
|
|
quoted = [ "realm", "username", "password", "domain", "imsi",
|
2013-12-26 13:08:42 +01:00
|
|
|
"excluded_ssid", "milenage", "ca_cert", "client_cert",
|
2013-11-04 13:39:25 +01:00
|
|
|
"private_key", "domain_suffix_match", "provisioning_sp",
|
2014-03-01 10:44:25 +01:00
|
|
|
"roaming_partner", "phase1", "phase2" ]
|
2013-11-04 12:09:46 +01:00
|
|
|
for field in quoted:
|
|
|
|
if field in params:
|
|
|
|
self.set_cred_quoted(id, field, params[field])
|
|
|
|
|
2014-02-27 13:17:31 +01:00
|
|
|
not_quoted = [ "eap", "roaming_consortium", "priority",
|
2013-11-04 15:32:00 +01:00
|
|
|
"required_roaming_consortium", "sp_priority",
|
2014-03-01 09:10:53 +01:00
|
|
|
"max_bss_load", "update_identifier", "req_conn_capab",
|
|
|
|
"min_dl_bandwidth_home", "min_ul_bandwidth_home",
|
|
|
|
"min_dl_bandwidth_roaming", "min_ul_bandwidth_roaming" ]
|
2013-11-04 12:09:46 +01:00
|
|
|
for field in not_quoted:
|
|
|
|
if field in params:
|
|
|
|
self.set_cred(id, field, params[field])
|
|
|
|
|
2013-10-29 13:20:29 +01:00
|
|
|
return id;
|
|
|
|
|
2014-04-04 22:13:49 +02:00
|
|
|
def select_network(self, id, freq=None):
|
|
|
|
if freq:
|
|
|
|
extra = " freq=" + freq
|
|
|
|
else:
|
|
|
|
extra = ""
|
|
|
|
id = self.request("SELECT_NETWORK " + str(id) + extra)
|
2013-03-27 12:57:48 +01:00
|
|
|
if "FAIL" in id:
|
|
|
|
raise Exception("SELECT_NETWORK failed")
|
|
|
|
return None
|
|
|
|
|
2014-09-01 06:23:35 +02:00
|
|
|
def mesh_group_add(self, id):
|
|
|
|
id = self.request("MESH_GROUP_ADD " + str(id))
|
|
|
|
if "FAIL" in id:
|
|
|
|
raise Exception("MESH_GROUP_ADD failed")
|
|
|
|
return None
|
|
|
|
|
|
|
|
def mesh_group_remove(self):
|
|
|
|
id = self.request("MESH_GROUP_REMOVE " + str(self.ifname))
|
|
|
|
if "FAIL" in id:
|
|
|
|
raise Exception("MESH_GROUP_REMOVE failed")
|
|
|
|
return None
|
|
|
|
|
2014-01-13 19:47:01 +01:00
|
|
|
def connect_network(self, id, timeout=10):
|
2013-03-27 12:57:48 +01:00
|
|
|
self.dump_monitor()
|
|
|
|
self.select_network(id)
|
2014-01-13 19:47:01 +01:00
|
|
|
ev = self.wait_event(["CTRL-EVENT-CONNECTED"], timeout=timeout)
|
2013-03-27 12:57:48 +01:00
|
|
|
if ev is None:
|
|
|
|
raise Exception("Association with the AP timed out")
|
|
|
|
self.dump_monitor()
|
|
|
|
|
2014-04-06 15:52:14 +02:00
|
|
|
def get_status(self, extra=None):
|
|
|
|
if extra:
|
|
|
|
extra = "-" + extra
|
|
|
|
else:
|
|
|
|
extra = ""
|
|
|
|
res = self.request("STATUS" + extra)
|
2013-03-02 00:05:03 +01:00
|
|
|
lines = res.splitlines()
|
2013-03-31 11:33:49 +02:00
|
|
|
vals = dict()
|
2013-03-02 00:05:03 +01:00
|
|
|
for l in lines:
|
2014-02-04 12:22:10 +01:00
|
|
|
try:
|
|
|
|
[name,value] = l.split('=', 1)
|
|
|
|
vals[name] = value
|
|
|
|
except ValueError, e:
|
|
|
|
logger.info(self.ifname + ": Ignore unexpected STATUS line: " + l)
|
2013-03-31 11:33:49 +02:00
|
|
|
return vals
|
|
|
|
|
2014-04-06 15:52:14 +02:00
|
|
|
def get_status_field(self, field, extra=None):
|
|
|
|
vals = self.get_status(extra)
|
2013-03-31 11:33:49 +02:00
|
|
|
if field in vals:
|
|
|
|
return vals[field]
|
2013-03-02 00:05:03 +01:00
|
|
|
return None
|
|
|
|
|
2014-04-06 15:52:14 +02:00
|
|
|
def get_group_status(self, extra=None):
|
|
|
|
if extra:
|
|
|
|
extra = "-" + extra
|
|
|
|
else:
|
|
|
|
extra = ""
|
|
|
|
res = self.group_request("STATUS" + extra)
|
2013-03-09 17:24:32 +01:00
|
|
|
lines = res.splitlines()
|
2013-03-31 11:33:49 +02:00
|
|
|
vals = dict()
|
2013-03-09 17:24:32 +01:00
|
|
|
for l in lines:
|
2014-11-27 18:42:54 +01:00
|
|
|
try:
|
|
|
|
[name,value] = l.split('=', 1)
|
|
|
|
except ValueError:
|
|
|
|
logger.info(self.ifname + ": Ignore unexpected status line: " + l)
|
|
|
|
continue
|
2013-03-31 11:33:49 +02:00
|
|
|
vals[name] = value
|
|
|
|
return vals
|
|
|
|
|
2014-04-06 15:52:14 +02:00
|
|
|
def get_group_status_field(self, field, extra=None):
|
|
|
|
vals = self.get_group_status(extra)
|
2013-03-31 11:33:49 +02:00
|
|
|
if field in vals:
|
|
|
|
return vals[field]
|
2013-03-09 17:24:32 +01:00
|
|
|
return None
|
|
|
|
|
2013-09-28 16:31:54 +02:00
|
|
|
def get_driver_status(self):
|
|
|
|
res = self.request("STATUS-DRIVER")
|
|
|
|
lines = res.splitlines()
|
|
|
|
vals = dict()
|
|
|
|
for l in lines:
|
2014-11-27 18:42:54 +01:00
|
|
|
try:
|
|
|
|
[name,value] = l.split('=', 1)
|
|
|
|
except ValueError:
|
|
|
|
logger.info(self.ifname + ": Ignore unexpected status-driver line: " + l)
|
|
|
|
continue
|
2013-09-28 16:31:54 +02:00
|
|
|
vals[name] = value
|
|
|
|
return vals
|
|
|
|
|
|
|
|
def get_driver_status_field(self, field):
|
|
|
|
vals = self.get_driver_status()
|
|
|
|
if field in vals:
|
|
|
|
return vals[field]
|
|
|
|
return None
|
|
|
|
|
2014-06-10 19:50:30 +02:00
|
|
|
def get_mcc(self):
|
|
|
|
mcc = int(self.get_driver_status_field('capa.num_multichan_concurrent'))
|
|
|
|
return 1 if mcc < 2 else mcc
|
|
|
|
|
2014-03-15 23:18:03 +01:00
|
|
|
def get_mib(self):
|
|
|
|
res = self.request("MIB")
|
|
|
|
lines = res.splitlines()
|
|
|
|
vals = dict()
|
|
|
|
for l in lines:
|
|
|
|
try:
|
|
|
|
[name,value] = l.split('=', 1)
|
|
|
|
vals[name] = value
|
|
|
|
except ValueError, e:
|
|
|
|
logger.info(self.ifname + ": Ignore unexpected MIB line: " + l)
|
|
|
|
return vals
|
|
|
|
|
2013-03-02 00:05:03 +01:00
|
|
|
def p2p_dev_addr(self):
|
2013-03-31 11:33:49 +02:00
|
|
|
return self.get_status_field("p2p_device_address")
|
2013-03-02 00:05:03 +01:00
|
|
|
|
2013-03-09 17:24:32 +01:00
|
|
|
def p2p_interface_addr(self):
|
2013-03-31 11:33:49 +02:00
|
|
|
return self.get_group_status_field("address")
|
2013-03-09 17:24:32 +01:00
|
|
|
|
2014-10-19 19:55:02 +02:00
|
|
|
def own_addr(self):
|
|
|
|
try:
|
|
|
|
res = self.p2p_interface_addr()
|
|
|
|
except:
|
|
|
|
res = self.p2p_dev_addr()
|
|
|
|
return res
|
|
|
|
|
2013-03-02 00:05:03 +01:00
|
|
|
def p2p_listen(self):
|
2013-06-30 23:13:11 +02:00
|
|
|
return self.global_request("P2P_LISTEN")
|
2013-03-02 00:05:03 +01:00
|
|
|
|
2014-04-12 19:03:24 +02:00
|
|
|
def p2p_find(self, social=False, progressive=False, dev_id=None, dev_type=None):
|
2014-01-08 21:25:58 +01:00
|
|
|
cmd = "P2P_FIND"
|
2013-03-02 00:05:03 +01:00
|
|
|
if social:
|
2014-01-08 21:25:58 +01:00
|
|
|
cmd = cmd + " type=social"
|
2014-04-12 19:03:24 +02:00
|
|
|
elif progressive:
|
|
|
|
cmd = cmd + " type=progressive"
|
2014-01-08 21:25:58 +01:00
|
|
|
if dev_id:
|
|
|
|
cmd = cmd + " dev_id=" + dev_id
|
|
|
|
if dev_type:
|
|
|
|
cmd = cmd + " dev_type=" + dev_type
|
|
|
|
return self.global_request(cmd)
|
2013-03-02 00:05:03 +01:00
|
|
|
|
2013-03-17 09:58:48 +01:00
|
|
|
def p2p_stop_find(self):
|
2013-06-30 23:13:11 +02:00
|
|
|
return self.global_request("P2P_STOP_FIND")
|
2013-03-17 09:58:48 +01:00
|
|
|
|
2013-03-02 00:05:03 +01:00
|
|
|
def wps_read_pin(self):
|
2014-04-06 15:31:44 +02:00
|
|
|
self.pin = self.request("WPS_PIN get").rstrip("\n")
|
|
|
|
if "FAIL" in self.pin:
|
|
|
|
raise Exception("Could not generate PIN")
|
2013-03-02 00:05:03 +01:00
|
|
|
return self.pin
|
|
|
|
|
|
|
|
def peer_known(self, peer, full=True):
|
2013-06-30 23:13:11 +02:00
|
|
|
res = self.global_request("P2P_PEER " + peer)
|
2013-03-02 00:05:03 +01:00
|
|
|
if peer.lower() not in res.lower():
|
|
|
|
return False
|
|
|
|
if not full:
|
|
|
|
return True
|
|
|
|
return "[PROBE_REQ_ONLY]" not in res
|
|
|
|
|
2014-01-05 15:42:48 +01:00
|
|
|
def discover_peer(self, peer, full=True, timeout=15, social=True, force_find=False):
    """Run P2P_FIND until the peer shows up or timeout (seconds) expires."""
    logger.info(self.ifname + ": Trying to discover peer " + peer)
    if not force_find and self.peer_known(peer, full):
        return True
    self.p2p_find(social)
    # Poll once per second for up to 'timeout' seconds.
    for _ in range(timeout):
        time.sleep(1)
        if self.peer_known(peer, full):
            return True
    return False
|
|
|
|
|
2013-09-01 18:24:12 +02:00
|
|
|
def get_peer(self, peer):
|
|
|
|
res = self.global_request("P2P_PEER " + peer)
|
|
|
|
if peer.lower() not in res.lower():
|
|
|
|
raise Exception("Peer information not available")
|
|
|
|
lines = res.splitlines()
|
|
|
|
vals = dict()
|
|
|
|
for l in lines:
|
|
|
|
if '=' in l:
|
|
|
|
[name,value] = l.split('=', 1)
|
|
|
|
vals[name] = value
|
|
|
|
return vals
|
|
|
|
|
2013-10-26 19:47:08 +02:00
|
|
|
def group_form_result(self, ev, expect_failure=False, go_neg_res=None):
    """Parse a group formation event into a result dict.

    ev: a P2P-GROUP-STARTED (or, with expect_failure, P2P-GO-NEG-FAILURE)
        monitor event line
    go_neg_res: optional P2P-GO-NEG-SUCCESS event to fold into the result

    As a side effect, records the group interface name and opens a
    monitor socket on it (self.gctrl_mon) when possible.
    """
    if expect_failure:
        if "P2P-GROUP-STARTED" in ev:
            raise Exception("Group formation succeeded when expecting failure")
        exp = r'<.>(P2P-GO-NEG-FAILURE) status=([0-9]*)'
        s = re.split(exp, ev)
        if len(s) < 3:
            # Not a GO negotiation failure event; nothing to report.
            return None
        res = {}
        res['result'] = 'go-neg-failed'
        res['status'] = int(s[2])
        return res

    if "P2P-GROUP-STARTED" not in ev:
        raise Exception("No P2P-GROUP-STARTED event seen")

    # Try the long event format first (includes IP allocation fields);
    # fall back to the short format used by older wpa_supplicant builds.
    exp = r'<.>(P2P-GROUP-STARTED) ([^ ]*) ([^ ]*) ssid="(.*)" freq=([0-9]*) ((?:psk=.*)|(?:passphrase=".*")) go_dev_addr=([0-9a-f:]*) ip_addr=([0-9.]*) ip_mask=([0-9.]*) go_ip_addr=([0-9.]*)'
    s = re.split(exp, ev)
    if len(s) < 11:
        exp = r'<.>(P2P-GROUP-STARTED) ([^ ]*) ([^ ]*) ssid="(.*)" freq=([0-9]*) ((?:psk=.*)|(?:passphrase=".*")) go_dev_addr=([0-9a-f:]*)'
        s = re.split(exp, ev)
        if len(s) < 8:
            raise Exception("Could not parse P2P-GROUP-STARTED")
    res = {}
    res['result'] = 'success'
    res['ifname'] = s[2]
    self.group_ifname = s[2]
    # Best effort: group interface monitor socket may not be available.
    try:
        self.gctrl_mon = wpaspy.Ctrl(os.path.join(wpas_ctrl, self.group_ifname))
        self.gctrl_mon.attach()
    except:
        logger.debug("Could not open monitor socket for group interface")
        self.gctrl_mon = None
    res['role'] = s[3]
    res['ssid'] = s[4]
    res['freq'] = s[5]
    if "[PERSISTENT]" in ev:
        res['persistent'] = True
    else:
        res['persistent'] = False
    # The credential field is either psk=<hex> or passphrase="...".
    p = re.match(r'psk=([0-9a-f]*)', s[6])
    if p:
        res['psk'] = p.group(1)
    p = re.match(r'passphrase="(.*)"', s[6])
    if p:
        res['passphrase'] = p.group(1)
    res['go_dev_addr'] = s[7]

    # IP fields exist only when the long event format matched.
    if len(s) > 8 and len(s[8]) > 0:
        res['ip_addr'] = s[8]
    if len(s) > 9:
        res['ip_mask'] = s[9]
    if len(s) > 10:
        res['go_ip_addr'] = s[10]

    if go_neg_res:
        exp = r'<.>(P2P-GO-NEG-SUCCESS) role=(GO|client) freq=([0-9]*)'
        s = re.split(exp, go_neg_res)
        if len(s) < 4:
            raise Exception("Could not parse P2P-GO-NEG-SUCCESS")
        res['go_neg_role'] = s[2]
        res['go_neg_freq'] = s[3]

    return res
|
|
|
|
|
2013-10-26 19:47:08 +02:00
|
|
|
def p2p_go_neg_auth(self, peer, pin, method, go_intent=None, persistent=False, freq=None):
|
2013-03-02 00:05:03 +01:00
|
|
|
if not self.discover_peer(peer):
|
|
|
|
raise Exception("Peer " + peer + " not found")
|
|
|
|
self.dump_monitor()
|
|
|
|
cmd = "P2P_CONNECT " + peer + " " + pin + " " + method + " auth"
|
2013-03-09 12:52:59 +01:00
|
|
|
if go_intent:
|
|
|
|
cmd = cmd + ' go_intent=' + str(go_intent)
|
2013-10-26 19:47:08 +02:00
|
|
|
if freq:
|
|
|
|
cmd = cmd + ' freq=' + str(freq)
|
2013-09-01 18:24:12 +02:00
|
|
|
if persistent:
|
|
|
|
cmd = cmd + " persistent"
|
2013-06-30 23:13:11 +02:00
|
|
|
if "OK" in self.global_request(cmd):
|
2013-03-02 00:05:03 +01:00
|
|
|
return None
|
|
|
|
raise Exception("P2P_CONNECT (auth) failed")
|
|
|
|
|
2013-03-09 12:52:59 +01:00
|
|
|
def p2p_go_neg_auth_result(self, timeout=1, expect_failure=False):
|
2013-10-26 19:47:08 +02:00
|
|
|
go_neg_res = None
|
|
|
|
ev = self.wait_global_event(["P2P-GO-NEG-SUCCESS",
|
|
|
|
"P2P-GO-NEG-FAILURE"], timeout);
|
2013-03-09 12:36:35 +01:00
|
|
|
if ev is None:
|
2013-03-09 12:52:59 +01:00
|
|
|
if expect_failure:
|
|
|
|
return None
|
2013-03-09 12:36:35 +01:00
|
|
|
raise Exception("Group formation timed out")
|
2013-10-26 19:47:08 +02:00
|
|
|
if "P2P-GO-NEG-SUCCESS" in ev:
|
|
|
|
go_neg_res = ev
|
|
|
|
ev = self.wait_global_event(["P2P-GROUP-STARTED"], timeout);
|
|
|
|
if ev is None:
|
|
|
|
if expect_failure:
|
|
|
|
return None
|
|
|
|
raise Exception("Group formation timed out")
|
2013-03-09 12:36:35 +01:00
|
|
|
self.dump_monitor()
|
2013-10-26 19:47:08 +02:00
|
|
|
return self.group_form_result(ev, expect_failure, go_neg_res)
|
2013-03-09 12:36:35 +01:00
|
|
|
|
2014-10-10 17:03:38 +02:00
|
|
|
def p2p_go_neg_init(self, peer, pin, method, timeout=0, go_intent=None, expect_failure=False, persistent=False, persistent_id=None, freq=None, provdisc=False, wait_group=True):
    """Initiate GO negotiation with a peer and optionally wait for the result.

    With timeout=0, only issue the P2P_CONNECT command and return None.
    With wait_group=False, return the raw P2P-GO-NEG-SUCCESS event
    without waiting for P2P-GROUP-STARTED. Otherwise return the parsed
    group formation result from group_form_result().
    """
    if not self.discover_peer(peer):
        raise Exception("Peer " + peer + " not found")
    self.dump_monitor()
    # pin may be None for methods such as pbc that need no PIN argument.
    if pin:
        cmd = "P2P_CONNECT " + peer + " " + pin + " " + method
    else:
        cmd = "P2P_CONNECT " + peer + " " + method
    if go_intent:
        cmd = cmd + ' go_intent=' + str(go_intent)
    if freq:
        cmd = cmd + ' freq=' + str(freq)
    # "persistent" requests a new persistent group; "persistent=<id>"
    # reinvokes an existing persistent group (mutually exclusive).
    if persistent:
        cmd = cmd + " persistent"
    elif persistent_id:
        cmd = cmd + " persistent=" + persistent_id
    if provdisc:
        cmd = cmd + " provdisc"
    if "OK" in self.global_request(cmd):
        if timeout == 0:
            self.dump_monitor()
            return None
        go_neg_res = None
        ev = self.wait_global_event(["P2P-GO-NEG-SUCCESS",
                                     "P2P-GO-NEG-FAILURE"], timeout)
        if ev is None:
            if expect_failure:
                return None
            raise Exception("Group formation timed out")
        if "P2P-GO-NEG-SUCCESS" in ev:
            if not wait_group:
                return ev
            go_neg_res = ev
            ev = self.wait_global_event(["P2P-GROUP-STARTED"], timeout)
            if ev is None:
                if expect_failure:
                    return None
                raise Exception("Group formation timed out")
        self.dump_monitor()
        return self.group_form_result(ev, expect_failure, go_neg_res)
    raise Exception("P2P_CONNECT failed")
|
|
|
|
|
2013-12-26 17:18:10 +01:00
|
|
|
def wait_event(self, events, timeout=10):
    """Wait up to timeout seconds for any of the given event prefixes.

    Returns the first matching monitor event line, or None on timeout.
    Non-matching events are logged and discarded.
    """
    # os.times()[4] is elapsed wall-clock time; immune to system clock
    # adjustments unlike time.time().
    start = os.times()[4]
    while True:
        while self.mon.pending():
            ev = self.mon.recv()
            logger.debug(self.ifname + ": " + ev)
            for event in events:
                if event in ev:
                    return ev
        now = os.times()[4]
        remaining = start + timeout - now
        if remaining <= 0:
            break
        # Block until a new event arrives or the deadline passes.
        if not self.mon.pending(timeout=remaining):
            break
    return None
|
2013-03-02 00:05:03 +01:00
|
|
|
|
2013-06-30 23:13:11 +02:00
|
|
|
def wait_global_event(self, events, timeout):
|
|
|
|
if self.global_iface is None:
|
|
|
|
self.wait_event(events, timeout)
|
|
|
|
else:
|
2014-01-05 07:02:06 +01:00
|
|
|
start = os.times()[4]
|
|
|
|
while True:
|
2013-06-30 23:13:11 +02:00
|
|
|
while self.global_mon.pending():
|
|
|
|
ev = self.global_mon.recv()
|
2013-09-25 15:45:45 +02:00
|
|
|
logger.debug(self.ifname + "(global): " + ev)
|
2013-06-30 23:13:11 +02:00
|
|
|
for event in events:
|
|
|
|
if event in ev:
|
|
|
|
return ev
|
2014-01-05 07:02:06 +01:00
|
|
|
now = os.times()[4]
|
|
|
|
remaining = start + timeout - now
|
|
|
|
if remaining <= 0:
|
|
|
|
break
|
|
|
|
if not self.global_mon.pending(timeout=remaining):
|
|
|
|
break
|
2013-06-30 23:13:11 +02:00
|
|
|
return None
|
|
|
|
|
2014-10-19 19:56:36 +02:00
|
|
|
def wait_group_event(self, events, timeout=10):
    """Wait for any of the given events on the P2P group interface.

    Uses the dedicated group monitor socket when a separate group
    interface is active; otherwise falls back to the normal per-interface
    monitor. Returns the matching event line or None on timeout.
    """
    if self.group_ifname and self.group_ifname != self.ifname:
        if self.gctrl_mon is None:
            # Group monitor socket could not be opened earlier; nothing
            # to wait on.
            return None
        start = os.times()[4]
        while True:
            while self.gctrl_mon.pending():
                ev = self.gctrl_mon.recv()
                logger.debug(self.group_ifname + ": " + ev)
                for event in events:
                    if event in ev:
                        return ev
            now = os.times()[4]
            remaining = start + timeout - now
            if remaining <= 0:
                break
            if not self.gctrl_mon.pending(timeout=remaining):
                break
        return None

    return self.wait_event(events, timeout)
|
|
|
|
|
2013-09-01 18:30:08 +02:00
|
|
|
def wait_go_ending_session(self):
|
2014-10-19 19:56:36 +02:00
|
|
|
if self.gctrl_mon:
|
|
|
|
try:
|
|
|
|
self.gctrl_mon.detach()
|
|
|
|
except:
|
|
|
|
pass
|
|
|
|
self.gctrl_mon = None
|
2013-09-01 18:30:08 +02:00
|
|
|
ev = self.wait_event(["P2P-GROUP-REMOVED"], timeout=3)
|
|
|
|
if ev is None:
|
|
|
|
raise Exception("Group removal event timed out")
|
|
|
|
if "reason=GO_ENDING_SESSION" not in ev:
|
|
|
|
raise Exception("Unexpected group removal reason")
|
|
|
|
|
2013-03-02 00:05:03 +01:00
|
|
|
def dump_monitor(self):
|
|
|
|
while self.mon.pending():
|
|
|
|
ev = self.mon.recv()
|
|
|
|
logger.debug(self.ifname + ": " + ev)
|
2013-09-25 15:45:45 +02:00
|
|
|
while self.global_mon.pending():
|
|
|
|
ev = self.global_mon.recv()
|
|
|
|
logger.debug(self.ifname + "(global): " + ev)
|
2013-03-02 10:38:56 +01:00
|
|
|
|
2013-03-02 11:22:28 +01:00
|
|
|
def remove_group(self, ifname=None):
|
2014-10-19 19:56:36 +02:00
|
|
|
if self.gctrl_mon:
|
|
|
|
try:
|
|
|
|
self.gctrl_mon.detach()
|
|
|
|
except:
|
|
|
|
pass
|
|
|
|
self.gctrl_mon = None
|
2013-03-02 11:22:28 +01:00
|
|
|
if ifname is None:
|
2013-09-01 18:24:12 +02:00
|
|
|
ifname = self.group_ifname if self.group_ifname else self.ifname
|
2013-06-30 23:13:11 +02:00
|
|
|
if "OK" not in self.global_request("P2P_GROUP_REMOVE " + ifname):
|
2013-03-02 10:38:56 +01:00
|
|
|
raise Exception("Group could not be removed")
|
2013-03-09 16:34:08 +01:00
|
|
|
self.group_ifname = None
|
2013-03-09 15:30:25 +01:00
|
|
|
|
2013-03-30 19:16:30 +01:00
|
|
|
def p2p_start_go(self, persistent=None, freq=None):
|
2013-03-09 15:30:25 +01:00
|
|
|
self.dump_monitor()
|
|
|
|
cmd = "P2P_GROUP_ADD"
|
2013-03-09 18:01:56 +01:00
|
|
|
if persistent is None:
|
|
|
|
pass
|
|
|
|
elif persistent is True:
|
|
|
|
cmd = cmd + " persistent"
|
|
|
|
else:
|
|
|
|
cmd = cmd + " persistent=" + str(persistent)
|
2013-03-30 19:16:30 +01:00
|
|
|
if freq:
|
2013-09-28 11:09:40 +02:00
|
|
|
cmd = cmd + " freq=" + str(freq)
|
2013-06-30 23:13:11 +02:00
|
|
|
if "OK" in self.global_request(cmd):
|
|
|
|
ev = self.wait_global_event(["P2P-GROUP-STARTED"], timeout=5)
|
2013-03-09 15:30:25 +01:00
|
|
|
if ev is None:
|
|
|
|
raise Exception("GO start up timed out")
|
|
|
|
self.dump_monitor()
|
|
|
|
return self.group_form_result(ev)
|
|
|
|
raise Exception("P2P_GROUP_ADD failed")
|
|
|
|
|
|
|
|
def p2p_go_authorize_client(self, pin):
|
|
|
|
cmd = "WPS_PIN any " + pin
|
2013-03-09 16:34:08 +01:00
|
|
|
if "FAIL" in self.group_request(cmd):
|
2013-03-09 15:30:25 +01:00
|
|
|
raise Exception("Failed to authorize client connection on GO")
|
|
|
|
return None
|
|
|
|
|
2013-09-01 10:56:55 +02:00
|
|
|
def p2p_go_authorize_client_pbc(self):
|
|
|
|
cmd = "WPS_PBC"
|
|
|
|
if "FAIL" in self.group_request(cmd):
|
|
|
|
raise Exception("Failed to authorize client connection on GO")
|
|
|
|
return None
|
|
|
|
|
2013-11-24 19:50:11 +01:00
|
|
|
def p2p_connect_group(self, go_addr, pin, timeout=0, social=False):
    """Join an existing P2P group run by go_addr using the given WPS PIN.

    With timeout=0, only issue the P2P_CONNECT command; otherwise wait
    for P2P-GROUP-STARTED and return the parsed group result.
    """
    self.dump_monitor()
    if not self.discover_peer(go_addr, social=social):
        # Retry discovery once with the same parameters unless a
        # social-channel-only scan already failed (a retry would not
        # help in that case).
        if social or not self.discover_peer(go_addr, social=social):
            raise Exception("GO " + go_addr + " not found")
    self.dump_monitor()
    cmd = "P2P_CONNECT " + go_addr + " " + pin + " join"
    if "OK" in self.global_request(cmd):
        if timeout == 0:
            self.dump_monitor()
            return None
        ev = self.wait_global_event(["P2P-GROUP-STARTED"], timeout)
        if ev is None:
            raise Exception("Joining the group timed out")
        self.dump_monitor()
        return self.group_form_result(ev)
    raise Exception("P2P_CONNECT(join) failed")
|
2013-03-09 17:24:32 +01:00
|
|
|
|
|
|
|
def tdls_setup(self, peer):
|
|
|
|
cmd = "TDLS_SETUP " + peer
|
|
|
|
if "FAIL" in self.group_request(cmd):
|
|
|
|
raise Exception("Failed to request TDLS setup")
|
|
|
|
return None
|
|
|
|
|
|
|
|
def tdls_teardown(self, peer):
|
|
|
|
cmd = "TDLS_TEARDOWN " + peer
|
|
|
|
if "FAIL" in self.group_request(cmd):
|
|
|
|
raise Exception("Failed to request TDLS teardown")
|
|
|
|
return None
|
2013-03-29 19:41:33 +01:00
|
|
|
|
2014-10-22 14:04:04 +02:00
|
|
|
def add_ts(self, tsid, up):
|
|
|
|
params = {
|
|
|
|
"sba": 9000,
|
|
|
|
"nominal_msdu_size": 1500,
|
|
|
|
"min_phy_rate": 6000000,
|
|
|
|
"mean_data_rate": 1500,
|
|
|
|
}
|
|
|
|
cmd = "WMM_AC_ADDTS downlink tsid=%d up=%d" % (tsid, up)
|
|
|
|
for (key, value) in params.iteritems():
|
|
|
|
cmd += " %s=%d" % (key, value)
|
|
|
|
|
|
|
|
if self.request(cmd).strip() != "OK":
|
|
|
|
raise Exception("ADDTS failed (tsid=%d up=%d)" % (tsid, up))
|
|
|
|
|
|
|
|
ev = self.wait_event(["TSPEC-ADDED"], timeout=1)
|
|
|
|
if ev is None:
|
|
|
|
raise Exception("ADDTS failed (time out)")
|
|
|
|
if "tsid=%d" % (tsid) not in ev:
|
|
|
|
raise Exception("ADDTS failed (invalid tsid in TSPEC-ADDED)")
|
|
|
|
|
|
|
|
def del_ts(self, tsid):
    """Delete a WMM-AC traffic stream and verify the removal event."""
    reply = self.request("WMM_AC_DELTS %d" % (tsid)).strip()
    if reply != "OK":
        raise Exception("DELTS failed")

    ev = self.wait_event(["TSPEC-REMOVED"], timeout=1)
    if ev is None:
        raise Exception("DELTS failed (time out)")
    if "tsid=%d" % (tsid) not in ev:
        raise Exception("DELTS failed (invalid tsid in TSPEC-REMOVED)")
|
|
|
|
|
2014-02-15 18:52:56 +01:00
|
|
|
def connect(self, ssid=None, ssid2=None, **kwargs):
    """Add a network block from kwargs and (optionally) connect to it.

    ssid is stored quoted; ssid2 is stored raw (e.g. a hex string).
    Returns the network id. With only_add_network the block is merely
    added; with wait_connect=False the network is selected without
    waiting for the connection to complete.
    """
    logger.info("Connect STA " + self.ifname + " to AP")
    netid = self.add_network()
    if ssid:
        self.set_network_quoted(netid, "ssid", ssid)
    elif ssid2:
        self.set_network(netid, "ssid", ssid2)

    # Parameters whose values must be quoted in the configuration.
    quoted = [ "psk", "identity", "anonymous_identity", "password",
               "ca_cert", "client_cert", "private_key",
               "private_key_passwd", "ca_cert2", "client_cert2",
               "private_key2", "phase1", "phase2", "domain_suffix_match",
               "altsubject_match", "subject_match", "pac_file", "dh_file",
               "bgscan", "ht_mcs", "id_str", "openssl_ciphers" ]
    for name in quoted:
        if kwargs.get(name):
            self.set_network_quoted(netid, name, kwargs[name])

    # Parameters that are written without quoting.
    not_quoted = [ "proto", "key_mgmt", "ieee80211w", "pairwise",
                   "group", "wep_key0", "wep_key1", "wep_key2", "wep_key3",
                   "wep_tx_keyidx", "scan_freq", "eap",
                   "eapol_flags", "fragment_size", "scan_ssid", "auth_alg",
                   "wpa_ptk_rekey", "disable_ht", "disable_vht", "bssid",
                   "disable_max_amsdu", "ampdu_factor", "ampdu_density",
                   "disable_ht40", "disable_sgi", "disable_ldpc",
                   "ht40_intolerant", "update_identifier", "mac_addr" ]
    for name in not_quoted:
        if kwargs.get(name):
            self.set_network(netid, name, kwargs[name])

    # Special cases that map to differently named config fields.
    if kwargs.get('raw_psk'):
        self.set_network(netid, "psk", kwargs['raw_psk'])
    if kwargs.get('password_hex'):
        self.set_network(netid, "password", kwargs['password_hex'])
    if kwargs.get('peerkey'):
        self.set_network(netid, "peerkey", "1")
    if kwargs.get('okc'):
        self.set_network(netid, "proactive_key_caching", "1")
    if kwargs.get('ocsp'):
        self.set_network(netid, "ocsp", str(kwargs['ocsp']))
    if kwargs.get('only_add_network'):
        return netid

    if kwargs.get('wait_connect', True):
        # EAP exchanges can take longer; allow extra time for them.
        if "eap" in kwargs:
            self.connect_network(netid, timeout=20)
        else:
            self.connect_network(netid)
    else:
        self.dump_monitor()
        self.select_network(netid)
    return netid
|
2013-03-30 10:28:39 +01:00
|
|
|
|
2014-04-12 16:16:07 +02:00
|
|
|
def scan(self, type=None, freq=None, no_wait=False, only_new=False):
    """Trigger a scan; unless no_wait, block for the scan-results event."""
    cmd = "SCAN"
    if type:
        cmd += " TYPE=" + type
    if freq:
        cmd += " freq=" + freq
    if only_new:
        cmd += " only_new=1"
    if not no_wait:
        # Drop queued events so the wait below sees only fresh ones.
        self.dump_monitor()
    if "OK" not in self.request(cmd):
        raise Exception("Failed to trigger scan")
    if no_wait:
        return
    if self.wait_event(["CTRL-EVENT-SCAN-RESULTS"], 15) is None:
        raise Exception("Scan timed out")
|
|
|
|
|
2014-05-14 12:35:32 +02:00
|
|
|
def scan_for_bss(self, bssid, freq=None, force_scan=False):
    """Scan (up to 10 attempts) until bssid appears in the BSS table."""
    if not force_scan and self.get_bss(bssid) is not None:
        return
    for _ in range(10):
        self.scan(freq=freq, type="ONLY")
        if self.get_bss(bssid) is not None:
            return
    raise Exception("Could not find BSS " + bssid + " in scan")
|
|
|
|
|
2014-03-23 18:23:16 +01:00
|
|
|
def roam(self, bssid, fail_test=False):
    """Request an explicit roam to bssid and wait for the outcome.

    With fail_test=True the roam is expected to fail, so getting a
    CTRL-EVENT-CONNECTED within one second is treated as an error.
    """
    self.dump_monitor()
    if "OK" not in self.request("ROAM " + bssid):
        raise Exception("ROAM failed")
    if fail_test:
        if self.wait_event(["CTRL-EVENT-CONNECTED"], timeout=1) is not None:
            raise Exception("Unexpected connection")
        self.dump_monitor()
        return
    if self.wait_event(["CTRL-EVENT-CONNECTED"], timeout=10) is None:
        raise Exception("Roaming with the AP timed out")
    self.dump_monitor()
|
2013-04-28 21:00:58 +02:00
|
|
|
|
2014-03-23 18:23:16 +01:00
|
|
|
def roam_over_ds(self, bssid, fail_test=False):
    """Roam to bssid using FT-over-DS and wait for the outcome.

    With fail_test=True the roam is expected to fail, so getting a
    CTRL-EVENT-CONNECTED within one second is treated as an error.
    """
    self.dump_monitor()
    if "OK" not in self.request("FT_DS " + bssid):
        raise Exception("FT_DS failed")
    if fail_test:
        if self.wait_event(["CTRL-EVENT-CONNECTED"], timeout=1) is not None:
            raise Exception("Unexpected connection")
        self.dump_monitor()
        return
    if self.wait_event(["CTRL-EVENT-CONNECTED"], timeout=10) is None:
        raise Exception("Roaming with the AP timed out")
    self.dump_monitor()
|
|
|
|
|
2013-04-28 21:00:58 +02:00
|
|
|
def wps_reg(self, bssid, pin, new_ssid=None, key_mgmt=None, cipher=None,
            new_passphrase=None, no_wait=False):
    # Run WPS registrar operation against the AP identified by bssid,
    # using the given AP PIN.
    #
    # If new_ssid is given, the AP is reconfigured with the new
    # ssid/key_mgmt/cipher/passphrase (all of key_mgmt, cipher and
    # new_passphrase must then be provided); otherwise only the current
    # AP settings are learned. With no_wait=True, return immediately
    # after issuing the command without waiting for events.
    self.dump_monitor()
    if new_ssid:
        # Reconfigure the AP; SSID and passphrase are hex-encoded for the
        # control interface command (Python 2 str.encode("hex")).
        self.request("WPS_REG " + bssid + " " + pin + " " +
                     new_ssid.encode("hex") + " " + key_mgmt + " " +
                     cipher + " " + new_passphrase.encode("hex"))
        if no_wait:
            return
        ev = self.wait_event(["WPS-SUCCESS"], timeout=15)
    else:
        self.request("WPS_REG " + bssid + " " + pin)
        if no_wait:
            return
        ev = self.wait_event(["WPS-CRED-RECEIVED"], timeout=15)
        if ev is None:
            raise Exception("WPS cred timed out")
        # NOTE(review): after receiving the credential this waits for a
        # WPS-FAIL event before proceeding — presumably the normal end of
        # the protocol run in this role; confirm against wpa_supplicant
        # WPS event semantics.
        ev = self.wait_event(["WPS-FAIL"], timeout=15)
    if ev is None:
        raise Exception("WPS timed out")
    # Finally wait for the station to associate with the AP.
    ev = self.wait_event(["CTRL-EVENT-CONNECTED"], timeout=15)
    if ev is None:
        raise Exception("Association with the AP timed out")
|
2013-11-02 12:04:06 +01:00
|
|
|
|
|
|
|
def relog(self):
    # Issue the RELOG command over the global control interface
    # (asks wpa_supplicant to reopen/truncate its debug log output).
    self.global_request("RELOG")
|
2013-11-24 19:50:11 +01:00
|
|
|
|
|
|
|
def wait_completed(self, timeout=10):
    """Poll (every 0.5 s) until wpa_state is COMPLETED; raise on timeout."""
    for _ in range(timeout * 2):
        if self.get_status_field("wpa_state") == "COMPLETED":
            return
        time.sleep(0.5)
    raise Exception("Timeout while waiting for COMPLETED state")
|
2013-12-25 10:17:32 +01:00
|
|
|
|
|
|
|
def get_capability(self, field):
    """Return GET_CAPABILITY values as a list, or None on failure."""
    res = self.request("GET_CAPABILITY " + field)
    return None if "FAIL" in res else res.split(' ')
|
2013-12-25 19:49:02 +01:00
|
|
|
|
|
|
|
def get_bss(self, bssid):
    """Fetch the BSS table entry for bssid as a name->value dict.

    Returns None if the entry is not available or the reply is empty.
    """
    res = self.request("BSS " + bssid)
    if "FAIL" in res:
        return None
    vals = dict()
    for line in res.splitlines():
        name, value = line.split('=', 1)
        vals[name] = value
    return vals if vals else None
|
2013-12-29 18:25:42 +01:00
|
|
|
|
|
|
|
def get_pmksa(self, bssid):
    """Return the PMKSA cache entry for bssid as a dict, or None."""
    for line in self.request("PMKSA").splitlines():
        if bssid not in line:
            continue
        # Entry format: index aa pmkid expiration opportunistic
        index, aa, pmkid, expiration, opportunistic = line.split(' ')
        return { 'index': index,
                 'pmkid': pmkid,
                 'expiration': expiration,
                 'opportunistic': opportunistic }
    return None
|
2014-02-03 20:52:33 +01:00
|
|
|
|
|
|
|
def get_sta(self, addr, info=None, next=False):
    """Fetch STA data over the control interface and parse it.

    addr=None iterates from the first STA (STA-FIRST); next=True uses
    STA-NEXT instead of STA. Returns a dict with 'addr' plus the
    name=value fields from the reply.
    """
    prefix = "STA-NEXT " if next else "STA "
    if addr is None:
        req = "STA-FIRST"
    elif info:
        req = prefix + addr + " " + info
    else:
        req = prefix + addr
    res = self.request(req)

    vals = dict()
    lines = res.splitlines()
    if lines:
        # First line of the reply is the station address itself.
        vals['addr'] = lines[0]
        for entry in lines[1:]:
            name, value = entry.split('=', 1)
            vals[name] = value
    return vals
|
2014-03-08 20:25:47 +01:00
|
|
|
|
|
|
|
def mgmt_rx(self, timeout=5):
    """Wait for a MGMT-RX event and parse the received management frame.

    Returns None on timeout, otherwise a dict with the frequency, the
    raw frame, and the decoded 802.11 header fields (fc, subtype,
    duration, da, sa, bssid, seq_ctrl) plus the payload after the
    24-byte header.
    """
    ev = self.wait_event(["MGMT-RX"], timeout=timeout)
    if ev is None:
        return None
    items = ev.split(' ')
    field, val = items[1].split('=')
    if field != "freq":
        raise Exception("Unexpected MGMT-RX event format: " + ev)
    frame = binascii.unhexlify(items[4])

    # 802.11 header: frame control, duration, DA, SA, BSSID, seq control
    # (little-endian; addresses unpacked as 6 individual bytes each).
    hdr = struct.unpack('<HH6B6B6BH', frame[0:24])
    msg = {}
    msg['freq'] = val
    msg['frame'] = frame
    msg['fc'] = hdr[0]
    msg['subtype'] = (hdr[0] >> 4) & 0xf
    msg['duration'] = hdr[1]
    msg['da'] = "%02x:%02x:%02x:%02x:%02x:%02x" % hdr[2:8]
    msg['sa'] = "%02x:%02x:%02x:%02x:%02x:%02x" % hdr[8:14]
    msg['bssid'] = "%02x:%02x:%02x:%02x:%02x:%02x" % hdr[14:20]
    msg['seq_ctrl'] = hdr[20]
    msg['payload'] = frame[24:]

    return msg
|