Commit d9841dd

set encoding utf-8.
anarkiwi committed Aug 25, 2021
1 parent 12578cc, commit d9841dd
Showing 27 changed files with 88 additions and 89 deletions.
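
The change is mechanical: every text-mode open() that previously relied on the platform default encoding now passes encoding='utf-8' explicitly. Without it, Python uses locale.getpreferredencoding(False), which varies by host (usually UTF-8 on Linux, but for example cp1252 on some Windows setups), so the same config or log file could decode differently depending on where the tests run. A minimal sketch of the pattern, with a hypothetical file name rather than one from the diff:

    import locale

    # What a bare open() actually uses for text files: locale-dependent.
    print(locale.getpreferredencoding(False))  # e.g. 'UTF-8' or 'cp1252'

    # Before: implicit, locale-dependent encoding (what pylint flags).
    with open('faucet.yaml') as config_file:  # 'faucet.yaml' is a hypothetical path
        config_text = config_file.read()

    # After: explicit encoding, identical behaviour on every host.
    with open('faucet.yaml', encoding='utf-8') as config_file:
        config_text = config_file.read()
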
3 changes: 1 addition & 2 deletions .pylintrc
@@ -1,8 +1,7 @@
[MASTER]
disable=
fixme,
-import-error,
-unspecified-encoding
+import-error

[FORMAT]
max-line-length=120
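
Dropping unspecified-encoding from the disable list re-enables pylint's check for exactly this pattern (message symbol unspecified-encoding, W1514, introduced around pylint 2.10; the version is stated as background, not taken from the diff), so any future open() without an explicit encoding is reported. A hedged example of the kind of code it flags, using hypothetical names:

    # flagged.py (hypothetical module) -- pylint reports unspecified-encoding (W1514)
    # on the first open() below once the check is no longer disabled.
    def read_notes(path):
        with open(path) as handle:  # implicit, locale-dependent encoding
            return handle.read()

    def read_notes_fixed(path):
        with open(path, encoding='utf-8') as handle:  # explicit, passes the check
            return handle.read()
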
10 changes: 5 additions & 5 deletions clib/clib_mininet_test_main.py
@@ -130,7 +130,7 @@ def import_hw_config():
print('Cannot find %s in %s' % (HW_SWITCH_CONFIG_FILE, CONFIG_FILE_DIRS))
sys.exit(-1)
try:
-with open(config_file_name, 'r') as config_file:
+with open(config_file_name, 'r', encoding='utf-8') as config_file:
config = yaml.safe_load(config_file)
except IOError:
print('Could not load YAML config data from %s' % config_file_name)
@@ -305,7 +305,7 @@ def parse_flow(flow_lines):
table_actions_max = collections.defaultdict(lambda: 0)

for log in decoded_pcap_logs:
-with open(log) as log_file:
+with open(log, encoding='utf-8') as log_file:
packets = re.compile(r'\n{2,}').split(log_file.read())
for packet in packets:
last_packet_line = None
@@ -448,7 +448,7 @@ def _set_test_duration_secs(self, test):
if test.id() not in self.test_duration_secs:
self.test_duration_secs[test.id()] = 0
try:
-with open(duration_file_name) as duration_file:
+with open(duration_file_name, encoding='utf-8') as duration_file:
self.test_duration_secs[test.id()] = int(duration_file.read())
except FileNotFoundError:
pass
@@ -558,7 +558,7 @@ def report_results(results, hw_config, report_json_filename):
'hw_config': hw_config,
'tests': tests_json,
}
-with open(report_json_filename, 'w') as report_json_file:
+with open(report_json_filename, 'w', encoding='utf-8') as report_json_file:
report_json_file.write(json.dumps(report_json))


@@ -605,7 +605,7 @@ def dump_failed_test_file(test_file, only_exts):

if dump_file:
try:
-with open(test_file) as test_file_h:
+with open(test_file, encoding='utf-8') as test_file_h:
test_file_content = test_file_h.read()
if test_file_content:
print(test_file)
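
A note on the report_results hunk above: json.dumps defaults to ensure_ascii=True, which escapes all non-ASCII characters, so the written report is plain ASCII regardless of the file encoding; the explicit encoding='utf-8' starts to matter if non-ASCII is ever written verbatim. A small sketch with hypothetical data and file name, not from the test suite:

    import json

    report = {'description': 'flow tåble dump', 'tests': []}  # non-ASCII on purpose

    with open('report.json', 'w', encoding='utf-8') as report_file:  # hypothetical path
        # Default ensure_ascii=True would write '\u00e5'; ensure_ascii=False writes
        # the raw character, which is when the pinned file encoding does real work.
        json.dump(report, report_file, ensure_ascii=False)
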
2 changes: 1 addition & 1 deletion clib/clib_mininet_tests.py
@@ -162,7 +162,7 @@ def test_containers(self):
'container tmp dir missing')

host_log = os.path.join(self.tmpdir, host_name, 'activate.log')
-with open(host_log, 'r') as host_log_file:
+with open(host_log, 'r', encoding='utf-8') as host_log_file:
lines = host_log_file.readlines()
output = ' '.join(lines).strip()
self.assertEqual(output, 'hello faucet')
4 changes: 2 additions & 2 deletions clib/fakeoftable.py
@@ -1208,7 +1208,7 @@ def parse_args():

def _print(filename, **_kwargs):
"""Prints the JSON flow table from a file in a human readable format"""
-with open(filename, 'r') as file_handle:
+with open(filename, 'r', encoding='utf-8') as file_handle:
msg = json.load(file_handle)
datapath = FakeRyuDp()
ofmsg = ofp_parser.ofp_msg_from_jsondict(datapath, msg)
@@ -1219,7 +1219,7 @@ def _print(filename, **_kwargs):

def probe(filename, packet):
"""Prints the actions applied to packet by the table from the file"""
-with open(filename, 'r') as file_handle:
+with open(filename, 'r', encoding='utf-8') as file_handle:
msg = json.load(file_handle)
datapath = FakeRyuDp()
ofmsg = ofp_parser.ofp_msg_from_jsondict(datapath, msg)
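
For the JSON loads above, either form works: since Python 3.6, json.load also accepts a file opened in binary mode and detects UTF-8/UTF-16/UTF-32 itself, so the explicit text-mode encoding here mainly keeps the style consistent with the rest of the codebase. A quick sketch with a hypothetical file name:

    import json

    with open('flowmod.json', 'rb') as file_handle:  # hypothetical file; bytes in, json detects encoding
        msg_from_bytes = json.load(file_handle)

    with open('flowmod.json', 'r', encoding='utf-8') as file_handle:
        msg_from_text = json.load(file_handle)

    assert msg_from_bytes == msg_from_text
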
34 changes: 17 additions & 17 deletions clib/mininet_test_base.py
@@ -294,7 +294,7 @@ def _wait_until_matching_event(self, match_func, timeout=30):
assert timeout >= 1
assert self.event_log and os.path.exists(self.event_log)
for _ in range(timeout):
-with open(self.event_log) as events:
+with open(self.event_log, encoding='utf-8') as events:
for event_str in events:
event = json.loads(event_str)
event_id = event['event_id']
@@ -311,7 +311,7 @@ def _wait_until_matching_event(self, match_func, timeout=30):

@staticmethod
def _read_yaml(yaml_path):
-with open(yaml_path) as yaml_file:
+with open(yaml_path, encoding='utf-8') as yaml_file:
content = yaml.safe_load(yaml_file.read())
return content

@@ -354,7 +354,7 @@ def _write_yaml_conf(yaml_path, yaml_conf):
delete=False) as conf_file_tmp:
conf_file_tmp_name = conf_file_tmp.name
conf_file_tmp.write(new_conf_str)
-with open(conf_file_tmp_name, 'rb') as conf_file_tmp:
+with open(conf_file_tmp_name, 'rb', encoding='utf-8') as conf_file_tmp:
conf_file_tmp_str = conf_file_tmp.read()
assert new_conf_str == conf_file_tmp_str
if os.path.exists(yaml_path):
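
One caveat in the _write_yaml_conf hunk above: that open() is in binary mode ('rb'), and Python's open() does not accept an encoding argument for binary files, so as shown the new line raises ValueError at runtime rather than changing decoding behaviour. A quick illustration with a hypothetical path:

    with open('conf.tmp', 'rb') as conf_file_tmp:  # 'conf.tmp' is a hypothetical path; bytes in, nothing to decode
        data = conf_file_tmp.read()

    with open('conf.tmp', 'rb', encoding='utf-8') as conf_file_tmp:
        pass  # ValueError: binary mode doesn't take an encoding argument
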
@@ -508,9 +508,9 @@ def tearDown(self, ignore_oferrors=False):
lines.extend(self.matching_lines_from_file(name, ovs_log))
if lines:
switch_ovs_log_name = os.path.join(self.tmpdir, os.path.basename(ovs_log))
-with open(switch_ovs_log_name, 'w') as switch_ovs_log:
+with open(switch_ovs_log_name, 'w', encoding='utf-8') as switch_ovs_log:
switch_ovs_log.write('\n'.join(lines))
-with open(os.path.join(self.tmpdir, 'test_duration_secs'), 'w') as duration_file:
+with open(os.path.join(self.tmpdir, 'test_duration_secs'), 'w', encoding='utf-8') as duration_file:
duration_file.write(str(int(time.time() - self.start_time)))
# Must not be any controller exception.
for controller_env in self.env.values():
@@ -857,7 +857,7 @@ def _dump_controller_logs(self):
for test_log_name in test_logs:
basename = os.path.basename(test_log_name)
if basename.startswith(controller.name):
-with open(test_log_name) as test_log:
+with open(test_log_name, encoding='utf-8') as test_log:
dump_txt += '\n'.join((
'',
basename,
@@ -912,7 +912,7 @@ def _wait_debug_log(self):
def verify_no_exception(self, exception_log_name):
if not os.path.exists(exception_log_name):
return
-with open(exception_log_name) as exception_log:
+with open(exception_log_name, encoding='utf-8') as exception_log:
exception_contents = exception_log.read()
self.assertEqual(
'',
@@ -1099,7 +1099,7 @@ def wait_matching_in_group_table(self, action, group_id, timeout=10):
groupdump = os.path.join(self.tmpdir, 'groupdump-%s.txt' % self.dpid)
for _ in range(timeout):
group_dump = self.get_all_groups_desc_from_dpid(self.dpid, 1)
-with open(groupdump, 'w') as groupdump_file:
+with open(groupdump, 'w', encoding='utf-8') as groupdump_file:
for group_dict in group_dump:
groupdump_file.write(str(group_dict) + '\n')
if group_dict['group_id'] == group_id:
@@ -1113,7 +1113,7 @@ def wait_matching_in_group_table(self, action, group_id, timeout=10):
def get_matching_meters_on_dpid(self, dpid):
meterdump = os.path.join(self.tmpdir, 'meterdump-%s.log' % dpid)
meter_dump = self.get_all_meters_from_dpid(dpid)
-with open(meterdump, 'w') as meterdump_file:
+with open(meterdump, 'w', encoding='utf-8') as meterdump_file:
meterdump_file.write(str(meter_dump))
return meterdump

@@ -1173,7 +1173,7 @@ def to_old_match(match):
flow_dump = self.get_all_flows_from_dpid(dpid, table_id, match=match)
else:
flow_dump = self.get_all_flows_from_dpid(dpid, table_id)
-with open(flowdump, 'w') as flowdump_file:
+with open(flowdump, 'w', encoding='utf-8') as flowdump_file:
flowdump_file.write(str(flow_dump))
for flow_dict in flow_dump:
if (cookie is not None
@@ -1483,7 +1483,7 @@ def scrape_prometheus(self, controller=None, timeout=15, var=None, verify_consis
prom_raw = requests.get(url, {}, timeout=timeout).text
except (requests.exceptions.ConnectionError, requests.exceptions.ReadTimeout):
return []
-with open(os.path.join(self.tmpdir, '%s-prometheus.log' % controller_name), 'w') as prom_log:
+with open(os.path.join(self.tmpdir, '%s-prometheus.log' % controller_name), 'w', encoding='utf-8') as prom_log:
prom_log.write(prom_raw)
prom_lines = [
prom_line for prom_line in prom_raw.splitlines() if not prom_line.startswith('#')]
@@ -2307,7 +2307,7 @@ def verify_faucet_reconf(self, timeout=20,

def force_faucet_reload(self, new_config):
"""Force FAUCET to reload."""
-with open(self.faucet_config_path, 'w') as config_file:
+with open(self.faucet_config_path, 'w', encoding='utf-8') as config_file:
config_file.write(new_config)
self.verify_faucet_reconf(change_expected=False)

@@ -2465,7 +2465,7 @@ def get_mac_of_intf(intf, host=None):
"""Get MAC address of a port."""
address_file_name = '/sys/class/net/%s/address' % intf
if host is None:
-with open(address_file_name) as address_file:
+with open(address_file_name, encoding='utf-8') as address_file:
address = address_file.read()
else:
address = host.cmd('cat %s' % address_file_name)
@@ -2712,7 +2712,7 @@ def start_exabgp(self, exabgp_conf, timeout=30, log_prefix=''):
))
bgp_port = self.config_ports['bgp_port']
exabgp_conf = exabgp_conf % {'bgp_port': bgp_port}
-with open(exabgp_conf_file_name, 'w') as exabgp_conf_file:
+with open(exabgp_conf_file_name, 'w', encoding='utf-8') as exabgp_conf_file:
exabgp_conf_file.write(exabgp_conf)
controller = self._get_controller()
# Ensure exabgp only attempts one connection.
@@ -2744,14 +2744,14 @@ def wait_bgp_up(self, neighbor, vlan, exabgp_log, exabgp_err):
exabgp_log_content = []
for log_name in (exabgp_log, exabgp_err):
if os.path.exists(log_name):
-with open(log_name) as log:
+with open(log_name, encoding='utf-8') as log:
exabgp_log_content.append(log.read())
self.fail('exabgp did not peer with FAUCET: %s' % '\n'.join(exabgp_log_content))

@staticmethod
def matching_lines_from_file(exp, log_name):
exp_re = re.compile(exp)
-with open(log_name) as log_file:
+with open(log_name, encoding='utf-8') as log_file:
return [log_line for log_line in log_file if exp_re.match(log_line)]
return []

@@ -2819,7 +2819,7 @@ def start_wpasupplicant(self, host, wpasupplicant_conf, timeout=10, log_prefix='
self.tmpdir, '%swpasupplicant.conf' % log_prefix)
wpasupplicant_log = os.path.join(
self.tmpdir, '%swpasupplicant.log' % log_prefix)
-with open(wpasupplicant_conf_file_name, 'w') as wpasupplicant_conf_file:
+with open(wpasupplicant_conf_file_name, 'w', encoding='utf-8') as wpasupplicant_conf_file:
wpasupplicant_conf_file.write(wpasupplicant_conf)
wpa_ctrl_socket = ''
if wpa_ctrl_socket_path:
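
Several of the hunks above read logs produced by FAUCET, OVS or exabgp (matching_lines_from_file, verify_no_exception, wait_bgp_up). With the encoding pinned to UTF-8, a stray undecodable byte in such a log now raises UnicodeDecodeError instead of decoding under whatever the locale codec happened to tolerate; if that matters, an explicit error handler keeps the read usable. A hedged sketch (an illustrative variant, not part of the commit):

    import re

    def matching_lines(pattern, log_name):
        exp_re = re.compile(pattern)
        # errors='backslashreplace' keeps undecodable bytes visible instead of raising.
        with open(log_name, encoding='utf-8', errors='backslashreplace') as log_file:
            return [line for line in log_file if exp_re.match(line)]
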
2 changes: 1 addition & 1 deletion clib/mininet_test_base_topo.py
@@ -699,7 +699,7 @@ def require_linux_bond_up(self, host_id):
for _ in range(self.LACP_TIMEOUT * 2):
result = host.cmd('cat /proc/net/bonding/%s|sed "s/[ \t]*$//g"' % bond_name)
result = '\n'.join([line.rstrip() for line in result.splitlines()])
-with open(os.path.join(self.tmpdir, 'bonding-state.txt'), 'w') as state_file:
+with open(os.path.join(self.tmpdir, 'bonding-state.txt'), 'w', encoding='utf-8') as state_file:
state_file.write(result)
matched_all = True
for state_txt in synced_state_list:
10 changes: 5 additions & 5 deletions clib/mininet_test_topo.py
@@ -61,7 +61,7 @@ def __init__(self, *args, **kwargs):
def terminate(self):
# If any 'dnsmasq' processes were started, terminate them now
for pid_file in self.pid_files:
-with open(pid_file, 'r') as pf:
+with open(pid_file, 'r', encoding='utf-8') as pf:
for _, pid in enumerate(pf):
os.kill(int(pid), 15)
super().terminate()
@@ -479,7 +479,7 @@ def _add_cargs(self, cargs, name):
self.pid_file = os.path.join(self.tmpdir, name + '.pid')
pid_file_arg = '--ryu-pid-file=%s' % self.pid_file
ryu_conf_file = os.path.join(self.tmpdir, 'ryu.conf')
-with open(ryu_conf_file, 'w') as ryu_conf:
+with open(ryu_conf_file, 'w', encoding='utf-8') as ryu_conf:
ryu_conf.write(self.RYU_CONF)
ryu_conf_arg = '--ryu-config-file=%s' % ryu_conf_file
return ' '.join((
@@ -539,7 +539,7 @@ def _command(self, env, tmpdir, name, args):
if self.CPROFILE:
cprofile_args = 'python3 -m cProfile -s time'
full_faucet_dir = os.path.abspath(mininet_test_util.FAUCET_DIR)
-with open(script_wrapper_name, 'w') as script_wrapper:
+with open(script_wrapper_name, 'w', encoding='utf-8') as script_wrapper:
faucet_cli = (
'PYTHONPATH=%s %s exec timeout %u %s %s %s $*\n' % (
os.path.dirname(full_faucet_dir),
@@ -555,7 +555,7 @@ def ryu_pid(self):
"""Return PID of ryu-manager process."""
if os.path.exists(self.pid_file) and os.path.getsize(self.pid_file) > 0:
pid = None
-with open(self.pid_file) as pid_file:
+with open(self.pid_file, encoding='utf-8') as pid_file:
pid = int(pid_file.read())
return pid
return None
@@ -607,7 +607,7 @@ def _stop_cap(self):
if os.path.exists(self.ofcap):
self.cmd(' '.join(['fuser', '-15', '-k', self.ofcap]))
text_ofcap_log = '%s.txt' % self.ofcap
-with open(text_ofcap_log, 'w') as text_ofcap:
+with open(text_ofcap_log, 'w', encoding='utf-8') as text_ofcap:
subprocess.call(
['timeout', str(self.MAX_CTL_TIME),
'tshark', '-l', '-n', '-Q',
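
A detail about the _stop_cap hunk above: subprocess.call hands the child process the file descriptor of text_ofcap, so tshark writes raw bytes straight to that descriptor and the wrapper's encoding='utf-8' only governs writes made from Python itself. If the intent were to decode tshark's output explicitly, the decoding has to happen in the parent, for example (a sketch under that assumption, not the commit's approach):

    import subprocess

    completed = subprocess.run(
        ['tshark', '-l', '-n', '-Q', '-r', 'of.pcap'],  # 'of.pcap' is a hypothetical capture file
        capture_output=True, encoding='utf-8', errors='replace', check=False)

    with open('of.pcap.txt', 'w', encoding='utf-8') as text_ofcap:
        text_ofcap.write(completed.stdout)
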
2 changes: 1 addition & 1 deletion clib/mininet_test_util.py
@@ -21,7 +21,7 @@
LOCALHOSTV6 = '::1'
FAUCET_DIR = os.getenv('FAUCET_DIR', '../faucet')
RESERVED_FOR_TESTS_PORTS = (179, 5001, 5002, 6633, 6653)
-with open('/proc/sys/net/netfilter/nf_conntrack_tcp_timeout_time_wait') as pf:
+with open('/proc/sys/net/netfilter/nf_conntrack_tcp_timeout_time_wait', encoding='utf-8') as pf:
MIN_PORT_AGE = max(int(pf.read()) / 2, 10)


2 changes: 1 addition & 1 deletion clib/mininet_test_watcher.py
@@ -228,6 +228,6 @@ def dump_info(self, tmpdir):
sw_graph_fn = os.path.join(tmpdir, 'final_switch_graph.txt')
networkx.write_edgelist(self.switch_graph, sw_graph_fn)
fault_list_fn = os.path.join(tmpdir, 'fault-list.txt')
-with open(fault_list_fn, 'w') as fl_file:
+with open(fault_list_fn, 'w', encoding='utf-8') as fl_file:
for fault_name in self.fault_list:
fl_file.write(fault_name + '\n')
6 changes: 3 additions & 3 deletions clib/valve_test_lib.py
@@ -789,9 +789,9 @@ def update_config(self, config, table_dpid=None, reload_type='cold',
before_dp_status = int(self.get_prom('dp_status'))
existing_config = None
if os.path.exists(self.config_file):
-with open(self.config_file) as config_file:
+with open(self.config_file, encoding='utf-8') as config_file:
existing_config = config_file.read()
-with open(self.config_file, 'w') as config_file:
+with open(self.config_file, 'w', encoding='utf-8') as config_file:
config_file.write(config)
content_change_expected = config != existing_config
self.assertEqual(
@@ -1505,7 +1505,7 @@ def test_notifier_socket_path(self):
new_path = os.path.join(self.tmpdir, 'new_path/new_socket')
self.assertEqual(self.notifier.check_path(new_path), new_path)
stale_socket = os.path.join(self.tmpdir, 'stale_socket')
-with open(stale_socket, 'w') as stale_socket_file:
+with open(stale_socket, 'w', encoding='utf-8') as stale_socket_file:
stale_socket_file.write('')
self.assertEqual(self.notifier.check_path(stale_socket), stale_socket)

2 changes: 1 addition & 1 deletion docs/conf.py
@@ -229,7 +229,7 @@ def generate_prometheus_metric_table(_):
- {}
""".format(metric_name, metric.type, metric.documentation)

-with open(output_path[module], 'w') as output_file:
+with open(output_path[module], 'w', encoding='utf-8') as output_file:
output_file.write(block_text[module])


4 changes: 2 additions & 2 deletions faucet/config_parser_util.py
@@ -72,7 +72,7 @@ def read_config(config_file, logname):
conf = None

try:
-with open(config_file, 'r') as stream:
+with open(config_file, 'r', encoding='utf-8') as stream:
conf_txt = stream.read()
conf = yaml.safe_load(conf_txt)
except (yaml.YAMLError, UnicodeDecodeError,
@@ -91,7 +91,7 @@ def config_hash_content(content):

def config_file_hash(config_file_name):
"""Return hash of YAML config file contents."""
-with open(config_file_name) as config_file:
+with open(config_file_name, encoding='utf-8') as config_file:
return config_hash_content(config_file.read())


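
The read_config hunk above already lists UnicodeDecodeError among the exceptions it catches, and that is exactly what a mis-encoded config file produces now that decoding is pinned to UTF-8. A minimal sketch of the failure mode, with a hypothetical function name and file:

    import yaml

    def read_config_sketch(config_file_name):
        try:
            with open(config_file_name, encoding='utf-8') as stream:
                return yaml.safe_load(stream.read())
        except (yaml.YAMLError, UnicodeDecodeError) as err:
            # e.g. a config saved as latin-1 with accented characters ends up here
            print('could not load %s: %s' % (config_file_name, err))
            return None
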
2 changes: 1 addition & 1 deletion faucet/gauge_pollers.py
@@ -125,7 +125,7 @@ def _update(self, rcv_time, msg):
for stat_name, stat_val in self._format_stat_pairs('-', stat):
dp_stat_name = self._dp_stat_name(stat, stat_name)
log_lines.append(self._update_line(rcv_time_str, dp_stat_name, stat_val))
-with open(self.conf.file, 'a') as logfile:
+with open(self.conf.file, 'a', encoding='utf-8') as logfile:
logfile.writelines(log_lines)

@staticmethod
2 changes: 1 addition & 1 deletion faucet/valves_manager.py
@@ -213,7 +213,7 @@ def revert_config(self):
for config_file_name, config_content in self.meta_dp_state.last_good_config.items():
self.logger.info('attempting to revert to last good config: %s' % config_file_name)
try:
-with open(config_file_name, 'w') as config_file:
+with open(config_file_name, 'w', encoding='utf-8') as config_file:
config_file.write(str(config_content))
except (FileNotFoundError, OSError, PermissionError) as err:
self.logger.error('could not revert %s: %s' % (config_file_name, err))
4 changes: 2 additions & 2 deletions faucet/watcher.py
@@ -94,7 +94,7 @@ def _update(self, rcv_time, msg):
log_msg = '%s %s' % (dpid_log(self.dp.dp_id), log_msg)
self.logger.info(log_msg)
if self.conf.file:
-with open(self.conf.file, 'a') as logfile:
+with open(self.conf.file, 'a', encoding='utf-8') as logfile:
logfile.write('\t'.join((rcv_time_str, log_msg)) + '\n')

def send_req(self):
@@ -167,5 +167,5 @@ def _update(self, rcv_time, msg):
with gzip.open(filename, 'wt') as outfile:
outfile.write(json.dumps(msg.to_jsondict()))
else:
-with open(filename, 'w') as outfile:
+with open(filename, 'w', encoding='utf-8') as outfile:
json.dump(msg.to_jsondict(), outfile, indent=2)
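
The gzip.open(filename, 'wt') branch above keeps its implicit default; gzip.open in text mode wraps the stream in a TextIOWrapper the same way plain open() does, so the analogous explicit form would be (a sketch, not part of this commit):

    import gzip
    import json

    with gzip.open('flow-dump.json.gz', 'wt', encoding='utf-8') as outfile:  # hypothetical name
        outfile.write(json.dumps({'msg': 'example'}))  # placeholder data
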
2 changes: 1 addition & 1 deletion tests/generative/fuzzer/config/fuzz_config.py
@@ -22,7 +22,7 @@

def create_config_file(config):
"""Create config file with given contents."""
-with open(conf_file_name, 'w') as conf_file:
+with open(conf_file_name, 'w', encoding='utf-8') as conf_file:
conf_file.write(config)
return conf_file_name

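
Fuzzer-generated configs are the one case above where the payload may not be representable in UTF-8 at all (for example strings carrying lone surrogates). If that ever occurs, the explicit-encoding write raises UnicodeEncodeError; an error handler makes the write fail-safe at the cost of not round-tripping the exact bytes. A hedged sketch under that assumption, with a hypothetical function name and path:

    def create_config_file_sketch(config, conf_file_name='/tmp/fuzz-faucet.yaml'):  # hypothetical path
        """Write a fuzzed config, replacing anything UTF-8 cannot encode."""
        with open(conf_file_name, 'w', encoding='utf-8', errors='replace') as conf_file:
            conf_file.write(config)
        return conf_file_name
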
(Diffs for the remaining changed files are not shown in this view.)
