Dataset Viewer
edited_code
stringlengths 17
978k
| original_code
stringlengths 17
978k
|
---|---|
"""
Input: tsv file in the form
Input Video filename | topic | subtopic | title greek | title english | start time | end time | delete segments
input.mp4 | 1 | 1 | έξοδος | output | 00:10:05 | 00:30:10 | 00:11:15-00:12:30,00:20:35-00:22:10
"""
import os
import subprocess
import sys
import yaml
def run_cmd(command: str):
"""run_cmd Run given shell command
Args:
command (str): Shell command to run
Returns:
(int, str): Status code, stdout of shell command
Examples:
>>> run_cmd("ls /")
(0, 'bin\nboot\ndev\netc\nhome\ninit\nlib\nlib32\nlib64\nlibx32\nlost+found\nmedia\nmnt\nopt\nproc\nroot\nrun\nsbin\nsnap\nsrv\nsys\ntmp\nusr\nvar\n')
"""
command = f'{os.getenv('SHELL')} -c "{command}"'
pipe = subprocess.Popen(
command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
stdout = ""
if pipe.stdout is not None:
stdout = "".join(
[line.decode("utf-8") for line in iter(pipe.stdout.readline, b"")]
)
pipe.stdout.close()
returncode = pipe.wait()
print(stdout)
return returncode, stdout
def out_video(segment, greek=True):
title_idx = 3 if greek else 4
title, topic, subtopic = segment[title_idx], segment[1], segment[2]
name = f"{title}_{topic}-{subtopic}.mp4"
return name
def input_video(segment):
return segment[0]
def manage_timestamps(segment):
try:
st, et = segment[5], segment[6]
except:
st = segment[5]
return [st]
try:
delete_timestamps = segment[7]
except:
return [st, et]
if not delete_timestamps:
return [st, et]
else:
return (
[st]
+ [
t
for s in delete_timestamps.split(",")
for t in (s.split("-")[0], s.split("-")[1])
]
+ [et]
)
def to_cut_fmt(timestamp):
out = ""
labels = ["h", "m", "s"]
lb_idx = 0
for c in timestamp:
if c == ":":
out += labels[lb_idx]
lb_idx += 1
else:
out += c
return out
def to_cut_yaml(inmp4, outmp4, ymlname, timestamps):
def pairwise(iterable):
"s -> (s0, s1), (s2, s3), (s4, s5), ..."
a = iter(iterable)
return list(zip(a, a))
timestamps = [to_cut_fmt(t) for t in timestamps]
timeframe = []
if len(timestamps) == 1:
timeframe = [{"from": "start", "to": timestamps[0]}]
else:
for s, e in pairwise(["start"] + timestamps + ["end"]):
timeframe += [{"from": s, "to": e}]
out = {
"input": inmp4,
"output": outmp4,
"cut_method": "delete",
"timeframe": timeframe,
}
with open(ymlname, "w") as fd:
yaml.dump(out, fd, default_flow_style=False, sort_keys=False)
def format_timestamp_args(timestamps):
if len(timestamps) == 1:
return [f"-ss {timestamps[0]} "]
def pairwise(iterable):
"s -> (s0, s1), (s2, s3), (s4, s5), ..."
a = iter(iterable)
return list(zip(a, a))
cmds = [f"-ss {s} -to {e}" for s, e in pairwise(timestamps)]
return cmds
def ffmpeg(inp, out, timestamps_args):
if len(timestamps_args) == 1:
run_cmd(f"ffmpeg -y -i '{inp}' " + timestamps_args[0] + f" -c:v h265_nvenc -crf 24 -preset fast -c:a copy '{out}'")
return
mp4s = []
for i, arg in enumerate(timestamps_args):
mp4s.append(f"{i}.mp4")
cmd = f"ffmpeg -i '{inp}' " + arg + f" -c:v h265_nvenc -crf 24 -preset fast -c:a copy '{i}.mp4'"
print(cmd)
run_cmd(cmd)
tmp = ".tmp_files.txt"
with open(tmp, "w") as fd:
for f in mp4s:
fd.write(f"file '{f}'\n")
run_cmd(f"ffmpeg -y -f concat -i .tmp_files.txt '{out}'")
run_cmd(f"rm {tmp} " + " ".join(mp4s))
def read_split_tsv(timestamp_file):
with open(timestamp_file) as f:
segments = [ln.strip().split("\t") for ln in f]
return segments
def main():
timestamp_file = sys.argv[1]
segments = read_split_tsv(timestamp_file)
for segment in segments:
inmp4 = input_video(segment)
outmp4 = "out/" + out_video(segment, greek=True)
timestamps = manage_timestamps(segment)
timestamp_args = format_timestamp_args(timestamps)
ffmpeg(inmp4, outmp4, timestamp_args)
def main1():
timestamp_file = sys.argv[1]
segments = read_split_tsv(timestamp_file)
for i, segment in enumerate(segments):
inmp4 = input_video(segment)
outmp4 = out_video(segment, greek=True)
timestamps = manage_timestamps(segment)
to_cut_yaml(inmp4, outmp4, f"{i}.yml", timestamps)
if __name__ == "__main__":
main()
| """
Input: tsv file in the form
Input Video filename | topic | subtopic | title greek | title english | start time | end time | delete segments
input.mp4 | 1 | 1 | έξοδος | output | 00:10:05 | 00:30:10 | 00:11:15-00:12:30,00:20:35-00:22:10
"""
import os
import subprocess
import sys
import yaml
def run_cmd(command: str):
"""run_cmd Run given shell command
Args:
command (str): Shell command to run
Returns:
(int, str): Status code, stdout of shell command
Examples:
>>> run_cmd("ls /")
(0, 'bin\nboot\ndev\netc\nhome\ninit\nlib\nlib32\nlib64\nlibx32\nlost+found\nmedia\nmnt\nopt\nproc\nroot\nrun\nsbin\nsnap\nsrv\nsys\ntmp\nusr\nvar\n')
"""
command = f'{os.getenv("SHELL")} -c "{command}"'
pipe = subprocess.Popen(
command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
stdout = ""
if pipe.stdout is not None:
stdout = "".join(
[line.decode("utf-8") for line in iter(pipe.stdout.readline, b"")]
)
pipe.stdout.close()
returncode = pipe.wait()
print(stdout)
return returncode, stdout
def out_video(segment, greek=True):
title_idx = 3 if greek else 4
title, topic, subtopic = segment[title_idx], segment[1], segment[2]
name = f"{title}_{topic}-{subtopic}.mp4"
return name
def input_video(segment):
return segment[0]
def manage_timestamps(segment):
try:
st, et = segment[5], segment[6]
except:
st = segment[5]
return [st]
try:
delete_timestamps = segment[7]
except:
return [st, et]
if not delete_timestamps:
return [st, et]
else:
return (
[st]
+ [
t
for s in delete_timestamps.split(",")
for t in (s.split("-")[0], s.split("-")[1])
]
+ [et]
)
def to_cut_fmt(timestamp):
out = ""
labels = ["h", "m", "s"]
lb_idx = 0
for c in timestamp:
if c == ":":
out += labels[lb_idx]
lb_idx += 1
else:
out += c
return out
def to_cut_yaml(inmp4, outmp4, ymlname, timestamps):
def pairwise(iterable):
"s -> (s0, s1), (s2, s3), (s4, s5), ..."
a = iter(iterable)
return list(zip(a, a))
timestamps = [to_cut_fmt(t) for t in timestamps]
timeframe = []
if len(timestamps) == 1:
timeframe = [{"from": "start", "to": timestamps[0]}]
else:
for s, e in pairwise(["start"] + timestamps + ["end"]):
timeframe += [{"from": s, "to": e}]
out = {
"input": inmp4,
"output": outmp4,
"cut_method": "delete",
"timeframe": timeframe,
}
with open(ymlname, "w") as fd:
yaml.dump(out, fd, default_flow_style=False, sort_keys=False)
def format_timestamp_args(timestamps):
if len(timestamps) == 1:
return [f"-ss {timestamps[0]} "]
def pairwise(iterable):
"s -> (s0, s1), (s2, s3), (s4, s5), ..."
a = iter(iterable)
return list(zip(a, a))
cmds = [f"-ss {s} -to {e}" for s, e in pairwise(timestamps)]
return cmds
def ffmpeg(inp, out, timestamps_args):
if len(timestamps_args) == 1:
run_cmd(f"ffmpeg -y -i '{inp}' " + timestamps_args[0] + f" -c:v h265_nvenc -crf 24 -preset fast -c:a copy '{out}'")
return
mp4s = []
for i, arg in enumerate(timestamps_args):
mp4s.append(f"{i}.mp4")
cmd = f"ffmpeg -i '{inp}' " + arg + f" -c:v h265_nvenc -crf 24 -preset fast -c:a copy '{i}.mp4'"
print(cmd)
run_cmd(cmd)
tmp = ".tmp_files.txt"
with open(tmp, "w") as fd:
for f in mp4s:
fd.write(f"file '{f}'\n")
run_cmd(f"ffmpeg -y -f concat -i .tmp_files.txt '{out}'")
run_cmd(f"rm {tmp} " + " ".join(mp4s))
def read_split_tsv(timestamp_file):
with open(timestamp_file) as f:
segments = [ln.strip().split("\t") for ln in f]
return segments
def main():
timestamp_file = sys.argv[1]
segments = read_split_tsv(timestamp_file)
for segment in segments:
inmp4 = input_video(segment)
outmp4 = "out/" + out_video(segment, greek=True)
timestamps = manage_timestamps(segment)
timestamp_args = format_timestamp_args(timestamps)
ffmpeg(inmp4, outmp4, timestamp_args)
def main1():
timestamp_file = sys.argv[1]
segments = read_split_tsv(timestamp_file)
for i, segment in enumerate(segments):
inmp4 = input_video(segment)
outmp4 = out_video(segment, greek=True)
timestamps = manage_timestamps(segment)
to_cut_yaml(inmp4, outmp4, f"{i}.yml", timestamps)
if __name__ == "__main__":
main()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Analyze CSV file into scores.
Created on Sat Feb 12 22:15:29 2022 // @hk_nien
"""
from pathlib import Path
import os
import re
import sys
import pandas as pd
import numpy as np
PCODES = dict([
# Regio Noord
(1011, 'Amsterdam'),
(1625, 'Hoorn|Zwaag'),
(1811, 'Alkmaar'),
(7471, 'Goor'),
(7556, 'Hengelo'),
(7903, 'Hoogeveen'),
(7942, 'Meppel'),
(8011, 'Zwolle'),
(8232, 'Lelystad'),
(8442, 'Heerenveen'),
(8911, 'Leeuwarden'),
(9291, 'Kollum'),
(9501, 'Stadskanaal'),
(9726, 'Groningen'),
# Regio Midden
(2406, 'Alphen a/d Rijn'),
(2515, 'Den Haag'),
(3013, 'Rotterdam'),
(3511, 'Utrecht'),
(3901, 'Veenendaal'),
((7137, 7131), 'Lichtenvoorde|Groenlo'),
(7311, 'Apeldoorn'),
# Regio Zuid
(4325, 'Renesse'),
(4462, 'Goes'),
(4701, 'Roosendaal'),
(5038, 'Tilburg'),
(5401, 'Uden'),
(5611, 'Eindhoven'),
(5801, 'Oostrum'),
(6101, 'Echt'),
(6229, 'Maastricht'),
(6541, 'Nijmegen'),
])
def get_bad_scan_times():
"""Return list of Timestamps with bad scan times, from CSV data."""
df = pd.read_csv('data-ggd/ggd_bad_scans.txt', comment='#')
tstamps = pd.to_datetime(df['Timestamp']).to_list()
return tstamps
def _mean_time(ts_list):
"""Return mean timestamp value from list of timestamps."""
ts0 = ts_list[0]
delta_sum = pd.Timedelta(0)
for ts in ts_list:
delta_sum += (ts -ts0)
ts_mean = ts0 + delta_sum / len(ts_list)
return ts_mean
def _delta_time_hhmm(hm):
"""Convert 'hh:mm' string to TimeDelta."""
return pd.Timedelta(f'{hm}:00')
def _summary_to_scores(summary):
"""Convert summary from _read_log to scores dict and effective timestamp.
Parameters:
- summary: dict with int(pc4) -> [(query_time, appt_time), ...]
Return:
- scores dict: int(pc4) -> score (int or float or '?')
- timestamp: middle query timestamp of this run.
"""
# Convert to number codes.
scores = {k: '?' for k in PCODES}
multi_pcs = {} # pc4 -> (pc4[0], pc4[1], ...)
for pc in PCODES:
if isinstance(pc, tuple):
for pc1 in pc:
multi_pcs[pc1] = pc
qtms = []
dhm = _delta_time_hhmm
for pc4, vlist in summary.items():
pc4 = int(pc4)
if pc4 not in scores:
if pc4 in multi_pcs:
pc4_key = multi_pcs[pc4]
else:
print(f'{pc4} not in list...')
continue
else:
pc4_key = pc4
if len(vlist) == 0:
scores[pc4_key] = 7
continue
qtm = _mean_time([v[0] for v in vlist]) # query time
qtms.append(qtm)
atm = min(v[1] for v in vlist) # earliest appointment time
qtm_00 = pd.Timestamp(qtm.strftime('%Y-%m-%dT00:00'))
thresholds = [
(3, qtm_00 + dhm('23:59')),
(4, qtm + dhm('24:00')),
(5, qtm_00 + dhm('48:00')),
(6, qtm + dhm('48:00')),
(6.3, qtm_00 + dhm('72:00')),
(6.7, qtm + dhm('72:00')),
(7, atm)
]
if qtm.hour < 9:
thresholds.insert(0, (1, qtm_00 + dhm('13:00')))
elif qtm.hour < 13:
thresholds.insert(0, (1, qtm + dhm('4:00')))
elif qtm.hour < 17:
thresholds.insert(0, (1, qtm_00 + dhm('24:00')))
thresholds.insert(1, (2, qtm + dhm('20:00')))
else:
thresholds.insert(0, (1, qtm_00 + dhm('24:00')))
thresholds.insert(1, (2, qtm_00 + dhm('37:00')))
for s, tm in thresholds:
if atm < tm:
scores[pc4_key] = s
break
if len(qtms) == 0:
qtm_mid = pd.Timestamp(None)
else:
qtm_min = min(qtms)
qtm_mid = qtm_min + (max(qtms) - qtm_min)/2
return scores, qtm_mid
def _get_min_wait(summary):
"""Return minimum and median wait Timedelta between scan time and appointment.
summary is dict of pc4 -> list of timestamps
No data -> 999 h.
For the median, NaT is counted as infinite.
"""
wtimes = []
for _, vlist in summary.items():
wtimes_this = [atm - qtm for qtm, atm in vlist]
wtimes.append(
min(wtimes_this) if wtimes_this else pd.Timedelta(99, 'h')
)
minwait = min(wtimes) if wtimes else 999
medwait = pd.Timedelta(np.median(wtimes))
return minwait, medwait
def load_csv(csv_fname):
"""Return DataFrame and list of start times (+1)."""
df = pd.read_csv(csv_fname, comment='#')
df['req_pc4'] = df['req_pc4'].astype(int)
for c in df.columns:
if c.endswith('_time') or c.endswith('_date'):
df[c] = pd.to_datetime(df[c])
else:
df.loc[df[c].isna(), c] = None
# start_tms: list of scan start times (plus one extra at the end)
start_tms = df.loc[df['scan_time'].diff() > pd.Timedelta('10 min'), 'scan_time']
start_tms = [df.iloc[0]['scan_time']] + list(start_tms)
start_tms += [df.iloc[-1]['scan_time'] + pd.Timedelta('1 min')]
return df, start_tms
def load_multi_csvs(csv_fnames):
"""Return DataFrame and list of start times (+1)"""
dfs = []
start_tms = []
for f in csv_fnames:
df, st = load_csv(f)
dfs.append(df)
start_tms.extend(st[:-1])
df = pd.concat(dfs).reset_index()
start_tms.append(df.iloc[-1]['scan_time'] + pd.Timedelta('1 min'))
return df, start_tms
def get_scan_scores(df, tm_range):
"""Get scan scores as pc4 -> score dict.
Parameters:
- df: DataFrame with scan_time, req_date, req_pc4, opt0_short_addr,
opt0_time, opt0_loc_id, etc.
- tm_range: (tm_start, tm_stop) timestamps.
Return:
- tstamp: timestamp of the scan (mid-point)
- scores: dict of pc4->score
- min_wait: Timedelta of minimum wait time from scan to appointment
"""
mask = (df['scan_time'] >= tm_range[0]) & (df['scan_time'] < tm_range[1])
df1 = df.loc[mask]
summary = {}
for pc4, city_re in PCODES.items():
pc4_tup = (pc4,) if isinstance(pc4, int) else pc4
options = []
req_pc4 = None
for _, row in df1.loc[df1['req_pc4'].isin(pc4_tup)].iterrows():
req_pc4 = int(row['req_pc4'])
for i in range(3):
addr = row[f'opt{i}_short_addr']
if addr and re.match(f'{city_re}$', addr[5:]):
options.append((row['scan_time'], row[f'opt{i}_time']))
if req_pc4 is not None:
summary[req_pc4] = options
scores, tstamp = _summary_to_scores(summary)
if pd.isna(tstamp):
tstamp = df1.iloc[len(df1)//2]['scan_time']
minwait, medwait = _get_min_wait(summary)
if medwait == 999:
medwait = pd.Timedelta(None)
return tstamp, scores, minwait, medwait
def get_scan_scores_df(df, tm_ranges, decimal_comma=True):
"""Get scan scores as dataframe, from csv dataframe.
Blacklisted scan times are dropped.
Parameters:
- df: DataFrame with scan_time, req_date, req_pc4, opt0_short_addr,
opt0_time, opt0_loc_id, etc.
- tm_ranges: list of timestamps (+one at the end) with boundaries
of timestamp ranges.
- decimal_comma: True to have string values 6,3 rather than float 6.3.
Return:
- Dataframe with scores, date_str, time_str, pc4, min_wait, med_wait as columns.
"""
n = len(tm_ranges)
records = []
index = []
minwait_hs = []
medwait_hs = []
bad_stimes = get_bad_scan_times()
for i in range(n-1):
tm_ra = tm_ranges[i:i+2]
is_ok = True
for tm in bad_stimes:
if tm_ra[0] <= tm < tm_ra[1]:
is_ok = False
break
if not is_ok:
print(f'Dropped scan at {tm_ra[0].strftime('%Y-%m-%d %H:%M')}')
continue
tm, scores, minwait, medwait = get_scan_scores(df, tm_ra)
records.append(scores)
index.append(tm)
minwait_hs.append(minwait.total_seconds() / 3600)
medwait_hs.append(medwait.total_seconds() / 3600)
dates = [t.strftime('%Y-%m-%d') for t in index]
times = [t.strftime('%H:%M') for t in index]
sdf = pd.DataFrame.from_records(records)
sdf.insert(0, 'Time', times)
sdf.insert(0, 'Date', dates)
sdf['min_wait_h'] = np.around(minwait_hs, 2)
sdf['med_wait_h'] = np.around(medwait_hs, 2)
sdf.loc[sdf['min_wait_h'].isna(), 'min_wait_h'] = 999
sdf.columns = [
('/'.join([str(x) for x in c]) if isinstance(c, tuple) else c)
for c in sdf.columns
]
if decimal_comma:
for c in sdf.columns[2:]:
sdf[c] = sdf[c].astype(str)
sdf[c] = sdf[c].str.replace('.', ',', regex=False)
sdf[c] = sdf[c].str.replace(',0$', '', regex=False)
sdf[c] = sdf[c].str.replace('?', '', regex=False)
return sdf
if __name__ == '__main__':
in_spyder = ('SPYDER_ARGS' in os.environ)
csv_fnames = sorted(Path('data-ggd').glob('ggd_scan-????-W??.csv'))
do_all = ('--all' in sys.argv)
do_all = do_all or in_spyder and input('(A)ll or latest?').lower() == 'a'
if do_all:
df, start_tms = load_multi_csvs(csv_fnames)
sdf = get_scan_scores_df(df, start_tms).iloc[::-1]
else:
df, start_tms = load_csv(csv_fnames[-1])
sdf = get_scan_scores_df(df, start_tms[-2:])
print(sdf)
if len(sdf) > 1:
sdf.to_clipboard(index=False)
print('Copied to clipboard including headers')
elif len(sdf) == 1:
sdf.iloc[[0], 2:].to_clipboard(header=False, index=False)
print('Copied to clipboard, scores only.')
else:
print('No output.')
if not in_spyder:
# Note: in Spyder, copy/paste will stall while input is blocked.
input('Press Enter to quit and clear clipboard.')
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Analyze CSV file into scores.
Created on Sat Feb 12 22:15:29 2022 // @hk_nien
"""
from pathlib import Path
import os
import re
import sys
import pandas as pd
import numpy as np
PCODES = dict([
# Regio Noord
(1011, 'Amsterdam'),
(1625, 'Hoorn|Zwaag'),
(1811, 'Alkmaar'),
(7471, 'Goor'),
(7556, 'Hengelo'),
(7903, 'Hoogeveen'),
(7942, 'Meppel'),
(8011, 'Zwolle'),
(8232, 'Lelystad'),
(8442, 'Heerenveen'),
(8911, 'Leeuwarden'),
(9291, 'Kollum'),
(9501, 'Stadskanaal'),
(9726, 'Groningen'),
# Regio Midden
(2406, 'Alphen a/d Rijn'),
(2515, 'Den Haag'),
(3013, 'Rotterdam'),
(3511, 'Utrecht'),
(3901, 'Veenendaal'),
((7137, 7131), 'Lichtenvoorde|Groenlo'),
(7311, 'Apeldoorn'),
# Regio Zuid
(4325, 'Renesse'),
(4462, 'Goes'),
(4701, 'Roosendaal'),
(5038, 'Tilburg'),
(5401, 'Uden'),
(5611, 'Eindhoven'),
(5801, 'Oostrum'),
(6101, 'Echt'),
(6229, 'Maastricht'),
(6541, 'Nijmegen'),
])
def get_bad_scan_times():
"""Return list of Timestamps with bad scan times, from CSV data."""
df = pd.read_csv('data-ggd/ggd_bad_scans.txt', comment='#')
tstamps = pd.to_datetime(df['Timestamp']).to_list()
return tstamps
def _mean_time(ts_list):
"""Return mean timestamp value from list of timestamps."""
ts0 = ts_list[0]
delta_sum = pd.Timedelta(0)
for ts in ts_list:
delta_sum += (ts -ts0)
ts_mean = ts0 + delta_sum / len(ts_list)
return ts_mean
def _delta_time_hhmm(hm):
"""Convert 'hh:mm' string to TimeDelta."""
return pd.Timedelta(f'{hm}:00')
def _summary_to_scores(summary):
"""Convert summary from _read_log to scores dict and effective timestamp.
Parameters:
- summary: dict with int(pc4) -> [(query_time, appt_time), ...]
Return:
- scores dict: int(pc4) -> score (int or float or '?')
- timestamp: middle query timestamp of this run.
"""
# Convert to number codes.
scores = {k: '?' for k in PCODES}
multi_pcs = {} # pc4 -> (pc4[0], pc4[1], ...)
for pc in PCODES:
if isinstance(pc, tuple):
for pc1 in pc:
multi_pcs[pc1] = pc
qtms = []
dhm = _delta_time_hhmm
for pc4, vlist in summary.items():
pc4 = int(pc4)
if pc4 not in scores:
if pc4 in multi_pcs:
pc4_key = multi_pcs[pc4]
else:
print(f'{pc4} not in list...')
continue
else:
pc4_key = pc4
if len(vlist) == 0:
scores[pc4_key] = 7
continue
qtm = _mean_time([v[0] for v in vlist]) # query time
qtms.append(qtm)
atm = min(v[1] for v in vlist) # earliest appointment time
qtm_00 = pd.Timestamp(qtm.strftime('%Y-%m-%dT00:00'))
thresholds = [
(3, qtm_00 + dhm('23:59')),
(4, qtm + dhm('24:00')),
(5, qtm_00 + dhm('48:00')),
(6, qtm + dhm('48:00')),
(6.3, qtm_00 + dhm('72:00')),
(6.7, qtm + dhm('72:00')),
(7, atm)
]
if qtm.hour < 9:
thresholds.insert(0, (1, qtm_00 + dhm('13:00')))
elif qtm.hour < 13:
thresholds.insert(0, (1, qtm + dhm('4:00')))
elif qtm.hour < 17:
thresholds.insert(0, (1, qtm_00 + dhm('24:00')))
thresholds.insert(1, (2, qtm + dhm('20:00')))
else:
thresholds.insert(0, (1, qtm_00 + dhm('24:00')))
thresholds.insert(1, (2, qtm_00 + dhm('37:00')))
for s, tm in thresholds:
if atm < tm:
scores[pc4_key] = s
break
if len(qtms) == 0:
qtm_mid = pd.Timestamp(None)
else:
qtm_min = min(qtms)
qtm_mid = qtm_min + (max(qtms) - qtm_min)/2
return scores, qtm_mid
def _get_min_wait(summary):
"""Return minimum and median wait Timedelta between scan time and appointment.
summary is dict of pc4 -> list of timestamps
No data -> 999 h.
For the median, NaT is counted as infinite.
"""
wtimes = []
for _, vlist in summary.items():
wtimes_this = [atm - qtm for qtm, atm in vlist]
wtimes.append(
min(wtimes_this) if wtimes_this else pd.Timedelta(99, 'h')
)
minwait = min(wtimes) if wtimes else 999
medwait = pd.Timedelta(np.median(wtimes))
return minwait, medwait
def load_csv(csv_fname):
"""Return DataFrame and list of start times (+1)."""
df = pd.read_csv(csv_fname, comment='#')
df['req_pc4'] = df['req_pc4'].astype(int)
for c in df.columns:
if c.endswith('_time') or c.endswith('_date'):
df[c] = pd.to_datetime(df[c])
else:
df.loc[df[c].isna(), c] = None
# start_tms: list of scan start times (plus one extra at the end)
start_tms = df.loc[df['scan_time'].diff() > pd.Timedelta('10 min'), 'scan_time']
start_tms = [df.iloc[0]['scan_time']] + list(start_tms)
start_tms += [df.iloc[-1]['scan_time'] + pd.Timedelta('1 min')]
return df, start_tms
def load_multi_csvs(csv_fnames):
"""Return DataFrame and list of start times (+1)"""
dfs = []
start_tms = []
for f in csv_fnames:
df, st = load_csv(f)
dfs.append(df)
start_tms.extend(st[:-1])
df = pd.concat(dfs).reset_index()
start_tms.append(df.iloc[-1]['scan_time'] + pd.Timedelta('1 min'))
return df, start_tms
def get_scan_scores(df, tm_range):
"""Get scan scores as pc4 -> score dict.
Parameters:
- df: DataFrame with scan_time, req_date, req_pc4, opt0_short_addr,
opt0_time, opt0_loc_id, etc.
- tm_range: (tm_start, tm_stop) timestamps.
Return:
- tstamp: timestamp of the scan (mid-point)
- scores: dict of pc4->score
- min_wait: Timedelta of minimum wait time from scan to appointment
"""
mask = (df['scan_time'] >= tm_range[0]) & (df['scan_time'] < tm_range[1])
df1 = df.loc[mask]
summary = {}
for pc4, city_re in PCODES.items():
pc4_tup = (pc4,) if isinstance(pc4, int) else pc4
options = []
req_pc4 = None
for _, row in df1.loc[df1['req_pc4'].isin(pc4_tup)].iterrows():
req_pc4 = int(row['req_pc4'])
for i in range(3):
addr = row[f'opt{i}_short_addr']
if addr and re.match(f'{city_re}$', addr[5:]):
options.append((row['scan_time'], row[f'opt{i}_time']))
if req_pc4 is not None:
summary[req_pc4] = options
scores, tstamp = _summary_to_scores(summary)
if pd.isna(tstamp):
tstamp = df1.iloc[len(df1)//2]['scan_time']
minwait, medwait = _get_min_wait(summary)
if medwait == 999:
medwait = pd.Timedelta(None)
return tstamp, scores, minwait, medwait
def get_scan_scores_df(df, tm_ranges, decimal_comma=True):
"""Get scan scores as dataframe, from csv dataframe.
Blacklisted scan times are dropped.
Parameters:
- df: DataFrame with scan_time, req_date, req_pc4, opt0_short_addr,
opt0_time, opt0_loc_id, etc.
- tm_ranges: list of timestamps (+one at the end) with boundaries
of timestamp ranges.
- decimal_comma: True to have string values 6,3 rather than float 6.3.
Return:
- Dataframe with scores, date_str, time_str, pc4, min_wait, med_wait as columns.
"""
n = len(tm_ranges)
records = []
index = []
minwait_hs = []
medwait_hs = []
bad_stimes = get_bad_scan_times()
for i in range(n-1):
tm_ra = tm_ranges[i:i+2]
is_ok = True
for tm in bad_stimes:
if tm_ra[0] <= tm < tm_ra[1]:
is_ok = False
break
if not is_ok:
print(f'Dropped scan at {tm_ra[0].strftime("%Y-%m-%d %H:%M")}')
continue
tm, scores, minwait, medwait = get_scan_scores(df, tm_ra)
records.append(scores)
index.append(tm)
minwait_hs.append(minwait.total_seconds() / 3600)
medwait_hs.append(medwait.total_seconds() / 3600)
dates = [t.strftime('%Y-%m-%d') for t in index]
times = [t.strftime('%H:%M') for t in index]
sdf = pd.DataFrame.from_records(records)
sdf.insert(0, 'Time', times)
sdf.insert(0, 'Date', dates)
sdf['min_wait_h'] = np.around(minwait_hs, 2)
sdf['med_wait_h'] = np.around(medwait_hs, 2)
sdf.loc[sdf['min_wait_h'].isna(), 'min_wait_h'] = 999
sdf.columns = [
('/'.join([str(x) for x in c]) if isinstance(c, tuple) else c)
for c in sdf.columns
]
if decimal_comma:
for c in sdf.columns[2:]:
sdf[c] = sdf[c].astype(str)
sdf[c] = sdf[c].str.replace('.', ',', regex=False)
sdf[c] = sdf[c].str.replace(',0$', '', regex=False)
sdf[c] = sdf[c].str.replace('?', '', regex=False)
return sdf
if __name__ == '__main__':
in_spyder = ('SPYDER_ARGS' in os.environ)
csv_fnames = sorted(Path('data-ggd').glob('ggd_scan-????-W??.csv'))
do_all = ('--all' in sys.argv)
do_all = do_all or in_spyder and input('(A)ll or latest?').lower() == 'a'
if do_all:
df, start_tms = load_multi_csvs(csv_fnames)
sdf = get_scan_scores_df(df, start_tms).iloc[::-1]
else:
df, start_tms = load_csv(csv_fnames[-1])
sdf = get_scan_scores_df(df, start_tms[-2:])
print(sdf)
if len(sdf) > 1:
sdf.to_clipboard(index=False)
print('Copied to clipboard including headers')
elif len(sdf) == 1:
sdf.iloc[[0], 2:].to_clipboard(header=False, index=False)
print('Copied to clipboard, scores only.')
else:
print('No output.')
if not in_spyder:
# Note: in Spyder, copy/paste will stall while input is blocked.
input('Press Enter to quit and clear clipboard.')
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import atexit
import collections
import collections.abc
import enum
import fcntl
import logging
import os
import os.path
import pathlib
import queue
import re
import select
import shlex
import shutil
import subprocess
import sys
import tarfile
import tempfile
import threading
import time
import json
import serial
import serial.tools.list_ports
import yaml
from tvm.micro.project_api import server
_LOG = logging.getLogger(__name__)
API_SERVER_DIR = pathlib.Path(os.path.dirname(__file__) or os.path.getcwd())
BUILD_DIR = API_SERVER_DIR / "build"
MODEL_LIBRARY_FORMAT_RELPATH = "model.tar"
IS_TEMPLATE = not (API_SERVER_DIR / MODEL_LIBRARY_FORMAT_RELPATH).exists()
BOARDS = API_SERVER_DIR / "boards.json"
# Data structure to hold the information microtvm_api_server.py needs
# to communicate with each of these boards.
try:
with open(BOARDS) as boards:
BOARD_PROPERTIES = json.load(boards)
except FileNotFoundError:
raise FileNotFoundError(f"Board file {{{BOARDS}}} does not exist.")
def check_call(cmd_args, *args, **kwargs):
cwd_str = "" if "cwd" not in kwargs else f" (in cwd: {kwargs["cwd"]})"
_LOG.info("run%s: %s", cwd_str, " ".join(shlex.quote(a) for a in cmd_args))
return subprocess.check_call(cmd_args, *args, **kwargs)
CACHE_ENTRY_RE = re.compile(r"(?P<name>[^:]+):(?P<type>[^=]+)=(?P<value>.*)")
CMAKE_BOOL_MAP = dict(
[(k, True) for k in ("1", "ON", "YES", "TRUE", "Y")]
+ [(k, False) for k in ("0", "OFF", "NO", "FALSE", "N", "IGNORE", "NOTFOUND", "")]
)
class CMakeCache(collections.abc.Mapping):
def __init__(self, path):
self._path = path
self._dict = None
def __iter__(self):
return iter(self._dict)
def __getitem__(self, key):
if self._dict is None:
self._dict = self._read_cmake_cache()
return self._dict[key]
def __len__(self):
return len(self._dict)
def _read_cmake_cache(self):
"""Read a CMakeCache.txt-like file and return a dictionary of values."""
entries = collections.OrderedDict()
with open(self._path, encoding="utf-8") as f:
for line in f:
m = CACHE_ENTRY_RE.match(line.rstrip("\n"))
if not m:
continue
if m.group("type") == "BOOL":
value = CMAKE_BOOL_MAP[m.group("value").upper()]
else:
value = m.group("value")
entries[m.group("name")] = value
return entries
CMAKE_CACHE = CMakeCache(BUILD_DIR / "CMakeCache.txt")
class BoardError(Exception):
"""Raised when an attached board cannot be opened (i.e. missing /dev nodes, etc)."""
class BoardAutodetectFailed(Exception):
"""Raised when no attached hardware is found matching the board= given to ZephyrCompiler."""
def _get_flash_runner():
flash_runner = CMAKE_CACHE.get("ZEPHYR_BOARD_FLASH_RUNNER")
if flash_runner is not None:
return flash_runner
with open(CMAKE_CACHE["ZEPHYR_RUNNERS_YAML"]) as f:
doc = yaml.load(f, Loader=yaml.FullLoader)
return doc["flash-runner"]
def _get_device_args(options):
flash_runner = _get_flash_runner()
if flash_runner == "nrfjprog":
return _get_nrf_device_args(options)
if flash_runner == "openocd":
return _get_openocd_device_args(options)
raise BoardError(
f"Don't know how to find serial terminal for board {CMAKE_CACHE["BOARD"]} with flash "
f"runner {flash_runner}"
)
# kwargs passed to usb.core.find to find attached boards for the openocd flash runner.
BOARD_USB_FIND_KW = {
"nucleo_l4r5zi": {"idVendor": 0x0483, "idProduct": 0x374B},
"nucleo_f746zg": {"idVendor": 0x0483, "idProduct": 0x374B},
"stm32f746g_disco": {"idVendor": 0x0483, "idProduct": 0x374B},
"mimxrt1050_evk": {"idVendor": 0x1366, "idProduct": 0x0105},
}
def openocd_serial(options):
"""Find the serial port to use for a board with OpenOCD flash strategy."""
if "openocd_serial" in options:
return options["openocd_serial"]
import usb # pylint: disable=import-outside-toplevel
find_kw = BOARD_USB_FIND_KW[CMAKE_CACHE["BOARD"]]
boards = usb.core.find(find_all=True, **find_kw)
serials = []
for b in boards:
serials.append(b.serial_number)
if len(serials) == 0:
raise BoardAutodetectFailed(f"No attached USB devices matching: {find_kw!r}")
serials.sort()
autodetected_openocd_serial = serials[0]
_LOG.debug("zephyr openocd driver: autodetected serial %s", serials[0])
return autodetected_openocd_serial
def _get_openocd_device_args(options):
return ["--serial", openocd_serial(options)]
def _get_nrf_device_args(options):
nrfjprog_args = ["nrfjprog", "--ids"]
nrfjprog_ids = subprocess.check_output(nrfjprog_args, encoding="utf-8")
if not nrfjprog_ids.strip("\n"):
raise BoardAutodetectFailed(f'No attached boards recognized by {' '.join(nrfjprog_args)}')
boards = nrfjprog_ids.split("\n")[:-1]
if len(boards) > 1:
if options["nrfjprog_snr"] is None:
raise BoardError(
"Multiple boards connected; specify one with nrfjprog_snr=: " f'{', '.join(boards)}'
)
if str(options["nrfjprog_snr"]) not in boards:
raise BoardError(
f"nrfjprog_snr ({options["nrfjprog_snr"]}) not found in {nrfjprog_args}: {boards}"
)
return ["--snr", options["nrfjprog_snr"]]
if not boards:
return []
return ["--snr", boards[0]]
PROJECT_TYPES = []
if IS_TEMPLATE:
for d in (API_SERVER_DIR / "src").iterdir():
if d.is_dir():
PROJECT_TYPES.append(d.name)
PROJECT_OPTIONS = [
server.ProjectOption(
"extra_files_tar",
help="If given, during generate_project, uncompress the tarball at this path into the project dir.",
),
server.ProjectOption(
"gdbserver_port", help=("If given, port number to use when running the local gdbserver.")
),
server.ProjectOption(
"nrfjprog_snr",
help=("When used with nRF targets, serial # of the attached board to use, from nrfjprog."),
),
server.ProjectOption(
"openocd_serial",
help=("When used with OpenOCD targets, serial # of the attached board to use."),
),
server.ProjectOption(
"project_type",
help="Type of project to generate.",
choices=tuple(PROJECT_TYPES),
),
server.ProjectOption("verbose", help="Run build with verbose output.", choices=(True, False)),
server.ProjectOption(
"west_cmd",
help=(
"Path to the west tool. If given, supersedes both the zephyr_base "
"option and ZEPHYR_BASE environment variable."
),
),
server.ProjectOption("zephyr_base", help="Path to the zephyr base directory."),
server.ProjectOption(
"zephyr_board",
choices=list(BOARD_PROPERTIES),
help="Name of the Zephyr board to build for.",
),
server.ProjectOption(
"config_main_stack_size",
help="Sets CONFIG_MAIN_STACK_SIZE for Zephyr board.",
),
]
class Handler(server.ProjectAPIHandler):
def __init__(self):
super(Handler, self).__init__()
self._proc = None
def server_info_query(self, tvm_version):
return server.ServerInfo(
platform_name="zephyr",
is_template=IS_TEMPLATE,
model_library_format_path=""
if IS_TEMPLATE
else (API_SERVER_DIR / MODEL_LIBRARY_FORMAT_RELPATH),
project_options=PROJECT_OPTIONS,
)
# These files and directories will be recursively copied into generated projects from the CRT.
CRT_COPY_ITEMS = ("include", "Makefile", "src")
# Maps extra line added to prj.conf to a tuple or list of zephyr_board for which it is needed.
EXTRA_PRJ_CONF_DIRECTIVES = {
"CONFIG_TIMER_RANDOM_GENERATOR=y": (
"qemu_x86",
"qemu_riscv32",
"qemu_cortex_r5",
"qemu_riscv64",
),
"CONFIG_ENTROPY_GENERATOR=y": (
"mps2_an521",
"nrf5340dk_nrf5340_cpuapp",
"nucleo_f746zg",
"nucleo_l4r5zi",
"stm32f746g_disco",
),
}
def _create_prj_conf(self, project_dir, options):
with open(project_dir / "prj.conf", "w") as f:
f.write(
"# For UART used from main().\n"
"CONFIG_RING_BUFFER=y\n"
"CONFIG_UART_CONSOLE=n\n"
"CONFIG_UART_INTERRUPT_DRIVEN=y\n"
"\n"
)
f.write("# For TVMPlatformAbort().\n" "CONFIG_REBOOT=y\n" "\n")
if options["project_type"] == "host_driven":
f.write("# For RPC server C++ bindings.\n" "CONFIG_CPLUSPLUS=y\n" "\n")
f.write("# For math routines\n" "CONFIG_NEWLIB_LIBC=y\n" "\n")
if self._has_fpu(options["zephyr_board"]):
f.write("# For models with floating point.\n" "CONFIG_FPU=y\n" "\n")
# Set main stack size, if needed.
if options.get("config_main_stack_size") is not None:
f.write(f"CONFIG_MAIN_STACK_SIZE={options["config_main_stack_size"]}\n")
f.write("# For random number generation.\n" "CONFIG_TEST_RANDOM_GENERATOR=y\n")
f.write("\n# Extra prj.conf directives\n")
for line, board_list in self.EXTRA_PRJ_CONF_DIRECTIVES.items():
if options["zephyr_board"] in board_list:
f.write(f"{line}\n")
f.write("\n")
API_SERVER_CRT_LIBS_TOKEN = "<API_SERVER_CRT_LIBS>"
CRT_LIBS_BY_PROJECT_TYPE = {
"host_driven": "microtvm_rpc_server microtvm_rpc_common common",
"aot_demo": "memory microtvm_rpc_common common",
}
def generate_project(self, model_library_format_path, standalone_crt_dir, project_dir, options):
project_dir = pathlib.Path(project_dir)
# Make project directory.
project_dir.mkdir()
# Copy ourselves to the generated project. TVM may perform further build steps on the generated project
# by launching the copy.
shutil.copy2(__file__, project_dir / os.path.basename(__file__))
# Copy boards.json file to generated project.
shutil.copy2(BOARDS, project_dir / BOARDS.name)
# Place Model Library Format tarball in the special location, which this script uses to decide
# whether it's being invoked in a template or generated project.
project_model_library_format_tar_path = project_dir / MODEL_LIBRARY_FORMAT_RELPATH
shutil.copy2(model_library_format_path, project_model_library_format_tar_path)
# Extract Model Library Format tarball.into <project_dir>/model.
extract_path = os.path.splitext(project_model_library_format_tar_path)[0]
with tarfile.TarFile(project_model_library_format_tar_path) as tf:
os.makedirs(extract_path)
tf.extractall(path=extract_path)
if self._is_qemu(options):
shutil.copytree(API_SERVER_DIR / "qemu-hack", project_dir / "qemu-hack")
# Populate CRT.
crt_path = project_dir / "crt"
crt_path.mkdir()
for item in self.CRT_COPY_ITEMS:
src_path = os.path.join(standalone_crt_dir, item)
dst_path = crt_path / item
if os.path.isdir(src_path):
shutil.copytree(src_path, dst_path)
else:
shutil.copy2(src_path, dst_path)
# Populate Makefile.
with open(API_SERVER_DIR / "CMakeLists.txt.template", "r") as cmake_template_f:
with open(project_dir / "CMakeLists.txt", "w") as cmake_f:
for line in cmake_template_f:
if self.API_SERVER_CRT_LIBS_TOKEN in line:
crt_libs = self.CRT_LIBS_BY_PROJECT_TYPE[options["project_type"]]
line = line.replace("<API_SERVER_CRT_LIBS>", crt_libs)
cmake_f.write(line)
self._create_prj_conf(project_dir, options)
# Populate crt-config.h
crt_config_dir = project_dir / "crt_config"
crt_config_dir.mkdir()
shutil.copy2(
API_SERVER_DIR / "crt_config" / "crt_config.h", crt_config_dir / "crt_config.h"
)
# Populate src/
src_dir = project_dir / "src"
shutil.copytree(API_SERVER_DIR / "src" / options["project_type"], src_dir)
# Populate extra_files
if options.get("extra_files_tar"):
with tarfile.open(options["extra_files_tar"], mode="r:*") as tf:
tf.extractall(project_dir)
def build(self, options):
BUILD_DIR.mkdir()
cmake_args = ["cmake", ".."]
if options.get("verbose"):
cmake_args.append("-DCMAKE_VERBOSE_MAKEFILE:BOOL=TRUE")
if options.get("zephyr_base"):
cmake_args.append(f"-DZEPHYR_BASE:STRING={options["zephyr_base"]}")
if options.get("west_cmd"):
cmake_args.append(f"-DWEST={options["west_cmd"]}")
cmake_args.append(f"-DBOARD:STRING={options["zephyr_board"]}")
check_call(cmake_args, cwd=BUILD_DIR)
args = ["make", "-j2"]
if options.get("verbose"):
args.append("VERBOSE=1")
check_call(args, cwd=BUILD_DIR)
# A list of all zephyr_board values which are known to launch using QEMU. Many platforms which
# launch through QEMU by default include "qemu" in their name. However, not all do. This list
# includes those tested platforms which do not include qemu.
_KNOWN_QEMU_ZEPHYR_BOARDS = ("mps2_an521",)
@classmethod
def _is_qemu(cls, options):
return (
"qemu" in options["zephyr_board"]
or options["zephyr_board"] in cls._KNOWN_QEMU_ZEPHYR_BOARDS
)
@classmethod
def _has_fpu(cls, zephyr_board):
fpu_boards = [name for name, board in BOARD_PROPERTIES.items() if board["fpu"]]
return zephyr_board in fpu_boards
def flash(self, options):
if self._is_qemu(options):
return # NOTE: qemu requires no flash step--it is launched from open_transport.
zephyr_board = options["zephyr_board"]
# The nRF5340DK requires an additional `nrfjprog --recover` before each flash cycle.
# This is because readback protection is enabled by default when this device is flashed.
# Otherwise, flashing may fail with an error such as the following:
# ERROR: The operation attempted is unavailable due to readback protection in
# ERROR: your device. Please use --recover to unlock the device.
if zephyr_board.startswith("nrf5340dk") and _get_flash_runner() == "nrfjprog":
recover_args = ["nrfjprog", "--recover"]
recover_args.extend(_get_nrf_device_args(options))
check_call(recover_args, cwd=API_SERVER_DIR / "build")
check_call(["make", "flash"], cwd=API_SERVER_DIR / "build")
def open_transport(self, options):
if self._is_qemu(options):
transport = ZephyrQemuTransport(options)
else:
transport = ZephyrSerialTransport(options)
to_return = transport.open()
self._transport = transport
atexit.register(lambda: self.close_transport())
return to_return
def close_transport(self):
if self._transport is not None:
self._transport.close()
self._transport = None
def read_transport(self, n, timeout_sec):
if self._transport is None:
raise server.TransportClosedError()
return self._transport.read(n, timeout_sec)
def write_transport(self, data, timeout_sec):
if self._transport is None:
raise server.TransportClosedError()
return self._transport.write(data, timeout_sec)
def _set_nonblock(fd):
flag = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, flag | os.O_NONBLOCK)
new_flag = fcntl.fcntl(fd, fcntl.F_GETFL)
assert (new_flag & os.O_NONBLOCK) != 0, "Cannot set file descriptor {fd} to non-blocking"
class ZephyrSerialTransport:
@classmethod
def _lookup_baud_rate(cls, options):
zephyr_base = options.get("zephyr_base", os.environ["ZEPHYR_BASE"])
sys.path.insert(0, os.path.join(zephyr_base, "scripts", "dts"))
try:
import dtlib # pylint: disable=import-outside-toplevel
finally:
sys.path.pop(0)
dt_inst = dtlib.DT(BUILD_DIR / "zephyr" / "zephyr.dts")
uart_baud = (
dt_inst.get_node("/chosen")
.props["zephyr,console"]
.to_path()
.props["current-speed"]
.to_num()
)
_LOG.debug("zephyr transport: found UART baudrate from devicetree: %d", uart_baud)
return uart_baud
@classmethod
def _find_nrf_serial_port(cls, options):
com_ports = subprocess.check_output(
["nrfjprog", "--com"] + _get_device_args(options), encoding="utf-8"
)
ports_by_vcom = {}
for line in com_ports.split("\n")[:-1]:
parts = line.split()
ports_by_vcom[parts[2]] = parts[1]
return ports_by_vcom["VCOM2"]
@classmethod
def _find_openocd_serial_port(cls, options):
serial_number = openocd_serial(options)
ports = [p for p in serial.tools.list_ports.grep(serial_number)]
if len(ports) != 1:
raise Exception(
f"_find_openocd_serial_port: expected 1 port to match {serial_number}, "
f"found: {ports!r}"
)
return ports[0].device
@classmethod
def _find_jlink_serial_port(cls, options):
return cls._find_openocd_serial_port(options)
@classmethod
def _find_serial_port(cls, options):
flash_runner = _get_flash_runner()
if flash_runner == "nrfjprog":
return cls._find_nrf_serial_port(options)
if flash_runner == "openocd":
return cls._find_openocd_serial_port(options)
if flash_runner == "jlink":
return cls._find_jlink_serial_port(options)
raise RuntimeError(f"Don't know how to deduce serial port for flash runner {flash_runner}")
def __init__(self, options):
self._options = options
self._port = None
def open(self):
port_path = self._find_serial_port(self._options)
self._port = serial.Serial(port_path, baudrate=self._lookup_baud_rate(self._options))
return server.TransportTimeouts(
session_start_retry_timeout_sec=2.0,
session_start_timeout_sec=5.0,
session_established_timeout_sec=5.0,
)
def close(self):
self._port.close()
self._port = None
def read(self, n, timeout_sec):
self._port.timeout = timeout_sec
to_return = self._port.read(n)
if not to_return:
raise server.IoTimeoutError()
return to_return
def write(self, data, timeout_sec):
self._port.write_timeout = timeout_sec
bytes_written = 0
while bytes_written < len(data):
n = self._port.write(data)
data = data[n:]
bytes_written += n
class ZephyrQemuMakeResult(enum.Enum):
QEMU_STARTED = "qemu_started"
MAKE_FAILED = "make_failed"
EOF = "eof"
class ZephyrQemuTransport:
"""The user-facing Zephyr QEMU transport class."""
def __init__(self, options):
self.options = options
self.proc = None
self.pipe_dir = None
self.read_fd = None
self.write_fd = None
self._queue = queue.Queue()
def open(self):
self.pipe_dir = pathlib.Path(tempfile.mkdtemp())
self.pipe = self.pipe_dir / "fifo"
self.write_pipe = self.pipe_dir / "fifo.in"
self.read_pipe = self.pipe_dir / "fifo.out"
os.mkfifo(self.write_pipe)
os.mkfifo(self.read_pipe)
if "gdbserver_port" in self.options:
if "env" in self.kwargs:
self.kwargs["env"] = copy.copy(self.kwargs["env"])
else:
self.kwargs["env"] = os.environ.copy()
self.kwargs["env"]["TVM_QEMU_GDBSERVER_PORT"] = str(self.options["gdbserver_port"])
self.proc = subprocess.Popen(
["make", "run", f"QEMU_PIPE={self.pipe}"],
cwd=BUILD_DIR,
stdout=subprocess.PIPE,
)
self._wait_for_qemu()
# NOTE: although each pipe is unidirectional, open both as RDWR to work around a select
# limitation on linux. Without this, non-blocking I/O can't use timeouts because named
# FIFO are always considered ready to read when no one has opened them for writing.
self.read_fd = os.open(self.read_pipe, os.O_RDWR | os.O_NONBLOCK)
self.write_fd = os.open(self.write_pipe, os.O_RDWR | os.O_NONBLOCK)
_set_nonblock(self.read_fd)
_set_nonblock(self.write_fd)
return server.TransportTimeouts(
session_start_retry_timeout_sec=2.0,
session_start_timeout_sec=10.0,
session_established_timeout_sec=10.0,
)
def close(self):
did_write = False
if self.write_fd is not None:
try:
server.write_with_timeout(
self.write_fd, b"\x01x", 1.0
) # Use a short timeout since we will kill the process
did_write = True
except server.IoTimeoutError:
pass
os.close(self.write_fd)
self.write_fd = None
if self.proc:
if not did_write:
self.proc.terminate()
try:
self.proc.wait(5.0)
except subprocess.TimeoutExpired:
self.proc.kill()
if self.read_fd:
os.close(self.read_fd)
self.read_fd = None
if self.pipe_dir is not None:
shutil.rmtree(self.pipe_dir)
self.pipe_dir = None
def read(self, n, timeout_sec):
return server.read_with_timeout(self.read_fd, n, timeout_sec)
def write(self, data, timeout_sec):
to_write = bytearray()
escape_pos = []
for i, b in enumerate(data):
if b == 0x01:
to_write.append(b)
escape_pos.append(i)
to_write.append(b)
while to_write:
num_written = server.write_with_timeout(self.write_fd, to_write, timeout_sec)
to_write = to_write[num_written:]
def _qemu_check_stdout(self):
for line in self.proc.stdout:
line = str(line)
_LOG.info("%s", line)
if "[QEMU] CPU" in line:
self._queue.put(ZephyrQemuMakeResult.QEMU_STARTED)
else:
line = re.sub("[^a-zA-Z0-9 \n]", "", line)
pattern = r"recipe for target (\w*) failed"
if re.search(pattern, line, re.IGNORECASE):
self._queue.put(ZephyrQemuMakeResult.MAKE_FAILED)
self._queue.put(ZephyrQemuMakeResult.EOF)
def _wait_for_qemu(self):
threading.Thread(target=self._qemu_check_stdout, daemon=True).start()
while True:
try:
item = self._queue.get(timeout=120)
except Exception:
raise TimeoutError("QEMU setup timeout.")
if item == ZephyrQemuMakeResult.QEMU_STARTED:
break
if item in [ZephyrQemuMakeResult.MAKE_FAILED, ZephyrQemuMakeResult.EOF]:
raise RuntimeError("QEMU setup failed.")
raise ValueError(f"{item} not expected.")
if __name__ == "__main__":
server.main(Handler())
| # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import atexit
import collections
import collections.abc
import enum
import fcntl
import logging
import os
import os.path
import pathlib
import queue
import re
import select
import shlex
import shutil
import subprocess
import sys
import tarfile
import tempfile
import threading
import time
import json
import serial
import serial.tools.list_ports
import yaml
from tvm.micro.project_api import server
_LOG = logging.getLogger(__name__)
API_SERVER_DIR = pathlib.Path(os.path.dirname(__file__) or os.path.getcwd())
BUILD_DIR = API_SERVER_DIR / "build"
MODEL_LIBRARY_FORMAT_RELPATH = "model.tar"
IS_TEMPLATE = not (API_SERVER_DIR / MODEL_LIBRARY_FORMAT_RELPATH).exists()
BOARDS = API_SERVER_DIR / "boards.json"
# Data structure to hold the information microtvm_api_server.py needs
# to communicate with each of these boards.
try:
with open(BOARDS) as boards:
BOARD_PROPERTIES = json.load(boards)
except FileNotFoundError:
raise FileNotFoundError(f"Board file {{{BOARDS}}} does not exist.")
def check_call(cmd_args, *args, **kwargs):
cwd_str = "" if "cwd" not in kwargs else f" (in cwd: {kwargs['cwd']})"
_LOG.info("run%s: %s", cwd_str, " ".join(shlex.quote(a) for a in cmd_args))
return subprocess.check_call(cmd_args, *args, **kwargs)
CACHE_ENTRY_RE = re.compile(r"(?P<name>[^:]+):(?P<type>[^=]+)=(?P<value>.*)")
CMAKE_BOOL_MAP = dict(
[(k, True) for k in ("1", "ON", "YES", "TRUE", "Y")]
+ [(k, False) for k in ("0", "OFF", "NO", "FALSE", "N", "IGNORE", "NOTFOUND", "")]
)
class CMakeCache(collections.abc.Mapping):
def __init__(self, path):
self._path = path
self._dict = None
def __iter__(self):
return iter(self._dict)
def __getitem__(self, key):
if self._dict is None:
self._dict = self._read_cmake_cache()
return self._dict[key]
def __len__(self):
return len(self._dict)
def _read_cmake_cache(self):
"""Read a CMakeCache.txt-like file and return a dictionary of values."""
entries = collections.OrderedDict()
with open(self._path, encoding="utf-8") as f:
for line in f:
m = CACHE_ENTRY_RE.match(line.rstrip("\n"))
if not m:
continue
if m.group("type") == "BOOL":
value = CMAKE_BOOL_MAP[m.group("value").upper()]
else:
value = m.group("value")
entries[m.group("name")] = value
return entries
CMAKE_CACHE = CMakeCache(BUILD_DIR / "CMakeCache.txt")
class BoardError(Exception):
"""Raised when an attached board cannot be opened (i.e. missing /dev nodes, etc)."""
class BoardAutodetectFailed(Exception):
"""Raised when no attached hardware is found matching the board= given to ZephyrCompiler."""
def _get_flash_runner():
flash_runner = CMAKE_CACHE.get("ZEPHYR_BOARD_FLASH_RUNNER")
if flash_runner is not None:
return flash_runner
with open(CMAKE_CACHE["ZEPHYR_RUNNERS_YAML"]) as f:
doc = yaml.load(f, Loader=yaml.FullLoader)
return doc["flash-runner"]
def _get_device_args(options):
flash_runner = _get_flash_runner()
if flash_runner == "nrfjprog":
return _get_nrf_device_args(options)
if flash_runner == "openocd":
return _get_openocd_device_args(options)
raise BoardError(
f"Don't know how to find serial terminal for board {CMAKE_CACHE['BOARD']} with flash "
f"runner {flash_runner}"
)
# kwargs passed to usb.core.find to find attached boards for the openocd flash runner.
BOARD_USB_FIND_KW = {
"nucleo_l4r5zi": {"idVendor": 0x0483, "idProduct": 0x374B},
"nucleo_f746zg": {"idVendor": 0x0483, "idProduct": 0x374B},
"stm32f746g_disco": {"idVendor": 0x0483, "idProduct": 0x374B},
"mimxrt1050_evk": {"idVendor": 0x1366, "idProduct": 0x0105},
}
def openocd_serial(options):
"""Find the serial port to use for a board with OpenOCD flash strategy."""
if "openocd_serial" in options:
return options["openocd_serial"]
import usb # pylint: disable=import-outside-toplevel
find_kw = BOARD_USB_FIND_KW[CMAKE_CACHE["BOARD"]]
boards = usb.core.find(find_all=True, **find_kw)
serials = []
for b in boards:
serials.append(b.serial_number)
if len(serials) == 0:
raise BoardAutodetectFailed(f"No attached USB devices matching: {find_kw!r}")
serials.sort()
autodetected_openocd_serial = serials[0]
_LOG.debug("zephyr openocd driver: autodetected serial %s", serials[0])
return autodetected_openocd_serial
def _get_openocd_device_args(options):
return ["--serial", openocd_serial(options)]
def _get_nrf_device_args(options):
nrfjprog_args = ["nrfjprog", "--ids"]
nrfjprog_ids = subprocess.check_output(nrfjprog_args, encoding="utf-8")
if not nrfjprog_ids.strip("\n"):
raise BoardAutodetectFailed(f'No attached boards recognized by {" ".join(nrfjprog_args)}')
boards = nrfjprog_ids.split("\n")[:-1]
if len(boards) > 1:
if options["nrfjprog_snr"] is None:
raise BoardError(
"Multiple boards connected; specify one with nrfjprog_snr=: " f'{", ".join(boards)}'
)
if str(options["nrfjprog_snr"]) not in boards:
raise BoardError(
f"nrfjprog_snr ({options['nrfjprog_snr']}) not found in {nrfjprog_args}: {boards}"
)
return ["--snr", options["nrfjprog_snr"]]
if not boards:
return []
return ["--snr", boards[0]]
PROJECT_TYPES = []
if IS_TEMPLATE:
for d in (API_SERVER_DIR / "src").iterdir():
if d.is_dir():
PROJECT_TYPES.append(d.name)
PROJECT_OPTIONS = [
server.ProjectOption(
"extra_files_tar",
help="If given, during generate_project, uncompress the tarball at this path into the project dir.",
),
server.ProjectOption(
"gdbserver_port", help=("If given, port number to use when running the local gdbserver.")
),
server.ProjectOption(
"nrfjprog_snr",
help=("When used with nRF targets, serial # of the attached board to use, from nrfjprog."),
),
server.ProjectOption(
"openocd_serial",
help=("When used with OpenOCD targets, serial # of the attached board to use."),
),
server.ProjectOption(
"project_type",
help="Type of project to generate.",
choices=tuple(PROJECT_TYPES),
),
server.ProjectOption("verbose", help="Run build with verbose output.", choices=(True, False)),
server.ProjectOption(
"west_cmd",
help=(
"Path to the west tool. If given, supersedes both the zephyr_base "
"option and ZEPHYR_BASE environment variable."
),
),
server.ProjectOption("zephyr_base", help="Path to the zephyr base directory."),
server.ProjectOption(
"zephyr_board",
choices=list(BOARD_PROPERTIES),
help="Name of the Zephyr board to build for.",
),
server.ProjectOption(
"config_main_stack_size",
help="Sets CONFIG_MAIN_STACK_SIZE for Zephyr board.",
),
]
class Handler(server.ProjectAPIHandler):
def __init__(self):
super(Handler, self).__init__()
self._proc = None
def server_info_query(self, tvm_version):
return server.ServerInfo(
platform_name="zephyr",
is_template=IS_TEMPLATE,
model_library_format_path=""
if IS_TEMPLATE
else (API_SERVER_DIR / MODEL_LIBRARY_FORMAT_RELPATH),
project_options=PROJECT_OPTIONS,
)
# These files and directories will be recursively copied into generated projects from the CRT.
CRT_COPY_ITEMS = ("include", "Makefile", "src")
# Maps extra line added to prj.conf to a tuple or list of zephyr_board for which it is needed.
EXTRA_PRJ_CONF_DIRECTIVES = {
"CONFIG_TIMER_RANDOM_GENERATOR=y": (
"qemu_x86",
"qemu_riscv32",
"qemu_cortex_r5",
"qemu_riscv64",
),
"CONFIG_ENTROPY_GENERATOR=y": (
"mps2_an521",
"nrf5340dk_nrf5340_cpuapp",
"nucleo_f746zg",
"nucleo_l4r5zi",
"stm32f746g_disco",
),
}
def _create_prj_conf(self, project_dir, options):
with open(project_dir / "prj.conf", "w") as f:
f.write(
"# For UART used from main().\n"
"CONFIG_RING_BUFFER=y\n"
"CONFIG_UART_CONSOLE=n\n"
"CONFIG_UART_INTERRUPT_DRIVEN=y\n"
"\n"
)
f.write("# For TVMPlatformAbort().\n" "CONFIG_REBOOT=y\n" "\n")
if options["project_type"] == "host_driven":
f.write("# For RPC server C++ bindings.\n" "CONFIG_CPLUSPLUS=y\n" "\n")
f.write("# For math routines\n" "CONFIG_NEWLIB_LIBC=y\n" "\n")
if self._has_fpu(options["zephyr_board"]):
f.write("# For models with floating point.\n" "CONFIG_FPU=y\n" "\n")
# Set main stack size, if needed.
if options.get("config_main_stack_size") is not None:
f.write(f"CONFIG_MAIN_STACK_SIZE={options['config_main_stack_size']}\n")
f.write("# For random number generation.\n" "CONFIG_TEST_RANDOM_GENERATOR=y\n")
f.write("\n# Extra prj.conf directives\n")
for line, board_list in self.EXTRA_PRJ_CONF_DIRECTIVES.items():
if options["zephyr_board"] in board_list:
f.write(f"{line}\n")
f.write("\n")
API_SERVER_CRT_LIBS_TOKEN = "<API_SERVER_CRT_LIBS>"
CRT_LIBS_BY_PROJECT_TYPE = {
"host_driven": "microtvm_rpc_server microtvm_rpc_common common",
"aot_demo": "memory microtvm_rpc_common common",
}
def generate_project(self, model_library_format_path, standalone_crt_dir, project_dir, options):
project_dir = pathlib.Path(project_dir)
# Make project directory.
project_dir.mkdir()
# Copy ourselves to the generated project. TVM may perform further build steps on the generated project
# by launching the copy.
shutil.copy2(__file__, project_dir / os.path.basename(__file__))
# Copy boards.json file to generated project.
shutil.copy2(BOARDS, project_dir / BOARDS.name)
# Place Model Library Format tarball in the special location, which this script uses to decide
# whether it's being invoked in a template or generated project.
project_model_library_format_tar_path = project_dir / MODEL_LIBRARY_FORMAT_RELPATH
shutil.copy2(model_library_format_path, project_model_library_format_tar_path)
# Extract Model Library Format tarball.into <project_dir>/model.
extract_path = os.path.splitext(project_model_library_format_tar_path)[0]
with tarfile.TarFile(project_model_library_format_tar_path) as tf:
os.makedirs(extract_path)
tf.extractall(path=extract_path)
if self._is_qemu(options):
shutil.copytree(API_SERVER_DIR / "qemu-hack", project_dir / "qemu-hack")
# Populate CRT.
crt_path = project_dir / "crt"
crt_path.mkdir()
for item in self.CRT_COPY_ITEMS:
src_path = os.path.join(standalone_crt_dir, item)
dst_path = crt_path / item
if os.path.isdir(src_path):
shutil.copytree(src_path, dst_path)
else:
shutil.copy2(src_path, dst_path)
# Populate Makefile.
with open(API_SERVER_DIR / "CMakeLists.txt.template", "r") as cmake_template_f:
with open(project_dir / "CMakeLists.txt", "w") as cmake_f:
for line in cmake_template_f:
if self.API_SERVER_CRT_LIBS_TOKEN in line:
crt_libs = self.CRT_LIBS_BY_PROJECT_TYPE[options["project_type"]]
line = line.replace("<API_SERVER_CRT_LIBS>", crt_libs)
cmake_f.write(line)
self._create_prj_conf(project_dir, options)
# Populate crt-config.h
crt_config_dir = project_dir / "crt_config"
crt_config_dir.mkdir()
shutil.copy2(
API_SERVER_DIR / "crt_config" / "crt_config.h", crt_config_dir / "crt_config.h"
)
# Populate src/
src_dir = project_dir / "src"
shutil.copytree(API_SERVER_DIR / "src" / options["project_type"], src_dir)
# Populate extra_files
if options.get("extra_files_tar"):
with tarfile.open(options["extra_files_tar"], mode="r:*") as tf:
tf.extractall(project_dir)
def build(self, options):
BUILD_DIR.mkdir()
cmake_args = ["cmake", ".."]
if options.get("verbose"):
cmake_args.append("-DCMAKE_VERBOSE_MAKEFILE:BOOL=TRUE")
if options.get("zephyr_base"):
cmake_args.append(f"-DZEPHYR_BASE:STRING={options['zephyr_base']}")
if options.get("west_cmd"):
cmake_args.append(f"-DWEST={options['west_cmd']}")
cmake_args.append(f"-DBOARD:STRING={options['zephyr_board']}")
check_call(cmake_args, cwd=BUILD_DIR)
args = ["make", "-j2"]
if options.get("verbose"):
args.append("VERBOSE=1")
check_call(args, cwd=BUILD_DIR)
# A list of all zephyr_board values which are known to launch using QEMU. Many platforms which
# launch through QEMU by default include "qemu" in their name. However, not all do. This list
# includes those tested platforms which do not include qemu.
_KNOWN_QEMU_ZEPHYR_BOARDS = ("mps2_an521",)
@classmethod
def _is_qemu(cls, options):
return (
"qemu" in options["zephyr_board"]
or options["zephyr_board"] in cls._KNOWN_QEMU_ZEPHYR_BOARDS
)
@classmethod
def _has_fpu(cls, zephyr_board):
fpu_boards = [name for name, board in BOARD_PROPERTIES.items() if board["fpu"]]
return zephyr_board in fpu_boards
def flash(self, options):
if self._is_qemu(options):
return # NOTE: qemu requires no flash step--it is launched from open_transport.
zephyr_board = options["zephyr_board"]
# The nRF5340DK requires an additional `nrfjprog --recover` before each flash cycle.
# This is because readback protection is enabled by default when this device is flashed.
# Otherwise, flashing may fail with an error such as the following:
# ERROR: The operation attempted is unavailable due to readback protection in
# ERROR: your device. Please use --recover to unlock the device.
if zephyr_board.startswith("nrf5340dk") and _get_flash_runner() == "nrfjprog":
recover_args = ["nrfjprog", "--recover"]
recover_args.extend(_get_nrf_device_args(options))
check_call(recover_args, cwd=API_SERVER_DIR / "build")
check_call(["make", "flash"], cwd=API_SERVER_DIR / "build")
def open_transport(self, options):
if self._is_qemu(options):
transport = ZephyrQemuTransport(options)
else:
transport = ZephyrSerialTransport(options)
to_return = transport.open()
self._transport = transport
atexit.register(lambda: self.close_transport())
return to_return
def close_transport(self):
if self._transport is not None:
self._transport.close()
self._transport = None
def read_transport(self, n, timeout_sec):
if self._transport is None:
raise server.TransportClosedError()
return self._transport.read(n, timeout_sec)
def write_transport(self, data, timeout_sec):
if self._transport is None:
raise server.TransportClosedError()
return self._transport.write(data, timeout_sec)
def _set_nonblock(fd):
flag = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, flag | os.O_NONBLOCK)
new_flag = fcntl.fcntl(fd, fcntl.F_GETFL)
assert (new_flag & os.O_NONBLOCK) != 0, "Cannot set file descriptor {fd} to non-blocking"
class ZephyrSerialTransport:
@classmethod
def _lookup_baud_rate(cls, options):
zephyr_base = options.get("zephyr_base", os.environ["ZEPHYR_BASE"])
sys.path.insert(0, os.path.join(zephyr_base, "scripts", "dts"))
try:
import dtlib # pylint: disable=import-outside-toplevel
finally:
sys.path.pop(0)
dt_inst = dtlib.DT(BUILD_DIR / "zephyr" / "zephyr.dts")
uart_baud = (
dt_inst.get_node("/chosen")
.props["zephyr,console"]
.to_path()
.props["current-speed"]
.to_num()
)
_LOG.debug("zephyr transport: found UART baudrate from devicetree: %d", uart_baud)
return uart_baud
@classmethod
def _find_nrf_serial_port(cls, options):
com_ports = subprocess.check_output(
["nrfjprog", "--com"] + _get_device_args(options), encoding="utf-8"
)
ports_by_vcom = {}
for line in com_ports.split("\n")[:-1]:
parts = line.split()
ports_by_vcom[parts[2]] = parts[1]
return ports_by_vcom["VCOM2"]
@classmethod
def _find_openocd_serial_port(cls, options):
serial_number = openocd_serial(options)
ports = [p for p in serial.tools.list_ports.grep(serial_number)]
if len(ports) != 1:
raise Exception(
f"_find_openocd_serial_port: expected 1 port to match {serial_number}, "
f"found: {ports!r}"
)
return ports[0].device
@classmethod
def _find_jlink_serial_port(cls, options):
return cls._find_openocd_serial_port(options)
@classmethod
def _find_serial_port(cls, options):
flash_runner = _get_flash_runner()
if flash_runner == "nrfjprog":
return cls._find_nrf_serial_port(options)
if flash_runner == "openocd":
return cls._find_openocd_serial_port(options)
if flash_runner == "jlink":
return cls._find_jlink_serial_port(options)
raise RuntimeError(f"Don't know how to deduce serial port for flash runner {flash_runner}")
def __init__(self, options):
self._options = options
self._port = None
def open(self):
port_path = self._find_serial_port(self._options)
self._port = serial.Serial(port_path, baudrate=self._lookup_baud_rate(self._options))
return server.TransportTimeouts(
session_start_retry_timeout_sec=2.0,
session_start_timeout_sec=5.0,
session_established_timeout_sec=5.0,
)
def close(self):
self._port.close()
self._port = None
def read(self, n, timeout_sec):
self._port.timeout = timeout_sec
to_return = self._port.read(n)
if not to_return:
raise server.IoTimeoutError()
return to_return
def write(self, data, timeout_sec):
self._port.write_timeout = timeout_sec
bytes_written = 0
while bytes_written < len(data):
n = self._port.write(data)
data = data[n:]
bytes_written += n
class ZephyrQemuMakeResult(enum.Enum):
QEMU_STARTED = "qemu_started"
MAKE_FAILED = "make_failed"
EOF = "eof"
class ZephyrQemuTransport:
"""The user-facing Zephyr QEMU transport class."""
def __init__(self, options):
self.options = options
self.proc = None
self.pipe_dir = None
self.read_fd = None
self.write_fd = None
self._queue = queue.Queue()
def open(self):
self.pipe_dir = pathlib.Path(tempfile.mkdtemp())
self.pipe = self.pipe_dir / "fifo"
self.write_pipe = self.pipe_dir / "fifo.in"
self.read_pipe = self.pipe_dir / "fifo.out"
os.mkfifo(self.write_pipe)
os.mkfifo(self.read_pipe)
if "gdbserver_port" in self.options:
if "env" in self.kwargs:
self.kwargs["env"] = copy.copy(self.kwargs["env"])
else:
self.kwargs["env"] = os.environ.copy()
self.kwargs["env"]["TVM_QEMU_GDBSERVER_PORT"] = str(self.options["gdbserver_port"])
self.proc = subprocess.Popen(
["make", "run", f"QEMU_PIPE={self.pipe}"],
cwd=BUILD_DIR,
stdout=subprocess.PIPE,
)
self._wait_for_qemu()
# NOTE: although each pipe is unidirectional, open both as RDWR to work around a select
# limitation on linux. Without this, non-blocking I/O can't use timeouts because named
# FIFO are always considered ready to read when no one has opened them for writing.
self.read_fd = os.open(self.read_pipe, os.O_RDWR | os.O_NONBLOCK)
self.write_fd = os.open(self.write_pipe, os.O_RDWR | os.O_NONBLOCK)
_set_nonblock(self.read_fd)
_set_nonblock(self.write_fd)
return server.TransportTimeouts(
session_start_retry_timeout_sec=2.0,
session_start_timeout_sec=10.0,
session_established_timeout_sec=10.0,
)
def close(self):
did_write = False
if self.write_fd is not None:
try:
server.write_with_timeout(
self.write_fd, b"\x01x", 1.0
) # Use a short timeout since we will kill the process
did_write = True
except server.IoTimeoutError:
pass
os.close(self.write_fd)
self.write_fd = None
if self.proc:
if not did_write:
self.proc.terminate()
try:
self.proc.wait(5.0)
except subprocess.TimeoutExpired:
self.proc.kill()
if self.read_fd:
os.close(self.read_fd)
self.read_fd = None
if self.pipe_dir is not None:
shutil.rmtree(self.pipe_dir)
self.pipe_dir = None
def read(self, n, timeout_sec):
return server.read_with_timeout(self.read_fd, n, timeout_sec)
def write(self, data, timeout_sec):
to_write = bytearray()
escape_pos = []
for i, b in enumerate(data):
if b == 0x01:
to_write.append(b)
escape_pos.append(i)
to_write.append(b)
while to_write:
num_written = server.write_with_timeout(self.write_fd, to_write, timeout_sec)
to_write = to_write[num_written:]
def _qemu_check_stdout(self):
for line in self.proc.stdout:
line = str(line)
_LOG.info("%s", line)
if "[QEMU] CPU" in line:
self._queue.put(ZephyrQemuMakeResult.QEMU_STARTED)
else:
line = re.sub("[^a-zA-Z0-9 \n]", "", line)
pattern = r"recipe for target (\w*) failed"
if re.search(pattern, line, re.IGNORECASE):
self._queue.put(ZephyrQemuMakeResult.MAKE_FAILED)
self._queue.put(ZephyrQemuMakeResult.EOF)
def _wait_for_qemu(self):
threading.Thread(target=self._qemu_check_stdout, daemon=True).start()
while True:
try:
item = self._queue.get(timeout=120)
except Exception:
raise TimeoutError("QEMU setup timeout.")
if item == ZephyrQemuMakeResult.QEMU_STARTED:
break
if item in [ZephyrQemuMakeResult.MAKE_FAILED, ZephyrQemuMakeResult.EOF]:
raise RuntimeError("QEMU setup failed.")
raise ValueError(f"{item} not expected.")
if __name__ == "__main__":
server.main(Handler())
|
import logging
import azure.functions as func
import json
import os
from azure.cosmosdb.table.tableservice import TableService
from azure.cosmosdb.table.models import Entity
def main(req: func.HttpRequest) -> func.HttpResponse:
logging.info('Python HTTP trigger function processed a request.')
# Connect to Azure Table Storage
table_service = TableService(connection_string= os.environ['AzureWebJobsStorage'])
table_service.create_table('intents') if not table_service.exists('intents') else None
req_body = req.get_json()
if req_body:
# Create row to be saved on Azure Table Storage
print(req_body.get('ConversationId'))
data = req_body
data["PartitionKey"] = req_body.get('ConversationId')
data["RowKey"] = req_body.get('MessageId')
# Save row on Azure Table Storage
table_service.insert_or_replace_entity('intents', data)
return func.HttpResponse(f"Row {req_body.get("MessageId")} for {req_body.get("ConversationId")} added")
else:
return func.HttpResponse(
"Please pass valid request body",
status_code=400
) | import logging
import azure.functions as func
import json
import os
from azure.cosmosdb.table.tableservice import TableService
from azure.cosmosdb.table.models import Entity
def main(req: func.HttpRequest) -> func.HttpResponse:
logging.info('Python HTTP trigger function processed a request.')
# Connect to Azure Table Storage
table_service = TableService(connection_string= os.environ['AzureWebJobsStorage'])
table_service.create_table('intents') if not table_service.exists('intents') else None
req_body = req.get_json()
if req_body:
# Create row to be saved on Azure Table Storage
print(req_body.get('ConversationId'))
data = req_body
data["PartitionKey"] = req_body.get('ConversationId')
data["RowKey"] = req_body.get('MessageId')
# Save row on Azure Table Storage
table_service.insert_or_replace_entity('intents', data)
return func.HttpResponse(f"Row {req_body.get('MessageId')} for {req_body.get('ConversationId')} added")
else:
return func.HttpResponse(
"Please pass valid request body",
status_code=400
) |
import datetime
import logging
import traceback
from dis_snek.models import ComponentContext
from dis_snek.models import InteractionContext
from ElevatorBot.misc.formating import embed_message
def get_now_with_tz() -> datetime.datetime:
"""Returns the current datetime (timezone aware)"""
return datetime.datetime.now(tz=datetime.timezone.utc)
def localize_datetime(obj: datetime.datetime) -> datetime.datetime:
"""Returns a timezone aware object, localized to the system timezone"""
return obj.astimezone()
async def log_error(
ctx: InteractionContext | ComponentContext,
error: Exception,
situation: str,
) -> None:
"""Respond to the context and log error"""
if not ctx.responded:
await ctx.send(
embeds=embed_message(
"Error",
f"Sorry, something went wrong\nThe Error has been logged and will be worked on",
str(error),
)
)
# log the error
logger = logging.getLogger(situation)
logger.exception(
f"InteractionID '{ctx.interaction_id}" - Error {error} - Traceback: \n{"".join(traceback.format_tb(error.__traceback__))}"
)
# raising error again to making deving easier
raise error
| import datetime
import logging
import traceback
from dis_snek.models import ComponentContext
from dis_snek.models import InteractionContext
from ElevatorBot.misc.formating import embed_message
def get_now_with_tz() -> datetime.datetime:
"""Returns the current datetime (timezone aware)"""
return datetime.datetime.now(tz=datetime.timezone.utc)
def localize_datetime(obj: datetime.datetime) -> datetime.datetime:
"""Returns a timezone aware object, localized to the system timezone"""
return obj.astimezone()
async def log_error(
ctx: InteractionContext | ComponentContext,
error: Exception,
situation: str,
) -> None:
"""Respond to the context and log error"""
if not ctx.responded:
await ctx.send(
embeds=embed_message(
"Error",
f"Sorry, something went wrong\nThe Error has been logged and will be worked on",
str(error),
)
)
# log the error
logger = logging.getLogger(situation)
logger.exception(
f"InteractionID '{ctx.interaction_id}' - Error {error} - Traceback: \n{''.join(traceback.format_tb(error.__traceback__))}"
)
# raising error again to making deving easier
raise error
|
from datetime import datetime
import cv2
import re
import base64
from flask import Flask, render_template, request, jsonify
from flask_cors import CORS
import numpy as np
from io import BytesIO
from PIL import Image, ImageOps
import os,sys
import requests
from graphpipe import remote
from matplotlib import pylab as plt
app = Flask(__name__)
CORS(app) # To Post by Ajax
@app.route('/', methods=['GET', 'POST'])
def index():
if request.method == 'POST':
ans,t1,t2,t3 = get_answer(request)
return jsonify({'ans': ans, 't1': t1, 't2': t2, 't3': t3})
else:
return render_template('index.html')
def result(img):
img = img.reshape(1, 784)
img = img.astype(np.float32)
img = np.multiply(img, 1.0 / 255.0)
pred = remote.execute("http://localhost:9001", img)
r = np.argmax(pred, axis=1)
pp = pred*100
top1 = str(np.argsort(-pp)[0][0])+ " (" +str(int(np.sort(-pp)[0][0]*-1))+"%)"
top2 = str(np.argsort(-pp)[0][1])+ " (" +str(int(np.sort(-pp)[0][1]*-1))+"%)"
top3 = str(np.argsort(-pp)[0][2])+ " (" +str(int(np.sort(-pp)[0][2]*-1))+"%)"
# return int(r)
return r,top1,top2,top3
def get_answer(req):
img_str = re.search(r'base64,(.*)', req.form['img']).group(1)
nparr = np.fromstring(base64.b64decode(img_str), np.uint8)
img_src = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
img_negaposi = 255 - img_src
img_gray = cv2.cvtColor(img_negaposi, cv2.COLOR_BGR2GRAY)
img_resize = cv2.resize(img_gray,(28,28))
cv2.imwrite(f"images/{datetime.now().strftime("%s")}.jpg",img_resize)
ans,t1,t2,t3 = result(img_resize)
return int(ans),t1,t2,t3
if __name__ == "__main__":
app.run(debug=False, host='0.0.0.0', port=8001)
| from datetime import datetime
import cv2
import re
import base64
from flask import Flask, render_template, request, jsonify
from flask_cors import CORS
import numpy as np
from io import BytesIO
from PIL import Image, ImageOps
import os,sys
import requests
from graphpipe import remote
from matplotlib import pylab as plt
app = Flask(__name__)
CORS(app) # To Post by Ajax
@app.route('/', methods=['GET', 'POST'])
def index():
if request.method == 'POST':
ans,t1,t2,t3 = get_answer(request)
return jsonify({'ans': ans, 't1': t1, 't2': t2, 't3': t3})
else:
return render_template('index.html')
def result(img):
img = img.reshape(1, 784)
img = img.astype(np.float32)
img = np.multiply(img, 1.0 / 255.0)
pred = remote.execute("http://localhost:9001", img)
r = np.argmax(pred, axis=1)
pp = pred*100
top1 = str(np.argsort(-pp)[0][0])+ " (" +str(int(np.sort(-pp)[0][0]*-1))+"%)"
top2 = str(np.argsort(-pp)[0][1])+ " (" +str(int(np.sort(-pp)[0][1]*-1))+"%)"
top3 = str(np.argsort(-pp)[0][2])+ " (" +str(int(np.sort(-pp)[0][2]*-1))+"%)"
# return int(r)
return r,top1,top2,top3
def get_answer(req):
img_str = re.search(r'base64,(.*)', req.form['img']).group(1)
nparr = np.fromstring(base64.b64decode(img_str), np.uint8)
img_src = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
img_negaposi = 255 - img_src
img_gray = cv2.cvtColor(img_negaposi, cv2.COLOR_BGR2GRAY)
img_resize = cv2.resize(img_gray,(28,28))
cv2.imwrite(f"images/{datetime.now().strftime('%s')}.jpg",img_resize)
ans,t1,t2,t3 = result(img_resize)
return int(ans),t1,t2,t3
if __name__ == "__main__":
app.run(debug=False, host='0.0.0.0', port=8001)
|
"import abc\nimport enum\nimport itertools\nimport logging\nimport uuid\nfrom copy import deepcopy\n(...TRUNCATED) | "import abc\nimport enum\nimport itertools\nimport logging\nimport uuid\nfrom copy import deepcopy\n(...TRUNCATED) |
"\"\"\"\nThis is an end to end release test automation script used to kick off periodic\nrelease tes(...TRUNCATED) | "\"\"\"\nThis is an end to end release test automation script used to kick off periodic\nrelease tes(...TRUNCATED) |
"\"\"\"Option helper functions\"\"\"\n__docformat__ = \"numpy\"\n\nimport argparse\nfrom typing impo(...TRUNCATED) | "\"\"\"Option helper functions\"\"\"\n__docformat__ = \"numpy\"\n\nimport argparse\nfrom typing impo(...TRUNCATED) |
"# Copyright 2020 - 2021 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"L(...TRUNCATED) | "# Copyright 2020 - 2021 MONAI Consortium\n# Licensed under the Apache License, Version 2.0 (the \"L(...TRUNCATED) |
End of preview. Expand
in Data Studio
README.md exists but content is empty.
- Downloads last month
- 44