diff --git "a/.ragatouille/colbert/indexes/new_idx/docid_metadata_map.json" "b/.ragatouille/colbert/indexes/new_idx/docid_metadata_map.json" new file mode 100644--- /dev/null +++ "b/.ragatouille/colbert/indexes/new_idx/docid_metadata_map.json" @@ -0,0 +1,11905 @@ +{ + "03_shallow_networks_v02.pdf_0":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":0, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_1":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":1, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_2":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":2, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_3":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":3, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_4":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":4, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_5":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":5, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_6":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":6, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + 
"source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_7":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":7, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_8":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":8, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_9":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":9, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_10":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":10, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_11":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":11, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_12":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":12, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_13":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":13, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_14":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":14, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we 
consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_15":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":15, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_16":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":16, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_17":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":17, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_18":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":18, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_19":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":19, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_20":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":20, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_21":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":21, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + 
"source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_22":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":22, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_23":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":23, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_24":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":24, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_25":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":25, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_26":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":26, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_27":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":27, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_28":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":28, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_29":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":29, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we 
consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_30":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":30, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_31":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":31, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_32":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":32, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_33":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":33, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_34":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":34, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_35":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":35, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_36":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":36, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + 
"source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_37":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":37, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_38":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":38, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_39":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":39, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_40":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":40, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_41":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":41, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_42":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":42, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_43":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":43, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_44":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":44, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we 
consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_45":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":45, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_46":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":46, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_47":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":47, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_48":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":48, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_49":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":49, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_50":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":50, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_51":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":51, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + 
"source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_52":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":52, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_53":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":53, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_54":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":54, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_55":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":55, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_56":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":56, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "03_shallow_networks_v02.pdf_57":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/03_shallow_networks_v02.pdf", + "page":57, + "date":"01/25/2024\n Thursday", + "tldr":"In this lecture we consider networks with one layer of hidden units and explore their representational power.", + "title":"03 - Shallow Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_hiqqtfqp", + "suggested_readings":"UDL Chapter 3", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_0":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":0, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. 
We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_1":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":1, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_2":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":2, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_3":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":3, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_4":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":4, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_5":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":5, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. 
We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_6":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":6, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_7":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":7, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_8":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":8, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_9":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":9, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_10":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":10, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. 
We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_11":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":11, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_12":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":12, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_13":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":13, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_14":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":14, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_15":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":15, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. 
We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_16":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":16, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_17":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":17, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_18":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":18, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_19":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":19, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_20":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":20, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. 
We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_21":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":21, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_22":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":22, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_23":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":23, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_24":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":24, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_25":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":25, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. 
We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_26":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":26, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_27":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":27, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_28":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":28, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_29":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":29, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_30":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":30, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. 
We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_31":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":31, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_32":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":32, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_33":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":33, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_34":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":34, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_35":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":35, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. 
We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_36":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":36, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_37":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":37, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_38":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":38, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_39":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":39, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_40":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":40, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. 
We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_41":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":41, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_42":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":42, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_43":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":43, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_44":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":44, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_45":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":45, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. 
We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_46":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":46, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_47":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":47, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_48":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":48, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_49":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":49, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_50":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":50, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. 
We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_51":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":51, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_52":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":52, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_53":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":53, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_54":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":54, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_55":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":55, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. 
We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_56":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":56, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_57":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":57, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_58":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":58, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_59":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":59, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_60":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":60, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. 
We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_61":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":61, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_62":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":62, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_63":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":63, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_64":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":64, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_65":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":65, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. 
We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_66":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":66, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_67":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":67, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_68":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":68, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_69":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":69, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "04_deep_networks_v2.pdf_70":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/04_deep_networks_v2.pdf", + "page":70, + "date":"01/30/2024\n Tuesday", + "tldr":"We dive into deep networks by composing two shallow networks and visualizing their representational capabilities. We then generalize fully connected networks with two and more layers of hidden units. 
We'll compare the modeling efficiency between deep and shallow networks.", + "title":"04 - Deep Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_krgh9894", + "suggested_readings":"UDL Chapter 4", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_0":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":0, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_1":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":1, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_2":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":2, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_3":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":3, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_4":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":4, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_5":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":5, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_6":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":6, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_7":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":7, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_8":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":8, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_9":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":9, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_10":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":10, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_11":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":11, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_12":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":12, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_13":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":13, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_14":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":14, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_15":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":15, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_16":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":16, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_17":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":17, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_18":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":18, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_19":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":19, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_20":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":20, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_21":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":21, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_22":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":22, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_23":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":23, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_24":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":24, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_25":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":25, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_26":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":26, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_27":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":27, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_28":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":28, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_29":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":29, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_30":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":30, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_31":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":31, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_32":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":32, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_33":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":33, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_34":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":34, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_35":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":35, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_36":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":36, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_37":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":37, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_38":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":38, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_39":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":39, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_40":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":40, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_41":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":41, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_42":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":42, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_43":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":43, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_44":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":44, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_45":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":45, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_46":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":46, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_47":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":47, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_48":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":48, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_49":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":49, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_50":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":50, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_51":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":51, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_52":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":52, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_53":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":53, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_54":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":54, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_55":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":55, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_56":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":56, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_57":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":57, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_58":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":58, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_59":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":59, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_60":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":60, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_61":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":61, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_62":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":62, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_63":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":63, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_64":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":64, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_65":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":65, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_66":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":66, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_67":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":67, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_68":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":68, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_69":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":69, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_70":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":70, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_71":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":71, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_72":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":72, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_73":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":73, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_74":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":74, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_75":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":75, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_76":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":76, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_77":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":77, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_78":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":78, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_79":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":79, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_80":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":80, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_81":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":81, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_82":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":82, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_83":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":83, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_84":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":84, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_85":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":85, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_86":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":86, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_87":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":87, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_88":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":88, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_89":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":89, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_90":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":90, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_91":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":91, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_92":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":92, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_93":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":93, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_94":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":94, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_95":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":95, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_96":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":96, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_97":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":97, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_98":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":98, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_99":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":99, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_100":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":100, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_101":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":101, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_102":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":102, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_103":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":103, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_104":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":104, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_105":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":105, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_106":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":106, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_107":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":107, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. 
This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_108":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":108, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_109":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":109, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "05_loss_functions_v2.pdf_110":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/05_loss_functions_v2.pdf", + "page":110, + "date":"02/01/2024\n Thursday", + "tldr":"We reconsider loss functions as a measure of how well the data fits to parametric probability distribution. We show that for univariate gaussian distributions we arrive back at least squares loss. We then introduce the notion of maximum likelihood and see how we can use that to define loss functions for many types data distributions. We cover some examples and then show how to generalize. This is a key topic to aid you in applying deep learning models to new types of data.", + "title":"05 - Loss Functions", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_k2t4xjkd", + "suggested_readings":"UDL Chapter 5", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_0":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":0, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. 
We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_1":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":1, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_2":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":2, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_3":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":3, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_4":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":4, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_5":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":5, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. 
We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_6":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":6, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_7":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":7, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_8":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":8, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_9":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":9, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_10":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":10, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. 
We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_11":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":11, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_12":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":12, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_13":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":13, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_14":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":14, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_15":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":15, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. 
We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_16":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":16, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_17":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":17, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_18":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":18, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_19":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":19, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_20":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":20, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. 
We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_21":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":21, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_22":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":22, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_23":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":23, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_24":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":24, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_25":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":25, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. 
We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_26":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":26, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_27":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":27, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_28":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":28, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_29":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":29, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_30":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":30, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. 
We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_31":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":31, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_32":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":32, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_33":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":33, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_34":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":34, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_35":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":35, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. 
We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_36":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":36, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_37":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":37, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_38":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":38, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_39":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":39, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_40":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":40, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. 
We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_41":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":41, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_42":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":42, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_43":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":43, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_44":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":44, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_45":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":45, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. 
We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_46":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":46, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_47":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":47, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_48":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":48, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_49":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":49, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_50":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":50, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. 
We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_51":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":51, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_52":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":52, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_53":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":53, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_54":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":54, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_55":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":55, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. 
We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_56":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":56, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_57":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":57, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_58":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":58, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_59":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":59, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_60":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":60, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. 
We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_61":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":61, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_62":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":62, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_63":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":63, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_64":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":64, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_65":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":65, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. 
We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_66":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":66, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_67":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":67, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "06_fitting_models_v2.pdf_68":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/06_fitting_models_v2.pdf", + "page":68, + "date":"02/06/2024\n Tuesday", + "tldr":"In this lecture we look at different ways minimizing the loss function for models given a training dataset. We'll formally define gradient descent, then show the advantages of stochastic gradient descent and then finally see how momentum and normalized gradients (ADAM) can improve model training farther.", + "title":"06 - Fitting Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5kbu6znm", + "suggested_readings":"UDL Chapter 6", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_0":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":0, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_1":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":1, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. 
We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_2":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":2, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_3":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":3, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_4":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":4, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_5":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":5, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_6":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":6, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_7":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":7, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. 
We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_8":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":8, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_9":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":9, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_10":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":10, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_11":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":11, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_12":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":12, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_13":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":13, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. 
We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_14":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":14, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_15":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":15, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_16":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":16, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_17":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":17, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_18":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":18, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_19":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":19, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. 
We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_20":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":20, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_21":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":21, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_22":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":22, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_23":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":23, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_24":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":24, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_25":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":25, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. 
We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_26":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":26, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_27":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":27, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_28":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":28, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_29":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":29, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_30":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":30, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_31":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":31, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. 
We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_32":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":32, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_33":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":33, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_34":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":34, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_35":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":35, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_36":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":36, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. We also show an example simple implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_37":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":37, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficienctly calculate gradients over more complex functions like deep neural networks using backpropagation. 
We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_38":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":38, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_39":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":39, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_40":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":40, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_41":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":41, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_42":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":42, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_43":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":43, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. 
We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_44":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":44, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_45":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":45, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_46":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":46, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_47":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":47, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_48":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":48, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_49":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":49, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. 
We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_50":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":50, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_51":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":51, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_52":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":52, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_53":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":53, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_54":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":54, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_55":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":55, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. 
We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_56":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":56, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_57":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":57, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_58":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":58, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_59":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":59, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_60":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":60, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_61":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":61, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. 
We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_62":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":62, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_63":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":63, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07a_gradients_v02.pdf_64":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07a_gradients_v02.pdf", + "page":64, + "date":"02/08/2024\n Thursday", + "tldr":"In this lecture we show how to efficiently calculate gradients over more complex functions like deep neural networks using backpropagation. We also show a simple example implementation in the accompanying Jupyter notebook.", + "title":"07a - Gradients and Backpropagation", + "lecture_recording":null, + "suggested_readings":"UDL Sections 7.1 - 7.4", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_0":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":0, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_1":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":1, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_2":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":2, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. 
We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_3":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":3, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_4":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":4, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_5":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":5, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_6":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":6, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_7":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":7, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. 
We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_8":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":8, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_9":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":9, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_10":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":10, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_11":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":11, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_12":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":12, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. 
We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_13":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":13, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_14":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":14, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_15":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":15, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_16":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":16, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_17":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":17, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. 
We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_18":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":18, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_19":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":19, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_20":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":20, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_21":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":21, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_22":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":22, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. 
We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_23":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":23, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_24":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":24, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_25":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":25, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_26":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":26, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_27":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":27, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. 
We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_28":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":28, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_29":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":29, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_30":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":30, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_31":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":31, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_32":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":32, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. 
We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_33":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":33, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_34":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":34, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_35":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":35, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_36":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":36, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_37":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":37, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. 
We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_38":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":38, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07b_initialization_v2.pdf_39":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07b_initialization_v2.pdf", + "page":39, + "date":"02/15/2024\n Thursday", + "tldr":"In this lecture we talk about weight initialization and how it can impact the training results. We'll also go back and finish model fitting with the Adam optimizer. We'll also give some tips and tricks on how to efficiently scan and read research papers.", + "title":"07b - Initialization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jzamn0ol", + "suggested_readings":"UDL Sections 7.5 - 7.6", + "source_type":"lecture" + }, + "07_how_to_read_paper.pdf_0":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07_how_to_read_paper.pdf", + "page":0, + "source_type":"lecture" + }, + "07_how_to_read_paper.pdf_1":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07_how_to_read_paper.pdf", + "page":1, + "source_type":"lecture" + }, + "07_how_to_read_paper.pdf_2":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07_how_to_read_paper.pdf", + "page":2, + "source_type":"lecture" + }, + "07_how_to_read_paper.pdf_3":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07_how_to_read_paper.pdf", + "page":3, + "source_type":"lecture" + }, + "07_how_to_read_paper.pdf_4":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/07_how_to_read_paper.pdf", + "page":4, + "source_type":"lecture" + }, + "08_measuring_performance.pdf_0":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":0, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_1":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":1, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_2":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":2, + 
"date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_3":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":3, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_4":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":4, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_5":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":5, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_6":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":6, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_7":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":7, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_8":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":8, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_9":{ + 
"source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":9, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_10":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":10, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_11":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":11, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_12":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":12, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_13":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":13, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_14":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":14, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_15":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":15, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + 
"suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_16":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":16, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_17":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":17, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_18":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":18, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_19":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":19, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_20":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":20, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_21":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":21, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_22":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":22, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - 
Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_23":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":23, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_24":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":24, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_25":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":25, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_26":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":26, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_27":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":27, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_28":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":28, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_29":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":29, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets 
as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_30":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":30, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_31":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":31, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_32":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":32, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_33":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":33, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_34":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":34, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_35":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":35, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_36":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":36, + "date":"02/20/2024\n 
Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_37":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":37, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_38":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":38, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_39":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":39, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_40":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":40, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_41":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":41, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_42":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":42, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_43":{ + 
"source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":43, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_44":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":44, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_45":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":45, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "08_measuring_performance.pdf_46":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/08_measuring_performance.pdf", + "page":46, + "date":"02/20/2024\n Tuesday", + "tldr":"We look at measuring model training performance, the importance of test sets as well as how noise, bias and variance play a role in training outcomes.", + "title":"08 - Measuring Performance", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tpvohig1", + "suggested_readings":"UDL Chapter 8", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_0":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":0, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_1":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":1, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_2":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":2, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_3":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":3, + "date":"02/22/2024\n 
Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_4":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":4, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_5":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":5, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_6":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":6, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_7":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":7, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_8":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":8, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_9":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":9, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_10":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":10, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_11":{ + 
"source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":11, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_12":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":12, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_13":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":13, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_14":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":14, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_15":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":15, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_16":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":16, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_17":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":17, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_18":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":18, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + 
"source_type":"lecture" + }, + "09_regularization_v2.pdf_19":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":19, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_20":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":20, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_21":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":21, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_22":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":22, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_23":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":23, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_24":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":24, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_25":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":25, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_26":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":26, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + 
"lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_27":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":27, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_28":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":28, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_29":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":29, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_30":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":30, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_31":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":31, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_32":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":32, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_33":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":33, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_34":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":34, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they 
help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_35":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":35, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_36":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":36, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_37":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":37, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_38":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":38, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_39":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":39, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "09_regularization_v2.pdf_40":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/09_regularization_v2.pdf", + "page":40, + "date":"02/22/2024\n Thursday", + "tldr":"We explain explicit and implicit regularization techniques and how they help generalize models.", + "title":"09 - Regularization", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_jkgs505n", + "suggested_readings":"UDL Chapter 9", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_0":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":0, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_1":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":1, + 
"date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_2":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":2, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_3":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":3, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_4":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":4, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_5":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":5, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_6":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":6, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_7":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":7, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_8":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":8, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + 
"lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_9":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":9, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_10":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":10, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_11":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":11, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_12":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":12, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_13":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":13, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_14":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":14, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_15":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":15, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_16":{ + 
"source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":16, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_17":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":17, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_18":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":18, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_19":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":19, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_20":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":20, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_21":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":21, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_22":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":22, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_23":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":23, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural 
networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_24":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":24, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_25":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":25, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_26":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":26, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_27":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":27, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_28":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":28, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_29":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":29, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_30":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":30, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + 
"suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_31":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":31, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_32":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":32, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_33":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":33, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_34":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":34, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_35":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":35, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_36":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":36, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_37":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":37, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_38":{ + 
"source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":38, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_39":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":39, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_40":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":40, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_41":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":41, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_42":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":42, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_43":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":43, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_44":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":44, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_45":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":45, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural 
networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_46":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":46, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_47":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":47, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_48":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":48, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_49":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":49, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_50":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":50, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_51":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":51, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_52":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":52, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + 
"suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_53":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":53, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_54":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":54, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_55":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":55, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_56":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":56, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_57":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":57, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_58":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":58, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_59":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":59, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_60":{ + 
"source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":60, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_61":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":61, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_62":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":62, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_63":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":63, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_64":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":64, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_65":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":65, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_66":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":66, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_67":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":67, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural 
networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_68":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":68, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_69":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":69, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_70":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":70, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_71":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":71, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_72":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":72, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_73":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":73, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_74":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":74, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + 
"suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_75":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":75, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_76":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":76, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_77":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":77, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_78":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":78, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_79":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":79, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_80":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":80, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_81":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":81, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_82":{ + 
"source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":82, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_83":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":83, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_84":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":84, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_85":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":85, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_86":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":86, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_87":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":87, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_88":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":88, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_89":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":89, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural 
networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_90":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":90, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_91":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":91, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "10_convolutional_networks.pdf_92":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/10_convolutional_networks.pdf", + "page":92, + "date":"02/27/2024\n Tuesday", + "tldr":"We cover 1D and 2D convolutional neural networks along with subsampling and upsampling operations.", + "title":"10 - Convolutional Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_8ejvyib4", + "suggested_readings":"UDL Chapter 10", + "source_type":"lecture" + }, + "11_residual_networks.pdf_0":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":0, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_1":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":1, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_2":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":2, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_3":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":3, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual 
Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_4":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":4, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_5":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":5, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_6":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":6, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_7":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":7, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_8":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":8, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_9":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":9, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_10":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":10, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + 
"source_type":"lecture" + }, + "11_residual_networks.pdf_11":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":11, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_12":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":12, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_13":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":13, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_14":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":14, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_15":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":15, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_16":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":16, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_17":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":17, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_18":{ + 
"source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":18, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_19":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":19, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_20":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":20, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_21":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":21, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_22":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":22, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_23":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":23, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_24":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":24, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_25":{ + 
"source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":25, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_26":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":26, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_27":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":27, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_28":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":28, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_29":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":29, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_30":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":30, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_31":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":31, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_32":{ + 
"source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":32, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_33":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":33, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_34":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":34, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_35":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":35, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_36":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":36, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_37":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":37, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_38":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":38, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_39":{ + 
"source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":39, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11_residual_networks.pdf_40":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11_residual_networks.pdf", + "page":40, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce residual networks, the types of problems they solve, why we need batch normalization and then review some example residual network architectures.", + "title":"11 - Residual Networks", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_0":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":0, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_1":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":1, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_2":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":2, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_3":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":3, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_4":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":4, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + 
"lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_5":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":5, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_6":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":6, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_7":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":7, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_8":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":8, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_9":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":9, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_10":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":10, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_11":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":11, + "date":"02/29/2024\n 
Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_12":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":12, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_13":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":13, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_14":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":14, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_15":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":15, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_16":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":16, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_17":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":17, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + 
"suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_18":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":18, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_19":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":19, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_20":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":20, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_21":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":21, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_22":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":22, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_23":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":23, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_24":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":24, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural 
networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_25":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":25, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_26":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":26, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_27":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":27, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_28":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":28, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_29":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":29, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_30":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":30, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + 
"11a_recurrent_networks_v2.pdf_31":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":31, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_32":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":32, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_33":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":33, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "11a_recurrent_networks_v2.pdf_34":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/11a_recurrent_networks_v2.pdf", + "page":34, + "date":"02/29/2024\n Thursday", + "tldr":"In this lecture we introduce recurrent neural networks, starting the plain vanilla RNN, the problem of vanishing gradients, LSTM and GRU and batch normalization.", + "title":"11a - Recurrent Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_6mp1tttq", + "suggested_readings":"UDL Chapter 11", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_0":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":0, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_1":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":1, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_2":{ + 
"source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":2, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_3":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":3, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_4":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":4, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_5":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":5, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_6":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":6, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_7":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":7, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer 
architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_8":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":8, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_9":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":9, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_10":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":10, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_11":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":11, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_12":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":12, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder 
and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_13":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":13, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_14":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":14, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_15":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":15, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_16":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":16, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_17":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":17, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional 
\nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_18":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":18, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_19":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":19, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_20":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":20, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_21":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":21, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_22":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":22, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_23":{ + 
"source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":23, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_24":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":24, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_25":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":25, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_26":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":26, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_27":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":27, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_28":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":28, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the 
transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_29":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":29, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_30":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":30, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_31":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":31, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_32":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":32, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_33":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":33, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for 
encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_34":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":34, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_35":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":35, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_36":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":36, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_37":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":37, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_38":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":38, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 
12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_39":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":39, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_40":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":40, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_41":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":41, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_42":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":42, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_43":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":43, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_44":{ + 
"source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":44, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_45":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":45, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_46":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":46, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_47":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":47, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_48":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":48, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_49":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":49, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the 
transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_50":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":50, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_51":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":51, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_52":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":52, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_53":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":53, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_54":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":54, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for 
encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_55":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":55, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_56":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":56, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_57":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":57, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_58":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":58, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_59":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":59, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 
12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_60":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":60, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_61":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":61, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_62":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":62, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_63":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":63, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_64":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":64, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_65":{ + 
"source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":65, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_66":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":66, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_67":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":67, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_68":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":68, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_69":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":69, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_70":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":70, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the 
transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_71":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":71, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_72":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":72, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_73":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":73, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_74":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":74, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "12_transformers_v3_export.pdf_75":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/12_transformers_v3_export.pdf", + "page":75, + "date":"03/05/2024\n Tuesday", + "tldr":"In this lecture we cover the transformer architecture, starting with the motivation that required a new type of model, the concept and implementation of self-attention and then the full transformer architecture for 
encoder, decoder and encoder-decoder type models.", + "title":"12 - Transformers", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_r59pkzbo", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_0":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":0, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_1":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":1, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_2":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":2, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_3":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":3, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_4":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":4, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. 
We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_5":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":5, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_6":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":6, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_7":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":7, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_8":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":8, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_9":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":9, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. 
We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_10":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":10, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_11":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":11, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_12":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":12, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_13":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":13, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_14":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":14, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. 
We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_15":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":15, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_16":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":16, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_17":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":17, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_18":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":18, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_19":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":19, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. 
We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_20":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":20, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_21":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":21, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_22":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":22, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_23":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":23, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_24":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":24, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. 
We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_25":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":25, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_26":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":26, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_27":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":27, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_28":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":28, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_29":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":29, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. 
We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_30":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":30, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_31":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":31, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_32":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":32, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_33":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":33, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_34":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":34, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. 
We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_35":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":35, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_36":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":36, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_37":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":37, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_38":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":38, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_39":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":39, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. 
We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_40":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":40, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_41":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":41, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_42":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":42, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_43":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":43, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_44":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":44, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. 
We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_45":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":45, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_46":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":46, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_47":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":47, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_48":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":48, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_49":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":49, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. 
We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_50":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":50, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_51":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":51, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_52":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":52, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_53":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":53, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_54":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":54, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. 
We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_55":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":55, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_56":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":56, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_57":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":57, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_58":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":58, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_59":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":59, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. 
We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_60":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":60, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_61":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":61, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_62":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":62, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_63":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":63, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_64":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":64, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. 
We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_65":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":65, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_66":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":66, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "13_transformers_part2_v2.pdf_67":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/13_transformers_part2_v2.pdf", + "page":67, + "date":"03/07/2024\n Thursday", + "tldr":"In this lecture we continue to review the transformer architecture. We continue the discussion of decoders and encoder-decoder architectures, then discuss scaling to large contexts and then tokenization and embedding.", + "title":"13 - Transformers Part 2", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_m9hhz175", + "suggested_readings":"UDL Chapter 12\n\n\nOptional \nThe Illustrated Transformer", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_0":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":0, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_1":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":1, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. 
We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_2":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":2, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_3":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":3, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_4":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":4, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_5":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":5, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_6":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":6, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_7":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":7, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. 
We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_8":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":8, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_9":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":9, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_10":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":10, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_11":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":11, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_12":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":12, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_13":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":13, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. 
We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_14":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":14, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_15":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":15, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_16":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":16, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_17":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":17, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_18":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":18, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_19":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":19, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. 
We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_20":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":20, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_21":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":21, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_22":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":22, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_23":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":23, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_24":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":24, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_25":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":25, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. 
We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_26":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":26, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_27":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":27, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_28":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":28, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_29":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":29, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_30":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":30, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_31":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":31, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. 
We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_32":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":32, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_33":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":33, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_34":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":34, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_35":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":35, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_36":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":36, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_37":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":37, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. 
We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_38":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":38, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_39":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":39, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_40":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":40, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_41":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":41, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_42":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":42, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_43":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":43, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. 
We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_44":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":44, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_45":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":45, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_46":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":46, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_47":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":47, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "15_RAG_CoT.pdf_48":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/15_RAG_CoT.pdf", + "page":48, + "date":"03/21/2024\n Thursday", + "tldr":"In this lecture we talk about ways to improve LLM performance short of retraining or finetuning. 
We cover more sophisticated prompt strategies, retrieval augmentation and cognitive architectures building systems and agents based on LLMs.", + "title":"15 -- Improving LLM Perf", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_0woco0rx", + "suggested_readings":"See slides for references", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_0":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":0, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_1":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":1, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_2":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":2, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_3":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":3, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_4":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":4, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_5":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":5, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No 
specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_6":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":6, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_7":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":7, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_8":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":8, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_9":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":9, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_10":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":10, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_11":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":11, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_12":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":12, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a 
quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_13":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":13, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_14":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":14, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_15":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":15, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_16":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":16, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_17":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":17, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_18":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":18, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + 
"suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_19":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":19, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_20":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":20, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_21":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":21, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_22":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":22, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_23":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":23, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_24":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":24, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_25":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":25, + "date":"03/26/2024\n Tuesday", + 
"tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_26":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":26, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_27":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":27, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_28":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":28, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_29":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":29, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_30":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":30, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_31":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":31, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + 
"lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_32":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":32, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_33":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":33, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_34":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":34, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_35":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":35, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "16_PEFT_of_LLMs_v2.pdf_36":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/16_PEFT_of_LLMs_v2.pdf", + "page":36, + "date":"03/26/2024\n Tuesday", + "tldr":"In this lecture we'll do a quick review of full model fine tuning then review the parameter efficient finetuning techniques Low Rank Adaptation and Prompt Tuning.,", + "title":"16 - Parameter Efficient Fine Tuning", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_5nl4ew2x", + "suggested_readings":"No specific readings provided.", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_0":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":0, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. 
We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_1":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":1, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_2":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":2, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_3":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":3, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_4":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":4, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_5":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":5, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. 
We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_6":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":6, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_7":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":7, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_8":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":8, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_9":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":9, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_10":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":10, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. 
We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_11":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":11, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_12":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":12, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_13":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":13, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_14":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":14, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_15":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":15, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. 
We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_16":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":16, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_17":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":17, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_18":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":18, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_19":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":19, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_20":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":20, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. 
We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_21":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":21, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_22":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":22, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_23":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":23, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_24":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":24, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_25":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":25, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. 
We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_26":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":26, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_27":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":27, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_28":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":28, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_29":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":29, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_30":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":30, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. 
We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_31":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":31, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_32":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":32, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_33":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":33, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_34":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":34, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_35":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":35, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. 
We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_36":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":36, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_37":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":37, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_38":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":38, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_39":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":39, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_40":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":40, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. 
We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_41":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":41, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_42":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":42, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_43":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":43, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_44":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":44, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_45":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":45, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. 
We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_46":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":46, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_47":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":47, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_48":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":48, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_49":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":49, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_50":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":50, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. 
We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_51":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":51, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_52":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":52, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_53":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":53, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_54":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":54, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_55":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":55, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. 
We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_56":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":56, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_57":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":57, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_58":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":58, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_59":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":59, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_60":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":60, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. 
We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_61":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":61, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_62":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":62, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_63":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":63, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_64":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":64, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_65":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":65, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. 
We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_66":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":66, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_67":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":67, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_68":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":68, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_69":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":69, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_70":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":70, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. 
We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_71":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":71, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_72":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":72, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_73":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":73, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_74":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":74, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_75":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":75, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. 
We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_76":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":76, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_77":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":77, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "17_unsup_learning_gan_v2_1.pdf_78":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/17_unsup_learning_gan_v2_1.pdf", + "page":78, + "date":"04/04/2024\n Thursday", + "tldr":"In this lecture we revisit the concept of unsupervised learning in the context of generative models. We will then dive into Generative Adversarial Networks (GANs) and their applications. We will also discuss the challenges and limitations of GANs and some of the recent advances in the field.", + "title":"17 -- Unsupervised Learning and GANs", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_11s31rno", + "suggested_readings":"UDL Chapters 14 and 15", + "source_type":"lecture" + }, + "18_VAEs.pdf_0":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":0, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_1":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":1, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. 
We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_2":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":2, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_3":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":3, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_4":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":4, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_5":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":5, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_6":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":6, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. 
We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_7":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":7, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_8":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":8, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_9":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":9, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_10":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":10, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_11":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":11, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. 
We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_12":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":12, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_13":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":13, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_14":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":14, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_15":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":15, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_16":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":16, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. 
We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_17":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":17, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_18":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":18, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_19":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":19, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_20":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":20, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_21":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":21, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. 
We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_22":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":22, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_23":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":23, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_24":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":24, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_25":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":25, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_26":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":26, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. 
We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_27":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":27, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_28":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":28, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_29":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":29, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_30":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":30, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_31":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":31, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. 
We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_32":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":32, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_33":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":33, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_34":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":34, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_35":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":35, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_36":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":36, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. 
We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_37":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":37, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_38":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":38, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_39":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":39, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_40":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":40, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_41":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":41, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. 
We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_42":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":42, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_43":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":43, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_44":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":44, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_45":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":45, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_46":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":46, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. 
We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_47":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":47, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_48":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":48, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_49":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":49, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_50":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":50, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_51":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":51, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. 
We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_52":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":52, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_53":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":53, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "18_VAEs.pdf_54":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/18_VAEs.pdf", + "page":54, + "date":"04/09/2024\n Tuesday", + "tldr":"In this lecture we dive into Variational Autoencoders or VAEs. We start by looking at autoencoders and their ability to reduce dimensions of inputs into a latent space. We'll see why they don't make good generative models and then generalize to VAEs. 
We'll finish with some examples of generative output of VAEs.", + "title":"18 - Variational Autoencoders (VAEs)", + "lecture_recording":null, + "suggested_readings":"Understanding Variational Autoencoders\n\n\nUDL Chapter 17 (optional)", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_0":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":0, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_1":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":1, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_2":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":2, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_3":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":3, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_4":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":4, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_5":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":5, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_6":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":6, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" 
+ }, + "19_diffusion_models_v2.pdf_7":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":7, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_8":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":8, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_9":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":9, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_10":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":10, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_11":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":11, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_12":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":12, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_13":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":13, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_14":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":14, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + 
"title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_15":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":15, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_16":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":16, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_17":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":17, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_18":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":18, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_19":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":19, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_20":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":20, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_21":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":21, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + 
"19_diffusion_models_v2.pdf_22":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":22, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_23":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":23, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_24":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":24, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_25":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":25, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_26":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":26, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_27":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":27, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_28":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":28, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_29":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":29, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + 
"title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_30":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":30, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_31":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":31, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_32":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":32, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_33":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":33, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_34":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":34, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_35":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":35, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_36":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":36, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + 
"19_diffusion_models_v2.pdf_37":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":37, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_38":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":38, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_39":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":39, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_40":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":40, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_41":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":41, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_42":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":42, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_43":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":43, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_44":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":44, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + 
"title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_45":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":45, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_46":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":46, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_47":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":47, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_48":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":48, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_49":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":49, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_50":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":50, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_51":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":51, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + 
"19_diffusion_models_v2.pdf_52":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":52, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_53":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":53, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_54":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":54, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "19_diffusion_models_v2.pdf_55":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/19_diffusion_models_v2.pdf", + "page":55, + "date":"04/11/2024\n Thursday", + "tldr":"Short text to discribe what this lecture is about.", + "title":"19 -- Diffusion Models", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_tzuwe6b9", + "suggested_readings":"Rocca, Understanding Diffusion Probabilistic Models\n\n\nUDL Chapter 18", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_0":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":0, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_1":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":1, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_2":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":2, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + 
"source_type":"lecture" + }, + "20_graph_neural_networks.pdf_3":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":3, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_4":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":4, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_5":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":5, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_6":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":6, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_7":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":7, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_8":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":8, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_9":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":9, + 
"date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_10":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":10, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_11":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":11, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_12":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":12, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_13":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":13, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_14":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":14, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_15":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":15, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and 
how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_16":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":16, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_17":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":17, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_18":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":18, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_19":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":19, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_20":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":20, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_21":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":21, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + 
"suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_22":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":22, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_23":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":23, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_24":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":24, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_25":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":25, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_26":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":26, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_27":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":27, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_28":{ + 
"source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":28, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_29":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":29, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_30":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":30, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_31":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":31, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_32":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":32, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_33":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":33, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_34":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":34, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce 
graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_35":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":35, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_36":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":36, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_37":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":37, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_38":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":38, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_39":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":39, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_40":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":40, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- 
Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_41":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":41, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_42":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":42, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_43":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":43, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_44":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":44, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_45":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":45, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_46":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":46, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + 
"20_graph_neural_networks.pdf_47":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":47, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_48":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":48, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_49":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":49, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_50":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":50, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_51":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":51, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_52":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":52, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_53":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":53, + "date":"04/16/2024\n Tuesday", + 
"tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_54":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":54, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_55":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":55, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_56":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":56, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_57":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":57, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_58":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":58, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_59":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":59, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional 
network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_60":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":60, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_61":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":61, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "20_graph_neural_networks.pdf_62":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/20_graph_neural_networks.pdf", + "page":62, + "date":"04/16/2024\n Tuesday", + "tldr":"In this lecture we introduce graph neural networks, define matrix representations, how to do graph level classification and regression, and how to define graph convolutional network layers.", + "title":"20 -- Graph Neural Networks", + "lecture_recording":"https://mymedia.bu.edu/media/t/1_bt7qtymk", + "suggested_readings":"UDL Chapter 13", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_0":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":0, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_1":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":1, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_2":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":2, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_3":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":3, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two 
seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_4":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":4, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_5":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":5, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_6":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":6, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_7":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":7, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_8":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":8, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_9":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":9, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_10":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":10, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_11":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":11, + "date":"04/23/2024\n Tuesday", + 
"tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_12":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":12, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_13":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":13, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_14":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":14, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_15":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":15, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_16":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":16, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_17":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":17, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_18":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":18, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_19":{ 
+ "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":19, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_20":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":20, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_21":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":21, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_22":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":22, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_23":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":23, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_24":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":24, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_25":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":25, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_26":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":26, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement 
Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_27":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":27, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_28":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":28, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_29":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":29, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "21_RL_RLHF_v2.pdf_30":{ + "source":"https://dl4ds.github.io/sp2024/static_files/lectures/21_RL_RLHF_v2.pdf", + "page":30, + "date":"04/23/2024\n Tuesday", + "tldr":"We cover the basic concepts of reinforcement learning then review reinforcement learning from human feedback via the two seminal papers on the topic.", + "title":"21 - Reinforcement Learning", + "lecture_recording":null, + "suggested_readings":"UDL Chapter 19", + "source_type":"lecture" + }, + "https://dl4ds.github.io/sp2024/":{ + "source":"https://dl4ds.github.io/sp2024/", + "page":0 + }, + "https://dl4ds.github.io/sp2024/schedule/":{ + "source":"https://dl4ds.github.io/sp2024/schedule/", + "page":0 + }, + "https://dl4ds.github.io/sp2024/lectures/":{ + "source":"https://dl4ds.github.io/sp2024/lectures/", + "page":0 + }, + "https://dl4ds.github.io/sp2024/discussions/":{ + "source":"https://dl4ds.github.io/sp2024/discussions/", + "page":0 + }, + "https://dl4ds.github.io/sp2024/assignments/":{ + "source":"https://dl4ds.github.io/sp2024/assignments/", + "page":0 + }, + "https://dl4ds.github.io/sp2024/project/":{ + "source":"https://dl4ds.github.io/sp2024/project/", + "page":0 + }, + "https://dl4ds.github.io/sp2024/materials/":{ + "source":"https://dl4ds.github.io/sp2024/materials/", + "page":0 + } +} \ No newline at end of file