diff --git "a/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/.keep" "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/.keep"
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git "a/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/README.md" "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/README.md"
new file mode 100644
index 0000000000000000000000000000000000000000..c9506908f3224709d4af1fb8ebed4c48ed20d286
--- /dev/null
+++ "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/README.md"
@@ -0,0 +1,94 @@
+## Step 1: Prepare the action dataset
+
+### Test set download
+
+Link: https://pan.baidu.com/s/1M0IW4fhvWPHNYFwDNo7DYg
+
+Extraction code: zav5
+
+Size: 6.22 GB
+
+### Label definitions
+
+| label | meaning | label | meaning |
+| :---- | :---------------- | ----- | ----------------- |
+| 1 | holding_cloth | 5 | open_door_enter |
+| 2 | eating | 6 | carrying_bag |
+| 3 | go_up_down_stairs | 7 | smoking_cigarette |
+| 4 | open_door_leave | 0 | others |
+
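+A small Python helper mirroring the table (hypothetical names, not files in this repo) can be handy when writing annotation / frame-list files:
+
+```python
+# Label ids exactly as defined in the table above (0 = others).
+ID2LABEL = {
+    0: "others",
+    1: "holding_cloth",
+    2: "eating",
+    3: "go_up_down_stairs",
+    4: "open_door_leave",
+    5: "open_door_enter",
+    6: "carrying_bag",
+    7: "smoking_cigarette",
+}
+LABEL2ID = {name: idx for idx, name in ID2LABEL.items()}
+
+if __name__ == "__main__":
+    # e.g. the "label=1" hard-coded in data/frame_path.sh corresponds to holding_cloth
+    print(LABEL2ID["holding_cloth"])  # -> 1
+```
+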
+### Train/val data sources
+
+**Where the train/val clips come from** (for the AVA classes, a download sketch follows the list below):
+1. holding_cloth
+ - Charades
+
+2. eating
+ - Kinetics 400
+
+3. go_up_down_stairs
+
+
+4. open_door_leave
+ - AVA (exit)
+
+5. open_door_enter
+ - AVA (enter)
+
+6. carrying_bag
+ - Charades
+
+7. smoking_cigarette
+ - Kinetics 400
+
+8. others
+
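+For the two AVA classes (4 and 5), the clip lists live under `data/ava-data-download/`: the `*.txt` files hold video file names and the `*_pro` files (generated by `data/ava-data-download/tmp.py`) hold the full S3 URLs. A minimal download sketch, assuming the lists are read from that directory and the videos are stored following the layout sketched in `data/ava-data-extra.sh`:
+
+```python
+import os
+import urllib.request
+
+def download_list(list_file: str, out_dir: str) -> None:
+    """Download every URL in a *_pro file (one URL per line) into out_dir."""
+    os.makedirs(out_dir, exist_ok=True)
+    with open(list_file, encoding="utf-8") as f:
+        urls = [line.strip() for line in f if line.strip()]
+    for url in urls:
+        dst = os.path.join(out_dir, url.rsplit("/", 1)[-1])
+        if os.path.exists(dst):  # some lists repeat a video; skip what is already there
+            continue
+        print("downloading", url)
+        urllib.request.urlretrieve(url, dst)
+
+if __name__ == "__main__":
+    download_list("data/ava-data-download/enter_t_pro", "ava/enter-train")
+    download_list("data/ava-data-download/enter_v_pro", "ava/enter-val")
+    download_list("data/ava-data-download/exit_t_pro", "ava/exit-train")
+    download_list("data/ava-data-download/exit_v_pro", "ava/exit-val")
+```
+
+Because a few lists contain the same video more than once, the existing-file check above avoids re-downloading duplicates; `wget -c -i <list>` works just as well from the command line.
+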
+## Step 2: Environment setup
+
+Reference and tutorial:
+https://pytorchvideo.org/docs/tutorial_classification
+
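+A minimal sketch (not part of this repo) of building the backbone for the 8 classes above with PyTorchVideo's `create_resnet` factory, as used in the linked tutorial; the data pipeline and training loop follow the tutorial itself:
+
+```python
+import torch
+import torch.nn as nn
+import pytorchvideo.models.resnet
+
+NUM_CLASSES = 8  # labels 0-7 from the table in Step 1
+
+def make_model() -> nn.Module:
+    # 3D ResNet-50 with an 8-way classification head (tutorial defaults otherwise).
+    return pytorchvideo.models.resnet.create_resnet(
+        input_channel=3,
+        model_depth=50,
+        model_num_class=NUM_CLASSES,
+        norm=nn.BatchNorm3d,
+        activation=nn.ReLU,
+    )
+
+if __name__ == "__main__":
+    model = make_model()
+    clip = torch.randn(2, 3, 8, 224, 224)  # (batch, channels, frames, height, width)
+    print(model(clip).shape)               # expected: torch.Size([2, 8])
+```
+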
+## References
+
+https://github.com/xiaobai1217/Awesome-Video-Datasets/blob/main/README.md
+
+@article{sigurdsson2016hollywood,
+author = {Gunnar A. Sigurdsson and G{\"u}l Varol and Xiaolong Wang and Ivan Laptev and Ali Farhadi and Abhinav Gupta},
+title = {Hollywood in Homes: Crowdsourcing Data Collection for Activity Understanding},
+journal = {ArXiv e-prints},
+eprint = {1604.01753},
+year = {2016},
+url = {http://arxiv.org/abs/1604.01753},
+}
+
+@inproceedings{TSN2016ECCV,
+  author = {Limin Wang and
+                  Yuanjun Xiong and
+                  Zhe Wang and
+                  Yu Qiao and
+                  Dahua Lin and
+                  Xiaoou Tang and
+                  Luc {Van Gool}},
+  title = {Temporal Segment Networks: Towards Good Practices for Deep Action Recognition},
+  booktitle = {ECCV},
+  year = {2016},
+}
+
+@inproceedings{carreira2017quovadis,
+  author = {Carreira, J. and Zisserman, Andrew},
+  title = {Quo Vadis, Action Recognition? A New Model and the Kinetics Dataset},
+  booktitle = {CVPR},
+  year = {2017},
+  pages = {4724-4733},
+  doi = {10.1109/CVPR.2017.502}
+}
+
+@inproceedings{gu2018ava,
+  title={AVA: A video dataset of spatio-temporally localized atomic visual actions},
+ author={Gu, Chunhui and Sun, Chen and Ross, David A and Vondrick, Carl and Pantofaru, Caroline and Li, Yeqing and Vijayanarasimhan, Sudheendra and Toderici, George and Ricco, Susanna and Sukthankar, Rahul and others},
+ booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition},
+ pages={6047--6056},
+ year={2018}
+}
+
+@article{donahue2014lrcn,
+  author = {Jeff Donahue and Lisa Anne Hendricks and Sergio Guadarrama and Marcus Rohrbach and Subhashini Venugopalan and Kate Saenko and Trevor Darrell},
+  title = {Long-term Recurrent Convolutional Networks for Visual Recognition and Description},
+  journal = {arXiv preprint arXiv:1411.4389},
+  year = {2014},
+}
\ No newline at end of file
diff --git "a/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/enter_t.txt" "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/enter_t.txt"
new file mode 100644
index 0000000000000000000000000000000000000000..f3ceab288ab86eff592dd42a2e22f7f8a946d274
--- /dev/null
+++ "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/enter_t.txt"
@@ -0,0 +1,108 @@
+-OyDO1g74vc.mp4
+-ZFgsrolSxo.mkv
+0f39OWEqJ24.mp4
+26V9UzqSguo.mp4
+2E_e8JlvTlg.mkv
+2KpThOF_QmE.mkv
+2fwni_Kjf2M.mkv
+2qQs3Y9OJX0.mkv
+32HR3MnDZ8g.mp4
+3_VjIRdXVdM.mkv
+5MxjqHfkWFI.mkv
+5milLu-6bWI.mp4
+7YpF6DntOYw.mkv
+7g37N3eoQ9s.mkv
+7nHkh4sP5Ks.mkv
+8JSxLhDMGtE.mkv
+8nO5FFbIAog.webm
+9IF8uTRrWAM.mkv
+9eAOr_ttXp0.mkv
+9mLYmkonWZQ.mkv
+B1MAUxpKaV8.mkv
+BCiuXAuCKAU.mp4
+BY3sZmvUp-0.mp4
+C25wkwAMB-w.mkv
+C3qk4yAMANk.mkv
+CG98XdYsgrA.mkv
+E7JcKooKVsM.mp4
+Feu1_8NazPE.mp4
+G5Yr20A5z_Q.mkv
+HTYT2vF-j_w.mkv
+HV0H6oc4Kvs.mkv
+Hi8QeP_VPu0.mkv
+HymKCzQJbB8.mkv
+I8j6Xq2B5ys.mp4
+IKdBLciu_-A.mp4
+J1jDc2rTJlg.mkv
+JNb4nWexD0I.mkv
+KWoSGtglCms.mkv
+Kb1fduj-jdY.mp4
+Ksd1JQFHYWA.mp4
+LIavUJVrXaI.mkv
+LrDT25hmApw.mkv
+N5UD8FGzDek.mkv
+O5m_0Yay4EU.mkv
+OfMdakd4bHI.mkv
+P90hF2S1JzA.mkv
+PNZQ2UJfyQE.mp4
+PcFEhUKhN6g.mkv
+QotkBTEePI8.mkv
+Rm518TUhbRY.mkv
+S0tkhGJjwLA.mkv
+SHBMiL5f_3Q.mkv
+TCmNvNLRWrc.mkv
+TcB0IFBwk-k.mkv
+UOfuzrwkclM.mkv
+U_WzY2k8IBM.mkv
+UgZFdrNT6W0.mkv
+UsLnxI_zGpY.mkv
+Vmef_8MY46w.mkv
+WVde9pyaHg4.mkv
+YYWdB7h1INo.mkv
+Ytga8ciKWJc.mkv
+_a9SWtcaNj8.mkv
+_mAfwH6i90E.mkv
+aDEYi1OG0vU.mkv
+b5pRYl_djbs.mp4
+bAVXp1oGjHA.mkv
+cKA-qeZuH_w.mkv
+cWYJHb25EVs.mp4
+cc4y-yYm5Ao.mkv
+dMH8L7mqCNI.mkv
+gjdgj04FzR0.mp4
+hbYvDvJrpNk.mp4
+iSlDMboCSao.mkv
+jBs_XYHI7gM.mkv
+jI0HIlSsa3s.mkv
+jgAwJ0RqmYg.mp4
+jqZpiHlJUig.mkv
+kplbKz3_fZk.mkv
+l-jxh8gpxuY.mkv
+lWXhqIAvarw.mkv
+mfsbYdLx9wE.mkv
+mkcDANJjDcM.mkv
+oq_bufAhyl8.mkv
+phVLLTMzmKk.mkv
+plkJ45_-pMk.mp4
+qpoWHELxL-4.mp4
+qrkff49p4E4.mp4
+rFgb2ECMcrY.mkv
+rJKeqfTlAeY.mkv
+rk8Xm0EAOWs.mkv
+skiZueh4lfY.mkv
+t0V4drbYDnc.mkv
+t1LXrJOvPDg.mkv
+tt0t_a1EDCE.mkv
+uq_HBsvP548.mkv
+uzPI7FcF79U.mkv
+vfjywN5CN0Y.mkv
+wogRuPNBUi8.mp4
+x-6CtPWVi6E.mkv
+xO4ABy2iOQA.mp4
+xp67EC-Hvwk.mkv
+y7ncweROe9U.mkv
+yo-Kg2YxlZs.mkv
+yqImJuC5UzI.mp4
+yvgCGJ6vfkY.mkv
+z3kgrh0L_80.mkv
+zR725veL-DI.mkv
diff --git "a/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/enter_t_pro" "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/enter_t_pro"
new file mode 100644
index 0000000000000000000000000000000000000000..c1904ce78a1e1fdb664b352bbae90d092b8260f4
--- /dev/null
+++ "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/enter_t_pro"
@@ -0,0 +1,108 @@
+https://s3.amazonaws.com/ava-dataset/trainval/-OyDO1g74vc.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/-ZFgsrolSxo.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/0f39OWEqJ24.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/26V9UzqSguo.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/2E_e8JlvTlg.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/2KpThOF_QmE.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/2fwni_Kjf2M.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/2qQs3Y9OJX0.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/32HR3MnDZ8g.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/3_VjIRdXVdM.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/5MxjqHfkWFI.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/5milLu-6bWI.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/7YpF6DntOYw.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/7g37N3eoQ9s.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/7nHkh4sP5Ks.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/8JSxLhDMGtE.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/8nO5FFbIAog.webm
+https://s3.amazonaws.com/ava-dataset/trainval/9IF8uTRrWAM.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/9eAOr_ttXp0.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/9mLYmkonWZQ.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/B1MAUxpKaV8.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/BCiuXAuCKAU.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/BY3sZmvUp-0.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/C25wkwAMB-w.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/C3qk4yAMANk.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/CG98XdYsgrA.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/E7JcKooKVsM.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/Feu1_8NazPE.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/G5Yr20A5z_Q.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/HTYT2vF-j_w.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/HV0H6oc4Kvs.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/Hi8QeP_VPu0.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/HymKCzQJbB8.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/I8j6Xq2B5ys.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/IKdBLciu_-A.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/J1jDc2rTJlg.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/JNb4nWexD0I.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/KWoSGtglCms.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/Kb1fduj-jdY.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/Ksd1JQFHYWA.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/LIavUJVrXaI.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/LrDT25hmApw.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/N5UD8FGzDek.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/O5m_0Yay4EU.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/OfMdakd4bHI.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/P90hF2S1JzA.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/PNZQ2UJfyQE.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/PcFEhUKhN6g.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/QotkBTEePI8.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/Rm518TUhbRY.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/S0tkhGJjwLA.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/SHBMiL5f_3Q.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/TCmNvNLRWrc.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/TcB0IFBwk-k.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/UOfuzrwkclM.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/U_WzY2k8IBM.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/UgZFdrNT6W0.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/UsLnxI_zGpY.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/Vmef_8MY46w.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/WVde9pyaHg4.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/YYWdB7h1INo.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/Ytga8ciKWJc.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/_a9SWtcaNj8.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/_mAfwH6i90E.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/aDEYi1OG0vU.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/b5pRYl_djbs.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/bAVXp1oGjHA.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/cKA-qeZuH_w.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/cWYJHb25EVs.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/cc4y-yYm5Ao.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/dMH8L7mqCNI.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/gjdgj04FzR0.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/hbYvDvJrpNk.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/iSlDMboCSao.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/jBs_XYHI7gM.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/jI0HIlSsa3s.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/jgAwJ0RqmYg.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/jqZpiHlJUig.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/kplbKz3_fZk.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/l-jxh8gpxuY.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/lWXhqIAvarw.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/mfsbYdLx9wE.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/mkcDANJjDcM.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/oq_bufAhyl8.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/phVLLTMzmKk.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/plkJ45_-pMk.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/qpoWHELxL-4.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/qrkff49p4E4.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/rFgb2ECMcrY.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/rJKeqfTlAeY.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/rk8Xm0EAOWs.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/skiZueh4lfY.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/t0V4drbYDnc.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/t1LXrJOvPDg.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/tt0t_a1EDCE.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/uq_HBsvP548.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/uzPI7FcF79U.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/vfjywN5CN0Y.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/wogRuPNBUi8.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/x-6CtPWVi6E.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/xO4ABy2iOQA.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/xp67EC-Hvwk.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/y7ncweROe9U.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/yo-Kg2YxlZs.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/yqImJuC5UzI.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/yvgCGJ6vfkY.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/z3kgrh0L_80.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/zR725veL-DI.mkv
diff --git "a/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/enter_v.txt" "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/enter_v.txt"
new file mode 100644
index 0000000000000000000000000000000000000000..31ebe31d7502d91484da94cd7ce53ec5ead0600f
--- /dev/null
+++ "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/enter_v.txt"
@@ -0,0 +1,29 @@
+55Ihr6uVIDA.mkv
+914yZXz-iRs.mkv
+9Y_l9NsnYE0.mp4
+BXCh3r-pPAM.mkv
+CMCPhm2L400.mkv
+Di1MG6auDYo.mkv
+Gvp-cj3bmIY.webm
+IzvOYVMltkI.mp4
+KHHgQ_Pe4cI.mkv
+NO2esmws190.mkv
+O_NYCUhZ9zw.mp4
+QCLQYnt3aMo.webm
+TEQ9sAj-DPo.mp4
+UOyyTUX5Vo4.mkv
+WSPvfxtqisg.mkv
+XpGRS72ghag.mkv
+_7oWZq_s_Sk.mkv
+_eBah6c5kyA.mkv
+fpprSy6AzKk.mkv
+l2XO3tQk8lI.mkv
+lDmLcWWBp1E.mkv
+om_83F5VwTQ.mp4
+qx2vAO5ofmo.mp4
+rXFlJbXyZyc.mkv
+sNQJfYvhcPk.mp4
+u1ltv6r14KQ.mkv
+uNT6HrrnqPU.webm
+xeGWXqSvC-8.webm
+z-fsLpGHq6o.mkv
diff --git "a/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/enter_v_pro" "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/enter_v_pro"
new file mode 100644
index 0000000000000000000000000000000000000000..8a64543dc826cfd195eeeb0fcf32ea3ac8542048
--- /dev/null
+++ "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/enter_v_pro"
@@ -0,0 +1,29 @@
+https://s3.amazonaws.com/ava-dataset/trainval/55Ihr6uVIDA.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/914yZXz-iRs.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/9Y_l9NsnYE0.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/BXCh3r-pPAM.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/CMCPhm2L400.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/Di1MG6auDYo.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/Gvp-cj3bmIY.webm
+https://s3.amazonaws.com/ava-dataset/trainval/IzvOYVMltkI.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/KHHgQ_Pe4cI.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/NO2esmws190.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/O_NYCUhZ9zw.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/QCLQYnt3aMo.webm
+https://s3.amazonaws.com/ava-dataset/trainval/TEQ9sAj-DPo.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/UOyyTUX5Vo4.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/WSPvfxtqisg.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/XpGRS72ghag.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/_7oWZq_s_Sk.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/_eBah6c5kyA.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/fpprSy6AzKk.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/l2XO3tQk8lI.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/lDmLcWWBp1E.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/om_83F5VwTQ.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/qx2vAO5ofmo.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/rXFlJbXyZyc.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/sNQJfYvhcPk.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/u1ltv6r14KQ.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/uNT6HrrnqPU.webm
+https://s3.amazonaws.com/ava-dataset/trainval/xeGWXqSvC-8.webm
+https://s3.amazonaws.com/ava-dataset/trainval/z-fsLpGHq6o.mkv
diff --git "a/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/exit_t.txt" "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/exit_t.txt"
new file mode 100644
index 0000000000000000000000000000000000000000..49714129b5a48f2ef0f1c0eade9415b08cd7f00e
--- /dev/null
+++ "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/exit_t.txt"
@@ -0,0 +1,52 @@
+-IELREHX_js.mp4
+2E_e8JlvTlg.mkv
+2qQs3Y9OJX0.mkv
+9IF8uTRrWAM.mkv
+9bK05eBt1GM.mp4
+9mLYmkonWZQ.mkv
+9tyiDEYiWiA.mkv
+B1MAUxpKaV8.mkv
+C3qk4yAMANk.mkv
+CG98XdYsgrA.mkv
+Ie35yEssHko.mkv
+JNb4nWexD0I.mkv
+KWoSGtglCms.mkv
+KWoSGtglCms.mkv
+OfMdakd4bHI.mkv
+OfMdakd4bHI.mkv
+PNZQ2UJfyQE.mp4
+PcFEhUKhN6g.mkv
+TM5MPJIq1Is.mkv
+TM5MPJIq1Is.mkv
+UgZFdrNT6W0.mkv
+UgZFdrNT6W0.mkv
+UgZFdrNT6W0.mkv
+UgZFdrNT6W0.mkv
+VsYPP2I0aUQ.mkv
+XglAvHaEtHY.mp4
+Ytga8ciKWJc.mkv
+_ithRWANKB0.mp4
+aMYcLyh9OhU.mkv
+cLiJgvrDlWw.mp4
+eA55_shhKko.mkv
+fZs-yXm-uUs.mp4
+iSlDMboCSao.mkv
+jBs_XYHI7gM.mkv
+lWXhqIAvarw.mkv
+lWXhqIAvarw.mkv
+miB-wo2PfLI.mkv
+phrYEKv0rmw.mkv
+pieVIsGmLsc.mkv
+qrkff49p4E4.mp4
+qrkff49p4E4.mp4
+rk8Xm0EAOWs.mkv
+rk8Xm0EAOWs.mkv
+t0V4drbYDnc.mkv
+tt0t_a1EDCE.mkv
+x-6CtPWVi6E.mkv
+x-6CtPWVi6E.mkv
+xO4ABy2iOQA.mp4
+xO4ABy2iOQA.mp4
+xp67EC-Hvwk.mkv
+yMtGmGa8KZ0.mkv
+yo-Kg2YxlZs.mkv
diff --git "a/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/exit_t_pro" "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/exit_t_pro"
new file mode 100644
index 0000000000000000000000000000000000000000..a409947ca8231cf064a76342d0f4aef0152895e3
--- /dev/null
+++ "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/exit_t_pro"
@@ -0,0 +1,52 @@
+https://s3.amazonaws.com/ava-dataset/trainval/-IELREHX_js.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/2E_e8JlvTlg.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/2qQs3Y9OJX0.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/9IF8uTRrWAM.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/9bK05eBt1GM.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/9mLYmkonWZQ.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/9tyiDEYiWiA.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/B1MAUxpKaV8.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/C3qk4yAMANk.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/CG98XdYsgrA.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/Ie35yEssHko.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/JNb4nWexD0I.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/KWoSGtglCms.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/KWoSGtglCms.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/OfMdakd4bHI.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/OfMdakd4bHI.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/PNZQ2UJfyQE.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/PcFEhUKhN6g.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/TM5MPJIq1Is.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/TM5MPJIq1Is.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/UgZFdrNT6W0.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/UgZFdrNT6W0.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/UgZFdrNT6W0.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/UgZFdrNT6W0.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/VsYPP2I0aUQ.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/XglAvHaEtHY.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/Ytga8ciKWJc.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/_ithRWANKB0.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/aMYcLyh9OhU.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/cLiJgvrDlWw.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/eA55_shhKko.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/fZs-yXm-uUs.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/iSlDMboCSao.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/jBs_XYHI7gM.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/lWXhqIAvarw.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/lWXhqIAvarw.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/miB-wo2PfLI.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/phrYEKv0rmw.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/pieVIsGmLsc.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/qrkff49p4E4.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/qrkff49p4E4.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/rk8Xm0EAOWs.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/rk8Xm0EAOWs.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/t0V4drbYDnc.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/tt0t_a1EDCE.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/x-6CtPWVi6E.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/x-6CtPWVi6E.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/xO4ABy2iOQA.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/xO4ABy2iOQA.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/xp67EC-Hvwk.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/yMtGmGa8KZ0.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/yo-Kg2YxlZs.mkv
diff --git "a/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/exit_v.txt" "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/exit_v.txt"
new file mode 100644
index 0000000000000000000000000000000000000000..f3b8fd26bbf873e987bb34aadf76caf18202a8f1
--- /dev/null
+++ "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/exit_v.txt"
@@ -0,0 +1,12 @@
+9Y_l9NsnYE0.mp4
+KVq6If6ozMY.mkv
+QCLQYnt3aMo.webm
+XpGRS72ghag.mkv
+XpGRS72ghag.mkv
+ZosVdkY76FU.mkv
+ZosVdkY76FU.mkv
+lDmLcWWBp1E.mkv
+qx2vAO5ofmo.mp4
+tghXjom3120.mkv
+xeGWXqSvC-8.webm
+yn9WN9lsHRE.mkv
diff --git "a/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/exit_v_pro" "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/exit_v_pro"
new file mode 100644
index 0000000000000000000000000000000000000000..ea3ecf2a7bbf7de1aef422c43a43c4a7eb22745f
--- /dev/null
+++ "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/exit_v_pro"
@@ -0,0 +1,12 @@
+https://s3.amazonaws.com/ava-dataset/trainval/9Y_l9NsnYE0.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/KVq6If6ozMY.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/QCLQYnt3aMo.webm
+https://s3.amazonaws.com/ava-dataset/trainval/XpGRS72ghag.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/XpGRS72ghag.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/ZosVdkY76FU.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/ZosVdkY76FU.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/lDmLcWWBp1E.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/qx2vAO5ofmo.mp4
+https://s3.amazonaws.com/ava-dataset/trainval/tghXjom3120.mkv
+https://s3.amazonaws.com/ava-dataset/trainval/xeGWXqSvC-8.webm
+https://s3.amazonaws.com/ava-dataset/trainval/yn9WN9lsHRE.mkv
diff --git "a/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/tmp.py" "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/tmp.py"
new file mode 100644
index 0000000000000000000000000000000000000000..196a955d3206b0a3ac66ff83562550b8fa62f807
--- /dev/null
+++ "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-download/tmp.py"
@@ -0,0 +1,9 @@
+f1 = open("exit_t.txt")
+str1 = "https://s3.amazonaws.com/ava-dataset/trainval/"
+
+with open("exit_t_pro", "w", encoding="utf-8") as f2:
+ for i in f1.readlines():
+ f2.write(str1 + i)
+
+
+f1.close()
diff --git "a/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-extra.sh" "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-extra.sh"
new file mode 100644
index 0000000000000000000000000000000000000000..92a440a4661db191f206e5ef94cd6a882035bc65
--- /dev/null
+++ "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava-data-extra.sh"
@@ -0,0 +1,126 @@
+#!/bin/bash
+
+# Reference snippet: cut each full AVA video down to the 15-minute segment (minutes 15-30) that AVA annotates.
+# IN_DATA_DIR="../../data/ava/videos"
+# OUT_DATA_DIR="../../data/ava/videos_15min"
+
+# if [[ ! -d "${OUT_DATA_DIR}" ]]; then
+# echo "${OUT_DATA_DIR} doesn't exist. Creating it.";
+# mkdir -p ${OUT_DATA_DIR}
+# fi
+
+# for video in $(ls -A1 -U ${IN_DATA_DIR}/*)
+# do
+# out_name="${OUT_DATA_DIR}/${video##*/}"
+# if [ ! -f "${out_name}" ]; then
+# ffmpeg -ss 900 -t 901 -i "${video}" "${out_name}"
+# fi
+# done
+
+# Expected directory layout:
+# |--ava
+#     |--enter-train
+#     |--enter-val
+#     |--exit-train
+#     |--exit-val
+#     |--short_videos
+#         |--enter-train
+#         |--enter-val
+#         |--exit-train
+#         |--exit-val
+#     |--frames
+#         |--enter-train
+#         |--enter-val
+#         |--exit-train
+#         |--exit-val
+
+
+# Pass 1: cut a 4-second clip around each annotated keyframe, for every class
+# directory listed in dir.log.
+for dir in `cat dir.log`; do
+    cd "$dir"
+    OUT_DATA_DIR="../short_videos/${dir}"
+
+    if [[ ! -d "${OUT_DATA_DIR}" ]]; then
+        echo "${OUT_DATA_DIR} doesn't exist. Creating it."
+        mkdir -p "${OUT_DATA_DIR}"
+    fi
+
+    i=2
+    for line in `cat *.csv`    # csv columns: youtube_id,video_type,keyframe
+    do
+        echo "line $i: $line"
+        youtube_id=`echo $line | cut -d ',' -f1`
+        video_type=`echo $line | cut -d ',' -f2`
+        keyframe=`echo $line | cut -d ',' -f3`
+        video_name="${youtube_id}.${video_type}"
+        # smoking / eating / carrying bag / up-down stairs / others / holding cloth,
+        # and the enter/exit classes: keep keyframe-2s .. keyframe+2s
+        start_time=`expr $keyframe - 2`
+        end_time=`expr $keyframe + 2`    # implied by "-t 4" below
+
+        ffmpeg -ss "${start_time}" -t 4 -i "${video_name}" "${OUT_DATA_DIR}/${video_name}"
+
+        let "i=$i+1"
+    done
+
+    cd ..
+done
+
+# Pass 2: cut the clips listed in the csv passed as $1 out of short_videos,
+# then extract JPEG frames for each clip.
+for dir in `cat dir.log`;
+do
+
+IN_DATA_DIR="./short_videos"
+OUT_DATA_DIR="./frames/${dir}"
+
+if [[ ! -d "${OUT_DATA_DIR}" ]]; then
+  echo "${OUT_DATA_DIR} doesn't exist. Creating it."
+  mkdir -p "${OUT_DATA_DIR}"
+fi
+
+i=2
+for line in `cat $1`    # csv columns: youtube_id,video_type,keyframe
+do
+  echo "line $i: $line"
+  youtube_id=`echo $line | cut -d ',' -f1`
+  video_type=`echo $line | cut -d ',' -f2`
+  keyframe=`echo $line | cut -d ',' -f3`
+  video_name="${youtube_id}.${video_type}"
+  # smoking / eating / carrying bag / up-down stairs / others / holding cloth,
+  # and the enter/exit classes: keep keyframe-2s .. keyframe+2s
+  start_time=`expr $keyframe - 2`
+  end_time=`expr $keyframe + 2`    # implied by "-t 4" below
+
+  ffmpeg -ss "${start_time}" -t 4 -i "${IN_DATA_DIR}/${video_name}" -codec copy "${OUT_DATA_DIR}/${video_name}"
+
+  let "i=$i+1"
+done
+
+echo "Finished cutting videos!"
+echo "Starting to extract frames..."
+
+if [[ ! -d "${OUT_DATA_DIR}" ]]; then
+  echo "${OUT_DATA_DIR} doesn't exist. Creating it."
+  mkdir -p "${OUT_DATA_DIR}"
+fi
+
+# 30 fps, best-quality JPEGs, one sub-directory per clip.
+for video in $(ls -A1 -U ${IN_DATA_DIR}/*)
+do
+  video_name=${video##*/}
+
+  if [[ $video_name = *".webm" ]]; then
+    video_name=${video_name::-5}
+  else
+    video_name=${video_name::-4}
+  fi
+
+  out_video_dir=${OUT_DATA_DIR}/${video_name}/
+  mkdir -p "${out_video_dir}"
+
+  out_name="${out_video_dir}/${video_name}_%06d.jpg"
+
+  ffmpeg -i "${video}" -r 30 -q:v 1 "${out_name}"
+done
+
+done    # end of the per-class (dir) loop
+
+echo "Done extracting!"
\ No newline at end of file
diff --git "a/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava_v2.1/ava_action_list_v2.1.pbtxt" "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava_v2.1/ava_action_list_v2.1.pbtxt"
new file mode 100644
index 0000000000000000000000000000000000000000..d400eff9ed10affa508acae6d8fe83fe4be1809b
--- /dev/null
+++ "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava_v2.1/ava_action_list_v2.1.pbtxt"
@@ -0,0 +1,400 @@
+label {
+ name: "bend/bow (at the waist)"
+ label_id: 1
+ label_type: PERSON_MOVEMENT
+}
+label {
+ name: "crawl"
+ label_id: 2
+ label_type: PERSON_MOVEMENT
+}
+label {
+ name: "crouch/kneel"
+ label_id: 3
+ label_type: PERSON_MOVEMENT
+}
+label {
+ name: "dance"
+ label_id: 4
+ label_type: PERSON_MOVEMENT
+}
+label {
+ name: "fall down"
+ label_id: 5
+ label_type: PERSON_MOVEMENT
+}
+label {
+ name: "get up"
+ label_id: 6
+ label_type: PERSON_MOVEMENT
+}
+label {
+ name: "jump/leap"
+ label_id: 7
+ label_type: PERSON_MOVEMENT
+}
+label {
+ name: "lie/sleep"
+ label_id: 8
+ label_type: PERSON_MOVEMENT
+}
+label {
+ name: "martial art"
+ label_id: 9
+ label_type: PERSON_MOVEMENT
+}
+label {
+ name: "run/jog"
+ label_id: 10
+ label_type: PERSON_MOVEMENT
+}
+label {
+ name: "sit"
+ label_id: 11
+ label_type: PERSON_MOVEMENT
+}
+label {
+ name: "stand"
+ label_id: 12
+ label_type: PERSON_MOVEMENT
+}
+label {
+ name: "swim"
+ label_id: 13
+ label_type: PERSON_MOVEMENT
+}
+label {
+ name: "walk"
+ label_id: 14
+ label_type: PERSON_MOVEMENT
+}
+label {
+ name: "answer phone"
+ label_id: 15
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "brush teeth"
+ label_id: 16
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "carry/hold (an object)"
+ label_id: 17
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "catch (an object)"
+ label_id: 18
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "chop"
+ label_id: 19
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "climb (e.g., a mountain)"
+ label_id: 20
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "clink glass"
+ label_id: 21
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "close (e.g., a door, a box)"
+ label_id: 22
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "cook"
+ label_id: 23
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "cut"
+ label_id: 24
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "dig"
+ label_id: 25
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "dress/put on clothing"
+ label_id: 26
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "drink"
+ label_id: 27
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "drive (e.g., a car, a truck)"
+ label_id: 28
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "eat"
+ label_id: 29
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "enter"
+ label_id: 30
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "exit"
+ label_id: 31
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "extract"
+ label_id: 32
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "fishing"
+ label_id: 33
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "hit (an object)"
+ label_id: 34
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "kick (an object)"
+ label_id: 35
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "lift/pick up"
+ label_id: 36
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "listen (e.g., to music)"
+ label_id: 37
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "open (e.g., a window, a car door)"
+ label_id: 38
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "paint"
+ label_id: 39
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "play board game"
+ label_id: 40
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "play musical instrument"
+ label_id: 41
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "play with pets"
+ label_id: 42
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "point to (an object)"
+ label_id: 43
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "press"
+ label_id: 44
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "pull (an object)"
+ label_id: 45
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "push (an object)"
+ label_id: 46
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "put down"
+ label_id: 47
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "read"
+ label_id: 48
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "ride (e.g., a bike, a car, a horse)"
+ label_id: 49
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "row boat"
+ label_id: 50
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "sail boat"
+ label_id: 51
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "shoot"
+ label_id: 52
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "shovel"
+ label_id: 53
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "smoke"
+ label_id: 54
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "stir"
+ label_id: 55
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "take a photo"
+ label_id: 56
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "text on/look at a cellphone"
+ label_id: 57
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "throw"
+ label_id: 58
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "touch (an object)"
+ label_id: 59
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "turn (e.g., a screwdriver)"
+ label_id: 60
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "watch (e.g., TV)"
+ label_id: 61
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "work on a computer"
+ label_id: 62
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "write"
+ label_id: 63
+ label_type: OBJECT_MANIPULATION
+}
+label {
+ name: "fight/hit (a person)"
+ label_id: 64
+ label_type: PERSON_INTERACTION
+}
+label {
+ name: "give/serve (an object) to (a person)"
+ label_id: 65
+ label_type: PERSON_INTERACTION
+}
+label {
+ name: "grab (a person)"
+ label_id: 66
+ label_type: PERSON_INTERACTION
+}
+label {
+ name: "hand clap"
+ label_id: 67
+ label_type: PERSON_INTERACTION
+}
+label {
+ name: "hand shake"
+ label_id: 68
+ label_type: PERSON_INTERACTION
+}
+label {
+ name: "hand wave"
+ label_id: 69
+ label_type: PERSON_INTERACTION
+}
+label {
+ name: "hug (a person)"
+ label_id: 70
+ label_type: PERSON_INTERACTION
+}
+label {
+ name: "kick (a person)"
+ label_id: 71
+ label_type: PERSON_INTERACTION
+}
+label {
+ name: "kiss (a person)"
+ label_id: 72
+ label_type: PERSON_INTERACTION
+}
+label {
+ name: "lift (a person)"
+ label_id: 73
+ label_type: PERSON_INTERACTION
+}
+label {
+ name: "listen to (a person)"
+ label_id: 74
+ label_type: PERSON_INTERACTION
+}
+label {
+ name: "play with kids"
+ label_id: 75
+ label_type: PERSON_INTERACTION
+}
+label {
+ name: "push (another person)"
+ label_id: 76
+ label_type: PERSON_INTERACTION
+}
+label {
+ name: "sing to (e.g., self, a person, a group)"
+ label_id: 77
+ label_type: PERSON_INTERACTION
+}
+label {
+ name: "take (an object) from (a person)"
+ label_id: 78
+ label_type: PERSON_INTERACTION
+}
+label {
+ name: "talk to (e.g., self, a person, a group)"
+ label_id: 79
+ label_type: PERSON_INTERACTION
+}
+label {
+ name: "watch (a person)"
+ label_id: 80
+ label_type: PERSON_INTERACTION
+}
diff --git "a/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava_v2.1/ava_action_list_v2.1_for_activitynet_2018.pbtxt" "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava_v2.1/ava_action_list_v2.1_for_activitynet_2018.pbtxt"
new file mode 100644
index 0000000000000000000000000000000000000000..5e2c485682830919a09300ac851e6b0e4bdf3efb
--- /dev/null
+++ "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava_v2.1/ava_action_list_v2.1_for_activitynet_2018.pbtxt"
@@ -0,0 +1,240 @@
+item {
+ name: "bend/bow (at the waist)"
+ id: 1
+}
+item {
+ name: "crouch/kneel"
+ id: 3
+}
+item {
+ name: "dance"
+ id: 4
+}
+item {
+ name: "fall down"
+ id: 5
+}
+item {
+ name: "get up"
+ id: 6
+}
+item {
+ name: "jump/leap"
+ id: 7
+}
+item {
+ name: "lie/sleep"
+ id: 8
+}
+item {
+ name: "martial art"
+ id: 9
+}
+item {
+ name: "run/jog"
+ id: 10
+}
+item {
+ name: "sit"
+ id: 11
+}
+item {
+ name: "stand"
+ id: 12
+}
+item {
+ name: "swim"
+ id: 13
+}
+item {
+ name: "walk"
+ id: 14
+}
+item {
+ name: "answer phone"
+ id: 15
+}
+item {
+ name: "carry/hold (an object)"
+ id: 17
+}
+item {
+ name: "climb (e.g., a mountain)"
+ id: 20
+}
+item {
+ name: "close (e.g., a door, a box)"
+ id: 22
+}
+item {
+ name: "cut"
+ id: 24
+}
+item {
+ name: "dress/put on clothing"
+ id: 26
+}
+item {
+ name: "drink"
+ id: 27
+}
+item {
+ name: "drive (e.g., a car, a truck)"
+ id: 28
+}
+item {
+ name: "eat"
+ id: 29
+}
+item {
+ name: "enter"
+ id: 30
+}
+item {
+ name: "hit (an object)"
+ id: 34
+}
+item {
+ name: "lift/pick up"
+ id: 36
+}
+item {
+ name: "listen (e.g., to music)"
+ id: 37
+}
+item {
+ name: "open (e.g., a window, a car door)"
+ id: 38
+}
+item {
+ name: "play musical instrument"
+ id: 41
+}
+item {
+ name: "point to (an object)"
+ id: 43
+}
+item {
+ name: "pull (an object)"
+ id: 45
+}
+item {
+ name: "push (an object)"
+ id: 46
+}
+item {
+ name: "put down"
+ id: 47
+}
+item {
+ name: "read"
+ id: 48
+}
+item {
+ name: "ride (e.g., a bike, a car, a horse)"
+ id: 49
+}
+item {
+ name: "sail boat"
+ id: 51
+}
+item {
+ name: "shoot"
+ id: 52
+}
+item {
+ name: "smoke"
+ id: 54
+}
+item {
+ name: "take a photo"
+ id: 56
+}
+item {
+ name: "text on/look at a cellphone"
+ id: 57
+}
+item {
+ name: "throw"
+ id: 58
+}
+item {
+ name: "touch (an object)"
+ id: 59
+}
+item {
+ name: "turn (e.g., a screwdriver)"
+ id: 60
+}
+item {
+ name: "watch (e.g., TV)"
+ id: 61
+}
+item {
+ name: "work on a computer"
+ id: 62
+}
+item {
+ name: "write"
+ id: 63
+}
+item {
+ name: "fight/hit (a person)"
+ id: 64
+}
+item {
+ name: "give/serve (an object) to (a person)"
+ id: 65
+}
+item {
+ name: "grab (a person)"
+ id: 66
+}
+item {
+ name: "hand clap"
+ id: 67
+}
+item {
+ name: "hand shake"
+ id: 68
+}
+item {
+ name: "hand wave"
+ id: 69
+}
+item {
+ name: "hug (a person)"
+ id: 70
+}
+item {
+ name: "kiss (a person)"
+ id: 72
+}
+item {
+ name: "lift (a person)"
+ id: 73
+}
+item {
+ name: "listen to (a person)"
+ id: 74
+}
+item {
+ name: "push (another person)"
+ id: 76
+}
+item {
+ name: "sing to (e.g., self, a person, a group)"
+ id: 77
+}
+item {
+ name: "take (an object) from (a person)"
+ id: 78
+}
+item {
+ name: "talk to (e.g., self, a person, a group)"
+ id: 79
+}
+item {
+ name: "watch (a person)"
+ id: 80
+}
diff --git "a/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava_v2.1/ava_included_timestamps_v2.1.txt" "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava_v2.1/ava_included_timestamps_v2.1.txt"
new file mode 100644
index 0000000000000000000000000000000000000000..0475eb7493988239db9d75e67d07df02c9392b10
--- /dev/null
+++ "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava_v2.1/ava_included_timestamps_v2.1.txt"
@@ -0,0 +1,897 @@
+0902
+0903
+0904
+0905
+0906
+0907
+0908
+0909
+0910
+0911
+0912
+0913
+0914
+0915
+0916
+0917
+0918
+0919
+0920
+0921
+0922
+0923
+0924
+0925
+0926
+0927
+0928
+0929
+0930
+0931
+0932
+0933
+0934
+0935
+0936
+0937
+0938
+0939
+0940
+0941
+0942
+0943
+0944
+0945
+0946
+0947
+0948
+0949
+0950
+0951
+0952
+0953
+0954
+0955
+0956
+0957
+0958
+0959
+0960
+0961
+0962
+0963
+0964
+0965
+0966
+0967
+0968
+0969
+0970
+0971
+0972
+0973
+0974
+0975
+0976
+0977
+0978
+0979
+0980
+0981
+0982
+0983
+0984
+0985
+0986
+0987
+0988
+0989
+0990
+0991
+0992
+0993
+0994
+0995
+0996
+0997
+0998
+0999
+1000
+1001
+1002
+1003
+1004
+1005
+1006
+1007
+1008
+1009
+1010
+1011
+1012
+1013
+1014
+1015
+1016
+1017
+1018
+1019
+1020
+1021
+1022
+1023
+1024
+1025
+1026
+1027
+1028
+1029
+1030
+1031
+1032
+1033
+1034
+1035
+1036
+1037
+1038
+1039
+1040
+1041
+1042
+1043
+1044
+1045
+1046
+1047
+1048
+1049
+1050
+1051
+1052
+1053
+1054
+1055
+1056
+1057
+1058
+1059
+1060
+1061
+1062
+1063
+1064
+1065
+1066
+1067
+1068
+1069
+1070
+1071
+1072
+1073
+1074
+1075
+1076
+1077
+1078
+1079
+1080
+1081
+1082
+1083
+1084
+1085
+1086
+1087
+1088
+1089
+1090
+1091
+1092
+1093
+1094
+1095
+1096
+1097
+1098
+1099
+1100
+1101
+1102
+1103
+1104
+1105
+1106
+1107
+1108
+1109
+1110
+1111
+1112
+1113
+1114
+1115
+1116
+1117
+1118
+1119
+1120
+1121
+1122
+1123
+1124
+1125
+1126
+1127
+1128
+1129
+1130
+1131
+1132
+1133
+1134
+1135
+1136
+1137
+1138
+1139
+1140
+1141
+1142
+1143
+1144
+1145
+1146
+1147
+1148
+1149
+1150
+1151
+1152
+1153
+1154
+1155
+1156
+1157
+1158
+1159
+1160
+1161
+1162
+1163
+1164
+1165
+1166
+1167
+1168
+1169
+1170
+1171
+1172
+1173
+1174
+1175
+1176
+1177
+1178
+1179
+1180
+1181
+1182
+1183
+1184
+1185
+1186
+1187
+1188
+1189
+1190
+1191
+1192
+1193
+1194
+1195
+1196
+1197
+1198
+1199
+1200
+1201
+1202
+1203
+1204
+1205
+1206
+1207
+1208
+1209
+1210
+1211
+1212
+1213
+1214
+1215
+1216
+1217
+1218
+1219
+1220
+1221
+1222
+1223
+1224
+1225
+1226
+1227
+1228
+1229
+1230
+1231
+1232
+1233
+1234
+1235
+1236
+1237
+1238
+1239
+1240
+1241
+1242
+1243
+1244
+1245
+1246
+1247
+1248
+1249
+1250
+1251
+1252
+1253
+1254
+1255
+1256
+1257
+1258
+1259
+1260
+1261
+1262
+1263
+1264
+1265
+1266
+1267
+1268
+1269
+1270
+1271
+1272
+1273
+1274
+1275
+1276
+1277
+1278
+1279
+1280
+1281
+1282
+1283
+1284
+1285
+1286
+1287
+1288
+1289
+1290
+1291
+1292
+1293
+1294
+1295
+1296
+1297
+1298
+1299
+1300
+1301
+1302
+1303
+1304
+1305
+1306
+1307
+1308
+1309
+1310
+1311
+1312
+1313
+1314
+1315
+1316
+1317
+1318
+1319
+1320
+1321
+1322
+1323
+1324
+1325
+1326
+1327
+1328
+1329
+1330
+1331
+1332
+1333
+1334
+1335
+1336
+1337
+1338
+1339
+1340
+1341
+1342
+1343
+1344
+1345
+1346
+1347
+1348
+1349
+1350
+1351
+1352
+1353
+1354
+1355
+1356
+1357
+1358
+1359
+1360
+1361
+1362
+1363
+1364
+1365
+1366
+1367
+1368
+1369
+1370
+1371
+1372
+1373
+1374
+1375
+1376
+1377
+1378
+1379
+1380
+1381
+1382
+1383
+1384
+1385
+1386
+1387
+1388
+1389
+1390
+1391
+1392
+1393
+1394
+1395
+1396
+1397
+1398
+1399
+1400
+1401
+1402
+1403
+1404
+1405
+1406
+1407
+1408
+1409
+1410
+1411
+1412
+1413
+1414
+1415
+1416
+1417
+1418
+1419
+1420
+1421
+1422
+1423
+1424
+1425
+1426
+1427
+1428
+1429
+1430
+1431
+1432
+1433
+1434
+1435
+1436
+1437
+1438
+1439
+1440
+1441
+1442
+1443
+1444
+1445
+1446
+1447
+1448
+1449
+1450
+1451
+1452
+1453
+1454
+1455
+1456
+1457
+1458
+1459
+1460
+1461
+1462
+1463
+1464
+1465
+1466
+1467
+1468
+1469
+1470
+1471
+1472
+1473
+1474
+1475
+1476
+1477
+1478
+1479
+1480
+1481
+1482
+1483
+1484
+1485
+1486
+1487
+1488
+1489
+1490
+1491
+1492
+1493
+1494
+1495
+1496
+1497
+1498
+1499
+1500
+1501
+1502
+1503
+1504
+1505
+1506
+1507
+1508
+1509
+1510
+1511
+1512
+1513
+1514
+1515
+1516
+1517
+1518
+1519
+1520
+1521
+1522
+1523
+1524
+1525
+1526
+1527
+1528
+1529
+1530
+1531
+1532
+1533
+1534
+1535
+1536
+1537
+1538
+1539
+1540
+1541
+1542
+1543
+1544
+1545
+1546
+1547
+1548
+1549
+1550
+1551
+1552
+1553
+1554
+1555
+1556
+1557
+1558
+1559
+1560
+1561
+1562
+1563
+1564
+1565
+1566
+1567
+1568
+1569
+1570
+1571
+1572
+1573
+1574
+1575
+1576
+1577
+1578
+1579
+1580
+1581
+1582
+1583
+1584
+1585
+1586
+1587
+1588
+1589
+1590
+1591
+1592
+1593
+1594
+1595
+1596
+1597
+1598
+1599
+1600
+1601
+1602
+1603
+1604
+1605
+1606
+1607
+1608
+1609
+1610
+1611
+1612
+1613
+1614
+1615
+1616
+1617
+1618
+1619
+1620
+1621
+1622
+1623
+1624
+1625
+1626
+1627
+1628
+1629
+1630
+1631
+1632
+1633
+1634
+1635
+1636
+1637
+1638
+1639
+1640
+1641
+1642
+1643
+1644
+1645
+1646
+1647
+1648
+1649
+1650
+1651
+1652
+1653
+1654
+1655
+1656
+1657
+1658
+1659
+1660
+1661
+1662
+1663
+1664
+1665
+1666
+1667
+1668
+1669
+1670
+1671
+1672
+1673
+1674
+1675
+1676
+1677
+1678
+1679
+1680
+1681
+1682
+1683
+1684
+1685
+1686
+1687
+1688
+1689
+1690
+1691
+1692
+1693
+1694
+1695
+1696
+1697
+1698
+1699
+1700
+1701
+1702
+1703
+1704
+1705
+1706
+1707
+1708
+1709
+1710
+1711
+1712
+1713
+1714
+1715
+1716
+1717
+1718
+1719
+1720
+1721
+1722
+1723
+1724
+1725
+1726
+1727
+1728
+1729
+1730
+1731
+1732
+1733
+1734
+1735
+1736
+1737
+1738
+1739
+1740
+1741
+1742
+1743
+1744
+1745
+1746
+1747
+1748
+1749
+1750
+1751
+1752
+1753
+1754
+1755
+1756
+1757
+1758
+1759
+1760
+1761
+1762
+1763
+1764
+1765
+1766
+1767
+1768
+1769
+1770
+1771
+1772
+1773
+1774
+1775
+1776
+1777
+1778
+1779
+1780
+1781
+1782
+1783
+1784
+1785
+1786
+1787
+1788
+1789
+1790
+1791
+1792
+1793
+1794
+1795
+1796
+1797
+1798
diff --git "a/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava_v2.1/ava_test_excluded_timestamps_v2.1.csv" "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava_v2.1/ava_test_excluded_timestamps_v2.1.csv"
new file mode 100644
index 0000000000000000000000000000000000000000..2082715f073e717d1da7ea5b08ed0d552e50f3d2
--- /dev/null
+++ "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/ava_v2.1/ava_test_excluded_timestamps_v2.1.csv"
@@ -0,0 +1,66 @@
+-FLn0aeA6EU,0913
+72MzYjWz_7g,1184
+72MzYjWz_7g,1185
+72MzYjWz_7g,1186
+72MzYjWz_7g,1187
+72MzYjWz_7g,1188
+72MzYjWz_7g,1189
+72MzYjWz_7g,1190
+72MzYjWz_7g,1191
+72MzYjWz_7g,1192
+72MzYjWz_7g,1193
+72MzYjWz_7g,1197
+72MzYjWz_7g,1198
+72MzYjWz_7g,1199
+72MzYjWz_7g,1200
+BnIFkfDhJ2w,1600
+BnIFkfDhJ2w,1604
+GElolK2jG50,1623
+GQxKfbvL3mg,1434
+IIyYHprTP58,1530
+WMFTBgYWJS8,1286
+WMFTBgYWJS8,1287
+XOe9GeojzCs,1180
+bNP8Q_8u89A,1101
+bNP8Q_8u89A,1490
+bNP8Q_8u89A,1494
+bNP8Q_8u89A,1495
+bNP8Q_8u89A,1505
+bNP8Q_8u89A,1506
+bNP8Q_8u89A,1542
+bNP8Q_8u89A,1543
+ipBRBABLSAk,0936
+ipBRBABLSAk,0937
+ipBRBABLSAk,0938
+ipBRBABLSAk,0939
+ipBRBABLSAk,0940
+ipBRBABLSAk,0941
+ipBRBABLSAk,0942
+ipBRBABLSAk,0944
+ipBRBABLSAk,0946
+ipBRBABLSAk,0947
+ipBRBABLSAk,1538
+ipBRBABLSAk,1539
+ipBRBABLSAk,1540
+ipBRBABLSAk,1541
+ipBRBABLSAk,1542
+ipBRBABLSAk,1546
+ipBRBABLSAk,1548
+ipBRBABLSAk,1552
+ipBRBABLSAk,1553
+ipBRBABLSAk,1558
+ipBRBABLSAk,1559
+ipBRBABLSAk,1560
+ipBRBABLSAk,1561
+ipBRBABLSAk,1562
+ipBRBABLSAk,1567
+ipBRBABLSAk,1568
+ipBRBABLSAk,1569
+ipBRBABLSAk,1570
+ipBRBABLSAk,1571
+ipBRBABLSAk,1589
+ipBRBABLSAk,1590
+ipBRBABLSAk,1592
+ipBRBABLSAk,1598
+ipBRBABLSAk,1600
+zm78XnWN7MU,1607
diff --git "a/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/frame_path.csv" "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/frame_path.csv"
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git "a/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/frame_path.sh" "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/frame_path.sh"
new file mode 100644
index 0000000000000000000000000000000000000000..63cf1ea0c620ed12075edcb349af4d65b2455491
--- /dev/null
+++ "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data/frame_path.sh"
@@ -0,0 +1,17 @@
+#!/bin/bash
+
+cd ./val/cloth
+for clip in `ls`
+do
+ cd $clip
+
+ # path num_frame label
+ path="./val/cloth/${clip}"
+ num_frame=`ls -l |grep "^-"|wc -l`
+ label=1
+ echo "${path} ${num_frame} ${label}" >> ../../val_cloth.txt
+
+ cd ..
+done
+echo "val_cloth is done!!!"
+
diff --git "a/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data_prepare.ipynb" "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data_prepare.ipynb"
new file mode 100644
index 0000000000000000000000000000000000000000..35a8b20b82d63dd5fa1ccaefd7bc162a3719c009
--- /dev/null
+++ "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/data_prepare.ipynb"
@@ -0,0 +1,1903 @@
+{
+ "nbformat": 4,
+ "nbformat_minor": 0,
+ "metadata": {
+ "colab": {
+ "name": "data-prepare.ipynb",
+ "provenance": [],
+ "collapsed_sections": []
+ },
+ "kernelspec": {
+ "name": "python3",
+ "display_name": "Python 3"
+ },
+ "language_info": {
+ "name": "python"
+ }
+ },
+ "cells": [
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "A2b5-15KnlCz",
+ "outputId": "145d56e8-9bf4-40f9-d901-4c3476df9638"
+ },
+ "source": [
+ "!ls"
+ ],
+ "execution_count": null,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "text": [
+ "sample_data test.csv train.csv validate.csv\n"
+ ],
+ "name": "stdout"
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "fab7JHSRoAWq"
+ },
+ "source": [
+ "import pandas as pd \n",
+ "import numpy as np \n",
+ "import math\n",
+ "import os"
+ ],
+ "execution_count": 1,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "YDV2mW9epD3w"
+ },
+ "source": [
+ "df_train = pd.read_csv('train.csv')\n",
+ "df_test = pd.read_csv('test.csv')\n",
+ "df_val = pd.read_csv('validate.csv')"
+ ],
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "rWKI4EZgjoQn"
+ },
+ "source": [
+ "li = ['eating burger','eating cake','eating carrots','eating chips','eating doughnuts','eating hotdog','eating ice cream','eating spaghetti','eating watermelon']\n",
+ "\n",
+ "def change2eating(label):\n",
+ " if label in li:\n",
+ " return 'eating'\n",
+ " return label\n",
+ "df_train['label'] = df_train['label'].apply(change2eating)\n",
+ "df_test['label'] = df_test['label'].apply(change2eating)\n",
+ "df_val['label'] = df_val['label'].apply(change2eating)\n"
+ ],
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 206
+ },
+ "id": "alsCY0etsbeh",
+ "outputId": "1654e1d6-4cb3-4aca-b694-33a89dd62f29"
+ },
+ "source": [
+ "df_train.head()"
+ ],
+ "execution_count": null,
+ "outputs": [
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ " label youtube_id time_start time_end split\n",
+ "0 testifying ---QUuC4vJs 84 94 train\n",
+ "1 eating spaghetti --3ouPhoy2A 20 30 train\n",
+ "2 dribbling basketball --4-0ihtnBU 58 68 train\n",
+ "3 playing tennis --56QUhyDQM 185 195 train\n",
+ "4 tap dancing --6q_33gNew 132 142 train"
+ ]
+ },
+ "metadata": {
+ "tags": []
+ },
+ "execution_count": 10
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 206
+ },
+ "id": "iquBGXDxo_L2",
+ "outputId": "58292b4b-0804-4cec-b91b-54547d550f53"
+ },
+ "source": [
+ "# df_see = df_train.groupby(['label'])\n",
+ "df_val[df_val['label']=='smoking'].head()"
+ ],
+ "execution_count": null,
+ "outputs": [
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ " label youtube_id time_start time_end split\n",
+ "60 smoking -0vzKnJrPQs 96 106 validate\n",
+ "111 smoking -3D9cHBM4bo 27 37 validate\n",
+ "446 smoking -cui-aLqI3g 541 551 validate\n",
+ "850 smoking 0y_OCH1FnSU 0 10 validate\n",
+ "1088 smoking 1o-qGrLLihw 21 31 validate"
+ ]
+ },
+ "metadata": {
+ "tags": []
+ },
+ "execution_count": 6
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "I9wrFwkfonBw"
+ },
+ "source": [
+ "\n",
+ "def t(label_name):\n",
+ " df_1 = df_train[df_train['label']==label_name]\n",
+ " df_2 = df_val[df_val['label']==label_name]\n",
+ " df_3 = df_test[df_test['label']==label_name]\n",
+ " return pd.concat([df_1,df_2,df_3])\n",
+ "\n"
+ ],
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "uhzRR-h9dJFM"
+ },
+ "source": [
+ "\n",
+ "df_val_eat = df_val[df_val['label']=='eating']\n",
+ "df_val_smoke = df_val[df_val['label']=='smoking']\n",
+ "df_val_cloth = df_val[df_val['label']=='folding clothes']\n",
+ "\n",
+ "df_val_all = pd.concat([df_val_eat,df_val_smoke,df_val_cloth])\n",
+ "df_val_all.sort_values('youtube_id').to_csv('kinectics_val_sort_id.csv')"
+ ],
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "Xnv0QbZ4h2EV"
+ },
+ "source": [
+ "df_test_eat = df_test[df_test['label']=='eating']\n",
+ "df_test_smoke = df_test[df_test['label']=='smoking']\n",
+ "df_test_cloth = df_test[df_test['label']=='folding clothes']\n",
+ "\n",
+ "df_test_all = pd.concat([df_test_eat,df_test_smoke,df_test_cloth])\n",
+ "df_test_all.sort_values('youtube_id').to_csv('kinectics_test_sort_id.csv')"
+ ],
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "fLTcIE2K7srC"
+ },
+ "source": [
+ "### smoking 7"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 206
+ },
+ "id": "LGXk7R044Atm",
+ "outputId": "15ad0fe8-f31a-4a6a-c3a5-70819dfabd19"
+ },
+ "source": [
+ "df_smoke = t('smoking')\n",
+ "df_smoke.head()"
+ ],
+ "execution_count": null,
+ "outputs": [
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ " label youtube_id time_start time_end split\n",
+ "150 smoking -3oeeJz_bjk 9 19 train\n",
+ "578 smoking -EDIE7Ywri0 108 118 train\n",
+ "802 smoking -Jt8UtIUgDg 19 29 train\n",
+ "847 smoking -KmI1S3A9Rc 252 262 train\n",
+ "967 smoking -N4zRqR3Gqo 0 10 train"
+ ]
+ },
+ "metadata": {
+ "tags": []
+ },
+ "execution_count": 8
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "NmG5kVxM74Bp"
+ },
+ "source": [
+ "### eating 2"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 206
+ },
+ "id": "3tA9bhxv6MgJ",
+ "outputId": "2038cc8a-3354-4459-a8ec-a4837fa81ba6"
+ },
+ "source": [
+ "df_eat = t('eating')\n",
+ "df_eat.head()"
+ ],
+ "execution_count": null,
+ "outputs": [
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ " label youtube_id time_start time_end split\n",
+ "1 eating --3ouPhoy2A 20 30 train\n",
+ "41 eating -0R6wpipD-c 35 45 train\n",
+ "49 eating -0aDlftNdyw 280 290 train\n",
+ "145 eating -3gv9XDUOFA 19 29 train\n",
+ "169 eating -4CxOUPL6o4 49 59 train"
+ ]
+ },
+ "metadata": {
+ "tags": []
+ },
+ "execution_count": 10
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "DDXRaYzx77bN"
+ },
+ "source": [
+        "### cloth (label 1)\n",
+ "c000 Holding some clothes \n",
+ "\n",
+ "c001 Putting clothes somewhere \n",
+ "\n",
+ "c002 Taking some clothes from somewhere \n",
+ "\n",
+ "c003 Throwing clothes somewhere\n",
+ "\n",
+ "c004 Tidying some clothes\n",
+ "\n",
+ "c005 Washing some clothes\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "3GKmcwR6kF6S"
+ },
+ "source": [
+ "df_cloth = pd.read_csv(\"/content/Charades_v1_train.csv\")\n"
+ ],
+ "execution_count": 23,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 639
+ },
+ "id": "C7G7A9Nukohw",
+ "outputId": "24605d2d-8ec7-4d8e-d7f0-9cba720fc6b4"
+ },
+ "source": [
+ "df_cloth.head()"
+ ],
+ "execution_count": 24,
+ "outputs": [
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ " id subject ... actions length\n",
+ "0 46GP8 HR43 ... c092 11.90 21.20;c147 0.00 12.60 24.83\n",
+ "1 N11GT 0KZ7 ... c098 8.60 14.20;c075 0.00 11.70;c127 0.00 15.2... 18.33\n",
+ "2 0IH69 6RE8 ... NaN 30.25\n",
+ "3 KRF68 YA10 ... c018 22.60 27.80;c141 4.10 9.60;c148 10.30 25.... 30.33\n",
+ "4 MJO7C 6RE8 ... c015 0.00 32.00;c107 0.00 32.00 31.38\n",
+ "\n",
+ "[5 rows x 11 columns]"
+ ]
+ },
+ "metadata": {
+ "tags": []
+ },
+ "execution_count": 24
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "frziAWwNkbyc"
+ },
+ "source": [
+ "cloth_list = ['c000','c001','c002','c003','c004','c005']\n",
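+        "# Each entry in the Charades 'actions' column looks like 'c092 11.90 21.20;c147 0.00 12.60' (code start_sec end_sec, ';'-separated).\n",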
+ "def cloth(action):\n",
+        "  # handle NaN (NaN != NaN evaluates to True)\n",
+ " if action != action:\n",
+ " return 0\n",
+ " tmp = action.split(\";\")\n",
+ " for i in tmp:\n",
+ " if i.split(\" \")[0] in cloth_list:\n",
+ " return i.split(\" \")[0]\n",
+ " return 0\n"
+ ],
+ "execution_count": 25,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "apbU3bZYlMCo"
+ },
+ "source": [
+ "df_cloth_act = df_cloth[[\"id\",\"actions\"]]"
+ ],
+ "execution_count": 26,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "DhrNurRzlL6H",
+ "outputId": "1c6c0065-1204-43d8-ed28-5e1231f01af1"
+ },
+ "source": [
+ "df_cloth_act['act_cloth_exist'] = df_cloth_act['actions'].apply(cloth)"
+ ],
+ "execution_count": 27,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "text": [
+ "/usr/local/lib/python3.7/dist-packages/ipykernel_launcher.py:1: SettingWithCopyWarning: \n",
+ "A value is trying to be set on a copy of a slice from a DataFrame.\n",
+ "Try using .loc[row_indexer,col_indexer] = value instead\n",
+ "\n",
+ "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n",
+ " \"\"\"Entry point for launching an IPython kernel.\n"
+ ],
+ "name": "stderr"
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "B0cCbs_clLxR"
+ },
+ "source": [
+ "def cloth_start(action):\n",
+        "  # handle NaN (NaN != NaN evaluates to True)\n",
+ " if action != action:\n",
+ " return -1\n",
+ " tmp = action.split(\";\")\n",
+ " for i in tmp:\n",
+ " if i.split(\" \")[0] in cloth_list:\n",
+ " return i.split(\" \")[1]\n",
+ " return -1\n",
+ " \n",
+ "def cloth_end(action):\n",
+        "  # handle NaN (NaN != NaN evaluates to True)\n",
+ " if action != action:\n",
+ " return -1\n",
+ " tmp = action.split(\";\")\n",
+ " for i in tmp:\n",
+ " if i.split(\" \")[0] in cloth_list:\n",
+ " return i.split(\" \")[2]\n",
+ " return -1\n",
+ "\n",
+ "def cloth_time(action):\n",
+ " if action != action:\n",
+ " return -1\n",
+ " tmp = action.split(\";\")\n",
+ " for i in tmp:\n",
+ " if i.split(\" \")[0] in cloth_list:\n",
+ " return round(float(i.split(\" \")[2])-float(i.split(\" \")[1]),2)\n",
+ " return -1"
+ ],
+ "execution_count": 28,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "ocdDUwdHmCL9",
+ "outputId": "b078b4d0-1e9b-42f1-958a-cf245a9d3d7b"
+ },
+ "source": [
+ "df_cloth_act['start_time'] = df_cloth_act['actions'].apply(cloth_start)\n",
+ "df_cloth_act['end_time'] = df_cloth_act['actions'].apply(cloth_end)\n",
+ "df_cloth_act['dtime'] = df_cloth_act['actions'].apply(cloth_time)\n",
+ "df_tmp = df_cloth_act[df_cloth_act['act_cloth_exist']!=0]\n",
+ "df_tmp[[\"id\",\"start_time\",\"dtime\"]].to_csv('cloth.csv')"
+ ],
+ "execution_count": 29,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "text": [
+ "/usr/local/lib/python3.7/dist-packages/ipykernel_launcher.py:1: SettingWithCopyWarning: \n",
+ "A value is trying to be set on a copy of a slice from a DataFrame.\n",
+ "Try using .loc[row_indexer,col_indexer] = value instead\n",
+ "\n",
+ "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n",
+ " \"\"\"Entry point for launching an IPython kernel.\n",
+ "/usr/local/lib/python3.7/dist-packages/ipykernel_launcher.py:2: SettingWithCopyWarning: \n",
+ "A value is trying to be set on a copy of a slice from a DataFrame.\n",
+ "Try using .loc[row_indexer,col_indexer] = value instead\n",
+ "\n",
+ "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n",
+ " \n",
+ "/usr/local/lib/python3.7/dist-packages/ipykernel_launcher.py:3: SettingWithCopyWarning: \n",
+ "A value is trying to be set on a copy of a slice from a DataFrame.\n",
+ "Try using .loc[row_indexer,col_indexer] = value instead\n",
+ "\n",
+ "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n",
+ " This is separate from the ipykernel package so we can avoid doing imports until\n"
+ ],
+ "name": "stderr"
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 206
+ },
+ "id": "PTKIFc816Wp4",
+ "outputId": "8ed2433e-b133-4ee5-8728-26cbefa0dfb8"
+ },
+ "source": [
+ "df_cloth = t('folding clothes')\n",
+ "df_cloth.head()"
+ ],
+ "execution_count": null,
+ "outputs": [
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ " label youtube_id time_start time_end split\n",
+ "853 folding clothes -KtT7Q730Yg 8 18 train\n",
+ "1349 folding clothes -U2IQ5qOGmQ 4 14 train\n",
+ "1549 folding clothes -YKDJrc2p5w 9 19 train\n",
+ "2563 folding clothes -qlJmXRti-g 152 162 train\n",
+ "2608 folding clothes -riGpxoagdg 93 103 train"
+ ]
+ },
+ "metadata": {
+ "tags": []
+ },
+ "execution_count": 11
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "v9482sYiXTGN"
+ },
+ "source": [
+ "df_eat_smoke_cloth = pd.concat([df_eat,df_smoke,df_cloth])\n",
+ "df_kinetics_id = df_eat_smoke_cloth.drop_duplicates(['youtube_id'])[['youtube_id']]"
+ ],
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "ocz3tqSvkluU"
+ },
+ "source": [
+ "df_kinetics_id.sort_values('youtube_id').to_csv('kinectics_sort_id.csv')"
+ ],
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "z1JdZ6AbXczh"
+ },
+ "source": [
+ "df_kinetics_id.to_csv('kinetics_id.csv')"
+ ],
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "kD0Qehn4FMYy"
+ },
+ "source": [
+        "### ava enter (label 5, AVA action 30)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "Te6z9Zs6612G"
+ },
+ "source": [
+ "df_ava_train = pd.read_csv('ava_train_v2.1.csv')\n",
+ "df_ava_val = pd.read_csv('ava_val_v2.1.csv')\n",
+ "df_ava_train = df_ava_train.rename(columns={\"id\":\"youtube_id\"})\n",
+ "df_ava_val = df_ava_val.rename(columns={\"id\":\"youtube_id\"})"
+ ],
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 206
+ },
+ "id": "TRI5mvYgTpBM",
+ "outputId": "9ced2662-837f-4c98-9c77-878a12e1f4d8"
+ },
+ "source": [
+ "df_ava_train.head()"
+ ],
+ "execution_count": null,
+ "outputs": [
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ " youtube_id a b c d e label f\n",
+ "0 -5KQ66BBWC4 902 0.077 0.151 0.283 0.811 80 1\n",
+ "1 -5KQ66BBWC4 902 0.077 0.151 0.283 0.811 9 1\n",
+ "2 -5KQ66BBWC4 902 0.226 0.032 0.366 0.497 12 0\n",
+ "3 -5KQ66BBWC4 902 0.226 0.032 0.366 0.497 17 0\n",
+ "4 -5KQ66BBWC4 902 0.226 0.032 0.366 0.497 80 0"
+ ]
+ },
+ "metadata": {
+ "tags": []
+ },
+ "execution_count": 12
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "cjCUGhkGWD5a"
+ },
+ "source": [
+ "df_ava_train = df_ava_train[[\"youtube_id\",\"label\"]]\n",
+ "df_ava_val = df_ava_val[[\"youtube_id\",\"label\"]]"
+ ],
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "ArUJrRrFRshx"
+ },
+ "source": [
+        "# Parse ava_file_names_trainval_v2.1.txt (one 'youtube_id.extension' per line) into a DataFrame\n",
+ "df_tmp = pd.DataFrame(columns=['youtube_id','type',])\n",
+ "with open('/content/ava_file_names_trainval_v2.1.txt') as f:\n",
+ " for i in f.readlines():\n",
+ " s = i.split('.')\n",
+ " id = s[0]\n",
+ " tp = s[1].strip('\\n') \n",
+ " new = pd.DataFrame({\"youtube_id\":[id], \"type\":[tp]})\n",
+ " df_tmp = df_tmp.append(new,ignore_index=True)\n"
+ ],
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "-hf8b_myYZdc"
+ },
+ "source": [
+ "df_enter_train = df_ava_train[df_ava_train['label']==30]\n",
+ "df_enter_val = df_ava_val[df_ava_val['label']==30]"
+ ],
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "akkgjQWSXb3u"
+ },
+ "source": [
+        "# merge the two DataFrames\n",
+ "df_enter_train_type = df_enter_train.merge(df_tmp, on=\"youtube_id\", how=\"inner\")\n",
+ "df_enter_val_type = df_enter_val.merge(df_tmp, on=\"youtube_id\", how=\"inner\")"
+ ],
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "dJ625uK9aNH1"
+ },
+ "source": [
+        "# drop duplicates\n",
+ "df_enter_train_type = df_enter_train_type.drop_duplicates(['youtube_id','a'])[['youtube_id','type','a']]\n",
+ "df_enter_val_type=df_enter_val_type.drop_duplicates(['youtube_id','a'])[['youtube_id','type','a']]"
+ ],
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "u8rXrKv9asYm"
+ },
+ "source": [
+        "# save to csv\n",
+ "# df_enter_val_type.to_csv('enter_v.csv')\n",
+ "# df_enter_train_type.to_csv('enter_t.csv')"
+ ],
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "Yk_YN9YbhE9b"
+ },
+ "source": [
+        "# save to csv\n",
+ "df_enter_val_type.to_csv('enter_v_time.csv')\n",
+ "df_enter_train_type.to_csv('enter_t_time.csv')"
+ ],
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "SncTTE_MFZnM"
+ },
+ "source": [
+        "### ava leave (label 4, AVA action 31)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "RbePfrNftbEY"
+ },
+ "source": [
+ "df_exit_train = df_ava_train[df_ava_train['label']==31]\n",
+ "df_exit_val = df_ava_val[df_ava_val['label']==31]"
+ ],
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "LX3rdoiQtjQ1"
+ },
+ "source": [
+        "# merge the two DataFrames\n",
+ "df_exit_train_type = df_exit_train.merge(df_tmp, on=\"youtube_id\", how=\"inner\")\n",
+ "df_exit_val_type = df_exit_val.merge(df_tmp, on=\"youtube_id\", how=\"inner\")\n",
+        "# drop duplicates\n",
+ "df_exit_train_type = df_exit_train_type.drop_duplicates(['youtube_id','a'])[['youtube_id','type','a']]\n",
+ "df_exit_val_type=df_exit_val_type.drop_duplicates(['youtube_id','a'])[['youtube_id','type','a']]\n"
+ ],
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "Jlt_60C9uBE8"
+ },
+ "source": [
+ ""
+ ],
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "5XD71iKutqTQ"
+ },
+ "source": [
+        "# save to csv\n",
+ "df_exit_train_type.to_csv('exit_t_time.csv')\n",
+ "df_exit_val_type.to_csv('exit_v_time.csv')"
+ ],
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "A2NWZWykuAIj"
+ },
+ "source": [
+        "### Holding a bag (label 6)\n",
+ "c020 Holding a bag"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "3GO6nmNLwiPn"
+ },
+ "source": [
+ "df_bag = pd.read_csv(\"/content/Charades_v1_train.csv\")\n"
+ ],
+ "execution_count": 2,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "uf3vP2irnETH"
+ },
+ "source": [
+ ""
+ ],
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "mPgZ2D54xfVg"
+ },
+ "source": [
+ "bag_list = ['c020']\n",
+ "cloth_list = ['c000','c001','c002','c003','c004','c005']\n",
+ "def bag(action):\n",
+        "  # handle NaN (NaN != NaN evaluates to True)\n",
+ " if action != action:\n",
+ " return 0\n",
+ " tmp = action.split(\";\")\n",
+ " for i in tmp:\n",
+ " if i.split(\" \")[0] in bag_list:\n",
+ " return i.split(\" \")[0]\n",
+ " return 0\n"
+ ],
+ "execution_count": 6,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "LQomSAnaw_jr"
+ },
+ "source": [
+ "df_bag_act = df_bag[[\"id\",\"actions\"]]"
+ ],
+ "execution_count": 7,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "jCRBZxkYzGaL",
+ "outputId": "d4881b7e-b60e-4305-a1b9-39cb7d0c8568"
+ },
+ "source": [
+ "df_bag_act['act_bag_exist'] = df_bag_act['actions'].apply(bag)"
+ ],
+ "execution_count": 8,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "text": [
+ "/usr/local/lib/python3.7/dist-packages/ipykernel_launcher.py:1: SettingWithCopyWarning: \n",
+ "A value is trying to be set on a copy of a slice from a DataFrame.\n",
+ "Try using .loc[row_indexer,col_indexer] = value instead\n",
+ "\n",
+ "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n",
+ " \"\"\"Entry point for launching an IPython kernel.\n"
+ ],
+ "name": "stderr"
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "SIetyoYJ22y2"
+ },
+ "source": [
+ "def bag_start(action):\n",
+        "  # handle NaN (NaN != NaN evaluates to True)\n",
+ " if action != action:\n",
+ " return -1\n",
+ " tmp = action.split(\";\")\n",
+ " for i in tmp:\n",
+ " if i.split(\" \")[0] in bag_list:\n",
+ " return i.split(\" \")[1]\n",
+ " return -1\n"
+ ],
+ "execution_count": 9,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "qU96Sgvd3fYc"
+ },
+ "source": [
+ "def bag_end(action):\n",
+        "  # handle NaN (NaN != NaN evaluates to True)\n",
+ " if action != action:\n",
+ " return -1\n",
+ " tmp = action.split(\";\")\n",
+ " for i in tmp:\n",
+ " if i.split(\" \")[0] in bag_list:\n",
+ " return i.split(\" \")[2]\n",
+ " return -1\n"
+ ],
+ "execution_count": 10,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "iSmvNeNrh2TZ"
+ },
+ "source": [
+ "def bag_time(action):\n",
+ " if action != action:\n",
+ " return -1\n",
+ " tmp = action.split(\";\")\n",
+ " for i in tmp:\n",
+ " if i.split(\" \")[0] in bag_list:\n",
+ " return round(float(i.split(\" \")[2])-float(i.split(\" \")[1]),2)\n",
+ " return -1"
+ ],
+ "execution_count": 19,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "D_p_0gk128k9",
+ "outputId": "2d2462ed-268f-4083-aa34-0221e4fd18eb"
+ },
+ "source": [
+ "df_bag_act['start_time'] = df_bag_act['actions'].apply(bag_start)\n",
+ "df_bag_act['end_time'] = df_bag_act['actions'].apply(bag_end)\n",
+ "df_bag_act['dtime'] = df_bag_act['actions'].apply(bag_time)"
+ ],
+ "execution_count": 20,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "text": [
+ "/usr/local/lib/python3.7/dist-packages/ipykernel_launcher.py:1: SettingWithCopyWarning: \n",
+ "A value is trying to be set on a copy of a slice from a DataFrame.\n",
+ "Try using .loc[row_indexer,col_indexer] = value instead\n",
+ "\n",
+ "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n",
+ " \"\"\"Entry point for launching an IPython kernel.\n",
+ "/usr/local/lib/python3.7/dist-packages/ipykernel_launcher.py:2: SettingWithCopyWarning: \n",
+ "A value is trying to be set on a copy of a slice from a DataFrame.\n",
+ "Try using .loc[row_indexer,col_indexer] = value instead\n",
+ "\n",
+ "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n",
+ " \n",
+ "/usr/local/lib/python3.7/dist-packages/ipykernel_launcher.py:3: SettingWithCopyWarning: \n",
+ "A value is trying to be set on a copy of a slice from a DataFrame.\n",
+ "Try using .loc[row_indexer,col_indexer] = value instead\n",
+ "\n",
+ "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n",
+ " This is separate from the ipykernel package so we can avoid doing imports until\n"
+ ],
+ "name": "stderr"
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "Ct7jbkwgzee2"
+ },
+ "source": [
+ "df_tmp = df_bag_act[df_bag_act['act_bag_exist']!=0]"
+ ],
+ "execution_count": 21,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "J1U9248l3LJi"
+ },
+ "source": [
+ "df_tmp[[\"id\",\"start_time\",\"dtime\"]].to_csv('bag.csv')"
+ ],
+ "execution_count": 22,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "OrzyWuV33_Ay"
+ },
+ "source": [
+ ""
+ ],
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "_h6130lbGGEu"
+ },
+ "source": [
+        "### Splitting the videos\n",
+        "Huawei Cloud: unzip + split the Kinetics clips.\n",
+        "\n",
+        "eating and smoking are progressing slowly.\n",
+        "\n",
+        "Install ffmpeg-python via pip.\n",
+        "\n",
+        "enter and leave (exit) are progressing quickly and can be split on a local machine.\n",
+        "\n",
+        "The cloth and bag datasets have been downloaded, but they have not yet been checked for usability.\n",
+        "\n",
+        "A stairs dataset has not been found yet.\n"
+ ]
+ },
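+    {
+      "cell_type": "markdown",
+      "metadata": {},
+      "source": [
+        "A minimal clip-cutting sketch (added as an illustration, not part of the original workflow): it assumes ffmpeg-python is installed and uses placeholder file paths.\n"
+      ]
+    },
+    {
+      "cell_type": "code",
+      "metadata": {},
+      "source": [
+        "# Hypothetical helper: cut the [time_start, time_end) window out of a downloaded clip with ffmpeg-python.\n",
+        "# The paths below are placeholders, not files that exist in this project.\n",
+        "import ffmpeg\n",
+        "\n",
+        "def cut_clip(raw_path, out_path, time_start, time_end):\n",
+        "    (\n",
+        "        ffmpeg\n",
+        "        .input(raw_path, ss=time_start, t=time_end - time_start)  # seek to start, read only the window\n",
+        "        .output(out_path, c='copy')  # copy the streams instead of re-encoding\n",
+        "        .run(overwrite_output=True, quiet=True)\n",
+        "    )\n",
+        "\n",
+        "# e.g. the first smoking row above: cut_clip('raw/-3oeeJz_bjk.mp4', 'clips/-3oeeJz_bjk_9_19.mp4', 9, 19)\n"
+      ],
+      "execution_count": null,
+      "outputs": []
+    },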
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "h5aaGPS8npdL"
+ },
+ "source": [
+        "### others (label 0)\n",
+        "Charades action codes used for the others class:\n",
+        "\n",
+ "c018\n",
+ "c026\n",
+ "c041\n",
+ "c053\n",
+ "c083\n",
+ "c094\n",
+ "c103\n",
+ "c115\n",
+ "c122\n",
+ "c134\n",
+ "\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "_PO3a6UrnoY3"
+ },
+ "source": [
+ "others_list = [\"c018\",\"c026\",\"c041\",\"c053\",\"c083\",\"c094\",\"c103\",\"c115\",\"c122\",\"c134\"]"
+ ],
+ "execution_count": 32,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "oRYhP7x0pcUO"
+ },
+ "source": [
+ "df_others = df_bag"
+ ],
+ "execution_count": 36,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 639
+ },
+ "id": "YapkdZDTpe5w",
+ "outputId": "1d07d2c4-2081-4953-9eaa-37c73a3d560f"
+ },
+ "source": [
+ "df_others.head()"
+ ],
+ "execution_count": 37,
+ "outputs": [
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ " id subject ... actions length\n",
+ "0 46GP8 HR43 ... c092 11.90 21.20;c147 0.00 12.60 24.83\n",
+ "1 N11GT 0KZ7 ... c098 8.60 14.20;c075 0.00 11.70;c127 0.00 15.2... 18.33\n",
+ "2 0IH69 6RE8 ... NaN 30.25\n",
+ "3 KRF68 YA10 ... c018 22.60 27.80;c141 4.10 9.60;c148 10.30 25.... 30.33\n",
+ "4 MJO7C 6RE8 ... c015 0.00 32.00;c107 0.00 32.00 31.38\n",
+ "\n",
+ "[5 rows x 11 columns]"
+ ]
+ },
+ "metadata": {
+ "tags": []
+ },
+ "execution_count": 37
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "ct0Y0o70uGa4"
+ },
+ "source": [
+ "def others(action):\n",
+        "  # handle NaN (NaN != NaN evaluates to True)\n",
+ " if action != action:\n",
+ " return 0\n",
+ " tmp = action.split(\";\")\n",
+ " for i in tmp:\n",
+ " if i.split(\" \")[0] in others_list:\n",
+ " return i.split(\" \")[0]\n",
+ " return 0"
+ ],
+ "execution_count": 38,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "dWzlc7IQFIwC",
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "outputId": "aa4db5b7-d526-4855-803b-71b930afc3ea"
+ },
+ "source": [
+ "df_others_act = df_others[[\"id\",\"actions\"]]\n",
+ "df_others_act['act_others_exist'] = df_others_act['actions'].apply(others)\n",
+ "def others_start(action):\n",
+        "  # handle NaN (NaN != NaN evaluates to True)\n",
+ " if action != action:\n",
+ " return -1\n",
+ " tmp = action.split(\";\")\n",
+ " for i in tmp:\n",
+ " if i.split(\" \")[0] in others_list:\n",
+ " return i.split(\" \")[1]\n",
+ " return -1\n",
+ " \n",
+ "def others_end(action):\n",
+        "  # handle NaN (NaN != NaN evaluates to True)\n",
+ " if action != action:\n",
+ " return -1\n",
+ " tmp = action.split(\";\")\n",
+ " for i in tmp:\n",
+ " if i.split(\" \")[0] in others_list:\n",
+ " return i.split(\" \")[2]\n",
+ " return -1\n",
+ "\n",
+ "def others_time(action):\n",
+ " if action != action:\n",
+ " return -1\n",
+ " tmp = action.split(\";\")\n",
+ " for i in tmp:\n",
+ " if i.split(\" \")[0] in others_list:\n",
+ " return round(float(i.split(\" \")[2])-float(i.split(\" \")[1]),2)\n",
+ " return -1\n",
+ "\n"
+ ],
+ "execution_count": 39,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "text": [
+ "/usr/local/lib/python3.7/dist-packages/ipykernel_launcher.py:2: SettingWithCopyWarning: \n",
+ "A value is trying to be set on a copy of a slice from a DataFrame.\n",
+ "Try using .loc[row_indexer,col_indexer] = value instead\n",
+ "\n",
+ "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n",
+ " \n"
+ ],
+ "name": "stderr"
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "0FhwNBFmpY3v",
+ "outputId": "c245225f-f49e-4da2-b6a4-625a81e1a79d"
+ },
+ "source": [
+ "df_others_act['start_time'] = df_others_act['actions'].apply(others_start)\n",
+ "df_others_act['end_time'] = df_others_act['actions'].apply(others_end)\n",
+ "df_others_act['dtime'] = df_others_act['actions'].apply(others_time)\n",
+ "df_tmp = df_others_act[df_others_act['act_others_exist']!=0]\n",
+ "df_tmp[[\"id\",\"start_time\",\"dtime\"]].to_csv('others.csv')"
+ ],
+ "execution_count": 40,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "text": [
+ "/usr/local/lib/python3.7/dist-packages/ipykernel_launcher.py:1: SettingWithCopyWarning: \n",
+ "A value is trying to be set on a copy of a slice from a DataFrame.\n",
+ "Try using .loc[row_indexer,col_indexer] = value instead\n",
+ "\n",
+ "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n",
+ " \"\"\"Entry point for launching an IPython kernel.\n",
+ "/usr/local/lib/python3.7/dist-packages/ipykernel_launcher.py:2: SettingWithCopyWarning: \n",
+ "A value is trying to be set on a copy of a slice from a DataFrame.\n",
+ "Try using .loc[row_indexer,col_indexer] = value instead\n",
+ "\n",
+ "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n",
+ " \n",
+ "/usr/local/lib/python3.7/dist-packages/ipykernel_launcher.py:3: SettingWithCopyWarning: \n",
+ "A value is trying to be set on a copy of a slice from a DataFrame.\n",
+ "Try using .loc[row_indexer,col_indexer] = value instead\n",
+ "\n",
+ "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n",
+ " This is separate from the ipykernel package so we can avoid doing imports until\n"
+ ],
+ "name": "stderr"
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "1WlMxRnSple7"
+ },
+ "source": [
+ ""
+ ],
+ "execution_count": null,
+ "outputs": []
+ }
+ ]
+}
\ No newline at end of file
diff --git "a/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/main.ipynb" "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/main.ipynb"
new file mode 100644
index 0000000000000000000000000000000000000000..e281ae5fd303cadcf9396a9ea3305150299b8e26
--- /dev/null
+++ "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/main.ipynb"
@@ -0,0 +1 @@
+{"cells":[{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["import cv2 \r\n","import math \r\n","import matplotlib.pyplot as plt \r\n","%matplotlib inline\r\n","import pandas as pd\r\n","from keras.preprocessing import image \r\n","import numpy as np \r\n","from keras.utils import np_utils\r\n","from skimage.transform import resize \r\n","from sklearn.model_selection import train_test_split\r\n","from glob import glob\r\n","from tqdm import tqdm\r\n","import moxing as mox\r\n","import glob\r\n","import os\r\n","\r\n"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["train_image = []\r\n","\r\n","for i in tqdm(range(df_train.shape[0])):\r\n"," dir = df_train['path'][i]\r\n"," \r\n"," img = image.load_img(dir+\"/000005.jpg\", target_size=(224,224,3))\r\n"," img = image.img_to_array(img)\r\n"," img = img/255\r\n"," train_image.append(img)\r\n"," \r\n"," img = image.load_img(dir+\"/000010.jpg\", target_size=(224,224,3))\r\n"," img = image.img_to_array(img)\r\n"," img = img/255\r\n"," train_image.append(img)\r\n","\r\n"," img = image.load_img(dir+\"/000015.jpg\", target_size=(224,224,3))\r\n"," img = image.img_to_array(img)\r\n"," img = img/255\r\n"," train_image.append(img)\r\n"," \r\n"," img = image.load_img(dir+\"/000020.jpg\", target_size=(224,224,3))\r\n"," img = image.img_to_array(img)\r\n"," img = img/255\r\n"," train_image.append(img)\r\n"," \r\n","X = np.array(train_image)\r\n","\r\n","X.shape"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["mox.file.copy_parallel('obs://mlpr-classification-data/data/val.csv', 'val.csv')\n","mox.file.copy_parallel('obs://mlpr-classification-data/data/train.csv', 'train.csv')\n","df_train = pd.read_csv('train.csv')\n","df_val = pd.read_csv(\"val.csv\")"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["# !unzip -d train train.zip\n","# !unzip -d val val.zip"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["mox.file.copy_parallel('obs://mlpr-classification-data/data/val/val_frames.zip', 'val.zip')"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["train_img = []"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["val_image = []\r\n","for i in tqdm(range(df_val.shape[0])):\r\n"," dir = df_val['path'][i]\r\n"," cls = df_val['class'][i]\r\n"," \r\n"," if df_val['num_frame'][i]==0:\r\n"," continue\r\n","\r\n"," img = image.load_img(dir+\"/000005.jpg\", target_size=(224,224,3))\r\n"," img = image.img_to_array(img)\r\n"," img = img/255\r\n"," val_image.append(img)\r\n"," df_new= df_new.append(pd.DataFrame({'img_path':[dir+\"/000005.jpg\"],'class':[cls]}), ignore_index=True)\r\n","\r\n"," img = image.load_img(dir+\"/000010.jpg\", target_size=(224,224,3))\r\n"," img = image.img_to_array(img)\r\n"," img = img/255\r\n"," val_image.append(img)\r\n"," df_new= df_new.append(pd.DataFrame({'img_path':[dir+\"/000010.jpg\"],'class':[cls]}), ignore_index=True)\r\n","\r\n","\r\n"," img = image.load_img(dir+\"/000015.jpg\", target_size=(224,224,3))\r\n"," img = image.img_to_array(img)\r\n"," img = img/255\r\n"," val_image.append(img)\r\n"," df_new= df_new.append(pd.DataFrame({'img_path':[dir+\"/000015.jpg\"],'class':[cls]}), ignore_index=True)\r\n","\r\n"," img = image.load_img(dir+\"/000020.jpg\", target_size=(224,224,3))\r\n"," img = 
image.img_to_array(img)\r\n"," img = img/255\r\n"," val_image.append(img)\r\n"," df_new= df_new.append(pd.DataFrame({'img_path':[dir+\"/000020.jpg\"],'class':[cls]}), ignore_index=True)\r\n","\r\n"," \r\n"," \r\n","X_val = np.array(val_image)\r\n","X_val.shape"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["t = df_train[df_train['num_frame']!=0][['class']]"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["df_new = pd.DataFrame(columns=['img_path', 'class'])"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["df_new.head()"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["df_new_train = pd.DataFrame(columns=['img_path', 'class'])"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["train_image = []\r\n","for i in tqdm(range(df_train.shape[0])):\r\n"," dir = df_train['path'][i]\r\n"," cls = df_train['class'][i]\r\n"," \r\n"," if df_train['num_frame'][i]==0:\r\n"," continue\r\n","\r\n","\r\n"," img = image.load_img(dir+\"/000005.jpg\", target_size=(224,224,3))\r\n"," img = image.img_to_array(img)\r\n"," img = img/255\r\n"," train_image.append(img)\r\n"," df_new_train= df_new_train.append(pd.DataFrame({'img_path':[dir+\"/000005.jpg\"],'class':[cls]}), ignore_index=True)\r\n","\r\n"," img = image.load_img(dir+\"/000010.jpg\", target_size=(224,224,3))\r\n"," img = image.img_to_array(img)\r\n"," img = img/255\r\n"," train_image.append(img)\r\n"," df_new_train= df_new_train.append(pd.DataFrame({'img_path':[dir+\"/000010.jpg\"],'class':[cls]}), ignore_index=True)\r\n","\r\n","\r\n"," img = image.load_img(dir+\"/000015.jpg\", target_size=(224,224,3))\r\n"," img = image.img_to_array(img)\r\n"," img = img/255\r\n"," train_image.append(img)\r\n"," df_new_train= df_new_train.append(pd.DataFrame({'img_path':[dir+\"/000015.jpg\"],'class':[cls]}), ignore_index=True)\r\n","\r\n"," img = image.load_img(dir+\"/000020.jpg\", target_size=(224,224,3))\r\n"," img = image.img_to_array(img)\r\n"," img = img/255\r\n"," train_image.append(img)\r\n"," df_new_train= df_new_train.append(pd.DataFrame({'img_path':[dir+\"/000020.jpg\"],'class':[cls]}), ignore_index=True)\r\n","\r\n"," \r\n","X_train = np.array(train_image)\r\n","\r\n","X_train.shape"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["df_new_train.head()"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["y_train = df_new_train['class']\n","y_val = df_new['class']\n","y_train = pd.get_dummies(y_train)\n","y_val = pd.get_dummies(y_val)"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["base_model = VGG16(weights='imagenet', include_top=False)"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["import keras\n","from keras.models import Sequential\n","from keras.applications.vgg16 import VGG16\n","from keras.layers import Dense, InputLayer, Dropout, Flatten\n","from keras.layers import Conv2D, MaxPooling2D, GlobalMaxPooling2D\n","from keras.preprocessing import image\n","import numpy as np\n","import pandas as pd\n","import matplotlib.pyplot as plt\n","from tqdm import tqdm\n","from sklearn.model_selection import train_test_split\n"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["# base_model = 
VGG16(weights='imagenet', include_top=False)\r\n","\r\n","X_val = base_model.predict(X_val)\r\n","X_val.shape\r\n"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["X_train = base_model.predict(X_train)\r\n","X_train.shape"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["# mox.file.copy_parallel('obs://mlpr-classification-data/data/vgg16_weights_tf_dim_ordering_tf_kernels_notop.h5', 'vgg16_weights_tf_dim_ordering_tf_kernels_notop.h5')\n"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["X_train = X_train.reshape(4036, 7*7*512)\n","X_val = X_val.reshape(656, 7*7*512)"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["max = X_train.max()\n","X_train = X_train/max\n","X_val = X_val/max"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["X_train.shape"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["model = Sequential()\r\n","model.add(Dense(1024, activation='relu', input_shape=(25088,)))\r\n","model.add(Dropout(0.5))\r\n","model.add(Dense(512, activation='relu'))\r\n","model.add(Dropout(0.5))\r\n","model.add(Dense(256, activation='relu'))\r\n","model.add(Dropout(0.5))\r\n","model.add(Dense(128, activation='relu'))\r\n","model.add(Dropout(0.5))\r\n","model.add(Dense(8, activation='softmax'))"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["from keras.callbacks import ModelCheckpoint\n","mcp_save = ModelCheckpoint('weight.hdf5', save_best_only=True, monitor='val_loss', mode='min')"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["model.compile(loss='categorical_crossentropy',optimizer='Adam',metrics=['accuracy'])"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["model.fit(X_train, y_train, epochs=200, validation_data=(X_val, y_val), callbacks=[mcp_save], batch_size=128)"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["mox.file.copy_parallel('weight.hdf5','obs://mlpr-classification-data/data/weight.hdf5', )\n"]}],"metadata":{"interpreter":{"hash":"9c3aa38a2d651aa120c9aa3dfa4256cf305d5ea997da8fd3b7d02e7067957313"},"kernelspec":{"display_name":"Python 3.8.6 64-bit","name":"python3"},"language_info":{"name":"python","version":""}},"nbformat":4,"nbformat_minor":4}
\ No newline at end of file
diff --git "a/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/tmp.ipynb" "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/tmp.ipynb"
new file mode 100644
index 0000000000000000000000000000000000000000..0aff356dec3479c5c40023e401dabde28f151d9c
--- /dev/null
+++ "b/code/2021_spring/\345\212\250\344\275\234\350\257\206\345\210\253-\346\235\250\346\200\235\351\233\250/tmp.ipynb"
@@ -0,0 +1 @@
+{"cells":[{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["from keras.models import Sequential\n","from keras.layers import Dense, Dropout, Flatten\n","from keras.layers import Conv2D, MaxPooling2D\n","from keras.preprocessing import image\n","import numpy as np\n","import pandas as pd\n","from tqdm import tqdm\n","from keras.applications.vgg16 import VGG16\n","import cv2\n","import math\n","import os\n","from glob import glob\n","from scipy import stats as s\n","\n"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["base_model = VGG16(weights='imagenet', include_top=False)"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["model = Sequential()\n","model.add(Dense(1024, activation='relu', input_shape=(25088,)))\n","model.add(Dropout(0.5))\n","model.add(Dense(512, activation='relu'))\n","model.add(Dropout(0.5))\n","model.add(Dense(256, activation='relu'))\n","model.add(Dropout(0.5))\n","model.add(Dense(128, activation='relu'))\n","model.add(Dropout(0.5))\n","model.add(Dense(8, activation='softmax'))\n"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["import moxing as mox\n","mox.file.copy_parallel('obs://mlpr-classification-data/data/weight.hdf5', 'weight.hdf5')"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["model.load_weights(\"weight.hdf5\")\n"]},{"cell_type":"code","execution_count":null,"metadata":{"trusted":true},"outputs":[],"source":["model.compile(loss='categorical_crossentropy',optimizer='Adam',metrics=['accuracy'])"]}],"metadata":{"interpreter":{"hash":"9c3aa38a2d651aa120c9aa3dfa4256cf305d5ea997da8fd3b7d02e7067957313"},"kernelspec":{"display_name":"Python 3.8.6 64-bit","name":"python3"},"language_info":{"name":"python","version":""}},"nbformat":4,"nbformat_minor":4}
\ No newline at end of file