Refactor hysteretic curves processing and prediction scripts

- Added a new function `load_tfdmap` to load TFDMap data from files, ensuring proper error handling for missing or malformed data.
- Updated the hysteretic curves processing script to include TFDMap data in the point-level DataFrame, maintaining alignment with case folders.
- Enhanced the prediction script to handle multiple target columns dynamically, improving flexibility for model training and evaluation.
- Modified the caching mechanism for window sizes to use a tuple key for better organization and retrieval.
- Improved logging and error messages for better debugging and user feedback.
- Updated the run script to ensure consistent execution of prediction tasks with specified parameters.
parent f2ad8555
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
Case,tw1,tw2,Displ,CumDispl,LoadDir,CycleNum,MaxAmpl,Force,Force_RNN
12,12.76,15.0,0.0,0.0,1.0,1,0.049600873,0.0,
12,12.76,15.0,0.0009600229,0.0009600229,1.0,1,0.049600873,0.0,
12,12.76,15.0,0.0019200301,0.0019200301,1.0,1,0.049600873,-73125.12,
12,12.76,15.0,0.0028800159,0.0028800159,1.0,1,0.049600873,-151487.12,
12,12.76,15.0,0.0038399855,0.0038399855,1.0,1,0.049600873,-225485.12,
12,12.76,15.0,0.0047999951,0.0047999951,1.0,1,0.049600873,-289811.98,
12,12.76,15.0,0.0057599741,0.0057599741,1.0,1,0.049600873,-329528.94,
12,12.76,15.0,0.0067199625,0.0067199625,1.0,1,0.049600873,-347578.5,
12,12.76,15.0,0.0076800027,0.0076800027,1.0,1,0.049600873,-359537.43,
12,12.76,15.0,0.0086399859,0.0086399859,1.0,1,0.049600873,-369359.6,
12,12.76,15.0,0.0095999885,0.0095999885,1.0,1,0.049600873,-377965.75,
12,12.76,15.0,0.010559995,0.010559995,1.0,1,0.049600873,-385727.06,
12,12.76,15.0,0.011519997,0.011519997,1.0,1,0.049600873,-392382.95,
12,12.76,15.0,0.011520085,0.012288039,-1.0,2,0.049600873,-345240.24,
12,12.76,15.0,0.010560089,0.013248035,-1.0,2,0.049600873,-263238.67,
12,12.76,15.0,0.009600078,0.014208046,-1.0,2,0.049600873,-184305.38,
12,12.76,15.0,0.0086400498,0.0151680742,-1.0,2,0.049600873,-106730.64,
12,12.76,15.0,0.0076800899,0.0161280340999999,-1.0,2,0.049600873,-32994.613,
12,12.76,15.0,0.0067200305,0.0170880935,-1.0,2,0.049600873,0.0,
12,12.76,15.0,0.0057600554,0.0180480686,-1.0,2,0.049600873,1274.1645,11563.1328125
12,12.76,15.0,0.0048000575,0.0190080665,-1.0,2,0.049600873,59143.247,58353.359375
12,12.76,15.0,0.003840103,0.019968021,-1.0,2,0.049600873,117899.36,119181.9609375
12,12.76,15.0,0.002880074,0.02092805,-1.0,2,0.049600873,172293.01,172919.15625
12,12.76,15.0,0.0019201034,0.0218880206,-1.0,2,0.049600873,220117.37,222401.484375
12,12.76,15.0,0.00096008975,0.02284803425,-1.0,2,0.049600873,258703.81,258541.453125
12,12.76,15.0,7.8867329e-08,0.023808045132671,-1.0,2,0.049600873,287977.49,284805.9375
12,12.76,15.0,-0.00095990783,0.02476803183,-1.0,2,0.049600873,309649.37,305397.3125
12,12.76,15.0,-0.001919891,0.025728015,-1.0,2,0.049600873,325858.74,322626.84375
12,12.76,15.0,-0.0028799375,0.0266880615,-1.0,2,0.049600873,339047.44,337545.09375
12,12.76,15.0,-0.003839951,0.027648075,-1.0,2,0.049600873,351058.48,350782.40625
12,12.76,15.0,-0.0047999289,0.0286080529,-1.0,2,0.049600873,361809.08,361918.53125
12,12.76,15.0,-0.0057599624,0.0295680864,-1.0,2,0.049600873,371157.27,370323.1875
12,12.76,15.0,-0.00671997,0.0305280939999999,-1.0,2,0.049600873,379405.7,378727.84375
12,12.76,15.0,-0.0076799603,0.0314880843,-1.0,2,0.049600873,386813.52,389233.625
12,12.76,15.0,-0.0086399481,0.0324480720999999,-1.0,2,0.049600873,393522.49,388603.28125
12,12.76,15.0,-0.0095999443,0.0334080682999999,-1.0,2,0.049600873,399997.11,400369.78125
12,12.76,15.0,-0.010559948,0.034368072,-1.0,2,0.049600873,406233.63,404362.0
12,12.76,15.0,-0.011519962,0.035328086,-1.0,2,0.049600873,411882.43,399949.5625
12,12.76,15.0,-0.011519962,0.036096004,1.0,3,0.049600873,365636.16,348261.0
12,12.76,15.0,-0.010559962,0.037056004,1.0,3,0.049600873,284305.32,277662.0
12,12.76,15.0,-0.0095999076,0.0380160584,1.0,3,0.049600873,207052.03,197502.734375
12,12.76,15.0,-0.008639969,0.038975997,1.0,3,0.049600873,129930.93,117658.6171875
12,12.76,15.0,-0.0076799712,0.0399359948,1.0,3,0.049600873,57076.753,52995.3984375
12,12.76,15.0,-0.0067199087,0.0408960573,1.0,3,0.049600873,0.0,11182.2978515625
12,12.76,15.0,-0.005759939,0.041856027,1.0,3,0.049600873,0.0,-16553.025390625
12,12.76,15.0,-0.0047998948,0.0428160712,1.0,3,0.049600873,-36588.629,-46862.2734375
12,12.76,15.0,-0.0038399758,0.0437759901999999,1.0,3,0.049600873,-94568.829,-93928.2734375
12,12.76,15.0,-0.0028799434,0.0447360226,1.0,3,0.049600873,-150514.62,-150239.375
12,12.76,15.0,-0.0019199088,0.0456960572,1.0,3,0.049600873,-201342.41,-199511.59375
12,12.76,15.0,-0.00095997256,0.04665599344,1.0,3,0.049600873,-244843.12,-242165.140625
12,12.76,15.0,6.8131205e-08,0.047616034131205,1.0,3,0.049600873,-279940.85,-280196.1875
12,12.76,15.0,0.00096010865,0.04857607465,1.0,3,0.049600873,-306909.65,-310452.90625
12,12.76,15.0,0.0019200577,0.0495360237,1.0,3,0.049600873,-327340.74,-333145.4375
12,12.76,15.0,0.0028801068,0.0504960727999999,1.0,3,0.049600873,-343487.44,-349954.71875
12,12.76,15.0,0.0038400806,0.0514560465999999,1.0,3,0.049600873,-357557.72,-362561.6875
12,12.76,15.0,0.0048000654,0.0524160313999999,1.0,3,0.049600873,-369655.17,-372647.25
12,12.76,15.0,0.0057600514,0.0533760173999999,1.0,3,0.049600873,-379743.8,-380421.53125
12,12.76,15.0,0.0067200322,0.0543359982,1.0,3,0.049600873,-387883.56,-386514.90625
12,12.76,15.0,0.0076800881,0.0552960541,1.0,3,0.049600873,-394858.79,-391767.8125
12,12.76,15.0,0.0086400472,0.0562560132,1.0,3,0.049600873,-400523.79,-397020.71875
12,12.76,15.0,0.0096000768,0.0572160428,1.0,3,0.049600873,-405778.62,-402063.5
12,12.76,15.0,0.010560094,0.05817606,1.0,3,0.049600873,-410655.44,-406896.15625
12,12.76,15.0,0.011520096,0.059136062,1.0,3,0.049600873,-414897.5,-407106.28125
12,12.76,15.0,0.011520182,0.0599041379999999,-1.0,4,0.049600873,-367996.08,-382312.59375
12,12.76,15.0,0.010560187,0.0608641329999999,-1.0,4,0.049600873,-287543.45,-308351.71875
12,12.76,15.0,0.0096001757,0.0618241442999999,-1.0,4,0.049600873,-211157.82,-219472.609375
12,12.76,15.0,0.0086402332,0.0627840867999999,-1.0,4,0.049600873,-134861.59,-145721.875
12,12.76,15.0,0.0076801853,0.0637441347,-1.0,4,0.049600873,-62499.333,-69922.5078125
12,12.76,15.0,0.006720206,0.064704114,-1.0,4,0.049600873,0.0,-21228.107421875
12,12.76,15.0,0.0057602204,0.0656640996,-1.0,4,0.049600873,0.0,8916.9833984375
12,12.76,15.0,0.0048002256,0.0666240944,-1.0,4,0.049600873,31540.194,43553.3046875
12,12.76,15.0,0.0038401912,0.0675841288,-1.0,4,0.049600873,88295.394,99063.34375
12,12.76,15.0,0.002880183,0.068544137,-1.0,4,0.049600873,144226.08,149911.4375
12,12.76,15.0,0.0019201723,0.0695041477,-1.0,4,0.049600873,195479.48,201600.0
12,12.76,15.0,0.00096022418,0.07046409582,-1.0,4,0.049600873,240004.19,246354.71875
12,12.76,15.0,1.7269626e-07,0.07142414730374,-1.0,4,0.049600873,276756.37,279763.15625
12,12.76,15.0,-0.00095983223,0.07238415223,-1.0,4,0.049600873,305075.32,304556.84375
12,12.76,15.0,-0.0019197623,0.0733440823,-1.0,4,0.049600873,326677.63,324307.78125
12,12.76,15.0,-0.0028797837,0.0743041037,-1.0,4,0.049600873,343607.91,340696.8125
12,12.76,15.0,-0.0038398009,0.0752641209,-1.0,4,0.049600873,358212.51,354564.46875
12,12.76,15.0,-0.0047998111,0.0762241311,-1.0,4,0.049600873,370649.0,365910.75
12,12.76,15.0,-0.005759811,0.077184131,-1.0,4,0.049600873,380924.28,374315.40625
12,12.76,15.0,-0.0067198051,0.0781441251,-1.0,4,0.049600873,389242.0,383560.5
12,12.76,15.0,-0.007679804,0.079104124,-1.0,4,0.049600873,396329.19,390914.5625
12,12.76,15.0,-0.0086398152,0.0800641352,-1.0,4,0.049600873,402067.93,394486.53125
12,12.76,15.0,-0.0095998312,0.0810241512,-1.0,4,0.049600873,407371.07,405622.6875
12,12.76,15.0,-0.01055976,0.08198408,-1.0,4,0.049600873,412278.88,410035.125
12,12.76,15.0,-0.011519778,0.082944098,-1.0,4,0.049600873,416528.01,407303.625
12,12.76,15.0,-0.011519868,0.083712096,1.0,5,0.049600873,369854.31,357296.0
12,12.76,15.0,-0.010559876,0.084672088,1.0,5,0.049600873,290181.63,287117.21875
12,12.76,15.0,-0.0095998283,0.0856321357000001,1.0,5,0.049600873,214496.42,206747.84375
12,12.76,15.0,-0.0086398972,0.0865920668000001,1.0,5,0.049600873,138884.31,125958.203125
12,12.76,15.0,-0.0076799073,0.0875520567,1.0,5,0.049600873,66887.207,58799.8515625
12,12.76,15.0,-0.006719854,0.08851211,1.0,5,0.049600873,629.76786,14675.4775390625
12,12.76,15.0,-0.0057598955,0.0894720685000001,1.0,5,0.049600873,0.0,-13611.400390625
12,12.76,15.0,-0.0047998582,0.0904321058000001,1.0,5,0.049600873,-27814.934,-42975.1171875
12,12.76,15.0,-0.0038398607,0.0913921033000001,1.0,5,0.049600873,-83140.925,-89095.6015625
12,12.76,15.0,-0.0028798336,0.0923521304000001,1.0,5,0.049600873,-138967.58,-147507.859375
12,12.76,15.0,-0.0019198869,0.0933120771000001,1.0,5,0.049600873,-190231.74,-200562.171875
12,12.76,15.0,-0.00095986541,0.0942720985900001,1.0,5,0.049600873,-235261.69,-246577.578125
12,12.76,15.0,1.5436796e-07,0.0952321183679601,1.0,5,0.049600873,-272754.91,-287550.25
12,12.76,15.0,0.0020001588,0.0972321228000001,1.0,5,0.049600873,-333523.8,-325161.03125
12,12.76,15.0,0.0040001755,0.0992321395000001,1.0,5,0.049600873,-367082.15,-356888.53125
12,12.76,15.0,0.00600004,0.1012320040000001,1.0,5,0.049600873,-390968.09,-382102.46875
12,12.76,15.0,0.0080000526,0.1032320166000001,1.0,5,0.049600873,-406756.71,-401853.375
12,12.76,15.0,0.010000188,0.1052321520000001,1.0,5,0.049600873,-418303.32,-416561.5
12,12.76,15.0,0.01200008,0.1072320440000001,1.0,5,0.049600873,-427609.39,-428328.0
12,12.76,15.0,0.014000051,0.1092320150000001,1.0,5,0.049600873,-435661.82,-437573.125
12,12.76,15.0,0.016000068,0.1112320320000001,1.0,5,0.049600873,-444516.41,-446818.21875
12,12.76,15.0,0.018000104,0.1132320680000001,1.0,5,0.049600873,-453624.01,-456903.78125
12,12.76,15.0,0.020000155,0.1152321190000001,1.0,5,0.049600873,-462593.31,-469931.0
12,12.76,15.0,0.022000036,0.1172320000000001,1.0,5,0.049600873,-471308.99,-485059.34375
12,12.76,15.0,0.024000109,0.1192320730000001,1.0,5,0.049600873,-479746.49,-477915.40625
12,12.76,15.0,0.024000292,0.1208320680000001,-1.0,6,0.049600873,-383953.62,-392608.28125
12,12.76,15.0,0.022000317,0.1228320430000001,-1.0,6,0.049600873,-221509.43,-235231.328125
12,12.76,15.0,0.020000272,0.1248320880000001,-1.0,6,0.049600873,-71734.652,-89200.65625
12,12.76,15.0,0.018000141,0.1268322190000001,-1.0,6,0.049600873,24901.57,27998.1484375
12,12.76,15.0,0.016000145,0.1288322150000001,-1.0,6,0.049600873,141879.29,138775.28125
12,12.76,15.0,0.014000254,0.1308321060000001,-1.0,6,0.049600873,226958.62,219985.15625
12,12.76,15.0,0.012000307,0.1328320530000001,-1.0,6,0.049600873,279260.42,273459.6875
12,12.76,15.0,0.01000028,0.1348320800000001,-1.0,6,0.049600873,311565.26,310440.09375
12,12.76,15.0,0.008000236,0.1368321240000001,-1.0,6,0.049600873,332888.17,330401.125
12,12.76,15.0,0.0060002174,0.1388321426000001,-1.0,6,0.049600873,348990.11,342587.875
12,12.76,15.0,0.0040002289,0.1408321311000001,-1.0,6,0.049600873,362473.33,356455.53125
12,12.76,15.0,0.0020001437,0.1428322163000001,-1.0,6,0.049600873,377256.14,377257.03125
12,12.76,15.0,1.5892522e-07,0.1448322010747801,-1.0,6,0.049600873,395245.42,396167.46875
12,12.76,15.0,-0.0019997257,0.1468320857000001,-1.0,6,0.049600873,411228.24,413186.875
12,12.76,15.0,-0.0039998562,0.1488322162000001,-1.0,6,0.049600873,427008.47,427684.875
12,12.76,15.0,-0.0059998537,0.1508322137000001,-1.0,6,0.049600873,441662.44,440081.71875
12,12.76,15.0,-0.0079997082,0.1528320682000001,-1.0,6,0.049600873,452039.9,451007.75
12,12.76,15.0,-0.0099997658,0.1548321258000001,-1.0,6,0.049600873,460211.56,460462.96875
12,12.76,15.0,-0.011999851,0.1568322110000001,-1.0,6,0.049600873,468299.52,468027.15625
12,12.76,15.0,-0.013999782,0.1588321420000001,-1.0,6,0.049600873,476152.86,479373.4375
12,12.76,15.0,-0.01599973,0.16083209,-1.0,6,0.049600873,484129.34,487778.0625
12,12.76,15.0,-0.017999695,0.162832055,-1.0,6,0.049600873,492079.0,491560.15625
12,12.76,15.0,-0.019999834,0.164832194,-1.0,6,0.049600873,499908.52,499124.34375
12,12.76,15.0,-0.02199976,0.16683212,-1.0,6,0.049600873,507543.45,506268.28125
12,12.76,15.0,-0.023999815,0.168832175,-1.0,6,0.049600873,514961.8,510890.84375
12,12.76,15.0,-0.023999862,0.17043207,1.0,7,0.049600873,415746.91,407303.625
12,12.76,15.0,-0.02199991,0.172432022,1.0,7,0.049600873,257821.69,251397.5
12,12.76,15.0,-0.019999946,0.174431986,1.0,7,0.049600873,107920.86,98643.109375
12,12.76,15.0,-0.01799995,0.1764319820000001,1.0,7,0.049600873,15794.489,-19520.912109375
12,12.76,15.0,-0.015999824,0.1784321080000001,1.0,7,0.049600873,-110553.53,-112838.7109375
12,12.76,15.0,-0.01399989,0.180432042,1.0,7,0.049600873,-199710.95,-205394.84375
12,12.76,15.0,-0.011999892,0.18243204,1.0,7,0.049600873,-263295.27,-270110.59375
12,12.76,15.0,-0.0099997785,0.1844321535,1.0,7,0.049600873,-304913.4,-311083.25
12,12.76,15.0,-0.0079998688,0.1864320632,1.0,7,0.049600873,-333057.67,-335876.9375
12,12.76,15.0,-0.0059999042,0.1884320278,1.0,7,0.049600873,-353736.13,-354367.15625
12,12.76,15.0,-0.003999881,0.1904320510000001,1.0,7,0.049600873,-370058.81,-370546.09375
12,12.76,15.0,-0.001999789,0.1924321430000001,1.0,7,0.049600873,-386512.44,-387565.5
12,12.76,15.0,1.9164559e-07,0.1944321236455901,1.0,7,0.049600873,-405669.06,-406686.0625
12,12.76,15.0,0.0020000653,0.1964319973000001,1.0,7,0.049600873,-423187.56,-426647.09375
12,12.76,15.0,0.0040001921,0.1984321241000001,1.0,7,0.049600873,-439664.08,-444296.84375
12,12.76,15.0,0.006000214,0.2004321460000001,1.0,7,0.049600873,-455151.16,-459004.96875
12,12.76,15.0,0.0080001322,0.2024320642000001,1.0,7,0.049600873,-467333.92,-469931.0
12,12.76,15.0,0.010000109,0.2044320410000001,1.0,7,0.049600873,-475301.62,-477915.40625
12,12.76,15.0,0.012000136,0.2064320680000002,1.0,7,0.049600873,-482804.56,-483378.40625
12,12.76,15.0,0.014000142,0.2084320740000002,1.0,7,0.049600873,-488885.66,-487160.5
12,12.76,15.0,0.016000178,0.2104321100000003,1.0,7,0.049600873,-494666.52,-490522.375
12,12.76,15.0,0.018000108,0.2124320400000003,1.0,7,0.049600873,-500132.27,-494304.46875
12,12.76,15.0,0.020000143,0.2144320750000003,1.0,7,0.049600873,-505330.13,-501028.15625
12,12.76,15.0,0.022000139,0.2164320710000003,1.0,7,0.049600873,-510283.31,-511954.21875
12,12.76,15.0,0.024000134,0.2184320660000003,1.0,7,0.049600873,-515047.29,-515316.09375
12,12.76,15.0,0.024000294,0.2200320560000003,-1.0,8,0.049600873,-415249.31,-434631.5
12,12.76,15.0,0.022000291,0.2220320590000003,-1.0,8,0.049600873,-257005.01,-275153.40625
12,12.76,15.0,0.020000201,0.2240321490000003,-1.0,8,0.049600873,-107426.53,-110632.4921875
12,12.76,15.0,0.018000211,0.2260321390000003,-1.0,8,0.049600873,-15435.138,1779.603515625
12,12.76,15.0,0.016000204,0.2280321460000003,-1.0,8,0.049600873,110483.45,102215.0859375
12,12.76,15.0,0.014000169,0.2300321810000003,-1.0,8,0.049600873,200116.13,195821.796875
12,12.76,15.0,0.012000264,0.2320320860000003,-1.0,8,0.049600873,264950.49,256860.515625
12,12.76,15.0,0.010000213,0.2340321370000003,-1.0,8,0.049600873,307870.27,306447.90625
12,12.76,15.0,0.0080002459,0.2360321041000003,-1.0,8,0.049600873,337046.97,336074.28125
12,12.76,15.0,0.0060002378,0.2380321122000003,-1.0,8,0.049600873,358331.18,352463.3125
12,12.76,15.0,0.0040002383,0.2400321117000003,-1.0,8,0.049600873,375104.65,371583.875
12,12.76,15.0,0.00200016,0.2420321900000003,-1.0,8,0.049600873,392067.9,393646.0625
12,12.76,15.0,2.177377e-07,0.2440321322623003,-1.0,8,0.049600873,412038.32,413607.09375
12,12.76,15.0,-0.0019997543,0.2460321043000003,-1.0,8,0.049600873,429657.96,431046.71875
12,12.76,15.0,-0.0039997454,0.2480320954000003,-1.0,8,0.049600873,445535.87,445754.84375
12,12.76,15.0,-0.0059997252,0.2500320752000003,-1.0,8,0.049600873,460998.98,457941.59375
12,12.76,15.0,-0.0079998472,0.2520321972000003,-1.0,8,0.049600873,472748.42,468447.40625
12,12.76,15.0,-0.0099997517,0.2540321017000004,-1.0,8,0.049600873,480636.77,477272.28125
12,12.76,15.0,-0.011999816,0.2560321660000003,-1.0,8,0.049600873,487977.68,483995.96875
12,12.76,15.0,-0.01399985,0.2580322000000004,-1.0,8,0.049600873,493990.28,491139.9375
12,12.76,15.0,-0.015999688,0.2600320380000004,-1.0,8,0.049600873,499658.17,499124.34375
12,12.76,15.0,-0.017999709,0.2620320590000004,-1.0,8,0.049600873,505012.41,502065.96875
12,12.76,15.0,-0.01999973,0.2640320800000004,-1.0,8,0.049600873,510098.54,507108.75
12,12.76,15.0,-0.021999756,0.2660321060000004,-1.0,8,0.049600873,514936.01,514672.9375
12,12.76,15.0,-0.023999776,0.2680321260000004,-1.0,8,0.049600873,519541.74,517614.5625
12,12.76,15.0,-0.023999992,0.2696324240000003,1.0,9,0.049600873,419324.13,416758.84375
12,12.76,15.0,-0.022000034,0.2716323820000004,1.0,9,0.049600873,262049.86,263164.0
12,12.76,15.0,-0.019999908,0.2736325080000004,1.0,9,0.049600873,112806.33,109411.5625
12,12.76,15.0,-0.017999953,0.2756324630000004,1.0,9,0.049600873,44919.279,-12560.818359375
12,12.76,15.0,-0.016000018,0.2776323980000004,1.0,9,0.049600873,-103369.37,-104644.1875
12,12.76,15.0,-0.013999889,0.2796325270000004,1.0,9,0.049600873,-194825.76,-196149.734375
12,12.76,15.0,-0.011999894,0.2816325220000005,1.0,9,0.049600873,-260028.52,-264857.71875
12,12.76,15.0,-0.0099999645,0.2836324515000005,1.0,9,0.049600873,-303699.37,-309402.3125
12,12.76,15.0,-0.0079999077,0.2856325083000005,1.0,9,0.049600873,-333371.56,-336507.28125
12,12.76,15.0,-0.0059999948,0.2876324212000004,1.0,9,0.049600873,-354868.72,-357729.0
12,12.76,15.0,-0.0040000244,0.2896323916000003,1.0,9,0.049600873,-372051.94,-376219.21875
12,12.76,15.0,-0.0020000108,0.2916324052000004,1.0,9,0.049600873,-388819.4,-395760.03125
12,12.76,15.0,6.0256808e-08,0.2936324762568083,1.0,9,0.049600873,-408093.21,-417401.96875
12,12.76,15.0,0.0030399591,0.2966723751000004,1.0,9,0.049600873,-442824.43,-442615.90625
12,12.76,15.0,0.0060801387,0.2997125547000004,1.0,9,0.049600873,-467756.56,-466989.375
12,12.76,15.0,0.0091200559,0.3027524719000004,1.0,9,0.049600873,-484343.79,-486740.28125
12,12.76,15.0,0.012159904,0.3057923200000004,1.0,9,0.049600873,-496174.66,-501028.15625
12,12.76,15.0,0.015199983,0.3088323990000005,1.0,9,0.049600873,-505072.49,-511954.21875
12,12.76,15.0,0.018240008,0.3118724240000005,1.0,9,0.049600873,-513386.61,-519938.59375
12,12.76,15.0,0.021280116,0.3149125320000005,1.0,9,0.049600873,-521062.27,-526242.0625
12,12.76,15.0,0.024320113,0.3179525290000005,1.0,9,0.049600873,-528232.05,-532125.3125
12,12.76,15.0,0.027360034,0.3209924500000006,1.0,9,0.049600873,-535953.09,-540109.75
12,12.76,15.0,0.030399915,0.3240323310000006,1.0,9,0.049600873,-545131.55,-553136.9375
12,12.76,15.0,0.033439916,0.3270723320000006,1.0,9,0.049600873,-555855.48,-574148.5625
12,12.76,15.0,0.036480018,0.3301124340000006,1.0,9,0.049600873,-577304.65,-583393.625
12,12.76,15.0,0.036480092,0.3325446200000006,-1.0,10,0.049600873,-430421.12,-445137.28125
12,12.76,15.0,0.033440002,0.3355847100000006,-1.0,10,0.049600873,-191853.56,-203713.90625
12,12.76,15.0,0.030399993,0.3386247190000006,-1.0,10,0.049600873,25226.638,13001.115234375
12,12.76,15.0,0.027360177,0.3416645350000006,-1.0,10,0.049600873,185829.22,173234.328125
12,12.76,15.0,0.024320183,0.3447045290000006,-1.0,10,0.049600873,279235.44,274720.375
12,12.76,15.0,0.021280171,0.3477445410000006,-1.0,10,0.049600873,329825.92,323887.53125
12,12.76,15.0,0.018240179,0.3507845330000005,-1.0,10,0.049600873,364122.23,358556.6875
12,12.76,15.0,0.015200128,0.3538245840000006,-1.0,10,0.049600873,384300.17,381459.34375
12,12.76,15.0,0.012160197,0.3568645150000006,-1.0,10,0.049600873,400388.32,395957.34375
12,12.76,15.0,0.0091200328,0.3599046792000006,-1.0,10,0.049600873,413906.29,406463.15625
12,12.76,15.0,0.0060800154,0.3629446966000006,-1.0,10,0.049600873,427020.93,420540.9375
12,12.76,15.0,0.0030400341,0.3659846779000005,-1.0,10,0.049600873,441867.15,443863.8125
12,12.76,15.0,-2.9466151e-08,0.3690247414661515,-1.0,10,0.049600873,467305.05,466766.46875
12,12.76,15.0,-0.0030398073,0.3720645193000005,-1.0,10,0.049600873,493069.12,488198.3125
12,12.76,15.0,-0.0060799214,0.3751046334000005,-1.0,10,0.049600873,511127.77,507108.75
12,12.76,15.0,-0.0091199238,0.3781446358000004,-1.0,10,0.049600873,524369.59,522657.34375
12,12.76,15.0,-0.012159879,0.3811845910000004,-1.0,10,0.049600873,532420.82,534003.625
12,12.76,15.0,-0.015199802,0.3842245140000003,-1.0,10,0.049600873,541019.29,542828.5
12,12.76,15.0,-0.018239973,0.3872646850000003,-1.0,10,0.049600873,547981.65,550392.6875
12,12.76,15.0,-0.021279825,0.3903045370000003,-1.0,10,0.049600873,554650.34,554174.8125
12,12.76,15.0,-0.024319844,0.3933445560000003,-1.0,10,0.049600873,561918.26,562579.4375
12,12.76,15.0,-0.027360068,0.3963847800000002,-1.0,10,0.049600873,570107.35,570563.8125
12,12.76,15.0,-0.030399956,0.3994246680000002,-1.0,10,0.049600873,578944.97,577287.5625
12,12.76,15.0,-0.033439981,0.4024646930000003,-1.0,10,0.049600873,592437.88,589054.0625
12,12.76,15.0,-0.036479809,0.4055045210000003,-1.0,10,0.049600873,610454.22,606283.5625
12,12.76,15.0,-0.036480019,0.4079367150000003,1.0,11,0.049600873,463068.5,457941.59375
12,12.76,15.0,-0.03344012,0.4109766140000003,1.0,11,0.049600873,225706.19,223347.015625
12,12.76,15.0,-0.030400175,0.4140165590000003,1.0,11,0.049600873,7043.9335,2541.2734375
12,12.76,15.0,-0.027360097,0.4170566370000004,1.0,11,0.049600873,-157356.01,-161060.34375
12,12.76,15.0,-0.024320163,0.4200965710000004,1.0,11,0.049600873,-263000.19,-268219.5625
12,12.76,15.0,-0.021280125,0.4231366090000004,1.0,11,0.049600873,-320008.19,-327262.1875
12,12.76,15.0,-0.018240039,0.4261766950000004,1.0,11,0.049600873,-360810.72,-367394.34375
12,12.76,15.0,-0.015200034,0.4292167000000004,1.0,11,0.049600873,-386023.9,-391137.46875
12,12.76,15.0,-0.012160102,0.4322566320000004,1.0,11,0.049600873,-403242.73,-408577.09375
12,12.76,15.0,-0.0091200464,0.4352966876000004,1.0,11,0.049600873,-417838.67,-421604.28125
12,12.76,15.0,-0.0060800275,0.4383367065000004,1.0,11,0.049600873,-431402.56,-434211.25
12,12.76,15.0,-0.0030402566,0.4413764774000004,1.0,11,0.049600873,-448015.39,-451020.53125
12,12.76,15.0,-1.9112045e-07,0.4444165428795504,1.0,11,0.049600873,-472109.3,-474133.3125
12,12.76,15.0,0.0030397883,0.4474565223000004,1.0,11,0.049600873,-495599.36,-498927.0
12,12.76,15.0,0.0060799065,0.4504966405000005,1.0,11,0.049600873,-517688.49,-519098.15625
12,12.76,15.0,0.0091197493,0.4535364833000005,1.0,11,0.049600873,-532625.2,-533386.0
12,12.76,15.0,0.012159963,0.4565766970000005,1.0,11,0.049600873,-543066.92,-543051.375
12,12.76,15.0,0.01519978,0.4596165140000005,1.0,11,0.049600873,-550963.29,-550615.5625
12,12.76,15.0,0.018239949,0.4626566830000005,1.0,11,0.049600873,-558348.82,-557339.25
12,12.76,15.0,0.021279804,0.4656965380000005,1.0,11,0.049600873,-565263.41,-563642.75
12,12.76,15.0,0.024319738,0.4687364720000005,1.0,11,0.049600873,-571708.2,-570366.4375
12,12.76,15.0,0.027359846,0.4717765800000005,1.0,11,0.049600873,-577732.36,-578350.875
12,12.76,15.0,0.030399941,0.4748166750000005,1.0,11,0.049600873,-583317.66,-588856.6875
12,12.76,15.0,0.033439825,0.4778565590000005,1.0,11,0.049600873,-593954.67,-605245.6875
12,12.76,15.0,0.036479733,0.4808964670000005,1.0,11,0.049600873,-609407.21,-613650.375
12,12.76,15.0,0.03648025,0.4833287640000005,-1.0,12,0.049600873,-460481.68,-488000.96875
12,12.76,15.0,0.033440032,0.4863689820000004,-1.0,12,0.049600873,-223058.08,-238172.953125
12,12.76,15.0,0.030400173,0.4894088410000004,-1.0,12,0.049600873,-5716.2657,-16658.083984375
12,12.76,15.0,0.027360033,0.4924489810000004,-1.0,12,0.049600873,162378.95,152012.609375
12,12.76,15.0,0.024320095,0.4954889190000004,-1.0,12,0.049600873,266420.04,264634.8125
12,12.76,15.0,0.021280076,0.4985289380000005,-1.0,12,0.049600873,324373.03,321576.25
12,12.76,15.0,0.018240164,0.5015688500000005,-1.0,12,0.049600873,367237.37,362548.90625
12,12.76,15.0,0.015200207,0.5046088070000004,-1.0,12,0.049600873,391467.38,389443.75
12,12.76,15.0,0.012160181,0.5076488330000003,-1.0,12,0.049600873,409011.65,405622.6875
12,12.76,15.0,0.0091200561,0.5106889579000002,-1.0,12,0.049600873,423794.39,416548.71875
12,12.76,15.0,0.0060800342,0.5137289798000003,-1.0,12,0.049600873,437346.37,434198.46875
12,12.76,15.0,0.0030402696,0.5167687444000002,-1.0,12,0.049600873,455000.98,457101.125
12,12.76,15.0,5.9955722e-08,0.5198089540442782,-1.0,12,0.049600873,479590.99,479373.4375
12,12.76,15.0,-0.0030399466,0.5228489606000002,-1.0,12,0.049600873,502940.45,499964.8125
12,12.76,15.0,-0.0060798885,0.5258889025000002,-1.0,12,0.049600873,525003.52,518455.03125
12,12.76,15.0,-0.0091197249,0.5289287389000003,-1.0,12,0.049600873,535540.66,533163.1875
12,12.76,15.0,-0.012159781,0.5319687950000003,-1.0,12,0.049600873,547962.42,544089.1875
12,12.76,15.0,-0.015199751,0.5350087650000003,-1.0,12,0.049600873,556303.83,553334.3125
12,12.76,15.0,-0.018239882,0.5380488960000003,-1.0,12,0.049600873,563567.52,561318.75
12,12.76,15.0,-0.021279883,0.5410888970000003,-1.0,12,0.049600873,570262.0,566361.5
12,12.76,15.0,-0.024319736,0.5441287500000003,-1.0,12,0.049600873,576530.66,573925.6875
12,12.76,15.0,-0.027359955,0.5471689690000002,-1.0,12,0.049600873,582382.23,583170.8125
12,12.76,15.0,-0.030399936,0.5502089500000004,-1.0,12,0.049600873,588198.81,588213.5625
12,12.76,15.0,-0.03343981,0.5532488240000002,-1.0,12,0.049600873,598624.64,601661.0
12,12.76,15.0,-0.036479724,0.5562887380000002,-1.0,12,0.049600873,613595.86,617629.8125
12,12.76,15.0,-0.036480496,0.5587214920000003,1.0,13,0.049600873,464600.85,462564.15625
12,12.76,15.0,-0.033440329,0.5617616590000004,1.0,13,0.049600873,227961.12,225133.0
12,12.76,15.0,-0.030400406,0.5648015820000004,1.0,13,0.049600873,9480.0309,2396.818359375
12,12.76,15.0,-0.027360377,0.5678416110000004,1.0,13,0.049600873,-155286.04,-161585.640625
12,12.76,15.0,-0.024320299,0.5708816890000005,1.0,13,0.049600873,-262235.41,-269690.375
12,12.76,15.0,-0.021280255,0.5739217330000005,1.0,13,0.049600873,-322505.07,-332304.96875
12,12.76,15.0,-0.018240342,0.5769616460000005,1.0,13,0.049600873,-362857.43,-372227.03125
12,12.76,15.0,-0.015200498,0.5800014900000005,1.0,13,0.049600873,-388477.74,-395970.125
12,12.76,15.0,-0.012160269,0.5830417190000005,1.0,13,0.049600873,-405829.36,-413199.65625
12,12.76,15.0,-0.0091204159,0.5860815721000003,1.0,13,0.049600873,-420567.66,-427067.3125
12,12.76,15.0,-0.0060802606,0.5891217274000004,1.0,13,0.049600873,-434135.21,-441775.4375
12,12.76,15.0,-0.0030403114,0.5921616766000004,1.0,13,0.049600873,-450984.84,-461526.34375
12,12.76,15.0,-3.0057512e-07,0.5952016874248803,1.0,13,0.049600873,-475186.6,-487160.5
12,12.76,15.0,0.0039996692,0.5992016572000003,1.0,13,0.049600873,-514717.29,-516576.78125
12,12.76,15.0,0.0079994688,0.6032014568000003,1.0,13,0.049600873,-540775.47,-540529.9375
12,12.76,15.0,0.011999598,0.6072015860000002,1.0,13,0.049600873,-555340.36,-556919.0
12,12.76,15.0,0.015999627,0.6112016150000001,1.0,13,0.049600873,-565367.69,-568685.5
12,12.76,15.0,0.019999723,0.6152017110000002,1.0,13,0.049600873,-574569.66,-578350.875
12,12.76,15.0,0.023999639,0.6192016270000003,1.0,13,0.049600873,-583024.39,-587175.75
12,12.76,15.0,0.027999501,0.6232014890000002,1.0,13,0.049600873,-590731.94,-596420.8125
12,12.76,15.0,0.031999765,0.627201753,1.0,13,0.049600873,-600230.8,-606926.625
12,12.76,15.0,0.035999696,0.631201684,1.0,13,0.049600873,-617494.86,-619953.8125
12,12.76,15.0,0.039999655,0.635201643,1.0,13,0.049600873,-639410.14,-637603.5625
12,12.76,15.0,0.043999739,0.6392017269999999,1.0,13,0.049600873,-661651.31,-665338.9375
12,12.76,15.0,0.047999672,0.6432016599999999,1.0,13,0.049600873,-683089.49,-683829.125
12,12.76,15.0,0.048000263,0.6464022529999998,-1.0,14,0.049600873,-481691.4,-494724.6875
12,12.76,15.0,0.044000273,0.6504022429999998,-1.0,14,0.049600873,-180494.6,-198250.890625
12,12.76,15.0,0.040000117,0.6544023989999999,-1.0,14,0.049600873,75457.377,66495.3515625
12,12.76,15.0,0.036000439,0.6584020769999999,-1.0,14,0.049600873,243877.78,234903.390625
12,12.76,15.0,0.03200013,0.6624023859999999,-1.0,14,0.049600873,337475.58,330611.25
12,12.76,15.0,0.028000204,0.666402312,-1.0,14,0.049600873,386375.27,372214.21875
12,12.76,15.0,0.024000195,0.6704023210000001,-1.0,14,0.049600873,396430.96,401630.46875
12,12.76,15.0,0.020000311,0.6744022050000001,-1.0,14,0.049600873,432094.72,422642.09375
12,12.76,15.0,0.016000357,0.6784021590000001,-1.0,14,0.049600873,445929.2,438610.90625
12,12.76,15.0,0.012000254,0.6824022620000001,-1.0,14,0.049600873,459358.06,451217.875
12,12.76,15.0,0.0080002725,0.6864022435,-1.0,14,0.049600873,473311.71,464245.0625
12,12.76,15.0,0.0040003923,0.6904021237,-1.0,14,0.049600873,495504.89,492820.84375
12,12.76,15.0,2.6998025e-07,0.6944022460197499,-1.0,14,0.049600873,526580.21,519715.71875
12,12.76,15.0,-0.0039997475,0.6984022634999999,-1.0,14,0.049600873,555031.34,544509.4375
12,12.76,15.0,-0.0079996272,0.7024021431999998,-1.0,14,0.049600873,573448.41,565941.3125
12,12.76,15.0,-0.011999714,0.7064022299999998,-1.0,14,0.049600873,584837.34,582750.5625
12,12.76,15.0,-0.015999607,0.7104021229999997,-1.0,14,0.049600873,593448.03,593676.625
12,12.76,15.0,-0.019999617,0.7144021329999997,-1.0,14,0.049600873,601259.65,601661.0
12,12.76,15.0,-0.023999704,0.7184022199999996,-1.0,14,0.049600873,608408.14,609645.4375
12,12.76,15.0,-0.027999744,0.7224022599999996,-1.0,14,0.049600873,614959.12,617209.625
12,12.76,15.0,-0.031999882,0.7264023979999996,-1.0,14,0.049600873,626413.85,628135.625
12,12.76,15.0,-0.035999895,0.7304024109999997,-1.0,14,0.049600873,646468.37,645365.1875
12,12.76,15.0,-0.039999781,0.7344022969999996,-1.0,14,0.049600873,667991.17,662174.4375
12,12.76,15.0,-0.043999775,0.7384022909999997,-1.0,14,0.049600873,688749.21,682765.8125
12,12.76,15.0,-0.047999663,0.7424021789999997,-1.0,14,0.049600873,708202.85,702516.75
12,12.76,15.0,-0.047999834,0.7456023639999997,1.0,15,0.049600873,505830.87,507949.21875
12,12.76,15.0,-0.043999663,0.7496025349999996,1.0,15,0.049600873,207242.51,201284.828125
12,12.76,15.0,-0.03999998,0.7536022179999996,1.0,15,0.049600873,-49058.703,-57420.6015625
12,12.76,15.0,-0.035999884,0.7576023139999997,1.0,15,0.049600873,-222970.07,-232079.578125
12,12.76,15.0,-0.031999825,0.7616023729999997,1.0,15,0.049600873,-325822.82,-328102.65625
12,12.76,15.0,-0.027999827,0.7656023709999996,1.0,15,0.049600873,-382731.37,-385044.09375
12,12.76,15.0,-0.023999658,0.7696025399999996,1.0,15,0.049600873,-406486.97,-416561.5
12,12.76,15.0,-0.019999727,0.7736024709999997,1.0,15,0.049600873,-432177.32,-437152.875
12,12.76,15.0,-0.015999797,0.7776024009999997,1.0,15,0.049600873,-446639.43,-453541.9375
12,12.76,15.0,-0.011999825,0.7816023729999997,1.0,15,0.049600873,-461045.37,-466569.125
12,12.76,15.0,-0.0079997508,0.7856024471999997,1.0,15,0.049600873,-474888.52,-479596.34375
12,12.76,15.0,-0.0039997608,0.7896024371999998,1.0,15,0.049600873,-493664.15,-497246.09375
12,12.76,15.0,5.4958764e-08,0.7936022529587639,1.0,15,0.049600873,-523867.13,-524561.125
12,12.76,15.0,0.0040001561,0.7976023540999999,1.0,15,0.049600873,-553568.27,-555238.0625
12,12.76,15.0,0.0080000114,0.8016022093999998,1.0,15,0.049600873,-577551.79,-577930.625
12,12.76,15.0,0.012000139,0.8056023369999998,1.0,15,0.049600873,-589527.88,-591798.3125
12,12.76,15.0,0.01600031,0.8096025079999999,1.0,15,0.049600873,-598333.89,-601463.625
12,12.76,15.0,0.020000335,0.813602533,1.0,15,0.049600873,-606542.93,-609027.8125
12,12.76,15.0,0.024000024,0.8176022219999999,1.0,15,0.049600873,-614009.11,-616592.0
12,12.76,15.0,0.028000216,0.8216024139999999,1.0,15,0.049600873,-620803.52,-625416.875
12,12.76,15.0,0.032000283,0.8256024809999999,1.0,15,0.049600873,-630142.1,-636763.125
12,12.76,15.0,0.036000054,0.8296022519999999,1.0,15,0.049600873,-647997.25,-651471.25
12,12.76,15.0,0.040000145,0.833602343,1.0,15,0.049600873,-668313.7,-672482.875
12,12.76,15.0,0.044000247,0.837602445,1.0,15,0.049600873,-686645.37,-701478.875
12,12.76,15.0,0.048000322,0.84160252,1.0,15,0.049600873,-703534.22,-723330.9375
12,12.76,15.0,0.048000854,0.8448030900000001,-1.0,16,0.049600873,-500365.79,-547673.9375
12,12.76,15.0,0.04400058,0.8488033640000001,-1.0,16,0.049600873,-201116.23,-234180.734375
12,12.76,15.0,0.040000821,0.8528031230000002,-1.0,16,0.049600873,55310.017,33848.5703125
12,12.76,15.0,0.03600071,0.8568032340000001,-1.0,16,0.049600873,229331.02,214627.1875
12,12.76,15.0,0.032000765,0.8608031790000001,-1.0,16,0.049600873,331677.59,328089.84375
12,12.76,15.0,0.02800054,0.8648034040000001,-1.0,16,0.049600873,385866.75,374105.28125
12,12.76,15.0,0.024000578,0.8688033660000002,-1.0,16,0.049600873,396132.43,405832.8125
12,12.76,15.0,0.020000836,0.8728031080000002,-1.0,16,0.049600873,434149.53,428105.09375
12,12.76,15.0,0.016000847,0.8768030970000003,-1.0,16,0.049600873,450973.87,444494.15625
12,12.76,15.0,0.012000638,0.8808033060000001,-1.0,16,0.049600873,464209.68,457521.34375
12,12.76,15.0,0.0080008446,0.8848030994,-1.0,16,0.049600873,478635.93,472229.46875
12,12.76,15.0,0.0040005462,0.8888033978000001,-1.0,16,0.049600873,500146.16,502486.1875
12,12.76,15.0,5.7932109e-07,0.8928033646789101,-1.0,16,0.049600873,531866.08,530221.5625
12,12.76,15.0,-0.0039991344,0.8968030784000001,-1.0,16,0.049600873,561139.34,555855.6875
12,12.76,15.0,-0.007999294,0.9008032380000002,-1.0,16,0.049600873,596325.5,577287.5625
12,12.76,15.0,-0.011999188,0.9048031320000002,-1.0,16,0.049600873,608322.15,592836.125
12,12.76,15.0,-0.015999423,0.9088033670000002,-1.0,16,0.049600873,619836.34,602501.5
12,12.76,15.0,-0.01999925,0.9128031940000002,-1.0,16,0.049600873,628778.21,611326.375
12,12.76,15.0,-0.02399934,0.9168032840000002,-1.0,16,0.049600873,636547.35,622672.625
12,12.76,15.0,-0.027999209,0.9208031530000002,-1.0,16,0.049600873,653692.96,635279.5625
12,12.76,15.0,-0.031999329,0.924803273,-1.0,16,0.049600873,680645.01,652088.875
12,12.76,15.0,-0.035999384,0.928803328,-1.0,16,0.049600873,707095.92,675201.625
12,12.76,15.0,-0.039999118,0.9328030620000002,-1.0,16,0.049600873,731629.97,697894.1875
12,12.76,15.0,-0.043999166,0.9368031100000002,-1.0,16,0.049600873,754568.35,723948.5625
12,12.76,15.0,-0.047999125,0.940803069,-1.0,16,0.049600873,777012.69,743699.5
12,12.76,15.0,-0.047999564,0.944003528,1.0,17,0.049600873,578641.48,536945.25
12,12.76,15.0,-0.043999591,0.9480035010000002,1.0,17,0.049600873,265989.46,218934.578125
12,12.76,15.0,-0.039999705,0.9520033870000002,1.0,17,0.049600873,83.169636,-37774.75
12,12.76,15.0,-0.035999569,0.9560035230000002,1.0,17,0.049600873,-183911.21,-217791.6875
12,12.76,15.0,-0.031999679,0.9600034130000002,1.0,17,0.049600873,-301092.13,-326421.71875
12,12.76,15.0,-0.02799965,0.9640034420000004,1.0,17,0.049600873,-379081.18,-388826.1875
12,12.76,15.0,-0.023999482,0.9680036100000003,1.0,17,0.049600873,-411965.55,-419923.375
12,12.76,15.0,-0.019999661,0.9720034310000004,1.0,17,0.049600873,-434395.61,-440514.75
12,12.76,15.0,-0.01599982,0.9760032720000006,1.0,17,0.049600873,-449550.76,-456483.5625
12,12.76,15.0,-0.011999574,0.9800035180000006,1.0,17,0.049600873,-464618.85,-469510.75
12,12.76,15.0,-0.0079994856,0.9840036064000006,1.0,17,0.049600873,-482208.3,-483798.65625
12,12.76,15.0,-0.0039997777,0.9880033143000008,1.0,17,0.049600873,-511776.54,-504390.03125
12,12.76,15.0,3.5415353e-07,0.9920034461535306,1.0,17,0.049600873,-542005.43,-535907.4375
Case,tw1,tw2,Displ,CumDispl,LoadDir,CycleNum,MaxAmpl,Force,TFDMapW1,TFDMapW2,TFDMapF,Force_RNN
12,12.76,15.0,0.0,0.0,1.0,1,0.049600873,0.0,0.0,0.0,0.0,
12,12.76,15.0,0.0047999951,0.0047999951,1.0,1,0.049600873,-289811.98,0.0,0.0,0.056928,
12,12.76,15.0,0.0095999885,0.0095999885,1.0,1,0.049600873,-377965.75,0.470046,0.639734,0.683278,
12,12.76,15.0,0.009600078,0.014208046,-1.0,2,0.049600873,-184305.38,0.785147,0.974071,0.975768,
12,12.76,15.0,0.0048000575,0.0190080665,-1.0,2,0.049600873,59143.247,0.785147,0.974071,0.98076,
12,12.76,15.0,7.8867329e-08,0.023808045132671,-1.0,2,0.049600873,287977.49,0.851529,1.020403,1.248425,
12,12.76,15.0,-0.0047999289,0.0286080529,-1.0,2,0.049600873,361809.08,1.356749,1.702603,1.88383,
12,12.76,15.0,-0.0095999443,0.0334080682999999,-1.0,2,0.049600873,399997.11,2.014539,2.400686,2.469553,
12,12.76,15.0,-0.0095999076,0.0380160584,1.0,3,0.049600873,207052.03,2.325355,2.709506,2.736589,
12,12.76,15.0,-0.0047998948,0.0428160712,1.0,3,0.049600873,-36588.629,2.325355,2.709506,2.74145,
12,12.76,15.0,6.8131205e-08,0.047616034131205,1.0,3,0.049600873,-279940.85,2.378317,2.724726,2.9574,
12,12.76,15.0,0.0048000654,0.0524160313999999,1.0,3,0.049600873,-369655.17,2.845332,3.246218,3.488382,
12,12.76,15.0,0.0096000768,0.0572160428,1.0,3,0.049600873,-405778.62,3.465901,3.951151,4.104137,
12,12.76,15.0,0.0096001757,0.0618241442999999,-1.0,4,0.049600873,-211157.82,3.790219,4.291442,4.401733,
12,12.76,15.0,0.0048002256,0.0666240944,-1.0,4,0.049600873,31540.194,3.790219,4.291442,4.404056,
12,12.76,15.0,1.7269626e-07,0.07142414730374,-1.0,4,0.049600873,276756.37,3.836265,4.299633,4.602282,
12,12.76,15.0,-0.0047998111,0.0762241311,-1.0,4,0.049600873,370649.0,4.287116,4.798409,5.112079,
12,12.76,15.0,-0.0095998312,0.0810241512,-1.0,4,0.049600873,407371.07,4.902065,5.503762,5.721379,
12,12.76,15.0,-0.0095998283,0.0856321357000001,1.0,5,0.049600873,214496.42,5.22577,5.846686,6.017521,
12,12.76,15.0,-0.0047998582,0.0904321058000001,1.0,5,0.049600873,-27814.934,5.22577,5.846686,6.022228,-6528.94921875
12,12.76,15.0,1.5436796e-07,0.0952321183679601,1.0,5,0.049600873,-272754.91,5.266926,5.851645,6.230035,-265175.625
12,12.76,15.0,0.010000188,0.1052321520000001,1.0,5,0.049600873,-418303.32,6.382042,7.098776,7.409964,-394390.5625
12,12.76,15.0,0.020000155,0.1152321190000001,1.0,5,0.049600873,-462593.31,7.618197,8.354361,8.545721,-447724.25
12,12.76,15.0,0.020000272,0.1248320880000001,-1.0,6,0.049600873,-71734.652,8.172086,8.889643,9.060265,-81217.7890625
12,12.76,15.0,0.01000028,0.1348320800000001,-1.0,6,0.049600873,311565.26,8.442799,9.117646,9.561961,302632.9375
12,12.76,15.0,1.5892522e-07,0.1448322010747801,-1.0,6,0.049600873,395245.42,9.423494,10.19456,10.662497,402579.4375
12,12.76,15.0,-0.0099997658,0.1548321258000001,-1.0,6,0.049600873,460211.56,10.709636,11.52057,11.770478,461116.375
12,12.76,15.0,-0.019999834,0.164832194,-1.0,6,0.049600873,499908.52,11.864922,12.610947,12.772742,494937.75
12,12.76,15.0,-0.019999946,0.174431986,1.0,7,0.049600873,107920.86,12.414463,13.135973,13.282014,119000.328125
12,12.76,15.0,-0.0099997785,0.1844321535,1.0,7,0.049600873,-304913.4,12.628146,13.292991,13.721678,-288156.8125
12,12.76,15.0,1.9164559e-07,0.1944321236455901,1.0,7,0.049600873,-405669.06,13.520816,14.255532,14.780805,-399593.875
12,12.76,15.0,0.010000109,0.2044320410000001,1.0,7,0.049600873,-475301.62,14.789293,15.571822,15.907708,-480244.8125
12,12.76,15.0,0.020000143,0.2144320750000003,1.0,7,0.049600873,-505330.13,16.000783,16.726672,16.976107,-516234.1875
12,12.76,15.0,0.020000201,0.2240321490000003,-1.0,8,0.049600873,-107426.53,16.580634,17.273795,17.497072,-114822.34375
12,12.76,15.0,0.010000213,0.2340321370000003,-1.0,8,0.049600873,307870.27,16.788,17.420432,17.905947,297863.3125
12,12.76,15.0,2.177377e-07,0.2440321322623003,-1.0,8,0.049600873,412038.32,17.695412,18.391117,18.950234,407782.6875
12,12.76,15.0,-0.0099997517,0.2540321017000004,-1.0,8,0.049600873,480636.77,18.987376,19.729206,20.075531,473691.0
12,12.76,15.0,-0.01999973,0.2640320800000004,-1.0,8,0.049600873,510098.54,20.21402,20.897078,21.142058,515317.3125
12,12.76,15.0,-0.019999908,0.2736325080000004,1.0,9,0.049600873,112806.33,20.795068,21.454233,21.669493,145341.96875
12,12.76,15.0,-0.0099999645,0.2836324515000005,1.0,9,0.049600873,-303699.37,21.001226,21.598277,22.097359,-289240.8125
12,12.76,15.0,6.0256808e-08,0.2936324762568083,1.0,9,0.049600873,-408093.21,21.905376,22.564824,23.155952,-408049.1875
12,12.76,15.0,0.015199983,0.3088323990000005,1.0,9,0.049600873,-505072.49,23.840373,24.511011,24.850612,-496288.25
12,12.76,15.0,0.030399915,0.3240323310000006,1.0,9,0.049600873,-545131.55,25.5655,26.153381,26.429387,-537047.375
12,12.76,15.0,0.030399993,0.3386247190000006,-1.0,10,0.049600873,25226.638,26.478181,27.057659,27.230124,809.343994140625
12,12.76,15.0,0.015200128,0.3538245840000006,-1.0,10,0.049600873,384300.17,27.365417,27.891973,28.371927,375262.1875
12,12.76,15.0,-2.9466151e-08,0.3690247414661515,-1.0,10,0.049600873,467305.05,28.873296,29.372548,29.929423,471956.5625
12,12.76,15.0,-0.015199802,0.3842245140000003,-1.0,10,0.049600873,541019.29,30.756317,31.244145,31.547355,540466.5
12,12.76,15.0,-0.030399956,0.3994246680000002,-1.0,10,0.049600873,578944.97,32.470902,32.858169,33.113327,577323.125
12,12.76,15.0,-0.030400175,0.4140165590000003,1.0,11,0.049600873,7043.9335,33.402315,33.820487,33.965346,14372.271484375
12,12.76,15.0,-0.015200034,0.4292167000000004,1.0,11,0.049600873,-386023.9,34.203135,34.560337,35.077852,-369458.1875
12,12.76,15.0,-1.9112045e-07,0.4444165428795504,1.0,11,0.049600873,-472109.3,35.660223,35.98214,36.650857,-455962.8125
12,12.76,15.0,0.01519978,0.4596165140000005,1.0,11,0.049600873,-550963.29,37.523316,37.830723,38.273459,-540516.1875
12,12.76,15.0,0.030399941,0.4748166750000005,1.0,11,0.049600873,-583317.66,39.25601,39.464577,39.880519,-587345.75
12,12.76,15.0,0.030400173,0.4894088410000004,-1.0,12,0.049600873,-5716.2657,40.207903,40.467612,40.734218,-15323.044921875
12,12.76,15.0,0.015200207,0.5046088070000004,-1.0,12,0.049600873,391467.38,41.003578,41.198965,41.803809,383934.3125
12,12.76,15.0,5.9955722e-08,0.5198089540442782,-1.0,12,0.049600873,479590.99,42.490777,42.64704,43.363202,483663.9375
12,12.76,15.0,-0.015199751,0.5350087650000003,-1.0,12,0.049600873,556303.83,44.366392,44.513131,44.978406,559111.625
12,12.76,15.0,-0.030399936,0.5502089500000004,-1.0,12,0.049600873,588198.81,46.110903,46.151415,46.581383,609410.0
12,12.76,15.0,-0.030400406,0.5648015820000004,1.0,13,0.049600873,9480.0309,47.06955,47.165274,47.460719,21845.220703125
12,12.76,15.0,-0.015200498,0.5800014900000005,1.0,13,0.049600873,-388477.74,47.863234,47.893506,48.565213,-387452.875
12,12.76,15.0,-3.0057512e-07,0.5952016874248803,1.0,13,0.049600873,-475186.6,49.337284,49.328373,50.146634,-478076.75
12,12.76,15.0,0.019999723,0.6152017110000002,1.0,13,0.049600873,-574569.66,51.776768,51.709318,52.27778,-574337.5625
12,12.76,15.0,0.039999655,0.635201643,1.0,13,0.049600873,-639410.14,54.142161,54.083376,54.418228,-622034.375
12,12.76,15.0,0.040000117,0.6544023989999999,-1.0,14,0.049600873,75457.377,55.643614,55.470939,55.476663,76832.03125
12,12.76,15.0,0.020000311,0.6744022050000001,-1.0,14,0.049600873,432094.72,57.112977,56.857027,57.189769,432281.5
12,12.76,15.0,2.6998025e-07,0.6944022460197499,-1.0,14,0.049600873,526580.21,59.039439,58.688025,59.185845,526591.0625
12,12.76,15.0,-0.019999617,0.7144021329999997,-1.0,14,0.049600873,601259.65,61.46112,61.082516,61.370082,608976.4375
12,12.76,15.0,-0.039999781,0.7344022969999996,-1.0,14,0.049600873,667991.17,63.857757,63.486548,63.572054,664044.5625
12,12.76,15.0,-0.03999998,0.7536022179999996,1.0,15,0.049600873,-49058.703,65.372264,64.96707,64.707603,-34144.3046875
12,12.76,15.0,-0.019999727,0.7736024709999997,1.0,15,0.049600873,-432177.32,66.725609,66.255224,66.459216,-419973.375
12,12.76,15.0,5.4958764e-08,0.7936022529587639,1.0,15,0.049600873,-523867.13,68.570203,67.975814,68.471332,-509730.0625
12,12.76,15.0,0.020000335,0.813602533,1.0,15,0.049600873,-606542.93,70.958061,70.33707,70.676546,-609026.125
12,12.76,15.0,0.040000145,0.833602343,1.0,15,0.049600873,-668313.7,73.261157,72.756927,73.014903,-681004.9375
12,12.76,15.0,0.040000821,0.8528031230000002,-1.0,16,0.049600873,55310.017,74.779705,74.321542,74.18843,34501.12890625
12,12.76,15.0,0.020000836,0.8728031080000002,-1.0,16,0.049600873,434149.53,76.140579,75.612265,75.907919,443121.6875
12,12.76,15.0,5.7932109e-07,0.8928033646789101,-1.0,16,0.049600873,531866.08,78.054646,77.40104,77.907096,540900.125
12,12.76,15.0,-0.01999925,0.9128031940000002,-1.0,16,0.049600873,628778.21,80.658909,80.040507,80.145302,628922.375
12,12.76,15.0,-0.039999118,0.9328030620000002,-1.0,16,0.049600873,731629.97,83.439583,83.256788,82.557774,703069.1875
12,12.76,15.0,-0.039999705,0.9520033870000002,1.0,17,0.049600873,83.169636,85.26927,85.301306,83.775157,-6610.2509765625
12,12.76,15.0,-0.019999661,0.9720034310000004,1.0,17,0.049600873,-434395.61,86.721778,86.774096,85.592634,-435800.0625
12,12.76,15.0,3.5415353e-07,0.9920034461535306,1.0,17,0.049600873,-542005.43,88.818848,88.788557,87.670866,-530543.25
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
import os
import pandas as pd
import numpy as np
import pandas as pd
# import matplotlib.pyplot as plt
# Configuration constants and bounds
W = 5 # Number of windows
B = 34 # Width identifier
W = 2 # Number of windows
B = 29 # Width identifier
H = 30
if W == 2:
H = 30 # Adjust height identifier for two window case
......@@ -40,22 +42,56 @@ print(f"Number of cases: {N_CASES}")
print(f"Number of windows per case: {n_windows}")
# Column names for tw1, tw2, ...
thickness_cols = [f"tw{i+1}" for i in range(n_windows)]
thickness_cols = [f"tw{i + 1}" for i in range(n_windows)]
all_rows = []
def load_tfdmap(case_folder_path):
    """Load the per-case TFDMap table from ``TFDMap.txt`` in *case_folder_path*.

    The file is whitespace-separated; ``#`` lines are comments. Some files also
    carry a textual header row (e.g. "Tiempo promW1 ... promF0"), so every cell
    is coerced to a number and rows with any non-numeric cell are discarded.
    The first (time) column is dropped; the remaining columns are renamed to
    ``TFDMapW1 .. TFDMapW{k}`` followed by ``TFDMapF`` (just ``TFDMapF`` when a
    single data column remains).

    Returns:
        pandas.DataFrame with the renamed TFDMap columns, index reset.

    Raises:
        FileNotFoundError: ``TFDMap.txt`` is missing from the folder.
        ValueError: no numeric rows survive, or fewer than two columns exist.
    """
    tfdmap_file = os.path.join(case_folder_path, "TFDMap.txt")
    if not os.path.isfile(tfdmap_file):
        raise FileNotFoundError(f"TFDMap file not found: {tfdmap_file}")

    raw = pd.read_csv(
        tfdmap_file,
        comment="#",
        sep=r"\s+",
        engine="python",
        header=None,
    )

    # Coerce everything to numeric so stray header rows become NaN, then keep
    # only the fully numeric rows.
    numeric = raw.apply(pd.to_numeric, errors="coerce")
    numeric = numeric.dropna(axis=0, how="any").reset_index(drop=True)

    if numeric.empty:
        raise ValueError(f"TFDMap file has no numeric data rows: {tfdmap_file}")
    if numeric.shape[1] < 2:
        raise ValueError(
            f"TFDMap file must contain at least two columns after ignoring headers: {tfdmap_file}"
        )

    # Drop the leading time column; what remains is the map data itself.
    data = numeric.iloc[:, 1:].copy()
    n_cols = data.shape[1]
    if n_cols == 1:
        data.columns = ["TFDMapF"]
    else:
        data.columns = [f"TFDMapW{i + 1}" for i in range(n_cols - 1)] + ["TFDMapF"]
    return data
# Detect all numeric folders automatically
case_folders = sorted(
[f for f in os.listdir(CURVES_DIR) if f.isdigit()],
key=int
)
case_folders = sorted([f for f in os.listdir(CURVES_DIR) if f.isdigit()], key=int)
case_folders_sorted = sorted(case_folders, key=lambda x: int(x))
# thickness_data must have the SAME length and order as case_folders_sorted
thickness_by_case = {
int(folder): thickness_data[idx]
for idx, folder in enumerate(case_folders_sorted)
int(folder): thickness_data[idx] for idx, folder in enumerate(case_folders_sorted)
}
# === Loop ===
......@@ -67,7 +103,13 @@ for folder_name in case_folders_sorted:
print(f"⚠️ Folder not found: {folder_path}, skipping.")
continue
files = [f for f in os.listdir(folder_path) if f.endswith((".txt", ".dat", ".csv"))]
files = [
f
for f in os.listdir(folder_path)
if f.endswith((".txt", ".dat", ".csv"))
and f != "TFDMap.txt"
and not f.startswith("merged_dataset_")
]
if not files:
print(f"⚠️ No hysteresis file in {folder_path}.")
continue
......@@ -76,8 +118,12 @@ for folder_name in case_folders_sorted:
print(f"Processing Case {i}: {datafile}")
df_curve = pd.read_csv(
datafile, comment="#", sep=r"\s+", engine="python",
names=["Displ", "Force"], header=None
datafile,
comment="#",
sep=r"\s+",
engine="python",
names=["Displ", "Force"],
header=None,
)
df_curve["Case"] = i
......@@ -116,9 +162,8 @@ def compute_load_dir(displ):
df["LoadDir"] = df.groupby("Case")["Displ"].transform(compute_load_dir)
# CumDispl: cumulative |ΔΔ| per case
df["CumDispl"] = (
df.groupby("Case")["Displ"]
.transform(lambda s: s.diff().abs().fillna(0).cumsum())
df["CumDispl"] = df.groupby("Case")["Displ"].transform(
lambda s: s.diff().abs().fillna(0).cumsum()
)
......@@ -143,18 +188,47 @@ df["MaxAmpl"] = df.groupby("Case")["Displ"].transform(lambda x: x.abs().max())
# 4) Reorder point-level columns (Force last)
# ------------------------------------------------------------------
ordered_cols_point = (
["Case"] +
thickness_cols +
["Displ", "CumDispl", "LoadDir", "CycleNum", "MaxAmpl", "Force"]
["Case"]
+ thickness_cols
+ ["Displ", "CumDispl", "LoadDir", "CycleNum", "MaxAmpl", "Force"]
)
df = df[ordered_cols_point]
print("\n=== Point-level DataFrame (df) ===")
print(df.head())
# Point-level export: keep 1 out of every 5 rows within each case
keep_mask = df.groupby("Case", sort=False).cumcount() % 5 == 0
df_points = df.loc[keep_mask].reset_index(drop=True)
# Add TFDMap data from the corresponding file in each case folder
tfdmap_frames = []
for folder_name in case_folders_sorted:
case_id = int(folder_name)
folder_path = os.path.join(CURVES_DIR, folder_name)
df_case_points = df_points[df_points["Case"] == case_id].reset_index(drop=True)
if df_case_points.empty:
continue
df_tfdmap = load_tfdmap(folder_path)
if len(df_tfdmap) != len(df_case_points):
raise ValueError(
f"TFDMap length mismatch for case {case_id}: "
f"{len(df_tfdmap)} rows in TFDMap.txt vs {len(df_case_points)} sampled rows."
)
df_case_points = pd.concat(
[df_case_points, df_tfdmap.reset_index(drop=True)],
axis=1,
)
tfdmap_frames.append(df_case_points)
df_points = pd.concat(tfdmap_frames, ignore_index=True)
print("\n=== Point-level DataFrame (df_points) ===")
# Save point-level dataset
output_file_points = os.path.join(CURVES_DIR, "merged_dataset_points.csv")
df.to_csv(output_file_points, index=False)
df_points.to_csv(output_file_points, index=False)
print(f"\nSaved point-level dataset to: {output_file_points}")
# ==================================================================
......@@ -238,21 +312,25 @@ df_cycles["StiffnessRatio"] = df_cycles.groupby("Case")["SecantStiffness"].trans
# Order cycle-level columns nicely
ordered_cols_cycles = (
["Case", "CycleNum"] +
thickness_cols +
[
["Case", "CycleNum"]
+ thickness_cols
+ [
"CycleMaxAmpl",
"PosEnvDispl", "PosEnvForce",
"NegEnvDispl", "NegEnvForce",
"LoopEnergySigned", "LoopEnergyAbs", "CumLoopEnergyAbs",
"SecantStiffness", "StiffnessRatio"
"PosEnvDispl",
"PosEnvForce",
"NegEnvDispl",
"NegEnvForce",
"LoopEnergySigned",
"LoopEnergyAbs",
"CumLoopEnergyAbs",
"SecantStiffness",
"StiffnessRatio",
]
)
df_cycles = df_cycles[ordered_cols_cycles]
print("\n=== Cycle-level DataFrame (df_cycles) ===")
print(df_cycles.head())
# Save cycle-level summary
output_file_cycles = os.path.join(CURVES_DIR, "merged_dataset_cycles.csv")
......
......@@ -127,7 +127,7 @@ def make_windows_for_case_fast(
CaseWindows = Dict[
int, Tuple[np.ndarray, np.ndarray, np.ndarray]
] # case -> (x, y, idx)
WINDOW_CACHE: Dict[int, CaseWindows] = {} # window_size -> cache
WINDOW_CACHE: Dict[Tuple[str, int], CaseWindows] = {} # (target_col, window_size) -> cache
def precompute_windows_for_cases(
......@@ -154,15 +154,16 @@ def get_cache_for_window_size(
target_col: str,
window_size: int,
) -> CaseWindows:
if window_size not in WINDOW_CACHE:
WINDOW_CACHE[window_size] = precompute_windows_for_cases(
cache_key = (target_col, window_size)
if cache_key not in WINDOW_CACHE:
WINDOW_CACHE[cache_key] = precompute_windows_for_cases(
df=df,
cases=cases_to_cache,
feature_cols=feature_cols,
target_col=target_col,
window_size=window_size,
)
return WINDOW_CACHE[window_size]
return WINDOW_CACHE[cache_key]
def build_from_cache(
......@@ -493,6 +494,23 @@ def strip_compile_prefix(state_dict: dict) -> dict:
return {k.replace("_orig_mod.", "", 1): v for k, v in state_dict.items()}
def target_sort_key(col_name: str) -> Tuple[int, int]:
    """Ordering key for target columns: Force first, then TFDMapW<i> sorted by
    window index, then TFDMapF, then everything else.

    A TFDMapW column whose index cannot be parsed sorts last within its group
    (index 9999).
    """
    if col_name == "Force":
        return (0, 0)
    if col_name == "TFDMapF":
        return (2, 0)
    if not col_name.startswith("TFDMapW"):
        return (3, 0)
    try:
        window_idx = int(col_name.replace("TFDMapW", ""))
    except ValueError:
        window_idx = 9999
    return (1, window_idx)
def target_prediction_col(target_col: str) -> str:
    """Return the name of the column holding model predictions for *target_col*."""
    return "{}_RNN".format(target_col)
# -------------------------
# Final training on 100% train_pool (cached) - no val
# Keep best by TRAIN loss
......@@ -688,7 +706,6 @@ def main():
model_dir = f"../../models/hysteretic_curves/{w_val}W/models"
os.makedirs(model_dir, exist_ok=True)
model_path = os.path.join(model_dir, f"B{b_val}_H{h_val}.joblib")
n_train_case = 0
predict_case = 0
......@@ -727,19 +744,17 @@ def main():
df = pd.read_csv(data_file)
# Keep only 1 out of every 5 rows within each case.
keep_mask = df.groupby("Case", sort=False).cumcount() % 5 == 0
df = df.loc[keep_mask].reset_index(drop=True)
thickness_cols = [c for c in df.columns if c.startswith("tw")]
feature_cols = [
"Displ",
"CumDispl",
# "LoadDir",
# "CycleNum",
# "MaxAmpl",
] + thickness_cols
target_col = "Force"
target_cols = sorted(
[c for c in df.columns if c == "Force" or c.startswith("TFDMap")],
key=target_sort_key,
)
if not target_cols:
raise RuntimeError("No target columns found. Expected Force and/or TFDMap* columns.")
all_cases = sorted(df["Case"].unique())
train_pool = [int(c) for c in all_cases if c <= n_train_case]
......@@ -757,6 +772,8 @@ def main():
print("Predict (test) case:", predict_case)
print("Train pool cases (for CV):", train_pool)
print("n_cases total:", len(all_cases), "| train_pool:", len(train_pool))
print("Feature columns:", feature_cols)
print("Target columns:", target_cols)
print("torch.compile CV:", "ON" if (device == "cuda" and compile_in_cv) else "OFF")
print(
"torch.compile FINAL:",
......@@ -770,269 +787,266 @@ def main():
# Cache cases needed (train_pool for CV, plus test case for prediction)
cases_to_cache_all = sorted(set(train_pool + [int(predict_case)]))
# ---- CV Tuning ----
if optuna is None:
raise ImportError(
"Optuna is required for Bayesian CV search. Install it with `pip install optuna`."
)
print(f"Optuna search: TPE sampler | n_trials={n_trials}")
optuna.logging.set_verbosity(optuna.logging.WARNING)
sampler = optuna.samplers.TPESampler(seed=SEED)
pruner = optuna.pruners.MedianPruner(
n_startup_trials=min(5, n_trials), n_warmup_steps=2
)
study = optuna.create_study(direction="minimize", sampler=sampler, pruner=pruner)
df_test = df[df["Case"] == predict_case].copy()
df_test_pred = df_test.copy()
rmse_by_target = {}
for target_col in target_cols:
print("\n============================================================")
print(f"TARGET: {target_col}")
print(f"Optuna search: TPE sampler | n_trials={n_trials}")
sampler = optuna.samplers.TPESampler(seed=SEED)
pruner = optuna.pruners.MedianPruner(
n_startup_trials=min(5, n_trials), n_warmup_steps=2
)
study = optuna.create_study(
direction="minimize", sampler=sampler, pruner=pruner
)
def objective(trial) -> float:
params = suggest_params(trial, device=device)
ws = int(params["window_size"])
params_model = dict(params)
params_model.pop("window_size", None)
cache_ws = get_cache_for_window_size(
df=df,
cases_to_cache=cases_to_cache_all,
feature_cols=feature_cols,
target_col=target_col,
window_size=ws,
)
feature_dim = None
for c in cases_to_cache_all:
x_c = cache_ws[int(c)][0]
if x_c.shape[0] > 0:
feature_dim = x_c.shape[-1]
break
if feature_dim is None:
raise RuntimeError(
"All cases have 0 windows for this window_size. Lower window_size."
)
mean_rmse, std_rmse, fold_rmses = cv_score_for_params_cached(
cache_ws=cache_ws,
feature_dim=feature_dim,
folds=folds,
params_model=params_model,
max_epochs=max_epachs_tune,
patience=patience_tune,
grad_clip=1.0,
device=device,
compile_model=(compile_in_cv and device == "cuda"),
trial=trial,
)
if fold_rmses is None:
print(f"[Trial {trial.number + 1:02d}] OOM -> pruning trial | {params}")
raise optuna.TrialPruned()
fold_str = ", ".join([f"{v:.3f}" for v in fold_rmses.tolist()])
print(
f"[{target_col}][Trial {trial.number + 1:02d}]"
f" CV mean RMSE={mean_rmse:.4f} | std={std_rmse:.4f} | folds=[{fold_str}]"
)
print(f" params: {params}")
def objective(trial) -> float:
params = suggest_params(trial, device=device)
trial.set_user_attr("std_rmse", float(std_rmse))
trial.set_user_attr("params_full", params)
# Separate window_size from model params (cached funcs must NOT receive window_size)
ws = int(params["window_size"])
params_model = dict(params)
params_model.pop("window_size", None)
gc.collect()
if torch.cuda.is_available():
torch.cuda.empty_cache()
return float(mean_rmse)
study.optimize(objective, n_trials=n_trials, show_progress_bar=False)
completed_trials = [
trial
for trial in study.trials
if trial.state == optuna.trial.TrialState.COMPLETE
]
if not completed_trials:
raise RuntimeError(
f"No valid Optuna trial found for target {target_col}. "
"Try lowering window sizes or check your data."
)
# Window cache for this ws (build once, reuse across folds)
cache_ws = get_cache_for_window_size(
best_trial = min(
completed_trials,
key=lambda trial: (
float(trial.value),
float(trial.user_attrs.get("std_rmse", np.inf)),
),
)
best = {
"mean": float(best_trial.value),
"std": float(best_trial.user_attrs.get("std_rmse", np.inf)),
"params": best_trial.user_attrs.get("params_full"),
}
print("\n====================")
print(f"BEST (OPTUNA CV TUNING) - {target_col}")
print("CV mean RMSE:", best["mean"])
print("CV std RMSE :", best["std"])
print("params:", best["params"])
print("====================\n")
if best["params"] is None:
raise RuntimeError(
f"No valid trial found for target {target_col}. "
"Try lowering window sizes or check your data."
)
best_params = cast(dict, best["params"])
ws_best = int(best_params["window_size"])
best_params_model = dict(best_params)
best_params_model.pop("window_size", None)
cache_ws_best = get_cache_for_window_size(
df=df,
cases_to_cache=cases_to_cache_all,
feature_cols=feature_cols,
target_col=target_col,
window_size=ws,
window_size=ws_best,
)
feature_dim = None
for c in cases_to_cache_all:
x_c = cache_ws[int(c)][0]
x_c = cache_ws_best[int(c)][0]
if x_c.shape[0] > 0:
feature_dim = x_c.shape[-1]
break
if feature_dim is None:
raise RuntimeError(
"All cases have 0 windows for this window_size. Lower window_size."
f"All cases have 0 windows for best window_size in target {target_col}."
)
mean_rmse, std_rmse, fold_rmses = cv_score_for_params_cached(
cache_ws=cache_ws,
print("============================================================")
print(f"FINAL TRAIN (100% train_pool, no val) - {target_col}")
print("Train cases:", train_pool)
print("Using best parameters:", best_params)
print("============================================================")
final_state, final_scalers = train_final_full_trainpool_cached(
cache_ws=cache_ws_best,
feature_dim=feature_dim,
folds=folds,
params_model=params_model,
max_epochs=max_epachs_tune,
patience=patience_tune,
train_cases=train_pool,
max_epochs=max_epachs_final,
grad_clip=1.0,
device=device,
compile_model=(compile_in_cv and device == "cuda"),
trial=trial,
compile_model=(compile_in_final and device == "cuda"),
**best_params_model,
)
if fold_rmses is None:
print(f"[Trial {trial.number + 1:02d}] OOM -> pruning trial | {params}")
raise optuna.TrialPruned()
fold_str = ", ".join([f"{v:.3f}" for v in fold_rmses.tolist()])
print(
f"[Trial {trial.number + 1:02d}]"
f" CV mean RMSE={mean_rmse:.4f} | std={std_rmse:.4f} | folds=[{fold_str}]"
)
print(f" params: {params}")
trial.set_user_attr("std_rmse", float(std_rmse))
trial.set_user_attr("params_full", params)
gc.collect()
if torch.cuda.is_available():
torch.cuda.empty_cache()
return float(mean_rmse)
study.optimize(objective, n_trials=n_trials, show_progress_bar=False)
completed_trials = [
trial
for trial in study.trials
if trial.state == optuna.trial.TrialState.COMPLETE
]
if not completed_trials:
raise RuntimeError(
"No valid Optuna trial found. Try lowering window sizes or check your data."
)
best_trial = min(
completed_trials,
key=lambda trial: (
float(trial.value),
float(trial.user_attrs.get("std_rmse", np.inf)),
),
)
best = {
"mean": float(best_trial.value),
"std": float(best_trial.user_attrs.get("std_rmse", np.inf)),
"params": best_trial.user_attrs.get("params_full"),
}
print("\n====================")
print("BEST (OPTUNA CV TUNING)")
print("CV mean RMSE:", best["mean"])
print("CV std RMSE :", best["std"])
print("params:", best["params"])
print("====================\n")
scaler_x, scaler_y = final_scalers
model_path = os.path.join(model_dir, f"B{b_val}_H{h_val}_{target_col}.joblib")
bundle = {
"framework": "pytorch",
"model_class": "LSTMRegressor",
"state_dict": final_state,
"params": {**best_params_model, "window_size": ws_best},
"feature_cols": feature_cols,
"target_col": target_col,
"scaler_x": scaler_x,
"scaler_y": scaler_y,
"meta": {
"W": w_val,
"B": b_val,
"H": h_val,
"n_train_case": n_train_case,
"predict_case": predict_case,
"seed": SEED,
"device_trained": device,
},
}
joblib.dump(bundle, model_path, compress=3)
print(f"Saved best model bundle to: {model_path}")
x_test, y_test, test_indices = cache_ws_best[int(predict_case)]
if x_test.shape[0] == 0:
raise RuntimeError(
f"Case {predict_case} has fewer points ({len(df_test)}) than "
f"window_size ({ws_best}) for target {target_col}."
)
if best["params"] is None:
raise RuntimeError(
"No valid trial found. Try lowering window sizes or check your data."
x_test_scaled = (
scaler_x.transform(x_test.reshape(-1, feature_dim))
.reshape(x_test.shape)
.astype(np.float32, copy=False)
)
best_params = cast(dict, best["params"])
ws_best = int(best_params["window_size"])
best_params_model = dict(best_params)
best_params_model.pop("window_size", None)
# Ensure cache exists for best ws
cache_ws_best = get_cache_for_window_size(
df=df,
cases_to_cache=cases_to_cache_all,
feature_cols=feature_cols,
target_col=target_col,
window_size=ws_best,
)
# feature_dim again
feature_dim = None
for c in cases_to_cache_all:
x_c = cache_ws_best[int(c)][0]
if x_c.shape[0] > 0:
feature_dim = x_c.shape[-1]
break
if feature_dim is None:
raise RuntimeError(
"All cases have 0 windows for best window_size. Lower window_size."
# ---- Final train on 100% train_pool (NO VAL) ----
# Retrain once on the whole training pool using the best hyperparameters
# found during the search; no validation split is held out here.
print("============================================================")
print("FINAL TRAIN (100% train_pool, no val)")
print("Train cases:", train_pool)
print("Using best parameters:", best_params)
print("============================================================")
final_state, final_scalers = train_final_full_trainpool_cached(
    cache_ws=cache_ws_best,
    feature_dim=feature_dim,
    train_cases=train_pool,
    # NOTE(review): "max_epachs_final" looks like a typo for "max_epochs_final";
    # the name is defined upstream of this view -- confirm before renaming.
    max_epochs=max_epachs_final,
    grad_clip=1.0,
    device=device,
    compile_model=(compile_in_final and device == "cuda"),
    **best_params_model,
)
scaler_x, scaler_y = final_scalers

# -------------------------
# Save best model bundle (joblib)
# -------------------------
# Everything needed to reload and run the model elsewhere: weights, the
# hyperparameters (including window size), feature/target columns, fitted
# scalers, and run metadata.
bundle = {
    "framework": "pytorch",
    "model_class": "LSTMRegressor",
    "state_dict": final_state,  # already CPU tensors
    "params": {**best_params_model, "window_size": ws_best},
    "feature_cols": feature_cols,
    "target_col": target_col,
    "scaler_x": scaler_x,
    "scaler_y": scaler_y,
    "meta": {
        "W": w_val,
        "B": b_val,
        "H": h_val,
        "n_train_case": n_train_case,
        "predict_case": predict_case,
        "seed": SEED,
        "device_trained": device,
    },
}
joblib.dump(bundle, model_path, compress=3)
print(f"Saved best model bundle to: {model_path}")

# ---- Predict predict_case ----
df_test = df[df["Case"] == predict_case].copy()
x_test, y_test, test_indices = cache_ws_best[int(predict_case)]
if x_test.shape[0] == 0:
    raise RuntimeError(
        f"Case {predict_case} has fewer points ({len(df_test)}) than window_size ({ws_best})."
    )

# Scale the test windows with the scaler fitted on the training pool.
x_test_scaled = (
    scaler_x.transform(x_test.reshape(-1, feature_dim))
    .reshape(x_test.shape)
    .astype(np.float32, copy=False)
)
# Targets are unused at inference time; zeros keep the Dataset API happy.
test_ds = WindowDataset(
    x_test_scaled, np.zeros((x_test_scaled.shape[0],), dtype=np.float32)
)
test_loader = make_loader(
    test_ds,
    batch_size=best_params_model["batch_size"],
    shuffle=False,
    device=device,
    use_workers=False,
)

# Rebuild the model skeleton, then load the final trained weights.
# compile not necessary for predict, but harmless if final was compiled;
# keep OFF to avoid overhead.
final_model = LSTMRegressor(
    input_dim=feature_dim,
    hidden_dim=best_params_model["hidden_dim"],
    dense_dim=best_params_model["dense_dim"],
    num_layers=best_params_model["num_layers"],
    dropout=best_params_model["dropout"],
).to(device)
final_model.load_state_dict(strip_compile_prefix(final_state))
final_model.eval()

use_amp = device == "cuda"
preds = []
with torch.no_grad():
    for xb, _ in test_loader:
        xb = xb.to(device, non_blocking=True)
        if use_amp:
            try:
                # Newer API first; fall back to the legacy namespace on
                # older torch versions.
                with torch.amp.autocast("cuda", enabled=True):
                    pb = final_model(xb)
            except Exception:
                with torch.cuda.amp.autocast(enabled=True):
                    pb = final_model(xb)
        else:
            pb = final_model(xb)
        preds.append(pb.detach().float().cpu().numpy())

y_pred_scaled = np.concatenate(preds, axis=0).ravel()
y_pred = scaler_y.inverse_transform(y_pred_scaled.reshape(-1, 1)).ravel()

# Align window-level predictions back onto the point-level DataFrame:
# each window's prediction lands on the row index it was built for, the
# rest stay NaN. One prediction column per target keeps multiple targets
# side by side.
pred_col = target_prediction_col(target_col)
df_test_pred = df_test.copy()
df_test_pred[pred_col] = np.nan
for idx, pred in zip(test_indices, y_pred):
    df_test_pred.loc[idx, pred_col] = pred

test_rmse = rmse(y_test, y_pred)
rmse_by_target[target_col] = test_rmse
print(f"Test RMSE on Case {predict_case} for {target_col} = {test_rmse:.4f}")

# ---- Save results ----
out_folder = f"../../reports/hysteretic_curves/{w_val}W/H{h_val}_B{b_val}"
os.makedirs(out_folder, exist_ok=True)
out_file = os.path.join(out_folder, f"case_{predict_case}_with_rnn_preds.csv")
df_test_pred.to_csv(out_file, index=False)
print(f"\nSaved predictions to: {out_file}")

# Release model/loader references and GPU cache before the next target.
del final_model, test_loader, test_ds
gc.collect()
if torch.cuda.is_available():
    torch.cuda.empty_cache()

print("Test RMSE summary:", rmse_by_target)
if __name__ == "__main__":
......
#!/bin/bash
# Run the hysteretic-curve prediction for every (W, B) geometry combination.
for w in 2 3; do
    for b in 29 34; do
        python predict_hysteretic_curves.py --W "$w" --B "$b" --n-trials 25
    done
done
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.